code stringlengths 5-1M | repo_name stringlengths 5-109 | path stringlengths 6-208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5-1M |
---|---|---|---|---|---|
package org.machine.engine.graph.commands.workflows
import org.scalatest._
import org.scalatest.mock._
import org.machine.engine.TestUtils
import java.io.File
import java.io.IOException
import org.neo4j.io.fs.FileUtils
import com.typesafe.config._
import org.neo4j.graphdb.GraphDatabaseService
import org.machine.engine.Engine
import org.machine.engine.graph.Neo4JHelper
import org.machine.engine.graph.commands.{CommandScope, CommandScopes, GraphCommandOptions}
import org.machine.engine.graph.nodes.{PropertyDefinition, PropertyDefinitions}
import scala.util.{Either, Left, Right}
import org.machine.engine.viz.GraphVizHelper
class ElementDefintionWorkflowFunctionsSpec extends FunSpecLike
with Matchers
with BeforeAndAfterAll{
import ElementDefintionWorkflowFunctions._
import TestUtils._
import Neo4JHelper._
private val config = ConfigFactory.load()
var engine:Engine = null
val options = GraphCommandOptions()
private var activeUserId:String = null
override def beforeAll(){
engine = Engine.getInstance
perge
activeUserId = Engine.getInstance
.createUser
.withFirstName("Bob")
.withLastName("Grey")
.withEmailAddress("[email protected]")
.withUserName("pennywise")
.withUserPassword("You'll float too...")
.end
}
override def afterAll(){
perge
}
describe("Element Defintion Workflow Functions"){
describe("generate mid if not present"){
it("should be defined at when workflow status is ok and options does not contains mid"){
options.reset
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
mIdGuard.isDefinedAt(capsule) should be(true)
}
it("should not be defined at when workflow status is error"){
options.reset
val capsule = (null, null, options, Left(WorkflowStatuses.Error))
mIdGuard.isDefinedAt(capsule) should be(false)
}
it("should not be defined at with mid is provided"){
options.reset
options.addOption("mid","123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
mIdGuard.isDefinedAt(capsule) should be(false)
}
it("should apply mid when none exists"){
options.reset
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
mIdGuard(capsule)
options.option[String]("mid") should not be null
}
}
describe("verifying required command options"){
it("should be defined at when workflow is ok"){
val capsule = (null, null, null, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions.isDefinedAt(capsule) should equal(true)
}
it("should require mid"){
options.reset
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions(capsule)._4 should equal(Right(MissingMidErrorMsg))
}
it("should require name"){
options.reset
options.addOption("mid","123")
options.addOption("activeUserId", "abc123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions(capsule)._4 should equal(Right(MissingNameErrorMsg))
}
it("should require description"){
options.reset
options.addOption("mid","123")
options.addOption("name","abc")
options.addOption("activeUserId", "abc123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions(capsule)._4 should equal(Right(MissingDescErrorMsg))
}
it("should require creationTime"){
options.reset
options.addOption("mid","123")
options.addOption("name","abc")
options.addOption("description","asdfas")
options.addOption("activeUserId", "abc123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions(capsule)._4 should equal(Right(MissingCreationTimeErrorMsg))
}
it ("should set the status to OK when all required options are provided"){
options.reset
options.addOption("mid","123")
options.addOption("name","abc")
options.addOption("description","asdfas")
options.addOption("creationTime", Neo4JHelper.time)
options.addOption("activeUserId", "abc123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
verifyRequiredCmdOptions(capsule)._4 should equal(Left(WorkflowStatuses.OK))
}
}
describe("verifing uniqueness"){
it("should be defined at when workflow is ok"){
val capsule = (null, null, null, Left(WorkflowStatuses.OK))
verifyUniqueness.isDefinedAt(capsule) should equal(true)
}
it("should not be defined at when workflow is error"){
val capsule = (null, null, null, Right("error msg"))
verifyUniqueness.isDefinedAt(capsule) should equal(false)
}
/*
Suppose we were trying to create an element definition named "Gem" in a dataset.
*/
it("should pass status of OK when no element definition exists in scope of dataset"){
options.reset
val dsId = engine.forUser(activeUserId).createDataSet("Dataset A", "A dataset")
options.addOption("dsId", dsId)
options.addOption("name", "Gem") //The Element Definitions name...
options.addOption("activeUserId", activeUserId)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = verifyUniqueness(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
}
it("should pass error message when an element definition exists in scope of dataset"){
options.reset
val dsId = engine.forUser(activeUserId).createDataSet("Dataset B", "Another dataset")
engine.forUser(activeUserId)
.onDataSet(dsId)
.defineElement("Gem", "A precious stone.")
.withProperty("color", "String", "The color of light the gem reflects.")
.end
options.addOption("dsId", dsId)
options.addOption("name", "Gem")
options.addOption("activeUserId", activeUserId)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = verifyUniqueness(capsule)
processed._4 should equal(Right("Element Definition already exists with the provided name."))
}
}
describe("Create Element Definition Statement"){
it ("should be defined at when status is Left(OK)"){
val capsule = (null, null, null, Left(WorkflowStatuses.OK))
createElementDefinitionStmt.isDefinedAt(capsule) should equal(true)
}
it ("should not be defined at when status is Right(String)"){
val capsule = (null, null, null, Right("error msg"))
createElementDefinitionStmt.isDefinedAt(capsule) should equal(false)
}
it ("should generate a create statement with dsId"){
options.reset
options.addOption("dsId", "123")
val capsule = (null, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = createElementDefinitionStmt(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createElementDefinitionStmt") should equal(true)
val expected = normalize("""
|match (ss:data_set) where ss.mid = {dsId}
|create (ss)-[:exists_in]->(ed:element_definition {
| mid:{mid},
| name:{name},
| description:{description},
| creation_time:{creationTime}
|})
|return ed.mid as edId
""".stripMargin)
val actual = normalize(processed._3.option[String]("createElementDefinitionStmt"))
actual should equal(expected)
}
it ("should generate a create statement with dsName"){
options.reset
options.addOption("dsName", "asdf")
val capsule = (null, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = createElementDefinitionStmt(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createElementDefinitionStmt") should equal(true)
val expected = normalize("""
|match (ss:data_set) where ss.name = {dsName}
|create (ss)-[:exists_in]->(ed:element_definition {
| mid:{mid},
| name:{name},
| description:{description},
| creation_time:{creationTime}
|})
|return ed.mid as edId
""".stripMargin)
val actual = normalize(processed._3.option[String]("createElementDefinitionStmt"))
actual should equal(expected)
}
it ("should generate a create statement in user space scope"){
options.reset
options.addOption("activeUserId", activeUserId)
val capsule = (null, CommandScopes.UserSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = createElementDefinitionStmt(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createElementDefinitionStmt") should equal(true)
val expected = normalize("""
|match (ss:user) where ss.mid={activeUserId}
|create (ss)-[:exists_in]->(ed:element_definition {
| mid:{mid},
| name:{name},
| description:{description},
| creation_time:{creationTime}
|})
|return ed.mid as edId
""".stripMargin)
val actual = normalize(processed._3.option[String]("createElementDefinitionStmt"))
actual should equal(expected)
}
it ("should generate a create statement in system space scope"){
options.reset
val capsule = (null, CommandScopes.SystemSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = createElementDefinitionStmt(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createElementDefinitionStmt") should equal(true)
val expected = normalize("""
|match (ss:internal_system_space)
|create (ss)-[:exists_in]->(ed:element_definition {
| mid:{mid},
| name:{name},
| description:{description},
| creation_time:{creationTime}
|})
|return ed.mid as edId
""".stripMargin)
val actual = normalize(processed._3.option[String]("createElementDefinitionStmt"))
actual should equal(expected)
}
it ("should throw an exception when dataset scope doesn't provide dsId or dsName"){
options.reset
val capsule = (null, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = createElementDefinitionStmt(capsule)
processed._4 should equal(Right(DataSetFilterRequiredErrorMsg))
}
}
describe("Create Element Definition"){
it ("should be defined at when status is Left(OK) and options contains createElementDefinitionStmt"){
options.reset
options.addOption("createElementDefinitionStmt", "abc")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createElementDefinition.isDefinedAt(capsule) should equal(true)
}
it ("should not be defined at when status is Left(OK) and options does not contain createElementDefinitionStmt"){
options.reset
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createElementDefinition.isDefinedAt(capsule) should equal(false)
}
it ("should not be defined at when status is Right(String)"){
val capsule = (null, null, options, Right("error msg"))
createElementDefinition.isDefinedAt(capsule) should equal(false)
}
val minimalCreateWF = Function.chain(Seq(createElementDefinitionStmt, createElementDefinition))
it ("should create the new element definition node and associate it with a dataset by dsId"){
options.reset
val dsId = engine.createDataSet("Dataset C", "Yet Another dataset")
options.addOption("dsId", dsId)
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createdElementDefinitionId") should equal(true)
val mappedEdIds = findEdOnDSById(dsId, edId)
mappedEdIds.length should equal(1)
mappedEdIds.head should equal(edId)
}
it ("should create the new element definition node and associate it with a dataset by dsName"){
options.reset
val DsName = "Dataset D"
val dsId = engine.createDataSet(DsName, "Another dataset")
options.addOption("dsName", DsName)
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box1")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createdElementDefinitionId") should equal(true)
val mappedEdIds = findEdOnDSById(dsId, edId)
mappedEdIds.length should equal(1)
mappedEdIds.head should equal(edId)
}
it ("should create the new element definition node and associate it with a user"){
options.reset
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box2")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
options.addOption("activeUserId", activeUserId)
val capsule = (engine.database, CommandScopes.UserSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createdElementDefinitionId") should equal(true)
val mappedEdIds = findEdInUsById(edId)
mappedEdIds.length should equal(1)
mappedEdIds.head should equal(edId)
}
it ("should create the new element definition node and associate it with the system space"){
options.reset
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box3")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
val capsule = (engine.database, CommandScopes.SystemSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains("createdElementDefinitionId") should equal(true)
val mappedEdIds = findEdInSsById(edId)
mappedEdIds.length should equal(1)
mappedEdIds.head should equal(edId)
}
}
val minimalCreateWithPropsWF = Function.chain(Seq(createElementDefinitionStmt, createElementDefinition, createPropertyDefinitions))
describe("Create Property Definitions"){
it ("should be defined at when status is Left(OK), options contains createdElementDefinitionId & properties"){
options.reset
options.addOption("createdElementDefinitionId", "123")
val props = new PropertyDefinitions()
props.addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createPropertyDefinitions.isDefinedAt(capsule) should equal(true)
}
it ("should not be defined at when properties are empty"){
options.reset
options.addOption("createdElementDefinitionId", "123")
val props = new PropertyDefinitions()
options.addOption("properties", props)
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createPropertyDefinitions.isDefinedAt(capsule) should equal(false)
}
it ("should not be defined at when properties are not provided"){
options.reset
options.addOption("createdElementDefinitionId", "123")
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createPropertyDefinitions.isDefinedAt(capsule) should equal(false)
}
it ("should not be defined at when createdElementDefinitionId not provided"){
options.reset
val props = new PropertyDefinitions()
props.addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (null, null, options, Left(WorkflowStatuses.OK))
createPropertyDefinitions.isDefinedAt(capsule) should equal(false)
}
it ("should not be defined at when status is Right(msg)"){
options.reset
options.addOption("createdElementDefinitionId", "123")
val props = new PropertyDefinitions()
props.addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (null, null, options, Right("woops..."))
createPropertyDefinitions.isDefinedAt(capsule) should equal(false)
}
it("should enforce unique element definitions in system space"){
options.reset
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
options.addOption("activeUserId", activeUserId)
val props = new PropertyDefinitions().addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (engine.database, CommandScopes.SystemSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = workflow(capsule)
processed._4 should equal(Right("Internal Error: Element Definition could not be created."))
}
/*
I believe this demonstrates a bug:
the element definition and its properties are not being created.
*/
it("should create properties on element defintion in a dataset by dsName"){
options.reset
val dsName = "Dataset F"
val dsId = engine.createDataSet(dsName, "Dataset. My Dataset. Oh how I've missed you.")
options.addOption("dsName", dsName)
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box4")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
val props = new PropertyDefinitions().addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWithPropsWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains(CreatedElementDefinitionId) should equal(true)
val ed = engine.onDataSet(dsId).findElementDefinitionById(edId);
ed.properties.length should equal(1)
}
it("should create properties on element defintion in a dataset by dsId"){
options.reset
val dsName = "Dataset G"
val dsId = engine.createDataSet(dsName, "~~~")
options.addOption("dsId", dsId)
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box5")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
val props = new PropertyDefinitions().addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (engine.database, CommandScopes.DataSetScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWithPropsWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains(CreatedElementDefinitionId) should equal(true)
val ed = engine.onDataSet(dsId).findElementDefinitionById(edId);
ed.properties.length should equal(1)
}
//Note: This test does not attempt to enforce uniqueness.
it("should create properties on element defintion in user space"){
options.reset
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "box6")
options.addOption("description", "A container with equal sized dimensions on all axis.")
options.addOption("creationTime", time)
options.addOption("activeUserId", activeUserId)
val props = new PropertyDefinitions().addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (engine.database, CommandScopes.UserSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = minimalCreateWithPropsWF(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains(CreatedElementDefinitionId) should equal(true)
val ed = engine.inUserSpace().findElementDefinitionById(edId);
ed.properties.length should equal(1)
}
it("should create properties on element defintion in system space"){
options.reset
val edId = uuid()
options.addOption("mid", edId)
options.addOption("name", "Happy Happy, Joy Joy")
options.addOption("description", "Silly saying from a childhood cartoon.")
options.addOption("creationTime", time)
options.addOption("activeUserId", activeUserId)
val props = new PropertyDefinitions().addProperty(PropertyDefinition(uuid, "pA", "String", "A property"))
options.addOption("properties", props)
val capsule = (engine.database, CommandScopes.SystemSpaceScope, options, Left(WorkflowStatuses.OK))
val processed = workflow(capsule)
processed._4 should equal(Left(WorkflowStatuses.OK))
processed._3.contains(CreatedElementDefinitionId) should equal(true)
val ed = engine.inSystemSpace().findElementDefinitionById(edId);
ed.properties.length should equal(1)
}
}
}
def findEdOnDSById(dsId: String, edId: String):Seq[String] = {
val stmt = """
|match (ds:data_set {mid:{dsId}})-[:exists_in]->(ed:element_definition {mid:{mid}})
|return ed.mid as edId
""".stripMargin
val validationOptions = GraphCommandOptions().addOption("dsId", dsId)
.addOption("mid", edId)
val mappedEdIds:Array[String] = query[String](engine.database,
stmt,
validationOptions.toJavaMap,
elementDefIdResultsProcessor)
return mappedEdIds.toList
}
def findEdInUsById(edId: String):Seq[String] = {
val stmt = """
|match (u:user {mid:{activeUserId}})-[:exists_in]->(ed:element_definition {mid:{mid}})
|return ed.mid as edId
""".stripMargin
val validationOptions = GraphCommandOptions()
.addOption("mid", edId)
.addOption("activeUserId", activeUserId)
val mappedEdIds:Array[String] = query[String](engine.database,
stmt,
validationOptions.toJavaMap,
elementDefIdResultsProcessor)
return mappedEdIds.toList
}
def findEdInSsById(edId: String):Seq[String] = {
val stmt = """
|match (ds:internal_system_space)-[:exists_in]->(ed:element_definition {mid:{mid}})
|return ed.mid as edId
""".stripMargin
val validationOptions = GraphCommandOptions().addOption("mid", edId)
val mappedEdIds:Array[String] = query[String](engine.database,
stmt,
validationOptions.toJavaMap,
elementDefIdResultsProcessor)
return mappedEdIds.toList
}
}
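// Editor's sketch (not part of the original spec): the shape of the workflow pattern the
// tests above exercise, reduced to a self-contained example. Each step runs only while the
// status is still OK (Left, as in the spec), an error (Right) short-circuits everything after
// it, and the steps are composed with Function.chain just like minimalCreateWF and
// minimalCreateWithPropsWF. The capsule type below is a simplified stand-in for the real
// (database, scope, options, status) tuple.
object WorkflowChainSketch {
  type Capsule = (Map[String, String], Either[String, String]) // (options, Left("OK") | Right(error))

  // Lifts an option check into a step that only runs while the status is still OK.
  def whenOk(check: Map[String, String] => Either[String, String]): Capsule => Capsule = {
    case (opts, Left(_)) => (opts, check(opts)) // still OK: run the check
    case errored         => errored             // already failed: pass the error through
  }

  val requireName        = whenOk(o => if (o.contains("name")) Left("OK") else Right("name is required"))
  val requireDescription = whenOk(o => if (o.contains("description")) Left("OK") else Right("description is required"))

  val workflow: Capsule => Capsule = Function.chain(Seq(requireName, requireDescription))

  def main(args: Array[String]): Unit = {
    // Missing description, so the second step reports the error and nothing after it runs.
    println(workflow((Map("name" -> "Gem"), Left("OK"))))
    // => (Map(name -> Gem),Right(description is required))
  }
}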
| sholloway/graph-engine | src/test/scala/org/machine/engine/graph/commands/workflows/ElementDefintionWorkflowFunctionsSpec.scala | Scala | mit | 24,510 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.data
import org.apache.accumulo.core.client._
import org.apache.accumulo.core.client.admin.TableOperations
import org.apache.accumulo.core.security.Authorizations
import org.geotools.data.Query
import org.locationtech.geomesa.accumulo._
import org.locationtech.geomesa.accumulo.audit.AccumuloAuditService
import org.locationtech.geomesa.accumulo.data.stats._
import org.locationtech.geomesa.accumulo.index._
import org.locationtech.geomesa.accumulo.iterators.ProjectVersionIterator
import org.locationtech.geomesa.accumulo.security.AccumuloAuthsProvider
import org.locationtech.geomesa.accumulo.util.ZookeeperLocking
import org.locationtech.geomesa.index.api.GeoMesaFeatureIndex
import org.locationtech.geomesa.index.geotools.GeoMesaDataStoreFactory.GeoMesaDataStoreConfig
import org.locationtech.geomesa.index.geotools.{GeoMesaFeatureCollection, GeoMesaFeatureSource}
import org.locationtech.geomesa.index.metadata.{GeoMesaMetadata, MetadataStringSerializer}
import org.locationtech.geomesa.index.utils.Explainer
import org.locationtech.geomesa.utils.audit.{AuditProvider, AuditReader, AuditWriter}
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.index.IndexMode.IndexMode
import org.locationtech.geomesa.utils.stats.Stat
import org.opengis.feature.`type`.Name
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import scala.collection.JavaConversions._
import scala.util.control.NonFatal
/**
* This class handles DataStores which are stored in Accumulo Tables. To be clear, one table may
* contain multiple features addressed by their featureName.
*
* @param connector Accumulo connector
* @param config configuration values
*/
class AccumuloDataStore(val connector: Connector, override val config: AccumuloDataStoreConfig)
extends AccumuloDataStoreType(config) with ZookeeperLocking {
override val metadata = new AccumuloBackedMetadata(connector, config.catalog, MetadataStringSerializer)
private val oldMetadata = new SingleRowAccumuloMetadata(metadata)
override def manager: AccumuloIndexManagerType = AccumuloFeatureIndex
private val statsTable = GeoMesaFeatureIndex.formatSharedTableName(config.catalog, "stats")
override val stats = new AccumuloGeoMesaStats(this, statsTable, config.generateStats)
// some convenience operations
def auths: Authorizations = config.authProvider.getAuthorizations
val tableOps: TableOperations = connector.tableOperations()
override def delete(): Unit = {
// note: don't delete the query audit table
val auditTables = config.audit.map(_._1.asInstanceOf[AccumuloAuditService].table).toSeq
val tables = getTypeNames.flatMap(getAllTableNames).distinct.filterNot(auditTables.contains)
tables.par.filter(tableOps.exists).foreach(tableOps.delete)
}
override def getAllTableNames(typeName: String): Seq[String] = {
val others = Seq(statsTable) ++ config.audit.map(_._1.asInstanceOf[AccumuloAuditService].table).toSeq
super.getAllTableNames(typeName) ++ others
}
// data store hooks
override protected def createFeatureWriterAppend(sft: SimpleFeatureType,
indices: Option[Seq[AccumuloFeatureIndexType]]): AccumuloFeatureWriterType =
new AccumuloAppendFeatureWriter(sft, this, indices, config.defaultVisibilities)
override protected def createFeatureWriterModify(sft: SimpleFeatureType,
indices: Option[Seq[AccumuloFeatureIndexType]],
filter: Filter): AccumuloFeatureWriterType =
new AccumuloModifyFeatureWriter(sft, this, indices, config.defaultVisibilities, filter)
override protected def createFeatureCollection(query: Query, source: GeoMesaFeatureSource): GeoMesaFeatureCollection =
new AccumuloFeatureCollection(source, query)
override protected def createQueryPlanner(): AccumuloQueryPlannerType = new AccumuloQueryPlanner(this)
override protected def getIteratorVersion: String = {
val scanner = connector.createScanner(config.catalog, new Authorizations())
try {
ProjectVersionIterator.scanProjectVersion(scanner)
} catch {
case NonFatal(e) => "unavailable"
} finally {
scanner.close()
}
}
override def getQueryPlan(query: Query,
index: Option[AccumuloFeatureIndexType],
explainer: Explainer): Seq[AccumuloQueryPlan] =
super.getQueryPlan(query, index, explainer).asInstanceOf[Seq[AccumuloQueryPlan]]
// extensions and back-compatibility checks for core data store methods
override def getTypeNames: Array[String] = super.getTypeNames ++ oldMetadata.getFeatureTypes
override def createSchema(sft: SimpleFeatureType): Unit = {
// check for old enabled indices and re-map them
SimpleFeatureTypes.Configs.ENABLED_INDEX_OPTS.find(sft.getUserData.containsKey).foreach { key =>
val indices = sft.getUserData.remove(key).toString.split(",").map(_.trim.toLowerCase)
// check for old attribute index name
val enabled = if (indices.contains("attr_idx")) {
indices.updated(indices.indexOf("attr_idx"), AttributeIndex.name)
} else {
indices
}
sft.getUserData.put(SimpleFeatureTypes.Configs.ENABLED_INDICES, enabled.mkString(","))
}
super.createSchema(sft)
val lock = acquireCatalogLock()
try {
// configure the stats combining iterator on the table for this sft
stats.configureStatCombiner(connector, sft)
} finally {
lock.release()
}
}
override def getSchema(typeName: String): SimpleFeatureType = {
import GeoMesaMetadata.{ATTRIBUTES_KEY, SCHEMA_ID_KEY, STATS_GENERATION_KEY, VERSION_KEY}
import SimpleFeatureTypes.Configs.{ENABLED_INDEX_OPTS, ENABLED_INDICES}
import SimpleFeatureTypes.InternalConfigs.{INDEX_VERSIONS, SCHEMA_VERSION_KEY}
var sft = super.getSchema(typeName)
if (sft == null) {
// check for old-style metadata and re-write it if necessary
if (oldMetadata.getFeatureTypes.contains(typeName)) {
val lock = acquireCatalogLock()
try {
if (oldMetadata.getFeatureTypes.contains(typeName)) {
oldMetadata.migrate(typeName)
new SingleRowAccumuloMetadata[Stat](stats.metadata).migrate(typeName)
}
} finally {
lock.release()
}
sft = super.getSchema(typeName)
}
}
if (sft != null) {
// back compatible check for index versions
if (!sft.getUserData.contains(INDEX_VERSIONS)) {
// back compatible check if user data wasn't encoded with the sft
if (!sft.getUserData.containsKey(SCHEMA_VERSION_KEY)) {
metadata.read(typeName, "dtgfield").foreach(sft.setDtgField)
sft.getUserData.put(SCHEMA_VERSION_KEY, metadata.readRequired(typeName, VERSION_KEY))
// If no data is written, we default to 'false' in order to support old tables.
if (metadata.read(typeName, "tables.sharing").exists(_.toBoolean)) {
sft.setTableSharing(true)
// use schema id if available or fall back to old type name for backwards compatibility
val prefix = metadata.read(typeName, SCHEMA_ID_KEY).getOrElse(s"${sft.getTypeName}~")
sft.setTableSharingPrefix(prefix)
} else {
sft.setTableSharing(false)
sft.setTableSharingPrefix("")
}
ENABLED_INDEX_OPTS.foreach { i =>
metadata.read(typeName, i).foreach(e => sft.getUserData.put(ENABLED_INDICES, e))
}
}
// set the enabled indices
sft.setIndices(AccumuloDataStore.getEnabledIndices(sft))
// store the metadata and reload the sft again to validate indices
metadata.insert(typeName, ATTRIBUTES_KEY, SimpleFeatureTypes.encodeType(sft, includeUserData = true))
sft = super.getSchema(typeName)
}
// back compatibility check for stat configuration
if (config.generateStats && metadata.read(typeName, STATS_GENERATION_KEY).isEmpty) {
// configure the stats combining iterator - we only use this key for older data stores
val configuredKey = "stats-configured"
if (!metadata.read(typeName, configuredKey).exists(_ == "true")) {
val lock = acquireCatalogLock()
try {
if (!metadata.read(typeName, configuredKey, cache = false).exists(_ == "true")) {
stats.configureStatCombiner(connector, sft)
metadata.insert(typeName, configuredKey, "true")
}
} finally {
lock.release()
}
}
// kick off asynchronous stats run for the existing data
// this may get triggered more than once, but should only run one time
val statsRunner = new StatsRunner(this)
statsRunner.submit(sft)
statsRunner.close()
}
}
sft
}
override def updateSchema(typeName: Name, sft: SimpleFeatureType): Unit = {
import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor
val previousSft = getSchema(typeName)
super.updateSchema(typeName, sft)
val lock = acquireCatalogLock()
try {
// check for newly indexed attributes and re-configure the splits
val previousAttrIndices = previousSft.getAttributeDescriptors.collect { case d if d.isIndexed => d.getLocalName }
if (sft.getAttributeDescriptors.exists(d => d.isIndexed && !previousAttrIndices.contains(d.getLocalName))) {
manager.indices(sft, IndexMode.Any).foreach {
case s: AttributeSplittable => s.configureSplits(sft, this)
case _ => // no-op
}
}
// configure the stats combining iterator on the table for this sft
stats.configureStatCombiner(connector, sft)
} finally {
lock.release()
}
}
}
object AccumuloDataStore {
/**
* Reads the indices configured using SimpleFeatureTypes.ENABLED_INDICES, or the
* default indices for the schema version
*
* @param sft simple feature type
* @return sequence of index (name, version)
*/
private def getEnabledIndices(sft: SimpleFeatureType): Seq[(String, Int, IndexMode)] = {
val marked: Seq[String] = SimpleFeatureTypes.Configs.ENABLED_INDEX_OPTS.map(sft.getUserData.get).find(_ != null) match {
case None => AccumuloFeatureIndex.AllIndices.map(_.name).distinct
case Some(enabled) =>
val e = enabled.toString.split(",").map(_.trim).filter(_.length > 0)
// check for old attribute index name
if (e.contains("attr_idx")) { e :+ AttributeIndex.name } else { e }
}
AccumuloFeatureIndex.getDefaultIndices(sft).collect {
case i if marked.contains(i.name) => (i.name, i.version, IndexMode.ReadWrite)
}
}
}
//
/**
* Configuration options for AccumuloDataStore
*
* @param catalog table in Accumulo used to store feature type metadata
* @param defaultVisibilities default visibilities applied to any data written
* @param generateStats write stats on data during ingest
* @param authProvider provides the authorizations used to access data
* @param audit optional implementations to audit queries
* @param queryTimeout optional timeout (in millis) before a long-running query will be terminated
* @param looseBBox sacrifice some precision for speed
* @param caching cache feature results - WARNING can use large amounts of memory
* @param writeThreads number of threads used for writing
* @param queryThreads number of threads used per-query
* @param recordThreads number of threads used to join against the record table. Because record scans
* are single-row ranges, increasing this too much can cause performance to decrease
*/
case class AccumuloDataStoreConfig(catalog: String,
defaultVisibilities: String,
generateStats: Boolean,
authProvider: AccumuloAuthsProvider,
audit: Option[(AuditWriter with AuditReader, AuditProvider, String)],
queryTimeout: Option[Long],
looseBBox: Boolean,
caching: Boolean,
writeThreads: Int,
queryThreads: Int,
recordThreads: Int) extends GeoMesaDataStoreConfig
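// Editor's sketch (not part of the original file): constructing the config case class above
// with illustrative values. Every value below is a placeholder chosen for the example; the
// AccumuloAuthsProvider is taken as a parameter because its construction is not shown here.
object AccumuloDataStoreConfigExample {
  def exampleConfig(authProvider: AccumuloAuthsProvider): AccumuloDataStoreConfig =
    AccumuloDataStoreConfig(
      catalog             = "geomesa_catalog",   // Accumulo table holding feature type metadata
      defaultVisibilities = "",                  // no default visibility labels on written data
      generateStats       = true,                // collect stats during ingest
      authProvider        = authProvider,
      audit               = None,                // no query auditing
      queryTimeout        = Some(60000L),        // give up on long-running queries after one minute
      looseBBox           = false,               // keep full precision for bbox filters
      caching             = false,               // avoid holding feature results in memory
      writeThreads        = 10,
      queryThreads        = 8,
      recordThreads       = 10)                  // keep modest: record scans are single-row ranges
}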
| MutahirKazmi/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/data/AccumuloDataStore.scala | Scala | apache-2.0 | 13,221 |
package org.cddcore.rendering.view
import org.cddcore.engine.Engine
import org.cddcore.enginecomponents.{EngineComponent, Scenario, UseCase}
import org.cddcore.rendering.RenderContext
object View {
val Class = "class"
val Stack = "stack"
val Message = "message"
val Advice = "advice"
val Reason = "reason"
val Explaination = "explaination"
val Actual = "actual"
val Title = "title"
val LinkUrl = "linkUrl"
val IconUrl = "iconUrl"
val DefinedAt = "definedAt"
val Id = "id"
val Type = "type"
val References = "references"
val Comment = "comment"
val Error = "errors"
val decisionTreeKey = "decisionTree"
val traceKey = "trace"
val durationKey = "duration"
val EngineTypeName = "Engine"
val UseCaseTypeName = "UseCase"
val ScenarioTypeName = "Scenario"
val situationKey = "situation"
val expectedKey = "expected"
val actualKey = "actual"
val scenariosKey = "scenarios"
val scenariosIconsKey = "scenarioIcons"
val useCasesKey = "useCases"
val linkKey = "link"
val summaryKey = "summary"
val selectedPostFixKey = "selected"
val trueFalseKey = "trueFalseKey"
val conclusionNodeKey = "conclusionNode"
val decisionNodeKey = "decisionNode"
val conditionKey = "condition"
val conclusionKey = "conclusion"
val reasonKey = "reason"
val trueNodeKey = "trueNode"
val falseNodeKey = "falseNode"
def findTypeName(e: EngineComponent[_, _]) = e match {
case e: Engine[_, _] => EngineTypeName
case u: UseCase[_, _] => UseCaseTypeName
case s: Scenario[_, _] => ScenarioTypeName
}
}
class Views {
val exceptionView = new ExceptionView
val iconUrlFinder: IconUrlFinder = new IconUrlFinder
val linkView = new LinkView(iconUrlFinder)
val scenarioView = new ScenarioView(linkView, exceptionView)
}
trait View[T] {
def apply(t: T)(implicit renderContext: RenderContext): Map[String, Object]
}
| phil-rice/CddCore2 | module/rendering/src/main/scala/org/cddcore/rendering/view/View.scala | Scala | bsd-2-clause | 1,956 |
package com.twitter.finagle.mux
import com.twitter.concurrent.AsyncQueue
import com.twitter.conversions.time._
import com.twitter.finagle.liveness.FailureDetector
import com.twitter.finagle.mux.transport.Message
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.transport.QueueTransport
import com.twitter.finagle.{Path, Service}
import com.twitter.io.Buf
import com.twitter.util.{Await, Promise}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class DispatcherTest extends FunSuite with Eventually with IntegrationPatience {
test("Discard request properly sent") {
@volatile var handled = false
val p = Promise[Response]()
p.setInterruptHandler {
case t: Throwable =>
handled = true
}
val svc = Service.mk[Request, Response](_ => p)
val q0, q1 = new AsyncQueue[Message]
val clientTrans = new QueueTransport[Message, Message](q0, q1)
val serverTrans = new QueueTransport[Message, Message](q1, q0)
val server = ServerDispatcher.newRequestResponse(serverTrans, svc)
val session =
new ClientSession(clientTrans, FailureDetector.NullConfig, "test", NullStatsReceiver)
val client = ClientDispatcher.newRequestResponse(session)
val f = client(Request(Path.empty, Nil, Buf.Empty))
assert(!f.isDefined)
assert(!p.isDefined)
f.raise(new Exception())
eventually { assert(handled) }
Await.ready(server.close().join(client.close()), 5.seconds)
}
}
| mkhq/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/DispatcherTest.scala | Scala | apache-2.0 | 1,613 |
package net.ultrametrics.fractactor
import scala.actors.Actor
import scala.actors.Actor._
import net.ultrametrics.math.Complex
/**
* Fractal point calculator.
*
* Given a coordinate in the complex plane, computes the corresponding
* value of a holomorphic function (such as the Mandelbrot set) layered
* onto this calculator as a trait. PointScaler trait is used to
* transform the calculated fractal point into a color in the viewplane.
*
* Functions as an actor which responds to Complex coordinate messages
* or a standalone function which synchronously computes a point.
*/
class FractalCalculator(val iterationLimit: Int)
extends HolomorphicFunction with Actor
{
def act() {
loop {
react {
case LineRequest(y, count, re1, re2, im, client) => {
val results = new Array[Int](count)
val increment = (re2 - re1) / count
var re = re1
for(x <- 0 until count) {
results(x) = calculate(new Complex(re, im))
re += increment
}
client !! LineResponse(y, results)
}
case PixelRequest(x, y, c, client) => {
client !! PixelResponse(x, y, calculate(c))
}
case SimpleRequest(c, client) => {
client !! SimpleResponse(c, calculate(c))
}
case msg => println("unknown message: " + msg)
}
}
}
}
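// Editor's sketch (not part of the original file): the escape-time iteration that a
// Mandelbrot-style HolomorphicFunction would perform for a single point, as described in the
// class comment above. It is written against plain doubles so it does not assume the Complex
// API; the 4.0 bound is the usual |z| > 2 escape test.
object EscapeTimeSketch {
  /** Iterates z -> z^2 + c and returns the step at which |z| exceeds 2, or `limit` if it never does. */
  def mandelbrot(cRe: Double, cIm: Double, limit: Int): Int = {
    var zRe = 0.0
    var zIm = 0.0
    var i = 0
    while (i < limit && zRe * zRe + zIm * zIm <= 4.0) {
      val nextRe = zRe * zRe - zIm * zIm + cRe // real part of z^2 + c
      val nextIm = 2.0 * zRe * zIm + cIm       // imaginary part of z^2 + c
      zRe = nextRe
      zIm = nextIm
      i += 1
    }
    i
  }

  def main(args: Array[String]): Unit = {
    println(mandelbrot(0.0, 0.0, 1000)) // the origin never escapes: prints 1000
    println(mandelbrot(2.0, 2.0, 1000)) // escapes on the first iteration: prints 1
  }
}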
| pchuck/fractactor | src/main/scala/net/ultrametrics/fractactor/FractalCalculator.scala | Scala | bsd-2-clause | 1,385 |
/*
* @author Philip Stutz
* @author Sara Magliacane
*
* Copyright 2014 University of Zurich & VU University Amsterdam
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.psl.serialization
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.signalcollect.admm.Wolf
import com.signalcollect.admm.utils.ConvergencePlotter
import com.signalcollect.psl.Inferencer
import com.signalcollect.psl.InferencerConfig
import com.signalcollect.psl.parser.PslParser
import com.signalcollect.psl.parser.ParsedPslFile
import com.signalcollect.psl.model.GroundedRule
import com.signalcollect.psl.model.GroundedPredicate
import com.signalcollect.psl.model.GroundedRule
import com.signalcollect.psl.translate.PSLToCvxConverter
import com.signalcollect.psl.model.PredicateInRule
import scala.annotation.tailrec
import com.signalcollect.util.TestAnnouncements
class PslSerializationSpec extends FlatSpec with Matchers with TestAnnouncements {
val example = """
predicate: likes(_, _)
predicate: playsIn(_, _)
rule [weight = 1]: likes(PERSON, MOVIE) && playsIn(ACTOR, MOVIE) => likes(PERSON, ACTOR)
rule [weight = 1]: likes(PERSON, MOVIE-A) && playsIn(ACTOR, MOVIE-B) && playsIn(ACTOR, MOVIE-B) => likes(PERSON, MOVIE-B)
rule [weight = 1]: likes(PERSON-A, A) && likes(PERSON-B, A) && likes(PERSON-B, B) => likes(PERSON-A, B)
fact: playsIn(john-travolta, pulp-fiction)
fact: playsIn(john-travolta, grease)
fact [truthValue = 0.7]: likes(sara, john-travolta)
fact [truthValue = 0.9]: likes(sara, pulp-fiction)
fact [truthValue = 0.8]: likes(sara, star-wars)
fact [truthValue = 0.9]: likes(philip, pulp-fiction)
fact [truthValue = 0.8]: likes(philip, john-travolta)
fact [truthValue = 0.7]: likes(philip, grease)
"""
"PSL Serialization" should "be able to do inference when all messages are serialized" in {
val config = InferencerConfig(computeObjectiveValueOfSolution = true, absoluteEpsilon = 10e-09, relativeEpsilon = 10e-04, serializeMessages = true)
val inferenceResults = Inferencer.runInferenceFromString(example, config = config)
val solution = inferenceResults.solution
val gps = inferenceResults.idToGpMap
val objectiveFunctionVal = inferenceResults.objectiveFun match {
case Some(v) => v
case None => Double.MaxValue // will make the test break.
}
solution.results.foreach {
case (id, truthValue) =>
if (truthValue > 0.01) {
val gp = gps(id)
if (!gp.truthValue.isDefined) {
//println(s"$gp has truth value $truthValue")
}
}
}
//println("Objective function value: " + objectiveFunctionVal)
objectiveFunctionVal should be(0.02 +- 0.02)
}
}
| uzh/fox | src/test/scala/com/signalcollect/psl/serialization/PslSerializationSpec.scala | Scala | apache-2.0 | 3,261 |
package com.seanshubin.uptodate.logic
import java.nio.charset.StandardCharsets
import org.scalatest.FunSuite
class MetadataParserTest extends FunSuite {
test("parse") {
val charset = StandardCharsets.UTF_8
val metadataParser: MetadataParser = new MetadataParserImpl(charset)
val actual = metadataParser.parseVersions(sampleMetadataContents)
val expected = Seq(
"20030203.000550", "2.5-SNAPSHOT", "2.4", "2.3", "2.2", "2.1", "2.0.1", "2.0", "1.4-backport-IO-168", "1.4",
"1.4", "1.3.2", "1.3.1", "1.3", "1.2", "1.1", "1.0", "0.1")
assert(actual === expected)
}
val sampleMetadataContents =
"""<?xml version="1.0" encoding="UTF-8"?>
|<metadata modelVersion="1.1.0">
| <groupId>commons-io</groupId>
| <artifactId>commons-io</artifactId>
| <version>1.4</version>
| <versioning>
| <latest>20030203.000550</latest>
| <release>20030203.000550</release>
| <versions>
| <version>0.1</version>
| <version>1.0</version>
| <version>1.1</version>
| <version>1.2</version>
| <version>1.3</version>
| <version>1.3.1</version>
| <version>1.3.2</version>
| <version>1.4</version>
| <version>1.4-backport-IO-168</version>
| <version>2.0</version>
| <version>2.0.1</version>
| <version>2.1</version>
| <version>2.2</version>
| <version>2.3</version>
| <version>2.4</version>
| <version>2.5-SNAPSHOT</version>
| <version>20030203.000550</version>
| </versions>
| <lastUpdated>20140623122436</lastUpdated>
| </versioning>
|</metadata>
| """.stripMargin
}
| SeanShubin/up-to-date | logic/src/test/scala/com/seanshubin/uptodate/logic/MetadataParserTest.scala | Scala | unlicense | 1,765 |
package scdbpf
import org.parboiled.scala._
import DbpfProperty._
import passera.unsigned._
private class PropertyParser extends Parser {
def HexDigit = rule(SuppressNode) { "0" - "9" | "a" - "f" | "A" - "F" }
def HexNumber = rule(SuppressNode) { rule { "0" ~ ignoreCase("x") ~ oneOrMore(HexDigit) } ~> (java.lang.Long.decode(_).toLong) }
def Digit = rule(SuppressNode) { "0" - "9" }
def Number = rule(SuppressNode) { oneOrMore(Digit) ~> (_.toLong) }
def Text = rule(SuppressNode) { "{\"" ~ zeroOrMore(!"\"}" ~ ANY) ~> identity ~ "\"}" }
// def ValType = rule {
// zeroOrMore(!":" ~ ANY) ~> { s =>
// ValueType.values.find(_.toString equalsIgnoreCase s)
// } ~~~? (_.isDefined) ~~> (_.get)
// }
def True = rule { ignoreCase("true") ~ push(1L) }
def False = rule { ignoreCase("false") ~ push(0L) }
private def buildProp[A](id: Int, desc: String, reps: Long, vals: Seq[A])(implicit vt: ValueType[A] with ValueType.Numerical[A]) = {
if (reps == 0 && vals.size == 1)
(UInt(id), Single(vals.head))
else
(UInt(id), Multi(vals: _*))
}
def Prop: Rule1[Property] = rule {
SInt32Rule ~ ":" ~ Text ~ "=" ~ rule {
rule { ignoreCase("Float32") ~ ":" ~ Float32Vals } |
rule { ignoreCase("Sint64") ~ ":" ~ SInt64Vals } |
rule { ignoreCase("Sint32") ~ ":" ~ SInt32Vals } |
rule { ignoreCase("Uint32") ~ ":" ~ UInt32Vals } |
rule { ignoreCase("Uint16") ~ ":" ~ UInt16Vals } |
rule { ignoreCase("Uint8") ~ ":" ~ UInt8Vals } |
rule { ignoreCase("Bool") ~ ":" ~ BoolVals } |
rule { ignoreCase("String") ~ ":" ~ Number ~ ":" ~ StringValue ~~>
((id: Int, _: String, reps: Long, vals: Seq[String]) => (UInt(id), Single(vals.head)))
}
}
}
def StringValue: Rule1[Seq[String]] = rule { Text ~~> (Seq(_)) }
// couldn't figure out how to factor out common components
def UInt32Vals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ UInt32Rule , separator = ","~Blanks) ~ "}" ~~> buildProp[UInt] _ }
def UInt16Vals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ UInt16Rule , separator = ","~Blanks) ~ "}" ~~> buildProp[UShort] _ }
def UInt8Vals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ UInt8Rule , separator = ","~Blanks) ~ "}" ~~> buildProp[UByte] _ }
def SInt32Vals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ SInt32Rule , separator = ","~Blanks) ~ "}" ~~> buildProp[Int] _ }
def SInt64Vals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ SInt64Rule , separator = ","~Blanks) ~ "}" ~~> buildProp[Long] _ }
def BoolVals : ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ BoolRule , separator = ","~Blanks) ~ "}" ~~> buildProp[Boolean] _ }
def Float32Vals: ReductionRule2[Int, String, (UInt, PropList)] = rule { Number ~ ":" ~ "{" ~ zeroOrMore(optional(NameKey) ~ Float32Rule, separator = ","~Blanks) ~ "}" ~~> buildProp[Float] _ }
def BoolRule = rule { rule { HexNumber | Number | False | True } ~~> (l => if (l == 0) false else true) }
def UInt8Rule = rule { SInt64Rule ~~> (l => UByte(l.toByte)) }
def UInt16Rule = rule { SInt64Rule ~~> (l => UShort(l.toShort)) }
def UInt32Rule: Rule1[UInt] = rule { SInt64Rule ~~> (l => UInt(l.toInt)) }
def SInt32Rule: Rule1[Int] = rule { SInt64Rule ~~> (_.toInt) }
def SInt64Rule: Rule1[Long] = rule { HexNumber | Number }
def Float32Rule = rule { rule {
optional("-") ~ zeroOrMore(Digit) ~ optional("." ~ zeroOrMore(Digit))
} ~> (_.toDouble.toFloat) }
def Blanks = rule(SuppressNode) { zeroOrMore(" ") }
def NameKey = rule(SuppressNode) { zeroOrMore(!anyOf(",:}\"") ~ ANY) ~ ":" ~ Blanks }
def Parent = rule { ignoreCase("ParentCohort=Key:") ~ "{" ~ zeroOrMore(SInt32Rule , separator = ",") ~ "}" ~~> {
s: Seq[Int] => s match {
case (Seq(t, g, i)) => Some(Tgi(t, g, i))
case _ => None
}
} ~~~? (_.isDefined) ~~> (_.get) }
def PropCount = rule { ignoreCase("PropCount=") ~ SInt64Rule }
def Header = rule(SuppressSubnodes) { anyOf("EC") ~> { _ == "C" } ~ "QZT1###" }
def Exemplar = rule {
Header ~ WhiteSpace ~ Parent ~ WhiteSpace ~ PropCount ~ WhiteSpace ~
zeroOrMore(Prop, separator = WhiteSpace) ~ optional(WhiteSpace) ~ EOI ~~> {
(isCohort: Boolean, parent: Tgi, _: Long, props: Seq[Property]) =>
scdbpf.Exemplar(parent, isCohort, props)
}
}
def WhiteSpace = rule(SuppressNode) { oneOrMore(anyOf(" \r\n\t\f")) }
def parseProperty(text: String): Property = {
val parsingResult = ReportingParseRunner(Prop).run(text)
parsingResult.result match {
case Some(p) => p
case None => throw new DbpfDecodeFailedException("Invalid property: " +
org.parboiled.errors.ErrorUtils.printParseErrors(parsingResult))
}
}
def parseExemplar(text: String): scdbpf.Exemplar = {
val parsingResult = ReportingParseRunner(this.Exemplar).run(text)
parsingResult.result match {
case Some(e) => e
case None => throw new DbpfDecodeFailedException("Invalid exemplar: " +
org.parboiled.errors.ErrorUtils.printParseErrors(parsingResult))
}
}
}
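// Editor's sketch (not part of the original file): feeding parseProperty a property line in
// the text shape the Prop rule above accepts -- a numeric id, a {"quoted name"}, a value type,
// a rep count, and a braced value list. The concrete id, name, and value below are made up
// for illustration only.
object PropertyParserSketch {
  def main(args: Array[String]): Unit = {
    val parser = new PropertyParser
    val text = """0x00000010:{"Exemplar Type"}=Uint32:1:{0x00000002}"""
    println(parser.parseProperty(text)) // a (property id, property value) pair built by buildProp
  }
}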
| memo33/scdbpf | src/main/scala/scdbpf/internal/PropertyParser.scala | Scala | mit | 5,467 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.util.Locale
import scala.language.existentials
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.expressions.PredicateHelper
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
class FilteredScanSource extends RelationProvider {
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String]): BaseRelation = {
SimpleFilteredScan(parameters("from").toInt, parameters("to").toInt)(sqlContext.sparkSession)
}
}
case class SimpleFilteredScan(from: Int, to: Int)(@transient val sparkSession: SparkSession)
extends BaseRelation
with PrunedFilteredScan {
override def sqlContext: SQLContext = sparkSession.sqlContext
override def schema: StructType =
StructType(
StructField("a", IntegerType, nullable = false) ::
StructField("b", IntegerType, nullable = false) ::
StructField("c", StringType, nullable = false) :: Nil)
override def unhandledFilters(filters: Array[Filter]): Array[Filter] = {
def unhandled(filter: Filter): Boolean = {
filter match {
case EqualTo(col, v) => col == "b"
case EqualNullSafe(col, v) => col == "b"
case LessThan(col, v: Int) => col == "b"
case LessThanOrEqual(col, v: Int) => col == "b"
case GreaterThan(col, v: Int) => col == "b"
case GreaterThanOrEqual(col, v: Int) => col == "b"
case In(col, values) => col == "b"
case IsNull(col) => col == "b"
case IsNotNull(col) => col == "b"
case Not(pred) => unhandled(pred)
case And(left, right) => unhandled(left) || unhandled(right)
case Or(left, right) => unhandled(left) || unhandled(right)
case _ => false
}
}
filters.filter(unhandled)
}
override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
val rowBuilders = requiredColumns.map {
case "a" => (i: Int) => Seq(i)
case "b" => (i: Int) => Seq(i * 2)
case "c" => (i: Int) =>
val c = (i - 1 + 'a').toChar.toString
Seq(c * 5 + c.toUpperCase(Locale.ROOT) * 5)
}
FiltersPushed.list = filters
ColumnsRequired.set = requiredColumns.toSet
// Predicate test on integer column
def translateFilterOnA(filter: Filter): Int => Boolean = filter match {
case EqualTo("a", v) => (a: Int) => a == v
case EqualNullSafe("a", v) => (a: Int) => a == v
case LessThan("a", v: Int) => (a: Int) => a < v
case LessThanOrEqual("a", v: Int) => (a: Int) => a <= v
case GreaterThan("a", v: Int) => (a: Int) => a > v
case GreaterThanOrEqual("a", v: Int) => (a: Int) => a >= v
case In("a", values) => (a: Int) => values.map(_.asInstanceOf[Int]).toSet.contains(a)
case IsNull("a") => (a: Int) => false // Int can't be null
case IsNotNull("a") => (a: Int) => true
case Not(pred) => (a: Int) => !translateFilterOnA(pred)(a)
case And(left, right) => (a: Int) =>
translateFilterOnA(left)(a) && translateFilterOnA(right)(a)
case Or(left, right) => (a: Int) =>
translateFilterOnA(left)(a) || translateFilterOnA(right)(a)
case _ => (a: Int) => true
}
// Predicate test on string column
def translateFilterOnC(filter: Filter): String => Boolean = filter match {
case StringStartsWith("c", v) => _.startsWith(v)
case StringEndsWith("c", v) => _.endsWith(v)
case StringContains("c", v) => _.contains(v)
case EqualTo("c", v: String) => _.equals(v)
case EqualTo("c", v: UTF8String) => sys.error("UTF8String should not appear in filters")
case In("c", values) => (s: String) => values.map(_.asInstanceOf[String]).toSet.contains(s)
case _ => (c: String) => true
}
def eval(a: Int) = {
val c = (a - 1 + 'a').toChar.toString * 5 +
(a - 1 + 'a').toChar.toString.toUpperCase(Locale.ROOT) * 5
filters.forall(translateFilterOnA(_)(a)) && filters.forall(translateFilterOnC(_)(c))
}
sparkSession.sparkContext.parallelize(from to to).filter(eval).map(i =>
Row.fromSeq(rowBuilders.map(_(i)).reduceOption(_ ++ _).getOrElse(Seq.empty)))
}
}
// A hack for better error messages when filter pushdown fails.
object FiltersPushed {
var list: Seq[Filter] = Nil
}
// Used together with `SimpleFilteredScan` to check pushed columns.
object ColumnsRequired {
var set: Set[String] = Set.empty
}
class FilteredScanSuite extends DataSourceTest with SharedSQLContext with PredicateHelper {
protected override lazy val sql = spark.sql _
override def beforeAll(): Unit = {
super.beforeAll()
sql(
"""
|CREATE TEMPORARY VIEW oneToTenFiltered
|USING org.apache.spark.sql.sources.FilteredScanSource
|OPTIONS (
| from '1',
| to '10'
|)
""".stripMargin)
}
sqlTest(
"SELECT * FROM oneToTenFiltered",
(1 to 10).map(i => Row(i, i * 2, (i - 1 + 'a').toChar.toString * 5
+ (i - 1 + 'a').toChar.toString.toUpperCase(Locale.ROOT) * 5)).toSeq)
sqlTest(
"SELECT a, b FROM oneToTenFiltered",
(1 to 10).map(i => Row(i, i * 2)).toSeq)
sqlTest(
"SELECT b, a FROM oneToTenFiltered",
(1 to 10).map(i => Row(i * 2, i)).toSeq)
sqlTest(
"SELECT a FROM oneToTenFiltered",
(1 to 10).map(i => Row(i)).toSeq)
sqlTest(
"SELECT b FROM oneToTenFiltered",
(1 to 10).map(i => Row(i * 2)).toSeq)
sqlTest(
"SELECT a * 2 FROM oneToTenFiltered",
(1 to 10).map(i => Row(i * 2)).toSeq)
sqlTest(
"SELECT A AS b FROM oneToTenFiltered",
(1 to 10).map(i => Row(i)).toSeq)
sqlTest(
"SELECT x.b, y.a FROM oneToTenFiltered x JOIN oneToTenFiltered y ON x.a = y.b",
(1 to 5).map(i => Row(i * 4, i)).toSeq)
sqlTest(
"SELECT x.a, y.b FROM oneToTenFiltered x JOIN oneToTenFiltered y ON x.a = y.b",
(2 to 10 by 2).map(i => Row(i, i)).toSeq)
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a = 1",
Seq(1).map(i => Row(i, i * 2)))
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a IN (1,3,5)",
Seq(1, 3, 5).map(i => Row(i, i * 2)))
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE A = 1",
Seq(1).map(i => Row(i, i * 2)))
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE b = 2",
Seq(1).map(i => Row(i, i * 2)))
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a IS NULL",
Seq.empty[Row])
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a IS NOT NULL",
(1 to 10).map(i => Row(i, i * 2)).toSeq)
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a < 5 AND a > 1",
(2 to 4).map(i => Row(i, i * 2)).toSeq)
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE a < 3 OR a > 8",
Seq(1, 2, 9, 10).map(i => Row(i, i * 2)))
sqlTest(
"SELECT a, b FROM oneToTenFiltered WHERE NOT (a < 6)",
(6 to 10).map(i => Row(i, i * 2)).toSeq)
sqlTest(
"SELECT a, b, c FROM oneToTenFiltered WHERE c like 'c%'",
Seq(Row(3, 3 * 2, "c" * 5 + "C" * 5)))
sqlTest(
"SELECT a, b, c FROM oneToTenFiltered WHERE c like '%D'",
Seq(Row(4, 4 * 2, "d" * 5 + "D" * 5)))
sqlTest(
"SELECT a, b, c FROM oneToTenFiltered WHERE c like '%eE%'",
Seq(Row(5, 5 * 2, "e" * 5 + "E" * 5)))
testPushDown("SELECT * FROM oneToTenFiltered WHERE A = 1", 1, Set("a", "b", "c"))
testPushDown("SELECT a FROM oneToTenFiltered WHERE A = 1", 1, Set("a"))
testPushDown("SELECT b FROM oneToTenFiltered WHERE A = 1", 1, Set("b"))
testPushDown("SELECT a, b FROM oneToTenFiltered WHERE A = 1", 1, Set("a", "b"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a = 1", 1, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE 1 = a", 1, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a > 1", 9, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a >= 2", 9, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE 1 < a", 9, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE 2 <= a", 9, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE 1 > a", 0, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE 2 >= a", 2, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a < 1", 0, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a <= 2", 2, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a > 1 AND a < 10", 8, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a IN (1,3,5)", 3, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a = 20", 0, Set("a", "b", "c"))
testPushDown(
"SELECT * FROM oneToTenFiltered WHERE b = 1",
10,
Set("a", "b", "c"),
Set(EqualTo("b", 1)))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a < 5 AND a > 1", 3, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE a < 3 OR a > 8", 4, Set("a", "b", "c"))
testPushDown("SELECT * FROM oneToTenFiltered WHERE NOT (a < 6)", 5, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like 'c%'", 1, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like 'C%'", 0, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like '%D'", 1, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like '%d'", 0, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like '%eE%'", 1, Set("a", "b", "c"))
testPushDown("SELECT a, b, c FROM oneToTenFiltered WHERE c like '%Ee%'", 0, Set("a", "b", "c"))
testPushDown("SELECT c FROM oneToTenFiltered WHERE c = 'aaaaaAAAAA'", 1, Set("c"))
testPushDown("SELECT c FROM oneToTenFiltered WHERE c IN ('aaaaaAAAAA', 'foo')", 1, Set("c"))
  // Filters referencing multiple columns are not convertible; all referenced columns must be
  // required.
testPushDown("SELECT c FROM oneToTenFiltered WHERE A + b > 9", 10, Set("a", "b", "c"))
// A query with an inconvertible filter, an unhandled filter, and a handled filter.
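  // Expected outcome encoded below: only the IN predicate on c is fully handled by the
  // source (it matches rows 2..4, so the raw scan returns 3 rows), buildScan is asked for
  // columns a and b only (a is projected, b is referenced by the remaining predicates),
  // and LessThan("b", 16) is reported back as the sole unhandled filter.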
testPushDown(
"""SELECT a
| FROM oneToTenFiltered
| WHERE a + b > 9
| AND b < 16
| AND c IN ('bbbbbBBBBB', 'cccccCCCCC', 'dddddDDDDD', 'foo')
""".stripMargin.split("\n").map(_.trim).mkString(" "),
3,
Set("a", "b"),
Set(LessThan("b", 16)))
def testPushDown(
sqlString: String,
expectedCount: Int,
requiredColumnNames: Set[String]): Unit = {
testPushDown(sqlString, expectedCount, requiredColumnNames, Set.empty[Filter])
}
def testPushDown(
sqlString: String,
expectedCount: Int,
requiredColumnNames: Set[String],
expectedUnhandledFilters: Set[Filter]): Unit = {
test(s"PushDown Returns $expectedCount: $sqlString") {
// These tests check a particular plan, disable whole stage codegen.
spark.conf.set(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key, false)
try {
val queryExecution = sql(sqlString).queryExecution
val rawPlan = queryExecution.executedPlan.collect {
case p: execution.DataSourceScanExec => p
} match {
case Seq(p) => p
case _ => fail(s"More than one PhysicalRDD found\n$queryExecution")
}
val rawCount = rawPlan.execute().count()
assert(ColumnsRequired.set === requiredColumnNames)
val table = spark.table("oneToTenFiltered")
val relation = table.queryExecution.analyzed.collectFirst {
case LogicalRelation(r, _, _, _) => r
}.get
assert(
relation.unhandledFilters(FiltersPushed.list.toArray).toSet === expectedUnhandledFilters)
if (rawCount != expectedCount) {
fail(
s"Wrong # of results for pushed filter. Got $rawCount, Expected $expectedCount\n" +
s"Filters pushed: ${FiltersPushed.list.mkString(",")}\n" +
queryExecution)
}
} finally {
spark.conf.set(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key,
SQLConf.WHOLESTAGE_CODEGEN_ENABLED.defaultValue.get)
}
}
}
}
|
bravo-zhang/spark
|
sql/core/src/test/scala/org/apache/spark/sql/sources/FilteredScanSuite.scala
|
Scala
|
apache-2.0
| 13,158 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.commands.expressions
import org.neo4j.cypher.internal.compiler.v2_3._
import org.neo4j.cypher.internal.compiler.v2_3.pipes.QueryState
import org.neo4j.cypher.internal.compiler.v2_3.symbols.SymbolTable
import org.neo4j.cypher.internal.frontend.v2_3.SyntaxException
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
import org.neo4j.graphdb.Path
import scala.collection.JavaConverters._
case class RelationshipFunction(path: Expression) extends NullInNullOutExpression(path) {
def compute(value: Any, m: ExecutionContext)(implicit state: QueryState) = value match {
case p: PathImpl => p.relList
case p: Path => p.relationships().asScala.toSeq
case x => throw new SyntaxException("Expected " + path + " to be a path.")
}
def rewrite(f: (Expression) => Expression) = f(RelationshipFunction(path.rewrite(f)))
def arguments = Seq(path)
def calculateType(symbols: SymbolTable) = {
path.evaluateType(CTPath, symbols)
CTCollection(CTRelationship)
}
def symbolTableDependencies = path.symbolTableDependencies
}
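// Context sketch (added note, not part of the original file): this expression backs the
// Cypher `relationships(path)` function, e.g.
//   MATCH p = (a)-[:KNOWS*]->(b) RETURN relationships(p)
// A PathImpl already carries its relationship list, any other Path is converted from the
// Java iterable, and a non-path argument fails with a SyntaxException; the declared result
// type is a collection of relationships.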
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/commands/expressions/RelationshipFunction.scala
|
Scala
|
apache-2.0
| 1,900 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.evaluate
import slamdata.Predef._
import quasar.{IdStatus, Qspec, TreeMatchers}
import quasar.api.DataPathSegment
import quasar.api.datasource.DatasourceType
import quasar.api.push.{OffsetPath, InternalKey}
import quasar.api.resource._
import quasar.connector._
import quasar.connector.evaluate._
import quasar.contrib.pathy.AFile
import quasar.contrib.iota.{copkTraverse, copkEqual}
import quasar.fp.constEqual
import quasar.impl.EmptyDatasource
import quasar.qscript._
import scala.collection.immutable.SortedMap
import cats.{Order, Show}
import cats.data.NonEmptyList
import cats.implicits._
import fs2.Stream
import matryoshka._
import matryoshka.data.Fix
import pathy.Path._
import scalaz.Tree
import shims.{eqToScalaz, equalToCats, orderToCats, showToCats, showToScalaz}
import spire.math.Real
import skolems.∃
final class QueryFederatorSpec extends Qspec with TreeMatchers {
import ResourceError._
import IdStatus.ExcludeId
implicit val showTree: Show[Tree[ResourceName]] =
Show.show(_.drawTree)
type M[A] = Either[ResourceError, A]
type Q = InterpretedRead[ResourcePath]
val fedType = DatasourceType("federator", 37)
val abs = ResourcePath.root() / ResourceName("resource") / ResourceName("abs")
val xys = ResourcePath.root() / ResourceName("resource") / ResourceName("xys")
val absSrc =
EmptyDatasource[M, Stream[M, ?], Q, Int, ResourcePathType](fedType, 1, supportsSeek = false)
val xysSrc =
EmptyDatasource[M, Stream[M, ?], Q, Int, ResourcePathType](fedType, 2, supportsSeek = true)
val srcs = SortedMap(
abs -> Source(abs, absSrc),
xys -> Source(xys, xysSrc))(
Order[ResourcePath].toOrdering)
val federator = QueryFederator[Fix] { (f: AFile) =>
srcs.get(ResourcePath.leaf(f)).asRight[ResourceError]
}
val qs = construction.mkDefaults[Fix, QScriptEducated[Fix, ?]]
def offset(r: Real): Offset =
Offset.Internal(OffsetPath(DataPathSegment.Field("ts")), ∃(InternalKey.Actual.real(r)))
"returns 'not a resource' for root" >> {
val query =
qs.fix.Map(
qs.fix.Read[ResourcePath](ResourcePath.Root, ExcludeId),
qs.recFunc.MakeMapS("value", qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "value")))
federator((query, None)).swap.toOption must_= Some(notAResource(ResourcePath.root()))
}
"returns 'path not found' when no source" >> {
val query =
qs.fix.Map(
qs.fix.Read[ResourcePath](ResourcePath.leaf(rootDir </> dir("foo") </> file("bar")), ExcludeId),
qs.recFunc.MakeMapS("value", qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "value")))
val rp =
ResourcePath.root() / ResourceName("foo") / ResourceName("bar")
federator((query, None)).swap.toOption must_= Some(pathNotFound(rp))
}
"returns 'path not found' when no source in branch" >> {
val query =
qs.fix.Union(
qs.fix.Unreferenced,
qs.free.Read[ResourcePath](ResourcePath.leaf(rootDir </> dir("abs") </> file("a")), ExcludeId),
qs.free.Read[ResourcePath](ResourcePath.leaf(rootDir </> dir("foo") </> file("bar")), ExcludeId))
val rp =
ResourcePath.root() / ResourceName("abs") / ResourceName("a")
federator((query, None)).swap.toOption must_= Some(pathNotFound(rp))
}
"returns 'too many sources' when offset query references more than one source" >> {
val absf: AFile =
rootDir </> dir("resource") </> file("abs")
val xysf: AFile =
rootDir </> dir("resource") </> file("xys")
val query =
qs.fix.Filter(
qs.fix.Union(
qs.fix.Unreferenced,
qs.free.Read[ResourcePath](ResourcePath.leaf(absf), ExcludeId),
qs.free.Read[ResourcePath](ResourcePath.leaf(xysf), ExcludeId)),
qs.recFunc.Gt(
qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "ts"),
qs.recFunc.Now))
federator((query, Some(offset(16)))) must beLike {
case Left(ResourceError.TooManyResources(ps, _)) =>
ps must_= NonEmptyList.of(absf, xysf).map(ResourcePath.leaf(_))
}
}
"returns 'seek unsupported' when offset query refers to a source that cannot seek" >> {
val absf: AFile =
rootDir </> dir("resource") </> file("abs")
val query =
qs.fix.Filter(
qs.fix.Read[ResourcePath](ResourcePath.leaf(absf), ExcludeId),
qs.recFunc.Gt(
qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "ts"),
qs.recFunc.Now))
federator((query, Some(offset(100)))) must beLike {
case Left(ResourceError.SeekUnsupported(p)) => p must_= abs
}
}
"builds federated query when all sources found" >> {
val absf: AFile =
rootDir </> dir("resource") </> file("abs")
val xysf: AFile =
rootDir </> dir("resource") </> file("xys")
val query =
qs.fix.Filter(
qs.fix.Union(
qs.fix.Unreferenced,
qs.free.Read[ResourcePath](ResourcePath.leaf(absf), ExcludeId),
qs.free.Read[ResourcePath](ResourcePath.leaf(xysf), ExcludeId)),
qs.recFunc.Gt(
qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "ts"),
qs.recFunc.Now))
federator((query, None)) map { fq =>
fq.query must beTreeEqual(query)
fq.sources(absf).map(_.path) must_= Some(ResourcePath.leaf(absf))
fq.sources(xysf).map(_.path) must_= Some(ResourcePath.leaf(xysf))
} getOrElse ko("Unexpected evaluate failure.")
}
"builds offset query when single, seek-supporting, source exists" >> {
val xysf: AFile =
rootDir </> dir("resource") </> file("xys")
val query =
qs.fix.Filter(
qs.fix.Union(
qs.fix.Unreferenced,
qs.free.Read[ResourcePath](ResourcePath.leaf(xysf), ExcludeId),
qs.free.Read[ResourcePath](ResourcePath.leaf(xysf), ExcludeId)),
qs.recFunc.Gt(
qs.recFunc.ProjectKeyS(qs.recFunc.Hole, "ts"),
qs.recFunc.Now))
federator((query, Some(offset(42)))) map { fq =>
fq.query must beTreeEqual(query)
fq.sources(xysf).map(_.path) must_= Some(ResourcePath.leaf(xysf))
} getOrElse ko("Unexpected evaluate failure.")
}
}
|
quasar-analytics/quasar
|
impl/src/test/scala/quasar/impl/evaluate/QueryFederatorSpec.scala
|
Scala
|
apache-2.0
| 6,688 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Wed Dec 30 14:48:41 EST 2009
* @see LICENSE (MIT style license file).
*/
package apps.event
import scalation.model.Modelable
import scalation.event.{Entity, Event, Model, WaitQueue}
import scalation.linalgebra.{MatrixD, VectorD}
import scalation.queueingnet.MMc_Queue
import scalation.random.{Exponential, Variate}
import scalation.stat.Statistic
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Bank` object defines a particular scenario under which to execute the
* Bank model.
* @see scalation.event.ModelTest for another example of test code.
* > run-main apps.event.Bank
*/
object Bank extends App with Modelable
{
val stream = 1 // random number stream (0 to 99)
val lambda = 6.0 // customer arrival rate (per hr)
val mu = 7.5 // customer service rate (per hr)
val maxCusts = 10 // stopping rule: at maxCusts
val iArrivalRV = Exponential (HOUR/lambda, stream) // inter-arrival time random var
val serviceRV = Exponential (HOUR/mu, stream) // service time random variate
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Run the simulation of `BankModel`.
* @param startTime the start time for the simulation
*/
def simulate (startTime: Double)
{
new BankModel ("Bank", maxCusts, iArrivalRV, serviceRV)
} // simulate
simulate (0.0)
//:: verify the results using an M/M/c Queueing Model
println ("\\nVerification ...")
val mm1 = new MMc_Queue (lambda/HOUR, mu/HOUR)
mm1.view ()
mm1.report ()
} // Bank object
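// Back-of-the-envelope check of the verification step above (derived from the parameters
// in this file, not produced by running it): with lambda = 6.0/hr and mu = 7.5/hr the
// utilization is rho = lambda/mu = 0.8, the expected number in the system is
// L = rho / (1 - rho) = 4.0 customers, and the expected time in the system is
// W = 1 / (mu - lambda) = 1/1.5 hr = 40 minutes. The MMc_Queue report should agree with
// these closed-form M/M/1 figures.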
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `BankModel` class defines a simple Event-Scheduling model of a Bank where
* service is provided by one teller and models an M/M/1 queue.
* @param name the name of the simulation model
* @param nArrivals the number of arrivals to generate (stopping condition)
* @param iArrivalRV the inter-arrival time distribution
* @param serviceRV the service time distribution
*/
class BankModel (name: String, nArrivals: Int, iArrivalRV: Variate, serviceRV: Variate)
extends Model (name)
{
val t_a_stat = new Statistic ("t_a") // time between Arrivals statistics
val t_s_stat = new Statistic ("t_s") // time in Service statistics
val waitQueue = WaitQueue (this) // waiting queue that collects stats
var nArr = 0.0 // number of customers that have arrived
var nIn = 0.0 // number of customers in the bank
var nOut = 0.0 // number of customers that finished & left
addStats (t_a_stat, t_s_stat)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** `Arrival` is a subclass of `Event` for handling arrival events.
* The 'occur' method triggers future events and updates the current state.
* @param customer the entity that arrives, in this case a bank customer
* @param delay the time delay for this event's occurrence
*/
case class Arrival (customer: Entity, delay: Double)
extends Event (customer, this, delay, t_a_stat)
{
def occur ()
{
if (nArr < nArrivals-1) {
val toArrive = Entity (iArrivalRV.gen, serviceRV.gen, BankModel.this)
schedule (Arrival (toArrive, toArrive.iArrivalT))
} // if
if (nIn == 0) {
schedule (Departure (customer, customer.serviceT))
} else {
waitQueue += customer // collects time in Queue statistics
} // if
nArr += 1 // update the current state
nIn += 1
} // occur
} // Arrival class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** `Departure` is a subclass of `Event` for handling departure events.
* The 'occur' method triggers future events and updates the current state.
* @param customer the entity that departs, in this case a bank customer
* @param delay the time delay for this event's occurrence
*/
case class Departure (customer: Entity, delay: Double)
extends Event (customer, this, delay, t_s_stat)
{
def occur ()
{
            leave (customer) // collects time in system statistics
if (! waitQueue.isEmpty) {
val nextService = waitQueue.dequeue () // first customer in queue
schedule (Departure (nextService, nextService.serviceT))
} // if
nIn -= 1 // update the current state
nOut += 1
} // occur
} // Departure class
//:: start the simulation after scheduling the first priming event
val firstArrival = Entity (iArrivalRV.gen, serviceRV.gen, this)
schedule (Arrival (firstArrival, firstArrival.iArrivalT)) // first priming event
simulate () // start simulating
report (("nArr", nArr), ("nIn", nIn), ("nOut", nOut))
reportStats
} // BankModel class
|
NBKlepp/fda
|
scalation_1.3/scalation_models/src/main/scala/apps/event/Bank.scala
|
Scala
|
mit
| 5,554 |
object o { def apply(i: AnyRef*)(j: String) = i }
object Test {
def main(args: Array[String]) {
println("(o()_)(\"\") = " + (o()_)(""))
println("(o(\"a1\")_)(\"\") = " + (o("a1")_)(""))
println("(o(\"a1\", \"a2\")_)(\"\") = " + (o("a1", "a2")_)(""))
}
}
|
felixmulder/scala
|
test/files/run/t5966.scala
|
Scala
|
bsd-3-clause
| 271 |
class B(val x: Int) {
self: A =>
def this(a: String) = this()
}
class A()
|
AlexSikia/dotty
|
tests/untried/neg/t4460c.scala
|
Scala
|
bsd-3-clause
| 80 |
package pokescala.model
import java.time.LocalDateTime
import pokescala.net.PokeAPI
class Type(
val name : String,
val offensiveMults : Map[String, Double],
val id : Int,
val resourceURI : String,
val created : LocalDateTime,
val modified : LocalDateTime) extends Model[Type] {
val registry = TypeRegistry;
registry.register(this);
def loadAdjacent = (for ((uri, mult) <- offensiveMults; t <- PokeAPI.typeByURI(uri)) yield t).toVector;
override def toString = s"$name; $offensiveMults; " + super.toString;
}
object TypeRegistry extends ModelRegistry[Type] {
}
|
haferflocken/PokeScala
|
PokeScala/src/pokescala/model/Type.scala
|
Scala
|
apache-2.0
| 607 |
package akka.contrib.persistence.mongodb
import org.junit.runner.RunWith
import org.scalatestplus.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ScalaPersistenceJournalTckSpec extends JournalTckSpec(classOf[ScalaDriverPersistenceExtension], s"officialScalaJournalTck")
@RunWith(classOf[JUnitRunner])
class ScalaSuffixPersistenceJournalTckSpec extends JournalTckSpec(classOf[ScalaDriverPersistenceExtension], s"officialScalaJournalTck-suffix", SuffixCollectionNamesTest.extendedConfig)
|
JeanFrancoisGuena/akka-persistence-mongo
|
scala/src/test/scala/akka/contrib/persistence/mongodb/ScalaPersistenceJournalTckSpec.scala
|
Scala
|
apache-2.0
| 497 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2002-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
object Product2 {
def unapply[T1, T2](x: Product2[T1, T2]): Option[Product2[T1, T2]] =
Some(x)
}
/** Product2 is a cartesian product of 2 components.
*
* @since 2.3
*/
trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Double) +T2] extends Product {
/**
* The arity of this product.
* @return 2
*/
override def productArity = 2
/**
   * Returns the n-th projection of this product if 0 <= n < arity,
   * otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
* @return same as _(n+1)
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case _ => throw new IndexOutOfBoundsException(n.toString())
}
/** projection of this product */
def _1: T1
/** projection of this product */
def _2: T2
}
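// Usage sketch (illustrative, not part of the generated source): every Tuple2 is a Product2,
// so the members above can be exercised directly.
//   val p: Product2[Int, String] = (1, "one")
//   Product2.unapply(p)    // Some((1, "one"))
//   p.productArity         // 2
//   p.productElement(1)    // "one", same as p._2
//   p.productElement(2)    // throws IndexOutOfBoundsException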
|
cran/rkafkajars
|
java/scala/Product2.scala
|
Scala
|
apache-2.0
| 1,529 |
package org.knora.webapi.messages.v1.responder.permissionmessages
import org.knora.webapi._
import org.knora.webapi.messages.v1.responder.usermessages.UsersResponderRequestV1
import org.scalatest.{Matchers, WordSpecLike}
/**
* This spec is used to test subclasses of the [[UsersResponderRequestV1]] class.
*/
class PermissionMessagesV1Spec extends WordSpecLike with Matchers {
"querying the user's 'PermissionProfileV1' with 'hasPermissionFor'" should {
"return true if the user is allowed to create a resource (root user)" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val resourceClassIri = "http://www.knora.org/ontology/incunabula#book"
val result = SharedAdminTestData.rootUser.permissionData.hasPermissionFor(ResourceCreateOperation(resourceClassIri), projectIri, None)
result should be(true)
}
"return true if the user is allowed to create a resource (project admin user)" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val resourceClassIri = "http://www.knora.org/ontology/incunabula#book"
val result = SharedAdminTestData.incunabulaProjectAdminUser.permissionData.hasPermissionFor(ResourceCreateOperation(resourceClassIri), projectIri, None)
result should be(true)
}
"return true if the user is allowed to create a resource (project member user)" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val resourceClassIri = "http://www.knora.org/ontology/incunabula#book"
val result = SharedAdminTestData.incunabulaMemberUser.permissionData.hasPermissionFor(ResourceCreateOperation(resourceClassIri), projectIri, None)
result should be(true)
}
"return false if the user is not allowed to create a resource" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val resourceClassIri = "http://www.knora.org/ontology/incunabula#book"
val result = SharedAdminTestData.normalUser.permissionData.hasPermissionFor(ResourceCreateOperation(resourceClassIri), projectIri, None)
result should be(false)
}
"return true if the user is allowed to create a resource (ProjectResourceCreateRestrictedPermission)" in {
val projectIri = SharedAdminTestData.IMAGES_PROJECT_IRI
val allowedResourceClassIri01 = "http://www.knora.org/ontology/images#bild"
val allowedResourceClassIri02 = "http://www.knora.org/ontology/images#bildformat"
val notAllowedResourceClassIri = "http://www.knora.org/ontology/images#person"
val result1 = SharedAdminTestData.imagesReviewerUser.permissionData.hasPermissionFor(ResourceCreateOperation(allowedResourceClassIri01), projectIri, None)
result1 should be(true)
val result2 = SharedAdminTestData.imagesReviewerUser.permissionData.hasPermissionFor(ResourceCreateOperation(allowedResourceClassIri02), projectIri, None)
result2 should be(true)
}
"return false if the user is not allowed to create a resource (ProjectResourceCreateRestrictedPermission)" in {
val projectIri = SharedAdminTestData.IMAGES_PROJECT_IRI
val notAllowedResourceClassIri = "http://www.knora.org/ontology/images#person"
val result = SharedAdminTestData.imagesReviewerUser.permissionData.hasPermissionFor(ResourceCreateOperation(notAllowedResourceClassIri), projectIri, None)
result should be(false)
}
}
"querying the user's 'PermissionsProfileV1' with 'hasProjectAdminAllPermissionFor'" should {
"return true if the user has the 'ProjectAdminAllPermission' (incunabula project admin user)" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val result = SharedAdminTestData.incunabulaProjectAdminUser.permissionData.hasProjectAdminAllPermissionFor(projectIri)
result should be(true)
}
"return false if the user has the 'ProjectAdminAllPermission' (incunabula member user)" in {
val projectIri = SharedAdminTestData.INCUNABULA_PROJECT_IRI
val result = SharedAdminTestData.incunabulaMemberUser.permissionData.hasProjectAdminAllPermissionFor(projectIri)
result should be(false)
}
}
}
|
nie-ine/Knora
|
webapi/src/test/scala/org/knora/webapi/messages/v1/responder/permissionmessages/PermissionMessagesV1Spec.scala
|
Scala
|
agpl-3.0
| 4,430 |
package reeds
import java.net.{InetSocketAddress, URI, URL}
import cats.data._
import Validated._
import cats.instances.list._
import cats.syntax.functor._
trait net {
import Reads._
implicit object InetSocketAddressReads extends Reads[InetSocketAddress] {
//Regex intended to parse host/port combo (host can be a valid hostname, or an IPv4 or IPv6 address)
    private val addressParser = """^([^\:]*|\[(?:[a-fA-F0-9]{0,4}?\:)*[a-fA-F0-9]{1,4}\])\:([0-9]+)$""".r
def apply(str: String) = str match {
case addressParser(host, port) =>
IntReads.apply(port).map {
portNum => new InetSocketAddress(host, portNum)
}
case other => Invalid(SimpleError(s"$other could not be parsed as host:port")).toValidatedNel
}
}
implicit val URLReads = Reads.catching(new URL(_))
implicit val URIReads = Reads.catching(new URI(_))
}
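// Behaviour sketch (illustrative and assumed, not from the original file): given some
// object Net extends net (or any instance mixing in this trait),
//   Net.InetSocketAddressReads("example.com:8080")  // roughly Valid(new InetSocketAddress("example.com", 8080))
//   Net.InetSocketAddressReads("[::1]:6379")        // Valid – bracketed IPv6 literal plus port
//   Net.InetSocketAddressReads("no-port-here")      // Invalid(SimpleError("... could not be parsed as host:port"))
//   Net.URLReads("not a url")                       // Invalid – the MalformedURLException is captured by Reads.catching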
|
Acornsgrow/reeds
|
reeds-core/src/main/scala/reeds/net.scala
|
Scala
|
apache-2.0
| 882 |
package com.twitter.finagle.zipkin.thrift
import com.twitter.conversions.time._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.service.TimeoutFilter
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.thrift.{ThriftClientFramedCodec, thrift}
import com.twitter.finagle.tracing._
import com.twitter.finagle.util.DefaultTimer
import com.twitter.finagle.{Service, SimpleFilter, tracing}
import com.twitter.util.{Time, Await, Base64StringEncoder, Future}
import java.net.InetSocketAddress
import java.nio.ByteBuffer
import java.util.concurrent.TimeoutException
import org.apache.thrift.protocol.TBinaryProtocol
import org.apache.thrift.transport.TMemoryBuffer
import scala.collection.mutable.{ArrayBuffer, HashMap, SynchronizedMap}
object RawZipkinTracer {
// to make sure we only create one instance of the tracer per host and port
private[this] val map =
new HashMap[String, RawZipkinTracer] with SynchronizedMap[String, RawZipkinTracer]
/**
* @param scribeHost Host to send trace data to
* @param scribePort Port to send trace data to
* @param statsReceiver Where to log information about tracing success/failures
*/
def apply(scribeHost: String = "localhost",
scribePort: Int = 1463,
statsReceiver: StatsReceiver = NullStatsReceiver
): Tracer = synchronized {
val tracer = map.getOrElseUpdate(scribeHost + ":" + scribePort, {
new RawZipkinTracer(
scribeHost,
scribePort,
statsReceiver.scope("zipkin")
)
})
tracer
}
// Try to flush the tracers when we shut
// down. We give it 100ms.
Runtime.getRuntime().addShutdownHook(new Thread {
override def run() {
val tracers = RawZipkinTracer.synchronized(map.values.toSeq)
val joined = Future.join(tracers map(_.flush()))
try {
Await.result(joined, 100.milliseconds)
} catch {
case _: TimeoutException =>
System.err.println("Failed to flush all traces before quitting")
}
}
})
}
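// Wiring sketch (host and port are assumptions, not from the original file): the factory
// memoizes one tracer per scribe endpoint, so repeated calls with the same host/port pair
// return the same instance, and the shutdown hook above flushes them all on exit.
//   val tracer = RawZipkinTracer(scribeHost = "zipkin.example.com", scribePort = 1463)
//   // hand `tracer` to a Finagle client/server builder via its tracer hook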
/**
* Receives the Finagle generated traces and sends them off to Zipkin via scribe.
* @param scribeHost The scribe host used to send traces to scribe
* @param scribePort The scribe port used to send traces to scribe
* @param statsReceiver We generate stats to keep track of traces sent, failures and so on
*/
private[thrift] class RawZipkinTracer(
scribeHost: String,
scribePort: Int,
statsReceiver: StatsReceiver
) extends Tracer
{
private[this] val protocolFactory = new TBinaryProtocol.Factory()
private[this] val TraceCategory = "zipkin" // scribe category
  // this sends off spans once the deadline is hit, whether or not they ended naturally.
private[this] val spanMap: DeadlineSpanMap =
new DeadlineSpanMap(this, 120.seconds, statsReceiver, DefaultTimer.twitter)
protected[thrift] val client = {
val transport = ClientBuilder()
.hosts(new InetSocketAddress(scribeHost, scribePort))
.codec(ThriftClientFramedCodec())
.hostConnectionLimit(5)
.daemon(true)
.build()
new scribe.FinagledClient(
new TracelessFilter andThen transport,
new TBinaryProtocol.Factory())
}
protected[thrift] def flush() = spanMap.flush()
/**
* Always sample the request.
*/
def sampleTrace(traceId: TraceId): Option[Boolean] = Some(true)
/**
* Serialize the span, base64 encode and shove it all in a list.
*/
private def createLogEntries(span: Span): Future[Seq[LogEntry]] = Future {
val buffer = new TMemoryBuffer(512) // 512 bytes fits small spans, but it can grow for bigger spans
span.toThrift.write(protocolFactory.getProtocol(buffer))
val thriftBytes = buffer.getArray.take(buffer.length)
    val serializedBase64Span = Base64StringEncoder.encode(thriftBytes) + '\n'
Seq(new LogEntry(category = TraceCategory, message = serializedBase64Span))
}
/**
* Log the span data via Scribe.
*/
def logSpan(span: Span): Future[Unit] = {
val logEntries = createLogEntries(span)
logEntries.flatMap(client.log) onSuccess {
case ResultCode.Ok => statsReceiver.scope("log_span").counter("ok").incr()
case ResultCode.TryLater => statsReceiver.scope("log_span").counter("try_later").incr()
case _ => () /* ignore */
} onFailure {
case e: Throwable => statsReceiver.counter("log_span", "error", e.getClass.getName).incr()
} map(_ => ())
}
/**
* Mutate the Span with whatever new info we have.
* If we see an "end" annotation we remove the span and send it off.
*/
protected def mutate(traceId: TraceId)(f: Span => Span) {
val span = spanMap.update(traceId)(f)
// if either two "end annotations" exists we send off the span
if (span.annotations.exists { a =>
a.value.equals(thrift.Constants.CLIENT_RECV) ||
a.value.equals(thrift.Constants.SERVER_SEND) ||
a.value.equals(TimeoutFilter.TimeoutAnnotation)
}) {
spanMap.remove(traceId)
logSpan(span)
}
}
def record(record: Record) {
record.annotation match {
case tracing.Annotation.ClientSend() =>
annotate(record, thrift.Constants.CLIENT_SEND)
case tracing.Annotation.ClientRecv() =>
annotate(record, thrift.Constants.CLIENT_RECV)
case tracing.Annotation.ServerSend() =>
annotate(record, thrift.Constants.SERVER_SEND)
case tracing.Annotation.ServerRecv() =>
annotate(record, thrift.Constants.SERVER_RECV)
case tracing.Annotation.Message(value) =>
annotate(record, value)
case tracing.Annotation.Rpcname(service: String, rpc: String) =>
mutate(record.traceId) { span =>
span.copy(_name = Some(rpc), _serviceName = Some(service))
}
case tracing.Annotation.BinaryAnnotation(key: String, value: Boolean) =>
binaryAnnotation(record, key, ByteBuffer.wrap(Array[Byte](if (value) 1 else 0)), thrift.AnnotationType.BOOL)
case tracing.Annotation.BinaryAnnotation(key: String, value: Array[Byte]) =>
binaryAnnotation(record, key, ByteBuffer.wrap(value), thrift.AnnotationType.BYTES)
case tracing.Annotation.BinaryAnnotation(key: String, value: ByteBuffer) =>
binaryAnnotation(record, key, value, thrift.AnnotationType.BYTES)
case tracing.Annotation.BinaryAnnotation(key: String, value: Short) =>
binaryAnnotation(record, key, ByteBuffer.allocate(2).putShort(0, value), thrift.AnnotationType.I16)
case tracing.Annotation.BinaryAnnotation(key: String, value: Int) =>
binaryAnnotation(record, key, ByteBuffer.allocate(4).putInt(0, value), thrift.AnnotationType.I32)
case tracing.Annotation.BinaryAnnotation(key: String, value: Long) =>
binaryAnnotation(record, key, ByteBuffer.allocate(8).putLong(0, value), thrift.AnnotationType.I64)
case tracing.Annotation.BinaryAnnotation(key: String, value: Double) =>
binaryAnnotation(record, key, ByteBuffer.allocate(8).putDouble(0, value), thrift.AnnotationType.DOUBLE)
case tracing.Annotation.BinaryAnnotation(key: String, value: String) =>
binaryAnnotation(record, key, ByteBuffer.wrap(value.getBytes), thrift.AnnotationType.STRING)
case tracing.Annotation.BinaryAnnotation(key: String, value) => // Throw error?
case tracing.Annotation.LocalAddr(ia: InetSocketAddress) =>
setEndpoint(record, ia)
case tracing.Annotation.ClientAddr(ia: InetSocketAddress) =>
mutate(record.traceId) { span =>
span.copy(bAnnotations = span.bAnnotations ++ Seq(
// use a binary annotation over a regular annotation to avoid a misleading timestamp
BinaryAnnotation(thrift.Constants.CLIENT_ADDR,
ByteBuffer.wrap(Array[Byte](1)),
thrift.AnnotationType.BOOL,
Endpoint.fromSocketAddress(ia))))
}
case tracing.Annotation.ServerAddr(ia: InetSocketAddress) =>
mutate(record.traceId) { span =>
span.copy(bAnnotations = span.bAnnotations ++ Seq(
BinaryAnnotation(thrift.Constants.SERVER_ADDR,
ByteBuffer.wrap(Array[Byte](1)),
thrift.AnnotationType.BOOL,
Endpoint.fromSocketAddress(ia))))
}
}
}
/**
* Sets the endpoint in the span for any future annotations. Also
* sets the endpoint in any previous annotations that lack one.
*/
protected def setEndpoint(record: Record, ia: InetSocketAddress) {
mutate(record.traceId) { span =>
val ep = Endpoint.fromSocketAddress(ia).boundEndpoint
span.copy(endpoint = ep,
annotations = span.annotations map { a =>
if (a.endpoint == Endpoint.Unknown)
ZipkinAnnotation(a.timestamp, a.value, ep, a.duration)
else
a
})
}
}
protected def binaryAnnotation(
record: Record,
key: String,
value: ByteBuffer,
annotationType: thrift.AnnotationType
) = {
mutate(record.traceId) { span =>
span.copy(bAnnotations = span.bAnnotations ++ Seq(
BinaryAnnotation(key, value, annotationType, span.endpoint)))
}
}
/**
* Add this record as a time based annotation.
*/
protected def annotate(record: Record, value: String) = {
mutate(record.traceId) { span =>
span.copy(annotations =
ZipkinAnnotation(record.timestamp, value, span.endpoint, record.duration) +: span.annotations)
}
}
}
/**
* Makes sure we don't trace the Scribe logging.
*/
private[thrift] class TracelessFilter[Req, Rep]()
extends SimpleFilter[Req, Rep]
{
def apply(request: Req, service: Service[Req, Rep]) = {
Trace.unwind {
Trace.clear()
service(request)
}
}
}
|
firebase/finagle
|
finagle-zipkin/src/main/scala/com/twitter/finagle/zipkin/thrift/RawZipkinTracer.scala
|
Scala
|
apache-2.0
| 9,807 |
object Test {
import scala.Eql
given [X, Y](using Eql[X, Y]) as Eql[List[X], List[Y]] = Eql.derived
val b: Byte = 1
val c: Char = 2
val i: Int = 3
val l: Long = 4L
val ii: Integer = i
List(b) == List(l)
List(l) == List(c)
List(b) != List(c)
List(i) == List(l)
List(i) == List(ii)
List(ii) == List(l)
List(b) == List(ii)
List(ii) == List(l)
import reflect.ClassTag
val BooleanTag: ClassTag[Boolean] = ClassTag.Boolean
class Setting[T: ClassTag] {
def doSet() = implicitly[ClassTag[T]] match {
case BooleanTag =>
case _ =>
}
}
}
|
som-snytt/dotty
|
tests/pos/multiversal.scala
|
Scala
|
apache-2.0
| 594 |
// A is a class, so W does not conform to A in bytecode. An access (w: W).m() requires a cast to A.
trait W extends A
class C extends W
object Test {
def main(args: Array[String]): Unit = {
val w: W = new C
assert(w.m() == 1)
}
}
|
lrytz/scala
|
test/files/run/t10249/Test.scala
|
Scala
|
apache-2.0
| 243 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.cdi.spring.beans
import java.beans.PropertyEditor
import scala.util.matching.Regex
import scala.collection.mutable
import scala.collection.immutable
import org.springframework.beans.{PropertyEditorRegistrar, PropertyEditorRegistry}
/** Property editor registrar for Scala property editors.
*
*/
class ScalaEditorRegistrar extends PropertyEditorRegistrar {
def registerCustomEditors(registry: PropertyEditorRegistry): Unit = {
implicit val r = registry
    // Options
    register(classOf[Option[Any]], new OptionEditor())
// Types
register(classOf[Regex], new RegexEditor())
//Iterable
register(classOf[collection.Iterable[Any]], new ScalaCollectionEditor(() => collection.Seq.newBuilder[Any]))
// Seq
register(classOf[collection.Seq[Any]], new ScalaCollectionEditor(() => collection.Seq.newBuilder[Any]))
register(classOf[immutable.Seq[Any]], new ScalaCollectionEditor(() => collection.immutable.Seq.newBuilder[Any]))
register(classOf[mutable.Seq[Any]], new ScalaCollectionEditor(() => mutable.Seq.newBuilder[Any]))
// IndexedSeq
register(classOf[collection.IndexedSeq[Any]], new ScalaCollectionEditor(() => collection.IndexedSeq.newBuilder[Any]))
register(classOf[immutable.IndexedSeq[Any]], new ScalaCollectionEditor(() => immutable.IndexedSeq.newBuilder[Any]))
register(classOf[mutable.IndexedSeq[Any]], new ScalaCollectionEditor(() => mutable.IndexedSeq.newBuilder[Any]))
// LinearSeq
register(classOf[collection.LinearSeq[Any]], new ScalaCollectionEditor(() => collection.LinearSeq.newBuilder[Any]))
register(classOf[immutable.LinearSeq[Any]], new ScalaCollectionEditor(() => immutable.LinearSeq.newBuilder[Any]))
// Buffer
register(classOf[mutable.Buffer[Any]], new ScalaCollectionEditor(() => mutable.Buffer.newBuilder[Any]))
register(classOf[mutable.ListBuffer[Any]], new ScalaCollectionEditor(() => mutable.ListBuffer.newBuilder[Any]))
// Set
register(classOf[collection.Set[Any]], new ScalaCollectionEditor(() => collection.Set.newBuilder[Any]))
register(classOf[immutable.Set[Any]], new ScalaCollectionEditor(() => immutable.Set.newBuilder[Any]))
register(classOf[mutable.Set[Any]], new ScalaCollectionEditor(() => mutable.Set.newBuilder[Any]))
// Map
register(classOf[collection.Map[Any, Any]], new ScalaCollectionEditor(() => collection.Map.newBuilder[Any, Any]))
register(classOf[immutable.Map[Any, Any]], new ScalaCollectionEditor(() => immutable.Map.newBuilder[Any, Any]))
register(classOf[mutable.Map[Any, Any]], new ScalaCollectionEditor(() => mutable.Map.newBuilder[Any, Any]))
register(classOf[mutable.HashMap[Any, Any]], new ScalaCollectionEditor(() => mutable.HashMap.newBuilder[Any, Any]))
}
@inline
private def register(clazz: Class[_], editor: PropertyEditor)(implicit registry: PropertyEditorRegistry): Unit = {
registry.registerCustomEditor(clazz, editor)
}
}
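// Wiring sketch (bean layout is an assumption, not from the original file): register the
// registrar through Spring's CustomEditorConfigurer so that bean properties typed as Scala
// Option, Seq, Set or Map can be populated from plain values in bean definitions, e.g.
//   <bean class="org.springframework.beans.factory.config.CustomEditorConfigurer">
//     <property name="propertyEditorRegistrars">
//       <list><bean class="org.beangle.cdi.spring.beans.ScalaEditorRegistrar"/></list>
//     </property>
//   </bean>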
|
beangle/cdi
|
spring/src/main/scala/org/beangle/cdi/spring/beans/ScalaEditorRegistrar.scala
|
Scala
|
lgpl-3.0
| 3,725 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio
import org.apache.beam.sdk.io.{DefaultFilenamePolicy, FileBasedSink}
import org.apache.beam.sdk.io.fs.ResourceId
import org.apache.beam.sdk.options.ValueProvider.StaticValueProvider
/**
* Converters for Beam Java SDK APIs. Import all.
*
* {{{
* import com.spotify.scio.JavaConverters._
* }}}
*/
object JavaConverters {
/** Enhanced version of [[String]] with Beam Java SDK converter methods. */
implicit class RichString(private val s: String) extends AnyVal {
/** Convert the string to a [[ResourceId]]. */
def toResourceId: ResourceId =
FileBasedSink.convertToFileResourceIfPossible(s)
def toFilenamePolicy: DefaultFilenamePolicy =
DefaultFilenamePolicy.fromStandardParameters(
StaticValueProvider.of(s.toResourceId),
null,
null,
false
)
}
/** Scio version of [[DefaultFilenamePolicy]]. */
final case class FilenamePolicy(
baseFilename: String,
shardTemplate: String = null,
templateSuffix: String = null,
windowedWrites: Boolean = false
) {
/**
* Convert the filename policy to a
* [[org.apache.beam.sdk.io.DefaultFilenamePolicy DefaultFilenamePolicy]].
*/
def asJava: DefaultFilenamePolicy =
DefaultFilenamePolicy.fromStandardParameters(
StaticValueProvider.of(baseFilename.toResourceId),
shardTemplate,
templateSuffix,
windowedWrites
)
}
/** Enhanced version of [[Any]] with Beam Java SDK converter methods. */
implicit class RichAny[T](private val value: T) extends AnyVal {
/**
* Convert the value to a
* [[org.apache.beam.sdk.options.ValueProvider.StaticValueProvider StaticValueProvider]].
*/
def toStaticValueProvider: StaticValueProvider[T] =
StaticValueProvider.of(value)
}
}
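// Usage sketch (paths and values are assumptions, not from the original file):
//   import com.spotify.scio.JavaConverters._
//   val resource = "gs://my-bucket/output/part".toResourceId
//   val policy   = FilenamePolicy("gs://my-bucket/output/part", windowedWrites = false).asJava
//   val shards   = 10.toStaticValueProvider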
|
spotify/scio
|
scio-core/src/main/scala/com/spotify/scio/JavaConverters.scala
|
Scala
|
apache-2.0
| 2,422 |
/*
* Copyright (c) 2012-2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
// SBT
import sbt._
import Keys._
object BuildSettings {
// Basic settings for our app
lazy val basicSettings = Seq[Setting[_]](
organization := "com.snowplowanalytics",
version := "1.1.0",
description := "The Snowplow Hadoop Enrichment process, written in Scalding for Hadoop 2.4",
scalaVersion := "2.10.4",
scalacOptions := Seq("-deprecation", "-encoding", "utf8",
"-target:jvm-1.7"),
parallelExecution in Test := false, // Parallel tests cause havoc with MapReduce
logBuffered := false, // For debugging Specs2 tests
resolvers ++= Dependencies.resolutionRepos
)
// Makes our SBT app settings available from within the ETL
lazy val scalifySettings = Seq(sourceGenerators in Compile <+= (sourceManaged in Compile, version, name, organization, scalaVersion) map { (d, v, n, o, sv) =>
val file = d / "settings.scala"
IO.write(file, """package com.snowplowanalytics.snowplow.enrich.hadoop.generated
|object ProjectSettings {
| val version = "%s"
| val name = "%s"
| val organization = "%s"
| val scalaVersion = "%s"
|}
|""".stripMargin.format(v, n, o, sv))
Seq(file)
})
// sbt-assembly settings for building a fat jar
import sbtassembly.Plugin._
import AssemblyKeys._
lazy val sbtAssemblySettings = assemblySettings ++ Seq(
// Simpler jar name
jarName in assembly := {
name.value + "-" + version.value + ".jar"
},
// Drop these jars
excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
val excludes = Set(
"junit-4.5.jar", // We shouldn't need JUnit
"jsp-api-2.1-6.1.14.jar",
"jsp-2.1-6.1.14.jar",
"jasper-compiler-5.5.12.jar",
"minlog-1.2.jar", // Otherwise causes conflicts with Kyro (which bundles it)
"janino-2.5.16.jar", // Janino includes a broken signature, and is not needed anyway
"commons-beanutils-core-1.8.0.jar", // Clash with each other and with commons-collections
"commons-beanutils-1.7.0.jar", // "
"hadoop-core-1.0.3.jar", // Brought in via dfs-datastores-cascading-1.3.4
"protobuf-java-2.4.1.jar" // Hadoop needs 2.5.0
)
cp filter { jar => excludes(jar.data.getName) }
},
mergeStrategy in assembly <<= (mergeStrategy in assembly) {
(old) => {
case x if x.endsWith("project.clj") => MergeStrategy.discard // Leiningen build files
case x if x.startsWith("META-INF") => MergeStrategy.discard // More bumf
case x if x.endsWith(".html") => MergeStrategy.discard
case x => old(x)
}
}
)
lazy val buildSettings = basicSettings ++ scalifySettings ++ sbtAssemblySettings
}
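// Shape of the file generated by scalifySettings for this build (the `name` value is an
// assumption; the others come from basicSettings above):
//   package com.snowplowanalytics.snowplow.enrich.hadoop.generated
//   object ProjectSettings {
//     val version = "1.1.0"
//     val name = "snowplow-hadoop-enrich"
//     val organization = "com.snowplowanalytics"
//     val scalaVersion = "2.10.4"
//   }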
|
mdavid/lessig-bigdata
|
lib/snowplow/3-enrich/scala-hadoop-enrich/project/BuildSettings.scala
|
Scala
|
mit
| 3,467 |
package org.schedoscope.dsl.transformations
import org.scalatest.{FlatSpec, Matchers}
import org.schedoscope.dsl.View
import org.schedoscope.dsl.transformations.HiveTransformation._
case class HiveView() extends View {
val f = fieldOf[String]
transformVia {
() =>
HiveTransformation(
insertInto(this, "select * from view"))
.defineVersion("v2.2")
}
}
class TransformationTest extends FlatSpec with Matchers {
"the define version method" should "change a checksum" in {
//new transformation
val transformation = HiveTransformation("select * from view")
val checksum1 = transformation.checksum
//overwrite checksum with the same string as the hiveql
transformation.defineVersion("select * from view")
val checksum2 = transformation.checksum
//change checksum with version
transformation.defineVersion("v2.2")
val checksum3 = transformation.checksum
checksum1 shouldBe Checksum.digest("select * from view")
checksum1 shouldBe checksum2
checksum3 should not be checksum1
checksum3 should not be checksum2
}
it should "change the checksum of a transformation in a view" in {
//new transformation
val view = new HiveView()
view.transformation().checksum shouldBe Checksum.digest("v2.2")
}
}
|
christianrichter/schedoscope
|
schedoscope-core/src/test/scala/org/schedoscope/dsl/transformations/TransformationTest.scala
|
Scala
|
apache-2.0
| 1,307 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Example: Read using typed BigQuery Storage API with annotated case classes
// Usage:
// `sbt "runMain com.spotify.scio.examples.extra.TypedStorageBigQueryTornadoes
// --project=[PROJECT] --runner=DataflowRunner --zone=[ZONE]
// --output=[PROJECT]:[DATASET].[TABLE]"`
package com.spotify.scio.examples.extra
import com.spotify.scio.bigquery._
import com.spotify.scio.ContextAndArgs
object TypedStorageBigQueryTornadoes {
// Annotate input class with schema inferred.
// Class `Row` will be expanded into a case class with selected fields. A companion
// object will also be generated to provide easy access to original table from annotation,
// `TableSchema` and converter methods between the generated case class and `TableRow`.
@BigQueryType.fromStorage(
"bigquery-public-data:samples.gsod",
selectedFields = List("tornado", "month"),
rowRestriction = "tornado = true"
)
class Row
// Annotate output case class.
// Note that the case class is already defined and will not be expanded. Only the companion
// object will be generated to provide easy access to `TableSchema` and converter methods.
@BigQueryType.toTable
case class Result(month: Long, tornado_count: Long)
def main(cmdlineArgs: Array[String]): Unit = {
val (sc, args) = ContextAndArgs(cmdlineArgs)
// Get input from BigQuery and convert elements from `TableRow` to `Row`.
// SELECT query from the original annotation is used by default.
sc.typedBigQuery[Row]()
.map(_.month)
.countByValue
.map(kv => Result(kv._1, kv._2))
// Convert elements from Result to TableRow and save output to BigQuery.
.saveAsTypedBigQueryTable(
Table.Spec(args("output")),
writeDisposition = WRITE_TRUNCATE,
createDisposition = CREATE_IF_NEEDED
)
sc.run()
()
}
}
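// Rough shape of what @BigQueryType.fromStorage generates for `Row` (illustrative only;
// the exact members and nullability come from scio's macro and the gsod table schema):
//   case class Row(tornado: Option[Boolean], month: Long)
//   object Row {
//     // plus the table spec, TableSchema, and TableRow <-> Row converter methods
//   }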
|
regadas/scio
|
scio-examples/src/main/scala/com/spotify/scio/examples/extra/TypedStorageBigQueryTornadoes.scala
|
Scala
|
apache-2.0
| 2,438 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.BoxValidationFixture
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
class CP667Spec extends WordSpec with MockitoSugar with Matchers with BoxValidationFixture[ComputationsBoxRetriever] {
val boxRetriever = mock[ComputationsBoxRetriever]
override def setUpMocks = {
when(boxRetriever.cpQ8()).thenReturn(CPQ8(Some(false)))
}
testBoxIsZeroOrPositive("CP667", CP667.apply)
testBecauseOfDependendBoxThenCannotExist("CP667", CP667.apply) {
when(boxRetriever.cpQ8()).thenReturn(CPQ8(Some(true))).getMock[ComputationsBoxRetriever]
}
}
|
pncampbell/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/computations/CP667Spec.scala
|
Scala
|
apache-2.0
| 1,355 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.internal.SQLConf
/**
 * A thin wrapper over `sqlRDD` that makes all the configs captured in the given `SQLConf`
 * effective while the wrapped SQL plan is executed.
 * Note that this means configurations set after the creation of this RDD and before its
 * execution are not picked up.
*
* @param sqlRDD the `RDD` generated by the SQL plan
* @param conf the `SQLConf` to apply to the execution of the SQL plan
*/
class SQLExecutionRDD(
var sqlRDD: RDD[InternalRow], @transient conf: SQLConf) extends RDD[InternalRow](sqlRDD) {
private val sqlConfigs = conf.getAllConfs
private lazy val sqlConfExecutorSide = {
val newConf = new SQLConf()
sqlConfigs.foreach { case (k, v) => newConf.setConfString(k, v) }
newConf
}
override val partitioner = firstParent[InternalRow].partitioner
override def getPartitions: Array[Partition] = firstParent[InternalRow].partitions
override def compute(split: Partition, context: TaskContext): Iterator[InternalRow] = {
// If we are in the context of a tracked SQL operation, `SQLExecution.EXECUTION_ID_KEY` is set
// and we have nothing to do here. Otherwise, we use the `SQLConf` captured at the creation of
// this RDD.
if (context.getLocalProperty(SQLExecution.EXECUTION_ID_KEY) == null) {
SQLConf.withExistingConf(sqlConfExecutorSide) {
firstParent[InternalRow].iterator(split, context)
}
} else {
firstParent[InternalRow].iterator(split, context)
}
}
}
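// Illustrative timeline of the conf-capture semantics described above (key and values are
// examples, not from the original file):
//   spark.conf.set("spark.sql.shuffle.partitions", "5")
//   val rdd = new SQLExecutionRDD(planRdd, spark.sessionState.conf)  // captures "5"
//   spark.conf.set("spark.sql.shuffle.partitions", "100")            // not visible to tasks
// With no tracked execution id set, compute() runs the parent iterator under the captured
// value ("5"); inside a tracked SQL execution the configs are propagated by other means,
// so the parent iterator is used directly.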
|
goldmedal/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecutionRDD.scala
|
Scala
|
apache-2.0
| 2,452 |
package au.com.dius.pact.provider.sbtsupport
import au.com.dius.pact.provider.unfiltered.Conversions
import au.com.dius.pact.model.{Request, Response}
import au.com.dius.pact.provider.CollectionUtils
import com.typesafe.scalalogging.StrictLogging
import com.ning.http.client.FluentStringsMap
import dispatch.url
import scala.collection.JavaConversions
import scala.concurrent.{ExecutionContext, Future}
object HttpClient extends StrictLogging {
def run(request:Request)(implicit executionContext: ExecutionContext):Future[Response] = {
logger.debug("request=" + request)
val query = new FluentStringsMap()
if (request.getQuery != null) {
val queryMap = CollectionUtils.javaLMapToScalaLMap(request.getQuery)
queryMap.foldLeft(query) {
(fsm, q) => q._2.foldLeft(fsm) { (m, a) => m.add(q._1, a) }
}
}
val headers = if (request.getHeaders == null) None
else Some(JavaConversions.mapAsScalaMap(request.getHeaders))
val r = url(request.getPath).underlying(
_.setMethod(request.getMethod).setQueryParams(query)
) <:< headers.getOrElse(Map())
val body = if (request.getBody != null) request.getBody.orElse("") else null
val httpRequest = if (body != null) r.setBody(body) else r
dispatch.Http(httpRequest).map(Conversions.dispatchResponseToPactResponse)
}
}
|
flaregames/pact-jvm
|
pact-jvm-provider/src/main/scala/au/com/dius/pact/provider/sbtsupport/HttpClient.scala
|
Scala
|
apache-2.0
| 1,335 |
package com.twitter.finagle.mux
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{OneInstancePerTest, FunSpec}
@RunWith(classOf[JUnitRunner])
class TagMapTest extends FunSpec with OneInstancePerTest {
def test(range: Range, fastSize: Int) {
describe("TagMap[range=%d until %d by %d, fastSize=%d]".format(
range.start, range.end, range.step, fastSize)) {
val set = TagSet(range)
val ints = TagMap[java.lang.Integer](set)
it("should maintain mappings between tags and elems") {
for (i <- range)
assert(ints.map(-i) === Some(i))
for (i <- range)
assert(ints.unmap(i) === Some(-i))
}
it("should iterate over the mapping") {
for (i <- range)
assert(ints.map(-i) === Some(i))
assert(ints.sameElements(range map (i => (i, -i))))
ints.unmap(3+range.start)
ints.unmap(8+range.start)
assert(ints.sameElements(range collect {
case i if i != 3+range.start && i != 8+range.start => (i, -i)
}))
// Works in the presence of sharing the underlying
// TagSet.
assert(set.acquire() === Some(3+range.start))
assert(ints.sameElements(range collect {
case i if i != 3+range.start && i != 8+range.start => (i, -i)
}))
}
}
}
for (range <- Seq(Range(0, 10), Range(10, 20)); fast <- Seq(0, 1, 5, 10))
test(range, fast)
}
|
travisbrown/finagle
|
finagle-mux/src/test/scala/com/twitter/finagle/mux/TagMapTest.scala
|
Scala
|
apache-2.0
| 1,463 |
import sbt._
import Keys._
object ApplicationBuild extends Build {
val appName = "json-lib"
val appVersion = "1.0-SNAPSHOT"
val local: Project.Initialize[Option[sbt.Resolver]] = version { (version: String) =>
val localPublishRepo = "./repository"
if(version.trim.endsWith("SNAPSHOT"))
Some(Resolver.file("snapshots", new File(localPublishRepo + "/snapshots")))
else Some(Resolver.file("releases", new File(localPublishRepo + "/releases")))
}
lazy val baseSettings = Defaults.defaultSettings ++ Seq(
autoScalaLibrary := false,
crossPaths := false
)
lazy val root = Project("root", base = file("."))
.settings(baseSettings: _*)
.settings(
publishLocal := {},
publish := {}
).aggregate(
jsonlib
)
lazy val jsonlib = Project(appName, base = file("jsonlib"))
.settings(baseSettings: _*)
.settings(
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "Reactive couchbase" at "https://raw.github.com/ReactiveCouchbase/repository/master/snapshots/",
libraryDependencies += "com.fasterxml.jackson.core" % "jackson-core" % "2.7.1",
libraryDependencies += "com.fasterxml.jackson.core" % "jackson-annotations" % "2.7.1",
libraryDependencies += "com.fasterxml.jackson.core" % "jackson-databind" % "2.7.1",
libraryDependencies += "org.reactivecouchbase" % "common-lib" % "1.0-SNAPSHOT",
libraryDependencies += "org.reactivecouchbase" % "validation-lib" % "1.0-SNAPSHOT",
libraryDependencies += "joda-time" % "joda-time" % "2.9.2",
libraryDependencies += "junit" % "junit" % "4.11" % "test",
libraryDependencies += "com.novocode" % "junit-interface" % "0.9" % "test",
organization := "org.reactivecouchbase",
version := appVersion,
publishTo <<= local,
publishMavenStyle := true,
publishArtifact in Test := false,
pomIncludeRepository := { _ => false }
)
}
|
ReactiveCouchbase/json-lib
|
project/Build.scala
|
Scala
|
apache-2.0
| 1,991 |
package org.flowpaint.filters
import org.flowpaint.property.Data
import org.flowpaint.util.{DataSample, PropertyRegister, MathUtils}
/**
 * Calculates the distance along the path, as well as the length of the previous segment.
*
* @author Hans Haggstrom
*/
// TODO: Add velocity calculation too
class DistanceCalculatorFilter extends PathProcessor {
var previousX = 0f
var previousY = 0f
var previousDistance = 0f
override protected def onInit() = {
previousX = 0f
previousY = 0f
previousDistance = 0f
}
protected def processPathPoint(pointData: Data) : List[Data] = {
    // TODO: Calculate distance along the outer edge of the stroke (at radius distance from the center).
val x = pointData.getFloatProperty(PropertyRegister.PATH_X, 0)
val y = pointData.getFloatProperty(PropertyRegister.PATH_Y, 0)
if (firstPoint) previousDistance = 0
val previousSegmentLength = if (firstPoint) 0f else MathUtils.distance(previousX, previousY, x, y)
pointData.setFloatProperty(PropertyRegister.PREVIOUS_SEGMENT_LENGTH, previousSegmentLength)
pointData.setFloatProperty(PropertyRegister.DISTANCE, previousDistance + previousSegmentLength)
previousX = x
previousY = y
previousDistance += previousSegmentLength
List(pointData)
}
}
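// Worked example (points chosen for illustration, not from the original file): feeding
// path points (0,0), (3,4), (6,8) through processPathPoint yields
//   PREVIOUS_SEGMENT_LENGTH = 0, 5, 5     (each hop is a 3-4-5 right triangle)
//   DISTANCE                = 0, 5, 10    (running sum of the segment lengths)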
|
zzorn/flowpaint
|
src/main/scala/org/flowpaint/filters/DistanceCalculatorFilter.scala
|
Scala
|
gpl-2.0
| 1,362 |
package mesosphere.marathon
package state
import mesosphere.{ UnitTest, ValidationTestLike }
import mesosphere.marathon.state.PathId._
import scala.collection.SortedSet
class PathIdTest extends UnitTest with ValidationTestLike {
"A PathId" can {
"be parsed from string" in {
Given("A base id")
val path = PathId("/a/b/c/d")
When("The same path as list")
val reference = PathId("a" :: "b" :: "c" :: "d" :: Nil)
Then("the path is equal")
path should be(reference)
}
"parse the empty list from empty root string" in {
When("The same path as list")
val reference = PathId("/")
Then("the path is equal")
PathId.empty should be(reference)
}
"parse safePath from itself" in {
When("The path is empty")
PathId.fromSafePath(PathId.empty.safePath) should equal(PathId.empty)
When("The path isn't empty")
val reference = PathId("a" :: "b" :: "c" :: "d" :: Nil)
PathId.fromSafePath(reference.safePath) should equal(reference)
}
"be written and parsed from string" in {
Given("A base id")
val path = PathId("a/b/c/d")
When("The same path serialized to string and de-serialized again")
val reference = PathId(path.toString)
Then("the path is equal")
path should be(reference)
}
"compute the canonical path when path is relative" in {
Given("A base id")
val id = PathId("/a/b/c/d")
When("a relative path is canonized")
val path = PathId("./test/../e/f/g/./../").canonicalPath(id)
Then("the path is absolute and correct")
path should be(PathId("/a/b/c/d/e/f"))
}
"compute the canonical path when path is absolute" in {
When("a relative path is canonized")
val path = PathId("test/../a/b/c/d/d/../e/f/g/./../").canonicalPath()
Then("the path is absolute and correct")
path should be(PathId("/a/b/c/d/e/f"))
}
"compute the restOf with respect to a given path" in {
Given("A base id")
val id = PathId("a/b/c")
When("a rest of a path from a given path")
val path = PathId("a/b/c/d/e/f").restOf(id)
Then("the rest path is correct")
path should be(PathId("d/e/f"))
}
"append to a path" in {
Given("A base id")
val id = PathId("/a/b/c")
When("A path is appended to to the base")
val path = id.append("/d/e/f".toPath)
Then("the path is appended correctly")
path should be(PathId("/a/b/c/d/e/f"))
}
"give the taskTrackerRef path" in {
Given("base id's")
val id1 = PathId("/a/b/c")
val id2 = PathId("/a")
val id3 = PathId.empty
When("taskTrackerRef ids get computed")
val parent1 = id1.parent
val parent2 = id2.parent
val parent3 = id3.parent
Then("the taskTrackerRef path is correct")
parent1 should be(PathId("/a/b"))
parent2 should be(PathId.empty)
parent3 should be(PathId.empty)
}
"convert to a hostname" in {
Given("base id's")
val id1 = PathId("/a/b/c")
val id2 = PathId("/a")
val id3 = PathId.empty
When("hostnames get computed")
val host1 = id1.toHostname
val host2 = id2.toHostname
val host3 = id3.toHostname
Then("the hostname is valid")
host1 should be("c.b.a")
host2 should be("a")
host3 should be("")
}
}
"PathIds" should {
"handles root paths" in {
PathId("/").isRoot shouldBe true
PathId("").isRoot shouldBe true
}
"match another PathId" in {
PathId("/a/b/c").includes(PathId("/a/b")) shouldBe true
PathId("/a/b/c").includes(PathId("/a/b/d")) shouldBe false
PathId("/a/b/c").includes(PathId("/a")) shouldBe true
PathId("/a/b/c").includes(PathId("/other")) shouldBe false
}
"give all parents as sequence" in {
val parents = PathId("/a/b/c/d").allParents
parents should be(Seq(PathId("/a/b/c"), PathId("/a/b"), PathId("/a"), PathId("/")))
parents should have size 4
}
}
"An ordered PathID collection" should {
val a = PathId("/a")
val aa = a / "a"
val ab = a / "b"
val ac = a / "c"
val b = PathId("/b")
val c = PathId("/c")
"be sorted if all paths are on the same level" in {
SortedSet(a, b, a).toSeq should equal(Seq(a, b))
}
"be sorted if with paths on different levels" in {
SortedSet(a, b, aa, a).toSeq should equal(Seq(a, aa, b))
}
"be sorted if it was reversed" in {
SortedSet(c, b, a).toSeq should equal(Seq(a, b, c))
SortedSet(ac, ab, aa).toSeq should equal(Seq(aa, ab, ac))
}
}
"The PathId validation" when {
"passed legal characters" should {
"be valid" in {
val path = PathId("/foobar-0")
pathIdValidator(path) shouldBe aSuccess
}
}
"passed illegal characters" should {
"be invalid" in {
val path = PathId("/@§\\'foobar-0")
pathIdValidator(path) should haveViolations(
"/" -> "must fully match regular expression '^(([a-z0-9]|[a-z0-9][a-z0-9\\\\-]*[a-z0-9])\\\\.)*([a-z0-9]|[a-z0-9][a-z0-9\\\\-]*[a-z0-9])|(\\\\.|\\\\.\\\\.)$'")
}
}
}
}
|
guenter/marathon
|
src/test/scala/mesosphere/marathon/state/PathIdTest.scala
|
Scala
|
apache-2.0
| 5,193 |
package org.greencheek.spray.cache.memcached
import org.greencheek.util.memcached.WithMemcached
import org.specs2.runner.JUnitRunner
import org.junit.runner.RunWith
/**
* Created by dominictootell on 30/03/2014.
*/
// The jmemcache binary protocol does not play nicely with spy,
// therefore this class does not have the JUnit runner on it,
// but it can be run manually from an IDE.
//
// memcached-finagle doesn't do binary memcached
//
//@RunWith(classOf[JUnitRunner])
class BinaryMemcachedCacheSpec extends MemcachedCacheSpec{
override def getMemcacheContext(): WithMemcached = WithMemcached(true)
override def getMemcachedHostsString() : Option[String] = { Some("localhost:11211") }
}
|
tootedom/spray-cache-spymemcached
|
src/test/scala/org/greencheek/spray/cache/memcached/BinaryMemcachedCacheSpec.scala
|
Scala
|
apache-2.0
| 687 |
/**
* Created by Mathieu Leclaire on 23/04/18.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package org.openmole.gui.plugin.wizard.jar
import scala.concurrent.ExecutionContext.Implicits.global
import boopickle.Default._
import org.openmole.gui.ext.data._
import org.openmole.gui.ext.client.{InputFilter, OMPost}
import scaladget.bootstrapnative.bsn._
import scaladget.tools._
import autowire._
import org.scalajs.dom.raw.HTMLElement
import scaladget.bootstrapnative.SelectableButtons
import scala.concurrent.Future
import scala.scalajs.js.annotation._
import scalatags.JsDom.TypedTag
import scalatags.JsDom.all._
import org.openmole.gui.ext.api.Api
import org.openmole.gui.ext.data.DataUtils._
import org.openmole.gui.ext.client
import rx._
import scalajs.js
object TopLevelExports {
@JSExportTopLevel("jar")
val jar = js.Object {
new org.openmole.gui.plugin.wizard.jar.JarWizardFactory
}
}
class JarWizardFactory extends WizardPluginFactory {
type WizardType = JarWizardData
val fileType = JarArchive
def build(safePath: SafePath, onPanelFilled: (LaunchingCommand) ⇒ Unit = (LaunchingCommand) ⇒ {}): WizardGUIPlugin = new JarWizardGUI(safePath, onPanelFilled)
def parse(safePath: SafePath): Future[Option[LaunchingCommand]] = OMPost()[JarWizardAPI].parse(safePath).call()
def name: String = "Jar"
}
class JarWizardGUI(safePath: SafePath, onMethodSelected: (LaunchingCommand) ⇒ Unit) extends WizardGUIPlugin {
type WizardType = JarWizardData
def factory = new JarWizardFactory
val jarClasses: Var[Seq[FullClass]] = Var(Seq())
val isOSGI: Var[Boolean] = Var(false)
OMPost()[Api].isOSGI(safePath).call().foreach { o =>
isOSGI() = o
}
lazy val embedAsPluginCheckBox: SelectableButtons = radios()(
selectableButton("Yes", onclick = () ⇒ {}),
selectableButton("No", onclick = () ⇒ {})
)
val classTable: Var[Option[scaladget.bootstrapnative.DataTable]] = Var(None)
val methodTable: Var[Option[scaladget.bootstrapnative.DataTable]] = Var(None)
searchClassInput.nameFilter.trigger {
classTable.now.foreach { t ⇒
t.filter(searchClassInput.nameFilter.now)
}
}
OMPost()[JarWizardAPI].jarClasses(safePath).call().foreach { jc ⇒
val table = scaladget.bootstrapnative.DataTable(
rows = jc.map { c ⇒
scaladget.bootstrapnative.DataTable.DataRow(Seq(c.name))
}.toSeq,
bsTableStyle = scaladget.bootstrapnative.Table.BSTableStyle(bordered_table +++ hover_table, emptyMod))
classTable() = Some(table)
classTable.now.get.selected.trigger {
classTable.now.get.selected.now.foreach { s ⇒
OMPost()[JarWizardAPI].jarMethods(safePath, s.values.head).call().foreach { jm ⇒
val methodMap = jm.map { m ⇒ m.expand -> m }.toMap
methodTable() = Some(
scaladget.bootstrapnative.DataTable(
rows = jm.map { m ⇒
scaladget.bootstrapnative.DataTable.DataRow(Seq(m.expand))
}.toSeq,
bsTableStyle = scaladget.bootstrapnative.Table.BSTableStyle(bordered_table +++ hover_table, emptyMod))
)
methodTable.now.get.selected.trigger {
methodTable.now.get.selected.now.map(r ⇒ methodMap(r.values.head)).foreach { selectedMethod ⇒
onMethodSelected(JavaLaunchingCommand(
selectedMethod,
selectedMethod.args, selectedMethod.ret.map {
Seq(_)
}.getOrElse(Seq()))
)
}
}
}
}
}
}
lazy val searchClassInput = InputFilter("", "Ex: mynamespace.MyClass")
val tableCSS: ModifierSeq = Seq(
overflow := "auto",
height := 300,
)
lazy val panel: TypedTag[HTMLElement] = div(
div(client.columnCSS)(
Rx {
if (isOSGI()) hForm(
div(embedAsPluginCheckBox.render)
.render.withLabel("Embed jar as plugin?"),
span(client.modelHelp +++ client.columnCSS, "Your jar is an OSGI bundle. The best way to use it is to embed it as a plugin.").render
) else div(client.modelHelp,
div("Your jar is not an OSGI bundle. As an OSGI bundle is safer and more robust, we recommend you convert your jar to an OSGI bundle."),
a(href := "http://www.openmole.org/Plugin+Development.html", target := "_blank")("How to create an OSGI bundle?"))
},
h3("Classes"),
searchClassInput.tag,
div(tableCSS, paddingTop := 10)(
Rx {
classTable().map {
_.render
}.getOrElse(div())
}).render
),
div(client.columnCSS)(
h3("Methods"),
div(tableCSS)(
Rx {
methodTable().map {
_.render
}.getOrElse(div())
}).render
)
)
def save(
target: SafePath,
executableName: String,
command: String,
inputs: Seq[ProtoTypePair],
outputs: Seq[ProtoTypePair],
libraries: Option[String],
resources: Resources) = {
val embedAsPlugin = {
if (isOSGI.now)
if (embedAsPluginCheckBox.activeIndex == 0) true else false
else false
}
val plugin: Option[String] = {
if (embedAsPlugin) classTable.now.map {
_.selected.now.map {
_.values.headOption
}.flatten
}.flatten
else None
}
OMPost()[JarWizardAPI].toTask(
target,
executableName,
command,
inputs,
outputs,
libraries,
resources,
JarWizardData(embedAsPlugin, plugin, safePath)).call()
}
}
|
openmole/openmole
|
openmole/gui/plugins/org.openmole.gui.plugin.wizard.jar/src/main/scala/org/openmole/gui/plugin/wizard/jar/JarWizardGUI.scala
|
Scala
|
agpl-3.0
| 6,234 |
import io.circe.{ Decoder, Encoder }
import io.circe.generic.semiauto.{ deriveDecoder, deriveEncoder }
import sangria.macros._
import types._
object EpisodeEnum {
object EpisodeEnum extends GraphQLQuery {
val document: sangria.ast.Document = graphql"""query EpisodeEnum {
hero {
name
appearsIn
}
}"""
case class Variables()
object Variables { implicit val jsonEncoder: Encoder[Variables] = deriveEncoder[Variables] }
case class Data(hero: Hero)
object Data { implicit val jsonDecoder: Decoder[Data] = deriveDecoder[Data] }
case class Hero(name: Option[String], appearsIn: Option[List[Option[Episode]]])
object Hero {
implicit val jsonDecoder: Decoder[Hero] = deriveDecoder[Hero]
implicit val jsonEncoder: Encoder[Hero] = deriveEncoder[Hero]
}
}
}
|
muuki88/sbt-graphql
|
src/test/resources/apollo/starwars-circe/EpisodeEnum.scala
|
Scala
|
apache-2.0
| 806 |
package org.sofi.deadman.component.manager
import akka.actor._
import com.rbmhtechnology.eventuate._
import org.sofi.deadman.component.actor.TaskActor
import org.sofi.deadman.messages.command._
import org.sofi.deadman.messages.event._
final class CommandManager(val id: String, val eventLog: ActorRef) extends EventsourcedActor with ActorLogging {
// Actor registry
private var registry: Map[String, ActorRef] = Map.empty
// Load an actor
protected def actorFor(aggregate: String) =
registry.get(aggregate) match {
case Some(actor) ⇒ actor
case None ⇒
registry = registry + (aggregate -> context.actorOf(TaskActor.props(aggregate, id, eventLog)))
registry(aggregate)
}
// Forward commands to aggregate specific components
def onCommand: Receive = {
case st: ScheduleTask ⇒ actorFor(st.aggregate) forward st
case ct: CompleteTask ⇒ actorFor(ct.aggregate) forward ct
}
// Lazy load actors for non-expired task events
def onEvent: Receive = {
case t: Task ⇒ if (!t.isExpired) { val _ = actorFor(t.aggregate) }
}
}
object CommandManager {
def name(id: String): String = s"$id-task-manager"
def props(id: String, eventLog: ActorRef): Props = Props(new CommandManager(id, eventLog))
}
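// A minimal wiring sketch (added for illustration, not part of the original file): it assumes an
// ActorSystem and an Eventuate event log ActorRef are already available; the id is illustrative only.
object CommandManagerWiringExample {
  def wire(system: ActorSystem, eventLog: ActorRef): ActorRef =
    system.actorOf(CommandManager.props("writer-1", eventLog), CommandManager.name("writer-1"))
}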
|
SocialFinance/deadman-switch
|
core/src/main/scala/org/sofi/deadman/component/manager/CommandManager.scala
|
Scala
|
bsd-3-clause
| 1,268 |
package wafna.radius.protocol
import java.nio.ByteBuffer
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import StdAttrType._
@RunWith(classOf[JUnitRunner])
class TestPacketSerialization extends FunSuite {
val packetType = PacketType.AccessRequest.code
val packetId: Byte = 99
val userName: String = "Billy Bob"
val nasId: String = "Go NAS!"
val authenticator = (0 until 16).foldLeft(new Array[Byte](16)) { (a, i) => a(i) = i.toByte ; a }
def testAuth(auth: Array[Byte]): Unit = {
assert((0 until 16).map(v => auth(v) == v).forall(v => v), "authenticator")
}
def createPacket(attrs: Attribute *): Packet = {
new Packet(packetType, packetId, authenticator, attrs)
}
def testPacket(packet1: Packet): Unit = {
val buffer = new Array[Byte](Short.MaxValue)
packet1.write(buffer)
val packet2 = Packet read buffer
assertResult(packet1.code, "code")(packet2.code)
assertResult(packet1.packetId, "id")(packet2.packetId)
val attrs1 = packet1.attributes
val attrs2 = packet2.attributes
assertResult(attrs1.length, "length of attrs")(attrs2.length)
attrs1.zip(attrs2) map { pair =>
val a1 = pair._1
val a2 = pair._2
assertResult(a1.code, "attr type")(a2.code)
assertResult(a1.data.capacity(), s"attr value length: ${a1.code}")(a2.data.capacity())
}
}
// test("no attrs") {
// testPacket(createPacket())
// }
test("user name") {
testPacket(createPacket(new Attribute(UserName.code, userName)))
}
// test("user name and nas id") {
// testPacket(createPacket(new Attribute(UserName.code, userName), new Attribute(NASIdentifier.code, nasId)))
// }
// test("vendor specific") {
// testPacket(createPacket(List(new VendorSpecific(Vendor.AcmeWidgets, )))
// }
}
|
wafna/scud
|
radius/src/test/scala/wafna/radius/protocol/TestPacketSerialization.scala
|
Scala
|
unlicense
| 1,816 |
package io.getquill
import io.getquill.context.Context
trait TestEntities {
this: Context[_, _] =>
case class TestEntity(s: String, i: Int, l: Long, o: Option[Int])
case class TestEntity2(s: String, i: Int, l: Long, o: Option[Int])
case class TestEntity3(s: String, i: Int, l: Long, o: Option[Int])
case class TestEntity4(i: Long)
case class TestEntity5(s: String, i: Long)
val qr1 = quote {
query[TestEntity]
}
val qr2 = quote {
query[TestEntity2]
}
val qr3 = quote {
query[TestEntity3]
}
val qr4 = quote {
query[TestEntity4]
}
val qr5 = quote {
for {
a <- query[TestEntity]
} yield TestEntity5(a.s, a.l)
}
}
|
mentegy/quill
|
quill-core/src/test/scala/io/getquill/TestEntities.scala
|
Scala
|
apache-2.0
| 676 |
object LongCalls {
def foo(x: String => String) = x("45")
foo(/*start*/_.substring(1).concat("556")/*end*/)
}
//String => String
|
jastice/intellij-scala
|
scala/scala-impl/testdata/typeInference/expected/placeholder/LongCalls.scala
|
Scala
|
apache-2.0
| 133 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.sql
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.planner.plan.utils.WindowEmitStrategy.{TABLE_EXEC_EMIT_EARLY_FIRE_DELAY, TABLE_EXEC_EMIT_EARLY_FIRE_ENABLED}
import org.apache.flink.table.planner.utils.{TableConfigUtils, TableTestBase}
import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType}
import org.junit.{Before, Test}
class MiniBatchIntervalInferTest extends TableTestBase {
private val util = streamTestUtil()
val STRING = new VarCharType(VarCharType.MAX_LENGTH)
val LONG = new BigIntType()
val INT = new IntType()
@Before
def setup(): Unit = {
util.addDataStream[(Int, String, Long)](
"MyTable1", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
util.addDataStream[(Int, String, Long)](
"MyTable2", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
util.tableEnv.getConfig.getConfiguration.setBoolean(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
}
@Test
def testMiniBatchOnly(): Unit = {
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
val sql = "SELECT b, COUNT(DISTINCT a), MAX(b), SUM(c) FROM MyTable1 GROUP BY b"
util.verifyPlan(sql)
}
@Test
def testRedundantWatermarkDefinition(): Unit = {
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
util.addTableWithWatermark("MyTable3", util.tableEnv.from("MyTable1"), "rowtime", 0)
val sql = "SELECT b, COUNT(DISTINCT a), MAX(b), SUM(c) FROM MyTable3 GROUP BY b"
util.verifyPlan(sql)
}
@Test
def testWindowWithEarlyFire(): Unit = {
val tableConfig = util.tableEnv.getConfig
tableConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
withEarlyFireDelay(tableConfig, Time.milliseconds(500))
util.addTableWithWatermark("MyTable3", util.tableEnv.from("MyTable1"), "rowtime", 0)
val sql =
"""
| SELECT b, SUM(cnt)
| FROM (
| SELECT b,
| COUNT(a) as cnt,
| HOP_START(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND) as w_start,
| HOP_END(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND) as w_end
| FROM MyTable3
| GROUP BY b, HOP(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND)
| )
| GROUP BY b
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testWindowCascade(): Unit = {
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "3 s")
util.addTableWithWatermark("MyTable3", util.tableEnv.from("MyTable1"), "rowtime", 0)
val sql =
"""
| SELECT b,
| SUM(cnt)
| FROM (
| SELECT b,
| COUNT(a) as cnt,
| TUMBLE_ROWTIME(rowtime, INTERVAL '10' SECOND) as rt
| FROM MyTable3
| GROUP BY b, TUMBLE(rowtime, INTERVAL '10' SECOND)
| )
| GROUP BY b, TUMBLE(rt, INTERVAL '5' SECOND)
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testIntervalJoinWithMiniBatch(): Unit = {
util.addTableWithWatermark("LeftT", util.tableEnv.from("MyTable1"), "rowtime", 0)
util.addTableWithWatermark("RightT", util.tableEnv.from("MyTable2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
val sql =
"""
| SELECT b, COUNT(a)
| FROM (
| SELECT t1.a as a, t1.b as b
| FROM
| LeftT as t1 JOIN RightT as t2
| ON
| t1.a = t2.a AND t1.rowtime BETWEEN t2.rowtime - INTERVAL '5' SECOND AND
| t2.rowtime + INTERVAL '10' SECOND
| )
| GROUP BY b
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testRowtimeRowsOverWithMiniBatch(): Unit = {
util.addTableWithWatermark("MyTable3", util.tableEnv.from("MyTable1"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
val sql =
"""
| SELECT cnt, COUNT(c)
| FROM (
| SELECT c, COUNT(a)
| OVER (PARTITION BY c ORDER BY rowtime ROWS BETWEEN 5 preceding AND CURRENT ROW) as cnt
| FROM MyTable3
| )
| GROUP BY cnt
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testTemporalTableFunctionJoinWithMiniBatch(): Unit = {
util.addTableWithWatermark("Orders", util.tableEnv.from("MyTable1"), "rowtime", 0)
util.addTableWithWatermark("RatesHistory", util.tableEnv.from("MyTable2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
util.addFunction(
"Rates",
util.tableEnv.from("RatesHistory").createTemporalTableFunction($"rowtime", $"b"))
val sqlQuery =
"""
| SELECT r_a, COUNT(o_a)
| FROM (
| SELECT o.a as o_a, r.a as r_a
| FROM Orders As o,
| LATERAL TABLE (Rates(o.rowtime)) as r
| WHERE o.b = r.b
| )
| GROUP BY r_a
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testMultiOperatorNeedsWatermark1(): Unit = {
// infer result: miniBatchInterval=[Rowtime, 0ms]
util.addTableWithWatermark("LeftT", util.tableEnv.from("MyTable1"), "rowtime", 0)
util.addTableWithWatermark("RightT", util.tableEnv.from("MyTable2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
val sql =
"""
| SELECT
| b, COUNT(a),
| TUMBLE_START(rt, INTERVAL '5' SECOND),
| TUMBLE_END(rt, INTERVAL '5' SECOND)
| FROM (
| SELECT t1.a as a, t1.b as b, t1.rowtime as rt
| FROM
| LeftT as t1 JOIN RightT as t2
| ON
| t1.a = t2.a AND t1.rowtime BETWEEN t2.rowtime - INTERVAL '5' SECOND AND
| t2.rowtime + INTERVAL '10' SECOND
| )
| GROUP BY b,TUMBLE(rt, INTERVAL '5' SECOND)
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testMultiOperatorNeedsWatermark2(): Unit = {
util.addTableWithWatermark("LeftT", util.tableEnv.from("MyTable1"), "rowtime", 0)
util.addTableWithWatermark("RightT", util.tableEnv.from("MyTable2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "6 s")
val sql =
"""
| SELECT b, COUNT(a)
| OVER (PARTITION BY b ORDER BY rt ROWS BETWEEN 5 preceding AND CURRENT ROW)
| FROM (
| SELECT t1.a as a, t1.b as b, t1.rt as rt
| FROM
| (
| SELECT b,
| COUNT(a) as a,
| TUMBLE_ROWTIME(rowtime, INTERVAL '5' SECOND) as rt
| FROM LeftT
| GROUP BY b, TUMBLE(rowtime, INTERVAL '5' SECOND)
| ) as t1
| JOIN
| (
| SELECT b,
| COUNT(a) as a,
| HOP_ROWTIME(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND) as rt
| FROM RightT
| GROUP BY b, HOP(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND)
| ) as t2
| ON
| t1.a = t2.a AND t1.rt BETWEEN t2.rt - INTERVAL '5' SECOND AND
| t2.rt + INTERVAL '10' SECOND
| )
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testMultiOperatorNeedsWatermark3(): Unit = {
util.addTableWithWatermark("RightT", util.tableEnv.from("MyTable2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "6 s")
val sql =
"""
| SELECT t1.a, t1.b
| FROM (
| SELECT a, COUNT(b) as b FROM MyTable1 GROUP BY a
| ) as t1
| JOIN (
| SELECT b, COUNT(a) as a
| FROM (
| SELECT b, COUNT(a) as a,
| HOP_ROWTIME(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND) as rt
| FROM RightT
| GROUP BY b, HOP(rowtime, INTERVAL '5' SECOND, INTERVAL '6' SECOND)
| )
| GROUP BY b
| ) as t2
| ON t1.a = t2.a
""".stripMargin
util.verifyPlan(sql)
}
/**
   * Tests watermarkInterval trait inference across optimize blocks.
*/
@Test
def testMultipleWindowAggregates(): Unit = {
val stmtSet = util.tableEnv.createStatementSet()
util.addDataStream[(Int, Long, String)]("T1", 'id1, 'rowtime.rowtime, 'text)
util.addDataStream[(Int, Long, Int, String, String)](
"T2",
'id2, 'rowtime.rowtime, 'cnt, 'name, 'goods)
util.addTableWithWatermark("T3", util.tableEnv.from("T1"), "rowtime", 0)
util.addTableWithWatermark("T4", util.tableEnv.from("T2"), "rowtime", 0)
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "500 ms")
util.tableEnv.getConfig.getConfiguration.setLong(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 300L)
val table1 = util.tableEnv.sqlQuery(
"""
|SELECT id1, T3.rowtime AS ts, text
| FROM T3, T4
|WHERE id1 = id2
| AND T3.rowtime > T4.rowtime - INTERVAL '5' MINUTE
| AND T3.rowtime < T4.rowtime + INTERVAL '3' MINUTE
""".stripMargin)
util.tableEnv.registerTable("TempTable1", table1)
val table2 = util.tableEnv.sqlQuery(
"""
|SELECT id1,
| LISTAGG(text, '#') as text,
| TUMBLE_ROWTIME(ts, INTERVAL '6' SECOND) as ts
|FROM TempTable1
|GROUP BY TUMBLE(ts, INTERVAL '6' SECOND), id1
""".stripMargin)
util.tableEnv.registerTable("TempTable2", table2)
val table3 = util.tableEnv.sqlQuery(
"""
|SELECT id1,
| LISTAGG(text, '*')
|FROM TempTable2
|GROUP BY HOP(ts, INTERVAL '12' SECOND, INTERVAL '4' SECOND), id1
""".stripMargin)
val appendSink1 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING))
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"appendSink1", appendSink1)
stmtSet.addInsert("appendSink1", table3)
val table4 = util.tableEnv.sqlQuery(
"""
|SELECT id1,
| LISTAGG(text, '-')
|FROM TempTable1
|GROUP BY TUMBLE(ts, INTERVAL '9' SECOND), id1
""".stripMargin)
val appendSink2 = util.createAppendTableSink(Array("a", "b"), Array(INT, STRING))
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"appendSink2", appendSink2)
stmtSet.addInsert("appendSink2", table4)
val table5 = util.tableEnv.sqlQuery(
"""
|SELECT id1,
| COUNT(text)
|FROM TempTable2
|GROUP BY id1
""".stripMargin)
val appendSink3 = util.createRetractTableSink(Array("a", "b"), Array(INT, LONG))
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"appendSink3", appendSink3)
stmtSet.addInsert("appendSink3", table5)
util.verifyExplain(stmtSet)
}
@Test
def testMiniBatchOnDataStreamWithRowTime(): Unit = {
util.addDataStream[(Long, Int, String)]("T1", 'long, 'int, 'str, 'rowtime.rowtime)
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
val sql =
"""
|SELECT long,
| COUNT(str) as cnt,
| TUMBLE_END(rowtime, INTERVAL '10' SECOND) as rt
|FROM T1
|GROUP BY long, TUMBLE(rowtime, INTERVAL '10' SECOND)
""".stripMargin
util.verifyPlan(sql)
}
@Test
def testOverWindowMiniBatchOnDataStreamWithRowTime(): Unit = {
util.addDataStream[(Long, Int, String)]("T1", 'long, 'int, 'str, 'rowtime.rowtime)
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "3 s")
val sql =
"""
| SELECT cnt, COUNT(`int`)
| FROM (
| SELECT `int`,
| COUNT(str) OVER
| (PARTITION BY long ORDER BY rowtime ROWS BETWEEN 5 preceding AND CURRENT ROW) as cnt
| FROM T1
| )
| GROUP BY cnt
""".stripMargin
util.verifyPlan(sql)
}
private def withEarlyFireDelay(tableConfig: TableConfig, interval: Time): Unit = {
val intervalInMillis = interval.toMilliseconds
val preEarlyFireInterval = TableConfigUtils.getMillisecondFromConfigDuration(
tableConfig, TABLE_EXEC_EMIT_EARLY_FIRE_DELAY)
    if (preEarlyFireInterval != null && (preEarlyFireInterval != intervalInMillis)) {
      // The earlyFireInterval values of the two query configs are not equal and not the default.
      throw new RuntimeException("Currently not support different earlyFireInterval configs in " +
        "one job")
}
tableConfig.getConfiguration.setBoolean(TABLE_EXEC_EMIT_EARLY_FIRE_ENABLED, Boolean.box(true))
tableConfig.getConfiguration.setString(
TABLE_EXEC_EMIT_EARLY_FIRE_DELAY, intervalInMillis + " ms")
}
}
|
jinglining/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/MiniBatchIntervalInferTest.scala
|
Scala
|
apache-2.0
| 14,382 |
package cz.jenda.pidifrky.ui
import android.os.Bundle
import cz.jenda.pidifrky.R
import cz.jenda.pidifrky.logic.DebugReporter
import cz.jenda.pidifrky.ui.api.{BasicTabActivity, ExceptionHandler, NavigationDrawer, TabFragment}
import cz.jenda.pidifrky.ui.fragments.{CardsAllListFragment, CardsNearestListFragment, MerchantsNearestListFragment}
/**
* @author Jenda Kolena, [email protected]
*/
class ListActivity extends BasicTabActivity with NavigationDrawer with ExceptionHandler {
override protected def tabLayoutId: Int = R.layout.activity_list
override protected val hasParentActivity: Boolean = false
override protected lazy val tabs: Seq[TabFragment] = Seq(CardsAllListFragment(), CardsNearestListFragment(), MerchantsNearestListFragment())
override protected lazy val preselectedTabIndex: Int = preselect
//it's mutable because it's set in onCreate
private var preselect = 1
override protected def onCreate(savedInstanceState: Bundle): Unit = {
//this needs to be first!
preselect = getIntent.getIntExtra(MapActivity.BundleKeys.ViewType, 1)
super.onCreate(savedInstanceState)
}
override protected def onNavigationDrawerClick: PartialFunction[Int, Unit] = {
case R.id.drawer_showSettings =>
goTo(classOf[SettingsActivity])
case _ =>
DebugReporter.debug("not implemented")
}
}
|
jendakol/pidifrky
|
client/src/main/scala/cz/jenda/pidifrky/ui/ListActivity.scala
|
Scala
|
apache-2.0
| 1,349 |
package com.ing.bakery.baker
import scala.concurrent.Future
object BakerReadinessCheck {
var ready: Boolean = false
def enable(): Unit = { BakerReadinessCheck.ready = true }
}
class BakerReadinessCheck extends (() => Future[Boolean]) {
override def apply(): Future[Boolean] = Future.successful(BakerReadinessCheck.ready)
}
|
ing-bank/baker
|
bakery/state/src/main/scala/com/ing/bakery/baker/BakerReadinessCheck.scala
|
Scala
|
mit
| 332 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys._
import sbtrelease.ReleasePlugin._
/**
* TODO add sbt release
*/
object Publish extends Build {
lazy val publishSettings = Seq(
publishTo <<= isSnapshot { isSnapshot =>
val id = if (isSnapshot) "content/repositories/snapshots" else "service/local/staging/deploy/maven2"
val uri = s"https://oss.sonatype.org/$id"
Some(Resolver.url(uri, url(uri))(Resolver.ivyStylePatterns))
},
publishArtifact in (Compile, packageDoc) := false,
publishArtifact in (Compile, packageSrc) := false,
publishArtifact in Test := false,
publishMavenStyle := true,
pomIncludeRepository := { x => false },
pomExtra := (
<url>http://github.com/helena/spark-cassandra</url>
<scm>
<url>[email protected]:helena/spark-cassandra.git</url>
<connection>scm:git:[email protected]:helena/spark-cassandra.git</connection>
</scm>
<developers>
<developer>
<id>helena</id>
<name>Helena Edelson</name>
<url>http://github.com/helena</url>
</developer>
</developers>
)
)
}
|
helena/spark-cassandra
|
project/Publish.scala
|
Scala
|
apache-2.0
| 1,930 |
package com.github.kickshare.indexer.es.exception
/**
 * Thrown when a batch fails for any reason.
 * @param msg a message describing the error.
*/
class IndexingFailedException(msg: String) extends Exception(msg)
|
kucera-jan-cz/kickshare
|
kickshare-tools/data-indexer/src/main/scala/com/github/kickshare/indexer/es/exception/IndexingFailedException.scala
|
Scala
|
apache-2.0
| 200 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.core.util
import com.liferay.portal.kernel.cache.CacheRegistryUtil
import com.liferay.portal.kernel.dao.db.DBFactoryUtil
import com.liferay.portal.kernel.dao.orm.{EntityCacheUtil, FinderCacheUtil}
import com.typesafe.scalalogging.slf4j.LazyLogging
trait LiferayCacheUtil extends LazyLogging {
def clearLiferayCaches() = {
//Clear only if no embedded hypersonic is used, to avoid locking problems
if (!isHSQLDB) {
logger.info("Clearing Liferay cache")
FinderCacheUtil.clearCache()
EntityCacheUtil.clearCache()
CacheRegistryUtil.clear()
}
}
def disableLiferayCaching() = {
if (isCachingEnabled && !isHSQLDB) CacheRegistryUtil.setActive(false)
}
def enableLiferayCaching() = {
if (isCachingEnabled && !isHSQLDB) CacheRegistryUtil.setActive(true)
}
lazy val isCachingEnabled = CacheRegistryUtil.isActive
lazy val isHSQLDB = DBFactoryUtil.getDB.getType.equals(com.liferay.portal.kernel.dao.db.DB.TYPE_HYPERSONIC)
}
|
nonblocking/cliwix
|
cliwix-core/src/main/scala/at/nonblocking/cliwix/core/util/LiferayCacheUtil.scala
|
Scala
|
agpl-3.0
| 1,792 |
/*
* Main method
*/
// Java style
object Echo {
def main(args: Array[String]): Unit = {
args foreach { println _ }
}
}
// Alternative
object Echo extends App {
for (arg <- args) println(arg)
}
/*
* Assertions.
* Assertions are used to document and check design-by-contract style invariants in code. They can
* be disabled at runtime with the `-Xdisable-assertions` command line option. For reference, see:
* http://www.scala-lang.org/api/current/index.html#scala.Predef$
*/
assert(1 > 0)
// `assume` is intended for static code analysis tools. It is otherwise equivalent to `assert`.
assume(1 > 0)
// `require` is used to check pre-conditions, blaming the caller of a method for violating them.
// Unlike other assertions, `require` throws `IllegalArgumentException` instead of `AssertionError`
// and can never be disabled at runtime.
require(1 > 0)
// `ensuring` is used on a method's return value to check post-conditions.
def square(a: Int) = { a * a } ensuring (_ >= 0)
/*
* Exceptions
*/
import scala.util.control.NonFatal
try {
// ...
} catch {
case NonFatal(e) => // Recommended way to catch all
case e: Exception => // ...
case _: Throwable => // Not recommended
}
/*
* Flexible casting.
* The following is semantically equivalent to `asInstanceOf[Any]`, but more flexible.
* For instance, it is possible to use different branches to perform multiple conditional casts at
* the same time for various types, perform conversions, and fallback or return `None` or `null`
* instead of throwing an exception, etc.
*/
e match {
case a: AnyRef => a
case _ => throw new ClassCastException
}
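// A slightly richer sketch (added for illustration, not from the original snippet): branches can
// target different types, apply a conversion along the way, and fall back to None instead of
// throwing a ClassCastException.
def asText(value: Any): Option[String] = value match {
  case s: String => Some(s)
  case n: Number => Some(n.toString)
  case _ => None
}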
/*
* Seq to variable length argument list
*/
def foo(args: Int*) = args.foreach { println(_) }
val nums = Seq(1, 2, 3)
foo(nums: _*)
/*
* Notable annotations
*/
// Automatic Java get and set methods
import scala.beans.{BeanProperty, BooleanBeanProperty}
case class A(@BeanProperty var i: Int, @BooleanBeanProperty var b: Boolean)
val a = A(1, true)
a.setI(2)
a.getI
a.setB(false)
a.isB
// Warns when tail-recursion optimization is not possible
import scala.annotation.tailrec
@tailrec def f(i: Int, s: Int = 0): Int = if (i > 0) f(i - 1, s + i) else s
// Warns when a match compiles to conditional expressions instead of tableswitch or lookupswitch
import scala.annotation.switch
(i: @switch) match {
case 1 => "One"
case 2 => "Two"
case _ => "Many"
}
// Suppress exhaustivity checking for pattern matching
(e: @unchecked) match { ... }
/*
* Bidirectional conversions between corresponding Scala and Java collections.
* For a list of supported conversions, see:
* http://www.scala-lang.org/api/current/scala/collection/JavaConversions$.html
* http://www.scala-lang.org/api/current/scala/collection/JavaConverters$.html
*/
// Implicit, automatic conversions
import scala.collection.JavaConversions._
// asScala, asJava, asJavaCollection, asJavaEnumeration, asJavaDictionary
import scala.collection.JavaConverters._
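// For example (illustrative only), explicit conversions in both directions with the decorators:
val javaInts: java.util.List[Int] = List(1, 2, 3).asJava
val scalaInts: scala.collection.mutable.Buffer[Int] = javaInts.asScala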
/*
* Transparently use Java collections as if they were Scala collections
*/
import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.mutable
var map: mutable.Map[String, String] = new java.util.HashMap[String, String]
map += "foo" -> "bar"
assert(map("foo") == "bar")
/*
* Mutable collections.
* Do not import a mutable collection directly. Import the `mutable` package
* instead and use the `mutable.` prefix to denote mutability explicitly.
*/
import scala.collection.mutable
val set = mutable.Set(1, 2, 3)
// Counter-example
import scala.collection.mutable.Set
val set = Set(1, 2, 3) // Too risky for the inattentive reader
/*
* Collection initialization
*/
val map = Map("one" -> 1, "two" -> 2, "three" -> 3)
val list = List(1, 2, 3)
val list = Seq(1, 2, 3) // Same as above
val list = 1 :: 2 :: 3 :: Nil // Same as above
/*
* Initialization with pattern matching
*/
val tuple = ("foo", 1, 0)
val (x, y, z) = tuple
assert((x, y, z) == ("foo", 1, 0))
val option = Some("foo")
val Some(foo) = option
assert(foo == "foo")
case class Foo(x: String, y: Int)
val foo = Foo("foo", 1)
val Foo(a, b) = foo
assert((a, b) == ("foo", 1))
val list = List(1, 2, 3, 4, 5, 6)
val x :: xs = list
assert((x, xs) == (1, List(2, 3, 4, 5, 6)))
// Same as above
val List(x, xs@_*) = list
assert((x, xs) == (1, List(2, 3, 4, 5, 6)))
// Skipping elements
val _ :: a :: b :: _ :: xs = list
assert((a, b, xs) == (2, 3, List(5, 6)))
// Works with other collections, too
val vector = Vector(1, 2, 3, 4, 5, 6)
val Vector(_, a, b, _, xs@_*) = vector
assert((a, b, xs) == (2, 3, Vector(5, 6)))
/*
* Regular expression extraction
*/
val regex = """(.)(.)(.)""".r // Creates a scala.util.matching.Regex object
val regex(a, b, c) = "xyz" // Matches and extracts regex against "xyz"
assert((a, b, c) == ("x", "y", "z"))
"xyz" match {
case regex(a, b, c) => // Match found
case _ => // No match
}
/*
* Extractors
*/
object Twice {
def unapply(x: Int) = if (x % 2 == 0) Some(x/2) else None
}
val Twice(i) = 20
assert(i == 10)
val Twice(j) = 15 // Throws MatchError
// Works with instances, too
class Foo(i: Int) {
def unapply(x: Int) = if (x == i) Some(Some(x)) else Some(None)
}
val foo = new Foo(10)
val foo(a) = 10
assert(a == Some(10))
val foo(b) = 12
assert(b == None)
/*
* Enumerations.
 * Java enumerations, as described in Joshua Bloch's "Effective Java", are one of the language's
 * most powerful features. In Scala, there are two commonly used alternatives to Java's `Enum`:
 * sealed case objects and the `Enumeration` trait. Unfortunately, neither supports all `Enum`
 * features: sealed case objects, for instance, cannot be enumerated (iterated over), and
 * `Enumeration` values cannot have fields or override methods. The examples below show how to
 * combine them to get a feature set equivalent to Java's `Enum`.
*/
sealed trait Gender
case object Male extends Gender
case object Female extends Gender
object Season extends Enumeration {
type Season = Value
val Spring, Summer, Autumn, Winter = Value
}
object Suit extends Enumeration {
type Suit = SuitVal
implicit def toVal(v: Value) = v.asInstanceOf[SuitVal]
case class SuitVal private[Suit] (symbol: Char) extends Val
val Spades = SuitVal('♠')
val Hearts = SuitVal('♥')
val Diamonds = SuitVal('♦')
val Clubs = SuitVal('♣')
}
object Lang extends Enumeration {
type Lang = LangVal
implicit def toVal(v: Value) = v.asInstanceOf[LangVal]
sealed abstract class LangVal extends Val {
def greet(name: String): String
}
val English = new LangVal {
def greet(name: String) = s"Welcome, $name."
}
val French = new LangVal {
def greet(name: String) = s"Bienvenue, $name."
}
}
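/*
 * Usage sketch for the combined approach above (added for illustration): sealed case objects give
 * exhaustive pattern matching, while the Enumeration-based variants can still be iterated and
 * carry fields or overridden methods.
 */
def describe(gender: Gender): String = gender match {
  case Male => "male"
  case Female => "female"
}
val allSeasons = Season.values // iteration works
val spadeSymbol = Suit.Spades.symbol // a value with a field
val greeting = Lang.English.greet("Ada") // a value with an overridden method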
/*
* Get current system time
*/
import System.{currentTimeMillis => now}
import System.{nanoTime => now}
|
marconilanna/snippets
|
snippets.scala
|
Scala
|
unlicense
| 6,835 |
/*
* Copyright © 2017 Full 360 Inc
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.full360.prometheus.http.finatra
import com.full360.prometheus.Prometheus
import com.full360.prometheus.http.HttpSummary
import com.twitter.finagle.http.Status.Ok
import com.twitter.finatra.http.routing.HttpRouter
import org.scalactic.TolerantNumerics
class FinatraSummarySpec extends FinatraBaseSpec with HttpSummary {
implicit val doubleEq = TolerantNumerics.tolerantDoubleEquality(200.0)
override def configureHttp(router: HttpRouter) = {
router
.filter[FinatraSummary]
.add[FinatraMetric]
}
test("Summary metric should provide a summary filter for Finatra which tracks the time an endpoint consumes") {
server.httpGet(
path = "/metrics",
andExpect = Ok,
withBody = "")
val array = Prometheus.getRegistry.replace('\n', ' ').split(' ')
registryShouldBe(
s"""# HELP ${summaryNamespace}_$summaryName $summaryHelp
|# TYPE ${summaryNamespace}_$summaryName summary
|${summaryNamespace}_$summaryName{method="get",code="200",path="/metrics",quantile="0.5",} ${array(16)}
|${summaryNamespace}_$summaryName{method="get",code="200",path="/metrics",quantile="0.9",} ${array(18)}
|${summaryNamespace}_$summaryName{method="get",code="200",path="/metrics",quantile="0.99",} ${array(20)}
|${summaryNamespace}_${summaryName}_count{method="get",code="200",path="/metrics",} ${array(22)}
|${summaryNamespace}_${summaryName}_sum{method="get",code="200",path="/metrics",} ${array(24)}
|""".stripMargin)
}
}
|
full360/prometheus_client_scala
|
client-finatra/src/test/scala/com/full360/prometheus/http/finatra/FinatraSummarySpec.scala
|
Scala
|
mit
| 2,632 |
/**
* Exercise 1:
*
* Set up a map of prices for a number of gizmos that you covet. Then produce a
* second map with the same keys and the prices at a 10 percent discount.
*
**/
val gizmos = Map("MAC" -> 3500, "iPhone" -> 12345, "SpaceShip" -> 1045600)
val discountgizmos = for((k, v) <- gizmos) yield (k, v * 0.9)
discountgizmos // => Map("MAC" -> 3150.0, "iPhone" -> 11110.5, "SpaceShip" -> 941040.0): Map[String, Double]
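// An alternative (added for illustration): mapValues keeps the keys and transforms only the values.
// In newer Scala versions it returns a lazy view, so call toMap if a strict Map is needed.
val discountedGizmos = gizmos.mapValues(_ * 0.9).toMap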
|
ragmha/scala-impatient
|
solutions/maps-and-tuples/ex1.scala
|
Scala
|
mit
| 427 |
/*
* This file is part of Evo2DSim.
*
* Evo2DSim is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Evo2DSim is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Evo2DSim. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vastness.evo2dsim.core.evolution.genomes
import org.vastness.evo2dsim.core.neuro.{Neuron, TransferFunction}
import org.vastness.evo2dsim.core.evolution.genomes.byte.ByteEvolutionManager
import org.vastness.evo2dsim.core.evolution.genomes.neat.NEATEvolutionManager
import org.vastness.evo2dsim.core.evolution.genomes.standard.STDEvolutionManager
trait EvolutionManager {
def probability: Double
def standardTransferFunction: TransferFunction
def blueprint: Set[Neuron] //Getter
def blueprint_=(b: Set[Neuron]) //Setter
def getBasicRandomGenome: Genome
}
object EvolutionManager {
  def apply(genomeName: String, probability: Double, genomeSettings: String, t_func: TransferFunction = TransferFunction.THANH): EvolutionManager = genomeName match {
    case "ByteGenome" => ByteEvolutionManager(probability, t_func, genomeSettings)
    case "NEATGenome" => new NEATEvolutionManager(probability, t_func)
    case "STDGenome" => STDEvolutionManager(probability, t_func, genomeSettings)
}
}
|
vchuravy/Evo2DSim
|
core/src/main/scala/org/vastness/evo2dsim/core/evolution/genomes/EvolutionManager.scala
|
Scala
|
mit
| 1,721 |
package monocle.state
import monocle.Getter
import cats.data.Reader
trait ReaderGetterSyntax {
@deprecated("no replacement", since = "3.0.0-M1")
implicit def toReaderGetterOps[S, A](getter: Getter[S, A]): ReaderGetterOps[S, A] =
new ReaderGetterOps[S, A](getter)
}
@deprecated("no replacement", since = "3.0.0-M1")
final class ReaderGetterOps[S, A](private val getter: Getter[S, A]) extends AnyVal {
/** transforms a Getter into a Reader */
@deprecated("no replacement", since = "3.0.0-M1")
def toReader: Reader[S, A] =
Reader(getter.get)
/** alias for toReader */
@deprecated("no replacement", since = "3.0.0-M1")
def rd: Reader[S, A] =
toReader
/** extracts the value viewed through the getter */
@deprecated("no replacement", since = "3.0.0-M1")
def ask: Reader[S, A] =
toReader
/** extracts the value viewed through the getter and applies `f` over it */
@deprecated("no replacement", since = "3.0.0-M1")
def asks[B](f: A => B): Reader[S, B] =
ask.map(f)
}
|
julien-truffaut/Monocle
|
state/src/main/scala/monocle/state/ReaderGetterSyntax.scala
|
Scala
|
mit
| 1,016 |
package chandu0101.scalajs.facades.examples.pages.components.leaflet
import chandu0101.scalajs.facades.examples.pages.common.CodeExample
import chandu0101.scalajs.facades.leaflet._
import japgolly.scalajs.react.ReactComponentB
import japgolly.scalajs.react.vdom.all._
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{global => g, literal => json}
import scala.scalajs.js.JSON
/**
* Created by chandrasekharkode on 3/3/15.
*/
object LGeoJSONStyle {
val code =
"""
| div( id := "map", width := "600px", height := "285px")
|
      | val geoJsonStates = [{
| "type": "Feature",
| "properties": {"party": "Republican"},
| "geometry": {
| "type": "Polygon",
| "coordinates": [[
| [-104.05, 48.99],
| [-97.22, 48.98],
| [-96.58, 45.94],
| [-104.03, 45.94],
| [-104.05, 48.99]
| ]]
| }
|}, {
| "type": "Feature",
| "properties": {"party": "Democrat"},
| "geometry": {
| "type": "Polygon",
| "coordinates": [[
| [-109.05, 41.00],
| [-102.06, 40.99],
| [-102.03, 36.99],
| [-109.04, 36.99],
| [-109.05, 41.00]
| ]]
| }
|}]
| // define map
| val map = LMap("map").setView(LLatLng(41.8369,-87.6847), 4.0)
| map.addLayer(getTileLayer)
| LIconDefault.imagePath = "images/" // set images path
| val styleFunction : js.Function1[js.Dynamic,js.Any] = (feature : js.Dynamic) => {
| if(feature.properties.party.toString == "Republican") json("color" -> "orange")
| else json("color" -> "green")
| }
| LGeoJson(JSON.parse(geoJsonStates),
| LGeoJsonOptions(
| style = styleFunction
| )
| ).addTo(map)
|
""".stripMargin
val component = ReactComponentB[Unit]("LGeoJSONStyle")
.render(P => {
div(
h3("GeoJSON Style Option "),
CodeExample(code)(
div(key := "map", id := "map", width := "600px", height := "285px")
)
)
})
.componentDidMount(scope => {
// define map
val map = LMap("map").setView(LLatLng(41.8369,-87.6847), 4.0)
map.addLayer(getTileLayer)
LIconDefault.imagePath = "images" // set images path
val styleFunction : js.Function1[js.Dynamic,js.Any] = (feature : js.Dynamic) => {
if(feature.properties.party.toString == "Republican") json("color" -> "orange")
else json("color" -> "green")
}
LGeoJson(JSON.parse(geoJsonStates),
LGeoJsonOptions.style(styleFunction).result
).addTo(map)
})
.buildU
def apply() = component()
}
|
CapeSepias/scalajs-facades
|
examples/src/main/scala/chandu0101/scalajs/facades/examples/pages/components/leaflet/LGeoJSONStyle.scala
|
Scala
|
mit
| 2,848 |
//
// Taranos Cloud Sonification Framework: Service Core
// Copyright (C) 2018 David Hinson, Netrogen Blue LLC ([email protected])
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
package org.taranos.mc.trunk.intraprocess
import org.taranos.mc.Cell
import org.taranos.mc.Common.ReportSectionsParser
import org.taranos.mc.trunk.intraprocess.BiasedElement.{BiasedConstructorMetaDecoder, BiasedUpdateMetaDecoder}
import org.taranos.mc.trunk.intraprocess.Signal.SignalTypes
import org.taranos.mc.trunk.intraprocess.TestableElement.TestableUpdateStateDecoder
import org.taranos.mc.trunk.intraprocess.TrunkElement.{CommonConstructorMetaDecoder, CommonDestructorMetaDecoder,
CommonQueryDecoder, CommonUpdateMetaDecoder}
import play.api.libs.json._
object SignalSink
{
import scala.collection.mutable
class Key (uniqueKey: TrunkElement.UniqueKey, symbol: Symbol = 'SignalSink)
extends SignalEndpoint.Key(uniqueKey, symbol)
class Meta (
uniqueKey: TrunkElement.UniqueKey,
tag: String,
badgeOpt: Option[String],
nameOpt: Option[String],
descriptionOpt: Option[String],
mode: Signal.ModeEnum.Mode = Signal.ModeEnum.Unbiased)
extends SignalEndpoint.Meta[SignalSink.Key](
new SignalSink.Key(uniqueKey),
tag,
badgeOpt,
nameOpt,
descriptionOpt,
mode)
class Attrs
extends SignalEndpoint.Attrs
class Refs (trunkKey: Trunk.Key)
extends SignalEndpoint.Refs(trunkKey)
class State (
val _traps: mutable.HashMap[TrunkElement.Key, Signal[_ >: Signal.SignalTypes]]
= mutable.HashMap.empty[TrunkElement.Key, Signal[_ >: Signal.SignalTypes]])
extends SignalEndpoint.State
{
override
def Report (sections: ReportSectionsParser): JsObject =
{
import scala.language.existentials
var report = super.Report(sections)
var traps: JsObject = Json.obj()
_traps.foreach(signalPair =>
{
val (key, signal) = signalPair
traps = traps ++ Json.obj(
TrunkElement.EncodeKey(key) -> signal.Report(sections))
})
report ++= Json.obj(TrunkModel.Glossary.kTrap -> traps)
report
}
}
case class Constructor (
_tag: String,
_badgeOpt: Option[String] = None,
_nameOpt: Option[String] = None,
_descriptionOpt: Option[String] = None,
_mode: Signal.ModeEnum.Mode = Signal.ModeEnum.Unbiased)
case class Destructor (
_key: SignalSink.Key)
case class Query (
keys: Vector[SignalSink.Key],
sectionsOpt: Option[String] = None)
extends TrunkElement.Query[SignalSink.Key](
keys,
sectionsOpt)
case class Update (
_key: SignalSink.Key,
_nameOpt: Option[String],
_descriptionOpt: Option[String],
// _linkKeyOpt: Option[SignalLink.Key],
// _tapKeyOpt: Option[SignalTap.Key],
_modeOpt: Option[Signal.ModeEnum.Mode],
_signalEncodedOpt: Option[String])
val kAnyKey = new Key(TrunkModel.Glossary.kAnyKeyBase)
val kNoneKey = new Key(TrunkModel.Glossary.kNoneKeyBase)
def DecodeConstructor (encoded: String): Constructor =
{
val constructor = Json.parse(encoded)
val commonMeta = new CommonConstructorMetaDecoder(constructor, Cell.ErrorCodes.SignalSinkConstructorInvalid)
val biasedMeta = new BiasedConstructorMetaDecoder(constructor)
val mode = biasedMeta._modeOpt.getOrElse(Signal.ModeEnum.Unbiased)
Constructor(
commonMeta._tag,
commonMeta._badgeOpt,
commonMeta._nameOpt,
commonMeta._descriptionOpt,
mode)
}
def DecodeDestructor (encoded: String): Destructor =
{
val destructor = Json.parse(encoded)
val commonMeta = new CommonDestructorMetaDecoder[SignalSink.Key](
destructor, Cell.ErrorCodes.SignalSinkDestructorInvalid)
Destructor(commonMeta._key)
}
def DecodeQuery (encoded: String): Query =
{
val query = Json.parse(encoded)
val commonQuery = new CommonQueryDecoder[SignalSink.Key](query)
Query(commonQuery._keysOpt.get, commonQuery._sectionsOpt)
}
def DecodeUpdate (encoded: String): Update =
{
val update = Json.parse(encoded)
val commonMeta = new CommonUpdateMetaDecoder[SignalSink.Key](
update, Cell.ErrorCodes.SignalSinkUpdateInvalid)
// val linkKeyOpt: Option[SignalLink.Key] =
// (update \\ "r" \\ "sl").validate[String] match
// {
// // case JsSuccess(value, _) => Some(TrunkElement.DecodeKey[SignalLink.Key](value))
// case JsSuccess(value, _) => assert(false); null
// case JsError(errors) => None
// }
//
// val tapKeyOpt: Option[SignalTap.Key] =
// (update \\ "r" \\ "st").validate[String] match
// {
// // case JsSuccess(value, _) => Some(TrunkElement.DecodeKey[SignalTap.Key](value))
// case JsSuccess(value, _) => assert(false); null
// case JsError(errors) => None
// }
val biasedMeta = new BiasedUpdateMetaDecoder(update)
val testableState = new TestableUpdateStateDecoder(update)
Update(
commonMeta._key,
commonMeta._nameOpt,
commonMeta._descriptionOpt,
// linkKeyOpt,
// tapKeyOpt,
biasedMeta._modeOpt,
testableState._signalEncodedOpt)
}
}
class SignalSink (
meta: SignalSink.Meta,
attrs: SignalSink.Attrs,
refs: SignalSink.Refs,
state: SignalSink.State,
listenerOpt: Option[ListenerElement])
(implicit protected val _trunkModel: TrunkModel)
extends SignalEndpoint[SignalSink.Key]
with NotifierElement
{
import scala.collection.mutable
//
// Meta:
//
protected
val _meta = meta
def GetMode: Signal.ModeEnum.Mode =
_meta._mode
//
// Attrs:
//
protected
val _attrs = attrs
//
// Refs:
//
protected
val _refs = refs
def BindLink (
linkKey: SignalLink.Key,
part: String,
isReciprocal: Boolean): Unit =
{
_refs._linkKeys += part -> linkKey
if (isReciprocal)
{
_trunkModel.GetSignalLinkOpt(GetTrunkKey, linkKey) match
{
case Some(link) => link.BindSink(GetKey, isReciprocal = false)
case None => throw TrunkException(Cell.ErrorCodes.SignalLinkInvalid)
}
}
}
def BindTap (
tapKey: SignalTap.Key,
isReciprocal: Boolean): Unit =
{
_refs._tapKeyOpt = Some(tapKey)
if (_refs._tapKeyOpt.isDefined && isReciprocal)
{
_trunkModel.GetSignalTapOpt(GetTrunkKey, _refs._tapKeyOpt.get) match
{
case Some(tap) => tap.BindSink(GetKey, isReciprocal = false)
case None => throw TrunkException(Cell.ErrorCodes.SignalTapInvalid)
}
}
}
def GetLinkKeys: mutable.HashMap[String, SignalLink.Key] =
_refs._linkKeys
def GetTapKeyOpt: Option[SignalTap.Key] =
_refs._tapKeyOpt
def UnbindLink (
linkKeyOpt: Option[SignalLink.Key] = None,
isReciprocal: Boolean): Unit =
{
linkKeyOpt.foreach(linkKey =>
{
if (isReciprocal)
{
_trunkModel.GetSignalLinkOpt(GetTrunkKey, linkKey) match
{
case Some(link) => link.UnbindSink(isReciprocal = false)
case None => // We don't care...
}
}
_refs._linkKeys.find(_._2 == linkKey) match
{
case Some(tuple) => _refs._linkKeys -= tuple._1
case None => // Weird, but we don't care...
}
})
}
def UnbindTap (isReciprocal: Boolean): Unit =
{
if (_refs._tapKeyOpt.isDefined && isReciprocal)
{
_trunkModel.GetSignalTapOpt(GetTrunkKey, _refs._tapKeyOpt.get) match
{
case Some(tap) => tap.UnbindSink(isReciprocal = false)
case None => // We don't care...
}
}
_refs._tapKeyOpt = None
}
//
// State:
//
protected
val _state = state
def GetLastTrappedSignalOpt: Option[Signal[_ >: SignalTypes]] =
GetTrappedSignalsOrdered.lastOption
def GetTrappedSignalsOrdered: List[Signal[_ >: SignalTypes]] =
{
val signalsList = GetTrappedSignals.toList
signalsList.sortWith(_._ordinal < _._ordinal)
}
def GetTrappedSignals: scala.collection.Iterable[Signal[_ >: Signal.SignalTypes]] =
_state._traps.values
def MarkAllLinks (signal: Signal[_ >: SignalTypes]): Unit =
{
_refs._linkKeys.values.foreach(linkKey =>
{
_trunkModel.GetSignalLinkOpt(GetTrunkKey, linkKey) match
{
case Some(link) => link.SetMark(signal._ordinal)
case None => // Weird, but we'll ignore.
}
})
}
def Propagate (
signalOpt: Option[Signal[_ >: SignalTypes]],
partOpt: Option[String]): Unit =
{
// Sink must be receiving a signal (sinks do not perform default propagation):
val signal = signalOpt.getOrElse(
throw TrunkException(Cell.ErrorCodes.SignalInvalid, "signal must be valid"))
// If signal is virtual then accept its mark:
if (signal._scalar.isInstanceOf[Signal.Virtual])
{
SetMark(signal._ordinal)
if (_refs._tapKeyOpt.isDefined)
{
val tapKey = _refs._tapKeyOpt.get
_trunkModel.GetSignalTapOpt(GetTrunkKey, tapKey) match
{
case Some(tap) => tap.Propagate(Some(signal))
case None => throw TrunkException(Cell.ErrorCodes.SignalSinkTapless)
}
}
}
// Else propagate normally:
else
{
val propagatorKey = signal._propagatorKeyOpt.getOrElse(GetKey)
// Trap signal per propagator:
_trunkModel.Log( s"$GetTag trapping ${signal.ToFriendly} from $propagatorKey")
signal._propagatorKeyOpt = Some(GetKey)
_state._traps(propagatorKey) = signal
// If there is a listener then notify it:
if (_listenerOpt.isDefined)
{
_trunkModel.Log( s"$GetTag notifying for ${signal.ToFriendly} from $propagatorKey")
val listener = _listenerOpt.get
listener.Notify(propagatorKey)
}
}
}
def Report (sectionsOpt: Option[String] = None): JsObject =
{
var report = Json.obj()
val sections = new ReportSectionsParser(sectionsOpt)
// Add meta section:
if (sections.HasMetaPropertyset)
report ++= Json.obj(TrunkModel.Glossary.kPSMeta -> _meta.Report(sections))
// Add attrs section:
if (sections.HasAttrsPropertyset)
report ++= Json.obj(TrunkModel.Glossary.kPSAttrs -> _attrs.Report(sections))
// Add refs section:
if (sections.HasRefsPropertyset)
report ++= Json.obj(TrunkModel.Glossary.kPSRefs -> _refs.Report(sections))
// Add state section:
if (sections.HasStatePropertyset)
report ++= Json.obj(TrunkModel.Glossary.kPSState -> _state.Report(sections))
report
}
override
def SetMark (mark: Int): Unit =
{
import scala.util.control.Breaks._
// Determine if all of sink's links are marked:
var isAllLinksMarked = true
breakable
{
_refs._linkKeys.values.foreach(linkKey =>
{
_trunkModel.GetSignalLinkOpt(GetTrunkKey, linkKey) match
{
case Some(link) =>
if (link.GetMark == kUnmarked)
{
isAllLinksMarked = false
break()
}
case None => // Weird, but we'll ignore since we're marking for destruction anyway.
}
})
}
// If all links are marked then accept mark:
if (isAllLinksMarked)
super.SetMark(mark)
}
def PropagateTest (signal: Signal[_ >: SignalTypes]): Unit =
{
Propagate(Some(signal))
}
SetListenerOpt(listenerOpt)
}
|
taranos/taranoscsf-core
|
src/main/scala/org/taranos/mc/trunk/intraprocess/SignalSink.scala
|
Scala
|
agpl-3.0
| 13,952 |
package at.droelf.gui.entities
case class Email(from: String, text: String)
|
dr03lf/travel-log
|
app/at/droelf/gui/entities/ContactEntities.scala
|
Scala
|
apache-2.0
| 77 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.pipeline
import akka.actor._
import spray.can.Http.RegisterChunkHandler
import spray.http._
import scala.util.{Failure, Success}
//TODO use FSM ??
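// Descriptive overview added for clarity (derived from the receive definitions below):
//   onRequest  - initial state; runs preInbound/inbound/postInbound and forwards the payload to `target`
//   onResponse - awaits the proxied response; runs preOutbound/outbound, stashing chunks until ReadyToChunk
//   onChunk    - streams processed chunks and the chunk end straight to the client via finalOutput
// onPostProcess is composed into every state, so PostProcess(ctx) is always handled.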
class PipelineProcessorActor(target: ActorRef, client: ActorRef, processor: Processor) extends Actor with ActorLogging with Stash {
import context.dispatcher
import processor._
// Common receiver composed into every state
def onPostProcess: Receive = {
case PostProcess(ctx) => postProcess(ctx)
case other => client forward other // unknown message
}
override def receive = onRequest orElse onPostProcess
//1st state
def onRequest: Receive = {
case ctx: RequestContext => inboundForRequest(ctx)
}
private def inboundForRequest(ctx: RequestContext) = {
var newCtx = ctx
try {
newCtx = preInbound(ctx)
newCtx.response match {
case nr: NormalResponse => postProcess(newCtx) //TODO: does this need to go through outbound?
case er: ExceptionalResponse => postProcess(newCtx)
case _ =>
inbound(newCtx) onComplete {
case Success(result) =>
try {
if (result.responseReady) self ! PostProcess(result)
else {
val postResult = postInbound(result)
context.become(onResponse(postResult) orElse onPostProcess)
target ! postResult.payload
}
} catch {
case t: Throwable =>
t.printStackTrace()
log.error(t, "Error in postInbound processing")
self ! PostProcess(onRequestError(result, t))
}
case Failure(t) =>
log.error(t, "Error in inbound processing")
self ! PostProcess(onRequestError(newCtx, t))
}
}
} catch {
case t: Throwable =>
log.error(t, "Error in processing request")
self ! PostProcess(onRequestError(newCtx, t))
} finally {
inboundFinalize(newCtx)
}
}
private def outboundForResponse(reqCtx: RequestContext, msgFunc: RequestContext => Any) = {
var newCtx = reqCtx
try {
newCtx = preOutbound(newCtx)
outbound(newCtx) onComplete {
case Success(result) =>
self ! msgFunc(result)
case Failure(t) =>
log.error(t, "Error in processing outbound")
self ! PostProcess(onResponseError(newCtx, t)) // chunks will be dead letters?
}
} catch {
case t: Throwable =>
log.error(t, "Error in processing response")
self ! PostProcess(onResponseError(newCtx, t))
} finally {
outboundFinalize(newCtx)
}
}
// ready to serve response from proxied actor/route
def onResponse(reqCtx: RequestContext): Receive = {
case resp: HttpResponse =>
outboundForResponse(reqCtx.copy(response = NormalResponse(resp)), ctx => PostProcess(ctx))
case ReadyToChunk(ctx) =>
val newCtx = postProcess(ctx)
unstashAll()
context.become(onChunk(newCtx) orElse onPostProcess)
case respStart: ChunkedResponseStart =>
outboundForResponse(reqCtx.copy(response = NormalResponse(respStart)), ctx => ReadyToChunk(ctx))
case data@Confirmed(ChunkedResponseStart(resp), ack) =>
outboundForResponse(reqCtx.copy(response = NormalResponse(data, sender())), ctx => ReadyToChunk(ctx))
case chunk: MessageChunk => stash()
case chunkEnd: ChunkedMessageEnd => stash()
case Confirmed(data, ack) => stash()
case rch@RegisterChunkHandler(handler) =>
val chunkHandler = context.actorOf(Props(classOf[ChunkHandler], handler, self, processor, reqCtx))
client ! RegisterChunkHandler(chunkHandler)
case Status.Failure(t) =>
log.error(t, "Received Status.Failure")
outboundForResponse(onResponseError(reqCtx, t), ctx => PostProcess(ctx)) // make sure preOutbound gets invoked to pair with postInbound
case t: Throwable =>
log.error(t, "Received Throwable")
outboundForResponse(onResponseError(reqCtx, t), ctx => PostProcess(ctx))
}
// Usually chunks do not go through postProcess but go directly to finalOutput.
def onChunk(reqCtx: RequestContext): Receive = {
case chunk: MessageChunk =>
processChunk(reqCtx) {
finalOutput(reqCtx.copy(response = NormalResponse(processResponseChunk(reqCtx, chunk))))
}
case chunkEnd: ChunkedMessageEnd =>
processChunk(reqCtx) {
finalOutput(reqCtx.copy(response = NormalResponse(processResponseChunkEnd(reqCtx, chunkEnd))))
}
case data@Confirmed(mc@(_: MessageChunk), ack) =>
processChunk(reqCtx) {
val newChunk = processResponseChunk(reqCtx, mc)
finalOutput(reqCtx.copy(response = NormalResponse(Confirmed(newChunk, ack), sender())))
}
case AckInfo(rawAck, receiver) =>
processChunk(reqCtx) {
receiver tell(rawAck, self)
}
}
private def postProcess(ctx: RequestContext): RequestContext = {
val newCtx: RequestContext = try {
postOutbound(ctx)
} catch {
case t: Throwable =>
log.error(t, "Error in processing postProcess")
onResponseError(ctx, t)
}
finalOutput(newCtx)
newCtx
}
private def finalOutput(ctx: RequestContext) = {
ctx.response match {
case r: NormalResponse =>
val response = r.responseMessage
client ! response
response match {
case r@(_: HttpResponse | _: ChunkedMessageEnd) => context stop self
case other =>
}
case r: ExceptionalResponse =>
//TODO: need to check whether chunking has already started
client ! r.response
context stop self
case other =>
log.error("Unexpected response: " + other)
client ! ExceptionalResponse.defaultErrorResponse
context stop self
}
}
}
case class ReadyToChunk(ctx: RequestContext)
case class PostProcess(ctx: RequestContext)
private class ChunkHandler(realHandler: ActorRef, caller: ActorRef, processor: Processor, reqCtx: RequestContext) extends Actor {
import processor._
def receive: Actor.Receive = {
case chunk: MessageChunk => realHandler tell(processRequestChunk(reqCtx, chunk), caller)
case chunkEnd: ChunkedMessageEnd => realHandler tell(processRequestChunkEnd(reqCtx, chunkEnd), caller)
case other => realHandler tell(other, caller)
}
}
|
keshin/squbs
|
squbs-pipeline/src/main/scala/org/squbs/pipeline/PipelineProcessorActor.scala
|
Scala
|
apache-2.0
| 6,956 |
import sbt._
object Resolvers {
val list = Seq(
Resolver.mavenLocal,
Resolver.sonatypeRepo("releases"),
Resolver.typesafeRepo("releases"),
"Spray Repository" at "http://repo.spray.io",
Resolver.jcenterRepo,
Resolver.sbtPluginRepo("sbt-plugin-releases"),
Resolver.bintrayRepo("mingchuno", "maven"),
Resolver.bintrayRepo("kamon-io", "sbt-plugins")
)
}
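// Hedged usage sketch (assumed build wiring, not shown in this file): a build definition
// would typically pull these in with something like
//   resolvers ++= Resolvers.list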
|
mavenlink/changestream
|
project/Resolvers.scala
|
Scala
|
mit
| 387 |
package epic.ontonotes
import epic.framework.Example
import epic.trees.{AnnotatedLabel, Tree}
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Represents an ontonotes document (a single file, or portion thereof)
*/
case class Document(id: String, sentences: IndexedSeq[Sentence]) extends Example[IndexedSeq[OntoAnnotations], IndexedSeq[IndexedSeq[String]]] {
def dspans = sentences.flatMap(_.dspans)
def words: IndexedSeq[IndexedSeq[String]] = sentences.map(_.words)
def features = words
lazy val label: IndexedSeq[OntoAnnotations] = sentences.map(_.label)
lazy val trees: IndexedSeq[Tree[AnnotatedLabel]] = sentences.map(_.tree)
lazy val ner: Map[DSpan, NerType.Value] = sentences.map(_.ner).reduceLeft(_ ++ _)
lazy val coref: Map[DSpan, Mention] = sentences.map(_.coref).reduceLeft(_ ++ _)
}
|
maxim-rabinovich/epic
|
src/main/scala/epic/ontonotes/Document.scala
|
Scala
|
apache-2.0
| 1,341 |
package toolkit
import java.io.{File, FileNotFoundException}
import org.parboiled2.ParseError
import org.scalatest._
import toolkit.exceptions.ExocuteParseError
import scala.util.{Failure, Success, Try}
/**
* Created by #GrowinScala
*
* Takes two directories: correct and incorrect.
* The correct directory should contain, for each test, a file.in with the pln source and a file.out with its expected representation.
* The incorrect directory only needs the invalid input files.
*/
class ParserTest extends FlatSpec {
/**
* Takes a directory path and returns a list of the files in it with the ".in" extension.
*
* @param dir - String with the directory path
* @return List of files that satisfy the conditions
*/
def getListOfFiles(dir: String): List[File] = {
val d = new File(dir)
if (d.exists && d.isDirectory) {
d.listFiles.filter(file => file.isFile && isInputFile(file)).toList
} else {
List[File]()
}
}
/**
* Checks whether the file's extension is ".in".
*
* @param file the file to check
* @return true if the file's extension is ".in", false otherwise
*/
def isInputFile(file: File): Boolean = file.getName.endsWith(".in")
/**
* Takes a string with the whole file content, removes all '\\r' characters and strips all '//' comments.
*
* @param file the file content
* @return a new, filtered String
*/
def clearComments(file: String): String = {
file.split("\\n").map(str => {
val index = str.indexOf("//")
if (index == -1) str
else str.substring(0, index)
}).map(str => str.filterNot(_ == '\\r')).mkString("\\n")
}
/**
* Takes the path of a file and returns its content.
*
* @param path the file path
* @return the file content as a String
*/
def readFile(path: String): String = scala.io.Source.fromFile(path).mkString
/**
* Takes the path of a test's .in file and returns the path of the corresponding .out file.
* Example: test.in => test.out
*
* @param path the path of the .in file
* @return string with the path of the .out file
*/
def getResultFile(path: String): String = {
path.substring(0, path.length - 2) + "out"
}
/**
* Tests whether the pln file parses correctly and matches its expected .out representation.
*
* @param path the path of the .in file
* @return true if it's correct, false otherwise
*/
def testFile(path: String): Boolean = {
val pln = readFile(path)
val plnClean = clearComments(pln)
val parser = new ActivityParser(plnClean)
val res: Try[GraphRep] = parser.InputLine.run()
res match {
case Success(graph) =>
println(path + ":")
{
try {
val expected: String = clearComments(readFile(getResultFile(path)))
val validOut = graph.toString == expected
if (!validOut) {
println(
s"""---
|${graph.toString}
|---
|$expected
|---
|""".stripMargin)
false
} else {
new ActivityParser(expected).InputLine.run() match {
case Success(expectedOutIntoGraph) =>
if (expectedOutIntoGraph.toString != expected) {
println(
s"""Out file is not consistent with GraphRep.toString representation:
|---
|${expectedOutIntoGraph.toString}
|---
|$expected
|---
|""".stripMargin)
false
} else
true
case Failure(e: ParseError) =>
println("Out file is not consistent with GraphRep.toString representation")
throw new ExocuteParseError(parser.formatError(e))
case Failure(e) =>
throw e
}
}
} catch {
case _: FileNotFoundException =>
println("Out file should be:")
println(graph.toString)
false
}
} && {
graph.checkValidGraph() match {
case Success(_) => true
case Failure(e) => throw e
}
}
case Failure(e: ParseError) =>
throw new ExocuteParseError(parser.formatError(e))
case Failure(e) =>
throw e
}
}
/**
* Tests that the pln file fails to parse or validate; if a .out file exists, its content must be a prefix of the thrown exception's toString.
*
* @param path the path of the .in file
* @return true if it fails as expected, false otherwise
*/
def testFileShouldFail(path: String): Boolean = {
val expectedException: Try[String] = Try(readFile(getResultFile(path)))
val pln = readFile(path)
val plnClean = clearComments(pln)
val parser = new ActivityParser(plnClean)
val res: Try[GraphRep] = parser.InputLine.run()
res.flatMap(_.checkValidGraph()) match {
case Success(_) =>
if (expectedException.isSuccess) {
println(path + s": should have failed with ${expectedException.get} exception")
} else {
println(path + ": should have failed")
}
false
case Failure(originalException) =>
val err = originalException match {
case parseError: ParseError =>
new ExocuteParseError(parser.formatError(parseError))
case e => e
}
if (expectedException.isSuccess) {
val expected = expectedException.get
val same = originalException.toString startsWith expected
if (!same)
println(s"Exception expected: $expected, found: ${err.toString}")
same
} else {
println(s"Exception caught: ${err.toString.replace("\\t", " ")}")
false
}
}
}
/**
* Tests the files that are supposed to be correct.
*/
getListOfFiles("tests" + File.separatorChar + "correct").foreach {
file =>
file.getName should "succeed" in {
assert({
val v = testFile(file.getAbsolutePath)
if (!v) Thread.sleep(500)
v
})
}
}
/**
* Tests the files that are supposed to be incorrect.
*/
getListOfFiles("tests" + File.separatorChar + "incorrect").foreach {
file =>
file.getName should "Not Succeed" in {
assert({
val v = testFileShouldFail(file.getAbsolutePath)
if (!v) Thread.sleep(500)
v
})
}
}
}
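// Illustrative test-resource layout assumed by the suite above (file names are hypothetical):
//   tests/correct/example.in     - pln source expected to parse
//   tests/correct/example.out    - expected GraphRep.toString representation
//   tests/incorrect/broken.in    - pln source expected to fail
//   tests/incorrect/broken.out   - optional prefix of the expected exception's toString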
|
exocute/Toolkit
|
src/test/scala/toolkit/ParserTest.scala
|
Scala
|
bsd-2-clause
| 6,313 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.aliyun.tablestore
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods._
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.types.{StructField, StructType}
case class TableStoreCatalog(schema: StructType)
object TableStoreCatalog {
val tableCatalog = "catalog"
val columns = "columns"
val col = "col"
val `type` = "type"
def apply(parameters: Map[String, String]): TableStoreCatalog = {
val jString = parameters(tableCatalog)
val jObj = parse(jString).asInstanceOf[JObject]
val schema = StructType(
getColsPreservingOrder(jObj).map(e =>
StructField(e._1, CatalystSqlParser.parseDataType(e._2(`type`)))))
new TableStoreCatalog(schema)
}
def getColsPreservingOrder(jObj: JObject): Seq[(String, Map[String, String])] = {
val jCols = jObj.obj.find(_._1 == columns).get._2.asInstanceOf[JObject]
jCols.obj.map { case (name, jvalue) =>
(name, jvalue.values.asInstanceOf[Map[String, String]])
}
}
}
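// Hedged example of the `catalog` parameter shape, inferred from the parsing code above
// (column names and types are illustrative only):
//   {
//     "columns": {
//       "PkString": {"col": "PkString", "type": "string"},
//       "col1":     {"col": "col1",     "type": "bigint"}
//     }
//   }
// TableStoreCatalog(Map("catalog" -> json)) would then yield a two-field StructType.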
|
aliyun/aliyun-emapreduce-sdk
|
emr-tablestore/src/main/scala/org/apache/spark/sql/aliyun/tablestore/TableStoreCatalog.scala
|
Scala
|
artistic-2.0
| 1,850 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan
import java.sql.Timestamp
import org.apache.flink.api.scala._
import org.apache.flink.table.api.Tumble
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.TimeIntervalUnit
import org.apache.flink.table.functions.{ScalarFunction, TableFunction}
import org.apache.flink.table.plan.TimeIndicatorConversionTest.{ScalarFunc, TableFunc}
import org.apache.flink.table.utils.TableTestBase
import org.apache.flink.table.utils.TableTestUtil._
import org.junit.Test
/**
* Tests for [[org.apache.flink.table.calcite.RelTimeIndicatorConverter]].
*/
class TimeIndicatorConversionTest extends TableTestBase {
@Test
def testSimpleMaterialization(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int, 'proctime.proctime)
val result = t
.select('rowtime.floor(TimeIntervalUnit.DAY) as 'rowtime, 'long)
.filter('long > 0)
.select('rowtime)
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "FLOOR(CAST(rowtime)", "FLAG(DAY)) AS rowtime"),
term("where", ">(long, 0)")
)
util.verifyTable(result, expected)
}
@Test
def testSelectAll(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int, 'proctime.proctime)
val result = t.select('*)
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "rowtime", "long", "int",
"PROCTIME(proctime) AS proctime")
)
util.verifyTable(result, expected)
}
@Test
def testFilteringOnRowtime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int)
val result = t
.filter('rowtime > "1990-12-02 12:11:11".toTimestamp)
.select('rowtime)
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "rowtime"),
term("where", ">(CAST(rowtime), 1990-12-02 12:11:11:TIMESTAMP(3))")
)
util.verifyTable(result, expected)
}
@Test
def testGroupingOnRowtime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int, 'proctime.proctime)
val result = t
.groupBy('rowtime)
.select('long.count)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "CAST(rowtime) AS rowtime", "long")
),
term("groupBy", "rowtime"),
term("select", "rowtime", "COUNT(long) AS EXPR$0")
),
term("select", "EXPR$0")
)
util.verifyTable(result, expected)
}
@Test
def testAggregationOnRowtime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int)
val result = t
.groupBy('long)
.select('rowtime.min)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "CAST(rowtime) AS rowtime", "long")
),
term("groupBy", "long"),
term("select", "long", "MIN(rowtime) AS EXPR$0")
),
term("select", "EXPR$0")
)
util.verifyTable(result, expected)
}
@Test
def testTableFunction(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int, 'proctime.proctime)
val func = new TableFunc
val result = t.joinLateral(func('rowtime, 'proctime, "") as 's).select('rowtime, 'proctime, 's)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamCorrelate",
streamTableNode(t),
term("invocation",
s"${func.functionIdentifier}(CAST($$0):TIMESTAMP(3) NOT NULL, PROCTIME($$3), '')"),
term("correlate", s"table(TableFunc(CAST(rowtime), PROCTIME(proctime), ''))"),
term("select", "rowtime", "long", "int", "proctime", "s"),
term("rowType", "RecordType(TIME ATTRIBUTE(ROWTIME) rowtime, BIGINT long, INTEGER int, " +
"TIME ATTRIBUTE(PROCTIME) proctime, VARCHAR(65536) s)"),
term("joinType", "INNER")
),
term("select", "rowtime", "PROCTIME(proctime) AS proctime", "s")
)
util.verifyTable(result, expected)
}
@Test
def testWindow(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int)
val result = t
.window(Tumble over 100.millis on 'rowtime as 'w)
.groupBy('w, 'long)
.select('w.end as 'rowtime, 'long, 'int.sum)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
streamTableNode(t),
term("groupBy", "long"),
term("window", "TumblingGroupWindow('w, 'rowtime, 100.millis)"),
term("select", "long", "SUM(int) AS EXPR$1", "end('w) AS EXPR$0")
),
term("select", "EXPR$0 AS rowtime", "long", "EXPR$1")
)
util.verifyTable(result, expected)
}
@Test
def testUnion(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]("MyTable", 'rowtime.rowtime, 'long, 'int)
val result = t.unionAll(t).select('rowtime)
val expected = binaryNode(
"DataStreamUnion",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "rowtime")
),
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "rowtime")
),
term("all", "true"),
term("union all", "rowtime")
)
util.verifyTable(result, expected)
}
@Test
def testMultiWindow(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]('rowtime.rowtime, 'long, 'int)
val result = t
.window(Tumble over 100.millis on 'rowtime as 'w)
.groupBy('w, 'long)
.select('w.rowtime as 'newrowtime, 'long, 'int.sum as 'int)
.window(Tumble over 1.second on 'newrowtime as 'w2)
.groupBy('w2, 'long)
.select('w2.end, 'long, 'int.sum)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
streamTableNode(t),
term("groupBy", "long"),
term("window", "TumblingGroupWindow('w, 'rowtime, 100.millis)"),
term("select", "long", "SUM(int) AS EXPR$1", "rowtime('w) AS EXPR$0")
),
term("select", "EXPR$0 AS newrowtime", "long", "EXPR$1 AS int")
),
term("groupBy", "long"),
term("window", "TumblingGroupWindow('w2, 'newrowtime, 1000.millis)"),
term("select", "long", "SUM(int) AS EXPR$1", "end('w2) AS EXPR$0")
),
term("select", "EXPR$0", "long", "EXPR$1")
)
util.verifyTable(result, expected)
}
@Test
def testGroupingOnProctime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Int)]("MyTable" , 'long, 'int, 'proctime.proctime)
val result = util.tableEnv.sqlQuery("SELECT COUNT(long) FROM MyTable GROUP BY proctime")
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "PROCTIME(proctime) AS proctime", "long")
),
term("groupBy", "proctime"),
term("select", "proctime", "COUNT(long) AS EXPR$0")
),
term("select", "EXPR$0")
)
util.verifyTable(result, expected)
}
@Test
def testAggregationOnProctime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Int)]("MyTable" , 'long, 'int, 'proctime.proctime)
val result = util.tableEnv.sqlQuery("SELECT MIN(proctime) FROM MyTable GROUP BY long")
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "long", "PROCTIME(proctime) AS proctime")
),
term("groupBy", "long"),
term("select", "long", "MIN(proctime) AS EXPR$0")
),
term("select", "EXPR$0")
)
util.verifyTable(result, expected)
}
@Test
def testWindowSql(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]("MyTable", 'rowtime.rowtime, 'long, 'int)
val result = util.tableEnv.sqlQuery(
"SELECT TUMBLE_END(rowtime, INTERVAL '0.1' SECOND) AS `rowtime`, `long`, " +
"SUM(`int`) FROM MyTable " +
"GROUP BY `long`, TUMBLE(rowtime, INTERVAL '0.1' SECOND)")
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
streamTableNode(t),
term("groupBy", "long"),
term("window", "TumblingGroupWindow('w$, 'rowtime, 100.millis)"),
term("select",
"long",
"SUM(int) AS EXPR$2",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select", "w$end AS rowtime", "long", "EXPR$2")
)
util.verifyTable(result, expected)
}
@Test
def testWindowWithAggregationOnRowtime(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)]("MyTable", 'rowtime.rowtime, 'long, 'int)
val result = util.tableEnv.sqlQuery("SELECT MIN(rowtime), long FROM MyTable " +
"GROUP BY long, TUMBLE(rowtime, INTERVAL '0.1' SECOND)")
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(t),
term("select", "long", "rowtime", "CAST(rowtime) AS rowtime0")
),
term("groupBy", "long"),
term("window", "TumblingGroupWindow('w$, 'rowtime, 100.millis)"),
term("select", "long", "MIN(rowtime0) AS EXPR$0")
),
term("select", "EXPR$0", "long")
)
util.verifyTable(result, expected)
}
@Test
def testMaterializeRightSideOfTemporalTableJoin(): Unit = {
val util = streamTestUtil()
val proctimeOrders = util.addTable[(Long, String)](
"ProctimeOrders", 'o_amount, 'o_currency, 'o_proctime.proctime)
val proctimeRatesHistory = util.addTable[(String, Int)](
"ProctimeRatesHistory", 'currency, 'rate, 'proctime.proctime)
val proctimeRates = proctimeRatesHistory.createTemporalTableFunction('proctime, 'currency)
val result = proctimeOrders
.joinLateral(proctimeRates('o_proctime), 'currency === 'o_currency)
.select("o_amount * rate, currency, proctime").as("converted_amount")
.window(Tumble over 1.second on 'proctime as 'w)
.groupBy('w, 'currency)
.select('converted_amount.sum)
val expected =
unaryAnyNode(
unaryAnyNode(
unaryNode(
"DataStreamCalc",
anySubtree(),
term(
"select",
"*(o_amount, rate) AS converted_amount",
"currency",
"PROCTIME(proctime) AS proctime")
)
)
)
util.verifyTable(result, expected)
}
@Test
def testDoNotMaterializeLeftSideOfTemporalTableJoin(): Unit = {
val util = streamTestUtil()
val proctimeOrders = util.addTable[(Long, String)](
"ProctimeOrders", 'o_amount, 'o_currency, 'o_proctime.proctime)
val proctimeRatesHistory = util.addTable[(String, Int)](
"ProctimeRatesHistory", 'currency, 'rate, 'proctime.proctime)
val proctimeRates = proctimeRatesHistory.createTemporalTableFunction('proctime, 'currency)
val result = proctimeOrders
.joinLateral(proctimeRates('o_proctime), 'currency === 'o_currency)
.select("o_amount * rate, currency, o_proctime").as("converted_amount")
.window(Tumble over 1.second on 'o_proctime as 'w)
.groupBy('w, 'currency)
.select('converted_amount.sum)
val expected =
unaryAnyNode(
unaryAnyNode(
unaryNode(
"DataStreamCalc",
anySubtree(),
term(
"select",
"*(o_amount, rate) AS converted_amount",
"currency",
"o_proctime")
)
)
)
util.verifyTable(result, expected)
}
@Test
def testMaterializeLeftRowtimeWithProcessingTimeTemporalTableJoin(): Unit = {
val util = streamTestUtil()
val proctimeOrders = util.addTable[(Long, String)](
"ProctimeOrders", 'o_amount, 'o_currency, 'o_proctime.proctime, 'o_rowtime.rowtime)
val proctimeRatesHistory = util.addTable[(String, Int)](
"ProctimeRatesHistory", 'currency, 'rate, 'proctime.proctime)
val proctimeRates = proctimeRatesHistory.createTemporalTableFunction('proctime, 'currency)
val result = proctimeOrders
.joinLateral(proctimeRates('o_proctime), 'currency === 'o_currency)
.select("o_amount * rate, currency, o_proctime, o_rowtime").as("converted_amount")
.window(Tumble over 1.second on 'o_rowtime as 'w)
.groupBy('w, 'currency)
.select('converted_amount.sum)
val expected =
unaryAnyNode(
unaryAnyNode(
unaryNode(
"DataStreamCalc",
anySubtree(),
term(
"select",
"*(o_amount, rate) AS converted_amount",
"currency",
"CAST(o_rowtime) AS o_rowtime")
)
)
)
util.verifyTable(result, expected)
}
@Test
def testMatchRecognizeRowtimeMaterialization(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)](
"RowtimeTicker",
'rowtime.rowtime,
'symbol,
'price)
util.addFunction("func", new ScalarFunc)
val query =
s"""
|SELECT
| *
|FROM RowtimeTicker
|MATCH_RECOGNIZE (
| PARTITION BY symbol
| ORDER BY rowtime
| MEASURES
| MATCH_ROWTIME() as matchRowtime,
| func(MATCH_ROWTIME()) as funcRowtime,
| A.rowtime as noLongerRowtime
| ONE ROW PER MATCH
| PATTERN (A)
| DEFINE
| A AS A.price > 0
|)
|""".stripMargin
val expected = unaryNode(
"DataStreamMatch",
streamTableNode(t),
term("partitionBy", "symbol"),
term("orderBy", "rowtime ASC"),
term("measures",
"FINAL(MATCH_ROWTIME()) AS matchRowtime",
"FINAL(func(CAST(MATCH_ROWTIME()))) AS funcRowtime",
"FINAL(CAST(A.rowtime)) AS noLongerRowtime"
),
term("rowsPerMatch", "ONE ROW PER MATCH"),
term("after", "SKIP TO NEXT ROW"),
term("pattern", "'A'"),
term("define", "{A=>(PREV(A.$2, 0), 0)}")
)
util.verifySql(query, expected)
}
@Test
def testMatchRecognizeProctimeMaterialization(): Unit = {
val util = streamTestUtil()
val t = util.addTable[(Long, Long, Int)](
"ProctimeTicker",
'rowtime.rowtime,
'symbol,
'price,
'proctime.proctime)
util.addFunction("func", new ScalarFunc)
val query =
s"""
|SELECT
| *
|FROM ProctimeTicker
|MATCH_RECOGNIZE (
| PARTITION BY symbol
| ORDER BY rowtime
| MEASURES
| MATCH_PROCTIME() as matchProctime,
| func(MATCH_PROCTIME()) as funcProctime,
| A.proctime as noLongerProctime
| ONE ROW PER MATCH
| PATTERN (A)
| DEFINE
| A AS A.price > 0
|)
|""".stripMargin
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamMatch",
streamTableNode(t),
term("partitionBy", "symbol"),
term("orderBy", "rowtime ASC"),
term("measures",
"FINAL(MATCH_PROCTIME()) AS matchProctime",
"FINAL(func(PROCTIME(MATCH_PROCTIME()))) AS funcProctime",
"FINAL(PROCTIME(A.proctime)) AS noLongerProctime"
),
term("rowsPerMatch", "ONE ROW PER MATCH"),
term("after", "SKIP TO NEXT ROW"),
term("pattern", "'A'"),
term("define", "{A=>(PREV(A.$2, 0), 0)}")
),
term("select",
"symbol",
"PROCTIME(matchProctime) AS matchProctime",
"funcProctime",
"noLongerProctime"
)
)
util.verifySql(query, expected)
}
}
object TimeIndicatorConversionTest {
class TableFunc extends TableFunction[String] {
val t = new Timestamp(0L)
def eval(time1: Long, time2: Timestamp, string: String): Unit = {
collect(time1.toString + time2.after(t) + string)
}
}
class ScalarFunc extends ScalarFunction {
val t = new Timestamp(0L)
def eval(time: Timestamp): String = {
time.toString
}
}
}
|
bowenli86/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/plan/TimeIndicatorConversionTest.scala
|
Scala
|
apache-2.0
| 18,026 |
//package io.skysail.app.wyt.services
//
//import io.skysail.repo.orientdb.ScalaDbService
//import io.skysail.core.model.ApplicationModel
//import io.skysail.app.wyt.repository.WytRepository
//import org.json4s.DefaultFormats
//import io.skysail.app.wyt.domain.Confirmation
//import io.skysail.app.wyt.domain.Turn
//
//class TurnService(dbService: ScalaDbService, appModel: ApplicationModel) {
//
// private var repo: WytRepository = new WytRepository(dbService)
// private implicit val formats = DefaultFormats
//
// var i = 0
//
// def confirm(confirmation: Confirmation) = {
// i = i+1
// }
//
// def getNextTurn(pactId: String) = {
// if (i % 2 == 0) {
// Turn(Some("1"),Some("Georgios is next"))
// } else {
// Turn(Some("1"),Some("Carsten is next"))
// }
// }
//
//// def getById(id: String): Option[Connection] = {
//// val entry = repo.findOne(id)
//// if (entry.isDefined) Some(entry.get.extract[Connection]) else None
//// }
////
//// def find(f: Filter, p: Pagination) = repo.find(f, p).map { (row => row.extract[Connection]) }.toList
////
//// def findOne(id: String): Option[Connection] = {
//// val option = repo.findOne(id)
//// if (option.isDefined) Some(option.get.extract[Connection]) else None
//// }
////
//// def save(entity: Connection): Connection = {
//// val vertex = repo.save(entity, appModel)
//// // entity.setId(vertex.getId().toString())
//// entity.copy(id = Some(vertex.get.id.toString()))
//// }
//
//
//}
|
evandor/skysail-notes
|
skysail.app.wyt/src/io/skysail/app/wyt/services/TurnService.scala
|
Scala
|
apache-2.0
| 1,502 |
package edu.rice.habanero.benchmarks.count
import edu.rice.habanero.actors.{JumiActor, JumiActorState, JumiPool}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
*
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> ([email protected])
*/
object CountingJumiActorBenchmark {
def main(args: Array[String]) {
BenchmarkRunner.runBenchmark(args, new CountingJumiActorBenchmark)
}
private final class CountingJumiActorBenchmark extends Benchmark {
def initialize(args: Array[String]) {
CountingConfig.parseArgs(args)
}
def printArgInfo() {
CountingConfig.printArgs()
}
def runIteration() {
val counter = new CountingActor()
counter.start()
val producer = new ProducerActor(counter)
producer.start()
producer.send(IncrementMessage())
JumiActorState.awaitTermination()
}
def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double): Unit = {
if (lastIteration) {
JumiPool.shutdown()
}
}
}
private case class IncrementMessage()
private case class RetrieveMessage(sender: JumiActor[AnyRef])
private case class ResultMessage(result: Int)
private class ProducerActor(counter: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
private val self = this
override def process(msg: AnyRef) {
msg match {
case m: IncrementMessage =>
var i = 0
while (i < CountingConfig.N) {
counter.send(m)
i += 1
}
counter.send(RetrieveMessage(self))
case m: ResultMessage =>
val result = m.result
if (result != CountingConfig.N) {
println("ERROR: expected: " + CountingConfig.N + ", found: " + result)
} else {
println("SUCCESS! received: " + result)
}
exit()
}
}
}
private class CountingActor extends JumiActor[AnyRef] {
private var count = 0
override def process(msg: AnyRef) {
msg match {
case m: IncrementMessage =>
count += 1
case m: RetrieveMessage =>
m.sender.send(ResultMessage(count))
exit()
}
}
}
}
|
shamsmahmood/savina
|
src/main/scala/edu/rice/habanero/benchmarks/count/CountingJumiActorBenchmark.scala
|
Scala
|
gpl-2.0
| 2,209 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.zeromq
import akka.actor.SupervisorStrategy
import akka.util.ByteString
import akka.zeromq.Subscribe
import org.scalatest.FunSuite
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
class ZeroMQStreamSuite extends FunSuite {
val batchDuration = Seconds(1)
private val master: String = "local[2]"
private val framework: String = this.getClass.getSimpleName
test("zeromq input stream") {
val ssc = new StreamingContext(master, framework, batchDuration)
val publishUrl = "abc"
val subscribe = new Subscribe(null.asInstanceOf[ByteString])
val bytesToObjects = (bytes: Seq[ByteString]) => null.asInstanceOf[Iterator[String]]
// tests the API, does not actually test data receiving
val test1: ReceiverInputDStream[String] =
ZeroMQUtils.createStream(ssc, publishUrl, subscribe, bytesToObjects)
val test2: ReceiverInputDStream[String] = ZeroMQUtils.createStream(
ssc, publishUrl, subscribe, bytesToObjects, StorageLevel.MEMORY_AND_DISK_SER_2)
val test3: ReceiverInputDStream[String] = ZeroMQUtils.createStream(
ssc, publishUrl, subscribe, bytesToObjects,
StorageLevel.MEMORY_AND_DISK_SER_2, SupervisorStrategy.defaultStrategy)
// TODO: Actually test data receiving
ssc.stop()
}
}
|
trueyao/spark-lever
|
external/zeromq/src/test/scala/org/apache/spark/streaming/zeromq/ZeroMQStreamSuite.scala
|
Scala
|
apache-2.0
| 2,211 |
package japgolly.scalajs.react.extra.router
import org.scalajs.dom
import scalaz.effect.IO
import scalaz.{-\/, \/-, \/}
import japgolly.scalajs.react.{ReactElement, ReactComponentC, TopNode}
/**
* DSL for specifying a set of routing rules.
*/
trait RoutingRules {
final type P = this.type
final type Renderer = japgolly.scalajs.react.extra.router.Renderer[P]
final type Router = japgolly.scalajs.react.extra.router.Router[P]
final type Loc = Location[P]
final type RedirectTarget = Redirect.Target[P]
@inline final protected implicit def componentP_renderer[S,B,T<:TopNode](c: ReactComponentC.ReqProps[Router, S, B, T]): Renderer = c(_)
@inline final protected implicit def componentU_renderer[P,S,B,T<:TopNode](c: ReactComponentC.ConstProps[P, S, B, T]): Renderer = _ => c()
@inline final protected implicit def element_renderer[A <% ReactElement](a: A): Renderer = _ => a
@inline final protected implicit def loc_redirectable(a: Loc): RedirectTarget = \/-(a)
@inline final protected implicit def path_redirectable(p: Path): RedirectTarget = -\/(p)
private[this] def totalParser: Path => RouteAction[P] =
p => parseS(p) orElse parseD(p) getOrElse notFound(p)
private[this] var staticRoutes = Map.empty[Path, RouteAction[P]]
private[this] val parseS: Path => Option[RouteAction[P]] = p => staticRoutes.get(p)
private type DRouteFn = Path => Option[RouteAction[P]]
private[this] var dynRoutes = Vector.empty[DRouteFn]
private[this] val parseD: DRouteFn = p => dynRoutes.foldLeft(None: Option[RouteAction[P]])(_ orElse _(p))
private[this] var onRouteChangeAction: Loc => IO[Unit] =
Function const IO(dom.window.scrollTo(0, 0))
// ===================================================================================================================
// Interception
protected case class InterceptionR(loc: Loc, router: Router, element: ReactElement)
/** Customise all renderable routes. */
protected def interceptRender(i: InterceptionR): ReactElement = i.element
private def mkloc(path: Path, render: Renderer): Loc = {
lazy val l: Loc = Location(path, r => interceptRender(InterceptionR(l, r, render(r))))
l
}
/** Perform an action when a new route is activated. */
protected def onRouteChange(a: Loc => Unit): Unit =
onRouteChangeIO(l => IO(a(l)))
/** Perform an action when a new route is activated. */
protected def onRouteChangeIO(a: Loc => IO[Unit]): Unit =
onRouteChangeAction = a
// ===================================================================================================================
// Actions
final type DynAction = Path => RouteAction[P]
/**
* The catch-all response to unmatched routes.
*/
protected val notFound: DynAction
final protected def render(render: Renderer): DynAction =
path => mkloc(path, render)
final protected def redirect(to: Loc, method: Redirect.Method): DynAction =
_ => Redirect(to, method)
// ===================================================================================================================
// Static Routes
/** An unregistered static route. Install via `register()`. */
protected case class StaticRoute[A <: RouteAction[P]](path: Path, action: A)
final protected def register[A <: RouteAction[P]](u: StaticRoute[A]): A = {
import u.{path, action}
staticRoutes.get(path) match {
case Some(prev) =>
throw new ExceptionInInitializerError(s"Attempted to register two routes with the name '${path.value}' in $this.\n1) $prev\n2) $action")
case None =>
staticRoutes += path -> u.action
action
}
}
final protected def rootLocation(r: Renderer): StaticRoute[Loc] =
location("", r)
final protected def location(path: String, render: Renderer): StaticRoute[Loc] = {
val p = Path(path)
StaticRoute(p, mkloc(p, render))
}
final protected def redirection(from: String, to: RedirectTarget, method: Redirect.Method): StaticRoute[Redirect[P]] =
StaticRoute(Path(from), Redirect(to, method))
// ===================================================================================================================
// Dynamic Routes
/** An unregistered dynamic route. Install via `register()`. */
protected case class DynamicRoute(route: DRouteFn)
final protected def register(u: DynamicRoute): Unit =
dynRoutes :+= u.route
/**
* Parser for a dynamic path. Example: `"person/123"`
*
* @tparam T The value of the dynamic portion of the path. Example: `"123"` or `PersonId(123)`.
*/
final protected def parser[T](pf: PartialFunction[String, T]): DynB[T] =
new DynB[T](pf.lift.compose[Path](_.value))
final protected class DynB[T](parse: Path => Option[T]) {
private def dynamicRoute(f: (Path, T) => RouteAction[P]): DynamicRoute =
new DynamicRoute(path => parse(path).map(t => f(path, t)))
def thenMatch(f: T => DynAction): DynamicRoute =
dynamicRoute((p, t) => f(t)(p))
/**
* Note that a `Location[P]` is not returned. In order to create links to this location,
* use `this.dynLink()`.
*/
def location(f: T => Renderer): DynamicRoute =
dynamicRoute((p, t) => mkloc(p, f(t)))
def redirection(f: T => (RedirectTarget, Redirect.Method)): DynamicRoute =
dynamicRoute((_, t) => {
val (l, m) = f(t)
Redirect(l, m)
})
}
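// Hedged usage sketch (`personPage` is illustrative, not defined in this trait): a dynamic route
// parsed with the `matchNumber` regex declared later in this trait and rendered via the implicit renderers:
//   register(parser { case matchNumber(id) => id.toLong }
//     .location(personId => personPage(personId)))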
/**
* Generates paths for a dynamic route, which can then be passed to [[japgolly.scalajs.react.extra.router.Router]]
* to be turned into clickable links.
*
* @param path Example: `"person/123"`
* @tparam T The value of the dynamic portion of the route. Example: `PersonId(123)`.
*/
final protected def dynLink[T](path: T => String): T => DynamicLocation[P] =
t => DynamicLocation(Path(path(t)))
// ===================================================================================================================
// Convenience & Utility
import Router.{Logger, nopLogger}
final def routingEngine(base: BaseUrl, logger: Logger = nopLogger): Router =
new Router(base, totalParser, onRouteChangeAction, logger)
final def router(base: BaseUrl, logger: Logger = nopLogger): Router.Component[P] =
Router.component(routingEngine(base, logger))
/** `case matchNumber(num) => num.toLong` */
final protected lazy val matchNumber = "^(\\d+)$".r
/**
* A dynamic route that uses a replace-state redirect to remove trailing slashes from unmatched route URLs. Install via register().
*/
final protected def removeTrailingSlashes: DynamicRoute = {
val regex = "^(.*?)/+$".r
parser { case regex(p) => p }.redirection(p => (Path(p), Redirect.Replace))
}
}
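// Hedged end-to-end sketch (component names and the base URL are illustrative, not from this file):
//   object MyRoutes extends RoutingRules {
//     val home  = register(rootLocation(HomePage))
//     val about = register(location("about", AboutPage))
//     override protected val notFound: DynAction = redirect(home, Redirect.Replace)
//   }
//   val routerComponent = MyRoutes.router(BaseUrl("http://example.com/app/"))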
|
beni55/scalajs-react
|
extra/src/main/scala/japgolly/scalajs/react/extra/router/RoutingRules.scala
|
Scala
|
apache-2.0
| 6,733 |
package uk.co.pragmasoft.graphdb.orient.sampledao
import com.orientechnologies.orient.core.id.ORID
import com.tinkerpop.blueprints.{Direction, TransactionalGraph, Vertex}
import uk.co.pragmasoft.graphdb.orient.{OrientDBBasicConversions, OrientGraphMarshaller}
object marshallers {
implicit object MusicianMarshaller extends OrientGraphMarshaller[Musician] with OrientDBBasicConversions {
type IdType = String
override def vertexClassName: String = "musician"
override def getModelObjectID(obj: Musician) = obj.id
val NameAttribute = "name"
val InstrumentAttribute = "instrument"
override def propertiesForCreate(musician: Musician) =
Set(
NameAttribute -> musician.name,
InstrumentAttribute -> musician.instrument
)
// Only changing instrument
override def propertiesForUpdate(musician: Musician) =
Set(
InstrumentAttribute -> musician.instrument
)
override def readFrom(vertex: Vertex)(implicit graphDb: TransactionalGraph) =
Musician(
id = vertex.getId.asInstanceOf[ORID],
name = vertex.property(NameAttribute).get, // Name is mandatory
instrument = vertex.property(InstrumentAttribute).getOrElse("")
)
override def updateProperties(musician: Musician, vertex: Vertex)(implicit graphDb: TransactionalGraph): Unit = {
vertex.setProperty(InstrumentAttribute, musician.instrument)
}
override def updateRelationships(data: Musician, vertex: Vertex)(implicit graphDb: TransactionalGraph) = {}
override def writeRelationships(data: Musician, vertex: Vertex)(implicit graphDb: TransactionalGraph) = {}
}
implicit object BandMarshaller extends OrientGraphMarshaller[Band] with OrientDBBasicConversions {
type IdType = String
override def vertexClassName: String = "artist"
val PlaysIn = "plays-in"
val NameAttribute = "name"
val StylesAttribute = "styles"
override def getModelObjectID(artist: Band) = artist.id
override def readFrom(vertex: Vertex)(implicit graphDb: TransactionalGraph): Band =
Band(
vertex.getId.asInstanceOf[ORID],
vertex.property(NameAttribute).get,
vertex.embeddedSetProperty[String](StylesAttribute).getOrElse(Set.empty),
vertex.inAdjacentsForLabel[Musician](PlaysIn).toSet
)
override def propertiesForCreate(artist: Band) = Set(
NameAttribute -> artist.name,
StylesAttribute -> artist.styles
)
// Cannot update name..
override def propertiesForUpdate(artist: Band) = Set(
StylesAttribute -> artist.styles
)
override def writeRelationships(artist: Band, vertex: Vertex)(implicit graphDb: TransactionalGraph) = {
artist.musicians.foreach { musician =>
vertex.addInEdgeFrom(musician, PlaysIn)
}
}
override def updateRelationships(artist: Band, vertex: Vertex)(implicit graphDb: TransactionalGraph) = {
vertex.removeEdges(PlaysIn, Direction.IN )
artist.musicians.foreach { musician =>
vertex <-- PlaysIn <-- musician
}
}
}
implicit object FanMarshaller extends OrientGraphMarshaller[Fan] with OrientDBBasicConversions {
type IdType = String
override def vertexClassName: String = "fan"
val Adores = "adores"
val NameAttribute = "name"
val AgeAttribute = "age"
override def writeRelationships(fanData: Fan, fanVertex: Vertex)(implicit graphDb: TransactionalGraph): Unit = {
fanVertex.removeEdges(Adores)
fanData.fanOf foreach { artist =>
fanVertex --> Adores --> artist
}
}
override def updateRelationships(fanData: Fan, fanVertex: Vertex)(implicit graphDb: TransactionalGraph) = {
fanData.fanOf foreach { artist =>
fanVertex --> Adores --> artist
}
}
override def propertiesForCreate(data: Fan) = attributesMap(data)
override def propertiesForUpdate(data: Fan) = attributesMap(data)
def attributesMap(data: Fan): Set[(String, Any)] = {
Set(
NameAttribute -> data.name,
AgeAttribute -> data.age
)
}
override def getModelObjectID(fan: Fan) = fan.id
override def readFrom(vertex: Vertex)(implicit graphDb: TransactionalGraph) =
Fan(
id = vertex.getId.asInstanceOf[ORID],
name = vertex.getProperty[String](NameAttribute),
age = vertex.getProperty[Int](AgeAttribute),
fanOf = vertex.outAdjacentsForLabel[Band](Adores).toSet
)
}
}
|
galarragas/sgal
|
sgal-orient/src/test/scala/uk/co/pragmasoft/graphdb/orient/sampledao/marshallers.scala
|
Scala
|
apache-2.0
| 4,477 |
package com.goticks
import akka.actor._
import akka.io.IO
import com.typesafe.config.ConfigFactory
import spray.can.Http
//********** ********** ********** ********** ********** ********** ********** \\\\
object Main extends App {
val config = ConfigFactory.load()
val host = config.getString("http.host")
val port = config.getInt("http.port")
implicit val system = ActorSystem("goticks")
val api = system.actorOf(Props(new RestInterface()), "httpInterface")
// newHttpServer(api) ! Bind(interface=host, port=port)
IO(Http) ! Http.Bind(listener = api, interface = host, port = port)
// +--------------------+ +--------------------+ +--------------------+ \\\\
}
//********** ********** ********** ********** ********** ********** ********** \\\\
|
mduttaroy-dev/akka-trials-01.idea
|
src/main/scala/com/goticks/Main.scala
|
Scala
|
gpl-2.0
| 759 |
package com.rklaehn.abc
import algebra.{Eq, Order}
import cats.Show
import cats.implicits._
import com.rklaehn.sonicreducer.Reducer
final class ArrayMultiMap[@sp(ILD) K, @sp(ILD) V] private[abc] (
private[abc] val map: ArrayMap[K, ArraySet[V]]) {
def keys: ArraySet[K] = map.keys
def entries: Iterator[(K, V)] =
map.iterator.flatMap { case (k, vs) ⇒ vs.iterator.map(v ⇒ k → v) }
def justKeys(keys: ArraySet[K])(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[K, V] =
new ArrayMultiMap[K, V](map.justKeys(keys))
def exceptKeys(keys: ArraySet[K])(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[K, V] =
new ArrayMultiMap[K, V](map.exceptKeys(keys))
def filterKeys(p: K ⇒ Boolean)(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[K, V] =
new ArrayMultiMap[K, V](map.filterKeys(p))
def merge(that: ArrayMultiMap[K, V])(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[K, V] = {
def mergeElements(a: ArraySet[V], b: ArraySet[V]): ArraySet[V] = a.union(b)
new ArrayMultiMap[K, V](map.unionWith(that.map, mergeElements))
}
def inverse(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[V, K] = {
val swappedPairs: Iterator[(V, K)] = for {
(k, vs) ← map.iterator
v ← vs.iterator
} yield (v, k)
ArrayMultiMap.fromEntries(swappedPairs.toArray: _*)
}
def except(that: ArrayMultiMap[K, V])(implicit kOrder: Order[K], kClassTag: ClassTag[K], vOrder: Order[V], vClassTag: ClassTag[V]): ArrayMultiMap[K, V] = {
val map1 = map.except(that.map, (x,y) ⇒ {
val r = x diff y
if(r.isEmpty) Option.empty[ArraySet[V]]
else Option(r)
})
new ArrayMultiMap[K, V](map1)
}
def apply(k: K)(implicit kOrder: Order[K]): ArraySet[V] = map.apply0(k)
override def equals(that: Any): Boolean = that match {
case that: ArrayMultiMap[K, V] => ArrayMultiMap.eqv(Universal[K], Universal[V]).eqv(this, that)
case _ => false
}
override def hashCode(): Int = ArrayMultiMap.hash(Universal[K], Universal[V]).hash(this)
override def toString: String = ArrayMultiMap.show(Universal[K], Universal[V]).show(this)
}
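// Hedged behavioural sketch (REPL-style, values illustrative; Order/ClassTag instances assumed in scope):
//   val m = ArrayMultiMap.fromEntries(1 -> "a", 1 -> "b", 2 -> "a")
//   m(1)                  // ArraySet("a", "b")
//   m.inverse.apply("a")  // ArraySet(1, 2)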
private[abc] trait ArrayMultiMap0 {
implicit def eqv[K: Eq, V: Eq]: Eq[ArrayMultiMap[K, V]] = Eq.by(_.map)
}
object ArrayMultiMap extends ArrayMultiMap0 {
implicit def show[K: Show, V: Show]: Show[ArrayMultiMap[K, V]] = Show.show {
_.map
.iterator
.map { case (k, v) => k.show + "->" + v.show }
.mkString("ArrayMultiMap(",",",")")
}
implicit def hash[K: Hash, V: Hash]: Hash[ArrayMultiMap[K, V]] = Hash.by(_.map)
def empty[@sp(ILD) K: ClassTag, @sp(ILD) V: ClassTag]: ArrayMultiMap[K, V] =
new ArrayMultiMap[K, V](ArrayMap.empty[K, ArraySet[V]])
def singleton[@sp(ILD) K: ClassTag, @sp(ILD) V: ClassTag](k: K, v: ArraySet[V]) = {
new ArrayMultiMap[K, V](ArrayMap.singleton(k, v))
}
def apply[@sp(ILD) K: Order: ClassTag, @sp(ILD) V: Order: ClassTag](kvs: (K, ArraySet[V])*) = {
val reducer = Reducer[ArrayMultiMap[K, V]](_ merge _)
for ((k, v) <- kvs)
if(!v.isEmpty)
reducer(singleton(k, v))
reducer.resultOrElse(empty[K, V])
}
def fromEntries[@sp(ILD) K: Order: ClassTag, @sp(ILD) V: Order: ClassTag](kvs: (K, V)*) = {
val reducer = Reducer[ArrayMultiMap[K, V]](_ merge _)
for ((k, v) <- kvs)
reducer(singleton(k, ArraySet.singleton(v)))
reducer.resultOrElse(empty[K, V])
}
}
|
rklaehn/abc
|
extras/src/main/scala/com/rklaehn/abc/ArrayMultiMap.scala
|
Scala
|
apache-2.0
| 3,693 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
import algebra.{Monoid => AMonoid}
import algebra.ring.AdditiveMonoid
import scala.annotation.implicitNotFound
import scala.math.Equiv
import scala.reflect.ClassTag
import java.lang.{
Boolean => JBool,
Double => JDouble,
Float => JFloat,
Integer => JInt,
Long => JLong,
Short => JShort
}
import java.util.{List => JList, Map => JMap}
import scala.collection.{Map => ScMap}
/**
* Monoid (take a deep breath, and relax about the weird name): This is a semigroup that has an additive
* identity (called zero), such that a+0=a, 0+a=a, for every a
*/
@implicitNotFound(msg = "Cannot find Monoid type class for ${T}")
trait Monoid[@specialized(Int, Long, Float, Double) T]
extends Semigroup[T]
with AMonoid[T]
with AdditiveMonoid[T] {
def isNonZero(v: T): Boolean = v != zero
def assertNotZero(v: T): Unit =
if (!isNonZero(v)) {
throw new java.lang.IllegalArgumentException("argument should not be zero")
}
def nonZeroOption(v: T): Option[T] =
if (isNonZero(v)) {
Some(v)
} else {
None
}
override def sum(vs: TraversableOnce[T]): T = {
val optT = sumOption(vs)
if (optT.isDefined) optT.get
else zero
}
/**
* These are from algebra.Monoid
*/
override def additive: AMonoid[T] = this
override def empty: T = zero
override def combineAll(t: TraversableOnce[T]): T = sum(t)
}
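// Hedged usage sketch (REPL-style; assumes the implicit Monoid[Int] that algebird provides elsewhere):
//   Monoid.zero[Int]   // 0
//   Monoid.plus(3, 4)  // 7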
// For Java interop so they get the default methods
abstract class AbstractMonoid[T] extends Monoid[T]
/**
* Some(5) + Some(3) == Some(8)
* Some(5) + None == Some(5)
*/
class OptionMonoid[T](implicit semi: Semigroup[T]) extends Monoid[Option[T]] {
override def zero: None.type = None
override def plus(left: Option[T], right: Option[T]): Option[T] =
if (left.isEmpty) {
right
} else if (right.isEmpty) {
left
} else {
Some(semi.plus(left.get, right.get))
}
override def sumOption(items: TraversableOnce[Option[T]]): Option[Option[T]] =
if (items.isEmpty) None
else Some(semi.sumOption(items.toIterator.filter(_.isDefined).map(_.get)))
}
class EitherMonoid[L, R](implicit semigroupl: Semigroup[L], monoidr: Monoid[R])
extends EitherSemigroup[L, R]()(semigroupl, monoidr)
with Monoid[Either[L, R]] {
override lazy val zero: Right[L, R] = Right(monoidr.zero)
}
object StringMonoid extends Monoid[String] {
override val zero: String = ""
override def plus(left: String, right: String): String = left + right
override def sumOption(items: TraversableOnce[String]): Option[String] =
if (items.isEmpty) None
else Some(items.mkString(""))
}
/**
* List concatenation monoid. plus means concatenation, zero is empty list
*/
class ListMonoid[T] extends Monoid[List[T]] {
override def zero: List[T] = List[T]()
override def plus(left: List[T], right: List[T]): List[T] = left ++ right
override def sumOption(items: TraversableOnce[List[T]]): Option[List[T]] =
if (items.isEmpty) None
else {
// ListBuilder mutates the tail of the list until
// result is called so that it is O(N) to push N things on, not N^2
val builder = List.newBuilder[T]
items.foreach(builder ++= _)
Some(builder.result())
}
}
// equivalent to ListMonoid
class SeqMonoid[T] extends Monoid[Seq[T]] {
override def zero: Seq[T] = Seq[T]()
override def plus(left: Seq[T], right: Seq[T]): Seq[T] = left ++ right
override def sumOption(items: TraversableOnce[Seq[T]]): Option[Seq[T]] =
if (items.isEmpty) None
else {
val builder = Seq.newBuilder[T]
items.foreach(builder ++= _)
Some(builder.result())
}
}
/**
* Pair-wise sum Array monoid.
*
* plus returns left[i] + right[i] for all array elements. The resulting array will be as long as the longer
* array, with the longer array's extra elements copied as-is. zero is an empty array.
*/
class ArrayMonoid[T: ClassTag](implicit semi: Semigroup[T]) extends Monoid[Array[T]] {
// additive identity
override def isNonZero(v: Array[T]): Boolean = v.nonEmpty
override def zero: Array[T] = Array[T]()
override def plus(left: Array[T], right: Array[T]): Array[T] = {
val (longer, shorter) =
if (left.length > right.length) (left, right) else (right, left)
val sum = longer.clone
var idx = 0
while (idx < shorter.length) {
sum(idx) = semi.plus(longer(idx), shorter(idx))
idx = idx + 1
}
sum
}
}
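// Sketch of the pair-wise behaviour described above (assumes the implicit Semigroup[Int]):
//   val arrayMonoid = new ArrayMonoid[Int]
//   arrayMonoid.plus(Array(1, 2, 3), Array(10, 20))  // Array(11, 22, 3)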
/**
* Set union monoid. plus means union, zero is empty set
*/
class SetMonoid[T] extends Monoid[Set[T]] {
override def zero: Set[T] = Set[T]()
override def plus(left: Set[T], right: Set[T]): Set[T] =
if (left.size > right.size) {
left ++ right
} else {
right ++ left
}
override def sumOption(items: TraversableOnce[Set[T]]): Option[Set[T]] =
if (items.isEmpty) None
else {
val builder = Set.newBuilder[T]
items.foreach(s => builder ++= s)
Some(builder.result())
}
}
/**
* Function1 monoid. plus means function composition, zero is the identity function
*/
class Function1Monoid[T] extends Monoid[Function1[T, T]] {
override def zero: T => T = identity[T]
// (f1 + f2)(x) = f2(f1(x)) so that:
// listOfFn.foldLeft(x) { (v, fn) => fn(v) } = (Monoid.sum(listOfFn))(x)
override def plus(f1: Function1[T, T], f2: Function1[T, T]): T => T = { (t: T) => f2(f1(t)) }
}
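// Sketch of the composition order noted above:
//   val fnMonoid = new Function1Monoid[Int]
//   val f = fnMonoid.plus(_ + 1, _ * 10)
//   f(2)  // 30, i.e. (2 + 1) * 10: the left argument is applied first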
// To use the OrValMonoid, wrap your item in an OrVal object
case class OrVal(get: Boolean) extends AnyVal
object OrVal {
implicit def monoid: Monoid[OrVal] = OrValMonoid
def unboxedMonoid: Monoid[Boolean] = new Monoid[Boolean] {
override def zero = false
override def plus(l: Boolean, r: Boolean): Boolean = l || r
override def sumOption(its: TraversableOnce[Boolean]): Option[Boolean] =
if (its.isEmpty) None
else Some(its.exists(identity))
}
}
/**
* Boolean OR monoid. plus means logical OR, zero is false.
*/
object OrValMonoid extends Monoid[OrVal] {
override def zero: OrVal = OrVal(false)
override def plus(l: OrVal, r: OrVal): OrVal = if (l.get) l else r
override def sumOption(its: TraversableOnce[OrVal]): Option[OrVal] =
if (its.isEmpty) None
else Some(OrVal(its.exists(_.get)))
}
// To use the AndValMonoid, wrap your item in an AndVal object
case class AndVal(get: Boolean) extends AnyVal
object AndVal {
implicit def monoid: Monoid[AndVal] = AndValMonoid
def unboxedMonoid: Monoid[Boolean] = new Monoid[Boolean] {
override def zero = true
override def plus(l: Boolean, r: Boolean): Boolean = l && r
override def sumOption(its: TraversableOnce[Boolean]): Option[Boolean] =
if (its.isEmpty) None
else Some(its.forall(identity))
}
}
/**
* Boolean AND monoid. plus means logical AND, zero is true.
*/
object AndValMonoid extends Monoid[AndVal] {
override def zero: AndVal = AndVal(true)
override def plus(l: AndVal, r: AndVal): AndVal = if (l.get) r else l
override def sumOption(its: TraversableOnce[AndVal]): Option[AndVal] =
if (its.isEmpty) None
else Some(AndVal(its.forall(_.get)))
}
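// Sketch of the boolean wrappers above, using the implicit monoids from their companions:
//   Monoid.sum(Seq(OrVal(false), OrVal(true)))    // OrVal(true)
//   Monoid.sum(Seq(AndVal(true), AndVal(false)))  // AndVal(false)
//   Monoid.zero[AndVal]                           // AndVal(true)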
class FromAlgebraMonoid[T](m: AMonoid[T]) extends FromAlgebraSemigroup(m) with Monoid[T] {
override def sum(ts: TraversableOnce[T]): T = m.combineAll(ts)
override def zero: T = m.empty
}
private[algebird] trait FromAlgebraMonoidImplicit1 {
implicit def fromAlgebraAdditiveMonoid[T](implicit m: AdditiveMonoid[T]): Monoid[T] =
new FromAlgebraMonoid(m.additive)
}
private[algebird] trait FromAlgebraMonoidImplicit0 extends FromAlgebraMonoidImplicit1 {
implicit def fromAlgebraMonoid[T](implicit m: AMonoid[T]): Monoid[T] =
new FromAlgebraMonoid(m)
}
object Monoid extends GeneratedMonoidImplicits with ProductMonoids with FromAlgebraMonoidImplicit0 {
// This pattern is really useful for typeclasses
def zero[T](implicit mon: Monoid[T]): T = mon.zero
// strictly speaking, same as Semigroup, but most interesting examples
// are monoids, and code already depends on this:
def plus[T](l: T, r: T)(implicit monoid: Monoid[T]): T = monoid.plus(l, r)
def assertNotZero[T](v: T)(implicit monoid: Monoid[T]): Unit =
monoid.assertNotZero(v)
def isNonZero[T](v: T)(implicit monoid: Monoid[T]): Boolean = monoid.isNonZero(v)
def nonZeroOption[T](v: T)(implicit monoid: Monoid[T]): Option[T] =
monoid.nonZeroOption(v)
// Left sum: (((a + b) + c) + d)
def sum[T](iter: TraversableOnce[T])(implicit monoid: Monoid[T]): T =
monoid.sum(iter)
def from[T](z: => T)(associativeFn: (T, T) => T): Monoid[T] = new Monoid[T] {
override lazy val zero: T = z
override def plus(l: T, r: T): T = associativeFn(l, r)
}
/**
 * Return an Equiv[T] that uses isNonZero to treat all zeros as equal. Useful for Maps/Vectors that have
 * many equivalent in-memory representations of zero.
*/
def zeroEquiv[T: Equiv: Monoid]: Equiv[T] = Equiv.fromFunction { (a: T, b: T) =>
(!isNonZero(a) && !isNonZero(b)) || Equiv[T].equiv(a, b)
}
/**
 * Same as v + v + v .. + v (i times in total). Requires i >= 0; wish we had a NonnegativeBigInt class.
*/
def intTimes[T](i: BigInt, v: T)(implicit mon: Monoid[T]): T = {
require(i >= 0, "Cannot do negative products with a Monoid, try Group.intTimes")
if (i == 0) {
mon.zero
} else {
Semigroup.intTimes(i, v)(mon)
}
}
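// Sketch (hedged): with the String monoid defined below, intTimes(4, "ab") == "abababab"
// and intTimes(0, "ab") == "".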
implicit def nullMonoid: Monoid[Null] = NullGroup
implicit def unitMonoid: Monoid[Unit] = UnitGroup
implicit def boolMonoid: Monoid[Boolean] = BooleanRing
implicit def jboolMonoid: Monoid[JBool] = JBoolRing
implicit def intMonoid: Monoid[Int] = IntRing
implicit def jintMonoid: Monoid[JInt] = JIntRing
implicit def shortMonoid: Monoid[Short] = ShortRing
implicit def jshortMonoid: Monoid[JShort] = JShortRing
implicit def bigIntMonoid: Monoid[BigInt] = BigIntRing
implicit def bigDecimalMonoid: Monoid[BigDecimal] =
implicitly[Ring[BigDecimal]]
implicit def longMonoid: Monoid[Long] = LongRing
implicit def jlongMonoid: Monoid[JLong] = JLongRing
implicit def floatMonoid: Monoid[Float] = FloatRing
implicit def jfloatMonoid: Monoid[JFloat] = JFloatRing
implicit def doubleMonoid: Monoid[Double] = DoubleRing
implicit def jdoubleMonoid: Monoid[JDouble] = JDoubleRing
implicit def stringMonoid: Monoid[String] = StringMonoid
implicit def optionMonoid[T: Semigroup]: Monoid[Option[T]] =
new OptionMonoid[T]
implicit def listMonoid[T]: Monoid[List[T]] = new ListMonoid[T]
implicit def seqMonoid[T]: Monoid[Seq[T]] = new SeqMonoid[T]
implicit def arrayMonoid[T: ClassTag](implicit semi: Semigroup[T]): ArrayMonoid[T] =
new ArrayMonoid[T]
implicit def indexedSeqMonoid[T: Monoid]: Monoid[IndexedSeq[T]] =
new IndexedSeqMonoid[T]
implicit def jlistMonoid[T]: Monoid[JList[T]] = new JListMonoid[T]
implicit def setMonoid[T]: Monoid[Set[T]] = new SetMonoid[T]
implicit def mapMonoid[K, V: Semigroup]: Monoid[Map[K, V]] =
new MapMonoid[K, V]
implicit def scMapMonoid[K, V: Semigroup]: Monoid[ScMap[K, V]] =
new ScMapMonoid[K, V]
implicit def jmapMonoid[K, V: Semigroup]: Monoid[JMap[K, V]] =
new JMapMonoid[K, V]
implicit def eitherMonoid[L: Semigroup, R: Monoid]: Monoid[Either[L, R]] =
new EitherMonoid[L, R]
implicit def function1Monoid[T]: Monoid[Function1[T, T]] =
new Function1Monoid[T]
}
|
twitter/algebird
|
algebird-core/src/main/scala/com/twitter/algebird/Monoid.scala
|
Scala
|
apache-2.0
| 11,808 |
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import com.sksamuel.exts.OptionImplicits._
case class ReverseNestedAggregationDefinition(name: String,
path: Option[String] = None,
pipelines: Seq[PipelineAggregationDefinition] = Nil,
subaggs: Seq[AggregationDefinition] = Nil,
metadata: Map[String, AnyRef] = Map.empty)
extends AggregationDefinition {
type T = ReverseNestedAggregationDefinition
def path(path: String): ReverseNestedAggregationDefinition = copy(path = path.some)
override def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): T = copy(pipelines = pipelines.toSeq)
override def subAggregations(aggs: Iterable[AggregationDefinition]): T = copy(subaggs = aggs.toSeq)
override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
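// Usage sketch (hedged, illustrative only): the definition is refined via copies, e.g.
//   ReverseNestedAggregationDefinition("to_root").path("parent_path")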
|
tyth/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/ReverseNestedAggregationDefinition.scala
|
Scala
|
apache-2.0
| 1,061 |
package scala.slick.direct
import common._
/*
// DOES NOT WORK: because Reversed[T] is just a type alias for T, implicit search can pick up
// Ordering[Reversed[T]] when a plain Ordering[T] is requested (and vice versa)
class ReverseOrder{
type Reversed[T] = T
def reversed[T](t:T) : Reversed[T] = t.asInstanceOf[Reversed[T]]
implicit def reversedOrdering[T](implicit ordering:Ordering[T]) : Ordering[Reversed[T]] = ordering.reverse.asInstanceOf[Ordering[Reversed[T]]]
}
*/
class ReverseOrder{
final case class Reversed[T](value:T)
def reversed[T](t:T) : Reversed[T] = Reversed(t)
implicit def reversedOrdering[T:Ordering] : Ordering[Reversed[T]] = new Ordering[Reversed[T]]{
def compare( a:Reversed[T], b:Reversed[T] ) = implicitly[Ordering[T]].reverse.compare( a.value, b.value )
}
}
/**
* null ordering dummies for slick translation
*/
class NullAndReverseOrder extends ReverseOrder{
def nonesFirst [T](t:Option[T]) : Option[T] = SLICK_ONLY
def nonesLast [T](t:Option[T]) = SLICK_ONLY
def nonesFirst [T](t:Reversed[Option[T]]) = SLICK_ONLY
def nonesLast [T](t:Reversed[Option[T]]) = SLICK_ONLY
}
object order extends NullAndReverseOrder
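// Usage sketch (hedged: plain Scala collections are used here purely to illustrate the Ordering;
// in slick-direct these markers are primarily meant to be picked up by the query translation):
//   import order._
//   List(3, 1, 2).sortBy(reversed)  // List(3, 2, 1), descending via Ordering[Reversed[Int]]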
|
boldradius/slick
|
src/main/scala/scala/slick/direct/order.scala
|
Scala
|
bsd-2-clause
| 1,079 |
package com.jrende.commands
import com.jrende.model.TweetManager
import com.jrende.view.Renderer
case class Next(params: Seq[String]) extends Command() {
override def execute(): Unit = {
Renderer.renderTweets(TweetManager.getNextPage())
}
}
|
Jrende/Skvitter
|
src/main/scala/com/jrende/commands/Next.scala
|
Scala
|
mit
| 251 |
package colossus.extensions.util.bson
import java.nio.ByteOrder
trait BsonValue extends Writable {
implicit val byteOrder = ByteOrder.LITTLE_ENDIAN
}
|
fehmicansaglam/colossus-extensions
|
mongo/src/main/scala/colossus/extensions/util/bson/BsonValue.scala
|
Scala
|
apache-2.0
| 154 |
package com.scripts
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object RDDAssignment {
def main (args: Array[String]) {
import PageCounter._
val conf = new SparkConf().setAppName("Big Apple").setMaster("local[4]")
val sc = new SparkContext(conf)
// Page Counts RDD
val pageCountsRdd = sc.textFile("C:\\Users\\sumeet.agrawal\\Downloads/pagecounts-20151201-220000.gz")
// 10 records
extractHeadByCount(pageCountsRdd, 10) foreach println
//Total Records
println(s"Total records = ${pageCountsRdd.count}")
//English Pages
val englishPages = filterByString(pageCountsRdd, "/en/")
// English Pages Count
println(s" English pages = ${englishPages.count()}")
// Requested > 200K
val combineByKeyRDD = reduceByKeyWithFunction(pageCountsRdd, (a: Int, b: Int) => a + b)
val pageCountGreaterThan200K = filterByFunction(combineByKeyRDD, (x => (x._2 > 200000)))
println(s" Pages Requested>200k =${pageCountGreaterThan200K.count()} ")
sc.stop()
}
}
object PageCounter {
def extractHeadByCount(rdd: RDD[String], n: Int) = rdd.take(n)
def filterByString(rdd: RDD[String], s: String) = rdd.filter(line => line.contains(s))
def filterByFunction(rdd: RDD[(String, Int)], fnc: ((String, Int)) => Boolean) = rdd.filter(fnc)
def reduceByKeyWithFunction(rdd: RDD[String], fnc: (Int, Int) => Int) = {
val splitRDD = rdd.map(_.split(" "))
val combineByKeyRDD = splitRDD.map(x => (x(1), x(2).toInt))
combineByKeyRDD.reduceByKey(fnc)
}
}
|
sum-coderepo/spark-scala
|
src/main/scala/com/scripts/RDDassignment.scala
|
Scala
|
apache-2.0
| 1,536 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.metrics.sender
import java.net.InetSocketAddress
import scala.concurrent.duration._
import io.gatling.core.akka.BaseActor
import io.gatling.core.config._
import akka.actor.{ Props, Stash }
private[metrics] object MetricsSender {
def props(configuration: GatlingConfiguration): Props = {
val remote = new InetSocketAddress(configuration.data.graphite.host, configuration.data.graphite.port)
configuration.data.graphite.protocol match {
case Tcp => Props(new TcpSender(remote, 5, 5 seconds))
case Udp => Props(new UdpSender(remote))
}
}
}
private[metrics] abstract class MetricsSender extends BaseActor with Stash
|
wiacekm/gatling
|
gatling-metrics/src/main/scala/io/gatling/metrics/sender/MetricsSender.scala
|
Scala
|
apache-2.0
| 1,284 |
package com.bazaarvoice.sswf
/**
* Marshaller for workflow input. Simply provide the mechanism to serialize and deserialize workflow inputs.
* @tparam SSWFInput The JVM object representing your workflow input.
*/
trait InputParser[SSWFInput] {
def serialize(input: SSWFInput): String
def deserialize(inputString: String): SSWFInput
}
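// A minimal sketch of an implementation (hypothetical, pass-through for String inputs;
// a real workflow would more likely serialize its input to and from JSON here):
//   object StringInputParser extends InputParser[String] {
//     override def serialize(input: String): String = input
//     override def deserialize(inputString: String): String = inputString
//   }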
|
bazaarvoice/super-simple-workflow
|
sswf-core/src/main/scala/com/bazaarvoice/sswf/InputParser.scala
|
Scala
|
apache-2.0
| 342 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.qsu
import slamdata.Predef._
import quasar.NameGenerator
import quasar.Planner.PlannerErrorME
import quasar.contrib.scalaz.MonadState_
import quasar.fp.symbolOrder
import quasar.fp.ski.ι
import quasar.qscript.{construction, Hole, HoleF, ReduceFunc, ReduceIndexF, SrcHole}
import ApplyProvenance.AuthenticatedQSU
import matryoshka._
import scalaz.{ISet, Monad, NonEmptyList, Scalaz, StateT}, Scalaz._
final class ReifyBuckets[T[_[_]]: BirecursiveT: EqualT: ShowT] private () extends QSUTTypes[T] {
import QSUGraph.Extractors._
val prov = QProv[T]
val qsu = QScriptUniform.Optics[T]
val func = construction.Func[T]
def apply[F[_]: Monad: NameGenerator: PlannerErrorME](aqsu: AuthenticatedQSU[T])
: F[AuthenticatedQSU[T]] = {
type G[A] = StateT[StateT[F, RevIdx, ?], QAuth, A]
val bucketsReified = aqsu.graph.rewriteM[G] {
case g @ LPReduce(source, reduce) =>
for {
res <- bucketsFor[G](source.root)
(srcs, buckets0) = res
reifiedG <- srcs.findMin match {
case Some(sym) =>
UnifyTargets[T, G](buildGraph[G](_))(g, sym, NonEmptyList(source.root))(GroupedKey, ReduceExprKey) map {
case (newSrc, original, reduceExpr) =>
val buckets =
buckets0.map(_ flatMap { access =>
if (Access.valueHole.isEmpty(access))
func.Hole as access
else
original as access
})
g.overwriteAtRoot(mkReduce(newSrc.root, buckets, reduce as reduceExpr.seconds.head)) :++ newSrc
}
case None =>
g.overwriteAtRoot(mkReduce(source.root, buckets0, reduce as HoleF)).point[G]
}
} yield reifiedG
case g @ QSSort(source, Nil, keys) =>
val src = source.root
bucketsFor[G](src) map { case (_, buckets) =>
g.overwriteAtRoot(qsu.qsSort(src, buckets, keys))
}
}
bucketsReified.run(aqsu.auth).eval(aqsu.graph.generateRevIndex) map {
case (auth, graph) => ApplyProvenance.AuthenticatedQSU(graph, auth)
}
}
////
private val GroupedKey = "grouped"
private val ReduceExprKey = "reduce_expr"
private def bucketsFor[F[_]: Monad: PlannerErrorME: MonadState_[?[_], QAuth]]
(vertex: Symbol)
: F[(ISet[Symbol], List[FreeAccess[Hole]])] =
for {
qauth <- MonadState_[F, QAuth].get
vdims <- qauth.lookupDimsE[F](vertex)
ids = prov.buckets(prov.reduce(vdims)).toList
res <- ids traverse {
case id @ IdAccess.GroupKey(s, i) =>
qauth.lookupGroupKeyE[F](s, i)
.map(fm => (ISet.singleton(s), fm as Access.value[prov.D, Hole](SrcHole)))
case other =>
(ISet.empty[Symbol], HoleF[T] as Access.id[prov.D, Hole](other, SrcHole)).point[F]
}
} yield res.unfzip leftMap (_.foldMap(ι))
private def buildGraph[F[_]: Monad: NameGenerator: PlannerErrorME: RevIdxM: MonadState_[?[_], QAuth]](
node: QScriptUniform[Symbol])
: F[QSUGraph] =
for {
newGraph <- QSUGraph.withName[T, F]("rbu")(node)
_ <- ApplyProvenance.computeProvenance[T, F](newGraph)
} yield newGraph
private def mkReduce[A](
src: A,
buckets: List[FreeAccess[Hole]],
reducer: ReduceFunc[FreeMap])
: QScriptUniform[A] =
qsu.qsReduce(
src,
buckets,
List(reducer),
ReduceIndexF[T](0.right))
}
object ReifyBuckets {
def apply[T[_[_]]: BirecursiveT: EqualT: ShowT, F[_]: Monad: NameGenerator: PlannerErrorME]
(aqsu: AuthenticatedQSU[T])
: F[AuthenticatedQSU[T]] =
taggedInternalError("ReifyBuckets", new ReifyBuckets[T].apply[F](aqsu))
}
|
jedesah/Quasar
|
connector/src/main/scala/quasar/qscript/qsu/ReifyBuckets.scala
|
Scala
|
apache-2.0
| 4,391 |
package io.buoyant.linkerd.protocol.http
import com.twitter.finagle.Path
import com.twitter.finagle.buoyant.Dst
import com.twitter.finagle.http.Request
import com.twitter.finagle.util.LoadService
import io.buoyant.config.Parser
import io.buoyant.linkerd.IdentifierInitializer
import io.buoyant.linkerd.protocol.HttpIdentifierConfig
import io.buoyant.router.RoutingFactory.IdentifiedRequest
import io.buoyant.test.FunSuite
class StaticIdentifierConfigTest extends FunSuite {
test("sanity") {
// ensure it doesn't totally blowup
val _ = StaticIdentifierConfig(Path.read("/foo")).newIdentifier(Path.empty)
}
test("service registration") {
assert(LoadService[IdentifierInitializer].exists(_.isInstanceOf[StaticIdentifierInitializer]))
}
test("parse config") {
val yaml = s"""
|kind: io.l5d.static
|path: /foo
""".stripMargin
val mapper = Parser.objectMapper(yaml, Iterable(Seq(StaticIdentifierInitializer)))
val config = mapper.readValue[HttpIdentifierConfig](yaml).asInstanceOf[StaticIdentifierConfig]
val identifier = config.newIdentifier(Path.empty)
val req = Request()
assert(
await(identifier(req)).asInstanceOf[IdentifiedRequest[Request]].dst ==
Dst.Path(Path.read("/foo"))
)
}
}
|
denverwilliams/linkerd
|
linkerd/protocol/http/src/test/scala/io/buoyant/linkerd/protocol/http/StaticIdentifierConfigTest.scala
|
Scala
|
apache-2.0
| 1,295 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler
import org.neo4j.cypher.internal._
import org.neo4j.cypher.internal.frontend.v2_3.InputPosition
import org.neo4j.cypher.internal.frontend.v2_3.test_helpers.CypherFunSuite
import org.scalatest.prop.TableDrivenPropertyChecks
class CypherPreParserTest extends CypherFunSuite with TableDrivenPropertyChecks {
val queries = Table(
("query", "expected"),
("CYPHER 1.9 MATCH", PreParsedStatement("MATCH", Seq(ConfigurationOptions(Some(VersionOption("1.9")), Seq.empty)), (1, 12, 11))),
("CYPHER 2.0 THAT", PreParsedStatement("THAT", Seq(ConfigurationOptions(Some(VersionOption("2.0")), Seq.empty)), (1, 12, 11))),
("CYPHER 2.1 YO", PreParsedStatement("YO", Seq(ConfigurationOptions(Some(VersionOption("2.1")), Seq.empty)), (1, 12, 11))),
("CYPHER 2.2 PRO", PreParsedStatement("PRO", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty)), (1, 12, 11))),
("PROFILE THINGS", PreParsedStatement("THINGS", Seq(ProfileOption), (1, 9, 8))),
("EXPLAIN THIS", PreParsedStatement("THIS", Seq(ExplainOption), (1, 9, 8))),
("CYPHER 2.2 PLANNER COST PROFILE PATTERN", PreParsedStatement("PATTERN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty), CostPlannerOption, ProfileOption), (1, 33, 32))),
("EXPLAIN CYPHER 2.1 YALL", PreParsedStatement("YALL", Seq(ExplainOption, ConfigurationOptions(Some(VersionOption("2.1")), Seq.empty)), (1, 20, 19))),
("CYPHER 2.2 PLANNER COST RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty), CostPlannerOption), (1, 25, 24))),
("PLANNER COST RETURN", PreParsedStatement("RETURN", Seq(CostPlannerOption), (1, 14, 13))),
("CYPHER 2.2 PLANNER RULE RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty), RulePlannerOption), (1, 25, 24))),
("PLANNER RULE RETURN", PreParsedStatement("RETURN", Seq(RulePlannerOption), (1, 14, 13))),
("CYPHER 2.2 PLANNER IDP RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty), IDPPlannerOption), (1, 24, 23))),
("CYPHER 2.2 PLANNER DP RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq.empty), DPPlannerOption), (1, 23, 22))),
("PLANNER IDP RETURN", PreParsedStatement("RETURN", Seq(IDPPlannerOption), (1, 13, 12))),
("PLANNER DP RETURN", PreParsedStatement("RETURN", Seq(DPPlannerOption), (1, 12, 11))),
("CYPHER planner=cost RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(None, Seq(CostPlannerOption))), (1, 21, 20))),
("CYPHER 2.2 planner=cost RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq(CostPlannerOption))), (1, 25, 24))),
("CYPHER 2.2 planner = idp RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(Some(VersionOption("2.2")), Seq(IDPPlannerOption))), (1, 26, 25))),
("CYPHER planner =dp RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(None, Seq(
DPPlannerOption))), (1, 20, 19))),
("CYPHER runtime=interpreted RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(None, Seq(InterpretedRuntimeOption))), (1, 28, 27))),
("CYPHER runtime=compiled RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(None, Seq(InterpretedRuntimeOption))), (1, 25, 24))),
("CYPHER 2.3 planner=cost runtime=interpreted RETURN", PreParsedStatement("RETURN", Seq(
ConfigurationOptions(Some(VersionOption("2.3")), Seq(CostPlannerOption, InterpretedRuntimeOption))), (1, 45, 44))),
("CYPHER 2.3 planner=dp runtime=interpreted RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions(
Some(VersionOption("2.3")), Seq(DPPlannerOption, InterpretedRuntimeOption))), (1, 43, 42))),
("CYPHER 2.3 planner=idp runtime=interpreted RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions
(Some(VersionOption("2.3")), Seq(IDPPlannerOption, InterpretedRuntimeOption))), (1, 44, 43))),
("CYPHER 2.3 planner=idp runtime=interpreted RETURN", PreParsedStatement("RETURN", Seq(ConfigurationOptions
(Some(VersionOption("2.3")), Seq(IDPPlannerOption, InterpretedRuntimeOption))), (1, 44, 43))),
("explainmatch", PreParsedStatement("explainmatch", Seq.empty, (1, 1, 0)))
)
test("run the tests") {
forAll(queries) {
case (query, expected) => parse(query) should equal(expected)
}
}
private def parse(arg:String): PreParsedStatement = {
CypherPreParser(arg)
}
private implicit def lift(pos: (Int, Int, Int)): InputPosition = InputPosition(pos._3, pos._1, pos._2)
}
|
HuangLS/neo4j
|
community/cypher/cypher/src/test/scala/org/neo4j/cypher/internal/compiler/CypherPreParserTest.scala
|
Scala
|
apache-2.0
| 5,436 |
package com.sksamuel.scapegoat
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
class ReadmeTest extends AnyFreeSpec with Matchers {
val readme =
scala.io.Source
.fromFile("README.md")
.getLines()
.toSeq
val inspectionNamesAndLevelsFromReadme =
readme
.dropWhile(l => l.trim.distinct != "|-")
.drop(1)
.dropWhile(l => l.trim.distinct != "|-")
.drop(1)
.takeWhile(l => l.trim.nonEmpty)
.map(_.split("\\|"))
.collect { case Array(_, className, _, level) =>
className.trim -> level.trim
}
val inspectionNamesAndLevels =
Inspections.inspections.map(i => i.getClass.getSimpleName -> i.defaultLevel.toString).toSet
"README" - {
"should be up to date" in {
val inCodeOnly = inspectionNamesAndLevels.diff(inspectionNamesAndLevelsFromReadme.toSet).toSeq.sorted
val inReadmeOnly = inspectionNamesAndLevelsFromReadme.toSet.diff(inspectionNamesAndLevels).toSeq.sorted
if (inCodeOnly.nonEmpty || inReadmeOnly.nonEmpty)
fail(s"""
|README file need to be updated:
| It misses following inspections found in code: ${inCodeOnly.mkString("[", ",", "]")}
| It has following inspections not found in code: ${inReadmeOnly.mkString("[", ",", "]")}
|""".stripMargin)
}
"should have inspections listed in order" in {
inspectionNamesAndLevelsFromReadme.sorted shouldBe inspectionNamesAndLevelsFromReadme
}
"should have correct number of inspections" in {
val Pattern = raw"There are currently (\d+?) inspections.*".r
readme.collect { case Pattern(n) =>
n.toInt shouldBe inspectionNamesAndLevels.size
}
}
"should mention all existing configuration options" - {
val existingOptions = classOf[Configuration].getDeclaredFields.map(_.getName)
val readmeText = readme.mkString("\n")
existingOptions.foreach { option =>
s"$option should be listed in help" in {
readmeText.contains(s"-P:scapegoat:$option:") shouldBe true
}
}
}
}
}
|
sksamuel/scapegoat
|
src/test/scala/com/sksamuel/scapegoat/ReadmeTest.scala
|
Scala
|
apache-2.0
| 2,149 |
/*
*************************************************************************************
* Copyright 2011-2013 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.repository.jdbc
import net.liftweb.common.Loggable
import net.liftweb.common._
import scala.util.{Try, Failure => Catch, Success}
import org.springframework.jdbc.core.JdbcTemplate
import com.normation.rudder.domain.workflows.ChangeRequest
import com.normation.rudder.repository._
import com.normation.rudder.domain.workflows.WorkflowNodeId
import com.normation.rudder.domain.workflows.WorkflowNode
import scala.collection.JavaConversions._
import com.normation.rudder.domain.workflows.ChangeRequestId
import org.springframework.jdbc.core.RowMapper
import java.sql.ResultSet
import org.springframework.jdbc.support.GeneratedKeyHolder
import org.springframework.jdbc.core.PreparedStatementCreator
import com.normation.rudder.domain.workflows.ChangeRequestId
import com.normation.rudder.domain.workflows.WorkflowNodeId
class RoWorkflowJdbcRepository(
jdbcTemplate : JdbcTemplate
) extends RoWorkflowRepository with Loggable {
val SELECT_SQL = "SELECT id, state FROM Workflow "
def getAllByState(state : WorkflowNodeId) : Box[Seq[ChangeRequestId]] = {
Try {
val list = jdbcTemplate.query(SELECT_SQL + "WHERE state = ?", Array[AnyRef](state.value.asInstanceOf[AnyRef]), DummyWorkflowsMapper)
list.toSeq.map(x => ChangeRequestId(x.crId))
} match {
case Success(x) => Full(x)
case Catch(error) =>
logger.error(s"Error when fetching all change request by state ${state.value} : ${error.toString()}")
Failure(error.toString())
}
}
def getStateOfChangeRequest(crId: ChangeRequestId) : Box[WorkflowNodeId] = {
Try {
val list = jdbcTemplate.query(SELECT_SQL + "WHERE id = ?", Array[AnyRef](crId.value.asInstanceOf[AnyRef]), DummyWorkflowsMapper)
list.toSeq match {
case seq if seq.size == 0 =>
logger.warn(s"Change request ${crId.value} doesn't exist")
Failure(s"Change request ${crId.value} doesn't exist")
case seq if seq.size > 1 =>
logger.error(s"Too many change request with same id ${crId.value}")
Failure(s"Too many change request with same id ${crId.value}")
case seq if seq.size == 1 => Full(WorkflowNodeId(seq.head.state))
}
} match {
case Success(x) => x
case Catch(error) =>
logger.error(s"Error when fetching status of change request ${crId.value} : ${error.toString()}")
Failure(error.toString())
}
}
def getAllChangeRequestsState() : Box[Map[ChangeRequestId,WorkflowNodeId]] = {
Try {
for{
WorkflowStateMapper(state,id) <- jdbcTemplate.query(SELECT_SQL , DummyWorkflowsMapper).toSeq
} yield {
(ChangeRequestId(id),WorkflowNodeId(state))
}
} match {
case Success(x) => Full(x.toMap)
case Catch(error) =>
logger.error(s"Error when fetching status of all change requests : ${error.toString()}")
Failure(error.toString())
}
}
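// Note: despite the name, this returns Full(true) when no workflow row exists yet for the
// change request (i.e. a workflow may still be created for it) and Full(false) when one
// already exists; createWorkflow below relies on that convention.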
def isChangeRequestInWorkflow(crId: ChangeRequestId) : Box[Boolean] = {
Try {
val list = jdbcTemplate.query(SELECT_SQL + "WHERE id = ?", Array[AnyRef](crId.value.asInstanceOf[AnyRef]), DummyWorkflowsMapper)
list.toSeq match {
case seq if seq.size == 0 =>
Full(true)
case seq if seq.size > 1 =>
logger.error(s"Too many change request with same id ${crId.value}")
Failure(s"Too many change request with same id ${crId.value}")
case seq if seq.size == 1 => Full(false)
}
} match {
case Success(x) => x
case Catch(error) =>
logger.error(s"Error when fetching existance of change request ${crId.value} : ${error.toString()}")
Failure(error.toString())
}
}
}
class WoWorkflowJdbcRepository(
jdbcTemplate : JdbcTemplate
, roRepo : RoWorkflowRepository
) extends WoWorkflowRepository with Loggable {
val UPDATE_SQL = "UPDATE Workflow set state = ? where id = ?"
val INSERT_SQL = "INSERT into Workflow (id, state) values (?, ?)"
def createWorkflow(crId: ChangeRequestId, state : WorkflowNodeId) : Box[WorkflowNodeId] = {
Try {
roRepo.isChangeRequestInWorkflow(crId) match {
case eb : EmptyBox => eb
case Full(true) =>
jdbcTemplate.update(
INSERT_SQL
, new java.lang.Integer(crId.value)
, state.value
)
roRepo.getStateOfChangeRequest(crId)
case _ =>
logger.error(s"Cannot start a workflow for Change Request id ${crId.value}, as it is already part of a workflow")
Failure(s"Cannot start a workflow for Change Request id ${crId.value}, as it is already part of a workflow")
}
} match {
case Success(x) => x
case Catch(error) =>
logger.error(s"Error when updating status of change request ${crId.value}: ${error.toString}")
Failure(error.toString())
}
}
def updateState(crId: ChangeRequestId, from : WorkflowNodeId, state : WorkflowNodeId) : Box[WorkflowNodeId] = {
Try {
roRepo.getStateOfChangeRequest(crId) match {
case eb : EmptyBox => eb
case Full(entry) =>
if (entry != from) {
Failure(s"Cannot change status of ChangeRequest id ${crId.value} : it has the status ${entry.value} but we were expecting ${from.value}")
} else {
jdbcTemplate.update(
UPDATE_SQL
, state.value
, new java.lang.Integer(crId.value)
)
}
roRepo.getStateOfChangeRequest(crId)
}
} match {
case Success(x) => x
case Catch(error) =>
logger.error(s"Error when updating status of change request ${crId.value} : ${error.toString()}")
Failure(error.toString())
}
}
}
/**
* A dummy object easing the transition between database and code
*/
private[jdbc] case class WorkflowStateMapper(
state : String
, crId : Int
)
object DummyWorkflowsMapper extends RowMapper[WorkflowStateMapper] with Loggable {
def mapRow(rs : ResultSet, rowNum: Int) : WorkflowStateMapper = {
// dummy code
WorkflowStateMapper(
rs.getString("state")
, rs.getInt("id")
)
}
}
|
armeniaca/rudder
|
rudder-core/src/main/scala/com/normation/rudder/repository/jdbc/WorkflowJdbcRepository.scala
|
Scala
|
gpl-3.0
| 7,871 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.facade.filters.annotated
import com.hypertino.binders.value.Lst
import com.hypertino.facade.filter.model.ResponseFilter
import com.hypertino.facade.filter.parser.ExpressionEvaluator
import com.hypertino.facade.metrics.MetricKeys
import com.hypertino.facade.model._
import com.hypertino.hyperbus.model.{DynamicBody, DynamicResponse, ErrorBody, InternalServerError, NotFound, StandardResponse}
import monix.eval.Task
import monix.execution.Scheduler
class ExtractItemResponseFilter(protected val expressionEvaluator: ExpressionEvaluator) extends ResponseFilter {
val timer = Some(MetricKeys.specificFilter("ExtractItemResponseFilter"))
override def apply(requestContext: RequestContext, response: DynamicResponse)
(implicit scheduler: Scheduler): Task[DynamicResponse] = {
implicit val mcx = requestContext.request
response.body.content match {
case Lst(items) ⇒
if (items.isEmpty) Task.raiseError {
NotFound(ErrorBody(ErrorCode.COLLECTION_IS_EMPTY, Some(s"Resource ${requestContext.request.headers.hrl} is an empty collection")))
}
else {
if (items.size > 1) Task.raiseError {
InternalServerError(ErrorBody(ErrorCode.COLLECTION_HAVE_MORE_THAN_1_ITEMS, Some(s"Resource ${requestContext.request.headers.hrl} has ${items.size} items")))
}
else Task.now {
StandardResponse(DynamicBody(items.head), response.headers).asInstanceOf[DynamicResponse]
}
}
case _ ⇒
Task.raiseError {
InternalServerError(ErrorBody(ErrorCode.RESOURCE_IS_NOT_COLLECTION))
}
}
}
}
|
hypertino/hyperfacade
|
src/main/scala/com/hypertino/facade/filters/annotated/ExtractItemResponseFilter.scala
|
Scala
|
mpl-2.0
| 1,933 |
package org.infinispan.spark.test
import java.lang.Thread._
import org.apache.spark.streaming.scheduler.{StreamingListener, StreamingListenerReceiverStarted}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterEach, Suite}
/**
* Trait to be mixed-in by tests requiring org.apache.spark.streaming.StreamingContext
*
* @author gustavonalle
*/
trait SparkStream extends BeforeAndAfterEach {
this: Suite with RemoteTest =>
protected var sc: SparkContext = _
protected var ssc: StreamingContext = _
private lazy val config: SparkConf = new SparkConf().setMaster("local[8]").setAppName(this.getClass.getName).set("spark.driver.host","127.0.0.1")
override protected def beforeEach(): Unit = {
sc = new SparkContext(config)
ssc = new StreamingContext(sc, Seconds(1))
super.beforeEach()
}
override protected def afterEach(): Unit = {
ssc.stop(stopSparkContext = true)
sc.stop()
super.afterEach()
}
protected def executeAfterReceiverStarted(block: => Unit) = {
ssc.addStreamingListener(new StreamingListener {
override def onReceiverStarted(receiverStarted: StreamingListenerReceiverStarted): Unit = {
sleep(1000)
block
}
})
}
}
|
galderz/infinispan-spark
|
src/test/scala/org/infinispan/spark/test/SparkStream.scala
|
Scala
|
apache-2.0
| 1,348 |
package com.maxmouchet.vamk.timetables.parser.timetable.models
import org.joda.time.LocalTime
case class TimeInterval(startTime: LocalTime, endTime: LocalTime)
|
OpenLamas/vamk-timetables
|
lib/scala/src/main/scala/com/maxmouchet/vamk/timetables/parser/timetable/models/TimeInterval.scala
|
Scala
|
mit
| 161 |
package org.joda.time
trait ReadableInterval {
def getChronology(): Chronology
def getStartMillis(): Long
def getStart(): DateTime
def getEndMillis(): Long
def getEnd(): DateTime
def contains(instant: ReadableInstant): Boolean
def contains(interval: ReadableInterval): Boolean
def overlaps(interval: ReadableInterval): Boolean
def isAfter(instant: ReadableInstant): Boolean
def isAfter(interval: ReadableInterval): Boolean
def isBefore(instant: ReadableInstant): Boolean
def isBefore(interval: ReadableInterval): Boolean
def toInterval(): Interval
def toMutableInterval(): MutableInterval
def toDuration(): Duration
def toDurationMillis(): Long
def toPeriod(): Period
def toPeriod(`type`: PeriodType): Period
override def equals(readableInterval: Any): Boolean
override def hashCode(): Int
override def toString(): String
}
|
mdedetrich/soda-time
|
shared/src/main/scala/org/joda/time/ReadableInterval.scala
|
Scala
|
bsd-2-clause
| 888 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.filters
import java.time.{Duration, ZonedDateTime}
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
import org.opengis.feature.simple.SimpleFeatureType
/**
* Age off a feature based on the key timestamp
*/
trait AgeOffFilter extends AbstractFilter {
protected var expiry: Long = -1L
override def init(options: Map[String, String]): Unit = {
import AgeOffFilter.Configuration.ExpiryOpt
expiry = ZonedDateTime.now().minus(Duration.parse(options(ExpiryOpt))).toInstant.toEpochMilli
}
override def accept(row: Array[Byte],
rowOffset: Int,
rowLength: Int,
value: Array[Byte],
valueOffset: Int,
valueLength: Int,
timestamp: Long): Boolean = timestamp > expiry
}
object AgeOffFilter {
// configuration keys
object Configuration {
val ExpiryOpt = "retention"
}
def configure(sft: SimpleFeatureType, expiry: scala.concurrent.duration.Duration): Map[String, String] = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
require(!sft.isTableSharing || SystemProperty("geomesa.age-off.override").option.exists(_.toBoolean),
"AgeOff filter should only be applied to features that don't use table sharing. You may override this check" +
"by setting the system property 'geomesa.age-off.override=true', however please note that age-off" +
"will affect all shared feature types in the same catalog")
// we use java.time.Duration.toString to serialize as ISO-8601 to not break compatibility
Map(Configuration.ExpiryOpt -> Duration.ofMillis(expiry.toMillis).toString)
}
}
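// Usage sketch (hedged: `sft` stands for some SimpleFeatureType that does not use table sharing):
//   import scala.concurrent.duration._
//   AgeOffFilter.configure(sft, 30.days)  // Map("retention" -> "PT720H")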
|
elahrvivaz/geomesa
|
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/filters/AgeOffFilter.scala
|
Scala
|
apache-2.0
| 2,249 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan
import java.math.BigDecimal
import java.sql.{Date, Time, Timestamp}
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rex._
import org.apache.calcite.sql.SqlPostfixOperator
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.`type`.SqlTypeName.{BIGINT, INTEGER, VARCHAR}
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.util.{DateString, TimeString, TimestampString}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog, GenericInMemoryCatalog}
import org.apache.flink.table.expressions._
import org.apache.flink.table.plan.util.{RexNodeToExpressionConverter, RexProgramExtractor}
import org.apache.flink.table.utils.InputTypeBuilder.inputOf
import org.hamcrest.CoreMatchers.is
import org.junit.Assert.{assertArrayEquals, assertEquals, assertThat}
import org.junit.Test
import scala.collection.JavaConverters._
import scala.collection.mutable
class RexProgramExtractorTest extends RexProgramTestBase {
private val functionCatalog: FunctionCatalog = new FunctionCatalog(
new CatalogManager("default_catalog", new GenericInMemoryCatalog("default_catalog")))
private val expressionBridge: ExpressionBridge[PlannerExpression] =
new ExpressionBridge[PlannerExpression](
functionCatalog,
PlannerExpressionConverter.INSTANCE)
@Test
def testExtractRefInputFields(): Unit = {
val usedFields = RexProgramExtractor.extractRefInputFields(buildSimpleRexProgram())
assertArrayEquals(usedFields, Array(2, 3, 1))
}
@Test
def testExtractSimpleCondition(): Unit = {
val builder: RexBuilder = new RexBuilder(typeFactory)
val program = buildSimpleRexProgram()
val firstExp = ExpressionParser.parseExpression("id > 6")
val secondExp = ExpressionParser.parseExpression("amount * price < 100")
val expected: Array[Expression] = Array(firstExp, secondExp)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
builder,
functionCatalog)
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(0, unconvertedRexNodes.length)
}
@Test
def testExtractSingleCondition(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// a = amount >= id
val a = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, t0, t1))
builder.addCondition(a)
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
val expected: Array[Expression] = Array(ExpressionParser.parseExpression("amount >= id"))
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(0, unconvertedRexNodes.length)
}
// ((a AND b) OR c) AND (NOT d) => (a OR c) AND (b OR c) AND (NOT d)
@Test
def testExtractCnfCondition(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// price
val t2 = rexBuilder.makeInputRef(allFieldTypes.get(3), 3)
// 100
val t3 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
// 200
val t4 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(200L))
// a = amount < 100
val a = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, t0, t3))
// b = id > 100
val b = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN, t1, t3))
// c = price == 100
val c = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, t2, t3))
// d = amount <= id
val d = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, t0, t1))
// e = price == 200
val e = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, t2, t4))
// a AND b
val and = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.AND, List(a, b).asJava))
// (a AND b) OR c OR e
val or = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.OR, List(and, c, e).asJava))
// NOT d
val not = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.NOT, List(d).asJava))
// (a AND b) OR c OR e) AND (NOT d)
builder.addCondition(builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.AND, List(or, not).asJava)))
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
val expected: Array[Expression] = Array(
ExpressionParser.parseExpression("amount < 100 || price == 100 || price === 200"),
ExpressionParser.parseExpression("id > 100 || price == 100 || price === 200"),
ExpressionParser.parseExpression("!(amount <= id)"))
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(0, unconvertedRexNodes.length)
}
@Test
def testExtractANDExpressions(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// price
val t2 = rexBuilder.makeInputRef(allFieldTypes.get(3), 3)
// 100
val t3 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
// a = amount < 100
val a = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, t0, t3))
// b = id > 100
val b = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN, t1, t3))
// c = price == 100
val c = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, t2, t3))
// d = amount <= id
val d = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, t0, t1))
// a AND b AND c AND d
val and = builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.AND, List(a, b, c, d).asJava))
builder.addCondition(builder.addExpr(and))
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val expanded = program.expandLocalRef(program.getCondition)
var convertedExpressions = new mutable.ArrayBuffer[Expression]
val unconvertedRexNodes = new mutable.ArrayBuffer[RexNode]
val inputNames = program.getInputRowType.getFieldNames.asScala.toArray
val converter = new RexNodeToExpressionConverter(inputNames, functionCatalog)
expanded.accept(converter) match {
case Some(expression) =>
convertedExpressions += expression
case None => unconvertedRexNodes += expanded
}
val expected: Array[Expression] = Array(
ExpressionParser.parseExpression("amount < 100 && id > 100 && price === 100 && amount <= id"))
assertExpressionArrayEquals(expected, convertedExpressions.toArray)
assertEquals(0, unconvertedRexNodes.length)
}
@Test
def testLiteralConversions(): Unit = {
val fieldNames = List("timestamp_col", "date_col", "time_col").asJava
val fieldTypes = makeTypes(SqlTypeName.TIMESTAMP, SqlTypeName.DATE, SqlTypeName.TIME)
val inputRowType = typeFactory.createStructType(fieldTypes, fieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
val timestampString = new TimestampString("2017-09-10 14:23:01.245")
val rexTimestamp = rexBuilder.makeTimestampLiteral(timestampString, 3)
val rexDate = rexBuilder.makeDateLiteral(new DateString("2017-09-12"))
val rexTime = rexBuilder.makeTimeLiteral(new TimeString("14:23:01"), 0)
val allRexNodes = List(rexTimestamp, rexDate, rexTime)
val condition = fieldTypes.asScala.zipWithIndex
.map((t: (RelDataType, Int)) => rexBuilder.makeInputRef(t._1, t._2))
.zip(allRexNodes)
.map(t => rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, t._1, t._2))
.map(builder.addExpr)
.asJava
builder.addCondition(builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.AND, condition)))
val (converted, _) = RexProgramExtractor.extractConjunctiveConditions(
builder.getProgram,
new RexBuilder(typeFactory),
functionCatalog)
val expected = Array[Expression](
EqualTo(
UnresolvedFieldReference("timestamp_col"),
Literal(Timestamp.valueOf("2017-09-10 14:23:01.245"))
),
EqualTo(
UnresolvedFieldReference("date_col"),
Literal(Date.valueOf("2017-09-12"))
),
EqualTo(
UnresolvedFieldReference("time_col"),
Literal(Time.valueOf("14:23:01"))
)
)
assertExpressionArrayEquals(expected, converted)
}
@Test
def testExtractArithmeticConditions(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// 100
val t2 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
val condition = List(
// amount < id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN, t0, t1)),
// amount <= id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, t0, t1)),
// amount <> id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.NOT_EQUALS, t0, t1)),
// amount == id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS, t0, t1)),
// amount >= id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN_OR_EQUAL, t0, t1)),
// amount > id
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN, t0, t1)),
// amount + id == 100
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.PLUS, t0, t1), t2)),
// amount - id == 100
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.MINUS, t0, t1), t2)),
// amount * id == 100
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.MULTIPLY, t0, t1), t2)),
// amount / id == 100
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.DIVIDE, t0, t1), t2)),
// -amount == 100
builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.UNARY_MINUS, t0), t2))
).asJava
builder.addCondition(builder.addExpr(rexBuilder.makeCall(SqlStdOperatorTable.AND, condition)))
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
val expected: Array[Expression] = Array(
ExpressionParser.parseExpression("amount < id"),
ExpressionParser.parseExpression("amount <= id"),
ExpressionParser.parseExpression("amount <> id"),
ExpressionParser.parseExpression("amount == id"),
ExpressionParser.parseExpression("amount >= id"),
ExpressionParser.parseExpression("amount > id"),
ExpressionParser.parseExpression("amount + id == 100"),
ExpressionParser.parseExpression("amount - id == 100"),
ExpressionParser.parseExpression("amount * id == 100"),
ExpressionParser.parseExpression("amount / id == 100"),
ExpressionParser.parseExpression("-amount == 100")
)
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(0, unconvertedRexNodes.length)
}
@Test
def testExtractPostfixConditions(): Unit = {
testExtractSinglePostfixCondition(4, SqlStdOperatorTable.IS_NULL, "('flag).isNull")
// IS_NOT_NULL will be eliminated since flag is not nullable
// testExtractSinglePostfixCondition(SqlStdOperatorTable.IS_NOT_NULL, "('flag).isNotNull")
testExtractSinglePostfixCondition(4, SqlStdOperatorTable.IS_TRUE, "('flag).isTrue")
testExtractSinglePostfixCondition(4, SqlStdOperatorTable.IS_NOT_TRUE, "('flag).isNotTrue")
testExtractSinglePostfixCondition(4, SqlStdOperatorTable.IS_FALSE, "('flag).isFalse")
testExtractSinglePostfixCondition(4, SqlStdOperatorTable.IS_NOT_FALSE, "('flag).isNotFalse")
}
@Test
def testExtractConditionWithFunctionCalls(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// 100
val t2 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
// sum(amount) > 100
val condition1 = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN,
rexBuilder.makeCall(SqlStdOperatorTable.SUM, t0), t2))
// min(id) == 100
val condition2 = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
rexBuilder.makeCall(SqlStdOperatorTable.MIN, t1), t2))
builder.addCondition(builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.AND, condition1, condition2)))
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
val expected: Array[Expression] = Array(
GreaterThan(Sum(UnresolvedFieldReference("amount")), Literal(100)),
EqualTo(Min(UnresolvedFieldReference("id")), Literal(100))
)
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(0, unconvertedRexNodes.length)
}
@Test
def testExtractWithUnsupportedConditions(): Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
// amount
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(2), 2)
// id
val t1 = rexBuilder.makeInputRef(allFieldTypes.get(1), 1)
// 100
val t2 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
// unsupported now: amount.cast(BigInteger)
val cast = builder.addExpr(rexBuilder.makeCast(allFieldTypes.get(1), t0))
// unsupported now: amount.cast(BigInteger) > 100
val condition1 = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.GREATER_THAN, cast, t2))
// amount <= id
val condition2 = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.LESS_THAN_OR_EQUAL, t0, t1))
// contains unsupported condition: (amount.cast(BigInteger) > 100 OR amount <= id)
val condition3 = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.OR, condition1, condition2))
// only condition2 can be translated
builder.addCondition(
rexBuilder.makeCall(SqlStdOperatorTable.AND, condition1, condition2, condition3))
val program = builder.getProgram
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
val expected: Array[Expression] = Array(
ExpressionParser.parseExpression("amount <= id")
)
assertExpressionArrayEquals(expected, convertedExpressions)
assertEquals(2, unconvertedRexNodes.length)
assertEquals(">(CAST($2):BIGINT NOT NULL, 100)", unconvertedRexNodes(0).toString)
assertEquals("OR(>(CAST($2):BIGINT NOT NULL, 100), <=($2, $1))",
unconvertedRexNodes(1).toString)
}
@Test
def testExtractRefNestedInputFields(): Unit = {
val rexProgram = buildRexProgramWithNesting()
val usedFields = RexProgramExtractor.extractRefInputFields(rexProgram)
val usedNestedFields = RexProgramExtractor.extractRefNestedInputFields(rexProgram, usedFields)
val expected = Array(Array("amount"), Array("*"))
assertThat(usedNestedFields, is(expected))
}
@Test
def testExtractRefNestedInputFieldsWithNoNesting(): Unit = {
val rexProgram = buildSimpleRexProgram()
val usedFields = RexProgramExtractor.extractRefInputFields(rexProgram)
val usedNestedFields = RexProgramExtractor.extractRefNestedInputFields(rexProgram, usedFields)
val expected = Array(Array("*"), Array("*"), Array("*"))
assertThat(usedNestedFields, is(expected))
}
@Test
def testExtractDeepRefNestedInputFields(): Unit = {
val rexProgram = buildRexProgramWithDeepNesting()
val usedFields = RexProgramExtractor.extractRefInputFields(rexProgram)
val usedNestedFields = RexProgramExtractor.extractRefNestedInputFields(rexProgram, usedFields)
val expected = Array(
Array("amount"),
Array("*"),
Array("with.deeper.entry", "with.deep.entry"))
assertThat(usedFields, is(Array(1, 0, 2)))
assertThat(usedNestedFields, is(expected))
}
private def buildRexProgramWithDeepNesting(): RexProgram = {
// person input
val passportRow = inputOf(typeFactory)
.field("id", VARCHAR)
.field("status", VARCHAR)
.build
val personRow = inputOf(typeFactory)
.field("name", VARCHAR)
.field("age", INTEGER)
.nestedField("passport", passportRow)
.build
// payment input
val paymentRow = inputOf(typeFactory)
.field("id", BIGINT)
.field("amount", INTEGER)
.build
// deep field input
val deepRowType = inputOf(typeFactory)
.field("entry", VARCHAR)
.build
val entryRowType = inputOf(typeFactory)
.nestedField("inside", deepRowType)
.build
val deeperRowType = inputOf(typeFactory)
.nestedField("entry", entryRowType)
.build
val withRowType = inputOf(typeFactory)
.nestedField("deep", deepRowType)
.nestedField("deeper", deeperRowType)
.build
val fieldRowType = inputOf(typeFactory)
.nestedField("with", withRowType)
.build
// main input
val inputRowType = inputOf(typeFactory)
.nestedField("persons", personRow)
.nestedField("payments", paymentRow)
.nestedField("field", fieldRowType)
.build
// inputRowType
//
// [ persons: [ name: VARCHAR, age: INT, passport: [id: VARCHAR, status: VARCHAR ] ],
// payments: [ id: BIGINT, amount: INT ],
// field: [ with: [ deep: [ entry: VARCHAR ],
// deeper: [ entry: [ inside: [entry: VARCHAR ] ] ]
// ] ]
// ]
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
val t0 = rexBuilder.makeInputRef(personRow, 0)
val t1 = rexBuilder.makeInputRef(paymentRow, 1)
val t2 = rexBuilder.makeInputRef(fieldRowType, 2)
val t3 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(10L))
// person
val person$pass = rexBuilder.makeFieldAccess(t0, "passport", false)
val person$pass$stat = rexBuilder.makeFieldAccess(person$pass, "status", false)
// payment
val pay$amount = rexBuilder.makeFieldAccess(t1, "amount", false)
val multiplyAmount = builder.addExpr(
rexBuilder.makeCall(SqlStdOperatorTable.MULTIPLY, pay$amount, t3))
// field
val field$with = rexBuilder.makeFieldAccess(t2, "with", false)
val field$with$deep = rexBuilder.makeFieldAccess(field$with, "deep", false)
val field$with$deeper = rexBuilder.makeFieldAccess(field$with, "deeper", false)
val field$with$deep$entry = rexBuilder.makeFieldAccess(field$with$deep, "entry", false)
val field$with$deeper$entry = rexBuilder.makeFieldAccess(field$with$deeper, "entry", false)
val field$with$deeper$entry$inside = rexBuilder
.makeFieldAccess(field$with$deeper$entry, "inside", false)
val field$with$deeper$entry$inside$entry = rexBuilder
.makeFieldAccess(field$with$deeper$entry$inside, "entry", false)
builder.addProject(multiplyAmount, "amount")
builder.addProject(person$pass$stat, "status")
builder.addProject(field$with$deep$entry, "entry")
builder.addProject(field$with$deeper$entry$inside$entry, "entry")
builder.addProject(field$with$deeper$entry, "entry2")
builder.addProject(t0, "person")
// Program
// (
// payments.amount * 10),
// persons.passport.status,
// field.with.deep.entry
// field.with.deeper.entry.inside.entry
// field.with.deeper.entry
// persons
// )
builder.getProgram
}
private def buildRexProgramWithNesting(): RexProgram = {
val personRow = inputOf(typeFactory)
.field("name", INTEGER)
.field("age", VARCHAR)
.build
val paymentRow = inputOf(typeFactory)
.field("id", BIGINT)
.field("amount", INTEGER)
.build
val types = List(personRow, paymentRow).asJava
val names = List("persons", "payments").asJava
val inputRowType = typeFactory.createStructType(types, names)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
val t0 = rexBuilder.makeInputRef(types.get(0), 0)
val t1 = rexBuilder.makeInputRef(types.get(1), 1)
val t2 = rexBuilder.makeExactLiteral(BigDecimal.valueOf(100L))
val payment$amount = rexBuilder.makeFieldAccess(t1, "amount", false)
builder.addProject(payment$amount, "amount")
builder.addProject(t0, "persons")
builder.addProject(t2, "number")
builder.getProgram
}
private def testExtractSinglePostfixCondition(
fieldIndex: Integer,
op: SqlPostfixOperator,
expr: String) : Unit = {
val inputRowType = typeFactory.createStructType(allFieldTypes, allFieldNames)
val builder = new RexProgramBuilder(inputRowType, rexBuilder)
rexBuilder = new RexBuilder(typeFactory)
// flag
val t0 = rexBuilder.makeInputRef(allFieldTypes.get(fieldIndex), fieldIndex)
builder.addCondition(builder.addExpr(rexBuilder.makeCall(op, t0)))
val program = builder.getProgram(false)
val relBuilder: RexBuilder = new RexBuilder(typeFactory)
val (convertedExpressions, unconvertedRexNodes) =
RexProgramExtractor.extractConjunctiveConditions(
program,
relBuilder,
functionCatalog)
assertEquals(1, convertedExpressions.length)
assertEquals(expr, convertedExpressions.head.toString)
assertEquals(0, unconvertedRexNodes.length)
}
private def assertExpressionArrayEquals(
expected: Array[Expression],
actual: Array[Expression]) = {
// TODO we assume only planner expression as a temporary solution to keep the old interfaces
val sortedExpected = expected.map(expressionBridge.bridge).sortBy(e => e.toString)
val sortedActual = actual.sortBy(e => e.toString)
assertEquals(sortedExpected.length, sortedActual.length)
sortedExpected.zip(sortedActual).foreach {
case (l, r) => assertEquals(l.toString, r.toString)
}
}
}
|
fhueske/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/plan/RexProgramExtractorTest.scala
|
Scala
|
apache-2.0
| 24,575 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.batch.table
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.api.scala.util.CollectionDataSets
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.runtime.utils.TableProgramsCollectionTestBase
import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode
import org.apache.flink.table.sinks.CsvTableSink
import org.apache.flink.table.utils.MemoryTableSourceSinkUtil
import org.apache.flink.table.utils.MemoryTableSourceSinkUtil.UnsafeMemoryOutputFormatTableSink
import org.apache.flink.test.util.TestBaseUtils
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import java.io.File
import scala.collection.JavaConverters._
@RunWith(classOf[Parameterized])
class TableSinkITCase(
configMode: TableConfigMode)
extends TableProgramsCollectionTestBase(configMode) {
@Test
def testBatchTableSink(): Unit = {
val tmpFile = File.createTempFile("flink-table-sink-test", ".tmp")
tmpFile.deleteOnExit()
val path = tmpFile.toURI.toString
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = BatchTableEnvironment.create(env, config)
env.setParallelism(4)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"testSink",
new CsvTableSink(path, "|").configure(
Array[String]("c", "b"), Array[TypeInformation[_]](Types.STRING, Types.LONG)))
val input = CollectionDataSets.get3TupleDataSet(env)
.map(x => x).setParallelism(4) // increase DOP to 4
val results = input.toTable(tEnv, 'a, 'b, 'c)
.where('a < 5 || 'a > 17)
.select('c, 'b)
.insertInto("testSink")
tEnv.execute("job name")
val expected = Seq(
"Hi|1", "Hello|2", "Hello world|2", "Hello world, how are you?|3",
"Comment#12|6", "Comment#13|6", "Comment#14|6", "Comment#15|6").mkString("\n")
TestBaseUtils.compareResultsByLinesInMemory(expected, path)
}
@Test
def testOutputFormatTableSink(): Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = BatchTableEnvironment.create(env, config)
MemoryTableSourceSinkUtil.clear()
val fieldNames = Array("c", "b")
val fieldTypes: Array[TypeInformation[_]] = Array(Types.STRING, Types.LONG)
val sink = new UnsafeMemoryOutputFormatTableSink
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"testSink", sink.configure(fieldNames, fieldTypes))
val input = CollectionDataSets.get3TupleDataSet(env)
.map(x => x).setParallelism(4) // increase DOP to 4
input.toTable(tEnv, 'a, 'b, 'c)
.where('a < 5 || 'a > 17)
.select('c, 'b)
.insertInto("testSink")
tEnv.execute("job name")
val results = MemoryTableSourceSinkUtil.tableDataStrings.asJava
val expected = Seq(
"Hi,1", "Hello,2", "Hello world,2", "Hello world, how are you?,3",
"Comment#12,6", "Comment#13,6", "Comment#14,6", "Comment#15,6").mkString("\n")
TestBaseUtils.compareResultAsText(results, expected)
}
}
|
GJL/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/table/TableSinkITCase.scala
|
Scala
|
apache-2.0
| 4,060 |
package edu.gemini.model.p1.targetio.api
import edu.gemini.model.p1.immutable.Target
import java.io.{InputStream, File}
trait TargetReader[+T <: Target] {
type TargetResult = Either[ParseError, T]
type Result = Either[DataSourceError, List[TargetResult]]
def read(file: File): Result
def read(is: InputStream): Result
def read(data: String): Result
}
|
arturog8m/ocs
|
bundle/edu.gemini.model.p1.targetio/src/main/scala/edu/gemini/model/p1/targetio/api/TargetReader.scala
|
Scala
|
bsd-3-clause
| 369 |
package es.weso.utils
object Boolean {
def all(vs: Traversable[Boolean]): Boolean = {
vs.fold(true)(_ && _)
}
def some(vs: Traversable[Boolean]): Boolean = {
vs.fold(false)(_ || _)
}
}
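// Hedged usage sketch (added for illustration; not part of the original source):
// `all` folds with && starting from true and `some` folds with || starting from
// false, so an empty collection yields true and false respectively.
object BooleanUsageExample {
  val everyTrue = Boolean.all(List(true, true, true))  // true
  val anyTrue = Boolean.some(List(false, true, false)) // true
  val emptyAll = Boolean.all(Nil)                      // true (vacuously)
  val emptySome = Boolean.some(Nil)                    // false
}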
|
jorgeyp/ShExcala
|
src/main/scala/es/weso/utils/Boolean.scala
|
Scala
|
mit
| 201 |
/*
* Copyright (c) 2016 by its authors. Some rights reserved.
* See the project homepage at: https://sincron.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sincron.atomic
import org.sincron.atomic.PaddingStrategy.NoPadding
import org.sincron.atomic.boxes.{Factory, BoxedLong}
import scala.annotation.tailrec
import java.lang.Double.{longBitsToDouble, doubleToLongBits}
final class AtomicDouble private (val ref: BoxedLong)
extends AtomicNumber[Double] {
def get: Double = longBitsToDouble(ref.volatileGet())
def set(update: Double): Unit = ref.volatileSet(doubleToLongBits(update))
def lazySet(update: Double): Unit = ref.lazySet(doubleToLongBits(update))
def compareAndSet(expect: Double, update: Double): Boolean = {
val expectLong = doubleToLongBits(expect)
val updateLong = doubleToLongBits(update)
ref.compareAndSet(expectLong, updateLong)
}
def getAndSet(update: Double): Double = {
longBitsToDouble(ref.getAndSet(doubleToLongBits(update)))
}
@tailrec
def increment(v: Int = 1): Unit = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
increment(v)
}
@tailrec
def add(v: Double): Unit = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
add(v)
}
@tailrec
def incrementAndGet(v: Int = 1): Double = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
incrementAndGet(v)
else
update
}
@tailrec
def addAndGet(v: Double): Double = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
addAndGet(v)
else
update
}
@tailrec
def getAndIncrement(v: Int = 1): Double = {
val current = get
val update = incrementOp(current, v)
if (!compareAndSet(current, update))
getAndIncrement(v)
else
current
}
@tailrec
def getAndAdd(v: Double): Double = {
val current = get
val update = plusOp(current, v)
if (!compareAndSet(current, update))
getAndAdd(v)
else
current
}
@tailrec
def subtract(v: Double): Unit = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
subtract(v)
}
@tailrec
def subtractAndGet(v: Double): Double = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
subtractAndGet(v)
else
update
}
@tailrec
def getAndSubtract(v: Double): Double = {
val current = get
val update = minusOp(current, v)
if (!compareAndSet(current, update))
getAndSubtract(v)
else
current
}
def decrement(v: Int = 1): Unit = increment(-v)
def decrementAndGet(v: Int = 1): Double = incrementAndGet(-v)
def getAndDecrement(v: Int = 1): Double = getAndIncrement(-v)
private[this] def plusOp(a: Double, b: Double): Double = a + b
private[this] def minusOp(a: Double, b: Double): Double = a - b
private[this] def incrementOp(a: Double, b: Int): Double = a + b
}
object AtomicDouble {
def apply(initialValue: Double)(implicit strategy: PaddingStrategy = NoPadding): AtomicDouble =
new AtomicDouble(Factory.newBoxedLong(
doubleToLongBits(initialValue),
boxStrategyToPaddingStrategy(strategy)))
}
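// Hedged usage sketch (added for illustration; not part of the original sincron
// sources). Exercises the API defined above: the companion apply defaults to the
// NoPadding strategy and every mutator retries through compareAndSet.
object AtomicDoubleUsageExample {
  def demo(): Double = {
    val counter = AtomicDouble(0.0) // NoPadding by default
    counter.increment()             // 1.0
    counter.add(2.5)                // 3.5
    counter.getAndSubtract(0.5)     // returns 3.5, leaves 3.0
    counter.get                     // 3.0
  }
}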
|
monixio/sincron
|
sincron-atomic/jvm/src/main/scala/org/sincron/atomic/AtomicDouble.scala
|
Scala
|
apache-2.0
| 3,888 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.reportlib.model
import org.scalatest.{Matchers, WordSpec}
import spray.json._
import io.deepsense.commons.types.ColumnType
import io.deepsense.commons.types.ColumnType.ColumnType
import io.deepsense.reportlib.model.factory.TableTestFactory
class TableJsonSpec extends WordSpec with Matchers with TableTestFactory with ReportJsonProtocol {
"Table" should {
"serialize" when {
val rowNames: List[String] = List("rowName1", "rowName2")
val columnNames: List[String] = List("A", "B")
val columnTypes: List[ColumnType] = List(ColumnType.string, ColumnType.numeric)
val values: List[List[Option[String]]] = List(List(Some("11"), None), List(None, Some("34")))
"columnsNames specified" in {
val json = testTableWithLabels(Some(columnNames), columnTypes, None, values).toJson
json shouldBe jsonTable(Some(columnNames), columnTypes, None, values)
}
"rowsNames specified" in {
val json = testTableWithLabels(None, columnTypes, Some(rowNames), values).toJson
json shouldBe jsonTable(None, columnTypes, Some(rowNames), values)
}
"rowsNames, columnNames and columTypes specified" in {
val json = testTableWithLabels(
Some(columnNames), columnTypes, Some(rowNames), values).toJson
json shouldBe jsonTable(Some(columnNames), columnTypes, Some(rowNames), values)
}
"is empty" in {
val json = testEmptyTable.toJson
json shouldBe jsonTable(None, List(), None, List())
}
}
"deserialize" when {
"filled table" in {
val columnNames: Some[List[String]] = Some(List("A", "B"))
val rowNames: Some[List[String]] = Some(List("1", "2"))
val columnTypes: List[ColumnType] = List(ColumnType.string, ColumnType.numeric)
val values: List[List[Option[String]]] =
List(List(Some("a"), Some("1")), List(Some("b"), Some("2")))
val json = jsonTable(columnNames, columnTypes, rowNames, values)
json.convertTo[Table] shouldBe testTableWithLabels(
columnNames, columnTypes, rowNames, values)
}
"empty table" in {
val json = jsonTable(None, List(), None, List())
json.convertTo[Table] shouldBe testTableWithLabels(None, List(), None, List())
}
}
}
private def jsonTable(
columnsNames: Option[List[String]],
columnTypes: List[ColumnType],
rowsNames: Option[List[String]],
values: List[List[Option[String]]]): JsObject = JsObject(Map[String, JsValue](
"name" -> JsString(TableTestFactory.tableName),
"description" -> JsString(TableTestFactory.tableDescription),
"columnNames" -> toJsValue(columnsNames),
"columnTypes" -> toJsValue(Some(columnTypes.map(_.toString))),
"rowNames" -> toJsValue(rowsNames),
"values" ->
JsArray(
values.map(row => JsArray(row.map(op => op.map(JsString(_)).getOrElse(JsNull)).toVector))
.toVector)
))
def toJsValue(values: Option[List[String]]): JsValue with Product with Serializable = {
values
.map(values => JsArray(values.map(JsString(_)).toVector)).getOrElse(JsNull)
}
}
|
deepsense-io/seahorse-workflow-executor
|
reportlib/src/test/scala/io/deepsense/reportlib/model/TableJsonSpec.scala
|
Scala
|
apache-2.0
| 3,799 |
package argonaut
trait JsonIdentity[J] {
val j: J
/**
* Encode to a JSON value using the given implicit encoder.
*/
def jencode(implicit e: EncodeJson[J]): Json = e(j)
/**
* Encode to a JSON value using the given implicit encoder. Alias for `jencode`.
*/
def asJson(implicit e: EncodeJson[J]): Json = jencode
/**
* Encode to a JSONNumber.
*/
def asJsonNumber(implicit asn: EncodeJsonNumber[J]): JsonNumber = asn.encodeJsonNumber(j)
/**
* Encode to a JSONNumber, wrapped in a Some if it is valid, otherwise a None.
*/
def asPossibleJsonNumber(implicit asn: EncodePossibleJsonNumber[J]): Option[JsonNumber] = asn.possiblyEncodeJsonNumber(j)
}
object JsonIdentity extends JsonIdentitys
trait JsonIdentitys {
implicit def ToJsonIdentity[J](k: J): JsonIdentity[J] = {
new JsonIdentity[J] {
val j = k
}
}
implicit def FromJsonIdentity[J](k: JsonIdentity[J]): J = k.j
}
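// Hedged usage sketch (added for illustration; not part of the original argonaut
// sources). With the implicit ToJsonIdentity conversion above and argonaut's
// standard encoders in scope, any value with an EncodeJson instance gains asJson.
object JsonIdentityUsageExample {
  import Argonaut._
  val text: Json = "hello".asJson // via EncodeJson[String]
  val answer: Json = 42.asJson    // via EncodeJson[Int]
}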
|
jedws/argonaut
|
argonaut/src/main/scala/argonaut/JsonIdentity.scala
|
Scala
|
bsd-3-clause
| 937 |
/** *****************************************************************
* See the NOTICE file distributed with this work for additional *
* information regarding Copyright ownership. The author/authors *
* license this file to you under the terms of the Apache License, *
* Version 2.0 (the "License"); you may not use this file except *
* in compliance with the License. You may obtain a copy of the *
* License at: *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, *
* either express or implied. See the License for the specific *
* language governing permissions and limitations under the *
* License. *
* ******************************************************************/
package codes.reactive.sbt
import java.util.jar.Attributes
import aether.SignedAetherPlugin
import com.typesafe.sbt.pgp.PgpKeys
import sbt.Keys._
import sbt._
import sbt.plugins.JvmPlugin
object SbtCodes extends AutoPlugin {
override val trigger: PluginTrigger = allRequirements
override val requires: Plugins = JvmPlugin && SignedAetherPlugin
override lazy val projectSettings: Seq[Def.Setting[_]] = codesSettings
override lazy val buildSettings: Seq[Def.Setting[_]] = codesBuildSettings
override def globalSettings: Seq[Def.Setting[_]] = pluginGlobalSettings
val autoImport = Import
import autoImport._
def pluginSettings = Seq(
codesImplementationVersion := implementationVer(codesRelease.value, codesProfile.value, codesTeamcity.value,
version.value, codesBuildNumber.value, vcsRevision.value),
// codesDevelopers <<= codesDevelopers ?? None,
codesRootDoc <<= codesRootDoc ?? None,
codesDocFooter <<= codesDocFooter ?? None
)
def pluginGlobalSettings = Seq(
codesTeamcity := sys.env.get("TEAMCITY_VERSION").nonEmpty,
codesProfile <<= codesProfile ?? {
sys.props.getOrElse("profile", "development") match {
case "development" => DevelopmentProfile
case "integration" => IntegrationProfile
case "release" => ReleaseProfile
}
},
codesRelease := codesProfile.value == ReleaseProfile
)
def baseProjectSettings = Seq(
name ~= formalize,
pomIncludeRepository := (_ => false),
// pomExtra := { codesDevelopers.value map (d ⇒ pomExtra.value ++ developersToXml(d: _*)) getOrElse pomExtra.value },
scmInfo <<= codesGithubRepo(r => Some(ScmInfo(url(r.browseUrl), r.connection, Some(r.developerConnection))))
)
def artefactSettings = {
def licenceMappings(c: Configuration)(t: TaskKey[_]) = inConfig(c)(inTask(t)(
Seq(mappings <++= baseDirectory map { (base: File) =>
Seq((base / "LICENSE") -> "META-INF/LICENSE", (base / "NOTICE") -> "META-INF/NOTICE")
})))
val m = licenceMappings(Compile) _
Seq(
publish <<= aether.AetherKeys.aetherDeploy,
publishLocal <<= PgpKeys.publishLocalSigned,
packageOptions in(Compile, packageBin) +=
Package.ManifestAttributes(
Attributes.Name.SPECIFICATION_VERSION -> version.value,
Attributes.Name.IMPLEMENTATION_VERSION -> codesImplementationVersion.value
),
packageOptions in(Compile, packageBin) += Package.ManifestAttributes(
"Built-By" -> System.getProperty("java.version"),
"Built-Time" -> java.util.Calendar.getInstance.getTimeInMillis.toString)
) ++ m(packageBin) ++ m(packageSrc) ++ m(packageDoc)
}
def codesSettings: Seq[Setting[_]] = pluginSettings ++ artefactSettings ++ baseProjectSettings
private def codesBuildSettings = Seq(
codesBuildNumber := tcBuildNumber(codesTeamcity.value),
codesGithubRepo := GithubRepo("reactivecodes", (normalizedName in LocalRootProject).value),
codesVersionMessage in LocalRootProject := tcBuildMetaTask(codesTeamcity.value, (codesImplementationVersion in LocalRootProject).value)
)
// Used to formalize project name for projects declared with the syntax 'val fooProject = project ...'
private def formalize(name: String): String = name.split("-|(?<!(^|[A-Z]))(?=[A-Z])|(?<!^)(?=[A-Z][a-z])")
.map(_.capitalize).mkString(" ")
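  // Hedged examples (added for illustration): formalize("fooProject") yields
  // "Foo Project" and formalize("sbt-codes") yields "Sbt Codes".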
// Append relevant build implementation information to the version/revision
private def implementationVer(release: Boolean, profile: BuildProfile, teamcity: Boolean, version: String, buildNumber: Option[String], buildVCSNumber: Option[String]) =
s"$version+${implementationMeta(release, profile, teamcity, buildNumber, buildVCSNumber).mkString(".")}"
// Build the build implementation meta information
private def implementationMeta(release: Boolean, profile: BuildProfile, teamcity: Boolean, buildNumber: Option[String], buildVCSNumber: Option[String]): Seq[String] = {
def vcsNo = buildVCSNumber.map(_.take(7))
def published: Boolean = release || {
profile match {
case ReleaseProfile | IntegrationProfile => true;
case _ => false
}
}
def buildNo = buildNumber.map(n => if (published) s"b$n" else s"DEV-b$n")
Seq(buildNo, vcsNo) collect { case v if v.nonEmpty ⇒ v.get }
}
// Obtain the build number if running in TeamCity
private def tcBuildNumber(teamcity: Boolean) = sys.env.get("BUILD_NUMBER")
// Update TeamCity build number with implementation meta info
private def tcBuildMetaTask(teamcity: Boolean, implementationVersion: String) =
if (teamcity) println(s"##teamcity[buildNumber '$implementationVersion']")
}
|
reactivecodes/sbt-codes
|
src/main/scala/codes/reactive/sbt/SbtCodes.scala
|
Scala
|
apache-2.0
| 5,739 |
// Assumes a vararg case class like `case class Numbers(values: Int*)` defined
// elsewhere; a vararg pattern (_*) must be last, so bind the whole sequence.
def caseClassSequencePattern(value: Any): String = value match {
  case Numbers(numbers @ _*) => "Last number: " + numbers.last
}
|
grzegorzbalcerek/scala-book-examples
|
examples/PatternsCaseClasses5.scala
|
Scala
|
mit
| 109 |
package org.jetbrains.plugins.scala
package lang
package checkers
package checkPrivateAccess
import java.io.File
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.{CharsetToolkit, LocalFileSystem}
import com.intellij.psi.PsiMember
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.resolve.ResolveUtils
/**
* User: Alexander Podkhalyuzin
* Date: 08.10.2009
*/
abstract class CheckPrivateAccessTestBase extends ScalaLightPlatformCodeInsightTestCaseAdapter {
val refMarker = "/*ref*/"
protected def folderPath = baseRootPath() + "checkers/checkPrivateAccess/"
protected def doTest() {
import _root_.junit.framework.Assert._
val filePath = folderPath + getTestName(false) + ".scala"
val file = LocalFileSystem.getInstance.findFileByPath(filePath.replace(File.separatorChar, '/'))
assert(file != null, "file " + filePath + " not found")
val fileText = StringUtil.convertLineSeparators(FileUtil.loadFile(new File(file.getCanonicalPath), CharsetToolkit.UTF8))
configureFromFileTextAdapter(getTestName(false) + ".scala", fileText)
val scalaFile = getFileAdapter.asInstanceOf[ScalaFile]
val offset = fileText.indexOf(refMarker) + refMarker.length
assert(offset != refMarker.length - 1, "Not specified caret marker in test case. Use " + refMarker + " in scala file for this.")
val elem = scalaFile.findElementAt(offset).getParent
    if (!elem.isInstanceOf[ScReferenceElement]) assert(assertion = false, message = "Ref marker should point to a reference")
val ref = elem.asInstanceOf[ScReferenceElement]
val resolve: PsiMember = PsiTreeUtil.getParentOfType(ref.resolve(), classOf[PsiMember], false)
val res = "" + ResolveUtils.isAccessible(resolve, elem)
val lastPsi = scalaFile.findElementAt(scalaFile.getText.length - 1)
val text = lastPsi.getText
val output = lastPsi.getNode.getElementType match {
case ScalaTokenTypes.tLINE_COMMENT => text.substring(2).trim
case ScalaTokenTypes.tBLOCK_COMMENT | ScalaTokenTypes.tDOC_COMMENT =>
text.substring(2, text.length - 2).trim
case _ => assertTrue("Test result must be in last comment statement.", false)
}
assertEquals(output, res.toString)
}
}
|
ilinum/intellij-scala
|
test/org/jetbrains/plugins/scala/lang/checkers/checkPrivateAccess/CheckPrivateAccessTestBase.scala
|
Scala
|
apache-2.0
| 2,584 |
package com.tribbloids.spookystuff.doc
import com.tribbloids.spookystuff.actions._
import com.tribbloids.spookystuff.{dsl, SpookyEnvFixture}
import com.tribbloids.spookystuff.testutils.LocalPathDocsFixture
import com.tribbloids.spookystuff.utils.serialization.AssertSerializable
import org.apache.spark.SparkEnv
/**
* Created by peng on 11/30/14.
*/
class TestUnstructured extends SpookyEnvFixture with LocalPathDocsFixture {
import dsl._
lazy val page = (Wget(HTML_URL).as('old) :: Nil).fetch(spooky).head.asInstanceOf[Doc]
it("Unstructured is serializable for div") {
val elements = page.findAll("div.central-featured-lang")
assert(elements.size === 10)
elements.foreach { element =>
AssertSerializable[Unstructured](
element,
condition = { (element, element2) =>
assert(element === element2)
assert(
element.asInstanceOf[HtmlElement].formattedCode.get.split("\\n").map(_.trim) === element2
.asInstanceOf[HtmlElement]
.formattedCode
.get
.split("\\n")
.map(_.trim))
assert(element.findAll("a").size === element2.findAll("a").size)
assert(element.attr("class") === element2.attr("class"))
assert(element.code === element2.code)
assert(element.ownText === element2.ownText)
assert(element.boilerPipe === element2.boilerPipe)
}
)
}
}
lazy val tablePage = (Wget(HTML_URL).as('old) :: Nil).fetch(spooky).head.asInstanceOf[Doc]
ignore("Unstructured is serializable for tr") {
val elements = tablePage.findAll("table#mp-topbanner > tbody > tr")
assert(elements.size === 1)
elements.foreach { element =>
val ser = SparkEnv.get.serializer.newInstance()
val serElement = ser.serialize(element)
val element2 = ser.deserialize[Unstructured](serElement)
assert(element === element2)
assert(
element.asInstanceOf[HtmlElement].formattedCode.get.split("\\n").map(_.trim) === element2
.asInstanceOf[HtmlElement]
.formattedCode
.get
.split("\\n")
.map(_.trim))
assert(element.findAll("a").size === element2.findAll("a").size)
assert(element.attr("class") === element2.attr("class"))
assert(element.code === element2.code)
assert(element.ownText === element2.ownText)
assert(element.boilerPipe === element2.boilerPipe)
}
}
ignore("Unstructured is serializable for td") {
val elements = tablePage.findAll("table#mp-topbanner > tbody > tr > td")
assert(elements.size === 4)
elements.foreach { element =>
val ser = SparkEnv.get.serializer.newInstance()
val serElement = ser.serialize(element)
val element2 = ser.deserialize[Unstructured](serElement)
assert(element === element2)
assert(
element.asInstanceOf[HtmlElement].formattedCode.get.split("\\n").map(_.trim) === element2
.asInstanceOf[HtmlElement]
.formattedCode
.get
.split("\\n")
.map(_.trim))
assert(element.findAll("a").size === element2.findAll("a").size)
assert(element.attr("class") === element2.attr("class"))
assert(element.code === element2.code)
assert(element.ownText === element2.ownText)
assert(element.boilerPipe === element2.boilerPipe)
}
}
it("attrs should handles empty attributes properly") {
assert(page.findAll("h1.central-textlogo img").attrs("title").nonEmpty)
assert(page.findAll("h1.central-textlogo img dummy").attrs("title").isEmpty)
assert(page.findAll("h1.central-textlogo img").attrs("dummy").isEmpty)
}
}
|
tribbloid/spookystuff
|
core/src/test/scala/com/tribbloids/spookystuff/doc/TestUnstructured.scala
|
Scala
|
apache-2.0
| 3,693 |
package me.yingrui.segment.util
object CharCheckUtil {
def isChinese(c: Char): Boolean =
Character.UnicodeBlock.of(c) == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
def isChinese(s: String): Boolean = {
s.find(ch => !isChinese(ch)) match {
case None => true
case _ => false
}
}
def isSymbol(c: Char): Boolean = {
val typo = Character.getType(c)
typo == 24 || typo == 25 || typo == 21 || typo == 22 || typo == 27 || typo == 23 || typo == 26 || typo == 20
}
  def isWhiteSpace(word: String): Boolean = word.matches("\\s+")
def isEnglish(word: String): Boolean = word.matches("[A-z]+('|'s)?")
}
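// Hedged usage sketch (added for illustration; not part of the original source).
// Exercises the checks above on simple inputs.
object CharCheckUtilUsageExample {
  def demo(): Unit = {
    assert(CharCheckUtil.isChinese('中'))     // CJK unified ideograph
    assert(CharCheckUtil.isChinese("中文"))
    assert(!CharCheckUtil.isChinese("中a"))   // mixed input is rejected
    assert(CharCheckUtil.isEnglish("word's")) // matches [A-z]+('|'s)?
    assert(CharCheckUtil.isWhiteSpace("  "))  // one or more whitespace characters
  }
}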
|
yingrui/mahjong
|
lib-segment/src/main/scala/me/yingrui/segment/util/CharCheckUtil.scala
|
Scala
|
gpl-3.0
| 667 |
package com.temportalist.href.common.inventory
import com.temportalist.href.common.tile.TETransmitter
import com.temportalist.origin.wrapper.common.inventory.ContainerWrapper
import net.minecraft.entity.player.EntityPlayer
/**
*
*
* @author TheTemportalist
*/
class ContainerTransmitter(p: EntityPlayer, te: TETransmitter) extends ContainerWrapper(p, te) {
override protected def registerSlots(): Unit = {
for (x <- 0 until 9) {
for (y <- 0 until 4) {
this.registerSlot(x + (y * 9), 8 + (x * 18), 8 + (y * 18), isFinal = false)
}
}
this.registerPlayerSlots(0, 0)
}
}
|
TheTemportalist/href
|
src/main/scala/com/temportalist/href/common/inventory/ContainerTransmitter.scala
|
Scala
|
apache-2.0
| 595 |
package scodec
package protocols
import language.higherKinds
import fs2.Stream
package object time {
/**
* A single value in a `TimeSeries`. Provides a timestamp along with either a value of type `A` or
* a clock tick (represented by a none).
*/
type TimeSeriesValue[+A] = TimeStamped[Option[A]]
/**
* A stream of timestamped values or clock ticks.
*
* Values are represented as right values in a `TimeStamped[Option[A]]`, whereas
* clock ticks are represented as nones. This encoding allows for an indication
* of time passage with no observed values.
*
* Generally, time series appear in increasing order, and many combinators that work with
* time series will rely on that. For streams that are globally ordered, but not locally ordered,
* i.e., near adjacent values might be out of order but values at great distance from each other
* are ordered, consider using `TimeStamped.reorderLocally` to adjust.
*/
type TimeSeries[F[_], +A] = Stream[F, TimeSeriesValue[A]]
/** Alias for a stream transducer on time series values. */
type TimeSeriesTransducer[F[_], -A, +B] = Stream[F, TimeSeriesValue[A]] => Stream[F, TimeSeriesValue[B]]
}
|
scodec/scodec-protocols
|
src/main/scala/scodec/protocols/time/package.scala
|
Scala
|
bsd-3-clause
| 1,195 |