code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M)
---|---|---|---|---|---|
package com.rasterfoundry.common
import geotrellis.store.LayerId
import io.circe._
import io.circe.generic.JsonCodec
@JsonCodec
final case class LayerAttribute(layerName: String,
zoom: Int,
name: String,
value: Json) {
def layerId: LayerId = LayerId(layerName, zoom)
}
object LayerAttribute {
def tupled = (LayerAttribute.apply _).tupled
def create = LayerAttribute.apply _
}
| raster-foundry/raster-foundry | app-backend/common/src/main/scala/LayerAttribute.scala | Scala | apache-2.0 | 482 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.flink.table.api.TableException
import org.apache.flink.table.catalog.ResolvedCatalogTable
import org.apache.flink.table.connector.sink.abilities.SupportsPartitioning
import org.apache.flink.table.planner.plan.`trait`.FlinkRelDistribution
import org.apache.flink.table.planner.plan.abilities.sink.{PartitioningSpec, SinkAbilitySpec}
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalSink
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalSink
import org.apache.flink.table.types.logical.RowType
import org.apache.calcite.plan.RelOptRule
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import scala.collection.JavaConversions._
import scala.collection.mutable
class StreamPhysicalSinkRule extends ConverterRule(
classOf[FlinkLogicalSink],
FlinkConventions.LOGICAL,
FlinkConventions.STREAM_PHYSICAL,
"StreamPhysicalSinkRule") {
def convert(rel: RelNode): RelNode = {
val sink = rel.asInstanceOf[FlinkLogicalSink]
val newTrait = rel.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
var requiredTraitSet = sink.getInput.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
val abilitySpecs: mutable.ArrayBuffer[SinkAbilitySpec] =
mutable.ArrayBuffer(sink.abilitySpecs: _*)
val resolvedCatalogTable = sink.contextResolvedTable.getResolvedTable
.asInstanceOf[ResolvedCatalogTable]
if (resolvedCatalogTable.isPartitioned) {
sink.tableSink match {
case partitionSink: SupportsPartitioning =>
if (sink.staticPartitions.nonEmpty) {
val partitioningSpec = new PartitioningSpec(sink.staticPartitions)
partitioningSpec.apply(partitionSink)
abilitySpecs += partitioningSpec
}
val dynamicPartFields = resolvedCatalogTable.getPartitionKeys
.filter(!sink.staticPartitions.contains(_))
val fieldNames = resolvedCatalogTable
.getResolvedSchema
.toPhysicalRowDataType
.getLogicalType.asInstanceOf[RowType]
.getFieldNames
if (dynamicPartFields.nonEmpty) {
val dynamicPartIndices =
dynamicPartFields.map(fieldNames.indexOf(_))
// TODO This option is hardcoded to remove the dependency of planner from
// flink-connector-files. We should move this option out of FileSystemConnectorOptions
val shuffleEnable = resolvedCatalogTable
.getOptions
.getOrDefault("sink.shuffle-by-partition.enable", "false")
if (shuffleEnable.toBoolean) {
requiredTraitSet = requiredTraitSet.plus(
FlinkRelDistribution.hash(dynamicPartIndices
.map(Integer.valueOf), requireStrict = false))
}
if (partitionSink.requiresPartitionGrouping(false)) {
throw new TableException("Partition grouping in stream mode is not supported yet!")
}
}
case _ => throw new TableException(
s"'${sink.contextResolvedTable.getIdentifier.asSummaryString}' is a partitioned table, " +
s"but the underlying [${sink.tableSink.asSummaryString()}] DynamicTableSink " +
s"doesn't implement SupportsPartitioning interface.")
}
}
val newInput = RelOptRule.convert(sink.getInput, requiredTraitSet)
new StreamPhysicalSink(
rel.getCluster,
newTrait,
newInput,
sink.hints,
sink.contextResolvedTable,
sink.tableSink,
abilitySpecs.toArray
)
}
}
object StreamPhysicalSinkRule {
val INSTANCE = new StreamPhysicalSinkRule
}
| wwjiang007/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamPhysicalSinkRule.scala | Scala | apache-2.0 | 4,646 |
/*
* (c) Copyright 2019 EntIT Software LLC, a Micro Focus company, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.google.utils.action
import io.cloudslang.content.google.utils.action.InputUtils.verifyEmpty
import org.apache.commons.lang3.StringUtils.EMPTY
import org.junit.Test
import org.specs2.matcher.JUnitMustMatchers
/**
* Created by victor on 2/25/17.
*/
class InputUtilsTest extends JUnitMustMatchers {
val NON_EMPTY_STRING = "a"
@Test
def verifyEmptyTest(): Unit = {
verifyEmpty(null) must beNone
verifyEmpty(EMPTY) must beNone
verifyEmpty(NON_EMPTY_STRING) mustEqual Option(NON_EMPTY_STRING)
}
}
| CloudSlang/cs-actions | cs-google/src/test/scala/io/cloudslang/content/google/utils/action/InputUtilsTest.scala | Scala | apache-2.0 | 1,209 |
package com.ajjpj.adiagram_.render.shapes.lineend
import javafx.scene.canvas.GraphicsContext
import com.ajjpj.adiagram_.geometry.{Angle, APoint}
import com.ajjpj.adiagram_.render.base.LineStyle
import com.ajjpj.adiagram_.geometry.transform.Translation
import com.ajjpj.adiagram_.ui.Zoom
/**
* @author arno
*/
class NullLineEnd extends ALineEnd {
override def shortenLengthUnzoomed(style: LineStyle) = 0.0
override def width(style: LineStyle, zoom: Zoom) = 0.0
override def paint(gc: GraphicsContext, p: APoint, angle: Angle, style: LineStyle, t: Translation, zoom: Zoom) {
}
}
| arnohaase/a-diagram | src/main/scala-old/com/ajjpj/adiagram_/render/shapes/lineend/NullLineEnd.scala | Scala | apache-2.0 | 590 |
package com.github.takezoe.solr.scala
import org.apache.solr.client.solrj.response.UpdateResponse
import org.apache.solr.client.solrj.{SolrClient => ApacheSolrClient}
import org.apache.solr.common.SolrInputDocument
class BatchRegister(server: ApacheSolrClient, collection: Option[String], docs: Map[String, Any]*) {
add(docs: _*)
def add(docs: Any*): BatchRegister = {
CaseClassMapper.toMapArray(docs: _*).foreach { doc =>
val solrDoc = new SolrInputDocument
doc.collect { case (key, value) =>
solrDoc.addField(key, value)
}
collection match {
case Some(c) => server.add(c, solrDoc)
case None => server.add(solrDoc)
}
}
this
}
def commit(): UpdateResponse = server.commit
def commit(collection: String): UpdateResponse = server.commit(collection)
def rollback(): UpdateResponse = server.rollback
}
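// A minimal usage sketch of BatchRegister. The Solr endpoint, collection name, and field names
// below are hypothetical and only illustrate how add(...) chains and commit(...) flushes the
// batch; they are not taken from this repository.
object BatchRegisterUsageSketch {
  import org.apache.solr.client.solrj.impl.HttpSolrClient
  def run(): Unit = {
    val solr = new HttpSolrClient.Builder("http://localhost:8983/solr").build() // hypothetical endpoint
    // the constructor registers the first document; add(...) returns this so calls can be chained
    new BatchRegister(solr, Some("books"), Map("id" -> "1", "title" -> "First"))
      .add(Map("id" -> "2", "title" -> "Second"))
      .commit("books") // flush both documents to the hypothetical "books" collection
  }
}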
| takezoe/solr-scala-client | src/main/scala/com/github/takezoe/solr/scala/BatchRegister.scala | Scala | apache-2.0 | 883 |
import java.io.File
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import scala.collection.mutable.HashSet
import scala.io.Source
case class Config(
inputFacts: String = "", inputRules: String = "", outputDir: String = "",
outputFacts: String = "facts.csv", outputRules: String = "rules.csv",
task: String = "", maxFacts: Int = -1, maxRules: Int = -1, ruleType: Int = -1,
functionalConstraint: Int = 100, trueFacts: String = ""
)
object Main {
private val conf = new SparkConf().setAppName("Ontological Pathfinding")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
private val spark = new SparkContext(conf)
private val logger = Logger.getLogger(getClass.getName)
private val options = new scopt.OptionParser[Config]("op") {
head("Ontological Pathfinding", "1.0")
opt[String]("input-facts") required() action { (value, option) =>
option.copy(inputFacts = value) } text("Path to input facts.")
opt[String]("input-rules") required() action { (value, option) =>
option.copy(inputRules = value) } text("Path to input rules.")
opt[String]("output-dir") required() action { (value, option) =>
option.copy(outputDir = value) } text("Path to output directory.")
cmd("partition") action { (_, option) =>
option.copy(task = "partition") } text(
"Partition the input KB.") children(
opt[Int]("max-facts") required() action { (value, option) =>
option.copy(maxFacts = value) } text("Maximum facts."),
opt[Int]("max-rules") required() action { (value, option) =>
option.copy(maxRules = value) } text("Maximum rules."),
opt[String]("output-facts") optional() action { (value, option) =>
option.copy(outputFacts = value) } text("Output facts."),
opt[String]("output-rules") optional() action { (value, option) =>
option.copy(outputRules = value) } text("Output rules.")
)
cmd("learn") action { (_, option) =>
option.copy(task = "learn") } text("Learn inference rules.") children(
opt[Int]("rule-type") required() action { (value, option) =>
option.copy(ruleType = value) } validate { x => if (1 <= x && x <= 9)
success else failure("Supporting rule types 1-9.")
} text("Rule type."),
opt[Int]("functional-constraint") abbr("fc") action { (value, option) =>
option.copy(functionalConstraint = value) }
text("Functional constraint.")
)
cmd("infer") action { (_, option) =>
option.copy(task = "infer") } text("Infer new facts.") children(
opt[Int]("rule-type") required() action { (value, option) =>
option.copy(ruleType = value) } validate { x => if (1 <= x && x <= 9)
success else failure("Supporting rule types 1-9.")
} text("Rule type."),
opt[Int]("functional-constraint") abbr("fc") action { (value, option) =>
option.copy(functionalConstraint = value) }
text("Functional constraint.")
)
cmd("evaluate") action { (_, option) =>
option.copy(task = "evaluate") } text("Infer and evaluate.") children(
opt[String]("ground-truth") required() action { (value, option) =>
option.copy(trueFacts = value) } text("Ground truth facts.")
)
}
def main(args: Array[String]) {
options.parse(args, Config()) match {
case Some(config) => config.task match {
case "partition" => partition(config)
case "learn" => learn(config)
case "infer" => infer(config)
case "evaluate" => evaluate(config)
}
case None => logger.fatal("Abort.")
}
}
// Runs the rule learning algorithm.
private def learn(config: Config) {
val facts = spark.textFile(config.inputFacts, 64)
.map(fact => fact.split(" ").map(_.toInt))
val rules = Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ").map(_.toInt))
.toArray
OPLearners(config.ruleType)
.learn(facts, spark.broadcast(rules), config.functionalConstraint)
.filter({case (rule, supp, conf) => conf > 0})
.map(_.productIterator.mkString(" "))
.saveAsTextFile(config.outputDir)
}
private def infer(config: Config) {
val facts = spark.textFile(config.inputFacts, 64)
.map(fact => fact.split(" ").map(_.toInt))
val rules = Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ").map(_.toInt))
.toArray
OPLearners(config.ruleType)
.infer(facts, spark.broadcast(rules), config.functionalConstraint)
.map({case (fact, rules) =>
fact.productIterator.mkString(" ") + ":" + rules.mkString(" ")})
.saveAsTextFile(config.outputDir)
}
private def evaluate(config: Config) {
object ::> {def unapply[T] (arr: Array[T]) = Some((arr.init, arr.last))}
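// `::>` destructures an array into (everything-but-last, last): Array(1, 2, 3) matches
// `xs ::> last` with xs = Array(1, 2) and last = 3, so the patterns below peel the trailing
// support and confidence columns off each rule line.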
val ruleStats = Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ") match {
case Array(id, _*) ::> supp ::> conf => (id.toInt -> conf.toDouble)})
.toMap
val inferFacts = spark.textFile(config.inputFacts, 64)
.filter(line => !line.split(':')(1).split(' ').contains("0"))
.map(line => line.split(':') match {
case Array(fact, rules) => (
fact.split(" ") match {
case Array(p, x, y) => (p.toInt, x.toInt, y.toInt)},
rules.split(" ").map(rule => ruleStats(rule.toInt)).max
)})
.reduceByKey((a, b) => Math.max(a, b), 512)
val preds = Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ") match {
case Array(id, h, _*) => h.toInt}).toSet
val truths = spark.textFile(config.trueFacts, 64)
.map(fact => fact.split(" ") match {
case Array(p, x, y) => ((p.toInt, x.toInt, y.toInt), 0)})
.filter({case ((p, x, y), 0) => preds.contains(p)})
.rightOuterJoin(inferFacts, 10000)
.map({
case ((p, x, y), (Some(_), conf)) => (p, x, y, conf, 1, 1)
case ((p, x, y), (None, conf)) => (p, x, y, conf, 0, 1)
})
// .sortBy({case (p, x, y, conf, _, _) => conf}, false, 10000)
.map(_.productIterator.mkString(" "))
.saveAsTextFile(config.outputDir)
}
private def evaluate2(config: Config) {
object ::> {def unapply[T] (arr: Array[T]) = Some((arr.init, arr.last))}
val ruleStats = spark.broadcast(
Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ") match {
case Array(id, _*) ::> supp ::> conf => (id.toInt -> conf.toDouble)})
.toMap)
val inferFacts = spark.textFile(config.inputFacts, 64)
.map(line => line.split(':') match {
case Array(fact, rules) => (fact.split(' ') match {
case Array(p, x, y) => (p.toInt, x.toInt, y.toInt)}, rules)
})
.filter({case (fact, rules) => !rules.split(' ').contains("0")})
val preds = spark.broadcast(Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ") match {
case Array(id, h, _*) => h.toInt}).toSet)
val truths = spark.textFile(config.trueFacts, 64)
.map(fact => fact.split(" ") match {
case Array(p, x, y) => ((p.toInt, x.toInt, y.toInt), 0)})
.filter({case ((p, x, y), 0) => preds.value.contains(p)})
.rightOuterJoin(inferFacts, 512)
.map({
case ((p, x, y), (Some(_), rules)) => (rules, 1, 1)
case ((p, x, y), (None, rules)) => (rules, 0, 1)
})
.flatMap({case (rules, sum, cnt) => rules.split(' ')
.map(r => (r.toInt, (sum, cnt)))})
.reduceByKey((x, y) => (x._1 + y._1, x._2 + y._2))
.map({case (r, (sum, cnt)) => (r, 1.0*sum/cnt, ruleStats.value(r.toInt))})
.sortBy({case (r, conf, _) => conf}, false, 8000)
.map(_.productIterator.mkString(" "))
.saveAsTextFile(config.outputDir)
}
// Runs the partitioning algorithm.
private def partition(config: Config) {
val rules = Source.fromFile(config.inputRules).getLines()
.map(rule => rule.split(" ").map(_.toInt))
.toArray
val histogram = spark.textFile(config.inputFacts, 64)
.map(f => f.split(" ") match {
case Array(pred, sub, obj) => (pred.toInt, 1)})
.reduceByKey(_ + _)
.collectAsMap
val partitions = new Partitioner(rules, histogram,
config.maxFacts, config.maxRules).getPartitions
writePartitions(partitions, config)
}
private def writePartitions(partitions: Array[Partition],
config: Config) {
for ((partition, index) <- partitions.zipWithIndex) {
val predicates = spark.broadcast(partition.predicates)
val facts = spark.textFile(config.inputFacts, 64)
.map(fact => fact.split(" ") map(_.toInt))
.filter(fact => predicates.value.contains(fact(0)))
.map(fact => fact.mkString(" "))
.saveAsTextFile(config.outputDir + "/part-" + index + "/" +
config.outputFacts)
val rules = new java.io.PrintWriter(
config.outputDir + "/part-" + index + "/" + config.outputRules)
rules.write(partition.rules.map(rule => rule.mkString(" "))
.mkString("\n"))
rules.close
}
}
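// OPLearnerType1 appears twice so that OPLearners(ruleType) can be indexed directly with the
// 1-based --rule-type values (1..9) accepted on the command line.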
private val OPLearners = Array(
OPLearnerType1, OPLearnerType1, OPLearnerType2, OPLearnerType3,
OPLearnerType4, OPLearnerType5, OPLearnerType6, OPLearnerType7,
OPLearnerType8, OPLearnerType9
)
}
| yang-chen/Ontological-Pathfinding | src/main/scala/Main.scala | Scala | bsd-2-clause | 9,543 |
package skinny.micro.contrib.flash
import java.util.concurrent.{ ConcurrentHashMap, ConcurrentSkipListSet }
import skinny.micro.contrib.FlashMapSupport
import skinny.micro.data.MutableMapWithIndifferentAccess
import scala.collection.JavaConverters._
/**
* A FlashMap is the data structure used by [[FlashMapSupport]]
* to allow passing temporary values between sequential actions.
*
* FlashMap behaves like [[skinny.micro.data.MapWithIndifferentAccess]]. By
* default, anything placed in the map is available to the current request and
* next request, and is then discarded.
*
* @see FlashMapSupport
*/
class FlashMap extends MutableMapWithIndifferentAccess[Any] with Serializable {
private[this] val m = new ConcurrentHashMap[String, Any]().asScala
private[this] val flagged = new ConcurrentSkipListSet[String]().asScala
/**
* Removes an entry from the flash map. It is no longer available for this
* request or the next.
*/
def -=(key: String): FlashMap.this.type = {
m -= key
this
}
/**
* Adds an entry to the flash map. Clears the sweep flag for the key.
*/
def +=(kv: (String, Any)): FlashMap.this.type = {
flagged -= kv._1
m += kv
this
}
/**
* Creates a new iterator over the values of the flash map. These are the
* values that were added during the last request.
*/
def iterator = new Iterator[(String, Any)] {
private[this] val it = m.iterator
def hasNext = it.hasNext
def next = {
val kv = it.next
flagged += kv._1
kv
}
}
/**
* Returns the value associated with a key and flags it to be swept.
*/
def get(key: String): Option[Any] = {
flagged += key
m.get(key)
}
/**
* Removes all flagged entries.
*/
def sweep(): Unit = {
flagged foreach { key => m -= key }
}
/**
* Clears all flags so no entries are removed on the next sweep.
*/
def keep(): Unit = {
flagged.clear()
}
/**
* Clears the flag for the specified key so its entry is not removed on the next sweep.
*/
def keep(key: String): Unit = {
flagged -= key
}
/**
* Flags all current keys so the entire map is cleared on the next sweep.
*/
def flag(): Unit = {
flagged ++= m.keys
}
/**
* Sets a value for the current request only. It will be removed before the next request unless explicitly kept.
* Data put in this object is available as usual:
* {{{
* flash.now("notice") = "logged in successfully"
* flash("notice") // "logged in successfully"
* }}}
*/
object now {
def update(key: String, value: Any) = {
flagged += key
m += key -> value
}
}
}
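// A minimal sketch of the flash lifecycle described in the class comment (hypothetical usage,
// not part of this library): reading a key flags it, and sweep() at the end of the following
// request removes exactly the flagged entries unless keep() cancels the removal.
object FlashMapLifecycleSketch {
  def run(): Unit = {
    val flash = new FlashMap
    flash += "notice" -> "saved"   // set during request 1 (clears any sweep flag for the key)
    flash.get("notice")            // read during request 2; flags the key for sweeping
    flash.sweep()                  // end of request 2: flagged entries are dropped
    assert(flash.get("notice").isEmpty)
  }
}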
| xerial/skinny-micro | micro/src/main/scala/skinny/micro/contrib/flash/FlashMap.scala | Scala | bsd-2-clause | 2,672 |
package es.upm.oeg.pnk.actions
import es.upm.oeg.pnk.Analyze
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by cbadenes on 04/08/15.
*/
object EntityRecognizerDistinctExample {
def main(args: Array[String]): Unit = {
// Spark Configuration
val conf = new SparkConf().
setMaster("local[4]").
setAppName("Local Spark Example").
set("spark.executor.memory", "8g").
set("spark.driver.maxResultSize", "4g")
val sc = new SparkContext(conf)
Logger.getRootLogger.setLevel(Level.WARN)
// Compute distinct entities by type from the raw entity output
EntityRecognizer.distinct(sc,"output/entities/raw","output/entities/distinct/personas","PERS")
EntityRecognizer.distinct(sc,"output/entities/raw","output/entities/distinct/lugares","LUG")
EntityRecognizer.distinct(sc,"output/entities/raw","output/entities/distinct/organizaciones","ORG")
EntityRecognizer.distinct(sc,"output/entities/raw","output/entities/distinct/otros","OTROS")
}
}
| cbadenes/pnk | src/test/scala/es/upm/oeg/pnk/actions/EntityRecognizerDistinctExample.scala | Scala | gpl-2.0 | 1,018 |
package io.kaitai.struct.languages.components
import io.kaitai.struct.ClassTypeProvider
import io.kaitai.struct.datatype.DataType
import io.kaitai.struct.datatype.DataType.SwitchType
import io.kaitai.struct.exprlang.Ast
import io.kaitai.struct.format.Identifier
import io.kaitai.struct.translators.BaseTranslator
/**
* Trait to be used when a language needs two implementations of switching:
* a "true" one utilizing a switch-like statement and an "if emulation"
* which utilizes a series of if-then-else statements to emulate a switch.
*
* "True" switches are typically more efficient, but are limited to a
* subset of types. Examples of consumers of this pattern are C++, C#, Java.
*/
trait SwitchIfOps extends SwitchOps {
val translator: BaseTranslator
def typeProvider: ClassTypeProvider
/**
* Determines whether this particular implementation of switches can use the true
* built-in `switch` mechanism, or whether it will require `if`-based emulation.
*
* @param onType type we'll be switching over
* @return true if `if`-based emulation is required
*/
def switchRequiresIfs(onType: DataType): Boolean
def switchIfStart(id: Identifier, on: Ast.expr, onType: DataType): Unit
def switchIfCaseFirstStart(condition: Ast.expr): Unit = switchIfCaseStart(condition)
def switchIfCaseStart(condition: Ast.expr): Unit
def switchIfCaseEnd(): Unit
def switchIfElseStart(): Unit
def switchIfElseEnd(): Unit = switchIfCaseEnd()
def switchIfEnd(): Unit
/**
* Generate switch cases by calling case procedures. Suitable for a wide variety of
* target languages that have something remotely resembling a C-like `switch`-`case` statement.
* Thanks to customizable argument type for case procedures, can be used for switch type
* handling and a variety of other cases (e.g. switching between customizable endianness,
* etc).
* @param id attribute identifier
* @param on on expression to decide upon
* @param cases cases map: keys should be expressions, values are arbitrary typed objects
* that will be passed to case procedures
* @param normalCaseProc procedure that would handle "normal" (i.e. non-else case)
* @param elseCaseProc procedure that would handle "else" case
* @tparam T type of object to pass to procedures
*/
override def switchCases[T](
id: Identifier,
on: Ast.expr,
cases: Map[Ast.expr, T],
normalCaseProc: (T) => Unit,
elseCaseProc: (T) => Unit
): Unit = {
val onType = translator.detectType(on)
typeProvider._currentSwitchType = Some(onType)
val switchIfs = switchRequiresIfs(onType)
if (switchIfs) {
switchCasesUsingIf(id, on, onType, cases, normalCaseProc, elseCaseProc)
} else {
switchCasesRender(id, on, cases, normalCaseProc, elseCaseProc)
}
}
protected def switchCasesUsingIf[T](
id: Identifier,
on: Ast.expr,
onType: DataType,
cases: Map[Ast.expr, T],
normalCaseProc: (T) => Unit,
elseCaseProc: (T) => Unit
): Unit = {
val someNormalCases = cases.filter { case (caseExpr, _) =>
caseExpr != SwitchType.ELSE_CONST
}.size > 0
if (someNormalCases) {
switchIfStart(id, on, onType)
// Pass 1: only normal case clauses
var first = true
cases.foreach { case (condition, result) =>
condition match {
case SwitchType.ELSE_CONST =>
// skip for now
case _ =>
if (first) {
switchIfCaseFirstStart(condition)
first = false
} else {
switchIfCaseStart(condition)
}
normalCaseProc(result)
switchIfCaseEnd()
}
}
// Pass 2: else clause, if it is there
cases.get(SwitchType.ELSE_CONST).foreach { (result) =>
switchIfElseStart()
elseCaseProc(result)
switchIfElseEnd()
}
switchIfEnd()
} else {
cases.get(SwitchType.ELSE_CONST).foreach { (result) =>
elseCaseProc(result)
}
}
}
}
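// A small sketch of the "if emulation" contract described above (hypothetical helper, not part
// of the compiler): semantically, switching over `on` with an optional else branch is equivalent
// to a chain of if/else-if equality tests, which is what the switchIf* hooks emit for languages
// whose native switch cannot handle the type being switched on.
object SwitchIfEmulationSketch {
  def emulateSwitch[A, B](on: A, cases: Seq[(A, () => B)], default: () => B): B =
    cases.collectFirst { case (cond, body) if cond == on => body() }.getOrElse(default())

  // emulateSwitch(2, Seq(1 -> (() => "one"), 2 -> (() => "two")), () => "other") returns "two"
}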
| kaitai-io/kaitai_struct_compiler | shared/src/main/scala/io/kaitai/struct/languages/components/SwitchIfOps.scala | Scala | gpl-3.0 | 4,050 |
/** *****************************************************************************
* This is part of ltlfo2mon (https://github.com/jckuester/ltlfo2mon).
*
* Copyright (c) 2013 by Jan-Christoph Kuester <[email protected]>
*
* Ltlfo2mon is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Ltlfo2mon is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with ltlfo2mon. If not, see <http://www.gnu.org/licenses/>.
* *****************************************************************************/
package ltlfo2mon.cli
import java.io.FileNotFoundException
import ltlfo2mon.Conf
import ltlfo2mon.monitor._
import ltlfo2mon.util.parsing._
import scala.collection._
import scala.io.Source
object Ltlfo2mon {
val cmdParser = new scopt.immutable.OptionParser[Config]("ltlfo2mon", "v1.3") {
def options = Seq(
arg("<LTLFO formula>", "LTLFO formula that gets monitored.") { (v: String, c: Config) => c.copy(formula = v)}, // Syntax: p | q | φ & ψ | φ || ψ | X φ | φ U ψ | G | F | -> | <-> | ")
argOpt("<trace>", "Monitor reads single trace from stdin.") { (v: String, c: Config) => c.copy()}, // Syntax: {<name>(<value>)} {} ..., e.g., {sms(123)} {}
//opt("l", "look-up-table", "<file>", "Provide file with pre-computed SAs.") { (v: String, c: Config) => c.copy(saCacheFile = v) },
opt("o", "output", "<file>", "Write monitor statistics (size, number of submonitors, etc.) to file.") { (v: String, c: Config) => c.copy(outputFile = v)},
flag("p", "progression", "Use progression/formula rewriting as monitor.") { (c: Config) => c.copy(progression = true)},
flag("sa", "sa-monitor", "Use deprecated SA-based monitor (default is optimized SA-based monitor, based on ltl3tools).") { (c: Config) => c.copy(sa = true)},
flag("v", "verbose", "Show monitor's statistics (size, number of submonitors, etc.) after each step.") { (c: Config) => c.copy(verbose = true)},
flag("vv", "verbose level 2", "Show monitor's inner-state after each step.") { (c: Config) => c.copy(verbose = true, verbose2 = true)}
)
}
def main(args: Array[String]): Unit = {
cmdParser.parse(args, Config()).fold {
// arguments are bad, usage message will have been displayed
} { config =>
// set verbose mode
Conf.verbose = config.verbose
Conf.verbose2 = config.verbose2
val formulaParser = new FormulaParser(Conf.struct)
val traceParser = new TraceParser(Conf.struct)
formulaParser.parse(config.formula) match {
case None => sys.exit()
case Some(formula) =>
try {
val traceFile = Source.stdin.getLines()
val traceStr = traceFile.next()
if (traceFile.hasNext) {
println("More than one trace in file.")
sys.exit()
} else {
traceParser.parse(traceStr) match {
case None => sys.exit()
case Some(trace) =>
/*
* Progression
*/
if (config.progression) {
val progression = new Progression(formula, Conf.struct)
val output = progression.process(trace, config.outputFile)
if (!Conf.verbose)
println("Result Progression: " + output._1 + " after " + output._2 + " events.")
return
} else if (config.sa) {
/*
* naive SAbasedMonitor
*/
if (config.saCacheFile == "") {
// read or create look-up table
//println("INFO: No look-up table provided; SAs will be pre-computed.")
unoptimised.SA.setLookupTable(formula)
} else {
unoptimised.SA.setLookupTableFromFile(config.saCacheFile)
}
val saBasedMonitor = new unoptimised.SAbasedMonitor(formula, Conf.struct)
val output = saBasedMonitor.process(trace, config.outputFile)
if (!Conf.verbose)
println("Result SA: " + output._1 + " after " + output._2 + " events.")
return
} else {
/*
* optimised SAbasedMonitor
*/
if (config.saCacheFile == "") {
// read or create look-up table
//println("INFO: No look-up table provided; SAs will be pre-computed.")
optimised.SAbasedMonitor.setLookupTable(formula)
} else {
optimised.SAbasedMonitor.setLookupTableFromFile(config.saCacheFile)
}
val saBasedMonitor = new optimised.SAbasedMonitor(formula, Conf.struct)
val output = saBasedMonitor.process(trace, config.outputFile)
if (!Conf.verbose)
println("Result SA-opt: " + output._1 + " after " + output._2 + " events.")
return
}
}
}
} catch {
case e: FileNotFoundException =>
println("No or wrong trace-file given.")
sys.exit()
}
}
}
}
/*
* config for scopt
*/
case class Config(formula: String = "",
outputFile: String = "", progression: Boolean = false, sa: Boolean = false, saCacheFile: String = "", verbose: Boolean = false, verbose2: Boolean = false)
}
| jckuester/ltlfo2mon | src/main/scala/ltlfo2mon/cli/Ltlfo2mon.scala | Scala | gpl-3.0 | 6,082 |
package au.id.cxd.math.model.evaluation
import au.id.cxd.math.function.moments.Mean
/**
* Calculates the R-squared metric as the squared correlation coefficient between observations and predictions.
*
* Note however R^2 has the shortcoming of being influenced by extreme values
* in variation given its use of the squared error.
*
*
*/
class RSquared(val obs:Seq[Double], val pred:Seq[Double]) {
def op():Double = {
val obs_mean = Mean(obs)
val pred_mean = Mean(pred)
val obs_ss = obs.foldLeft(0.0)((acc, o) => acc + (o - obs_mean)*(o - obs_mean))
val pred_ss = pred.foldLeft(0.0)((acc,p) => acc + (p - pred_mean)*(p-pred_mean))
val joint_ss = obs.zip(pred).foldLeft(0.0) {
(acc, pair) =>
acc + (pair._1 - obs_mean)*(pair._2 - pred_mean)
}
// squared correlation coefficient: (sum of cross-products)^2 over the product of the two sums of squares
(joint_ss * joint_ss) / (obs_ss * pred_ss)
}
}
object RSquared {
/**
* calculate R^2
* @param obs - observations
* @param pred - predicted values.
* @return
*/
def apply(obs:Seq[Double], pred:Seq[Double]):Double =
new RSquared(obs, pred).op()
}
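// A small worked sketch (hypothetical values): under the squared-correlation definition of R^2,
// identical observation and prediction sequences give R^2 = 1.0.
object RSquaredUsageSketch {
  def run(): Unit = {
    val obs = Seq(1.0, 2.0, 3.0, 4.0)
    val pred = Seq(1.0, 2.0, 3.0, 4.0)
    println(s"R^2 = ${RSquared(obs, pred)}") // expected: 1.0 for a perfect fit
  }
}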
| cxd/scala-au.id.cxd.math | math/src/main/scala/au/id/cxd/math/model/evaluation/RSquared.scala | Scala | mit | 1,031 |
import scala.util.parsing.combinator._
object DictTxtToXml4 extends RegexParsers {
def dict = rep(entry) ^^ { entries => <dict>{ entries }</dict>}
def entry = partOfSpeech into { part => translations(part) }
def partOfSpeech = "["~>("noun"|"verb")<~"]"
def translations(part: String) = enTranslation~"-"~frTranslation(part) ^^ {
case e~_~f => <entry kind={part}>{e}{f}</entry>
}
def enTranslation = word ^^ (x => <en>{ x }</en>)
def frTranslation(part: String) = part match {
case "noun" => word~gender ^^ { case w~g => <fr>{ w }{ g }</fr> }
case "verb" => word ^^ ( w => <fr>{ w }</fr> )
}
def gender = "("~>"""\w""".r<~")" ^^ (x => <gender>{ x }</gender>)
def word = """\w+""".r ^^ {w => <word>{ w }</word>}
}
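// A hypothetical usage sketch (the input line below is invented for illustration): parseAll runs
// the `dict` grammar over a whole dictionary text and yields the XML tree on success.
object DictTxtToXml4Sketch {
  def main(args: Array[String]): Unit = {
    val input = "[noun] cat - chat (m) [verb] eat - manger"
    DictTxtToXml4.parseAll(DictTxtToXml4.dict, input) match {
      case DictTxtToXml4.Success(xml, _) => println(xml)
      case failure => println(failure)
    }
  }
}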
| grzegorzbalcerek/scala-book-examples | examples/DictTxtToXml4.scala | Scala | mit | 753 |
package com.seanshubin.schulze.server
import com.seanshubin.server._
import com.seanshubin.schulze.server.data_transfer._
import com.seanshubin.schulze._
import com.seanshubin.schulze.persistence.{PersistenceSnapshotApi, PersistenceApi}
import com.seanshubin.schulze.server.data_transfer.Candidate
import com.seanshubin.schulze.server.data_transfer.AlternativesAndPaths
import com.seanshubin.schulze.server.data_transfer.TallyDto
import com.seanshubin.schulze.server.data_transfer.Voter
import com.seanshubin.server.SimplifiedRequest
import com.seanshubin.server.Content
import com.seanshubin.server.SimplifiedResponse
import com.seanshubin.schulze.StrongestPathResolution
import com.seanshubin.schulze.server.data_transfer.Election
import com.seanshubin.schulze.AlternatePathExploration
class SchulzeHandler(jsonSerialization: JsonSerialization,
persistence: PersistenceApi,
displayOrdering: DisplayOrdering) extends SimplifiedHandler {
val lock = new Object()
def handle(request: SimplifiedRequest): Option[SimplifiedResponse] = {
import Verb._
val maybeResponse = lock.synchronized {
//should really let datomic handle synchronization, for now, synchronize it at the http level
request match {
case Post("elections") => createElection(request)
case Post("elections", electionName, "candidates") => createCandidate(request, electionName)
case Get("elections") => getElections
case Get("elections", electionName, "candidates") => getCandidates(electionName)
case Get("elections", name) => getElection(name)
case Post("voters") => createVoter(request)
case Get("voters") => getVoters
case Get("voters", name) => getVoter(name)
case Delete("voters", name) => deleteVoter(name)
case Delete("elections", name) => deleteElection(name)
case Delete("elections", electionName, "candidates", candidateName) => deleteCandidate(electionName, candidateName)
case Get("favicon.ico") => favicon()
case Get("elections", electionName, "candidates", candidateName) => getCandidate(electionName, candidateName)
case Get("vote") => getVote(request)
case Put("vote") => putVote(request)
case Patch("elections", electionName, "candidates", candidateName) => patchCandidate(request, electionName, candidateName)
case Get("places", electionName) => getPlaces(request, electionName)
case Get("tally", electionName) => getTally(request, electionName)
case Options("elections") => emptyOkResponse
case _ => None
}
}
maybeResponse
}
def cleanName(name: String): String = {
name.trim.replaceAll("""\s+""", " ")
}
private def createElection(request: SimplifiedRequest): Option[SimplifiedResponse] = {
val Election(electionName) = jsonSerialization.fromJson(request.body, classOf[Election])
persistence.createElection(cleanName(electionName))
emptyOkResponse
}
private def createCandidate(request: SimplifiedRequest, electionName: String): Option[SimplifiedResponse] = {
val CandidateName(candidateName) = jsonSerialization.fromJson(request.body, classOf[CandidateName])
persistence.createCandidate(electionName, cleanName(candidateName))
emptyOkResponse
}
private def patchCandidate(request: SimplifiedRequest, electionName: String, candidateName: String): Option[SimplifiedResponse] = {
val Candidate(candidateName, maybeDescription) = jsonSerialization.fromJson(request.body, classOf[Candidate])
persistence.updateCandidate(electionName, candidateName, maybeDescription)
emptyOkResponse
}
private def getElections: Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
val elections = displayOrdering.sortElections(snapshot.electionNames()).map(Election.apply)
val body = jsonSerialization.toJson(elections)
okJsonResponse(body)
}
private def getCandidates(electionName: String): Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
if (snapshot.electionNames().contains(electionName)) {
val candidates = displayOrdering.sortCandidates(snapshot.candidates(electionName))
val body = jsonSerialization.toJson(candidates)
okJsonResponse(body)
} else {
notFoundResponse
}
}
private def createVoter(request: SimplifiedRequest): Option[SimplifiedResponse] = {
val Voter(voterName) = jsonSerialization.fromJson(request.body, classOf[Voter])
persistence.createVoter(cleanName(voterName))
emptyOkResponse
}
private def deleteVoter(name: String) = {
val snapshot = persistence.snapshot
if (snapshot.voterNames().contains(name)) {
persistence.deleteVoter(name)
emptyOkResponse
}
else notFoundResponse
}
private def deleteElection(name: String) = {
val snapshot = persistence.snapshot
if (snapshot.electionNames().contains(name)) {
persistence.deleteElection(name)
emptyOkResponse
}
else notFoundResponse
}
private def getVoter(name: String) = {
val snapshot = persistence.snapshot
if (snapshot.voterNames().contains(name)) okJsonResponse(jsonSerialization.toJson(Voter(name)))
else notFoundResponse
}
private def getElection(name: String) = {
val snapshot = persistence.snapshot
if (snapshot.electionNames().contains(name)) okJsonResponse(jsonSerialization.toJson(Election(name)))
else notFoundResponse
}
private def getCandidate(electionName: String, candidateName: String) = {
val snapshot = persistence.snapshot
val maybeCandidate = snapshot.candidate(electionName, candidateName)
val response = maybeCandidate match {
case Some(candidate) =>
okJsonResponse(jsonSerialization.toJson(candidate))
case None => notFoundResponse
}
response
}
private def deleteCandidate(electionName: String, candidateName: String) = {
val snapshot = persistence.snapshot
val maybeCandidate = snapshot.candidate(electionName, candidateName)
val response = maybeCandidate match {
case Some(candidate) =>
persistence.deleteCandidate(electionName, candidateName)
emptyOkResponse
case None => notFoundResponse
}
response
}
private def getVoters: Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
val voters = displayOrdering.sortVoters(snapshot.voterNames()).map(Voter.apply)
val body = jsonSerialization.toJson(voters)
okJsonResponse(body)
}
private def getVote(request: SimplifiedRequest): Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
val parameters = request.parametersAsMapOfFirstOccurrence()
val voterName = parameters("voter")
val electionName = parameters("election")
if (snapshot.electionNames().contains(electionName) && snapshot.voterNames().contains(voterName)) {
val rankings = snapshot.vote(electionName, voterName).map(CandidateDescriptionRank.apply)
val sortedRankings = displayOrdering.sortRankings(rankings)
val body = jsonSerialization.toJson(sortedRankings)
okJsonResponse(body)
} else {
notFoundResponse
}
}
def rankingsFromMap(rankingsMap: Map[String, Long]): Map[String, Option[Long]] = {
def convertEntry(oldEntry: (String, Long)): (String, Option[Long]) = {
val (key, value) = oldEntry
val newEntry = key -> Some(value.asInstanceOf[Long])
newEntry
}
val ranking = rankingsMap.map(convertEntry)
ranking
}
private def putVote(request: SimplifiedRequest): Option[SimplifiedResponse] = {
val parameters = request.parametersAsMapOfFirstOccurrence()
val voterName = parameters("voter")
val electionName = parameters("election")
val rankings = CandidateRank.seqToMap(jsonSerialization.fromJsonArray(request.body, classOf[CandidateRank]))
persistence.updateVote(electionName, voterName, CandidateRank.removeOptionNone(rankings))
emptyOkResponse
}
private def getPlaces(request: SimplifiedRequest, electionName: String): Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
if (snapshot.electionNames().contains(electionName)) {
val tallyResult = computeTally(snapshot, electionName)
okJsonResponse(jsonSerialization.toJson(tallyResult.places))
} else {
notFoundResponse
}
}
private def getTally(request: SimplifiedRequest, electionName: String): Option[SimplifiedResponse] = {
val snapshot = persistence.snapshot
if (snapshot.electionNames().contains(electionName)) {
val tallyResult = computeTally(snapshot, electionName)
okJsonResponse(jsonSerialization.toJson(tallyResult))
} else {
notFoundResponse
}
}
def computeTally(snapshot: PersistenceSnapshotApi, electionName: String): TallyDto = {
val candidateNames = snapshot.candidates(electionName).map(_.name).toSeq.sorted
val votes = Vote.normalizeVotes(candidateNames, snapshot.votes(electionName))
val tally = new Tally(candidateNames, votes)
val places = tally.rankings
val voterNames = tally.voterNames
val preferences = Strengths.fillBlanksWithZeroes(candidateNames, tally.preferences)
val floydWarshall = strongestPathResolutionToFloydWarshall(tally.strongestPathResolution, tally.preferences)
val strongestPaths = tally.strongestPaths
TallyDto(places, candidateNames.toSeq, voterNames, votes, preferences, floydWarshall, strongestPaths)
}
private def strongestPathResolutionToFloydWarshall(strongestPathResolution: StrongestPathResolution, preferences: Map[String, Map[String, Long]]): Map[String, AlternativesAndPaths] = {
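// addWeights interleaves pairwise preference strengths into a candidate path: a path
// Seq("A", "B", "C") becomes Seq("A", strength(A, B), "B", strength(B, C), "C").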
def addWeights(path: Seq[String]): Seq[Any] = {
if (path.isEmpty) Seq()
else if (path.size == 1) Seq(path)
else {
val segments = path.sliding(2).toVector
def expandSegment(segment: Seq[String]): Seq[Any] = {
val Seq(winner, loser) = segment
val strength = Strengths.strength(preferences, winner, loser)
Seq(winner, strength, loser)
}
def getTail(x: Seq[Any]) = x.tail
val expanded: Seq[Seq[Any]] = segments.map(expandSegment).toSeq.toVector
val result = (expanded.head +: expanded.tail.map(getTail)).flatten
result
}
}
def toEntry(exploration: AlternatePathExploration): (String, AlternativesAndPaths) = {
val candidateName = exploration.candidate
val alternatives = exploration.alternatePaths.map(addWeights)
val paths = exploration.strongestPaths.map(addWeights)
candidateName -> AlternativesAndPaths(alternatives, paths)
}
val entries = strongestPathResolution.alternateExplorations.map(toEntry)
entries.toMap
}
private def okJsonResponse(body: String): Option[SimplifiedResponse] = {
val code = HttpResponseCode.Ok.code
val mediaType = InternetMediaType.Json.name
val content = Content(mediaType, body)
Some(SimplifiedResponse(code, Some(content), headers))
}
private def favicon(): Option[SimplifiedResponse] = Some(SimplifiedResponse(HttpResponseCode.NoContent.code))
private val headers: Seq[(String, String)] = Seq(
("Content-Type", InternetMediaType.Json.name)
)
private val emptyOkResponse = Some(SimplifiedResponse(HttpResponseCode.Ok.code, None, headers))
private val notFoundResponse = Some(SimplifiedResponse(HttpResponseCode.NotFound.code, None, headers))
}
| SeanShubin/schulze | server/src/main/scala/com/seanshubin/schulze/server/SchulzeHandler.scala | Scala | unlicense | 11,454 |
package ru.listok.test
import _root_.ru.listok._
import org.scalatest.FunSuite
class EnvTest extends FunSuite with Helper {
// def listok = new Listok
test("def") {
expect(Lint(42)){listok.eval("(def a 42) a")}
expect(Lint(43)){listok.eval(
"""
(def a 42)
((lambda () (setf a (+ a 1))))
a
""")}
expect(Lint(42)){listok.eval(
"""
(def a 42)
((lambda (a) (setf a (+ a 1))) 1)
a
""")}
}
test("constants") {
expect(Lint(42)){listok.eval("(defconstant a 42) a")}
intercept[SyntaxError]{listok.eval(
"""
(defconstant a 42)
(setf a (+ a 1))
""")}
}
test("redefine") {
intercept[SyntaxError]{listok.eval("(def a 1)(def a 2)")}
intercept[SyntaxError]{listok.eval("(def a 1)(defconstant a 2)")}
intercept[SyntaxError]{listok.eval("(defun fn ())(def fn 2)")}
intercept[SyntaxError]{listok.eval("(defmacro m ())(def m 2)")}
expect(Lkeyword('inlet)){listok.eval("(def a 1)(let ((a :inlet)) a)")}
expect(Lint(2)){listok.eval("(def a 1)(progn (setf a 2) a)")}
expect(Lint(2)){listok.eval("(def a 1)(progn (def a 2) a)")}
//expect(Lint(1)){listok.eval("(def a 1)(progn (def a 2) a)")}
}
test("test scope") {
expect(LL(Lint(22), Lint(11), Lint(3), Lint(2))){listok.eval(
"""
(def a 1)
(setf a (+ a 1))
(def r (list a))
(progn
(setf r (cons (+ 1 a) r))
;(def a 11)
(let ((a 11))
(setf r (cons a r))
(let ((a 22))
(setf r (cons a r))
)))
r
""")}
}
test("env and nested vars") {
expect(LL(Lint(1),Lint(2),Lint(3),Lint(4),Lint(5),Lint(6),Lint(7),
Lint(8),Lint(9),Lint(10),Lint(11),Lint(12))){
listok.eval(
"""
(def g1 1)
(def g2 2)
(def l1 3)
(def l2 4)
(let ((b1 5) (b2 6))
(def ll1 7)
(def ll2 8)
((lambda (x y)
(def ll3 11)
(def g3 12)
(list g1 g2 l1 l2 b1 b2 ll1 ll2 x y ll3 g3))
9 10))
""")}
}
test("nested func") {
expect(Lint(18)){listok.eval(
"""
(defun f1 (x1 x2 x3 x4)
(defun f2 (x1 x2) (+ x1 x2 x3 x4))
(f2 5 6))
(f1 1 2 3 4)
""")}
expect(LL(Lint(7),Lint(2))){listok.eval(
"""
(defun foo (n)
(defun bar () n)
(progn (bar)))
(list (foo 7) (foo 2))
""")}
}
test("func and let") {
expect(LL(Lint(2), Lint(1))){listok.eval(
"""
(defun swap (a b)
(let ((temp a))
(setf a b)
(setf b temp)
(list a b)))
(swap 1 2)
""")}
}
test("recurse") {
expect(Lint(55)){listok.eval("""
(defun fn (x sum)
;(declare :tco)
;(write-line (string "fn " x " " sum))
(match x
((numberp n)
;(write-line (string " x=" x " n=" n " sum=" sum)) (read-line)
(fn (if (< n 10) (+ n 1) :done) (+ n sum)))
(:done
;(write-line "work is done")
sum)
))
(fn 1 0)
""")}
expect(Lint(10)){listok.eval(
"""
(defun fn (a)
(if (= a 10)
a
(progn (fn (+ a 1)))))
(fn 1)
""")}
}
test("load with prefix") {
val m =
"""
(def a :ma)
(defun f1 () :mf1)
(defun f2 (x) (list x :mf2 (f1) a))
(f2 nil)
"""
def makelistok = new Listok {
override def onload(env: Env, source: String) = {
Listok.load(env, new java.io.ByteArrayInputStream(m.getBytes))
}
}
val l = makelistok
expect(LL(Lnil, Lkeyword('mf2), Lkeyword('mf1), Lkeyword('ma))) {l.eval("(load :unused :m)")}
expect(Lkeyword('ma)){l.eval("m:a")}
expect(Lkeyword('mf1)){l.eval("(m:f1)")}
expect(LL(Ltrue, Lkeyword('mf2), Lkeyword('mf1), Lkeyword('ma))) {l.eval("(m:f2 t)")}
val l1 = makelistok
expect(LL(Lnil, Lkeyword('mf2), Lkeyword('mf1), Lkeyword('ma))){l1.eval("(load :unused)")}
expect(Lkeyword('ma)){l1.eval("a")}
expect(Lkeyword('mf1)){l1.eval("(f1)")}
expect(LL(Ltrue, Lkeyword('mf2), Lkeyword('mf1), Lkeyword('ma))) {l1.eval("(f2 t)")}
}
test("macro") {
expect(LL(Lint(2), Lint(1))) {
listok.eval(
"""
(defmacro swap (a b)
`(let ((z ,a))
(setf ,a ,b)
(setf ,b z)
))
(def x 1)
(def y 2)
(swap x y)
(list x y)
""")}
}
}
| kolyvan/listok | src/test/scala/env-test.scala | Scala | lgpl-3.0 | 4,398 |
import stainless.lang._
object InnerClasses2 {
abstract class Test {
def something: BigInt
}
def foo(x: Boolean, l: BigInt): Test = {
require(l > 1)
def bar(y: Boolean, m: BigInt): Test = {
require(m > 2)
def baz(z: Boolean, o: BigInt): Test = {
require(o > 3)
case class FooBarBaz(a: Boolean, b: Boolean, c: Boolean) extends Test {
def something: BigInt = if (a) l else if (b) m else if (c) o else 0
}
FooBarBaz(x, y, z)
}
baz(false, 4)
}
bar(true, 3)
}
def test = (foo(false, 2).something == 3).holds
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/InnerClasses2.scala | Scala | apache-2.0 | 605 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.builder.query.db.crud
import com.outworkers.phantom.PhantomSuite
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.tables._
import com.outworkers.util.samplers._
class InsertCasTest extends PhantomSuite {
override def beforeAll(): Unit = {
super.beforeAll()
database.primitives.createSchema()
database.testTable.createSchema()
database.recipes.createSchema()
}
"Standard inserts" should "not create multiple database entries and perform upserts instead" in {
val row = gen[PrimitiveRecord]
val chain = for {
truncate <- database.primitives.truncate.future()
store <- database.primitives.storeRecords(List(row, row, row, row))
one <- database.primitives.select.where(_.pkey eqs row.pkey).one
multi <- database.primitives.select.where(_.pkey eqs row.pkey).fetch()
} yield (one, multi)
whenReady(chain) { case (res1, res3) =>
info("The one query should return a record")
res1 shouldBe defined
info("And the record should equal the inserted record")
res1.value shouldEqual row
info("And only one record should be retrieved from a range fetch")
res3 should have size 1
}
}
"Conditional inserts" should "not create duplicate database entries" in {
val row = gen[PrimitiveRecord]
val chain = for {
truncate <- database.primitives.truncate.future()
store <- database.primitives.storeRecords(List(row, row, row, row))
one <- database.primitives.select.where(_.pkey eqs row.pkey).one
multi <- database.primitives.select.where(_.pkey eqs row.pkey).fetch()
} yield (one, multi)
whenReady(chain) { case (res1, res3) =>
info("The one query should return a record")
res1 shouldBe defined
info("And the record should equal the inserted record")
res1.value shouldEqual row
info("And only one record should be retrieved from a range fetch")
res3 should have size 1
}
}
}
| outworkers/phantom | phantom-dsl/src/test/scala/com/outworkers/phantom/builder/query/db/crud/InsertCasTest.scala | Scala | apache-2.0 | 2,608 |
/**
* Tries to load a symbol for the `Foo$class` using Scala reflection.
* Since trait implementation classes do not get pickling information,
* a symbol for them should be created using a fallback mechanism
* that exposes Java reflection information dressed up in
* a Scala symbol.
*/
object Test extends App with Outer {
import scala.reflect.{ClassTag, classTag}
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
assert(cm.classSymbol(classTag[Foo].runtimeClass).info.decl(TermName("bar")).info ==
cm.classSymbol(classTag[Bar].runtimeClass).info.decl(TermName("foo")).info)
val s1 = implClass(classTag[Foo].runtimeClass)
assert(s1 != NoSymbol)
assert(s1.info != NoType)
assert(s1.companion.info != NoType)
assert(s1.companion.info.decl(TermName("bar")) != NoSymbol)
val s2 = implClass(classTag[Bar].runtimeClass)
assert(s2 != NoSymbol)
assert(s2.info != NoType)
assert(s2.companion.info != NoType)
assert(s2.companion.info.decl(TermName("foo")) != NoSymbol)
def implClass(clazz: Class[_]) = {
val implClass = Class.forName(clazz.getName + "$class")
cm.classSymbol(implClass)
}
}
trait Foo {
def bar = 1
}
trait Outer {
trait Bar {
def foo = 1
}
}
| som-snytt/dotty | tests/disabled/reflect/run/reflection-implClass.scala | Scala | apache-2.0 | 1,249 |
package edu.arizona.sista.discourse.rstparser
import org.slf4j.LoggerFactory
import edu.arizona.sista.utils.{Files, StringUtils}
import edu.arizona.sista.processors.Document
import scala.collection.mutable.ListBuffer
import scala.collection.mutable
import EDUClassifier._
import edu.arizona.sista.struct.Counter
import edu.arizona.sista.learning._
import java.io._
import Utils._
/**
* Detects EDU boundaries
* User: mihais
* Date: 4/12/14
*/
class EDUClassifier {
var classifier:Classifier[String, String] = null
var scaleRanges:ScaleRange[String] = null
def isTrained:Boolean = classifier != null
def saveTo(w:Writer) {
classifier.saveTo(w)
scaleRanges.saveTo(w)
}
/**
* Trains using all .dis in the given directory
*/
def train(trees:List[(DiscourseTree, Document)], corpusStats:CorpusStats) {
logger.debug("Constructing dataset...")
scaleRanges = null
val dataset = constructDataset(trees)
logger.debug("Scaling dataset...")
scaleRanges = Datasets.svmScaleDataset(dataset)
logger.debug("Training...")
classifier = new PerceptronClassifier[String, String](epochs = 5)
//classifier = new LinearSVMClassifier[String, String]()
//classifier = new LibSVMClassifier[String, String](PolynomialKernel)
//classifier = new LogisticRegressionClassifier[String, String]()
//classifier = new RandomForestClassifier[String, String](numTrees = 20)
//classifier = new BaggingClassifier[String, String](svmFactory, 10, new Random(1))
classifier.train(dataset)
}
/** Tests the standalone segmenter for intra-sentence classification */
def test (trees:List[(DiscourseTree, Document)]) {
val tokenStats = new Counter[Boolean]()
val tokens = extractEDUTokens(trees, tokenStats)
logger.info("Found " + tokens.size + " EDU tokens in TESTING: " +
tokenStats.getCount(true) + " +, " +
tokenStats.getCount(false) + " -.")
val datums = mkDatums(tokens)
val output = new ListBuffer[(String, String)]
var i = 0
for(datum <- datums) {
val token = tokens(i)
val l = classOf(datum)
output += new Tuple2(datum.label, l)
if(datum.label == POS && l == NEG) {
assert(token.eduStart)
println("MISSED THIS TOKEN:")
report(token, "FN")
} else if(datum.label == NEG && l == POS) {
assert(! token.eduStart)
println("FALSE POSITIVE:")
report(token, "FP")
}
i += 1
}
val (p, r, f, correct, predicted, total) = f1(output.toList)
logger.info(s"P = $p + ($correct / $predicted)")
logger.info(s"R = $r + ($correct / $total)")
logger.info(s"F1 = $f")
}
def report(token:EDUToken, errType:String) {
val doc = token.doc
val sent = doc.sentences(token.position.sentence)
val offset = token.position.token
val context = 10
print("...")
for(i <- scala.math.max(0, offset - context) until scala.math.min(offset + context, sent.size)) {
print(" ")
if(i == offset) print("[[")
print(sent.words(i))
if(i == offset) print("]]")
}
println("...")
println("Incoming dependencies for token:")
val inc = deps(sent).incomingEdges
if(offset < inc.size) {
for (d <- inc(offset)) {
println("\t" + sent.words(d._1) + "\t" + d._2)
}
}
if(offset < deps(sent).outgoingEdges.size) {
val (_, top) = featureExtractor.pathToRoot(offset, deps(sent).incomingEdges)
val leftMost = deps(sent).outgoingEdges(offset).size == 0
println(errType + "\tleftmost:" + top + "|" + leftMost)
}
}
/**
* Extracts all document tokens, identifying if they are the beginning of an intra-sentence EDU or not.
* It skips the first tokens in each sentence because they are not useful during classification (they always begin an EDU).
* @param trees Extract tokens from all these trees
* @return All tokens to be used during classification
*/
private def extractEDUTokens(trees:List[(DiscourseTree, Document)], tokenStats:Counter[Boolean]):Array[EDUToken] = {
val tokens = new ListBuffer[EDUToken]
for(tree <- trees) {
// match known connectives in this document
val connectives = ConnectiveMatcher.matchConnectives(tree._2)
val docTokens = new ListBuffer[EDUToken]
// find positive examples
val starts = new mutable.HashSet[TokenOffset]()
findEDUStarts(tree._1, starts)
for(s <- starts) {
docTokens += new EDUToken(s, tree._2, connectives, true)
tokenStats.incrementCount(true)
}
// find negative examples
val doc = tree._2
for(si <- 0 until doc.sentences.size) {
val sent = doc.sentences(si)
// skip the last token in a sentence: it can never be the start of an EDU
for(ti <- 0 until sent.words.size - 1) {
val p = new TokenOffset(si, ti)
if(! starts.contains(p)) {
docTokens += new EDUToken(p, doc, connectives, false)
tokenStats.incrementCount(false)
}
}
}
// sort candidates in sentence/token order in this document
tokens ++= docTokens.toList.sortWith(boundarySort)
}
//printEDUBoundaries(tokens)
//eduBoundaryStats(tokens)
tokens.toArray
}
def boundarySort(e1:EDUToken, e2:EDUToken):Boolean = {
if(e1.position.sentence < e2.position.sentence) return true
if(e1.position.sentence > e2.position.sentence) return false
if(e1.position.token < e2.position.token) return true
false
}
def eduBoundaryStats(tokens:Iterable[EDUToken]) {
val posCounts = new Counter[String]()
val decileCounts = new Counter[Int]()
for(t <- tokens) {
if(t.eduStart) {
val s = t.doc.sentences(t.position.sentence)
if(s.tags.isDefined) {
val crt = s.tags.get(t.position.token)
posCounts.incrementCount(crt)
}
decileCounts.incrementCount(Utils.toDecile(t.position.token, s.size))
}
}
println("HISTOGRAM OF POS TAGS:")
val tags = posCounts.sorted
for(t <- tags) println(t._1 + "\t" + t._2)
println("HISTOGRAM OF DECILE COUNTS:")
val decs = decileCounts.sorted
for(d <- decs) println(d._1 + "\t" + d._2)
}
def printEDUBoundaries(tokens:Iterable[EDUToken]) {
for(t <- tokens) {
if(t.eduStart) {
val s = t.doc.sentences(t.position.sentence)
val crt = s.words(t.position.token)
val crtTag = s.tags.get(t.position.token)
val prev = s.words(t.position.token - 1)
var next = "END"
if(t.position.token < s.size - 1)
next = s.words(t.position.token + 1)
println("... " + prev + " " + crt + " " + next + " ...")
if(next == "END") {
print("ENDSENT: ")
for(w <- s.words) print(w + " ")
println()
}
if(crtTag == ":") {
print("COLONSENT: ")
for(w <- s.words) print(w + " ")
println()
}
}
}
}
def findEDUStarts(tree:DiscourseTree, starts:mutable.HashSet[TokenOffset]) {
if(tree.isTerminal) {
val s = tree.firstToken
// we only care about EDUs that are intra sentence, so we ignore EDUs that start at the beginning of sentence
if(s.token != 0) {
starts.add(s)
}
} else {
for(c <- tree.children) {
findEDUStarts(c, starts)
}
}
}
private def mkDataset(tokens:Array[EDUToken]):Dataset[String, String] = {
val dataset = new RVFDataset[String, String]()
for(i <- 0 until tokens.size) {
val token = tokens(i)
val datum = mkDatum(token)
dataset += datum
}
dataset
}
private def mkDatums(tokens:Array[EDUToken]):Iterable[Datum[String, String]] = {
val datums = new ListBuffer[Datum[String, String]]
for(i <- 0 until tokens.size) {
val token = tokens(i)
val datum = mkDatum(token)
datums += datum
}
datums.toList
}
def mkDatum(token:EDUToken):Datum[String, String] = {
var label = NEG
if(token.eduStart) label = POS
val feats = featureExtractor.mkFeatures(token.position, token.doc, token.connectives)
if(scaleRanges != null) {
val scaledFeats = Datasets.svmScaleDatum(feats, scaleRanges)
new RVFDatum[String, String](label, scaledFeats)
} else {
new RVFDatum[String, String](label, feats)
}
}
def classOf(datum:Datum[String, String]) = classifier.classOf(datum)
def constructDataset(trees:List[(DiscourseTree, Document)]):Dataset[String, String] = {
// find all intra-sentence tokens that can be EDU boundaries
val tokenStats = new Counter[Boolean]()
val tokens = extractEDUTokens(trees, tokenStats)
logger.info("Found " + tokens.size + " EDU tokens: " +
tokenStats.getCount(true) + " +, " +
tokenStats.getCount(false) + " -.")
//for(t <- tokens) println(t.position.sentence + "\t" + t.position.token + "\t" + t.eduStart)
// make the actual dataset with positive and negative examples
val dataset = mkDataset(tokens)
dataset
}
def f1(output:Iterable[(String, String)]):(Double, Double, Double, Int, Int, Int) = {
var total = 0
var pred = 0
var correct = 0
for(o <- output) {
val gold = o._1
val sys = o._2
if(gold == POS) total += 1
if(sys == POS) {
pred += 1
if(sys == gold) correct += 1
}
}
val p = correct.toDouble / pred.toDouble
val r = correct.toDouble / total.toDouble
val f = 2 * p * r / (p + r)
(p, r, f, correct, pred, total)
}
def simpleF1(output:Iterable[(String, String)]):Double = f1(output)._3
def featureSelectionIncremental(trees:List[(DiscourseTree, Document)], corpusStats:CorpusStats) {
val dataset = constructDataset(trees)
val featureGroups = Utils.findFeatureGroups(":", dataset.featureLexicon)
logger.debug(s"Found ${featureGroups.size} feature groups:")
for(f <- featureGroups.keySet) {
logger.debug(s"Group $f containing ${featureGroups.get(f).get.size} features.")
}
val chosenGroups = Datasets.incrementalFeatureSelection(
dataset, Utils.svmFactory, simpleF1, featureGroups)
logger.info(s"Selected ${chosenGroups.size} feature groups: " + chosenGroups)
}
def featureSelectionByInformativeness(trees:List[(DiscourseTree, Document)], corpusStats:CorpusStats) {
val dataset = constructDataset(trees)
    val chosenFeatures = Datasets.featureSelectionByInformativeness(dataset, Utils.svmFactory, simpleF1)
    logger.info(s"Selected ${chosenFeatures.size} features by informativeness.")
}
def featureSelectionByFrequency(trees:List[(DiscourseTree, Document)], corpusStats:CorpusStats) {
val dataset = constructDataset(trees)
    val chosenFeatures = Datasets.featureSelectionByFrequency(dataset, Utils.svmFactory, simpleF1)
    logger.info(s"Selected ${chosenFeatures.size} features by frequency.")
}
}
class EDUToken (val position:TokenOffset, val doc:Document, val connectives:Array[Array[String]], val eduStart:Boolean)
object EDUClassifier {
val logger = LoggerFactory.getLogger(classOf[EDUClassifier])
val featureExtractor = new EDUFeatureExtractor
val POS = "+"
val NEG = "-"
def main(args:Array[String]) {
val props = StringUtils.argsToProperties(args)
var cls:EDUClassifier = null
if(props.containsKey("train")) {
cls = new EDUClassifier
val (trees, corpusStats) = RSTParser.mkTrees(props.getProperty("train"),
CacheReader.getProcessor(props.containsKey("dep")))
cls.train(trees, corpusStats)
if(props.containsKey("model")) {
val os = new PrintWriter(new BufferedWriter(new FileWriter(props.getProperty("model"))))
cls.saveTo(os)
os.close()
}
}
if(props.containsKey("test")) {
val (trees, _) = RSTParser.mkTrees(props.getProperty("test"),
CacheReader.getProcessor(props.containsKey("dep")),
makeStats = false)
if(props.containsKey("model")) {
val is = new BufferedReader(new FileReader(props.getProperty("model")))
cls = loadFrom(is)
is.close()
}
cls.test(trees)
}
if(props.containsKey("fsel")) {
cls = new EDUClassifier
val (trees, corpusStats) = RSTParser.mkTrees(props.getProperty("fsel"),
CacheReader.getProcessor(props.containsKey("dep")))
cls.featureSelectionIncremental(trees, corpusStats)
}
}
def loadFrom(r:java.io.Reader):EDUClassifier = {
val edu = new EDUClassifier
val reader = Files.toBufferedReader(r)
val c = PerceptronClassifier.loadFrom[String, String](reader)
val sr = ScaleRange.loadFrom[String](reader)
edu.classifier = c
edu.scaleRanges = sr
edu
}
}
|
capdevc/processors
|
src/main/scala/edu/arizona/sista/discourse/rstparser/EDUClassifier.scala
|
Scala
|
apache-2.0
| 12,575 |
package com.programmaticallyspeaking.ncd.nashorn
import com.programmaticallyspeaking.ncd.host._
import com.programmaticallyspeaking.ncd.host.types.{ExceptionData, Undefined}
import org.scalatest.prop.TableDrivenPropertyChecks
import scala.concurrent.Promise
import scala.util.{Failure, Success, Try}
class CallFunctionOnTest extends EvaluateTestFixture with TableDrivenPropertyChecks {
private def testSuccess[A](tr: Try[A])(tester: (A) => Unit): Unit = tr match {
case Success(value) => tester(value)
case Failure(t) => fail(t)
}
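  // Runs the script until it pauses, evaluates objName on the top stack frame, and passes the resulting ObjectId to the test body.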
def testObjectValue(script: String, objName: String)(f: ObjectId => Unit): Unit = {
evaluateInScript(script)({ (host, stackframes) =>
host.evaluateOnStackFrame(stackframes.head.id, objName) match {
case Success(cn: ComplexNode) => f(cn.objectId)
case Success(other) => fail("Unexpected evaluate result: " + other)
case Failure(t) => fail("Error", t)
}
})
}
def testObjectValue(f: ObjectId => Unit): Unit = {
val script =
"""
|function fun() {
| var obj = { value: 42 };
| debugger;
| obj.toString();
|}
|fun();
""".stripMargin
testObjectValue(script, "obj")(f)
}
"callFunctionOn" - {
"works for access to 'this'" in {
val funcDecl = "function () { return this.value; }"
testObjectValue { objId =>
val retVal = getHost.callFunctionOn(StackFrame.TopId, Some(objId), funcDecl, Seq.empty)
retVal should be(Success(SimpleValue(42)))
}
}
"works with argument" in {
val funcDecl = "function (x) { return x.value; }"
testObjectValue { objId =>
val retVal = getHost.callFunctionOn(StackFrame.TopId, None, funcDecl, Seq(objId))
retVal should be(Success(SimpleValue(42)))
}
}
"can access Object in a strict mode function" in {
val script =
"""
|function fun() {
| 'use strict';
| var obj = { value: 99 };
| debugger;
| obj.toString();
|}
|fun();
""".stripMargin
testObjectValue(script, "obj") { objId =>
getHost.callFunctionOn(StackFrame.TopId, None, "function (x) { return Object.getOwnPropertyNames(x); }", Seq(objId)) match {
case Success(an: ArrayNode) =>
an.size should be (1)
case Success(other) => fail("Unexpected callFunctionOn result: " + other)
case Failure(t) => fail("Error", t)
}
}
}
}
}
|
provegard/ncdbg
|
src/test/scala/com/programmaticallyspeaking/ncd/nashorn/CallFunctionOnTest.scala
|
Scala
|
bsd-3-clause
| 2,532 |
package teleporter.integration.component.hdfs
import java.util.Properties
import com.google.common.io.Resources
import io.leopard.javahost.JavaHost
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.scalatest.FunSuite
/**
* Created by huanwuji
* date 2017/2/8.
*/
class Hdfs$Test extends FunSuite {
val props = new Properties
props.load(IOUtils.toInputStream(
"""
host1=localhost
"""))
JavaHost.updateVirtualDns(props)
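  // Smoke test: builds a FileSystem from the bundled site configs and lists the root directory (no assertions are made).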
test("hdfs connect") {
val conf = new Configuration(false)
// conf.addResource("/core-default.xml")
// conf.addResource("/core-site.xml")
// conf.addResource("/mapred-default.xml")
// conf.addResource("/mapred-site.xml")
// conf.addResource("/yarn-default.xml")
// conf.addResource("/yarn-site.xml")
// conf.addResource("/hdfs-default.xml")
// conf.addResource("/hdfs-site.xml")
// conf.reloadConfiguration()
conf.addResource(Resources.getResource("core-site.xml").openStream())
conf.addResource(Resources.getResource("hdfs-site.xml").openStream())
conf.addResource(Resources.getResource("ssl-client.xml").openStream())
conf.reloadConfiguration()
val fileSystem = FileSystem.get(conf)
val files = fileSystem.listFiles(new Path("/"), false)
files
}
}
|
huanwuji/teleporter
|
src/test/scala/teleporter/integration/component/hdfs/Hdfs$Test.scala
|
Scala
|
agpl-3.0
| 1,387 |
package com.sksamuel.avro4s.schema
import java.util.UUID
import com.sksamuel.avro4s.AvroSchema
import org.scalatest.{Matchers, WordSpec}
class UUIDSchemaTest extends WordSpec with Matchers {
case class UUIDTest(uuid: UUID)
case class UUIDSeq(uuids: Seq[UUID])
case class UUIDDefault(uuid: UUID = UUID.fromString("86da265c-95bd-443c-8860-9381efca059d"))
case class UUIDOption(uuid: Option[UUID])
"SchemaEncoder" should {
"support UUID logical types" in {
val schema = AvroSchema[UUIDTest]
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/uuid.json"))
schema shouldBe expected
}
"support Option[UUID] as a union" in {
val schema = AvroSchema[UUIDOption]
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/uuid_option.json"))
schema shouldBe expected
}
"support UUID with default value" in {
val schema = AvroSchema[UUIDDefault]
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/uuid_default.json"))
schema shouldBe expected
}
"support Seq[UUID] as an array of logical types" in {
val schema = AvroSchema[UUIDSeq]
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/uuid_seq.json"))
schema shouldBe expected
}
}
}
|
51zero/avro4s
|
avro4s-core/src/test/scala/com/sksamuel/avro4s/schema/UUIDSchemaTest.scala
|
Scala
|
mit
| 1,382 |
/** soar
*
* Copyright (c) 2017 Hugo Firth
* Email: <[email protected]/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ncl.la.soar.glance.eval.db
import java.sql.Timestamp
import java.time.Instant
import java.util.UUID
import doobie.imports._
import monix.eval.Task
import monix.cats._
import cats._
import cats.data.{NonEmptyVector, OptionT}
import cats.implicits._
import uk.ac.ncl.la.soar.{ModuleCode, StudentNumber}
import uk.ac.ncl.la.soar.db.{Repository, RepositoryCompanion}
import uk.ac.ncl.la.soar.glance.eval.{
ClusterSession,
Collection,
RecapSession,
Survey
}
class CollectionDb private[glance] (xa: Transactor[Task])
extends Repository[Collection] {
import CollectionDb._
type PK = UUID
override val init = initQ.transact(xa)
override val list = listQ.transact(xa)
override def find(id: UUID) = findQ(id).transact(xa)
def findIdx(id: UUID, idx: Int): F[Option[Survey]] =
findIdxQ(id, idx).transact(xa)
def findFirst(id: UUID): F[Option[Survey]] = findIdx(id, 0)
override def save(entry: Collection) = saveQ(entry).transact(xa)
override def delete(id: UUID) = deleteQ(id).transact(xa)
}
object CollectionDb extends RepositoryCompanion[Collection, CollectionDb] {
type CollectionRow = (UUID, ModuleCode, Int)
type CollectionMembership = (UUID, Int, Boolean)
type CollectionMembershipRow = (UUID, UUID, Int, Boolean)
implicit val uuidMeta: Meta[UUID] =
Meta[String].nxmap(UUID.fromString, _.toString)
override val initQ: ConnectionIO[Unit] = ().pure[ConnectionIO]
override val listQ: ConnectionIO[List[Collection]] = {
type Row = (UUID, ModuleCode, Int, UUID, Int)
//TODO: Read up on process/stream api from fs2!
val rowsQ =
sql"""
SELECT c.id, c.module_num, c.num_entries, cm.survey_id, cm.membership_idx
FROM collection c, collection_membership cm
        WHERE c.id = cm.collection_id;
""".query[Row].list
rowsQ.map { rows: List[Row] =>
val collToMem = rows.groupBy(r => (r._1, r._2, r._3))
val memSorted = collToMem.collect {
case (c, m) if m.nonEmpty => c -> m.sortBy(_._2).map(_._1)
}
val collOpts = memSorted.flatMap {
case (c, hd :: tl) =>
Some(Collection(c._1, c._2, NonEmptyVector(hd, tl.toVector)))
        case (_, Nil) => None
}
collOpts.toList
}
}
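  // Looks up the survey stored at position idx within the given collection, if both the collection and that index exist.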
def findIdxQ(id: UUID, idx: Int): ConnectionIO[Option[Survey]] = {
val surveyOpt = for {
collection <- OptionT(findQ(id))
surveyId <- OptionT.fromOption[ConnectionIO](
collection.surveyIds.get(idx))
survey <- OptionT(SurveyDb.findQ(surveyId))
} yield survey
surveyOpt.value
}
//TODO: Look at refactoring or factoring out this method and the above. One must be better than the other
override def findQ(id: UUID): ConnectionIO[Option[Collection]] = {
for {
cR <- findCollectionRowQ(id)
cMR <- findCollectionMembershipsQ(id)
} yield
cR.flatMap {
case (cId, module, num) =>
cMR.toVector.sortBy(_._2).map(_._1) match {
case hd +: tl =>
Some(Collection(cId, module, NonEmptyVector(hd, tl)))
case _ => None
}
}
}
private def findCollectionRowQ(id: UUID) =
sql"""
SELECT c.id, c.module_num, c.num_entries FROM collection c WHERE c.id = $id;
""".query[CollectionRow].option
private def findCollectionMembershipsQ(id: UUID) =
sql"""
SELECT c.survey_id, c.membership_idx, c.last
FROM collection_membership c WHERE c.collection_id = $id;
""".query[CollectionMembership].list
override def saveQ(entry: Collection): ConnectionIO[Unit] = {
//Batch insert entries into collection_membership table
val addMembershipsSQL =
"""
INSERT INTO collection_membership (collection_id, survey_id, membership_idx, last)
VALUES (?, ?, ?, ?);
"""
val addCollectionQ =
sql"""
INSERT INTO collection (id, module_num, num_entries)
VALUES (${entry.id}, ${entry.module}, ${entry.numEntries});
""".update.run
//Prepare membership rows
val surveyIndices = entry.surveyIds.toVector.zipWithIndex
//Add collection id and "last" flag
val last = entry.numEntries - 1
val membershipRows = surveyIndices.map {
case (s, i) if i == last => (entry.id, s, i, true)
case (s, i) => (entry.id, s, i, false)
}
//Add all db rows
for {
_ <- addCollectionQ
_ <- Update[CollectionMembershipRow](addMembershipsSQL)
.updateMany(membershipRows)
} yield ()
}
override def deleteQ(id: UUID) =
sql"DELETE FROM collection c WHERE c.id = $id;".update.run.map(_ > 0)
}
|
NewcastleComputingScience/student-outcome-accelerator
|
glance-core/jvm/src/main/scala/uk/ac/ncl/la/soar/glance/eval/db/CollectionRepository.scala
|
Scala
|
apache-2.0
| 5,256 |
package com.kjetland.jackson.jsonSchema
import java.time.{LocalDate, LocalDateTime, OffsetDateTime}
import java.util
import java.util.{Collections, Optional, TimeZone}
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.node.{ArrayNode, MissingNode, ObjectNode}
import com.fasterxml.jackson.databind.{JavaType, JsonNode, ObjectMapper, SerializationFeature}
import com.fasterxml.jackson.datatype.jdk8.Jdk8Module
import com.fasterxml.jackson.datatype.joda.JodaModule
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule
import com.fasterxml.jackson.module.kotlin.KotlinModule
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.github.fge.jsonschema.main.JsonSchemaFactory
import com.kjetland.jackson.jsonSchema.testData.GenericClass.GenericClassVoid
import com.kjetland.jackson.jsonSchema.testData.MapLike.GenericMapLike
import com.kjetland.jackson.jsonSchema.testData._
import com.kjetland.jackson.jsonSchema.testData.generic.GenericClassContainer
import com.kjetland.jackson.jsonSchema.testData.mixin.{MixinChild1, MixinModule, MixinParent}
import com.kjetland.jackson.jsonSchema.testData.polymorphism1.{Child1, Child2, Parent}
import com.kjetland.jackson.jsonSchema.testData.polymorphism2.{Child21, Child22, Parent2}
import com.kjetland.jackson.jsonSchema.testData.polymorphism3.{Child31, Child32, Parent3}
import com.kjetland.jackson.jsonSchema.testData.polymorphism4.{Child41, Child42}
import com.kjetland.jackson.jsonSchema.testData.polymorphism5.{Child51, Child52, Parent5}
import com.kjetland.jackson.jsonSchema.testData.polymorphism6.{Child61, Parent6}
import com.kjetland.jackson.jsonSchema.testDataScala._
import com.kjetland.jackson.jsonSchema.testData_issue_24.EntityWrapper
import javax.validation.groups.Default
import org.scalatest.{FunSuite, Matchers}
import scala.collection.JavaConverters._
class JsonSchemaGeneratorTest extends FunSuite with Matchers {
val _objectMapper = new ObjectMapper()
val _objectMapperScala = new ObjectMapper().registerModule(new DefaultScalaModule)
val _objectMapperKotlin = new ObjectMapper().registerModule(new KotlinModule())
val mixinModule = new MixinModule
List(_objectMapper, _objectMapperScala).foreach {
om =>
val simpleModule = new SimpleModule()
simpleModule.addSerializer(classOf[PojoWithCustomSerializer], new PojoWithCustomSerializerSerializer)
simpleModule.addDeserializer(classOf[PojoWithCustomSerializer], new PojoWithCustomSerializerDeserializer)
om.registerModule(simpleModule)
om.registerModule(new JavaTimeModule)
om.registerModule(new Jdk8Module)
om.registerModule(new JodaModule)
// For the mixin-test
om.registerModule(mixinModule)
om.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false)
om.setTimeZone(TimeZone.getDefault)
}
val jsonSchemaGenerator = new JsonSchemaGenerator(_objectMapper, debug = true)
val jsonSchemaGeneratorHTML5 = new JsonSchemaGenerator(_objectMapper, debug = true, config = JsonSchemaConfig.html5EnabledSchema)
val jsonSchemaGeneratorScala = new JsonSchemaGenerator(_objectMapperScala, debug = true)
val jsonSchemaGeneratorScalaHTML5 = new JsonSchemaGenerator(_objectMapperScala, debug = true, config = JsonSchemaConfig.html5EnabledSchema)
val jsonSchemaGeneratorKotlin = new JsonSchemaGenerator(_objectMapperKotlin, debug = true)
val vanillaJsonSchemaDraft4WithIds = JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(useTypeIdForDefinitionName = true)
val jsonSchemaGeneratorWithIds = new JsonSchemaGenerator(_objectMapperScala, debug = true, vanillaJsonSchemaDraft4WithIds)
val jsonSchemaGeneratorNullable = new JsonSchemaGenerator(_objectMapper, debug = true, config = JsonSchemaConfig.nullableJsonSchemaDraft4)
val jsonSchemaGeneratorHTML5Nullable = new JsonSchemaGenerator(_objectMapper, debug = true,
config = JsonSchemaConfig.html5EnabledSchema.copy(useOneOfForNullables = true))
val jsonSchemaGeneratorWithIdsNullable = new JsonSchemaGenerator(_objectMapperScala, debug = true,
vanillaJsonSchemaDraft4WithIds.copy(useOneOfForNullables = true))
val jsonSchemaGenerator_draft_06 = new JsonSchemaGenerator(_objectMapper, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.withJsonSchemaDraft(JsonSchemaDraft.DRAFT_06))
val jsonSchemaGenerator_draft_07 = new JsonSchemaGenerator(_objectMapper, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.withJsonSchemaDraft(JsonSchemaDraft.DRAFT_07))
val jsonSchemaGenerator_draft_2019_09 = new JsonSchemaGenerator(_objectMapper, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.withJsonSchemaDraft(JsonSchemaDraft.DRAFT_2019_09))
val testData = new TestData{}
def asPrettyJson(node:JsonNode, om:ObjectMapper):String = {
om.writerWithDefaultPrettyPrinter().writeValueAsString(node)
}
// Asserts that we're able to go from object => json => equal object
def assertToFromJson(g:JsonSchemaGenerator, o:Any): JsonNode = {
assertToFromJson(g, o, o.getClass)
}
// Asserts that we're able to go from object => json => equal object
// desiredType might be a class which o extends (polymorphism)
def assertToFromJson(g:JsonSchemaGenerator, o:Any, desiredType:Class[_]): JsonNode = {
val json = g.rootObjectMapper.writeValueAsString(o)
println(s"json: $json")
val jsonNode = g.rootObjectMapper.readTree(json)
val r = g.rootObjectMapper.treeToValue(jsonNode, desiredType)
assert(o == r)
jsonNode
}
def useSchema(jsonSchema:JsonNode, jsonToTestAgainstSchema:Option[JsonNode] = None): Unit = {
val schemaValidator = JsonSchemaFactory.byDefault().getJsonSchema(jsonSchema)
jsonToTestAgainstSchema.foreach {
node =>
val r = schemaValidator.validate(node)
if (!r.isSuccess) {
throw new Exception("json does not validate against schema: " + r)
}
}
}
// Generates schema, validates the schema using external schema validator and
// Optionally tries to validate json against the schema.
def generateAndValidateSchema
(
g:JsonSchemaGenerator,
clazz:Class[_], jsonToTestAgainstSchema:Option[JsonNode] = None,
jsonSchemaDraft: JsonSchemaDraft = JsonSchemaDraft.DRAFT_04
):JsonNode = {
val schema = g.generateJsonSchema(clazz)
println("--------------------------------------------")
println(asPrettyJson(schema, g.rootObjectMapper))
assert(jsonSchemaDraft.url == schema.at("/$schema").asText())
useSchema(schema, jsonToTestAgainstSchema)
schema
}
// Generates schema, validates the schema using external schema validator and
// Optionally tries to validate json against the schema.
def generateAndValidateSchemaUsingJavaType
(
g:JsonSchemaGenerator,
javaType:JavaType,
jsonToTestAgainstSchema:Option[JsonNode] = None,
jsonSchemaDraft: JsonSchemaDraft = JsonSchemaDraft.DRAFT_04
):JsonNode = {
val schema = g.generateJsonSchema(javaType)
println("--------------------------------------------")
println(asPrettyJson(schema, g.rootObjectMapper))
assert(jsonSchemaDraft.url == schema.at("/$schema").asText())
useSchema(schema, jsonToTestAgainstSchema)
schema
}
def assertJsonSubTypesInfo(node:JsonNode, typeParamName:String, typeName:String, html5Checks:Boolean = false): Unit ={
/*
"properties" : {
"type" : {
"type" : "string",
"enum" : [ "child1" ],
"default" : "child1"
},
},
"title" : "child1",
"required" : [ "type" ]
*/
assert(node.at(s"/properties/$typeParamName/type").asText() == "string")
assert(node.at(s"/properties/$typeParamName/enum/0").asText() == typeName)
assert(node.at(s"/properties/$typeParamName/default").asText() == typeName)
assert(node.at(s"/title").asText() == typeName)
assertPropertyRequired(node, typeParamName, required = true)
if (html5Checks) {
assert(node.at(s"/properties/$typeParamName/options/hidden").asBoolean())
assert(node.at(s"/options/multiple_editor_select_via_property/property").asText() == typeParamName)
assert(node.at(s"/options/multiple_editor_select_via_property/value").asText() == typeName)
} else {
assert(node.at(s"/options/multiple_editor_select_via_property/property").isInstanceOf[MissingNode])
}
}
def getArrayNodeAsListOfStrings(node:JsonNode):List[String] = {
node match {
case _:MissingNode => List()
case x:ArrayNode => x.asScala.toList.map(_.asText())
}
}
def getRequiredList(node:JsonNode):List[String] = {
getArrayNodeAsListOfStrings(node.at(s"/required"))
}
def assertPropertyRequired(schema:JsonNode, propertyName:String, required:Boolean): Unit = {
if (required) {
assert(getRequiredList(schema).contains(propertyName))
} else {
assert(!getRequiredList(schema).contains(propertyName))
}
}
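  // Resolves the $ref (from a single ref node or an array of refs) whose target name ends with definitionName and returns the referenced definition.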
def getNodeViaRefs(root:JsonNode, pathToArrayOfRefs:String, definitionName:String):JsonNode = {
val arrayItemNodes:List[JsonNode] = root.at(pathToArrayOfRefs) match {
case arrayNode:ArrayNode => arrayNode.iterator().asScala.toList
case objectNode:ObjectNode => List(objectNode)
}
val ref = arrayItemNodes.map(_.get("$ref").asText()).find(_.endsWith(s"/$definitionName")).get
// use ref to look the node up
val fixedRef = ref.substring(1) // Removing starting #
root.at(fixedRef)
}
def getNodeViaRefs(root:JsonNode, nodeWithRef:JsonNode, definitionName:String):ObjectNode = {
val ref = nodeWithRef.at("/$ref").asText()
assert(ref.endsWith(s"/$definitionName"))
// use ref to look the node up
val fixedRef = ref.substring(1) // Removing starting #
root.at(fixedRef).asInstanceOf[ObjectNode]
}
test("Generate scheme for plain class not using @JsonTypeInfo") {
val enumList = MyEnum.values().toList.map(_.toString)
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/someString/type").asText() == "string")
assert(schema.at("/properties/myEnum/type").asText() == "string")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnum/enum")) == enumList)
}
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assertNullableType(schema, "/properties/someString", "string")
assertNullableType(schema, "/properties/myEnum", "string")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnum/oneOf/1/enum")) == enumList)
}
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.classNotExtendingAnythingScala)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.classNotExtendingAnythingScala.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/someString/type").asText() == "string")
assert(schema.at("/properties/myEnum/type").asText() == "string")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnum/enum")) == enumList)
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnumO/enum")) == enumList)
}
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.genericClassVoid)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.genericClassVoid.getClass, Some(jsonNode))
assert(schema.at("/type").asText() == "object")
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/content/type").asText() == "null")
assert(schema.at("/properties/list/type").asText() == "array")
assert(schema.at("/properties/list/items/type").asText() == "null")
}
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.genericMapLike)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.genericMapLike.getClass, Some(jsonNode))
assert(schema.at("/type").asText() == "object")
assert(schema.at("/additionalProperties/type").asText() == "string")
}
}
test("Generating schema for concrete class which happens to extend class using @JsonTypeInfo") {
def doTest(pojo:Object, clazz:Class[_], g:JsonSchemaGenerator): Unit = {
val jsonNode = assertToFromJson(g, pojo)
val schema = generateAndValidateSchema(g, clazz, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/parentString/type").asText() == "string")
assertJsonSubTypesInfo(schema, "type", "child1")
}
doTest(testData.child1, testData.child1.getClass, jsonSchemaGenerator)
doTest(testData.child1Scala, testData.child1Scala.getClass, jsonSchemaGeneratorScala)
}
test("Generate schema for regular class which has a property of class annotated with @JsonTypeInfo") {
def assertDefaultValues(schema:JsonNode): Unit ={
assert(schema.at("/properties/stringWithDefault/type").asText() == "string")
assert(schema.at("/properties/stringWithDefault/default").asText() == "x")
assert(schema.at("/properties/intWithDefault/type").asText() == "integer")
assert(schema.at("/properties/intWithDefault/default").asInt() == 12)
assert(schema.at("/properties/booleanWithDefault/type").asText() == "boolean")
assert(schema.at("/properties/booleanWithDefault/default").asBoolean())
}
def assertNullableDefaultValues(schema:JsonNode): Unit = {
assert(schema.at("/properties/stringWithDefault/oneOf/0/type").asText() == "null")
assert(schema.at("/properties/stringWithDefault/oneOf/0/title").asText() == "Not included")
assert(schema.at("/properties/stringWithDefault/oneOf/1/type").asText() == "string")
assert(schema.at("/properties/stringWithDefault/oneOf/1/default").asText() == "x")
assert(schema.at("/properties/intWithDefault/type").asText() == "integer")
assert(schema.at("/properties/intWithDefault/default").asInt() == 12)
assert(schema.at("/properties/booleanWithDefault/type").asText() == "boolean")
assert(schema.at("/properties/booleanWithDefault/default").asBoolean())
}
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoWithParent)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.pojoWithParent.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/pojoValue/type").asText() == "boolean")
assertDefaultValues(schema)
assertChild1(schema, "/properties/child/oneOf")
assertChild2(schema, "/properties/child/oneOf")
}
// Java - html5
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5, testData.pojoWithParent)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5, testData.pojoWithParent.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/pojoValue/type").asText() == "boolean")
assertDefaultValues(schema)
assertChild1(schema, "/properties/child/oneOf", html5Checks = true)
assertChild2(schema, "/properties/child/oneOf", html5Checks = true)
}
// Java - html5/nullable
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5Nullable, testData.pojoWithParent)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5Nullable, testData.pojoWithParent.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assertNullableType(schema, "/properties/pojoValue", "boolean")
assertNullableDefaultValues(schema)
assertNullableChild1(schema, "/properties/child/oneOf/1/oneOf", html5Checks = true)
assertNullableChild2(schema, "/properties/child/oneOf/1/oneOf", html5Checks = true)
}
//Using fully-qualified class names
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorWithIds, testData.pojoWithParent)
val schema = generateAndValidateSchema(jsonSchemaGeneratorWithIds, testData.pojoWithParent.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/pojoValue/type").asText() == "boolean")
assertDefaultValues(schema)
assertChild1(schema, "/properties/child/oneOf", "com.kjetland.jackson.jsonSchema.testData.polymorphism1.Child1")
assertChild2(schema, "/properties/child/oneOf", "com.kjetland.jackson.jsonSchema.testData.polymorphism1.Child2")
}
// Using fully-qualified class names and nullable types
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorWithIdsNullable, testData.pojoWithParent)
val schema = generateAndValidateSchema(jsonSchemaGeneratorWithIdsNullable, testData.pojoWithParent.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assertNullableType(schema, "/properties/pojoValue", "boolean")
assertNullableDefaultValues(schema)
assertNullableChild1(schema, "/properties/child/oneOf/1/oneOf", "com.kjetland.jackson.jsonSchema.testData.polymorphism1.Child1")
assertNullableChild2(schema, "/properties/child/oneOf/1/oneOf", "com.kjetland.jackson.jsonSchema.testData.polymorphism1.Child2")
}
// Scala
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.pojoWithParentScala)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.pojoWithParentScala.getClass, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/pojoValue/type").asText() == "boolean")
assertDefaultValues(schema)
assertChild1(schema, "/properties/child/oneOf", "Child1Scala")
assertChild2(schema, "/properties/child/oneOf", "Child2Scala")
}
}
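  // Verifies the subtype definition (by default Child1) reached via $ref: type discriminator metadata plus its expected string properties.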
def assertChild1(node:JsonNode, path:String, defName:String = "Child1", typeParamName:String = "type", typeName:String = "child1", html5Checks:Boolean = false): Unit ={
val child1 = getNodeViaRefs(node, path, defName)
assertJsonSubTypesInfo(child1, typeParamName, typeName, html5Checks)
assert(child1.at("/properties/parentString/type").asText() == "string")
assert(child1.at("/properties/child1String/type").asText() == "string")
assert(child1.at("/properties/_child1String2/type").asText() == "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assertPropertyRequired(child1, "_child1String3", required = true)
}
def assertNullableChild1(node:JsonNode, path:String, defName:String = "Child1", html5Checks:Boolean = false): Unit ={
val child1 = getNodeViaRefs(node, path, defName)
assertJsonSubTypesInfo(child1, "type", "child1", html5Checks)
assertNullableType(child1, "/properties/parentString", "string")
assertNullableType(child1, "/properties/child1String", "string")
assertNullableType(child1, "/properties/_child1String2", "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assertPropertyRequired(child1, "_child1String3", required = true)
}
def assertChild2(node:JsonNode, path:String, defName:String = "Child2", typeParamName:String = "type", typeName:String = "child2", html5Checks:Boolean = false): Unit ={
val child2 = getNodeViaRefs(node, path, defName)
assertJsonSubTypesInfo(child2, typeParamName, typeName, html5Checks)
assert(child2.at("/properties/parentString/type").asText() == "string")
assert(child2.at("/properties/child2int/type").asText() == "integer")
}
def assertNullableChild2(node:JsonNode, path:String, defName:String = "Child2", html5Checks:Boolean = false): Unit = {
val child2 = getNodeViaRefs(node, path, defName)
assertJsonSubTypesInfo(child2, "type", "child2", html5Checks)
assertNullableType(child2, "/properties/parentString", "string")
assertNullableType(child2, "/properties/child2int", "integer")
}
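  // Checks the oneOf [null, <type>] encoding used for nullable properties and that the property is not listed as required.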
def assertNullableType(node:JsonNode, path:String, expectedType:String): Unit = {
val nullType = node.at(path).at("/oneOf/0")
assert(nullType.at("/type").asText() == "null")
assert(nullType.at("/title").asText() == "Not included")
val valueType = node.at(path).at("/oneOf/1")
assert(valueType.at("/type").asText() == expectedType)
Option(getRequiredList(node)).map(xs => assert(!xs.contains(path.split('/').last)))
}
test("Generate schema for super class annotated with @JsonTypeInfo - use = JsonTypeInfo.Id.NAME") {
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.child1)
assertToFromJson(jsonSchemaGenerator, testData.child1, classOf[Parent])
val schema = generateAndValidateSchema(jsonSchemaGenerator, classOf[Parent], Some(jsonNode))
assertChild1(schema, "/oneOf")
assertChild2(schema, "/oneOf")
}
// Java + Nullables
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.child1)
assertToFromJson(jsonSchemaGeneratorNullable, testData.child1, classOf[Parent])
val schema = generateAndValidateSchema(jsonSchemaGenerator, classOf[Parent], Some(jsonNode))
assertChild1(schema, "/oneOf")
assertChild2(schema, "/oneOf")
}
// Scala
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.child1Scala)
assertToFromJson(jsonSchemaGeneratorScala, testData.child1Scala, classOf[ParentScala])
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, classOf[ParentScala], Some(jsonNode))
assertChild1(schema, "/oneOf", "Child1Scala")
assertChild2(schema, "/oneOf", "Child2Scala")
}
}
test("Generate schema for super class annotated with @JsonTypeInfo - use = JsonTypeInfo.Id.CLASS") {
// Java
{
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val g = new JsonSchemaGenerator(_objectMapper, debug = true, config)
val jsonNode = assertToFromJson(g, testData.child21)
assertToFromJson(g, testData.child21, classOf[Parent2])
val schema = generateAndValidateSchema(g, classOf[Parent2], Some(jsonNode))
assertChild1(schema, "/oneOf", "Child21", typeParamName = "clazz", typeName = "com.kjetland.jackson.jsonSchema.testData.polymorphism2.Child21")
assertChild2(schema, "/oneOf", "Child22", typeParamName = "clazz", typeName = "com.kjetland.jackson.jsonSchema.testData.polymorphism2.Child22")
}
}
test("Generate schema for super class annotated with @JsonTypeInfo - use = JsonTypeInfo.Id.MINIMAL_CLASS") {
// Java
{
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val g = new JsonSchemaGenerator(_objectMapper, debug = true, config)
val jsonNode = assertToFromJson(g, testData.child51)
assertToFromJson(g, testData.child51, classOf[Parent5])
val schema = generateAndValidateSchema(g, classOf[Parent5], Some(jsonNode))
assertChild1(schema, "/oneOf", "Child51", typeParamName = "clazz", typeName = ".Child51")
assertChild2(schema, "/oneOf", "Child52", typeParamName = "clazz", typeName = ".Child52")
val embeddedTypeName = _objectMapper.valueToTree[ObjectNode](new Parent5.Child51InnerClass()).get("clazz").asText()
assertChild1(schema, "/oneOf", "Child51InnerClass", typeParamName = "clazz", typeName = embeddedTypeName)
}
}
test("Generate schema for interface annotated with @JsonTypeInfo - use = JsonTypeInfo.Id.MINIMAL_CLASS") {
// Java
{
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val g = new JsonSchemaGenerator(_objectMapper, debug = true, config)
val jsonNode = assertToFromJson(g, testData.child61)
assertToFromJson(g, testData.child61, classOf[Parent6])
val schema = generateAndValidateSchema(g, classOf[Parent6], Some(jsonNode))
assertChild1(schema, "/oneOf", "Child61", typeParamName = "clazz", typeName = ".Child61")
assertChild2(schema, "/oneOf", "Child62", typeParamName = "clazz", typeName = ".Child62")
}
}
test("Generate schema for super class annotated with @JsonTypeInfo - include = JsonTypeInfo.As.EXISTING_PROPERTY") {
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.child31)
assertToFromJson(jsonSchemaGenerator, testData.child31, classOf[Parent3])
val schema = generateAndValidateSchema(jsonSchemaGenerator, classOf[Parent3], Some(jsonNode))
assertChild1(schema, "/oneOf", "Child31", typeName = "child31")
assertChild2(schema, "/oneOf", "Child32", typeName = "child32")
}
}
test("Generate schema for super class annotated with @JsonTypeInfo - include = JsonTypeInfo.As.CUSTOM") {
// Java
{
val jsonNode1 = assertToFromJson(jsonSchemaGenerator, testData.child41)
val jsonNode2 = assertToFromJson(jsonSchemaGenerator, testData.child42)
val schema1 = generateAndValidateSchema(jsonSchemaGenerator, classOf[Child41], Some(jsonNode1))
val schema2 = generateAndValidateSchema(jsonSchemaGenerator, classOf[Child42], Some(jsonNode2))
assertJsonSubTypesInfo(schema1, "type", "Child41")
assertJsonSubTypesInfo(schema2, "type", "Child42")
}
}
test("Generate schema for class containing generics with same base type but different type arguments") {
{
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val g = new JsonSchemaGenerator(_objectMapper, debug = true, config)
val instance = new GenericClassContainer()
val jsonNode = assertToFromJson(g, instance)
assertToFromJson(g, instance, classOf[GenericClassContainer])
val schema = generateAndValidateSchema(g, classOf[GenericClassContainer], Some(jsonNode))
assert(schema.at("/definitions/BoringClass/properties/data/type").asText() == "integer")
assert(schema.at("/definitions/GenericClass(String)/properties/data/type").asText() == "string")
assert(schema.at("/definitions/GenericWithJsonTypeName(String)/properties/data/type").asText() == "string")
assert(schema.at("/definitions/GenericClass(BoringClass)/properties/data/$ref").asText() == "#/definitions/BoringClass")
assert(schema.at("/definitions/GenericClassTwo(String,GenericClass(BoringClass))/properties/data1/type").asText() == "string")
assert(schema.at("/definitions/GenericClassTwo(String,GenericClass(BoringClass))/properties/data2/$ref").asText() == "#/definitions/GenericClass(BoringClass)")
}
}
test("additionalProperties / failOnUnknownProperties") {
// Test default
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.manyPrimitives)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.manyPrimitives.getClass, Some(jsonNode))
assert(schema.at("/additionalProperties").asBoolean() == false)
}
// Test turning failOnUnknownProperties off
{
val generator = new JsonSchemaGenerator(_objectMapper, debug = false,
config = JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(failOnUnknownProperties = false)
)
val jsonNode = assertToFromJson(generator, testData.manyPrimitives)
val schema = generateAndValidateSchema(generator, testData.manyPrimitives.getClass, Some(jsonNode))
assert(schema.at("/additionalProperties").asBoolean() == true)
}
}
test("primitives") {
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.manyPrimitives)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.manyPrimitives.getClass, Some(jsonNode))
assert(schema.at("/properties/_string/type").asText() == "string")
assert(schema.at("/properties/_integer/type").asText() == "integer")
assertPropertyRequired(schema, "_integer", required = false) // Should allow null by default
assert(schema.at("/properties/_int/type").asText() == "integer")
assertPropertyRequired(schema, "_int", required = true) // Must have a value
assert(schema.at("/properties/_booleanObject/type").asText() == "boolean")
assertPropertyRequired(schema, "_booleanObject", required = false) // Should allow null by default
assert(schema.at("/properties/_booleanPrimitive/type").asText() == "boolean")
assertPropertyRequired(schema, "_booleanPrimitive", required = true) // Must be required since it must have true or false - not null
assert(schema.at("/properties/_booleanObjectWithNotNull/type").asText() == "boolean")
assertPropertyRequired(schema, "_booleanObjectWithNotNull", required = true)
assert(schema.at("/properties/_doubleObject/type").asText() == "number")
      assertPropertyRequired(schema, "_doubleObject", required = false) // Should allow null by default
assert(schema.at("/properties/_doublePrimitive/type").asText() == "number")
assertPropertyRequired(schema, "_doublePrimitive", required = true) // Must be required since it must have a value - not null
assert(schema.at("/properties/myEnum/type").asText() == "string")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnum/enum")) == MyEnum.values().toList.map(_.toString))
assert(schema.at("/properties/myEnum/JsonSchemaInjectOnEnum").asText() == "true")
}
// Java with nullable types
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.manyPrimitivesNulls)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.manyPrimitivesNulls.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/_string", "string")
assertNullableType(schema, "/properties/_integer", "integer")
assertNullableType(schema, "/properties/_booleanObject", "boolean")
assertNullableType(schema, "/properties/_doubleObject", "number")
// We're actually going to test this elsewhere, because if we set this to null here it'll break the "generateAndValidateSchema"
// test. What's fun is that the type system will allow you to set the value as null, but the schema won't (because there's a @NotNull annotation on it).
assert(schema.at("/properties/_booleanObjectWithNotNull/type").asText() == "boolean")
assertPropertyRequired(schema, "_booleanObjectWithNotNull", required = true)
assert(schema.at("/properties/_int/type").asText() == "integer")
assertPropertyRequired(schema, "_int", required = true)
assert(schema.at("/properties/_booleanPrimitive/type").asText() == "boolean")
assertPropertyRequired(schema, "_booleanPrimitive", required = true)
assert(schema.at("/properties/_doublePrimitive/type").asText() == "number")
assertPropertyRequired(schema, "_doublePrimitive", required = true)
assertNullableType(schema, "/properties/myEnum", "string")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/myEnum/oneOf/1/enum")) == MyEnum.values().toList.map(_.toString))
}
// Scala
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.manyPrimitivesScala)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.manyPrimitivesScala.getClass, Some(jsonNode))
assert(schema.at("/properties/_string/type").asText() == "string")
assert(schema.at("/properties/_integer/type").asText() == "integer")
      assertPropertyRequired(schema, "_integer", required = true) // Scala Int is a primitive - cannot be null, so it is required
      assert(schema.at("/properties/_boolean/type").asText() == "boolean")
      assertPropertyRequired(schema, "_boolean", required = true) // Scala Boolean is a primitive - cannot be null, so it is required
      assert(schema.at("/properties/_double/type").asText() == "number")
      assertPropertyRequired(schema, "_double", required = true) // Scala Double is a primitive - cannot be null, so it is required
}
}
test("scala using option") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.pojoUsingOptionScala)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScala, testData.pojoUsingOptionScala.getClass, Some(jsonNode))
assert(schema.at("/properties/_string/type").asText() == "string")
assertPropertyRequired(schema, "_string", required = false) // Should allow null by default
assert(schema.at("/properties/_integer/type").asText() == "integer")
assertPropertyRequired(schema, "_integer", required = false) // Should allow null by default
assert(schema.at("/properties/_boolean/type").asText() == "boolean")
assertPropertyRequired(schema, "_boolean", required = false) // Should allow null by default
assert(schema.at("/properties/_double/type").asText() == "number")
assertPropertyRequired(schema, "_double", required = false) // Should allow null by default
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1"), "Child1Scala")
assertJsonSubTypesInfo(child1, "type", "child1")
assert(child1.at("/properties/parentString/type").asText() == "string")
assert(child1.at("/properties/child1String/type").asText() == "string")
assert(child1.at("/properties/_child1String2/type").asText() == "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assert(schema.at("/properties/optionalList/type").asText() == "array")
assert(schema.at("/properties/optionalList/items/$ref").asText() == "#/definitions/ClassNotExtendingAnythingScala")
}
test("java using option") {
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoUsingOptionalJava)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.pojoUsingOptionalJava.getClass, Some(jsonNode))
assert(schema.at("/properties/_string/type").asText() == "string")
assertPropertyRequired(schema, "_string", required = false) // Should allow null by default
assert(schema.at("/properties/_integer/type").asText() == "integer")
assertPropertyRequired(schema, "_integer", required = false) // Should allow null by default
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1"), "Child1")
assertJsonSubTypesInfo(child1, "type", "child1")
assert(child1.at("/properties/parentString/type").asText() == "string")
assert(child1.at("/properties/child1String/type").asText() == "string")
assert(child1.at("/properties/_child1String2/type").asText() == "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assert(schema.at("/properties/optionalList/type").asText() == "array")
assert(schema.at("/properties/optionalList/items/$ref").asText() == "#/definitions/ClassNotExtendingAnything")
}
test("nullable Java using option") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.pojoUsingOptionalJava)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.pojoUsingOptionalJava.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/_string", "string")
assertNullableType(schema, "/properties/_integer", "integer")
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1/oneOf/1"), "Child1")
assertJsonSubTypesInfo(child1, "type", "child1")
assertNullableType(child1, "/properties/parentString", "string")
assertNullableType(child1, "/properties/child1String", "string")
assertNullableType(child1, "/properties/_child1String2", "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assertNullableType(schema, "/properties/optionalList", "array")
assert(schema.at("/properties/optionalList/oneOf/1/items/$ref").asText() == "#/definitions/ClassNotExtendingAnything")
}
test("custom serializer not overriding JsonSerializer.acceptJsonFormatVisitor") {
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoWithCustomSerializer)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.pojoWithCustomSerializer.getClass, Some(jsonNode))
assert(schema.asInstanceOf[ObjectNode].fieldNames().asScala.toList == List("$schema", "title")) // Empty schema due to custom serializer
}
test("object with property using custom serializer not overriding JsonSerializer.acceptJsonFormatVisitor") {
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.objectWithPropertyWithCustomSerializer)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.objectWithPropertyWithCustomSerializer.getClass, Some(jsonNode))
assert(schema.at("/properties/s/type").asText() == "string")
assert(schema.at("/properties/child").asInstanceOf[ObjectNode].fieldNames().asScala.toList == List())
}
test("pojoWithArrays") {
def doTest(pojo:Object, clazz:Class[_], g:JsonSchemaGenerator, html5Checks:Boolean): Unit ={
val jsonNode = assertToFromJson(g, pojo)
val schema = generateAndValidateSchema(g, clazz, Some(jsonNode))
assert(schema.at("/properties/intArray1/type").asText() == "array")
assert(schema.at("/properties/intArray1/items/type").asText() == "integer")
assert(schema.at("/properties/stringArray/type").asText() == "array")
assert(schema.at("/properties/stringArray/items/type").asText() == "string")
assert(schema.at("/properties/stringList/type").asText() == "array")
assert(schema.at("/properties/stringList/items/type").asText() == "string")
assert(schema.at("/properties/stringList/minItems").asInt() == 1)
assert(schema.at("/properties/stringList/maxItems").asInt() == 10)
assert(schema.at("/properties/polymorphismList/type").asText() == "array")
assertChild1(schema, "/properties/polymorphismList/items/oneOf", html5Checks = html5Checks)
assertChild2(schema, "/properties/polymorphismList/items/oneOf", html5Checks = html5Checks)
assert(schema.at("/properties/polymorphismArray/type").asText() == "array")
assertChild1(schema, "/properties/polymorphismArray/items/oneOf", html5Checks = html5Checks)
assertChild2(schema, "/properties/polymorphismArray/items/oneOf", html5Checks = html5Checks)
assert(schema.at("/properties/listOfListOfStrings/type").asText() == "array")
assert(schema.at("/properties/listOfListOfStrings/items/type").asText() == "array")
assert(schema.at("/properties/listOfListOfStrings/items/items/type").asText() == "string")
assert(schema.at("/properties/setOfUniqueValues/type").asText() == "array")
assert(schema.at("/properties/setOfUniqueValues/items/type").asText() == "string")
if (html5Checks) {
assert(schema.at("/properties/setOfUniqueValues/uniqueItems").asText() == "true")
assert(schema.at("/properties/setOfUniqueValues/format").asText() == "checkbox")
}
}
doTest(testData.pojoWithArrays, testData.pojoWithArrays.getClass, jsonSchemaGenerator, html5Checks = false)
doTest(testData.pojoWithArraysScala, testData.pojoWithArraysScala.getClass, jsonSchemaGeneratorScala, html5Checks = false)
doTest(testData.pojoWithArraysScala, testData.pojoWithArraysScala.getClass, jsonSchemaGeneratorScalaHTML5, html5Checks = true)
doTest(testData.pojoWithArrays, testData.pojoWithArrays.getClass, jsonSchemaGeneratorScalaHTML5, html5Checks = true)
}
test("pojoWithArraysNullable") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.pojoWithArraysNullable)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.pojoWithArraysNullable.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/intArray1", "array")
assert(schema.at("/properties/intArray1/oneOf/1/items/type").asText() == "integer")
assertNullableType(schema, "/properties/stringArray", "array")
assert(schema.at("/properties/stringArray/oneOf/1/items/type").asText() == "string")
assertNullableType(schema, "/properties/stringList", "array")
assert(schema.at("/properties/stringList/oneOf/1/items/type").asText() == "string")
assertNullableType(schema, "/properties/polymorphismList", "array")
assertNullableChild1(schema, "/properties/polymorphismList/oneOf/1/items/oneOf")
assertNullableChild2(schema, "/properties/polymorphismList/oneOf/1/items/oneOf")
assertNullableType(schema, "/properties/polymorphismArray", "array")
assertNullableChild1(schema, "/properties/polymorphismArray/oneOf/1/items/oneOf")
assertNullableChild2(schema, "/properties/polymorphismArray/oneOf/1/items/oneOf")
assertNullableType(schema, "/properties/listOfListOfStrings", "array")
assert(schema.at("/properties/listOfListOfStrings/oneOf/1/items/type").asText() == "array")
assert(schema.at("/properties/listOfListOfStrings/oneOf/1/items/items/type").asText() == "string")
}
test("recursivePojo") {
// Non-nullable Java types
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.recursivePojo)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.recursivePojo.getClass, Some(jsonNode))
assert(schema.at("/properties/myText/type").asText() == "string")
assert(schema.at("/properties/children/type").asText() == "array")
val defViaRef = getNodeViaRefs(schema, schema.at("/properties/children/items"), "RecursivePojo")
assert(defViaRef.at("/properties/myText/type").asText() == "string")
assert(defViaRef.at("/properties/children/type").asText() == "array")
val defViaRef2 = getNodeViaRefs(schema, defViaRef.at("/properties/children/items"), "RecursivePojo")
assert(defViaRef == defViaRef2)
}
// Nullable Java types
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.recursivePojo)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.recursivePojo.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/myText", "string")
assertNullableType(schema, "/properties/children", "array")
val defViaRef = getNodeViaRefs(schema, schema.at("/properties/children/oneOf/1/items"), "RecursivePojo")
assertNullableType(defViaRef, "/properties/myText", "string")
assertNullableType(defViaRef, "/properties/children", "array")
val defViaRef2 = getNodeViaRefs(schema, defViaRef.at("/properties/children/oneOf/1/items"), "RecursivePojo")
assert(defViaRef == defViaRef2)
}
}
test("pojo using Maps") {
// Use our standard Java validator
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoUsingMaps)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.pojoUsingMaps.getClass, Some(jsonNode))
assert(schema.at("/properties/string2Integer/type").asText() == "object")
assert(schema.at("/properties/string2Integer/additionalProperties/type").asText() == "integer")
assert(schema.at("/properties/string2String/type").asText() == "object")
assert(schema.at("/properties/string2String/additionalProperties/type").asText() == "string")
assert(schema.at("/properties/string2PojoUsingJsonTypeInfo/type").asText() == "object")
assert(schema.at("/properties/string2PojoUsingJsonTypeInfo/additionalProperties/oneOf/0/$ref").asText() == "#/definitions/Child1")
assert(schema.at("/properties/string2PojoUsingJsonTypeInfo/additionalProperties/oneOf/1/$ref").asText() == "#/definitions/Child2")
}
// Try it with nullable types.
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.pojoUsingMaps)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.pojoUsingMaps.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/string2Integer", "object")
assert(schema.at("/properties/string2Integer/oneOf/1/additionalProperties/type").asText() == "integer")
assertNullableType(schema, "/properties/string2String", "object")
assert(schema.at("/properties/string2String/oneOf/1/additionalProperties/type").asText() == "string")
assertNullableType(schema, "/properties/string2PojoUsingJsonTypeInfo", "object")
assert(schema.at("/properties/string2PojoUsingJsonTypeInfo/oneOf/1/additionalProperties/oneOf/0/$ref").asText() == "#/definitions/Child1")
assert(schema.at("/properties/string2PojoUsingJsonTypeInfo/oneOf/1/additionalProperties/oneOf/1/$ref").asText() == "#/definitions/Child2")
}
}
test("pojo Using Custom Annotations") {
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoUsingFormat)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.pojoUsingFormat.getClass, Some(jsonNode))
val schemaHTML5Date = generateAndValidateSchema(jsonSchemaGeneratorHTML5, testData.pojoUsingFormat.getClass, Some(jsonNode))
val schemaHTML5DateNullable = generateAndValidateSchema(jsonSchemaGeneratorHTML5Nullable, testData.pojoUsingFormat.getClass, Some(jsonNode))
assert(schema.at("/format").asText() == "grid")
assert(schema.at("/description").asText() == "This is our pojo")
assert(schema.at("/title").asText() == "Pojo using format")
assert(schema.at("/properties/emailValue/type").asText() == "string")
assert(schema.at("/properties/emailValue/format").asText() == "email")
assert(schema.at("/properties/emailValue/description").asText() == "This is our email value")
assert(schema.at("/properties/emailValue/title").asText() == "Email value")
assert(schema.at("/properties/choice/type").asText() == "boolean")
assert(schema.at("/properties/choice/format").asText() == "checkbox")
assert(schema.at("/properties/dateTime/type").asText() == "string")
assert(schema.at("/properties/dateTime/format").asText() == "date-time")
assert(schema.at("/properties/dateTime/description").asText() == "This is description from @JsonPropertyDescription")
assert(schemaHTML5Date.at("/properties/dateTime/format").asText() == "datetime")
assert(schemaHTML5DateNullable.at("/properties/dateTime/oneOf/1/format").asText() == "datetime")
assert(schema.at("/properties/dateTimeWithAnnotation/type").asText() == "string")
assert(schema.at("/properties/dateTimeWithAnnotation/format").asText() == "text")
// Make sure autoGenerated title is correct
assert(schemaHTML5Date.at("/properties/dateTimeWithAnnotation/title").asText() == "Date Time With Annotation")
}
test("using JavaType") {
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.pojoUsingFormat)
val schema = generateAndValidateSchemaUsingJavaType( jsonSchemaGenerator, _objectMapper.constructType(testData.pojoUsingFormat.getClass), Some(jsonNode))
assert(schema.at("/format").asText() == "grid")
assert(schema.at("/description").asText() == "This is our pojo")
assert(schema.at("/title").asText() == "Pojo using format")
assert(schema.at("/properties/emailValue/type").asText() == "string")
assert(schema.at("/properties/emailValue/format").asText() == "email")
assert(schema.at("/properties/emailValue/description").asText() == "This is our email value")
assert(schema.at("/properties/emailValue/title").asText() == "Email value")
assert(schema.at("/properties/choice/type").asText() == "boolean")
assert(schema.at("/properties/choice/format").asText() == "checkbox")
assert(schema.at("/properties/dateTime/type").asText() == "string")
assert(schema.at("/properties/dateTime/format").asText() == "date-time")
assert(schema.at("/properties/dateTime/description").asText() == "This is description from @JsonPropertyDescription")
assert(schema.at("/properties/dateTimeWithAnnotation/type").asText() == "string")
assert(schema.at("/properties/dateTimeWithAnnotation/format").asText() == "text")
}
test("using JavaType with @JsonTypeName") {
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val g = new JsonSchemaGenerator(_objectMapper, debug = true, config)
val instance = new BoringContainer();
instance.child1 = new PojoUsingJsonTypeName();
instance.child1.stringWithDefault = "test";
val jsonNode = assertToFromJson(g, instance)
assertToFromJson(g, instance, classOf[BoringContainer])
val schema = generateAndValidateSchema(g, classOf[BoringContainer], Some(jsonNode))
assert(schema.at("/definitions/OtherTypeName/type").asText() == "object");
}
test("scala using option with HTML5") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorScalaHTML5, testData.pojoUsingOptionScala)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, testData.pojoUsingOptionScala.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/_string", "string")
assert(schema.at("/properties/_string/title").asText() == "_string")
assertNullableType(schema, "/properties/_integer", "integer")
assert(schema.at("/properties/_integer/title").asText() == "_integer")
assertNullableType(schema, "/properties/_boolean", "boolean")
assert(schema.at("/properties/_boolean/title").asText() == "_boolean")
assertNullableType(schema, "/properties/_double", "number")
assert(schema.at("/properties/_double/title").asText() == "_double")
assert(schema.at("/properties/child1/oneOf/0/type").asText() == "null")
assert(schema.at("/properties/child1/oneOf/0/title").asText() == "Not included")
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1/oneOf/1"), "Child1Scala")
assert(schema.at("/properties/child1/title").asText() == "Child 1")
assertJsonSubTypesInfo(child1, "type", "child1", html5Checks = true)
assert(child1.at("/properties/parentString/type").asText() == "string")
assert(child1.at("/properties/child1String/type").asText() == "string")
assert(child1.at("/properties/_child1String2/type").asText() == "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assert(schema.at("/properties/optionalList/oneOf/0/type").asText() == "null")
assert(schema.at("/properties/optionalList/oneOf/0/title").asText() == "Not included")
assert(schema.at("/properties/optionalList/oneOf/1/type").asText() == "array")
assert(schema.at("/properties/optionalList/oneOf/1/items/$ref").asText() == "#/definitions/ClassNotExtendingAnythingScala")
assert(schema.at("/properties/optionalList/title").asText() == "Optional List")
}
test("java using optional with HTML5") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5, testData.pojoUsingOptionalJava)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5, testData.pojoUsingOptionalJava.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/_string", "string")
assert(schema.at("/properties/_string/title").asText() == "_string")
assertNullableType(schema, "/properties/_integer", "integer")
assert(schema.at("/properties/_integer/title").asText() == "_integer")
assert(schema.at("/properties/child1/oneOf/0/type").asText() == "null")
assert(schema.at("/properties/child1/oneOf/0/title").asText() == "Not included")
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1/oneOf/1"), "Child1")
assert(schema.at("/properties/child1/title").asText() == "Child 1")
assertJsonSubTypesInfo(child1, "type", "child1", html5Checks = true)
assert(child1.at("/properties/parentString/type").asText() == "string")
assert(child1.at("/properties/child1String/type").asText() == "string")
assert(child1.at("/properties/_child1String2/type").asText() == "string")
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assertNullableType(schema, "/properties/optionalList", "array")
assert(schema.at("/properties/optionalList/oneOf/1/items/$ref").asText() == "#/definitions/ClassNotExtendingAnything")
assert(schema.at("/properties/optionalList/title").asText() == "Optional List")
}
test("java using optional with HTML5+nullable") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5Nullable, testData.pojoUsingOptionalJava)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5Nullable, testData.pojoUsingOptionalJava.getClass, Some(jsonNode))
assertNullableType(schema, "/properties/_string", "string")
assertNullableType(schema, "/properties/_integer", "integer")
assert(schema.at("/properties/child1/oneOf/0/type").asText() == "null")
assert(schema.at("/properties/child1/oneOf/0/title").asText() == "Not included")
val child1 = getNodeViaRefs(schema, schema.at("/properties/child1/oneOf/1"), "Child1")
assertJsonSubTypesInfo(child1, "type", "child1", html5Checks = true)
assertNullableType(child1, "/properties/parentString", "string")
assertNullableType(child1, "/properties/child1String", "string")
assertNullableType(child1, "/properties/_child1String2", "string")
    // This is required as we have a @JsonProperty annotation marking it as such.
assert(child1.at("/properties/_child1String3/type").asText() == "string")
assertPropertyRequired(child1, "_child1String3", required = true)
assertNullableType(schema, "/properties/optionalList", "array")
assert(schema.at("/properties/optionalList/oneOf/1/items/$ref").asText() == "#/definitions/ClassNotExtendingAnything")
assert(schema.at("/properties/optionalList/title").asText() == "Optional List")
}
test("propertyOrdering") {
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(schema.at("/properties/someString/propertyOrder").asInt() == 1)
assert(schema.at("/properties/myEnum/propertyOrder").asInt() == 2)
}
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorHTML5Nullable, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGeneratorHTML5Nullable, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(schema.at("/properties/someString/propertyOrder").asInt() == 1)
assert(schema.at("/properties/myEnum/propertyOrder").asInt() == 2)
}
// Make sure propertyOrder is not enabled when not using html5
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGenerator, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(schema.at("/properties/someString/propertyOrder").isMissingNode)
}
// Same with the non-html5 nullable
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.classNotExtendingAnything.getClass, Some(jsonNode))
assert(schema.at("/properties/someString/propertyOrder").isMissingNode)
}
}
test("dates") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorScalaHTML5, testData.manyDates)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, testData.manyDates.getClass, Some(jsonNode))
assert(schema.at("/properties/javaLocalDateTime/format").asText() == "datetime-local")
assert(schema.at("/properties/javaOffsetDateTime/format").asText() == "datetime")
assert(schema.at("/properties/javaLocalDate/format").asText() == "date")
assert(schema.at("/properties/jodaLocalDate/format").asText() == "date")
}
test("default and examples") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorScalaHTML5, testData.defaultAndExamples)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, testData.defaultAndExamples.getClass, Some(jsonNode))
assert(getArrayNodeAsListOfStrings(schema.at("/properties/emailValue/examples")) == List("[email protected]"))
assert(schema.at("/properties/fontSize/default").asText() == "12")
assert(getArrayNodeAsListOfStrings(schema.at("/properties/fontSize/examples")) == List("10", "14", "18"))
assert(schema.at("/properties/defaultStringViaJsonValue/default").asText() == "ds")
assert(schema.at("/properties/defaultIntViaJsonValue/default").asText() == "1")
assert(schema.at("/properties/defaultBoolViaJsonValue/default").asText() == "true")
}
test("validation") {
// Scala
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScalaHTML5, testData.classUsingValidation)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, testData.classUsingValidation.getClass, Some(jsonNode))
verifyStringProperty(schema, "stringUsingNotNull", Some(1), None, None, required = true)
verifyStringProperty(schema, "stringUsingNotBlank", Some(1), None, Some("^.*\\\\S+.*$"), required = true)
verifyStringProperty(schema, "stringUsingNotBlankAndNotNull", Some(1), None, Some("^.*\\\\S+.*$"), required = true)
verifyStringProperty(schema, "stringUsingNotEmpty", Some(1), None, None, required = true)
verifyStringProperty(schema, "stringUsingSize", Some(1), Some(20), None, required = false)
verifyStringProperty(schema, "stringUsingSizeOnlyMin", Some(1), None, None, required = false)
verifyStringProperty(schema, "stringUsingSizeOnlyMax", None, Some(30), None, required = false)
verifyStringProperty(schema, "stringUsingPattern", None, None, Some("_stringUsingPatternA|_stringUsingPatternB"), required = false)
verifyStringProperty(schema, "stringUsingPatternList", None, None, Some("^(?=^_stringUsing.*)(?=.*PatternList$).*$"), required = false)
verifyNumericProperty(schema, "intMin", Some(1), None, required = true)
verifyNumericProperty(schema, "intMax", None, Some(10), required = true)
verifyNumericProperty(schema, "doubleMin", Some(1), None, required = true)
verifyNumericProperty(schema, "doubleMax", None, Some(10), required = true)
verifyNumericDoubleProperty(schema, "decimalMin", Some(1.5), None, required = true)
verifyNumericDoubleProperty(schema, "decimalMax", None, Some(2.5), required = true)
assert(schema.at("/properties/email/format").asText() == "email")
verifyArrayProperty(schema, "notEmptyStringArray", Some(1), None, required = true)
verifyObjectProperty(schema, "notEmptyMap", "string", Some(1), None, required = true)
}
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorScalaHTML5, testData.pojoUsingValidation)
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, testData.pojoUsingValidation.getClass, Some(jsonNode))
verifyStringProperty(schema, "stringUsingNotNull", Some(1), None, None, required = true)
verifyStringProperty(schema, "stringUsingNotBlank", Some(1), None, Some("^.*\\\\S+.*$"), required = true)
verifyStringProperty(schema, "stringUsingNotBlankAndNotNull", Some(1), None, Some("^.*\\\\S+.*$"), required = true)
verifyStringProperty(schema, "stringUsingNotEmpty", Some(1), None, None, required = true)
verifyStringProperty(schema, "stringUsingSize", Some(1), Some(20), None, required = false)
verifyStringProperty(schema, "stringUsingSizeOnlyMin", Some(1), None, None, required = false)
verifyStringProperty(schema, "stringUsingSizeOnlyMax", None, Some(30), None, required = false)
verifyStringProperty(schema, "stringUsingPattern", None, None, Some("_stringUsingPatternA|_stringUsingPatternB"), required = false)
verifyStringProperty(schema, "stringUsingPatternList", None, None, Some("^(?=^_stringUsing.*)(?=.*PatternList$).*$"), required = false)
verifyNumericProperty(schema, "intMin", Some(1), None, required = true)
verifyNumericProperty(schema, "intMax", None, Some(10), required = true)
verifyNumericProperty(schema, "doubleMin", Some(1), None, required = true)
verifyNumericProperty(schema, "doubleMax", None, Some(10), required = true)
verifyNumericDoubleProperty(schema, "decimalMin", Some(1.5), None, required = true)
verifyNumericDoubleProperty(schema, "decimalMax", None, Some(2.5), required = true)
verifyArrayProperty(schema, "notEmptyStringArray", Some(1), None, required = true)
verifyArrayProperty(schema, "notEmptyStringList", Some(1), None, required = true)
verifyObjectProperty(schema, "notEmptyStringMap", "string", Some(1), None, required = true)
}
def verifyStringProperty(schema:JsonNode, propertyName:String, minLength:Option[Int], maxLength:Option[Int], pattern:Option[String], required:Boolean): Unit = {
assertNumericPropertyValidation(schema, propertyName, "minLength", minLength)
assertNumericPropertyValidation(schema, propertyName, "maxLength", maxLength)
val matchNode = schema.at(s"/properties/$propertyName/pattern")
pattern match {
case Some(_) => assert(matchNode.asText == pattern.get)
case None => assert(matchNode.isMissingNode)
}
assertPropertyRequired(schema, propertyName, required)
}
def verifyNumericProperty(schema:JsonNode, propertyName:String, minimum:Option[Int], maximum:Option[Int], required:Boolean): Unit = {
assertNumericPropertyValidation(schema, propertyName, "minimum", minimum)
assertNumericPropertyValidation(schema, propertyName, "maximum", maximum)
assertPropertyRequired(schema, propertyName, required)
}
def verifyNumericDoubleProperty(schema:JsonNode, propertyName:String, minimum:Option[Double], maximum:Option[Double], required:Boolean): Unit = {
assertNumericDoublePropertyValidation(schema, propertyName, "minimum", minimum)
assertNumericDoublePropertyValidation(schema, propertyName, "maximum", maximum)
assertPropertyRequired(schema, propertyName, required)
}
def verifyArrayProperty(schema:JsonNode, propertyName:String, minItems:Option[Int], maxItems:Option[Int], required:Boolean): Unit = {
assertNumericPropertyValidation(schema, propertyName, "minItems", minItems)
assertNumericPropertyValidation(schema, propertyName, "maxItems", maxItems)
assertPropertyRequired(schema, propertyName, required)
}
def verifyObjectProperty(schema:JsonNode, propertyName:String, additionalPropertiesType:String, minProperties:Option[Int], maxProperties:Option[Int], required:Boolean): Unit = {
assert(schema.at(s"/properties/$propertyName/additionalProperties/type").asText() == additionalPropertiesType)
assertNumericPropertyValidation(schema, propertyName, "minProperties", minProperties)
assertNumericPropertyValidation(schema, propertyName, "maxProperties", maxProperties)
assertPropertyRequired(schema, propertyName, required)
}
def assertNumericPropertyValidation(schema:JsonNode, propertyName:String, validationName:String, value:Option[Int]): Unit = {
val jsonNode = schema.at(s"/properties/$propertyName/$validationName")
value match {
case Some(_) => assert(jsonNode.asInt == value.get)
case None => assert(jsonNode.isMissingNode)
}
}
def assertNumericDoublePropertyValidation(schema:JsonNode, propertyName:String, validationName:String, value:Option[Double]): Unit = {
val jsonNode = schema.at(s"/properties/$propertyName/$validationName")
value match {
case Some(_) => assert(jsonNode.asDouble() == value.get)
case None => assert(jsonNode.isMissingNode)
}
}
}
test("validation using groups") {
def check(schema:JsonNode, propertyName:String, included:Boolean): Unit = {
assertPropertyRequired(schema, propertyName, required = included)
assert(schema.at(s"/properties/$propertyName/injected").isMissingNode != included)
}
val objectUsingGroups = testData.classUsingValidationWithGroups
// no Group at all
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array()
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = true)
check(schema, "defaultGroup", included = true)
check(schema, "group1", included = false)
check(schema, "group2", included = false)
check(schema, "group12", included = false)
// Make sure inject on class-level is not included
assert(schema.at(s"/injected").isMissingNode)
}
// Default group
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[Default])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = true)
check(schema, "defaultGroup", included = true)
check(schema, "group1", included = false)
check(schema, "group2", included = false)
check(schema, "group12", included = false)
// Make sure inject on class-level is not included
assert(schema.at(s"/injected").isMissingNode)
}
// Group 1
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[ValidationGroup1])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = false)
check(schema, "defaultGroup", included = false)
check(schema, "group1", included = true)
check(schema, "group2", included = false)
check(schema, "group12", included = true)
      // Make sure inject on class-level is included
assert(!schema.at(s"/injected").isMissingNode)
}
// Group 1 and Default-group
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[ValidationGroup1], classOf[Default])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = true)
check(schema, "defaultGroup", included = true)
check(schema, "group1", included = true)
check(schema, "group2", included = false)
check(schema, "group12", included = true)
      // Make sure inject on class-level is included
assert(!schema.at(s"/injected").isMissingNode)
}
// Group 2
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[ValidationGroup2])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = false)
check(schema, "defaultGroup", included = false)
check(schema, "group1", included = false)
check(schema, "group2", included = true)
check(schema, "group12", included = true)
// Make sure inject on class-level is not included
assert(schema.at(s"/injected").isMissingNode)
}
// Group 1 and 2
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[ValidationGroup1], classOf[ValidationGroup2])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = false)
check(schema, "defaultGroup", included = false)
check(schema, "group1", included = true)
check(schema, "group2", included = true)
check(schema, "group12", included = true)
      // Make sure inject on class-level is included
assert(!schema.at(s"/injected").isMissingNode)
}
// Group 3 - not in use
{
val jsonSchemaGenerator_Group = new JsonSchemaGenerator(_objectMapperScala, debug = true,
JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(
javaxValidationGroups = Array(classOf[ValidationGroup3_notInUse])
))
val jsonNode = assertToFromJson(jsonSchemaGenerator_Group, objectUsingGroups)
val schema = generateAndValidateSchema(jsonSchemaGenerator_Group, objectUsingGroups.getClass, Some(jsonNode))
check(schema, "noGroup", included = false)
check(schema, "defaultGroup", included = false)
check(schema, "group1", included = false)
check(schema, "group2", included = false)
check(schema, "group12", included = false)
// Make sure inject on class-level is not included
assert(schema.at(s"/injected").isMissingNode)
}
}
test("Polymorphism using mixin") {
// Java
{
val jsonNode = assertToFromJson(jsonSchemaGenerator, testData.mixinChild1)
assertToFromJson(jsonSchemaGenerator, testData.mixinChild1, classOf[MixinParent])
val schema = generateAndValidateSchema(jsonSchemaGenerator, classOf[MixinParent], Some(jsonNode))
assertChild1(schema, "/oneOf", defName = "MixinChild1")
assertChild2(schema, "/oneOf", defName = "MixinChild2")
}
// Java + Nullable types
{
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.mixinChild1)
assertToFromJson(jsonSchemaGeneratorNullable, testData.mixinChild1, classOf[MixinParent])
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, classOf[MixinParent], Some(jsonNode))
assertNullableChild1(schema, "/oneOf", defName = "MixinChild1")
assertNullableChild2(schema, "/oneOf", defName = "MixinChild2")
}
}
test("issue 24") {
jsonSchemaGenerator.generateJsonSchema(classOf[EntityWrapper])
jsonSchemaGeneratorNullable.generateJsonSchema(classOf[EntityWrapper])
}
test("Polymorphism oneOf-ordering") {
val schema = generateAndValidateSchema(jsonSchemaGeneratorScalaHTML5, classOf[PolymorphismOrderingParentScala], None)
val oneOfList:List[String] = schema.at("/oneOf").asInstanceOf[ArrayNode].iterator().asScala.toList.map(_.at("/$ref").asText)
assert(List("#/definitions/PolymorphismOrderingChild3", "#/definitions/PolymorphismOrderingChild1", "#/definitions/PolymorphismOrderingChild4", "#/definitions/PolymorphismOrderingChild2") == oneOfList)
}
test("@NotNull annotations and nullable types") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorNullable, testData.notNullableButNullBoolean)
val schema = generateAndValidateSchema(jsonSchemaGeneratorNullable, testData.notNullableButNullBoolean.getClass, None)
val exception = intercept[Exception] {
useSchema(schema, Some(jsonNode))
}
// While our compiler will let us do what we're about to do, the validator should give us a message that looks like this...
assert(exception.getMessage.contains("json does not validate against schema"))
assert(exception.getMessage.contains("error: instance type (null) does not match any allowed primitive type (allowed: [\\"boolean\\"])"))
assert(schema.at("/properties/notNullBooleanObject/type").asText() == "boolean")
assertPropertyRequired(schema, "notNullBooleanObject", required = true)
}
test("nestedPolymorphism") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorScala, testData.nestedPolymorphism)
assertToFromJson(jsonSchemaGeneratorScala, testData.nestedPolymorphism, classOf[NestedPolymorphism1Base])
generateAndValidateSchema(jsonSchemaGeneratorScala, classOf[NestedPolymorphism1Base], Some(jsonNode))
}
test("PolymorphismAndTitle") {
val schema = jsonSchemaGeneratorScala.generateJsonSchema(classOf[PolymorphismAndTitleBase])
println("--------------------------------------------")
println(asPrettyJson(schema, jsonSchemaGeneratorScala.rootObjectMapper))
assert( schema.at("/oneOf/0/$ref").asText() == "#/definitions/PolymorphismAndTitle1")
assert( schema.at("/oneOf/0/title").asText() == "CustomTitle1")
}
test("UsingJsonSchemaOptions") {
{
val schema = jsonSchemaGeneratorScala.generateJsonSchema(classOf[UsingJsonSchemaOptions])
println("--------------------------------------------")
println(asPrettyJson(schema, jsonSchemaGeneratorScala.rootObjectMapper))
assert(schema.at("/options/classOption").asText() == "classOptionValue")
assert(schema.at("/properties/propertyUsingOneProperty/options/o1").asText() == "v1")
}
{
val schema = jsonSchemaGeneratorScala.generateJsonSchema(classOf[UsingJsonSchemaOptionsBase])
println("--------------------------------------------")
println(asPrettyJson(schema, jsonSchemaGeneratorScala.rootObjectMapper))
assert(schema.at("/definitions/UsingJsonSchemaOptionsChild1/options/classOption1").asText() == "classOptionValue1")
assert(schema.at("/definitions/UsingJsonSchemaOptionsChild1/properties/propertyUsingOneProperty/options/o1").asText() == "v1")
assert(schema.at("/definitions/UsingJsonSchemaOptionsChild2/options/classOption2").asText() == "classOptionValue2")
assert(schema.at("/definitions/UsingJsonSchemaOptionsChild2/properties/propertyUsingOneProperty/options/o1").asText() == "v1")
}
}
test("UsingJsonSchemaInject") {
{
val customUserNameLoaderVariable = "xx"
val customUserNamesLoader = new CustomUserNamesLoader(customUserNameLoaderVariable)
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(jsonSuppliers = Map("myCustomUserNamesLoader" -> customUserNamesLoader))
val _jsonSchemaGeneratorScala = new JsonSchemaGenerator(_objectMapperScala, debug = true, config)
val schema = _jsonSchemaGeneratorScala.generateJsonSchema(classOf[UsingJsonSchemaInject])
println("--------------------------------------------")
println(asPrettyJson(schema, _jsonSchemaGeneratorScala.rootObjectMapper))
assert(schema.at("/patternProperties/^s[a-zA-Z0-9]+/type").asText() == "string")
assert(schema.at("/patternProperties/^i[a-zA-Z0-9]+/type").asText() == "integer")
assert(schema.at("/properties/sa/type").asText() == "string")
assert(schema.at("/properties/injectedInProperties").asText() == "true")
assert(schema.at("/properties/sa/options/hidden").asText() == "true")
assert(schema.at("/properties/saMergeFalse/type").asText() == "integer")
assert(schema.at("/properties/saMergeFalse/default").asText() == "12")
assert(schema.at("/properties/saMergeFalse/pattern").isMissingNode)
assert(schema.at("/properties/ib/type").asText() == "integer")
assert(schema.at("/properties/ib/multipleOf").asInt() == 7)
assert(schema.at("/properties/ib/exclusiveMinimum").asBoolean())
assert(schema.at("/properties/uns/items/enum/0").asText() == "foo")
assert(schema.at("/properties/uns/items/enum/1").asText() == "bar")
assert(schema.at("/properties/uns2/items/enum/0").asText() == "foo_" + customUserNameLoaderVariable)
assert(schema.at("/properties/uns2/items/enum/1").asText() == "bar_" + customUserNameLoaderVariable)
}
}
test("UsingJsonSchemaInjectWithTopLevelMergeFalse") {
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4
val _jsonSchemaGeneratorScala = new JsonSchemaGenerator(_objectMapperScala, debug = true, config)
val schema = _jsonSchemaGeneratorScala.generateJsonSchema(classOf[UsingJsonSchemaInjectWithTopLevelMergeFalse])
val schemaJson = asPrettyJson(schema, _jsonSchemaGeneratorScala.rootObjectMapper)
println("--------------------------------------------")
println(schemaJson)
val fasit =
"""{
| "everything" : "should be replaced"
|}""".stripMargin
assert( schemaJson == fasit )
}
test("Preventing polymorphism by using classTypeReMapping") {
val config = JsonSchemaConfig.vanillaJsonSchemaDraft4.copy(classTypeReMapping = Map(classOf[Parent] -> classOf[Child1]))
val _jsonSchemaGenerator = new JsonSchemaGenerator(_objectMapper, debug = true, config)
// Class with property
{
def assertDefaultValues(schema: JsonNode): Unit = {
assert(schema.at("/properties/stringWithDefault/type").asText() == "string")
assert(schema.at("/properties/stringWithDefault/default").asText() == "x")
assert(schema.at("/properties/intWithDefault/type").asText() == "integer")
assert(schema.at("/properties/intWithDefault/default").asInt() == 12)
assert(schema.at("/properties/booleanWithDefault/type").asText() == "boolean")
assert(schema.at("/properties/booleanWithDefault/default").asBoolean())
}
// PojoWithParent has a property of type Parent (which uses polymorphism).
// Default rendering schema will make this property oneOf Child1 and Child2.
// In this test we're preventing this by remapping Parent to Child1.
      // Now, when generating the schema, we should generate it as if the property were of type Child1.
val jsonNode = assertToFromJson(_jsonSchemaGenerator, testData.pojoWithParent)
assertToFromJson(_jsonSchemaGenerator, testData.pojoWithParent, classOf[PojoWithParent])
val schema = generateAndValidateSchema(_jsonSchemaGenerator, classOf[PojoWithParent], Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/pojoValue/type").asText() == "boolean")
assertDefaultValues(schema)
assertChild1(schema, "/properties/child")
}
// remapping root class
{
def doTest(pojo:Object, clazz:Class[_], g:JsonSchemaGenerator): Unit = {
val jsonNode = assertToFromJson(g, pojo)
val schema = generateAndValidateSchema(g, clazz, Some(jsonNode))
assert(!schema.at("/additionalProperties").asBoolean())
assert(schema.at("/properties/parentString/type").asText() == "string")
assertJsonSubTypesInfo(schema, "type", "child1")
}
doTest(testData.child1, classOf[Parent], _jsonSchemaGenerator)
}
//remapping arrays
{
def doTest(pojo:Object, clazz:Class[_], g:JsonSchemaGenerator, html5Checks:Boolean): Unit ={
val jsonNode = assertToFromJson(g, pojo)
val schema = generateAndValidateSchema(g, clazz, Some(jsonNode))
assert(schema.at("/properties/intArray1/type").asText() == "array")
assert(schema.at("/properties/intArray1/items/type").asText() == "integer")
assert(schema.at("/properties/stringArray/type").asText() == "array")
assert(schema.at("/properties/stringArray/items/type").asText() == "string")
assert(schema.at("/properties/stringList/type").asText() == "array")
assert(schema.at("/properties/stringList/items/type").asText() == "string")
assert(schema.at("/properties/stringList/minItems").asInt() == 1)
assert(schema.at("/properties/stringList/maxItems").asInt() == 10)
assert(schema.at("/properties/polymorphismList/type").asText() == "array")
assertChild1(schema, "/properties/polymorphismList/items", html5Checks = html5Checks)
assert(schema.at("/properties/polymorphismArray/type").asText() == "array")
assertChild1(schema, "/properties/polymorphismArray/items", html5Checks = html5Checks)
assert(schema.at("/properties/listOfListOfStrings/type").asText() == "array")
assert(schema.at("/properties/listOfListOfStrings/items/type").asText() == "array")
assert(schema.at("/properties/listOfListOfStrings/items/items/type").asText() == "string")
assert(schema.at("/properties/setOfUniqueValues/type").asText() == "array")
assert(schema.at("/properties/setOfUniqueValues/items/type").asText() == "string")
if (html5Checks) {
assert(schema.at("/properties/setOfUniqueValues/uniqueItems").asText() == "true")
assert(schema.at("/properties/setOfUniqueValues/format").asText() == "checkbox")
}
}
val c = new Child1()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
val _classNotExtendingAnything = {
val o = new ClassNotExtendingAnything
o.someString = "Something"
o.myEnum = MyEnum.C
o
}
val _pojoWithArrays = new PojoWithArrays(
Array(1,2,3),
Array("a1","a2","a3"),
List("l1", "l2", "l3").asJava,
List[Parent](c, c).asJava,
List[Parent](c, c).toArray,
List(_classNotExtendingAnything, _classNotExtendingAnything).asJava,
PojoWithArrays._listOfListOfStringsValues, // It was difficult to construct this from scala :)
Set(MyEnum.B).asJava
)
doTest(_pojoWithArrays, _pojoWithArrays.getClass, _jsonSchemaGenerator, html5Checks = false)
}
}
test("Basic json (de)serialization of Kotlin data class") {
val a = new KotlinClass("a", 1)
val json = _objectMapperKotlin.writeValueAsString(a)
val r = _objectMapperKotlin.readValue(json, classOf[KotlinClass])
assert( a == r)
}
test("Non-nullable parameter with default value is always required for Kotlin class") {
val jsonNode = assertToFromJson(jsonSchemaGeneratorKotlin, testData.kotlinWithDefaultValues)
val schema = generateAndValidateSchema(jsonSchemaGeneratorKotlin, testData.kotlinWithDefaultValues.getClass, Some(jsonNode))
println(schema)
assert("string" == schema.at("/properties/optional/type").asText())
assert("string" == schema.at("/properties/required/type").asText())
assert("string" == schema.at("/properties/optionalDefault/type").asText())
assert("string" == schema.at("/properties/optionalDefaultNull/type").asText())
assertPropertyRequired(schema, "optional", required = false)
assertPropertyRequired(schema, "required", required = true)
assertPropertyRequired(schema, "optionalDefault", required = true)
assertPropertyRequired(schema, "optionalDefaultNull", required = false)
}
test("JsonSchema DRAFT-06") {
val jsg = jsonSchemaGenerator_draft_06
val jsonNode = assertToFromJson(jsg, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsg, testData.classNotExtendingAnything.getClass, Some(jsonNode),
jsonSchemaDraft = JsonSchemaDraft.DRAFT_06
)
// Currently there are no differences in the generated jsonSchema other than the $schema-url
}
test("JsonSchema DRAFT-07") {
val jsg = jsonSchemaGenerator_draft_07
val jsonNode = assertToFromJson(jsg, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsg, testData.classNotExtendingAnything.getClass, Some(jsonNode),
jsonSchemaDraft = JsonSchemaDraft.DRAFT_07
)
// Currently there are no differences in the generated jsonSchema other than the $schema-url
}
test("JsonSchema DRAFT-2019-09") {
val jsg = jsonSchemaGenerator_draft_2019_09
val jsonNode = assertToFromJson(jsg, testData.classNotExtendingAnything)
val schema = generateAndValidateSchema(jsg, testData.classNotExtendingAnything.getClass, Some(jsonNode),
jsonSchemaDraft = JsonSchemaDraft.DRAFT_2019_09
)
// Currently there are no differences in the generated jsonSchema other than the $schema-url
}
}
trait TestData {
import scala.collection.JavaConverters._
val child1 = {
val c = new Child1()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
val child2 = {
val c = new Child2()
c.parentString = "pv"
c.child2int = 12
c
}
val pojoWithParent = {
val p = new PojoWithParent
p.pojoValue = true
p.child = child1
p.stringWithDefault = "y"
p.intWithDefault = 13
p.booleanWithDefault = true
p
}
val child21 = {
val c = new Child21()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
val child22 = {
val c = new Child22()
c.parentString = "pv"
c.child2int = 12
c
}
val child31 = {
val c = new Child31()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
val child32 = {
val c = new Child32()
c.parentString = "pv"
c.child2int = 12
c
}
val child41 = new Child41()
val child42 = new Child42()
val child51 = {
val c = new Child51()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
val child52 = {
val c = new Child52()
c.parentString = "pv"
c.child2int = 12
c
}
val child61 = {
val c = new Child61()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
val child2Scala = Child2Scala("pv", 12)
val child1Scala = Child1Scala("pv", "cs", "cs2", "cs3")
val pojoWithParentScala = PojoWithParentScala(pojoValue = true, child1Scala, "y", 13, booleanWithDefault = true)
val classNotExtendingAnything = {
val o = new ClassNotExtendingAnything
o.someString = "Something"
o.myEnum = MyEnum.C
o
}
val classNotExtendingAnythingScala = ClassNotExtendingAnythingScala("Something", MyEnum.C, Some(MyEnum.A))
val manyPrimitives = new ManyPrimitives("s1", 1, 2, true, false, true, 0.1, 0.2, MyEnum.B)
val manyPrimitivesNulls = new ManyPrimitives(null, null, 1, null, false, false, null, 0.1, null)
val manyPrimitivesScala = ManyPrimitivesScala("s1", 1, _boolean = true, 0.1)
val pojoUsingOptionScala = PojoUsingOptionScala(Some("s1"), Some(1), Some(true), Some(0.1), Some(child1Scala), Some(List(classNotExtendingAnythingScala)))
val pojoUsingOptionalJava = new PojoUsingOptionalJava(Optional.of("s"), Optional.of(1), Optional.of(child1), Optional.of(util.Arrays.asList(classNotExtendingAnything)))
val pojoWithCustomSerializer = {
val p = new PojoWithCustomSerializer
p.myString = "xxx"
p
}
val objectWithPropertyWithCustomSerializer = new ObjectWithPropertyWithCustomSerializer("s1", pojoWithCustomSerializer)
val pojoWithArrays = new PojoWithArrays(
Array(1,2,3),
Array("a1","a2","a3"),
List("l1", "l2", "l3").asJava,
List(child1, child2).asJava,
List(child1, child2).toArray,
List(classNotExtendingAnything, classNotExtendingAnything).asJava,
PojoWithArrays._listOfListOfStringsValues, // It was difficult to construct this from scala :)
Set(MyEnum.B).asJava
)
val pojoWithArraysNullable = new PojoWithArraysNullable(
Array(1,2,3),
Array("a1","a2","a3"),
List("l1", "l2", "l3").asJava,
List(child1, child2).asJava,
List(child1, child2).toArray,
List(classNotExtendingAnything, classNotExtendingAnything).asJava,
PojoWithArrays._listOfListOfStringsValues, // It was difficult to construct this from scala :)
Set(MyEnum.B).asJava
)
val pojoWithArraysScala = PojoWithArraysScala(
Some(List(1,2,3)),
List("a1","a2","a3"),
List("l1", "l2", "l3"),
List(child1, child2),
List(child1, child2),
List(classNotExtendingAnything, classNotExtendingAnything),
List(List("l11","l12"), List("l21")),
setOfUniqueValues = Set(MyEnum.B)
)
val recursivePojo = new RecursivePojo("t1", List(new RecursivePojo("c1", null)).asJava)
val pojoUsingMaps = new PojoUsingMaps(
Map[String, Integer]("a" -> 1, "b" -> 2).asJava,
Map("x" -> "y", "z" -> "w").asJava,
Map[String, Parent]("1" -> child1, "2" -> child2).asJava
)
val pojoUsingFormat = new PojoUsingFormat("[email protected]", true, OffsetDateTime.now(), OffsetDateTime.now())
val manyDates = ManyDates(LocalDateTime.now(), OffsetDateTime.now(), LocalDate.now(), org.joda.time.LocalDate.now())
val defaultAndExamples = DefaultAndExamples("[email protected]", 18, "s", 2, false)
val classUsingValidation = ClassUsingValidation(
"_stringUsingNotNull", "_stringUsingNotBlank", "_stringUsingNotBlankAndNotNull", "_stringUsingNotEmpty", List("l1", "l2", "l3"), Map("mk1" -> "mv1", "mk2" -> "mv2"),
"_stringUsingSize", "_stringUsingSizeOnlyMin", "_stringUsingSizeOnlyMax", "_stringUsingPatternA", "_stringUsingPatternList",
1, 2, 1.0, 2.0, 1.6, 2.0, "[email protected]"
)
val classUsingValidationWithGroups = ClassUsingValidationWithGroups(
"_noGroup", "_defaultGroup", "_group1", "_group2", "_group12"
)
val pojoUsingValidation = new PojoUsingValidation(
"_stringUsingNotNull", "_stringUsingNotBlank", "_stringUsingNotBlankAndNotNull", "_stringUsingNotEmpty", Array("a1", "a2", "a3"), List("l1", "l2", "l3").asJava,
Map("mk1" -> "mv1", "mk2" -> "mv2").asJava, "_stringUsingSize", "_stringUsingSizeOnlyMin", "_stringUsingSizeOnlyMax", "_stringUsingPatternA",
"_stringUsingPatternList", 1, 2, 1.0, 2.0, 1.6, 2.0
)
val mixinChild1 = {
val c = new MixinChild1()
c.parentString = "pv"
c.child1String = "cs"
c.child1String2 = "cs2"
c.child1String3 = "cs3"
c
}
// Test the collision of @NotNull validations and null fields.
val notNullableButNullBoolean = new PojoWithNotNull(null)
val nestedPolymorphism = NestedPolymorphism1_1("a1", NestedPolymorphism2_2("a2", Some(NestedPolymorphism3("b3"))))
val genericClassVoid = new GenericClassVoid()
val genericMapLike = new GenericMapLike(Collections.singletonMap("foo", "bar"))
val kotlinWithDefaultValues = new KotlinWithDefaultValues("1", "2", "3", "4")
}
|
mbknor/mbknor-jackson-jsonSchema
|
src/test/scala/com/kjetland/jackson/jsonSchema/JsonSchemaGeneratorTest.scala
|
Scala
|
mit
| 90,750 |
package org.scaladebugger.test
import scala.language.reflectiveCalls
object Main extends App {
val x = 3
var y = 4
val x123 = "huh?"
def runMe(x: Int = 3) = println(x)
val myClass = new MyClass((x) => (z) => x + z) {
def anotherMethod = {
val something = 1
something + "asdf"
}
}
while (true) {
val z = x + y
myClass.process(3)
myClass.anotherMethod
val func = (x: Int, y: Int) => {
println(s"Adding $x + $y")
x + y
}
println("Running " + runMe())
Thread.sleep(1000)
println("Past sleep!")
println(z)
}
y = 5
runMe()
}
class MyClass(www: Int => Int => Int) {
def process(x: Int) = www(x)(x)
}
|
ensime/scala-debugger
|
scala-debugger-test/src/main/scala/org/scaladebugger/test/Main.scala
|
Scala
|
apache-2.0
| 694 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.text
import org.apache.hadoop.fs.FileStatus
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.connector.write.{LogicalWriteInfo, Write, WriteBuilder}
import org.apache.spark.sql.execution.datasources.FileFormat
import org.apache.spark.sql.execution.datasources.v2.FileTable
import org.apache.spark.sql.types.{DataType, StringType, StructField, StructType}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
case class TextTable(
name: String,
sparkSession: SparkSession,
options: CaseInsensitiveStringMap,
paths: Seq[String],
userSpecifiedSchema: Option[StructType],
fallbackFileFormat: Class[_ <: FileFormat])
extends FileTable(sparkSession, options, paths, userSpecifiedSchema) {
override def newScanBuilder(options: CaseInsensitiveStringMap): TextScanBuilder =
TextScanBuilder(sparkSession, fileIndex, schema, dataSchema, options)
override def inferSchema(files: Seq[FileStatus]): Option[StructType] =
Some(StructType(Seq(StructField("value", StringType))))
override def newWriteBuilder(info: LogicalWriteInfo): WriteBuilder =
new WriteBuilder {
override def build(): Write = TextWrite(paths, formatName, supportsDataType, info)
}
override def supportsDataType(dataType: DataType): Boolean = dataType == StringType
override def formatName: String = "Text"
}
|
maropu/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/text/TextTable.scala
|
Scala
|
apache-2.0
| 2,207 |
package oxalis.security
import com.scalableminds.util.enumeration.ExtendedEnumeration
object TokenType extends ExtendedEnumeration {
type TokenType = Value
val Authentication, DataStore, ResetPassword = Value
}
|
scalableminds/webknossos
|
app/oxalis/security/TokenType.scala
|
Scala
|
agpl-3.0
| 217 |
package mesosphere.marathon
package api.v2
import javax.inject.Inject
import javax.servlet.http.HttpServletRequest
import javax.ws.rs._
import javax.ws.rs.container.{AsyncResponse, Suspended}
import javax.ws.rs.core.{Context, MediaType}
import mesosphere.marathon.api._
import mesosphere.marathon.core.appinfo.EnrichedTask
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.health.HealthCheckManager
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.core.task.tracker.InstanceTracker.InstancesBySpec
import mesosphere.marathon.plugin.auth._
import mesosphere.marathon.raml.AnyToRaml
import mesosphere.marathon.raml.TaskConversion._
import mesosphere.marathon.state.AbsolutePathId
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.util.toRichFuture
import scala.async.Async._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
@Consumes(Array(MediaType.APPLICATION_JSON))
@Produces(Array(MediaType.APPLICATION_JSON))
class AppTasksResource @Inject() (
instanceTracker: InstanceTracker,
taskKiller: TaskKiller,
healthCheckManager: HealthCheckManager,
val config: MarathonConf,
groupManager: GroupManager,
val authorizer: Authorizer,
val authenticator: Authenticator
)(implicit val executionContext: ExecutionContext)
extends AuthResource {
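  // Matches task-listing ids that end in "*", optionally preceded by a group path
  // (e.g. "/prod/*"); the captured prefix is the group path handled in indexJson below.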
val GroupTasks = """^((?:.+/)|)\\*$""".r
@GET
def indexJson(@PathParam("appId") id: String, @Context req: HttpServletRequest, @Suspended asyncResponse: AsyncResponse): Unit =
sendResponse(asyncResponse) {
async {
implicit val identity = await(authenticatedAsync(req))
val instancesBySpec = await(instanceTracker.instancesBySpec)
id match {
case GroupTasks(gid) =>
val groupPath = gid.toAbsolutePath
val maybeGroup = groupManager.group(groupPath)
await(withAuthorization(ViewGroup, maybeGroup, Future.successful(unknownGroup(groupPath))) { group =>
async {
val tasks = await(runningTasks(group.transitiveAppIds, instancesBySpec)).toRaml
ok(raml.TaskList(tasks))
}
})
case _ =>
val appId = id.toAbsolutePath
val maybeApp = groupManager.app(appId)
val tasks = await(runningTasks(Set(appId), instancesBySpec)).toRaml
withAuthorization(ViewRunSpec, maybeApp, unknownApp(appId)) { _ =>
ok(raml.TaskList(tasks))
}
}
}
}
def runningTasks(appIds: Iterable[AbsolutePathId], instancesBySpec: InstancesBySpec): Future[Vector[EnrichedTask]] = {
Future
.sequence(appIds.withFilter(instancesBySpec.hasSpecInstances).map { id =>
async {
val health = await(healthCheckManager.statuses(id))
instancesBySpec.specInstances(id).flatMap { i =>
EnrichedTask.fromInstance(i, healthCheckResults = health.getOrElse(i.instanceId, Nil))
}
}
})
.map(_.iterator.flatten.toVector)
}
@DELETE
def deleteMany(
@PathParam("appId") appId: String,
@QueryParam("host") host: String,
@QueryParam("scale") @DefaultValue("false") scale: Boolean = false,
@QueryParam("force") @DefaultValue("false") force: Boolean = false,
@QueryParam("wipe") @DefaultValue("false") wipe: Boolean = false,
@Context req: HttpServletRequest,
@Suspended asyncResponse: AsyncResponse
): Unit =
sendResponse(asyncResponse) {
async {
implicit val identity = await(authenticatedAsync(req))
val pathId = appId.toAbsolutePath
def findToKill(appTasks: Seq[Instance]): Seq[Instance] = {
Option(host).fold(appTasks) { hostname =>
appTasks.filter(_.hostname.contains(hostname) || hostname == "*")
}
}
if (scale && wipe) throw new BadRequestException("You cannot use scale and wipe at the same time.")
if (scale) {
val deploymentF = taskKiller.killAndScale(pathId, findToKill, force)
deploymentResult(await(deploymentF))
} else {
await(taskKiller.kill(pathId, findToKill, wipe).asTry) match {
case Success(instances) =>
val healthStatuses = await(healthCheckManager.statuses(pathId))
val enrichedTasks: Seq[EnrichedTask] = instances.flatMap { i =>
EnrichedTask.singleFromInstance(i, healthCheckResults = healthStatuses.getOrElse(i.instanceId, Nil))
}
ok(raml.TaskList(enrichedTasks.toRaml))
case Failure(PathNotFoundException(appId, version)) => unknownApp(appId, version)
}
}
}
}
@DELETE
@Path("{taskId}")
def deleteOne(
@PathParam("appId") appId: String,
@PathParam("taskId") id: String,
@QueryParam("scale") @DefaultValue("false") scale: Boolean = false,
@QueryParam("force") @DefaultValue("false") force: Boolean = false,
@QueryParam("wipe") @DefaultValue("false") wipe: Boolean = false,
@Context req: HttpServletRequest,
@Suspended asyncResponse: AsyncResponse
): Unit =
sendResponse(asyncResponse) {
async {
implicit val identity = await(authenticatedAsync(req))
val pathId = appId.toAbsolutePath
def findToKill(appTasks: Seq[Instance]): Seq[Instance] = {
try {
val instanceId = Task.Id.parse(id).instanceId
appTasks.filter(_.instanceId == instanceId)
} catch {
// the id can not be translated to an instanceId
case _: MatchError => Seq.empty
}
}
if (scale && wipe) throw new BadRequestException("You cannot use scale and wipe at the same time.")
if (scale) {
val deploymentF = taskKiller.killAndScale(pathId, findToKill, force)
deploymentResult(await(deploymentF))
} else {
await(taskKiller.kill(pathId, findToKill, wipe).asTry) match {
case Success(instances) =>
val healthStatuses = await(healthCheckManager.statuses(pathId))
instances.headOption match {
case None =>
unknownTask(id)
case Some(i) =>
val killedTask = EnrichedTask.singleFromInstance(i).get
val enrichedTask = killedTask.copy(healthCheckResults = healthStatuses.getOrElse(i.instanceId, Nil))
ok(raml.TaskSingle(enrichedTask.toRaml))
}
case Failure(PathNotFoundException(appId, version)) => unknownApp(appId, version)
}
}
}
}
}
|
mesosphere/marathon
|
src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala
|
Scala
|
apache-2.0
| 6,795 |
package org.sisioh.aws4s.s3.model
import com.amazonaws.services.s3.model.{ MultipartUpload, MultipartUploadListing }
import org.sisioh.aws4s.PimpedType
import scala.collection.JavaConverters._
object MultipartUploadListingFactory {
def create(): MultipartUploadListing = new MultipartUploadListing()
}
class RichMultipartUploadListing(val underlying: MultipartUploadListing)
extends AnyVal
with PimpedType[MultipartUploadListing] {
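  // Each xxxOpt accessor wraps a possibly-null getter of the underlying AWS SDK model in an
  // Option, and each xxxOpt_= setter unwraps it again via orNull before delegating.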
def bucketNameOpt = Option(underlying.getBucketName)
def bucketNameOpt_=(value: Option[String]): Unit =
underlying.setBucketName(value.orNull)
// ---
def keyMarkerOpt = Option(underlying.getKeyMarker)
def keyMarkerOpt_=(value: Option[String]): Unit =
underlying.setKeyMarker(value.orNull)
// ---
def delimiterOpt: Option[String] = Option(underlying.getDelimiter)
def delimiterOpt_=(value: Option[String]): Unit =
underlying.setDelimiter(value.orNull)
// ---
def prefixOpt: Option[String] = Option(underlying.getPrefix)
def prefixOpt_=(value: Option[String]): Unit =
underlying.setPrefix(value.orNull)
// ---
def uploadIdMarkerOpt: Option[String] = Option(underlying.getUploadIdMarker)
def uploadIdMarkerOpt_=(value: Option[String]): Unit =
underlying.setUploadIdMarker(value.orNull)
// ---
def maxUploadsOpt: Option[Int] = Option(underlying.getMaxUploads)
def maxUploadsOpt_=(value: Option[Int]): Unit =
underlying.setMaxUploads(value.map(_.asInstanceOf[java.lang.Integer]).orNull)
// ---
def encodingTypeOpt: Option[String] = Option(underlying.getEncodingType)
def encodingTypeOpt_=(value: Option[String]): Unit =
underlying.setEncodingType(value.orNull)
// ---
def truncatedOpt: Option[Boolean] = Option(underlying.isTruncated)
def truncatedOpt_=(value: Option[Boolean]): Unit =
underlying.setTruncated(value.map(_.asInstanceOf[java.lang.Boolean]).orNull)
// ---
def nextKeyMarkerOpt: Option[String] = Option(underlying.getNextKeyMarker)
def nextKeyMarkerOpt_=(value: Option[String]): Unit =
underlying.setNextKeyMarker(value.orNull)
// ---
def nextUploadIdMarkerOpt: Option[String] =
Option(underlying.getNextUploadIdMarker)
def nextUploadIdMarkerOpt_=(value: Option[String]): Unit =
underlying.setNextUploadIdMarker(value.orNull)
// ---
def multipartUploads: Seq[MultipartUpload] =
underlying.getMultipartUploads.asScala.toSeq
def multipartUploads_=(value: Seq[MultipartUpload]): Unit =
underlying.setMultipartUploads(value.asJava)
// ---
def commonPrefixes: Seq[String] = underlying.getCommonPrefixes.asScala.toSeq
def commonPrefixes_=(value: Seq[String]): Unit =
underlying.setCommonPrefixes(value.asJava)
}
|
sisioh/aws4s
|
aws4s-s3/src/main/scala/org/sisioh/aws4s/s3/model/RichMultipartUploadListing.scala
|
Scala
|
mit
| 2,724 |
package org.loom.geometry
/**
* PolygonType
* either straight line based or spline based
* @author brogan
*
*/
object PolygonType {
val Line_Polygon: Int = 0
val Spline_Polygon: Int = 1
}
|
brogan/Loom
|
src/org/loom/geometry/PolygonType.scala
|
Scala
|
gpl-3.0
| 200 |
package fakeapi.http
import akka.actor._
import akka.agent._
import akka.event.Logging
import fakeapi.common._
import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.math._
import scala.util.Random
case class ActivateSpike(duration: Int)
case class GetResponse()
case class TimesUp()
case class Spike(active: Boolean, duration: Int = 0)
trait Latency {
val perPathLatencyEnabled = config.getBoolean("fakeapi.latency.enabled")
//val customStatusCodesEnabled = config.getBoolean("fakeapi.status-codes.enabled")
//val customResponseBodiesEnabled = config.getBoolean("fakeapi.response-bodies.enabled")
val defaultMaxLatency = config.getInt("fakeapi.defaults.latency.max-duration")
//val defaultStatusCode = config.getInt("fakeapi.defaults.status-codes.code")
//val defaultResponseBody = config.getString("fakeapi.defaults.response")
//TODO::
//val defaultDistribution = config.getList("fakeapi.defaults.latency.distribution").asScala
val defaultMaxSpike = config.getInt("fakeapi.defaults.latency.spike")
val spikeEnabled = config.getBoolean("fakeapi.defaults.latency.spike-enabled")
//TODO::
//there will be a config object floating around to be used
val maxLatency = defaultMaxLatency
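  // Latency is drawn from a half-normal distribution: |N(0, 1)| scaled by maxLatency,
  // so most delays are well below maxLatency, with occasional larger outliers.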
def latency = {abs(Random.nextGaussian) * maxLatency}.toInt
}
class LatencyActor extends Actor with Latency{
private val log = Logging(context.system, this)
val spikeAgent = Agent(Spike(false))
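  // GetResponse either consumes a previously armed spike (using its fixed duration once and
  // disarming it) or draws a fresh random latency; a draw in the top ~5% of the range arms a
  // spike of defaultMaxSpike for the next request.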
def receive = {
case ActivateSpike(duration: Int) => spikeAgent send(Spike(true,duration))
case GetResponse =>
val sp = spikeAgent.apply()
val delay = sp.active match {
case true =>
spikeAgent send(Spike(false))
sp.duration
case false => {
val d = latency
if(d > 0.95*maxLatency && d <= maxLatency) spikeAgent send(Spike(true, defaultMaxSpike))
d
}
}
log.info("Lagging this request for {} milliseconds", delay)
context.system.scheduler.scheduleOnce(delay milliseconds, sender, TimesUp)(context.dispatcher, context.self)
case _ =>
}
}
|
cicika/fake-api
|
src/main/scala/fake-api/http/Latency.scala
|
Scala
|
gpl-2.0
| 2,140 |
package is.hail.backend
import is.hail.backend.spark.SparkBackend
import is.hail.expr.ir.{ExecuteContext, IR, SortField}
import is.hail.expr.ir.lowering.{TableStage, TableStageDependency}
import is.hail.linalg.BlockMatrix
import is.hail.types.BlockMatrixType
import is.hail.types.virtual.Type
import is.hail.utils._
import scala.reflect.ClassTag
object Backend {
private var id: Long = 0L
def nextID(): String = {
id += 1
s"hail_query_$id"
}
}
abstract class BroadcastValue[T] { def value: T }
abstract class BackendContext
abstract class Backend {
def defaultParallelism: Int
def broadcast[T: ClassTag](value: T): BroadcastValue[T]
def persist(backendContext: BackendContext, id: String, value: BlockMatrix, storageLevel: String): Unit
def unpersist(backendContext: BackendContext, id: String): Unit
def getPersistedBlockMatrix(backendContext: BackendContext, id: String): BlockMatrix
def getPersistedBlockMatrixType(backendContext: BackendContext, id: String): BlockMatrixType
def parallelizeAndComputeWithIndex(
backendContext: BackendContext,
collection: Array[Array[Byte]],
dependency: Option[TableStageDependency] = None)(f: (Array[Byte], Int) => Array[Byte]): Array[Array[Byte]]
def stop(): Unit
def asSpark(op: String): SparkBackend =
fatal(s"${ getClass.getSimpleName }: $op requires SparkBackend")
def lowerDistributedSort(ctx: ExecuteContext, stage: TableStage, sortFields: IndexedSeq[SortField], relationalLetsAbove: Map[String, IR]): TableStage
}
|
danking/hail
|
hail/src/main/scala/is/hail/backend/Backend.scala
|
Scala
|
mit
| 1,526 |
package leibniz
import leibniz.inhabitance.{Inhabited, Proposition}
import leibniz.internal.Unsafe
/**
 * The data type `Is` is the encoding of Leibnitz’ law which states that
 * if `a` and `b` are identical then they must have identical properties.
 * Leibnitz’ original definition reads as follows:
 *   a ≡ b = ∀ f .f a ⇔ f b
 * and can be proven to be equivalent to:
 *   a ≡ b = ∀ f .f a → f b
*
* The `Is` data type encodes true type equality, since the identity
* function is the only non-diverging conversion function that can be used
* as an implementation of the `subst` method assuming that we do not break
* parametricity. As the substitution function has to work for any `F[_]`, it
* cannot make assumptions about the structure of `F[_]`, making it impossible
* to construct a value of type `F[A]` or to access values of type `A` that
* may be stored inside a value of type `F[A]`. Hence it is impossible for
* a substitution function to alter the value it takes as argument.
*
* Not taking into account the partial functions that never terminate
* (infinite loops), functions returning `null`, or throwing exceptions,
* the identity function is the only function that can be used in place of
* `subst` to construct a value of type `Is[A, B]`.
*
 * The existence of a value of type `Is[A, B]` now implies that a ≡ b,
* since the conversion function, that converts an `A` into a `B`, must be
* the identity function.
*
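 * A small usage sketch (illustrative only; the helper and value names below are not
 * part of the library):
 * {{{
 *   def substList[A, B](ev: A === B, as: List[A]): List[B] =
 *     ev.subst[List](as)          // rewrite List[A] into List[B] via subst
 *
 *   val same: Int === Int = Is.refl[Int]
 *   val ints: List[Int] = substList(same, List(1, 2, 3))
 * }}}
 *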
* This technique was first used in
* [[http://portal.acm.org/citation.cfm?id=583852.581494
* Typing Dynamic Typing]] (Baars and Swierstra, ICFP 2002).
*
* @see [[===]] `A === B` is a type synonym to `Is[A, B]`
* @see [[http://typelevel.org/blog/2014/09/20/higher_leibniz.html
* Higher Leibniz]]
*/
sealed abstract class Is[A, B] private[Is]() { ab =>
import Is._
/**
* To create an instance of `Is[A, B]` you must show that for every
* choice of `F[_]` you can convert `F[A]` to `F[B]`.
*/
def subst[F[_]](fa: F[A]): F[B]
/**
* Substitution on identity brings about a direct coercion function of the
* same form that `=:=` provides.
*
* @see [[coerce]]
*/
def apply(a: A): B = coerce(a)
/**
* Substitution on identity brings about a direct coercion function of the
* same form that [[=:=]] provides.
*
* @see [[apply]]
*/
def coerce(a: A): B = {
type f[x] = x
subst[f](a)
}
/**
* Equality is transitive relation and its witnesses can be composed
* in a chain much like functions.
*
* @see [[compose]]
*/
final def andThen[C](bc: B === C): A === C = {
type f[b] = A === b
bc.subst[f](ab)
}
/**
* Equality is transitive relation and its witnesses can be composed
* in a chain much like functions.
*
* @see [[andThen]]
*/
final def compose[Z](za: Z === A): Z === B =
za.andThen(ab)
/**
* Equality is symmetric relation and therefore can be flipped around.
* Flipping is its own inverse, so `x.flip.flip == x`.
*/
final def flip: B === A = {
type f[a] = a === A
subst[f](refl)
}
/**
* Given `A === B` we can prove that `F[A] === F[B]`.
*
* @see [[Is.lift]]
* @see [[Is.lift2]]
*/
final def lift[F[_]]: F[A] === F[B] =
Is.lift[F, A, B](ab)
/**
* Given `A === B` and `I === J` we can prove that `F[A, I] === F[B, J]`.
*
* This method allows you to compose two `===` values in infix manner:
* {{{
* def either(ab: A === B, ij: I === J): Either[A, I] === Either[B, J] =
* ab lift2[Either] ij
* }}}
*
* @see [[Is.lift]]
* @see [[Is.lift2]]
* @see [[Is.lift3]]
*/
def lift2[F[_, _]]: PartiallyAppliedLift2[F] =
new PartiallyAppliedLift2[F]
final class PartiallyAppliedLift2[F[_, _]] {
def apply[I, J](ij: I === J): F[A, I] === F[B, J] =
Is.lift2[F, A, B, I, J](ab, ij)
}
/**
* Given `A === B` we can convert `(X => A)` into `(X => B)`.
*/
def onF[X](fa: X => A): X => B = {
type f[a] = X => a
subst[f](fa)
}
/**
* Given `A === B`, prove `A =:= B`.
*/
def toPredef: A =:= B = {
type f[a] = A =:= a
subst[f](implicitly[A =:= A])
}
/**
* Given `A === B`, make an `Iso[A, B]`.
*/
def toIso: Iso[A, B] =
subst[Iso[A, ?]](Iso.id[A])
/**
* Given `A === B`, prove `A <~< B`.
*/
def toAs: A <~< B = {
type f[a] = A <~< a
subst[f](As.refl[A])
}
}
object Is {
def apply[A, B](implicit ev: A Is B): A Is B = ev
final case class Refl[A]() extends Is[A, A] {
def subst[F[_]](fa: F[A]): F[A] = fa
}
/**
* Equality is reflexive relation.
*/
implicit def refl[A]: A === A = new Refl[A]()
/**
* Given `A === B` we can prove that `F[A] === F[B]`.
*
* @see [[lift2]]
* @see [[lift3]]
*/
def lift[F[_], A, B]
(ab: A === B): F[A] === F[B] = {
    type f[α] = F[A] === F[α]
ab.subst[f](refl)
}
/**
* Given `A === B` and `I === J` we can prove that `F[A, I] === F[B, J]`.
*
* @see [[lift]]
* @see [[lift3]]
*/
def lift2[F[_, _], A, B, I, J]
(ab: A === B, ij: I === J): F[A, I] === F[B, J] = {
    type f1[α] = F[A, I] === F[α, I]
    type f2[α] = F[A, I] === F[B, α]
ij.subst[f2](ab.subst[f1](refl))
}
/**
* Given `A === B`, `I === J`, and `M === N` we can prove that
* `F[A, I] === F[B, J]`.
*
* @see [[lift]]
* @see [[lift2]]
*/
def lift3[F[_, _, _], A, B, I, J, M, N]
(ab: A === B, ij: I === J, mn: M === N): F[A, I, M] === F[B, J, N] = {
    type f1[α] = F[A, I, M] === F[α, I, M]
    type f2[α] = F[A, I, M] === F[B, α, M]
    type f3[α] = F[A, I, M] === F[B, J, α]
mn.subst[f3](ij.subst[f2](ab.subst[f1](refl)))
}
/**
* It can be convenient to convert a [[=:=]] value into a `Leibniz` value.
* This is not strictly valid as while it is almost certainly true that
* `A =:= B` implies `A === B` it is not the case that you can create
* evidence of `A === B` except via a coercion. Use responsibly.
*/
def fromPredef[A, B](eq: A =:= B): A === B = Axioms.predefEq(eq)
implicit def proposition[A, B]: Proposition[Is[A, B]] =
    (p: ¬¬[Is[A, B]]) => Axioms.isConsistency[A, B](p.run)
  def lem[A, B]: ¬¬[Either[A =!= B, A === B]] = Inhabited.lem[A === B].map {
case Right(eqv) => Right(eqv)
case Left(neqv) => Left(WeakApart(neqv))
}
def consistent[A, B](f: (A =!= B) => Void): A === B =
proposition[A, B].proved(Inhabited.witness(a => f(WeakApart(a))))
}
|
alexknvl/leibniz
|
src/main/scala/leibniz/Is.scala
|
Scala
|
mit
| 6,594 |
package com.plasmaconduit.framework.string
import scala.util.matching.Regex
sealed trait StringMatcher {
def matches(n: String): Boolean
}
object StringMatcher {
import scala.language.implicitConversions
implicit def toLiteralStringMatcher(s: String): StringMatcher = {
StringLiteralMatcher(s)
}
implicit def toRegexStringMatcher(r: Regex): StringMatcher = {
StringRegexMatcher(r)
}
}
final case class StringLiteralMatcher(s: String) extends StringMatcher {
def matches(n: String) = s == n
}
final case class StringRegexMatcher(r: Regex) extends StringMatcher {
def matches(n: String) = r.findFirstMatchIn(n).isDefined
}
|
plasmaconduit/plasmaconduit-framework
|
src/main/scala/com/plasmaconduit/framework/string/StringMatcher.scala
|
Scala
|
mit
| 653 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import org.scalactic.Equality
import org.scalactic.{Pass, Fail}
import org.scalactic.{Good, Bad}
import org.scalatest._
import org.scalatest.prop.GeneratorDrivenPropertyChecks
import scala.collection.mutable.WrappedArray
import OptionValues._
import scala.util.{Failure, Success, Try}
trait NegZIntSpecSupport {
implicit def tryEquality[T]: Equality[Try[T]] = new Equality[Try[T]] {
override def areEqual(a: Try[T], b: Any): Boolean = a match {
      case Success(double: Double) if double.isNaN => // This is because in Scala.js x/0 results in NaN rather than ArithmeticException as on the JVM, and we need to make sure Success(NaN) == Success(NaN) is true to pass the test.
b match {
case Success(bDouble: Double) if bDouble.isNaN => true
case _ => false
}
// I needed this because with GenDrivenPropertyChecks, got:
// [info] - should offer a '%' method that is consistent with Int *** FAILED ***
// [info] Success(NaN) did not equal Success(NaN) (PosIntExperiment.scala:498)
case Success(float: Float) if float.isNaN =>
b match {
case Success(bFloat: Float) if bFloat.isNaN => true
case _ => false
}
case _: Success[_] => a == b
case Failure(ex) => b match {
case _: Success[_] => false
case Failure(otherEx) => ex.getClass == otherEx.getClass && ex.getMessage == otherEx.getMessage
case _ => false
}
}
}
}
class NegZIntSpec extends funspec.AnyFunSpec with matchers.should.Matchers with GeneratorDrivenPropertyChecks with NegZIntSpecSupport {
describe("A NegZInt") {
describe("should offer a from factory method that") {
it("returns Some[NegZInt] if the passed Int is lesser than or equal to 0") {
NegZInt.from(0).value.value shouldBe 0
NegZInt.from(-50).value.value shouldBe -50
NegZInt.from(-100).value.value shouldBe -100
}
it("returns None if the passed Int is NOT lesser than or equal to 0") {
NegZInt.from(1) shouldBe None
NegZInt.from(99) shouldBe None
}
}
describe("should offer an ensuringValid factory method that") {
it("returns NegZInt if the passed Int is lesser than or equal to 0") {
NegZInt.ensuringValid(0).value shouldBe 0
NegZInt.ensuringValid(-50).value shouldBe -50
NegZInt.ensuringValid(-100).value shouldBe -100
}
it("throws AssertionError if the passed Int is NOT lesser than or equal to 0") {
an [AssertionError] should be thrownBy NegZInt.ensuringValid(1)
an [AssertionError] should be thrownBy NegZInt.ensuringValid(99)
}
}
describe("should offer a tryingValid factory method that") {
import TryValues._
it("returns a NegZInt wrapped in a Success if the passed Int is lesser than or equal 0") {
NegZInt.tryingValid(-0).success.value.value shouldBe -0
NegZInt.tryingValid(-50).success.value.value shouldBe -50
NegZInt.tryingValid(-100).success.value.value shouldBe -100
}
it("returns an AssertionError wrapped in a Failure if the passed Int is greater than 0") {
NegZInt.tryingValid(1).failure.exception shouldBe an [AssertionError]
NegZInt.tryingValid(99).failure.exception shouldBe an [AssertionError]
}
}
describe("should offer a passOrElse factory method that") {
it("returns a Pass if the given Int is lesser than or equal 0") {
NegZInt.passOrElse(0)(i => i) shouldBe Pass
NegZInt.passOrElse(-50)(i => i) shouldBe Pass
NegZInt.passOrElse(-100)(i => i) shouldBe Pass
}
it("returns an error value produced by passing the given Int to the given function if the passed Int is greater than 0, wrapped in a Fail") {
NegZInt.passOrElse(1)(i => i) shouldBe Fail(1)
NegZInt.passOrElse(99)(i => i.toLong + 3L) shouldBe Fail(102L)
}
}
describe("should offer a goodOrElse factory method that") {
it("returns a NegZInt wrapped in a Good if the given Int is lesser than or equal 0") {
NegZInt.goodOrElse(-0)(i => i) shouldBe Good(NegZInt(-0))
NegZInt.goodOrElse(-50)(i => i) shouldBe Good(NegZInt(-50))
NegZInt.goodOrElse(-100)(i => i) shouldBe Good(NegZInt(-100))
}
it("returns an error value produced by passing the given Int to the given function if the passed Int is greater than 0, wrapped in a Bad") {
NegZInt.goodOrElse(1)(i => i) shouldBe Bad(1)
NegZInt.goodOrElse(99)(i => i.toLong + 3L) shouldBe Bad(102L)
}
}
describe("should offer a rightOrElse factory method that") {
it("returns a NegZInt wrapped in a Right if the given Int is lesser than or equal 0") {
NegZInt.rightOrElse(0)(i => i) shouldBe Right(NegZInt(0))
NegZInt.rightOrElse(-50)(i => i) shouldBe Right(NegZInt(-50))
NegZInt.rightOrElse(-100)(i => i) shouldBe Right(NegZInt(-100))
}
it("returns an error value produced by passing the given Int to the given function if the passed Int is greater than 0, wrapped in a Left") {
NegZInt.rightOrElse(1)(i => i) shouldBe Left(1)
NegZInt.rightOrElse(99)(i => i.toLong + 3L) shouldBe Left(102L)
}
}
describe("should offer an isValid predicate method that") {
it("returns true if the passed Int is lesser than or equal to 0") {
NegZInt.isValid(-50) shouldBe true
NegZInt.isValid(-100) shouldBe true
NegZInt.isValid(0) shouldBe true
NegZInt.isValid(-0) shouldBe true
NegZInt.isValid(1) shouldBe false
NegZInt.isValid(99) shouldBe false
}
}
describe("should offer a fromOrElse factory method that") {
it("returns a NegZInt if the passed Int is lesser than or equal to 0") {
NegZInt.fromOrElse(-50, NegZInt(-42)).value shouldBe -50
NegZInt.fromOrElse(-100, NegZInt(-42)).value shouldBe -100
NegZInt.fromOrElse(0, NegZInt(-42)).value shouldBe 0
}
it("returns a given default if the passed Int is NOT greater than 0") {
NegZInt.fromOrElse(1, NegZInt(-42)).value shouldBe -42
NegZInt.fromOrElse(99, NegZInt(-42)).value shouldBe -42
}
}
it("should offer MaxValue and MinValue factory methods") {
NegZInt.MaxValue shouldEqual NegZInt.from(0).get
NegZInt.MinValue shouldEqual NegZInt.from(Int.MinValue).get
}
it("should be sortable") {
val xs = List(NegZInt(-2), NegZInt(-0), NegZInt(-1), NegZInt(-3))
xs.sorted shouldEqual List(NegZInt(-3), NegZInt(-2), NegZInt(-1), NegZInt(0))
}
describe("when created with apply method") {
it("should compile when -8 is passed in") {
"NegZInt(-8)" should compile
NegZInt(-8).value shouldEqual -8
}
it("should compile when 0 is passed in") {
"NegZInt(0)" should compile
NegZInt(0).value shouldEqual 0
}
it("should not compile when 8 is passed in") {
"NegZInt(8)" shouldNot compile
}
it("should not compile when x is passed in") {
val x: Int = -8
"NegZInt(x)" shouldNot compile
}
}
describe("when specified as a plain-old Int") {
def takesNegZInt(pos: NegZInt): Int = pos.value
it("should compile when -8 is passed in") {
"takesNegZInt(-8)" should compile
takesNegZInt(-8) shouldEqual -8
}
it("should compile when 0 is passed in") {
"takesNegZInt(0)" should compile
}
it("should not compile when 8 is passed in") {
"takesNegZInt(8)" shouldNot compile
}
it("should not compile when x is passed in") {
val x: Int = -8
"takesNegZInt(x)" shouldNot compile
}
}
it("should offer a unary ~ method that is consistent with Int") {
forAll { (pzint: NegZInt) =>
(~pzint) shouldEqual (~(pzint.toInt))
}
}
it("should offer a unary + method that is consistent with Int") {
forAll { (p: NegZInt) =>
(+p).toInt shouldEqual (+(p.toInt))
}
}
it("should offer a unary - method that returns PosZInt") {
forAll { (p: NegZInt) =>
(-p) shouldEqual (-(p.toInt))
}
}
it("should offer << methods that are consistent with Int") {
forAll { (pzint: NegZInt, shift: Int) =>
pzint << shift shouldEqual pzint.toInt << shift
}
forAll { (pzint: NegZInt, shift: Long) =>
pzint << shift shouldEqual pzint.toInt << shift
}
}
it("should offer >>> methods that are consistent with Int") {
forAll { (pzint: NegZInt, shift: Int) =>
pzint >>> shift shouldEqual pzint.toInt >>> shift
}
forAll { (pzint: NegZInt, shift: Long) =>
pzint >>> shift shouldEqual pzint.toInt >>> shift
}
}
it("should offer >> methods that are consistent with Int") {
forAll { (pzint: NegZInt, shift: Int) =>
pzint >> shift shouldEqual pzint.toInt >> shift
}
forAll { (pzint: NegZInt, shift: Long) =>
pzint >> shift shouldEqual pzint.toInt >> shift
}
}
it("should offer a '|' method that is consistent with Int") {
forAll { (pzint: NegZInt, byte: Byte) =>
(pzint | byte) shouldEqual (pzint.toInt | byte)
}
forAll { (pzint: NegZInt, short: Short) =>
(pzint | short) shouldEqual (pzint.toInt | short)
}
forAll { (pzint: NegZInt, char: Char) =>
(pzint | char) shouldEqual (pzint.toInt | char)
}
forAll { (pzint: NegZInt, int: Int) =>
(pzint | int) shouldEqual (pzint.toInt | int)
}
forAll { (pzint: NegZInt, long: Long) =>
(pzint | long) shouldEqual (pzint.toInt | long)
}
}
it("should offer an '&' method that is consistent with Int") {
forAll { (pzint: NegZInt, byte: Byte) =>
(pzint & byte) shouldEqual (pzint.toInt & byte)
}
forAll { (pzint: NegZInt, short: Short) =>
(pzint & short) shouldEqual (pzint.toInt & short)
}
forAll { (pzint: NegZInt, char: Char) =>
(pzint & char) shouldEqual (pzint.toInt & char)
}
forAll { (pzint: NegZInt, int: Int) =>
(pzint & int) shouldEqual (pzint.toInt & int)
}
forAll { (pzint: NegZInt, long: Long) =>
(pzint & long) shouldEqual (pzint.toInt & long)
}
}
it("should offer an '^' method that is consistent with Int") {
forAll { (pzint: NegZInt, byte: Byte) =>
(pzint ^ byte) shouldEqual (pzint.toInt ^ byte)
}
forAll { (pzint: NegZInt, char: Char) =>
(pzint ^ char) shouldEqual (pzint.toInt ^ char)
}
forAll { (pzint: NegZInt, short: Short) =>
(pzint ^ short) shouldEqual (pzint.toInt ^ short)
}
forAll { (pzint: NegZInt, int: Int) =>
(pzint ^ int) shouldEqual (pzint.toInt ^ int)
}
forAll { (pzint: NegZInt, long: Long) =>
(pzint ^ long) shouldEqual (pzint.toInt ^ long)
}
}
it("should offer 'min' and 'max' methods that are consistent with Int") {
forAll { (pzint1: NegZInt, pzint2: NegZInt) =>
pzint1.max(pzint2).toInt shouldEqual pzint1.toInt.max(pzint2.toInt)
pzint1.min(pzint2).toInt shouldEqual pzint1.toInt.min(pzint2.toInt)
}
}
it("should offer a 'toBinaryString' method that is consistent with Int") {
forAll { (pzint: NegZInt) =>
pzint.toBinaryString shouldEqual pzint.toInt.toBinaryString
}
}
it("should offer a 'toHexString' method that is consistent with Int") {
forAll { (pzint: NegZInt) =>
pzint.toHexString shouldEqual pzint.toInt.toHexString
}
}
it("should offer a 'toOctalString' method that is consistent with Int") {
forAll { (pzint: NegZInt) =>
pzint.toOctalString shouldEqual pzint.toInt.toOctalString
}
}
it("should offer 'to' and 'until' methods that are consistent with Int") {
// The reason we need this is that in Scala 2.10, the equals check (used by shouldEqual below) will call range.length
// and it'll cause IllegalArgumentException to be thrown when we do the Try(x) shouldEqual Try(y) assertion below,
// while starting from scala 2.11 the equals call implementation does not call .length.
// To make the behavior consistent for all scala versions, we explicitly call .length for all returned Range, and
// shall it throws IllegalArgumentException, it will be wrapped as Failure for the Try.
def ensuringValid(range: Range): Range = {
range.length // IllegalArgumentException will be thrown if it is an invalid range, this will turn the Success to Failure for Try
range
}
forAll { (pzint: NegZInt, end: Int, step: Int) =>
        Try(ensuringValid(pzint.to(end))) shouldEqual Try(ensuringValid(pzint.toInt.to(end)))
Try(ensuringValid(pzint.to(end, step))) shouldEqual Try(ensuringValid(pzint.toInt.to(end, step)))
Try(ensuringValid(pzint.until(end))) shouldEqual Try(ensuringValid(pzint.toInt.until(end)))
Try(ensuringValid(pzint.until(end, step))) shouldEqual Try(ensuringValid(pzint.toInt.until(end, step)))
}
}
it("should offer an ensuringValid method that takes an Int => Int, throwing AssertionError if the result is invalid") {
NegZInt(-33).ensuringValid(_ + 1) shouldEqual NegZInt(-32)
an [AssertionError] should be thrownBy { NegZInt.MaxValue.ensuringValid(_ + 1) }
}
}
}
|
scalatest/scalatest
|
jvm/scalactic-test/src/test/scala/org/scalactic/anyvals/NegZIntSpec.scala
|
Scala
|
apache-2.0
| 14,090 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.queryapitests.scalaxml
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import nl.ebpi.yaidom.convert.ScalaXmlConversions
import nl.ebpi.yaidom.queryapitests.AbstractXbrlInstanceQueryTest
import nl.ebpi.yaidom.resolved
import nl.ebpi.yaidom.scalaxml.ScalaXmlElem
import nl.ebpi.yaidom.scalaxml.ScalaXmlNode
/**
* XBRL instance query test case for Scala XML wrapper elements.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class XbrlInstanceQueryTest extends AbstractXbrlInstanceQueryTest {
final type E = ScalaXmlElem
protected final val xbrlInstance: ScalaXmlElem = {
val is = classOf[XbrlInstanceQueryTest].getResourceAsStream("/nl/ebpi/yaidom/queryapitests/sample-xbrl-instance.xml")
val xmlParser = scala.xml.parsing.ConstructingParser.fromSource(scala.io.Source.fromInputStream(is), true)
val root: ScalaXmlElem = ScalaXmlNode.wrapElement(xmlParser.document().docElem.asInstanceOf[scala.xml.Elem])
root
}
protected final def toResolvedElem(elem: E): resolved.Elem =
resolved.Elem(ScalaXmlConversions.convertToElem(elem.wrappedNode))
}
|
EBPI/yaidom
|
src/test/scala/nl/ebpi/yaidom/queryapitests/scalaxml/XbrlInstanceQueryTest.scala
|
Scala
|
apache-2.0
| 1,741 |
package org.lolczak.dcg.parser.language
import org.lolczak.dcg.model.Grammar
import scalaz.\/
trait Completer {
  def complete(chart: Chart)(edge: Passive): Set[Active \/ PassiveCandidate]
  def completeEmpty(grammar: Grammar)(edge: Active): Set[Active \/ PassiveCandidate]
}
|
lolczak/dcg
|
src/main/scala/org/lolczak/dcg/parser/language/Completer.scala
|
Scala
|
apache-2.0
| 282 |
/*
* Copyright (c) 2021 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.couchbase.spark.kv
import com.couchbase.client.scala.kv.{LookupInOptions, LookupInResult, LookupInSpec}
import com.couchbase.spark.config.{CouchbaseConfig, CouchbaseConnection}
import com.couchbase.spark.{DefaultConstants, Keyspace}
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.{Partition, SparkContext, TaskContext}
import reactor.core.scala.publisher.SFlux
class LookupInRDD(@transient private val sc: SparkContext, val docs: Seq[LookupIn], val keyspace: Keyspace, lookupInOptions: LookupInOptions = null)
extends RDD[LookupInResult](sc, Nil)
with Logging {
private val globalConfig = CouchbaseConfig(sparkContext.getConf)
private val bucketName = globalConfig.implicitBucketNameOr(this.keyspace.bucket.orNull)
override def compute(split: Partition, context: TaskContext): Iterator[LookupInResult] = {
val partition = split.asInstanceOf[KeyValuePartition]
val connection = CouchbaseConnection()
val cluster = connection.cluster(globalConfig)
    val scopeName = globalConfig
      .implicitScopeNameOr(this.keyspace.scope.orNull)
      .getOrElse(DefaultConstants.DefaultScopeName)
val collectionName = globalConfig
.implicitCollectionName(this.keyspace.collection.orNull)
.getOrElse(DefaultConstants.DefaultCollectionName)
val collection = cluster.bucket(bucketName).scope(scopeName).collection(collectionName).reactive
val options = if (this.lookupInOptions == null) {
LookupInOptions()
} else {
this.lookupInOptions
}
logDebug(s"Performing bulk LookupIn fetch against ids ${partition.ids} with options $options")
SFlux
.fromIterable(docs)
.filter(doc => partition.ids.contains(doc.id))
.flatMap(doc => collection.lookupIn(doc.id, doc.specs, options))
.collectSeq()
.block()
.iterator
}
override protected def getPartitions: Array[Partition] = {
val partitions = KeyValuePartition
.partitionsForIds(docs.map(d => d.id), CouchbaseConnection(), globalConfig, bucketName)
.asInstanceOf[Array[Partition]]
logDebug(s"Calculated KeyValuePartitions for LookupIn operation ${partitions.mkString("Array(", ", ", ")")}")
partitions
}
override protected def getPreferredLocations(split: Partition): Seq[String] = {
split.asInstanceOf[KeyValuePartition].location match {
case Some(l) => Seq(l)
case _ => Nil
}
}
}
|
couchbaselabs/couchbase-spark-connector
|
src/main/scala/com/couchbase/spark/kv/LookupInRDD.scala
|
Scala
|
apache-2.0
| 3,049 |
/*
* Copyright 2012 Sanjin Sehic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.ac.tuwien.infosys
package amber
package util
trait Logging {
protected def logger: LoggerFactory
trait LoggerFactory {
def create(name: Logger.Name): Logger
}
}
object Logging {
sealed trait Level
object Level {
case object Debug extends Level
case object Info extends Level
case object Warn extends Level
case object Error extends Level
}
trait Delegator extends Logging {
protected def logging: Logging
override protected object logger extends LoggerFactory {
override def create(name: Logger.Name) = logging.logger.create(name)
}
}
}
|
tuwiendsg/CAPA
|
core/src/scala/util/Logging.scala
|
Scala
|
apache-2.0
| 1,208 |
package bignum.benchmark
import com.google.caliper.Param
import com.google.caliper.{Runner => CaliperRunner}
import bignum.BigInt2
object ShiftBenchmark {
def main(args: Array[String]) {
CaliperRunner.main(classOf[ShiftBenchmark], args: _*)
}
}
class ShiftBenchmark extends SimpleScalaBenchmark {
@Param(Array("200", "500", "1000", "5000", "10000", "50000"))
val param: Int = 0
var bigint = BigInt("0")
var bigint2 = BigInt2("0")
var biginteger = new java.math.BigInteger("0")
var shift = 0
override def setUp() {
val rng = new java.util.Random(System.currentTimeMillis)
val a = new java.math.BigInteger(param, rng).toString
shift = rng.nextInt(param)
bigint = BigInt(a)
bigint2 = BigInt2(a)
biginteger = new java.math.BigInteger(a)
}
def timeBigIntLeft(reps: Int) = repeat(reps) {
var result = bigint
var i = 0
while (i < 2) {
result = result << shift
i = i + 1
}
result
}
def timeBigInt2Left(reps: Int) = repeat(reps) {
var result = bigint2
var i = 0
while (i < 2) {
result = result << shift
i = i + 1
}
result
}
def timeBigIntegerLeft(reps: Int) = repeat(reps) {
var result = biginteger
var i = 0
while (i < 2) {
result = result.shiftLeft(shift)
i = i + 1
}
result
}
}
|
techaddict/bignum
|
benchmark/src/main/scala/benchmark/ShiftLeft.scala
|
Scala
|
mit
| 1,339 |
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.tool
import shapeless._
package types {
trait TypesPackage {
implicit def hlistToA[H <: HList, A](h: H)(implicit s: ops.hlist.Selector[H, A]): A = s(h)
}
}
package object types extends TypesPackage
|
openmole/openmole
|
openmole/third-parties/org.openmole.tool.types/src/main/scala/org/openmole/tool/types/package.scala
|
Scala
|
agpl-3.0
| 933 |
def foo(x: Int*) = 1
/*start*/foo _/*end*/
//(Seq[Int]) => Int
|
ilinum/intellij-scala
|
testdata/typeInference/bugs5/SCL3213.scala
|
Scala
|
apache-2.0
| 62 |
package com.github.tarao
package slickjdbc
package interpolation
import interpolation.{Literal => LiteralParameter}
import scala.reflect.macros.blackbox.Context
private[interpolation] class MacroTreeBuilder(val c: Context) {
import c.universe._
import scala.collection.mutable.ListBuffer
import slick.jdbc.SQLActionBuilder
import slick.sql.SqlAction
import slick.dbio.{NoStream, Effect}
def abort(msg: String) = c.abort(c.enclosingPosition, msg)
// Retrieve string parts of interpolation from caller context
lazy val rawQueryParts: List[String] = {
// match SQLInterpolationImpl(StringContext(strArg: _*)).sql(params: _*)
val Apply(Select(Apply(_, List(Apply(_, strArg))), _), params) =
c.macroApplication
strArg map {
case Literal(Constant(x: String)) => x
case _ => abort("The interpolation must be a string literal")
}
}
private val NS = q"com.github.tarao.slickjdbc"
private val interpolation = q"$NS.interpolation"
private lazy val ListRejected =
tq"""$interpolation.${TypeName("ListRejected")}"""
private lazy val OptionRejected =
tq"""$interpolation.${TypeName("OptionRejected")}"""
private lazy val EitherRejected =
tq"""$interpolation.${TypeName("EitherRejected")}"""
private lazy val ValidParameter =
tq"""$interpolation.${TypeName("ValidParameter")}"""
private lazy val ValidProduct =
tq"""$interpolation.${TypeName("ValidProduct")}"""
private lazy val ValidRefinedNonEmpty =
tq"""$interpolation.${TypeName("ValidRefinedNonEmpty")}"""
private def ensure(required: Type, base: Tree = ValidParameter) =
q"implicitly[$base[$required]]"
private val ToPlaceholder =
tq"""$interpolation.${TypeName("ToPlaceholder")}"""
private def toPlaceholder(target: Type, base: Tree = ToPlaceholder) =
q"implicitly[$base[$target]]"
private val Translators =
tq"Iterable[$NS.query.Translator]"
def invokeInterpolation(param: c.Expr[Any]*): Tree = {
val stats = new ListBuffer[Tree]
// Additional features of SQL interpolation by preprocessing
// string parts and arguments. The interpolation translates to
// another (expanded) interpolation call of
// `ActionBasedSQLInterpolation`.
//
// [Embedding literals]
//
// A parameter of type `Literal` is embedded as a literal string
// by using "#" expansion.
//
// val literal = new Literal { def toString = "string" }
// sql"some text with a ${literal} value"
// ~> SQLInterpolationImpl(
// StringContext("some text with a ", " value")
// ).sql(literal)
// ~> ActionBasedSQLInterpolation(
// StringContext("some text with a #", " value")
// ).sql(literal)
// => SQLActionBuilder(Seq("some text with a string value"), ...)
//
// [Embedding non-empty lists]
//
// A parameter of type `NonEmpty[Any]` is embedded with repeated
// "?"s. "?"s (except the last one) are inserted as a literal
// string parameter not as a literal string part of
// `StringContext` since the number of elements is not known at
// compile time.
//
// val list = NonEmpty(1, 2, 3)
// sql"some text with a ${list} value"
// ~> SQLInterpolationImpl(
// StringContext("some text with a (#", "", ")#", " value")
// ).sql(new Placeholders(list), list, "")
// ~> ActionBasedSQLInterpolation(
// StringContext("some text with a (#", "", ")#", " value")
// ).sql(new Placeholders(list), list, "")
// => ActionBasedSQLInterpolation(
// StringContext("some text with a (#", "", ")#", " value")
// ).sql("?, ?, ", list, "")
// => SQLActionBuilder(Seq("some text with a (?, ?, ?) value"), ...)
//
// Note that the third "?" is inserted by a conversion of argument
// `list`.
val queryParts = new ListBuffer[Tree]
val params = new ListBuffer[c.Expr[Any]]
def pushLiteral(literal: String) = {
params.append(c.Expr(q""" ${""} """))
queryParts.append(q""" ${literal + "#"} """)
}
def mayCompleteParen(param: c.Expr[Any], s: String)(block: => Unit) = {
if (!s.matches("""(?s).*\\(\\s*""")) {
params.append(c.Expr(q""" ${toPlaceholder(param.actualType)}.open """))
queryParts.append(q""" ${"#"} """)
block
params.append(c.Expr(q""" ${toPlaceholder(param.actualType)}.close """))
queryParts.append(q""" ${"#"} """)
} else block
}
param.toList.iterator.zip(rawQueryParts.iterator).foreach { zipped =>
val (param, s, literal) = zipped match { case (param, s) => {
val literal = s.reverseIterator.takeWhile(_ == '#').length % 2 == 1
if (param.actualType <:< typeOf[LiteralParameter])
(param, s + { if (literal) "" else "#" }, true)
else (param, s, literal)
} }
if (!literal) {
pushLiteral(s)
mayCompleteParen(param, s) {
// for "?, ?, ?, ..." except the last one
params.append(c.Expr(q"""
${toPlaceholder(param.actualType)}
.apply(${param})
.toTopLevelString
"""))
queryParts.append(q""" ${"#"} """)
// for the last "?" (inserted by ActionBasedSQLInterpolation)
params.append(param)
queryParts.append(q""" ${""} """)
}
} else {
params.append(param)
queryParts.append(q"$s")
}
if (!literal) {
// Insert parameter type checker for a fine type error message.
// The order is significant since there can be a type matches
// with multiple conditions for example an
// Option[NonEmpty[Any]] is also a Product.
stats.append(ensure(param.actualType, ValidRefinedNonEmpty))
stats.append(ensure(param.actualType, ListRejected))
param.actualType.foreach { t =>
if (t <:< typeOf[Any]) {
stats.append(ensure(t, OptionRejected))
stats.append(ensure(t, EitherRejected))
}
}
stats.append(ensure(param.actualType, ValidProduct))
stats.append(ensure(param.actualType, ValidParameter))
}
}
queryParts.append(q"${rawQueryParts.last}")
// Call the original SQL interpolation of
// `ActionBasedSQLInterpolation`. And translate the query string
// by `SQLActionTranslator`.
stats.append(q"""
$NS.query.Translator.translateBuilder(
new slick.jdbc.ActionBasedSQLInterpolation(
StringContext(..$queryParts)
).sql(..$params)
)(implicitly[$Translators])
""")
q"{ ..$stats }"
}
def sqlImpl(param: c.Expr[Any]*): c.Expr[SQLActionBuilder] =
c.Expr(invokeInterpolation(param: _*))
def sqluImpl(param: c.Expr[Any]*): c.Expr[SqlAction[Int, NoStream, Effect]] =
c.Expr(q""" ${invokeInterpolation(param: _*)}.asUpdate """)
}
|
tarao/slick-jdbc-extension-scala
|
src/main/scala/com/github/tarao/slickjdbc/interpolation/MacroTreeBuilder.scala
|
Scala
|
mit
| 6,862 |
package jp.co.bizreach.ses
import scala.concurrent.Future
import scala.collection.JavaConverters._
import com.amazonaws.auth._
import com.amazonaws.client.builder.ExecutorFactory
import com.amazonaws.handlers.AsyncHandler
import com.amazonaws.regions.Regions
import com.amazonaws.services.simpleemail._
import com.amazonaws.services.simpleemail.model._
trait SES { self: SESClient =>
import aws._
def buildRequest(email: models.Email): SendEmailRequest = {
val destination = new Destination()
if(email.to.nonEmpty) destination.setToAddresses(email.to.map(_.encoded).asJavaCollection)
if(email.cc.nonEmpty) destination.setCcAddresses(email.cc.map(_.encoded).asJavaCollection)
if(email.bcc.nonEmpty) destination.setBccAddresses(email.bcc.map(_.encoded).asJavaCollection)
val subject = new Content(email.subject.data).withCharset(email.subject.charset)
val body = new Body()
email.bodyHtml.foreach { bodyHtml =>
val htmlContent = new Content(bodyHtml.data)
htmlContent.setCharset(bodyHtml.charset)
body.setHtml(htmlContent)
}
email.bodyText.foreach { bodyText =>
val textContent = new Content(bodyText.data)
textContent.setCharset(bodyText.charset)
body.setText(textContent)
}
val message = new Message(subject, body)
val req = new SendEmailRequest(email.source.encoded, destination, message)
if(email.replyTo.nonEmpty) req.setReplyToAddresses(email.replyTo.map(_.encoded).asJavaCollection)
email.configurationSet.foreach { configurationSetName =>
req.setConfigurationSetName(configurationSetName)
}
val messageTags = email.messageTags.map { case (name, value) =>
new MessageTag().withName(name).withValue(value)
}
req.setTags(messageTags.asJavaCollection)
    email.returnPath.foreach { returnPath =>
      req.setReturnPath(returnPath)
    }
req
}
def send(email: models.Email): Future[SendEmailResult] = wrapAsyncMethod {
sendEmailAsync(buildRequest(email), _: AsyncHandler[SendEmailRequest, SendEmailResult])
}
}
object SESClient {
def apply(accessKeyId: String, secretKeyId: String)(implicit region: Regions): SESClient = {
apply(new BasicAWSCredentials(accessKeyId, secretKeyId))
}
def apply(awsCredentials: AWSCredentials = new AnonymousAWSCredentials)(implicit region: Regions): SESClient = {
apply(new AWSStaticCredentialsProvider(awsCredentials))
}
def apply(awsCredentials: AWSCredentials, executorFactory: ExecutorFactory)(implicit region: Regions): SESClient = {
apply(new AWSStaticCredentialsProvider(awsCredentials), executorFactory)
}
def apply(awsCredentialsProvider: AWSCredentialsProvider)(implicit region: Regions): SESClient = {
val client = AmazonSimpleEmailServiceAsyncClientBuilder.standard
.withCredentials(awsCredentialsProvider)
.withRegion(region)
.build()
new SESClient(client)
}
def apply(awsCredentialsProvider: AWSCredentialsProvider, executorFactory: ExecutorFactory)
(implicit region: Regions): SESClient = {
val client = AmazonSimpleEmailServiceAsyncClientBuilder.standard
.withCredentials(awsCredentialsProvider)
.withExecutorFactory(executorFactory)
.withRegion(region)
.build()
new SESClient(client)
}
}
class SESClient (val aws: AmazonSimpleEmailServiceAsync) extends SES
|
bizreach/aws-ses-scala
|
src/main/scala/jp/co/bizreach/ses/SESClient.scala
|
Scala
|
apache-2.0
| 3,372 |
package akka
import akka.QuizProtocol.{AlreadyCreated, CreatePlayer, UserCreated}
import akka.actor.{ActorLogging, ActorRef, Props}
import akka.backpressure.MasterWorkerProtocol.WorkComplete
object CreatePlayerExecutor {
def props(): Props = Props(classOf[CreatePlayerExecutor])
}
class CreatePlayerExecutor extends AbstractPlayerExecutorActor with ActorLogging {
def receive: Receive = {
case CreatePlayer(user) => {
val onSuccess = (forward: ActorRef) => {
forward ! UserCreated(user)
println("User saved: " + user.firstname)
self ! WorkComplete("done")
}
val onError = (e: Throwable, forward: ActorRef) => {
forward ! AlreadyCreated(user)
println(s"User saved failed: ${e}")
self ! WorkComplete("fail")
}
//asyncSetPlayer(Player(user), sender)(onSuccess, onError)
Thread.sleep(1000)
sender ! UserCreated(user)
log.info(s"create >${user}<")
context.parent ! WorkComplete("done")
}
case unknownMessage => log.error(s"??? $unknownMessage")
}
}
|
djoudjou/Quiz
|
app/akka/CreatePlayerExecutor.scala
|
Scala
|
cc0-1.0
| 1,072 |
package de.htwg.zeta.server.controller
import java.net.URLDecoder
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import de.htwg.zeta.persistence.general.UserRepository
import de.htwg.zeta.server.model.TokenCache
import de.htwg.zeta.server.routing.routes
import de.htwg.zeta.server.silhouette.SilhouetteLoginInfoDao
import play.api.i18n.Messages
import play.api.libs.mailer.Email
import play.api.libs.mailer.MailerClient
import play.api.mvc.AnyContent
import play.api.mvc.InjectedController
import play.api.mvc.Request
import play.api.mvc.Result
/**
* The `Activate Account` controller.
*
* @param mailerClient The mailer client.
*/
class ActivateAccountController @Inject()(
mailerClient: MailerClient,
tokenCache: TokenCache,
userRepo: UserRepository,
loginInfoRepo: SilhouetteLoginInfoDao,
implicit val ec: ExecutionContext
) extends InjectedController {
/** Sends an account activation email to the user with the given email.
*
* @param email The email address of the user to send the activation mail to.
* @param request request
* @param messages messages
* @return The result to display.
*/
def send(email: String)(request: Request[AnyContent], messages: Messages): Future[Result] = {
val decodedEmail = URLDecoder.decode(email, "UTF-8")
val loginInfo = LoginInfo(CredentialsProvider.ID, decodedEmail)
val userId = loginInfoRepo.read(loginInfo)
val user = userId.flatMap(userId => userRepo.read(userId))
user.map { user =>
if (!user.activated) {
tokenCache.create(user.id).map { id =>
val url = routes.ScalaRoutes.getAccountActivate(id).absoluteURL()(request)
mailerClient.send(Email(
subject = messages("email.activate.account.subject"),
from = messages("email.from"),
to = Seq(decodedEmail),
bodyText = Some(views.txt.silhouette.emails.activateAccount(user, url, messages).body),
bodyHtml = Some(views.html.silhouette.emails.activateAccount(user, url, messages).body)
))
}
Accepted
} else {
Ok
}
}.recover {
case _ => Forbidden
}
}
/** Activates an account.
*
   * @param token The token to identify a user.
* @param request request
* @param messages messages
* @return The result to display.
*/
def activate(token: UUID)(request: Request[AnyContent], messages: Messages): Future[Result] = {
tokenCache.read(token).flatMap(userId =>
userRepo.update(userId, _.copy(activated = true)).map(_ => Accepted)
).recover {
case _ => Forbidden("Invalid activation link")
}
}
}
|
Zeta-Project/zeta
|
api/server/app/de/htwg/zeta/server/controller/ActivateAccountController.scala
|
Scala
|
bsd-2-clause
| 2,854 |
package org.scalatra
import javax.servlet.http.{ HttpServletRequest, HttpServletResponse }
import scala.util.DynamicVariable
trait RequestResponseScope {
/**
* The currently scoped request. Valid only inside the `handle` method.
*/
implicit def request: HttpServletRequest
/**
* The currently scoped response. Valid only inside the `handle` method.
*/
implicit def response: HttpServletResponse
protected def withRequestResponse[A](request: HttpServletRequest, response: HttpServletResponse)(f: => A): A
/**
* Executes the block with the given request bound to the `request`
* method.
*/
protected def withRequest[A](request: HttpServletRequest)(f: => A): A
/**
* Executes the block with the given response bound to the `response`
* method.
*/
protected def withResponse[A](response: HttpServletResponse)(f: => A): A
}
/**
* The Scalatra DSL requires a dynamically scoped request and response.
* This trick is explained in greater detail in Gabriele Renzi's blog
* post about Step, out of which Scalatra grew:
*
* http://www.riffraff.info/2009/4/11/step-a-scala-web-picoframework
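 *
 * A hedged usage sketch of the binding pattern (the `servletRequest` and
 * `servletResponse` names are hypothetical placeholders for the objects a
 * handler receives):
 * {{{
 *   withRequestResponse(servletRequest, servletResponse) {
 *     // inside the block the DSL's `request` and `response` members
 *     // resolve to the values bound above
 *     response.setStatus(200)
 *   }
 * }}}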
*/
trait DynamicScope extends RequestResponseScope {
/**
* The currently scoped request. Valid only inside the `handle` method.
*/
implicit def request: HttpServletRequest = dynamicRequest.value
private[this] val dynamicRequest = new DynamicVariable[HttpServletRequest](null)
/**
* The currently scoped response. Valid only inside the `handle` method.
*/
implicit def response: HttpServletResponse = dynamicResponse.value
private[this] val dynamicResponse = new DynamicVariable[HttpServletResponse](null)
protected[scalatra] def withRequestResponse[A](request: HttpServletRequest, response: HttpServletResponse)(f: => A) = {
withRequest(request) {
withResponse(response) {
f
}
}
}
/**
* Executes the block with the given request bound to the `request`
* method.
*/
protected def withRequest[A](request: HttpServletRequest)(f: => A) =
dynamicRequest.withValue(request) {
f
}
/**
* Executes the block with the given response bound to the `response`
* method.
*/
protected def withResponse[A](response: HttpServletResponse)(f: => A) =
dynamicResponse.withValue(response) {
f
}
@deprecated("Do not invoke directly. Use `withRequest` to change the binding, or request to get the value", "2.1.0")
protected def _request = dynamicRequest
@deprecated("Do not invoke directly. Use `withResponse` to change the binding, or `response` to get the value", "2.1.0")
protected def _response = dynamicResponse
}
|
lightvector/scalatra
|
core/src/main/scala/org/scalatra/DynamicScope.scala
|
Scala
|
bsd-2-clause
| 2,669 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.text.SimpleDateFormat
import java.util.{Date, UUID}
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.spark.internal.Logging
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.{EventTimeWatermark, LogicalPlan}
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanExec
import org.apache.spark.sql.sources.v2.reader.streaming.MicroBatchReadSupport
import org.apache.spark.sql.streaming._
import org.apache.spark.sql.streaming.StreamingQueryListener.QueryProgressEvent
import org.apache.spark.util.Clock
/**
* Responsible for continually reporting statistics about the amount of data processed as well
* as latency for a streaming query. This trait is designed to be mixed into the
 * [[StreamExecution]], which is responsible for calling `startTrigger` and `finishTrigger`
 * at the appropriate times. Additionally, the status can be updated with `updateStatusMessage` to
 * allow reporting on the stream's current state (i.e. "Fetching more data").
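 *
 * A hedged sketch of the expected call order from the mixing-in execution
 * (`runBatch`, `committedOffsets` and `availableOffsets` are hypothetical
 * placeholders for whatever the caller actually runs and tracks):
 * {{{
 *   startTrigger()
 *   reportTimeTaken("triggerExecution") { runBatch() }
 *   recordTriggerOffsets(from = committedOffsets, to = availableOffsets)
 *   finishTrigger(hasNewData = true)
 * }}}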
*/
trait ProgressReporter extends Logging {
case class ExecutionStats(
inputRows: Map[BaseStreamingSource, Long],
stateOperators: Seq[StateOperatorProgress],
eventTimeStats: Map[String, String])
// Internal state of the stream, required for computing metrics.
protected def id: UUID
protected def runId: UUID
protected def name: String
protected def triggerClock: Clock
protected def logicalPlan: LogicalPlan
protected def lastExecution: QueryExecution
protected def newData: Map[BaseStreamingSource, LogicalPlan]
protected def sources: Seq[BaseStreamingSource]
protected def sink: BaseStreamingSink
protected def offsetSeqMetadata: OffsetSeqMetadata
protected def currentBatchId: Long
protected def sparkSession: SparkSession
protected def postEvent(event: StreamingQueryListener.Event): Unit
// Local timestamps and counters.
private var currentTriggerStartTimestamp = -1L
private var currentTriggerEndTimestamp = -1L
private var currentTriggerStartOffsets: Map[BaseStreamingSource, String] = _
private var currentTriggerEndOffsets: Map[BaseStreamingSource, String] = _
// TODO: Restore this from the checkpoint when possible.
private var lastTriggerStartTimestamp = -1L
private val currentDurationsMs = new mutable.HashMap[String, Long]()
/** Flag that signals whether any error with input metrics have already been logged */
private var metricWarningLogged: Boolean = false
/** Holds the most recent query progress updates. Accesses must lock on the queue itself. */
private val progressBuffer = new mutable.Queue[StreamingQueryProgress]()
private val noDataProgressEventInterval =
sparkSession.sessionState.conf.streamingNoDataProgressEventInterval
// The timestamp we report an event that has no input data
private var lastNoDataProgressEventTime = Long.MinValue
private val timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") // ISO8601
timestampFormat.setTimeZone(DateTimeUtils.getTimeZone("UTC"))
@volatile
protected var currentStatus: StreamingQueryStatus = {
new StreamingQueryStatus(
message = "Initializing StreamExecution",
isDataAvailable = false,
isTriggerActive = false)
}
/** Returns the current status of the query. */
def status: StreamingQueryStatus = currentStatus
/** Returns an array containing the most recent query progress updates. */
def recentProgress: Array[StreamingQueryProgress] = progressBuffer.synchronized {
progressBuffer.toArray
}
/** Returns the most recent query progress update or null if there were no progress updates. */
def lastProgress: StreamingQueryProgress = progressBuffer.synchronized {
progressBuffer.lastOption.orNull
}
/** Begins recording statistics about query progress for a given trigger. */
protected def startTrigger(): Unit = {
logDebug("Starting Trigger Calculation")
lastTriggerStartTimestamp = currentTriggerStartTimestamp
currentTriggerStartTimestamp = triggerClock.getTimeMillis()
currentStatus = currentStatus.copy(isTriggerActive = true)
currentTriggerStartOffsets = null
currentTriggerEndOffsets = null
currentDurationsMs.clear()
}
/**
* Record the offsets range this trigger will process. Call this before updating
* `committedOffsets` in `StreamExecution` to make sure that the correct range is recorded.
*/
protected def recordTriggerOffsets(from: StreamProgress, to: StreamProgress): Unit = {
currentTriggerStartOffsets = from.mapValues(_.json)
currentTriggerEndOffsets = to.mapValues(_.json)
}
private def updateProgress(newProgress: StreamingQueryProgress): Unit = {
progressBuffer.synchronized {
progressBuffer += newProgress
while (progressBuffer.length >= sparkSession.sqlContext.conf.streamingProgressRetention) {
progressBuffer.dequeue()
}
}
postEvent(new QueryProgressEvent(newProgress))
logInfo(s"Streaming query made progress: $newProgress")
}
/** Finalizes the query progress and adds it to list of recent status updates. */
protected def finishTrigger(hasNewData: Boolean): Unit = {
assert(currentTriggerStartOffsets != null && currentTriggerEndOffsets != null)
currentTriggerEndTimestamp = triggerClock.getTimeMillis()
val executionStats = extractExecutionStats(hasNewData)
val processingTimeSec =
(currentTriggerEndTimestamp - currentTriggerStartTimestamp).toDouble / 1000
val inputTimeSec = if (lastTriggerStartTimestamp >= 0) {
(currentTriggerStartTimestamp - lastTriggerStartTimestamp).toDouble / 1000
} else {
Double.NaN
}
logDebug(s"Execution stats: $executionStats")
val sourceProgress = sources.distinct.map { source =>
val numRecords = executionStats.inputRows.getOrElse(source, 0L)
new SourceProgress(
description = source.toString,
startOffset = currentTriggerStartOffsets.get(source).orNull,
endOffset = currentTriggerEndOffsets.get(source).orNull,
numInputRows = numRecords,
inputRowsPerSecond = numRecords / inputTimeSec,
processedRowsPerSecond = numRecords / processingTimeSec
)
}
val sinkProgress = new SinkProgress(sink.toString)
val newProgress = new StreamingQueryProgress(
id = id,
runId = runId,
name = name,
timestamp = formatTimestamp(currentTriggerStartTimestamp),
batchId = currentBatchId,
durationMs = new java.util.HashMap(currentDurationsMs.toMap.mapValues(long2Long).asJava),
eventTime = new java.util.HashMap(executionStats.eventTimeStats.asJava),
stateOperators = executionStats.stateOperators.toArray,
sources = sourceProgress.toArray,
sink = sinkProgress)
if (hasNewData) {
// Reset noDataEventTimestamp if we processed any data
lastNoDataProgressEventTime = Long.MinValue
updateProgress(newProgress)
} else {
val now = triggerClock.getTimeMillis()
if (now - noDataProgressEventInterval >= lastNoDataProgressEventTime) {
lastNoDataProgressEventTime = now
updateProgress(newProgress)
}
}
currentStatus = currentStatus.copy(isTriggerActive = false)
}
/** Extract statistics about stateful operators from the executed query plan. */
private def extractStateOperatorMetrics(hasNewData: Boolean): Seq[StateOperatorProgress] = {
if (lastExecution == null) return Nil
// lastExecution could belong to one of the previous triggers if `!hasNewData`.
// Walking the plan again should be inexpensive.
lastExecution.executedPlan.collect {
case p if p.isInstanceOf[StateStoreWriter] =>
val progress = p.asInstanceOf[StateStoreWriter].getProgress()
if (hasNewData) progress else progress.copy(newNumRowsUpdated = 0)
}
}
/** Extracts statistics from the most recent query execution. */
private def extractExecutionStats(hasNewData: Boolean): ExecutionStats = {
val hasEventTime = logicalPlan.collect { case e: EventTimeWatermark => e }.nonEmpty
val watermarkTimestamp =
if (hasEventTime) Map("watermark" -> formatTimestamp(offsetSeqMetadata.batchWatermarkMs))
else Map.empty[String, String]
// SPARK-19378: Still report metrics even though no data was processed while reporting progress.
val stateOperators = extractStateOperatorMetrics(hasNewData)
if (!hasNewData) {
return ExecutionStats(Map.empty, stateOperators, watermarkTimestamp)
}
val numInputRows = extractSourceToNumInputRows()
val eventTimeStats = lastExecution.executedPlan.collect {
case e: EventTimeWatermarkExec if e.eventTimeStats.value.count > 0 =>
val stats = e.eventTimeStats.value
Map(
"max" -> stats.max,
"min" -> stats.min,
"avg" -> stats.avg.toLong).mapValues(formatTimestamp)
}.headOption.getOrElse(Map.empty) ++ watermarkTimestamp
ExecutionStats(numInputRows, stateOperators, eventTimeStats)
}
  /** Extracts the number of input rows for each streaming source in the plan. */
private def extractSourceToNumInputRows(): Map[BaseStreamingSource, Long] = {
def sumRows(tuples: Seq[(BaseStreamingSource, Long)]): Map[BaseStreamingSource, Long] = {
tuples.groupBy(_._1).mapValues(_.map(_._2).sum) // sum up rows for each source
}
val onlyDataSourceV2Sources = {
// Check whether the streaming query's logical plan has only V2 data sources
val allStreamingLeaves =
logicalPlan.collect { case s: StreamingExecutionRelation => s }
allStreamingLeaves.forall { _.source.isInstanceOf[MicroBatchReadSupport] }
}
if (onlyDataSourceV2Sources) {
// It's possible that multiple DataSourceV2ScanExec instances may refer to the same source
// (can happen with self-unions or self-joins). This means the source is scanned multiple
      // times in the query, so we should count the numRows for each scan.
val sourceToInputRowsTuples = lastExecution.executedPlan.collect {
case s: DataSourceV2ScanExec if s.readSupport.isInstanceOf[BaseStreamingSource] =>
val numRows = s.metrics.get("numOutputRows").map(_.value).getOrElse(0L)
val source = s.readSupport.asInstanceOf[BaseStreamingSource]
source -> numRows
}
logDebug("Source -> # input rows\n\t" + sourceToInputRowsTuples.mkString("\n\t"))
sumRows(sourceToInputRowsTuples)
} else {
      // Since V1 sources do not generate execution plan leaves that directly link with the source
      // that generated them, we can only do a best-effort association between execution plan
      // leaves and the sources. This is known to fail in a few cases, see SPARK-24050.
      //
      // We want to associate execution plan leaves with the sources that generate them, so that we
      // match their metrics (e.g. numOutputRows) to the sources. To do this we do the following.
// Consider the translation from the streaming logical plan to the final executed plan.
//
// streaming logical plan (with sources) <==> trigger's logical plan <==> executed plan
//
// 1. We keep track of streaming sources associated with each leaf in trigger's logical plan
// - Each logical plan leaf will be associated with a single streaming source.
// - There can be multiple logical plan leaves associated with a streaming source.
// - There can be leaves not associated with any streaming source, because they were
// generated from a batch source (e.g. stream-batch joins)
//
// 2. Assuming that the executed plan has same number of leaves in the same order as that of
// the trigger logical plan, we associate executed plan leaves with corresponding
// streaming sources.
//
// 3. For each source, we sum the metrics of the associated execution plan leaves.
//
val logicalPlanLeafToSource = newData.flatMap { case (source, logicalPlan) =>
logicalPlan.collectLeaves().map { leaf => leaf -> source }
}
val allLogicalPlanLeaves = lastExecution.logical.collectLeaves() // includes non-streaming
val allExecPlanLeaves = lastExecution.executedPlan.collectLeaves()
if (allLogicalPlanLeaves.size == allExecPlanLeaves.size) {
val execLeafToSource = allLogicalPlanLeaves.zip(allExecPlanLeaves).flatMap {
case (lp, ep) => logicalPlanLeafToSource.get(lp).map { source => ep -> source }
}
val sourceToInputRowsTuples = execLeafToSource.map { case (execLeaf, source) =>
val numRows = execLeaf.metrics.get("numOutputRows").map(_.value).getOrElse(0L)
source -> numRows
}
sumRows(sourceToInputRowsTuples)
} else {
if (!metricWarningLogged) {
def toString[T](seq: Seq[T]): String = s"(size = ${seq.size}), ${seq.mkString(", ")}"
logWarning(
"Could not report metrics as number leaves in trigger logical plan did not match that" +
s" of the execution plan:\n" +
s"logical plan leaves: ${toString(allLogicalPlanLeaves)}\n" +
s"execution plan leaves: ${toString(allExecPlanLeaves)}\n")
metricWarningLogged = true
}
Map.empty
}
}
}
/** Records the duration of running `body` for the next query progress update. */
protected def reportTimeTaken[T](triggerDetailKey: String)(body: => T): T = {
val startTime = triggerClock.getTimeMillis()
val result = body
val endTime = triggerClock.getTimeMillis()
val timeTaken = math.max(endTime - startTime, 0)
val previousTime = currentDurationsMs.getOrElse(triggerDetailKey, 0L)
currentDurationsMs.put(triggerDetailKey, previousTime + timeTaken)
logDebug(s"$triggerDetailKey took $timeTaken ms")
result
}
private def formatTimestamp(millis: Long): String = {
timestampFormat.format(new Date(millis))
}
/** Updates the message returned in `status`. */
protected def updateStatusMessage(message: String): Unit = {
currentStatus = currentStatus.copy(message = message)
}
}
|
michalsenkyr/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/ProgressReporter.scala
|
Scala
|
apache-2.0
| 15,134 |
package models
import java.util.Date
import org.bson.types.ObjectId
import org.specs2.mutable.Specification
import play.api.test.Helpers.running
import play.api.test.FakeApplication
/**
* Requires mongod running
*/
class ClientIntegrationSpec extends Specification {
"Client model" should {
"be insertable" in {
running(FakeApplication()) {
val client = Client(new ObjectId, "email", "token", new Date, true)
val id = Client.insert(client)
id must beSome[ObjectId]
}
}
"be retrievable in bulk and by id" in {
running(FakeApplication()) {
val all = Client.all
all.size must be greaterThan(0)
val head = all.head
val client = Client.findOneById(head.id)
client must beSome[Client]
}
}
"be retrievable by email and token" in {
running(FakeApplication()) {
val client = Client.findOneByEmailAndToken("email", "token")
client must beSome[Client]
}
}
"be updatable" in {
running(FakeApplication()) {
val client = Client.findOneByEmailAndToken("email", "token").get
Client.update(client.copy(email = "email2", token = "token2", isBlocked = false))
val updatedClient = Client.findOneById(client.id)
updatedClient must beSome.which {
_ match {
case (Client(_, "email2", "token2", _, false)) => true
case _ => false
}
}
}
}
"be removable" in {
running(FakeApplication()) {
val client = Client.findOneByEmailAndToken("email2", "token2")
Client.remove(client.get)
val removedClient = Client.findOneById(client.get.id)
removedClient must beNone
}
}
}
}
|
lukaszbudnik/geo-layers
|
test/models/ClientIntegrationSpec.scala
|
Scala
|
apache-2.0
| 1,764 |
package org.keycloak.performance
object AdminConsole extends AdminConsole {
}
trait AdminConsole extends Admin {
import AdminConsole._
// TODO Existing tests from keycloak.AdminConsole* will be moved here.
}
|
keycloak/keycloak
|
testsuite/performance/tests/src/test/scala/org/keycloak/performance/AdminConsole.scala
|
Scala
|
apache-2.0
| 223 |
package tutorial.webapp
import scala.scalajs.js.annotation.JSExport
import org.scalajs.dom
import org.scalajs.dom.html
import scala.util.Random
case class Point(x: Int, y: Int){
def +(p: Point) = Point(x + p.x, y + p.y)
def /(d: Int) = Point(x / d, y / d)
}
@JSExport
object ScalaJSExample {
@JSExport
def main(canvas: html.Canvas): Unit = {
val ctx = canvas.getContext("2d")
.asInstanceOf[dom.CanvasRenderingContext2D]
var count = 0
var p = Point(0, 0)
val corners = Seq(Point(255, 255), Point(0, 255), Point(128, 0))
def clear() = {
ctx.fillStyle = "black"
ctx.fillRect(0, 0, 255, 255)
}
def run = for (i <- 0 until 10){
if (count % 3000 == 0) clear()
count += 1
p = (p + corners(Random.nextInt(3))) / 2
val height = 512.0 / (255 + p.y)
val r = (p.x * height).toInt
val g = ((255-p.x) * height).toInt
val b = p.y
ctx.fillStyle = s"rgb($g, $r, $b)"
ctx.fillRect(p.x, p.y, 1, 1)
}
dom.window.setInterval(() => run, 50)
}
}
|
CraigGiles/mynab
|
scalajs/src/main/scala/tutorial/webapp/ScalaJSExample.scala
|
Scala
|
mit
| 1,064 |
package com.artclod.slick
import play.api.db.slick.Config.driver.simple._
/**
* Most of the aggregation functions in the DB do not work on db booleans.
 * It can be useful to have a boolean treated as a number in the db so that max and min, for example,
 * can be used to determine whether any of the values are true or false.
*/
object NumericBoolean {
val T : Short = 1
val F : Short = 0
def apply(s: Short) = s match {
case 0 => false
case 1 => true
    case _ => throw new IllegalStateException("Converting short to boolean failed: value was [" + s + "], must be in { 0 -> false, 1 -> true }, coding error")
}
def apply(b: Boolean) : Short = if(b) 1 else 0
def asDouble(b: Boolean) : Double = if(b) 1d else 0d
// implicit def boolean2DBNumber = MappedColumnType.base[Boolean, Short](
// bool => if(bool) 1 else 0,
// dbShort => if(dbShort == 1){ true } else if(dbShort == 0) { false } else { throw new IllegalStateException("DB returned [" + dbShort + "] for boolean, must be either 0 or 1")}
// )
}
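// Illustrative usage sketch:
//   NumericBoolean(true)            // 1: Short
//   NumericBoolean(0.toShort)       // false
//   NumericBoolean.asDouble(true)   // 1.0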
|
kristiankime/web-education-games
|
app/com/artclod/slick/NumericBoolean.scala
|
Scala
|
mit
| 1,030 |
/*
* Copyright 2014 SEEBURGER AG
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.cassandra
import java.util.concurrent.Executors
import com.twitter.util.{ Future, FuturePool }
import com.twitter.storehaus.Store
import me.prettyprint.hector.api.factory.HFactory
import me.prettyprint.cassandra.service.CassandraHostConfigurator
import me.prettyprint.hector.api.{Cluster, Keyspace}
import me.prettyprint.hector.api.beans.HColumn
import me.prettyprint.cassandra.serializers.StringSerializer
import me.prettyprint.hector.api.Serializer
import me.prettyprint.hector.api.ddl.ComparatorType
import me.prettyprint.cassandra.service.ThriftKsDef
import scala.collection.JavaConversions._
import me.prettyprint.hector.api.ddl.KeyspaceDefinition
import me.prettyprint.cassandra.model.ConfigurableConsistencyLevel
import me.prettyprint.hector.api.HConsistencyLevel
import me.prettyprint.hector.api.ConsistencyLevelPolicy
import me.prettyprint.hector.api.exceptions.HNotFoundException
import me.prettyprint.hector.api.beans.DynamicComposite
import me.prettyprint.cassandra.serializers.DynamicCompositeSerializer
/**
 * This is a Storehaus-Cassandra wrapper for dynamic composite keys provided by the well-known Hector library
 * often used in the Java-Cassandra world.
*
* The Store variant provided here has the ability to do slice queries over column slices, which allows
* for better performance and unknown column names (i.e. to store values in column names).
*
* For an explanation of input params please consult CassandraKeyValueStore
*
* For convenience type parameters are more flexible than needed. One can pass in a bunch of
 * Cassandra/Hector-serializable objects and the corresponding serializers:
* RK: represents the type(s) of the Cassandra-composite-row-key.
* One can either pass in a single type or a List of types.
 *     Remember it might not be possible to perform range scans with random partitioners like Murmur3.
* CK: represents the type(s) of the Cassandra-composite-column-key.
* See RK for details.
* It is possible to perform queries on column slices.
* V: The type of the value.
* In any case (even if one only uses a single row or a single column key) it is imperative to pass
* in the correct Cassandra/Hector serializers as a list. The list should be ordered exactly according to
* the keys (i.e. RK and CK).
*
* WARNING: use at your own risk!
*
* @author Andreas Petter
*/
object CassandraCompositeWideColumnStore {
def apply[RK, CK, V](hostNames: String, clusterName: String, keyspaceName : String, columnFamilyName: String, keySerializer: Tuple2[List[Serializer[Any]], List[Serializer[Any]]], valueSerializer: Serializer[V]): CassandraCompositeWideColumnStore[RK, CK, V] = {
val cassHostConfigurator = new CassandraHostConfigurator(hostNames)
cassHostConfigurator.setRunAutoDiscoveryAtStartup(true)
cassHostConfigurator.setAutoDiscoverHosts(true)
cassHostConfigurator.setRetryDownedHosts(true)
val cluster = HFactory.getOrCreateCluster(clusterName, cassHostConfigurator)
val keyspace = HFactory.createKeyspace(keyspaceName, cluster)
new CassandraCompositeWideColumnStore[RK, CK, V](keyspace, columnFamilyName, keySerializer, valueSerializer)
}
/**
   * Optionally this method can be used to set up storage on the Cassandra cluster.
   * (shouldn't work on existing keyspaces and column families)
*/
def setupStore[RK, CK, V](hostNames: String, clusterName: String, keyspaceName : String, columnFamilyName: String, keySerializer: Tuple2[List[Serializer[Any]], List[Serializer[Any]]], valueSerializer: Serializer[V], replicationFactor: Int) = {
val cluster : Cluster = HFactory.getOrCreateCluster(clusterName,hostNames)
val cfDef = HFactory.createColumnFamilyDefinition(keyspaceName, columnFamilyName, ComparatorType.DYNAMICCOMPOSITETYPE)
cfDef.setComparatorTypeAlias(DynamicComposite.DEFAULT_DYNAMIC_COMPOSITE_ALIASES)
cfDef.setKeyValidationClass(ComparatorType.DYNAMICCOMPOSITETYPE.getClassName())
cfDef.setKeyValidationAlias(DynamicComposite.DEFAULT_DYNAMIC_COMPOSITE_ALIASES)
cfDef.setDefaultValidationClass(valueSerializer.getComparatorType().getClassName())
val keyspace : KeyspaceDefinition = HFactory.createKeyspaceDefinition(keyspaceName, ThriftKsDef.DEF_STRATEGY_CLASS, replicationFactor,
Array(cfDef).toList)
cluster.addKeyspace(keyspace, true)
}
}
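// Illustrative usage sketch (cluster/keyspace names are hypothetical):
//   val ser = StringSerializer.get.asInstanceOf[Serializer[Any]]
//   val store = CassandraCompositeWideColumnStore[List[Any], List[Any], String](
//     "host1,host2", "TestCluster", "TestKeyspace", "TestCF",
//     (List(ser), List(ser)), StringSerializer.get)
//   store.put(((List("rowKey"), List("colKey")), Some("value")))  // returns Future[Unit]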
class CassandraCompositeWideColumnStore[RK, CK, V] (
val keyspace : Keyspace,
val columnFamilyName: String,
val keySerializer: Tuple2[List[Serializer[Any]], List[Serializer[Any]]],
val valueSerializer: Serializer[V])
extends Store[(RK, CK), V] with CassandraConsistencyLevelableStore
{
val futurePool = FuturePool(Executors.newFixedThreadPool(10))
override def put(kv: ((RK, CK), Option[V])): Future[Unit] = {
// create the keys for rows and columns
val ((rk, ck),valueOpt) = kv
val rowKey = createDynamicKey(rk, keySerializer._1)
val colKey = createDynamicKey(ck, keySerializer._2)
valueOpt match {
case Some(value) => futurePool {
// write the new entry to Cassandra
val mutator = HFactory.createMutator(keyspace, DynamicCompositeSerializer.get)
mutator.addInsertion(rowKey, columnFamilyName, HFactory.createColumn(colKey, value))
mutator.execute()
}
case None => futurePool {
// delete the entry
val mutator = HFactory.createMutator(keyspace, DynamicCompositeSerializer.get)
mutator.addDeletion(rowKey, columnFamilyName, colKey, DynamicCompositeSerializer.get)
mutator.execute()
}
}
}
def createDynamicKey(orgKey: Any, keySerializers: List[Serializer[Any]]):DynamicComposite = {
val result = new DynamicComposite
orgKey match {
case _:List[Any] => orgKey.asInstanceOf[List[Any]]
.foldLeft(0)((index,key) => { result.addComponent(key, keySerializers.lift(index).get); index + 1 } )
case _ => result.addComponent(orgKey, keySerializers.lift(0).get)
}
result
}
override def get(key: (RK, CK)): Future[Option[V]] = {
val (rk, ck) = key
val rowKey = createDynamicKey(rk, keySerializer._1)
val colKey = createDynamicKey(ck, keySerializer._2)
futurePool {
val result = HFactory.createColumnQuery(keyspace, DynamicCompositeSerializer.get, DynamicCompositeSerializer.get, valueSerializer)
.setColumnFamily(columnFamilyName)
.setKey(rowKey)
.setName(colKey)
.execute()
if (result.get == null) None else Some(result.get.getValue)
}
}
def setConsistencyLevelPolicy(policy: ConsistencyLevelPolicy) {
keyspace.setConsistencyLevelPolicy(policy)
}
}
|
AndreasPetter/storehaus-cassandra
|
storehaus-cassandra/src/main/scala/com/twitter/storehaus/cassandra/CassandraCompositeStore.scala
|
Scala
|
apache-2.0
| 7,355 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.container
import java.util.{Objects, Optional}
import java.util.concurrent.ScheduledExecutorService
import org.apache.samza.SamzaException
import org.apache.samza.checkpoint.OffsetManager
import org.apache.samza.config.Config
import org.apache.samza.config.StreamConfig.Config2Stream
import org.apache.samza.context._
import org.apache.samza.job.model.{JobModel, TaskModel}
import org.apache.samza.scheduler.{CallbackSchedulerImpl, EpochTimeScheduler, ScheduledCallback}
import org.apache.samza.storage.kv.KeyValueStore
import org.apache.samza.storage.TaskStorageManager
import org.apache.samza.system._
import org.apache.samza.table.TableManager
import org.apache.samza.task._
import org.apache.samza.util.{Logging, ScalaJavaUtil}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.Map
class TaskInstance(
val task: Any,
taskModel: TaskModel,
val metrics: TaskInstanceMetrics,
systemAdmins: SystemAdmins,
consumerMultiplexer: SystemConsumers,
collector: TaskInstanceCollector,
val offsetManager: OffsetManager = new OffsetManager,
storageManager: TaskStorageManager = null,
tableManager: TableManager = null,
val systemStreamPartitions: Set[SystemStreamPartition] = Set(),
val exceptionHandler: TaskInstanceExceptionHandler = new TaskInstanceExceptionHandler,
jobModel: JobModel = null,
streamMetadataCache: StreamMetadataCache = null,
inputStreamMetadata: Map[SystemStream, SystemStreamMetadata] = Map(),
timerExecutor : ScheduledExecutorService = null,
jobContext: JobContext,
containerContext: ContainerContext,
applicationContainerContextOption: Option[ApplicationContainerContext],
applicationTaskContextFactoryOption: Option[ApplicationTaskContextFactory[ApplicationTaskContext]],
externalContextOption: Option[ExternalContext]
) extends Logging {
val taskName: TaskName = taskModel.getTaskName
val isInitableTask = task.isInstanceOf[InitableTask]
val isWindowableTask = task.isInstanceOf[WindowableTask]
val isEndOfStreamListenerTask = task.isInstanceOf[EndOfStreamListenerTask]
val isClosableTask = task.isInstanceOf[ClosableTask]
val isAsyncTask = task.isInstanceOf[AsyncStreamTask]
val epochTimeScheduler: EpochTimeScheduler = EpochTimeScheduler.create(timerExecutor)
private val kvStoreSupplier = ScalaJavaUtil.toJavaFunction(
(storeName: String) => {
if (storageManager != null && storageManager.getStore(storeName).isDefined) {
storageManager.getStore(storeName).get.asInstanceOf[KeyValueStore[_, _]]
} else {
null
}
})
private val taskContext = new TaskContextImpl(taskModel, metrics.registry, kvStoreSupplier, tableManager,
new CallbackSchedulerImpl(epochTimeScheduler), offsetManager, jobModel, streamMetadataCache)
// need separate field for this instead of using it through Context, since Context throws an exception if it is null
private val applicationTaskContextOption = applicationTaskContextFactoryOption
.map(_.create(externalContextOption.orNull, jobContext, containerContext, taskContext,
applicationContainerContextOption.orNull))
val context = new ContextImpl(jobContext, containerContext, taskContext,
Optional.ofNullable(applicationContainerContextOption.orNull),
Optional.ofNullable(applicationTaskContextOption.orNull), Optional.ofNullable(externalContextOption.orNull))
  // store the (ssp -> whether this ssp has caught up) mapping. "caught up"
  // means the same ssp in other taskInstances has the same offset as
  // the one here.
var ssp2CaughtupMapping: scala.collection.mutable.Map[SystemStreamPartition, Boolean] =
scala.collection.mutable.Map[SystemStreamPartition, Boolean]()
systemStreamPartitions.foreach(ssp2CaughtupMapping += _ -> false)
private val config: Config = jobContext.getConfig
val intermediateStreams: Set[String] = config.getStreamIds.filter(config.getIsIntermediateStream).toSet
val streamsToDeleteCommittedMessages: Set[String] = config.getStreamIds.filter(config.getDeleteCommittedMessages).map(config.getPhysicalName).toSet
def registerOffsets {
debug("Registering offsets for taskName: %s" format taskName)
offsetManager.register(taskName, systemStreamPartitions)
}
def startTableManager {
if (tableManager != null) {
debug("Starting table manager for taskName: %s" format taskName)
tableManager.init(context)
} else {
debug("Skipping table manager initialization for taskName: %s" format taskName)
}
}
def initTask {
initCaughtUpMapping()
if (isInitableTask) {
debug("Initializing task for taskName: %s" format taskName)
task.asInstanceOf[InitableTask].init(context)
} else {
debug("Skipping task initialization for taskName: %s" format taskName)
}
applicationTaskContextOption.foreach(applicationTaskContext => {
debug("Starting application-defined task context for taskName: %s" format taskName)
applicationTaskContext.start()
})
}
def registerProducers {
debug("Registering producers for taskName: %s" format taskName)
collector.register
}
/**
* Computes the starting offset for the partitions assigned to the task and registers them with the underlying {@see SystemConsumers}.
*
* Starting offset for a partition of the task is computed in the following manner:
*
   * 1. If a startpoint exists for a task and system stream partition, and it resolves to an offset, then the resolved offset is used as the starting offset.
* 2. Else, the checkpointed offset for the system stream partition is used as the starting offset.
*/
def registerConsumers() {
debug("Registering consumers for taskName: %s" format taskName)
systemStreamPartitions.foreach(systemStreamPartition => {
val startingOffset: String = getStartingOffset(systemStreamPartition)
consumerMultiplexer.register(systemStreamPartition, startingOffset)
metrics.addOffsetGauge(systemStreamPartition, () => offsetManager.getLastProcessedOffset(taskName, systemStreamPartition).orNull)
})
}
def process(envelope: IncomingMessageEnvelope, coordinator: ReadableCoordinator,
callbackFactory: TaskCallbackFactory = null) {
metrics.processes.inc
val incomingMessageSsp = envelope.getSystemStreamPartition
if (!ssp2CaughtupMapping.getOrElse(incomingMessageSsp,
throw new SamzaException(incomingMessageSsp + " is not registered!"))) {
checkCaughtUp(envelope)
}
if (ssp2CaughtupMapping(incomingMessageSsp)) {
metrics.messagesActuallyProcessed.inc
trace("Processing incoming message envelope for taskName and SSP: %s, %s"
format (taskName, incomingMessageSsp))
if (isAsyncTask) {
exceptionHandler.maybeHandle {
val callback = callbackFactory.createCallback()
task.asInstanceOf[AsyncStreamTask].processAsync(envelope, collector, coordinator, callback)
}
} else {
exceptionHandler.maybeHandle {
task.asInstanceOf[StreamTask].process(envelope, collector, coordinator)
}
trace("Updating offset map for taskName, SSP and offset: %s, %s, %s"
format(taskName, incomingMessageSsp, envelope.getOffset))
offsetManager.update(taskName, incomingMessageSsp, envelope.getOffset)
}
}
}
def endOfStream(coordinator: ReadableCoordinator): Unit = {
if (isEndOfStreamListenerTask) {
exceptionHandler.maybeHandle {
task.asInstanceOf[EndOfStreamListenerTask].onEndOfStream(collector, coordinator)
}
}
}
def window(coordinator: ReadableCoordinator) {
if (isWindowableTask) {
trace("Windowing for taskName: %s" format taskName)
metrics.windows.inc
exceptionHandler.maybeHandle {
task.asInstanceOf[WindowableTask].window(collector, coordinator)
}
}
}
def scheduler(coordinator: ReadableCoordinator) {
trace("Scheduler for taskName: %s" format taskName)
exceptionHandler.maybeHandle {
epochTimeScheduler.removeReadyTimers().entrySet().foreach { entry =>
entry.getValue.asInstanceOf[ScheduledCallback[Any]].onCallback(entry.getKey.getKey, collector, coordinator)
}
}
}
def commit {
metrics.commits.inc
val checkpoint = offsetManager.buildCheckpoint(taskName)
trace("Flushing producers for taskName: %s" format taskName)
collector.flush
trace("Flushing state stores for taskName: %s" format taskName)
if (storageManager != null) {
storageManager.flush
}
trace("Flushing tables for taskName: %s" format taskName)
if (tableManager != null) {
tableManager.flush
}
trace("Checkpointing offsets for taskName: %s" format taskName)
offsetManager.writeCheckpoint(taskName, checkpoint)
if (checkpoint != null) {
checkpoint.getOffsets.asScala
.filter { case (ssp, _) => streamsToDeleteCommittedMessages.contains(ssp.getStream) } // Only delete data of intermediate streams
.groupBy { case (ssp, _) => ssp.getSystem }
.foreach { case (systemName: String, offsets: Map[SystemStreamPartition, String]) =>
systemAdmins.getSystemAdmin(systemName).deleteMessages(offsets.asJava)
}
}
}
def shutdownTask {
applicationTaskContextOption.foreach(applicationTaskContext => {
debug("Stopping application-defined task context for taskName: %s" format taskName)
applicationTaskContext.stop()
})
if (task.isInstanceOf[ClosableTask]) {
debug("Shutting down stream task for taskName: %s" format taskName)
task.asInstanceOf[ClosableTask].close
} else {
debug("Skipping stream task shutdown for taskName: %s" format taskName)
}
}
def shutdownTableManager {
if (tableManager != null) {
debug("Shutting down table manager for taskName: %s" format taskName)
tableManager.close
} else {
debug("Skipping table manager shutdown for taskName: %s" format taskName)
}
}
override def toString() = "TaskInstance for class %s and taskName %s." format (task.getClass.getName, taskName)
def toDetailedString() = "TaskInstance [taskName = %s, windowable=%s, closable=%s endofstreamlistener=%s]" format
(taskName, isWindowableTask, isClosableTask, isEndOfStreamListenerTask)
/**
* From the envelope, check if this SSP has caught up with the starting offset of the SSP
* in this TaskInstance. If the offsets are not comparable, default to true, which means
* it's already caught up.
*/
private def checkCaughtUp(envelope: IncomingMessageEnvelope) = {
val incomingMessageSsp = envelope.getSystemStreamPartition
if (IncomingMessageEnvelope.END_OF_STREAM_OFFSET.equals(envelope.getOffset)) {
ssp2CaughtupMapping(incomingMessageSsp) = true
} else {
systemAdmins match {
case null => {
warn("systemAdmin is null. Set all SystemStreamPartitions to caught-up")
ssp2CaughtupMapping(incomingMessageSsp) = true
}
case others => {
val startingOffset = getStartingOffset(incomingMessageSsp)
val system = incomingMessageSsp.getSystem
others.getSystemAdmin(system).offsetComparator(envelope.getOffset, startingOffset) match {
case null => {
info("offsets in " + system + " is not comparable. Set all SystemStreamPartitions to caught-up")
ssp2CaughtupMapping(incomingMessageSsp) = true // not comparable
}
case result => {
if (result >= 0) {
info(incomingMessageSsp.toString + " has caught up.")
ssp2CaughtupMapping(incomingMessageSsp) = true
}
}
}
}
}
}
}
/**
   * Check whether each partition assigned to the task has caught up to the last offset
*/
def initCaughtUpMapping() {
if (inputStreamMetadata != null && inputStreamMetadata.nonEmpty) {
systemStreamPartitions.foreach(ssp => {
if (inputStreamMetadata.contains(ssp.getSystemStream)) {
val partitionMetadata = inputStreamMetadata(ssp.getSystemStream)
.getSystemStreamPartitionMetadata.get(ssp.getPartition)
val upcomingOffset = partitionMetadata.getUpcomingOffset
val startingOffset = offsetManager.getStartingOffset(taskName, ssp)
.getOrElse(throw new SamzaException("No offset defined for SystemStreamPartition: %s" format ssp))
// Mark ssp to be caught up if the starting offset is already the
// upcoming offset, meaning the task has consumed all the messages
// in this partition before and waiting for the future incoming messages.
if(Objects.equals(upcomingOffset, startingOffset)) {
ssp2CaughtupMapping(ssp) = true
}
}
})
}
}
private def getStartingOffset(systemStreamPartition: SystemStreamPartition) = {
val offset = offsetManager.getStartingOffset(taskName, systemStreamPartition)
val startingOffset = offset.getOrElse(
throw new SamzaException("No offset defined for SystemStreamPartition: %s" format systemStreamPartition))
startingOffset
}
}
|
Swrrt/Samza
|
samza-core/src/main/scala/org/apache/samza/container/TaskInstance.scala
|
Scala
|
apache-2.0
| 14,047 |
package com.mogproject.mogami.core
import org.scalacheck.Gen
/**
* Square generator for scalacheck
*/
object SquareGen {
val squares: Gen[Square] = Gen.oneOf(Square.all)
def squaresOnBoardExcept(except: Seq[Square]): Gen[Square] = Gen.oneOf((Square.all.toSet -- except).toSeq)
}
/**
* Constant values
*/
object SquareConstant {
val P11 = Square(1, 1)
val P12 = Square(1, 2)
val P13 = Square(1, 3)
val P14 = Square(1, 4)
val P15 = Square(1, 5)
val P16 = Square(1, 6)
val P17 = Square(1, 7)
val P18 = Square(1, 8)
val P19 = Square(1, 9)
val P21 = Square(2, 1)
val P22 = Square(2, 2)
val P23 = Square(2, 3)
val P24 = Square(2, 4)
val P25 = Square(2, 5)
val P26 = Square(2, 6)
val P27 = Square(2, 7)
val P28 = Square(2, 8)
val P29 = Square(2, 9)
val P31 = Square(3, 1)
val P32 = Square(3, 2)
val P33 = Square(3, 3)
val P34 = Square(3, 4)
val P35 = Square(3, 5)
val P36 = Square(3, 6)
val P37 = Square(3, 7)
val P38 = Square(3, 8)
val P39 = Square(3, 9)
val P41 = Square(4, 1)
val P42 = Square(4, 2)
val P43 = Square(4, 3)
val P44 = Square(4, 4)
val P45 = Square(4, 5)
val P46 = Square(4, 6)
val P47 = Square(4, 7)
val P48 = Square(4, 8)
val P49 = Square(4, 9)
val P51 = Square(5, 1)
val P52 = Square(5, 2)
val P53 = Square(5, 3)
val P54 = Square(5, 4)
val P55 = Square(5, 5)
val P56 = Square(5, 6)
val P57 = Square(5, 7)
val P58 = Square(5, 8)
val P59 = Square(5, 9)
val P61 = Square(6, 1)
val P62 = Square(6, 2)
val P63 = Square(6, 3)
val P64 = Square(6, 4)
val P65 = Square(6, 5)
val P66 = Square(6, 6)
val P67 = Square(6, 7)
val P68 = Square(6, 8)
val P69 = Square(6, 9)
val P71 = Square(7, 1)
val P72 = Square(7, 2)
val P73 = Square(7, 3)
val P74 = Square(7, 4)
val P75 = Square(7, 5)
val P76 = Square(7, 6)
val P77 = Square(7, 7)
val P78 = Square(7, 8)
val P79 = Square(7, 9)
val P81 = Square(8, 1)
val P82 = Square(8, 2)
val P83 = Square(8, 3)
val P84 = Square(8, 4)
val P85 = Square(8, 5)
val P86 = Square(8, 6)
val P87 = Square(8, 7)
val P88 = Square(8, 8)
val P89 = Square(8, 9)
val P91 = Square(9, 1)
val P92 = Square(9, 2)
val P93 = Square(9, 3)
val P94 = Square(9, 4)
val P95 = Square(9, 5)
val P96 = Square(9, 6)
val P97 = Square(9, 7)
val P98 = Square(9, 8)
val P99 = Square(9, 9)
}
|
mogproject/mog-core-scala
|
shared/src/test/scala/com/mogproject/mogami/core/SquareGen.scala
|
Scala
|
apache-2.0
| 2,372 |
trait X {
trait A { type T >: Int <: Int }
val x : A
var n : x.T = 3
}
trait Y extends X {
trait B { type T >: String <: String }
lazy val x : A with B = {println(""); x}
n = "foo"
}
object Test extends App {
new Y {}
}
|
yusuke2255/dotty
|
tests/untried/neg/override.scala
|
Scala
|
bsd-3-clause
| 236 |
package org.monkeynuthead.monkeybarrel.web
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import akka.testkit.{TestKit, TestActorRef}
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterAll, MustMatchers, WordSpec}
import org.scalatest.junit.JUnitRunner
import scala.concurrent.Await
import scala.language.postfixOps
/**
* Tests for the [[org.monkeynuthead.monkeybarrel.web.ToUppercaseActor]]
*/
@RunWith(classOf[JUnitRunner])
class ToUppercaseActorSpec extends WordSpec with MustMatchers with BeforeAndAfterAll {
import scala.concurrent.duration._
import ToUppercaseActor._
implicit val system = ActorSystem()
implicit val materializer = ActorMaterializer()
"The ToUppercaseActor" must {
"convert Some(a) to Some(a.toUpperCase())" in {
import system.dispatcher
val kit = new TestKit(system)
val ref = TestActorRef(ToUppercaseActor.create())
ref ! Next(kit.testActor)
ref ! Message(Source.single("George Test"))
val finalString = kit.expectMsgPF(3 seconds) {
case Message(source) => source
}.runFold(new StringBuilder()) { (b, s) => b.append(s) }.map(_.toString())
Await.result(finalString, 3 seconds) must equal("George Test".toUpperCase())
}
}
override protected def afterAll(): Unit = {
system.shutdown()
super.afterAll()
}
}
|
georgenicoll/monkey-barrel
|
web/src/test/scala/org/monkeynuthead/monkeybarrel/web/ToUppercaseActorSpec.scala
|
Scala
|
gpl-2.0
| 1,459 |
object Test {
class Foo[T]
type C[T] = Foo[_ <: T]
val a: C[AnyRef] = new Foo[AnyRef]
}
|
loskutov/intellij-scala
|
testdata/scalacTests/pos/t0654.scala
|
Scala
|
apache-2.0
| 94 |
package dnmar;
import scalala.scalar._;
import scalala.tensor.::;
import scalala.tensor.mutable._;
import scalala.tensor.dense._;
import scalala.tensor.sparse._;
import scalala.library.Library._;
import scalala.library.Numerics._;
import scalala.library.LinearAlgebra._;
import scalala.library.Statistics._;
import scalala.library.Plotting._;
import scalala.operators.Implicits._;
import scala.collection.mutable.ListBuffer
import scala.util.Random
import math._
import java.io._
class HiddenVariablesHypothesisTwoSided(postZ:DenseMatrix[Double], postObs:DenseVector[Double], zPart:List[Int], rPart:Array[Double], obs:Array[Double], sPartial:Double, val score:Double) extends Hypothesis {
def z:Array[Int] = {
return zPartial.reverse.toArray
}
var rPartial = rPart
var zPartial = zPart
def sucessors:Array[Hypothesis] = {
val result = new ListBuffer[Hypothesis]
if(zPartial.length == postZ.numRows) {
return result.toArray
}
for(rel <- 0 until postZ.numCols) {
val newZ = rel :: zPartial
var newSpartial = sPartial + postZ(newZ.length-1, rel)
var newScore = newSpartial
//Update rPartial
val newRpartial = rPartial.clone
newRpartial(rel) = 1.0
//Add max scores for rest of z's (note: this is an upper bound / admissible heuristic)
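      //(Why this is admissible: taking each remaining row's max postZ can only overestimate the
      // score of any completion, so an unbounded search never discards the true optimum.)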
for(i <- newZ.length until postZ.numRows) {
newScore += postZ(i,::).max
}
//Observation factors
for(rel <- 0 until postZ.numCols) {
if(newRpartial(rel) == 1.0) {
newScore += postObs(rel)
} else if(postObs(rel) > 0.0) {
          //Find the best possible way of changing one of the remaining z's (note: it is possible we could use the same z to satisfy 2 different relations, but hey this is an upper bound!)
var maxValue = Double.NegativeInfinity
for(i <- newZ.length until postZ.numRows) {
val possibleValue = postObs(rel) + postZ(i,rel) - postZ(i,::).max
if(possibleValue > maxValue) {
maxValue = possibleValue
}
}
if(maxValue > 0.0) {
newScore += maxValue
}
}
}
result += new HiddenVariablesHypothesisTwoSided(postZ, postObs, newZ, newRpartial, obs, newSpartial, newScore)
}
return result.toArray
}
}
class DNMAR(data:EntityPairData) extends Parameters(data) {
//Randomly permute the training data
//var training = Random.shuffle((0 until data.data.length).toList).filter((e12) => true)
  //TODO: separate training for theta & phi?
val mid2name = new FreebaseUtils.Mid2Name("../data/mid2name")
def train(nIter:Int) = {
train(nIter, null)
}
def train(nIter:Int, fw:FileWriter) = {
for(i <- 0 until nIter) {
//Randomly permute the training data
var training = Random.shuffle((0 until data.data.length).toList).filter((e12) => true)
//println("iteration " + i)
var j = 0
for(e12 <- training) {
if(Constants.DEBUG && data.data(e12).features.length > 10) {
print("entity pair " + j + "/" + training.length + ":" + data.data(e12).features.length + "\\n")
}
//Run le inference
val iAll = inferAll(data.data(e12))
var iHidden:EntityPair = null //Just needed to asign it something temporarily...
var score = 0.0
if(trainSimple) {
iHidden = inferHiddenMULTIR(data.data(e12))
data.data(e12).z = iHidden.z
} else {
//val result = inferHiddenLocalSearch(data.data(e12), 10)
//val result = inferHiddenBranchAndBound(data.data(e12))
//val result = inferHiddenLocalSearch(data.data(e12), 20)
val result = inferHiddenLocalSearch(data.data(e12), 1)
iHidden = result._1
score = result._2
data.data(e12).z = iHidden.z
//Figure out search error (in cases where we can efficiently do exact inference)
//if(fw != null && data.data(e12).features.length > 1 && data.data(e12).features.length < 100) {
if(fw != null && data.data(e12).features.length > 1 && data.data(e12).features.length < 500) {
Utils.Timer.start("inferenceTime")
val (iHidden1rs, score1rs) = inferHiddenLocalSearch(data.data(e12), 1)
val time1rs = Utils.Timer.reset("inferenceTime")
Utils.Timer.start("inferenceTime")
val (iHidden10rs, score10rs) = inferHiddenLocalSearch(data.data(e12), 10)
val time10rs = Utils.Timer.reset("inferenceTime")
Utils.Timer.start("inferenceTime")
val (iHidden20rs, score20rs) = inferHiddenLocalSearch(data.data(e12), 20)
val time20rs = Utils.Timer.reset("inferenceTime")
Utils.Timer.start("inferenceTime")
val (iHidden1kBeam, score1kBeam) = inferHiddenAstar(data.data(e12), 1000)
val time1kBeam = Utils.Timer.reset("inferenceTime")
Utils.Timer.start("inferenceTime")
val (iHiddenBNB, scoreBNB) = inferHiddenBranchAndBound(data.data(e12))
val timeBNB = Utils.Timer.reset("inferenceTime")
Utils.Timer.start("inferenceTime")
val (iHiddenExact, scoreExact) = inferHiddenAstar(data.data(e12), -1)
val timeExact = Utils.Timer.reset("inferenceTime")
fw.write(List(score1rs, time1rs, score10rs, time10rs, score20rs, time20rs, score1kBeam, time1kBeam, scoreBNB, timeBNB, scoreExact, timeExact, data.data(e12).features.length).map(_.toString).reduceLeft(_ + "\\t" + _) + "\\n")
fw.flush
}
}
if(updateTheta && j % 10 != 0) {
//if(updateTheta) {
updateTheta(iAll, iHidden)
}
if(updatePhi && j % 10 == 0) {
//if(updatePhi) {
updatePhi(iAll, iHidden)
}
j += 1
}
}
}
def simpleObsScore(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
//postObs(r) = -4.0
//postObs(r) = -2.0
//postObs(r) = 0.0
postObs(r) = -0.0
} else if(ep.obs(r) == 0.0) {
postObs(r) = -5.0
} else {
//postObs(r) = 100.0
postObs(r) = 10000.0
}
}
postObs
}
def simpleObsScoreKbp(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
//postObs(r) = -4.0
//postObs(r) = -2.0
//postObs(r) = 0.0
postObs(r) = -0.0
} else if(ep.obs(r) == 0.0) {
postObs(r) = -100.0
} else {
//postObs(r) = 100.0
postObs(r) = 10000.0
}
}
postObs
}
def simpleObsScoreNER(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
postObs(r) = -0.0
} else if(ep.obs(r) == 0.0) {
//postObs(r) = -10.0
postObs(r) = -100.0
//postObs(r) = -5.0
} else {
//postObs(r) = 100.0
postObs(r) = 10000.0
}
}
postObs
}
  //This version doesn't take other relation values into account; it just scales by the entity frequency as a model of the missing text / DB probability
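  //e.g. every postObs(r) below ends up multiplied by 0.01 * (1 + min(entityFreq(e1), entityFreq(e2)))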
def fbObsScore2(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
val e1 = data.entityVocab(ep.e1id)
val e2 = data.entityVocab(ep.e2id)
var allSenseRels = Set[String]()
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
postObs(r) = 0.0
} else if(ep.obs(r) == 0.0) {
postObs(r) = -5.0
} else {
postObs(r) = 1000.0
}
//Scale based on the entity frequency...
postObs(r) *= 0.01 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
}
//println(e1 + "\\t" + e2 + "\\t" + (0 until postObs.length).map(i => i + ":" + data.relVocab(i) + postObs(i)).mkString("\\t").filter(x => x != -5))
postObs
}
def fbObsScore3(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
val e1 = data.entityVocab(ep.e1id)
val e2 = data.entityVocab(ep.e2id)
var allSenseRels = Set[String]()
for(sense1 <- mid2name(e1).flatMap(x => mid2name(x))) {
for(sense2 <- mid2name(e2).flatMap(x => mid2name(x))) {
for(rel <- data.fbData.getRels(sense1, sense2)) {
allSenseRels += rel
}
}
}
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
//postObs(r) = 0.0
postObs(r) = -1.0
} else if(ep.obs(r) == 0.0) {
//Simple missing data model
val rel = data.relVocab(r)
val values = data.fbData.getA2s(e1,rel);
postObs(r) = -5.0
} else {
val rel = data.relVocab(r)
        if(rel == "/location/location/contains" ||
rel == "/people/person/place_lived" ||
rel == "/people/person/nationality" ||
rel == "/people/person/children" ||
rel == "/location/neighborhood/neighborhood_of" ||
rel == "/business/person/company") {
//postObs(r) = 400.0
postObs(r) = 1000.0
} else if(rel == "/location/country/capitol" ||
rel == "/location/country/administrative_divisions" ||
//rel == "/people/person/place_of_birth" ||
rel == "/people/person/place_of_death" ||
rel == "/location/us_state/capitol") {
//postObs(r) = 50.0
postObs(r) = 200.0
} else {
//postObs(r) = 200.0
postObs(r) = 500.0
}
//postObs(r) = 10000.0
}
//Scale based on the entity frequency...
//if(postObs(r) < 100) {
if(postObs(r) < 0) {
//postObs(r) *= 0.01 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
postObs(r) *= 0.01 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
//println(postObs(r))
}
}
//println(e1 + "\\t" + e2 + "\\t" + (0 until postObs.length).map(i => i + ":" + data.relVocab(i) + postObs(i)).mkString("\\t").filter(x => x != -5))
postObs
}
def fbObsScoreNER(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
val e1 = data.entityVocab(ep.e1id)
val e2 = data.entityVocab(ep.e2id)
var allSenseRels = Set[String]()
for(sense1 <- mid2name(e1).flatMap(x => mid2name(x))) {
for(sense2 <- mid2name(e2).flatMap(x => mid2name(x))) {
for(rel <- data.fbData.getRels(sense1, sense2)) {
allSenseRels += rel
}
}
}
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
//postObs(r) = 0.0
postObs(r) = -0.0
} else if(ep.obs(r) == 0.0) {
//Simple missing data model
val rel = data.relVocab(r)
val values = data.fbData.getA2s(e1,rel);
postObs(r) = -15.0
} else {
val rel = data.relVocab(r)
//println(rel)
if(rel == "person" ||
rel == "geo-loc") {
//postObs(r) = 400.0
postObs(r) = 10000.0
} else {
//postObs(r) = 200.0
postObs(r) = 10000.0
}
//postObs(r) = 10000.0
}
//Scale based on the entity frequency...
//if(postObs(r) < 100) {
if(postObs(r) < 0) {
//postObs(r) *= 0.01 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
postObs(r) *= 1.0 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
//println(postObs(r))
}
}
//println(e1 + "\\t" + e2 + "\\t" + (0 until postObs.length).map(i => i + ":" + data.relVocab(i) + postObs(i)).mkString("\\t").filter(x => x != -5))
postObs
}
def fbObsScore(ep:EntityPair):DenseVector[Double] = {
val postObs = DenseVector.zeros[Double](data.nRel)
val e1 = data.entityVocab(ep.e1id)
val e2 = data.entityVocab(ep.e2id)
var allSenseRels = Set[String]()
for(sense1 <- mid2name(e1).flatMap(x => mid2name(x))) {
for(sense2 <- mid2name(e2).flatMap(x => mid2name(x))) {
for(rel <- data.fbData.getRels(sense1, sense2)) {
allSenseRels += rel
}
}
}
/*
val dataRels = (0 until ep.obs.length).filter(i => ep.obs(i) != 0).map(i => data.relVocab(i))
if(dataRels.length != allSenseRels.toList.length) {
println(mid2name(e1).mkString + "\\t" + mid2name(e1).flatMap(x => mid2name(x)))
println(mid2name(e2).mkString + "\\t" + mid2name(e2).flatMap(x => mid2name(x)))
println("allSenseRels=\\t" + allSenseRels)
println("dataRels=\\t" + (0 until ep.obs.length).filter(i => ep.obs(i) != 0).map(i => data.relVocab(i)).mkString("\\t"))
}
*/
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
postObs(r) = 0.0
} else if(ep.obs(r) == 0.0) {
//Simple missing data model
val rel = data.relVocab(r)
/*
val values = if(rel == "/location/location/contains") {
data.fbData.getA1s(e2,rel)
} else {
data.fbData.getA2s(e1,rel)
}
*/
val values = data.fbData.getA2s(e1,rel);
//TODO: this whole thing may need some debugging...
if(allSenseRels.contains(rel)) {
//Q: Is there another sense in which this is true?
//postObs(r) = -1.0
//postObs(r) = 10000.0
postObs(r) = 10.0
//} else if(values.filter(x => data.fbData.aContainsB(e2,x)).length > 0) {
} else if(data.relVocab(r) != "/people/person/nationality" &&
(values.filter(x => data.fbData.aContainsB(e2,x)).length > 0 || values.filter(x => data.fbData.aContainsB(x,e2)).length > 0)) {
//Another value is present, and it is contained by e2
postObs(r) = 10.0
//} else if(values.filter(x => !data.fbData.aContainsB(e2,x)).length > 0) {
} else if((data.relVocab(r) == "/people/person/nationality" && values.length > 0) ||
(values.filter(x => !data.fbData.aContainsB(e2,x)).length > 0 && values.filter(x => !data.fbData.aContainsB(x,e2)).length > 0)) {
//Q: Is there another value for this rel?
//postObs(r) = -10.0
postObs(r) = -5.0
} else {
postObs(r) = -5.0
}
} else {
postObs(r) = 10000.0
}
//Scale based on the entity frequency...
if(postObs(r) < 100) {
postObs(r) *= 0.01 * (1.0 + math.min(data.fbData.entityFreq(e1), data.fbData.entityFreq(e2)))
//println(postObs(r))
}
}
//println(e1 + "\\t" + e2 + "\\t" + (0 until postObs.length).map(i => i + ":" + data.relVocab(i) + postObs(i)).mkString("\\t").filter(x => x != -5))
postObs
}
//Computes the score for each of the observation factors based on observed data in Freebase
def computeObsScore(ep:EntityPair):DenseVector[Double] = {
//Posterior distribution over observations
val postObs = DenseVector.zeros[Double](data.nRel)
for(r <- 0 until data.nRel) {
if(r == data.relVocab("NA")) {
//postObs(r) = -4.0
//postObs(r) = -2.0
postObs(r) = 0.0
} else if(ep.obs(r) == 0.0) {
var s = 0.0
s += phiMid(ep.e1id)
s += phiMid(ep.e2id)
s += phiMid(data.entityVocab.size + r)
s += phiMid(phiMit.length-1)
if(s > -5.0) { //Don't let phiMid grow > 0
s = -5.0
}
postObs(r) = s
//postObs(r) = -5.0
} else {
/*
* NOTE: this should work for finding the MAP result, but need to add Phi for all possible MIT cases to get the correct score....
*/
var s = 0.0
s -= phiMit(data.entityVocab.size + r)
s -= phiMit(phiMid.length-1)
s -= phiMit(data.entityVocab.size + r)
s -= phiMit(phiMid.length-1)
if(s < 5.0) { //Don't let phiMit grow > -5
s = 5.0
}
postObs(r) = s
//postObs(r) = 100.0
}
}
postObs
}
def inferHiddenBranchAndBound(ep:EntityPair):Tuple2[EntityPair,Double] = {
if(Constants.TIMING) {
Utils.Timer.start("inferHiddenBranchAndBound")
}
val postZ = DenseMatrix.zeros[Double](ep.features.length, data.nRel)
for(i <- 0 until ep.features.length) {
postZ(i,::) := (theta * ep.features(i)).toDense
}
val postObs = simpleObsScore(ep)
val (iHidden1rs, score1rs) = inferHiddenLocalSearch(ep, 1)
val result = inferHiddenBranchAndBound(ep, postZ, postObs, iHidden1rs, score1rs, new Array[Double](postZ.numCols), List(), 0.0)
if(Constants.TIMING) {
Utils.Timer.stop("inferHiddenBranchAndBound")
}
result
}
def inferHiddenBranchAndBound(ep:EntityPair, postZ:DenseMatrix[Double], postObs:DenseVector[Double], epbest:EntityPair, scorebest:Double, rPartial:Array[Double], zPartial:List[Int], sPartial:Double):Tuple2[EntityPair,Double] = {
var epBest = epbest
var scoreBest = scorebest
for(rel <- 0 until postZ.numCols) {
val newZ = rel :: zPartial
val newSpartial = sPartial + postZ(newZ.length-1, rel)
var newScore = newSpartial
//Update rPartial
val newRpartial = rPartial.clone
newRpartial(rel) = 1.0
//Add max scores for rest of z's (note: this is an upper bound / admissible heuristic)
for(i <- newZ.length until postZ.numRows) {
newScore += postZ(i,::).max
}
//Observation factors
for(rel <- 0 until postZ.numCols) {
if(newRpartial(rel) == 1.0) {
newScore += postObs(rel)
} else if(postObs(rel) > 0.0) {
          //Find the best possible way of changing one of the remaining z's (note: it is possible we could use the same z to satisfy 2 different relations, but hey this is an upper bound!)
var maxValue = Double.NegativeInfinity
for(i <- newZ.length until postZ.numRows) {
val possibleValue = postObs(rel) + postZ(i,rel) - postZ(i,::).max
if(possibleValue > maxValue) {
maxValue = possibleValue
}
}
if(maxValue > 0.0) {
newScore += maxValue
}
}
}
if(newZ.length == postZ.numRows && newScore > scoreBest) {
//Found a better full assignment
epBest = new EntityPair(ep.e1id, ep.e2id, ep.features, DenseVector(newRpartial).t, DenseVector(newZ.toArray), null, ep.obs)
scoreBest = newScore
} else if(newScore > scoreBest) {
val results = inferHiddenBranchAndBound(ep, postZ, postObs, epBest, scoreBest, newRpartial, newZ, newSpartial)
epBest = results._1
scoreBest = results._2
}
}
return (epBest, scoreBest)
}
def inferHiddenLocalSearch(ep:EntityPair, nRandomRestarts:Int):Tuple2[EntityPair,Double] = {
if(Constants.TIMING) {
Utils.Timer.start("inferHiddenLocalSearch")
}
//println("N:" + ep.features.length)
val postZ = DenseMatrix.zeros[Double](ep.features.length, data.nRel)
for(i <- 0 until ep.features.length) {
postZ(i,::) := (theta * ep.features(i)).toDense
      //Normalize (note: this isn't necessary, except for analysis purposes and generating P/R on training data...)
val logExpSum = MathUtils.LogExpSum(postZ(i,::).toArray)
postZ(i,::) -= logExpSum
}
ep.postZ = postZ
var bestZ:DenseVector[Int] = null
var bestRel:DenseVectorRow[Double] = null
var bestScore = Double.NegativeInfinity
//val postObs = simpleObsScore(ep)
//val postObs = simpleObsScoreKbp(ep)
//val postObs = fbObsScore(ep)
//val postObs = fbObsScore2(ep)
val postObs = fbObsScore3(ep)
//val postObs = simpleObsScoreNER(ep)
//val postObs = fbObsScoreNER(ep)
for(n <- 0 until nRandomRestarts) {
val z = DenseVector.zeros[Int](postZ.numRows)
val rel = DenseVector.zeros[Double](postZ.numCols).t
val rCounts = DenseVector.zeros[Int](postZ.numCols)
var score = 0.0
//Random initialization
for(i <- 0 until z.length) {
//z(i) = scala.util.Random.nextInt(postObs.length)
z(i) = MathUtils.Sample(scalala.library.Library.exp(postZ(i,::)).toArray)
score += postZ(i,z(i))
rCounts(z(i)) += 1
rel(z(i)) = 1.0
}
for(r <- 0 until rCounts.length) {
if(rCounts(r) > 0) {
score += postObs(r)
}
}
var changed = false
do {
//Recompute Deltas
if(Constants.TIMING) {
Utils.Timer.start("re-computing deltas")
}
//First search operator (change one variable)
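        //Sketch of the delta for flipping z(i) to r:
        //  delta = postZ(i,r) - postZ(i,z(i))
        //          + postObs(r)    if r would become newly extracted (rCounts(r) == 0)
        //          - postObs(z(i)) if z(i) would no longer be extracted (rCounts(z(i)) == 1)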
val deltas = DenseMatrix.zeros[Double](postZ.numRows, postZ.numCols)
//Compute this in parallel over rows?
// for(i <- (0 until postZ.numRows)) {
// for(r <- 0 until postZ.numCols) {
for(r <- (0 until postZ.numCols).par) { //Compute this in parallel over columns?
for(i <- (0 until postZ.numRows)) {
if(r != z(i)) {
deltas(i,r) = postZ(i,r) - postZ(i,z(i))
if(rCounts(r) == 0) {
//This will be the first instance of r to be extracted...
deltas(i,r) += postObs(r)
}
if(rCounts(z(i)) == 1) {
//z(i) is the last instance of r remaining...
deltas(i,r) -= postObs(z(i))
}
} else {
deltas(i,r) = 0.0
}
}
}
        //Second search operator (switch all instances of one relation r1 to another relation r2)
//val deltasNA = DenseVector.zeros[Double](postZ.numCols)
val deltasAggregate = DenseMatrix.zeros[Double](postZ.numCols, postZ.numCols)
for(r1 <- (0 until postZ.numCols).par) {
// for(r1 <- (0 until postZ.numCols)) {
for(r2 <- 0 until postZ.numCols) {
if(rCounts(r1) > 0 && r1 != r2) {
for(i <- 0 until postZ.numRows) {
if(z(i) == r1) {
deltasAggregate(r1,r2) += postZ(i,r2) - postZ(i,r1)
}
}
deltasAggregate(r1,r2) -= postObs(r1)
if(rCounts(r2) == 0) {
deltasAggregate(r1,r2) += postObs(r2)
}
}
}
}
if(Constants.TIMING) {
Utils.Timer.stop("re-computing deltas")
}
changed = false
val (i, newRel) = deltas.argmax
val oldRel = z(i)
val delta = deltas(i,newRel)
val deltaAggregate = deltasAggregate.max
//Check which search operator provides the greatest score delta
if(deltaAggregate > delta && deltaAggregate > 0) {
//Change all instances of the max deltaNA relation to NA
score += deltaAggregate
val (r1, r2) = deltasAggregate.argmax
for(i <- 0 until z.length) {
if(z(i) == r1) {
z(i) = r2
rCounts(r2) += 1
}
}
rCounts(r1) = 0
rel(r1) = 0.0
rel(r2) = 1.0
changed = true
} else if(oldRel != newRel && delta > 0) {
score += delta
z(i) = newRel
rCounts(newRel) += 1
rel(newRel) = 1.0
rCounts(oldRel) -= 1
if(rCounts(oldRel) == 0.0) {
rel(oldRel) = 0
}
changed = true
}
} while(changed)
if(score > bestScore) {
bestScore = score
bestZ = z
bestRel = rel
}
}
if(Constants.DEBUG) {
println("constrained score=\\t" + bestScore)
println("constrained result.z=\\t" + bestZ.toList.map((r) => data.relVocab(r)))
println("constrained rel=\\t" + (0 until bestRel.length).filter((r) => bestRel(r) == 1.0).map((r) => data.relVocab(r)))
println("constrained obs=\\t" + (0 until ep.obs.length).filter((r) => ep.obs(r) == 1.0).map((r) => data.relVocab(r)))
}
val result = new EntityPair(ep.e1id, ep.e2id, ep.features, bestRel, bestZ, null, ep.obs)
if(Constants.TIMING) {
Utils.Timer.stop("inferHiddenLocalSearch")
}
(result, bestScore)
}
def inferHiddenAstar(ep:EntityPair):Tuple2[EntityPair,Double] = {
inferHiddenAstar(ep, -1)
}
def inferHiddenAstar(ep:EntityPair, beamSize:Int):Tuple2[EntityPair,Double] = {
if(Constants.TIMING) {
Utils.Timer.start("inferHiddenAstar")
}
val postZ = DenseMatrix.zeros[Double](ep.features.length, data.nRel)
for(i <- 0 until ep.features.length) {
postZ(i,::) := (theta * ep.features(i)).toDense
}
//Posterior distribution over observations
val postObs = simpleObsScore(ep)
val rPartial = new Array[Double](data.nRel)
var sPartial = 0.0
val bs = new BeamSearch(new HiddenVariablesHypothesisTwoSided(postZ, postObs, Nil, rPartial, ep.obs.toArray, sPartial, sPartial), beamSize);
while(bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].z.length < ep.features.length) {
bs.UpdateQ
}
val rel = DenseVector.zeros[Double](data.nRel).t
for(r <- bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].z.toArray) {
rel(r) = 1.0
}
val score = bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].score
if(Constants.DEBUG) {
val z = bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].z
val rPartial = bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].rPartial
println("constrained score=\\t" + score)
println("constrained result.z=\\t" + z.toList.map((r) => data.relVocab(r)))
println("constrained rel=\\t" + (0 until rel.length).filter((r) => rel(r) == 1.0).map((r) => data.relVocab(r)))
println("constrained rPartial=\\t" + (0 until rel.length).filter((r) => rPartial(r) == 1.0).map((r) => data.relVocab(r)))
println("constrained obs=\\t" + (0 until rel.length).filter((r) => ep.obs(r) == 1.0).map((r) => data.relVocab(r)))
}
val result = new EntityPair(ep.e1id, ep.e2id, ep.features, rel, DenseVector(bs.Head.asInstanceOf[HiddenVariablesHypothesisTwoSided].z.toArray), null, ep.obs)
if(Constants.TIMING) {
Utils.Timer.stop("inferHiddenAstar")
}
(result, score)
}
def inferHiddenMULTIR(ep:EntityPair):EntityPair = {
if(Constants.TIMING) {
Utils.Timer.start("inferHiddenMULTIR")
}
val z = DenseVector.zeros[Int](ep.features.length)
val zScore = DenseVector.zeros[Double](ep.features.length)
val postZ = DenseMatrix.zeros[Double](ep.features.length, data.nRel)
//First pre-compute postZ
for(i <- 0 until ep.features.length) {
postZ(i,::) := (theta * ep.features(i)).toDense
}
val covered = DenseVector.zeros[Boolean](ep.features.length) //Indicates whether each mention is already assigned...
var nCovered = 0
for(rel <- 0 until ep.rel.length) {
if(ep.obs(rel) == 1.0 && nCovered < ep.features.length) {
val scores = postZ(::,rel)
scores(covered) := Double.NegativeInfinity
val best = scores.argmax
z(best) = rel
zScore(best) = scores.max
covered(best) = true
nCovered += 1
}
}
for(i <- 0 until ep.features.length) {
if(!covered(i)) {
//Whatever....
for(rel <- 0 until ep.rel.length) {
if(ep.obs(rel) == 0 && rel != data.relVocab("NA")) {
postZ(i,rel) = Double.MinValue
}
}
z(i) = postZ(i,::).argmax
zScore(i) = postZ(i,::).max
}
}
if(Constants.DEBUG) {
println("constrained result.z=" + z.toList.map((r) => data.relVocab(r)))
println("constrained obs=\\t" + (0 until ep.rel.length).filter((r) => ep.obs(r) == 1.0).map((r) => data.relVocab(r)))
}
val result = new EntityPair(ep.e1id, ep.e2id, ep.features, ep.rel, z, zScore)
if(Constants.TIMING) {
Utils.Timer.stop("inferHiddenMULTIR")
}
result
}
/*
* Greedy search for best overall assignment to z, aggregate rel and obs
* (1) find best assignment to z
* (2) compute rel (deterministically)
* (3) predict max observation value for each fact
*/
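  //(Sketch: z(i) = argmax_r postZ(i,r) for every mention i, rel(r) = 1 whenever some z(i) == r,
  // and newObs(r) = 1 whenever the observation score postObs(r) ends up positive.)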
def inferAll(ep:EntityPair):EntityPair = {
inferAll(ep, false)
}
def inferAll(ep:EntityPair, useAverage:Boolean):EntityPair = {
if(Constants.TIMING) {
Utils.Timer.start("inferAll")
}
val z = DenseVector.zeros[Int](ep.features.length)
//val postZ = new Array[SparseVector[Double]](ep.features.length)
val postZ = DenseMatrix.zeros[Double](ep.features.length, data.nRel)
val zScore = DenseVector.zeros[Double](ep.features.length)
val rel = DenseVector.zeros[Double](data.nRel).t
for(i <- 0 until ep.features.length) {
if(useAverage) {
postZ(i,::) := (theta_average * ep.features(i)).toDense
} else {
postZ(i,::) := (theta * ep.features(i)).toDense
}
//Normalize?
//val logExpSum = MathUtils.LogExpSum(postZ(i,::).toArray)
//postZ(i,::) -= logExpSum
z(i) = postZ(i,::).argmax
zScore(i) = postZ(i,::).max
//println(scalala.library.Library.exp(postZ(i,::)))
//println(z(i))
//println(math.exp(zScore(i)))
//Set the aggregate variables
rel(z(i)) = 1.0
}
val postObs = DenseVector.zeros[Double](data.nRel)
val newObs = DenseVector.zeros[Double](data.nRel)
//NOTE: this doesn't really do anything now...
for(r <- 0 until data.nRel) {
if(rel(r) == 1.0) {
var s = 0.0
s -= phiMid(ep.e1id)
s -= phiMid(ep.e2id)
s -= phiMid(data.entityVocab.size + r)
s -= phiMid(phiMid.length-1) //Bias feature
postObs(r) = s
} else {
var s = 0.0
s += phiMit(ep.e1id)
s += phiMit(ep.e2id)
s += phiMit(data.entityVocab.size + r)
s += phiMit(phiMit.length-1) //Bias feature
postObs(r) = s
}
//if(rel(r) == 1.0 && postObs(r) > 0.0) {
if(postObs(r) > 0) {
newObs(r) = 1.0
}
}
if(Constants.DEBUG) {
val rels = rel
println("unconstrained result.z=" + z.toList.map((r) => data.relVocab(r)))
println("unconstrained rel=" + (0 until rels.length).filter((r) => rels(r) == 1.0).map((r) => data.relVocab(r)))
println("unconstrained obs=" + (0 until newObs.length).filter((r) => newObs(r) == 1.0).map((r) => data.relVocab(r)))
println("unconstrained postObs=" + postObs.toList)
}
//TODO: get rid of zScore, replace with postZ...
val result = new EntityPair(ep.e1id, ep.e2id, ep.features, rel, z, zScore, newObs)
result.postObs = postObs
if(Constants.TIMING) {
Utils.Timer.stop("inferAll")
}
result
}
}
|
aritter/dnmar
|
scala/src/main/scala/DNMAR.scala
|
Scala
|
gpl-3.0
| 28,632 |
package com.codacy.client.bitbucket.v1.service
import com.codacy.client.bitbucket.client.{BitbucketClient, Request, RequestResponse}
import play.api.libs.json.{JsNull, JsObject}
class UrlServices(client: BitbucketClient) {
/*
   * Post to an API URL
*/
def post(url: String): RequestResponse[JsObject] = {
client.postJson(Request(url, classOf[JsObject]), JsNull)
}
}
|
codacy/bitbucket-scala-client
|
src/main/scala/com/codacy/client/bitbucket/v1/service/UrlServices.scala
|
Scala
|
apache-2.0
| 383 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.Tables
// SKIP-SCALATESTJS,NATIVE-START
import org.junit.Test
import org.scalatestplus.junit.JUnit3Suite
import org.scalatestplus.junit.JUnitSuite
import org.scalatest.refspec.RefSpec
import org.scalatestplus.testng.TestNGSuite
import org.testng.annotations.{Test => TestNGTest}
// SKIP-SCALATESTJS,NATIVE-END
import org.scalatest.prop.TableDrivenPropertyChecks._
import Matchers._
import org.scalatest.events.Ordinal
import org.scalatest.events.IndentedText
import org.scalatest.events.Formatter
import SharedHelpers._
/*
This test could perhaps just be dropped. The history is that before I had the idea to
do a full-on Spec with `names like this`, I had and implemented the idea of having
Suite itself treat names like `test: bla bla bla` specially in that it dropped the test$colon$$space$
from the front of the test name. The suites in this file made sure Suite had that behavior, and none
of the other styles had it. Then I decided to just deprecate Suite as a style trait, but I left in
this behavior during the deprecation period. The deprecation period for Suite as a style trait has
expired, so now it is only testing that no style traits handle test: prefixes specially.
*/
trait NonTestColonEscapeExamples extends Tables {
// SKIP-SCALATESTJS,NATIVE-START
def spec: RefSpec
def junit3Suite: JUnit3Suite
def junitSuite: JUnitSuite
def testngSuite: TestNGSuite
// SKIP-SCALATESTJS,NATIVE-END
def funSuite: FunSuite
def fixtureFunSuite: fixture.FunSuite
def funSpec: FunSpec
def fixtureFunSpec: fixture.FunSpec
def featureSpec: FeatureSpec
def fixtureFeatureSpec: fixture.FeatureSpec
def flatSpec: FlatSpec
def fixtureFlatSpec: fixture.FlatSpec
def freeSpec: FreeSpec
def fixtureFreeSpec: fixture.FreeSpec
def propSpec: PropSpec
def fixturePropSpec: fixture.PropSpec
def wordSpec: WordSpec
def fixtureWordSpec: fixture.WordSpec
def pathFreeSpec: path.FreeSpec
def pathFunSpec: path.FunSpec
def examples =
Table(
("suite", "succeeded", "failed", "ignored", "pending", "canceled"),
// SKIP-SCALATESTJS,NATIVE-START
(spec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(junit3Suite, Some("- test: A Succeeded Test(org.scalatest.TestColonEscapeExampleJUnit3Suite)"), Some("- test: A Failed Test(org.scalatest.TestColonEscapeExampleJUnit3Suite)"), None, None, None),
(junitSuite, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), None, None),
(testngSuite, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), None, None),
// SKIP-SCALATESTJS,NATIVE-END
(funSuite, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(fixtureFunSuite, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(funSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(fixtureFunSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(featureSpec, Some("Scenario: test: A Succeeded Test"), Some("Scenario: test: A Failed Test"), Some("- Scenario: test: An Ignored Test"), Some("Scenario: test: A Pending Test"), Some("Scenario: test: A Canceled Test")),
(fixtureFeatureSpec, Some("Scenario: test: A Succeeded Test"), Some("Scenario: test: A Failed Test"), Some("- Scenario: test: An Ignored Test"), Some("Scenario: test: A Pending Test"), Some("Scenario: test: A Canceled Test")),
(flatSpec, Some("- should test: A Succeeded Test"), Some("- should test: A Failed Test"), Some("- should test: An Ignored Test"), Some("- should test: A Pending Test"), Some("- should test: A Canceled Test")),
(fixtureFlatSpec, Some("- should test: A Succeeded Test"), Some("- should test: A Failed Test"), Some("- should test: An Ignored Test"), Some("- should test: A Pending Test"), Some("- should test: A Canceled Test")),
(freeSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(fixtureFreeSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(propSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(fixturePropSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(wordSpec, Some("- should test: A Succeeded Test"), Some("- should test: A Failed Test"), Some("- should test: An Ignored Test"), Some("- should test: A Pending Test"), Some("- should test: A Canceled Test")),
(fixtureWordSpec, Some("- should test: A Succeeded Test"), Some("- should test: A Failed Test"), Some("- should test: An Ignored Test"), Some("- should test: A Pending Test"), Some("- should test: A Canceled Test")),
(pathFreeSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test")),
(pathFunSpec, Some("- test: A Succeeded Test"), Some("- test: A Failed Test"), Some("- test: An Ignored Test"), Some("- test: A Pending Test"), Some("- test: A Canceled Test"))
)
}
// SKIP-SCALATESTJS,NATIVE-START
@DoNotDiscover
class TestColonEscapeExampleJUnit3Suite extends JUnit3Suite {
def `test: A Succeeded Test`(): Unit = {}
def `test: A Failed Test`(): Unit = { _root_.junit.framework.Assert.assertEquals("fail on purpose", 1, 2) }
}
@DoNotDiscover
class TestColonEscapeExampleJUnitSuite extends JUnitSuite {
@Test def `test: A Succeeded Test`(): Unit = {}
@Test def `test: A Failed Test`(): Unit = { _root_.org.junit.Assert.assertEquals(1, 2) }
@_root_.org.junit.Ignore @Test def `test: An Ignored Test`(): Unit = {}
}
@DoNotDiscover
class TestColonEscapeExampleTestNGSuite extends TestNGSuite {
@TestNGTest def `test: A Succeeded Test`(): Unit = { }
@TestNGTest(groups = Array("run")) def `test: A Failed Test`(): Unit = { _root_.org.testng.Assert.assertEquals(1, 2) }
@TestNGTest(dependsOnGroups = Array("run")) def `test: An Ignored Test`(): Unit = {}
}
// SKIP-SCALATESTJS,NATIVE-END
@DoNotDiscover
protected[scalatest] class TestColonEscapeExamplePathFreeSpec extends path.FreeSpec {
"A Scope" - {
"test: A Succeeded Test" in {}
"test: A Failed Test" in { fail }
"test: An Ignored Test" ignore {}
"test: A Pending Test" in { pending }
"test: A Canceled Test" in { cancel }
}
override def newInstance: path.FreeSpecLike = new TestColonEscapeExamplePathFreeSpec
}
@DoNotDiscover
protected[scalatest] class TestColonEscapeExamplePathFunSpec extends path.FunSpec {
describe("A Spec") {
it("test: A Succeeded Test") { }
it("test: A Failed Test") { fail }
ignore("test: An Ignored Test") { }
it("test: A Pending Test") { pending }
it("test: A Canceled Test") { cancel }
}
override def newInstance: path.FunSpecLike = new TestColonEscapeExamplePathFunSpec
}
class NonTestColonEscapeProp extends FunSuite with NonTestColonEscapeExamples {
def assertFormattedText(formatter: Option[Formatter], expected: Option[String]): Unit = {
expected match {
case Some(expected) =>
formatter match {
case Some(formatter) =>
formatter match {
case IndentedText(formattedText, _, _) =>
assert(formattedText === expected)
case _ =>
fail("Expected Some(IndentedText as formatter, but got: " + formatter)
}
case None =>
fail("Expected Some(IndentedText) as formatter, but got None.")
}
case None =>
}
}
test("All others style traits besides Suite and fixture.Suite should not escape 'test:' prefix in its IndentedText's formattedText") {
forAll(examples) { (suite, succeeded, failed, ignored, pending, canceled) =>
val reporter = new EventRecordingReporter
suite.run(None, Args(reporter))
if (succeeded.isDefined) {
assert(reporter.testSucceededEventsReceived.size === 1)
val testSucceeded = reporter.testSucceededEventsReceived(0)
assertFormattedText(testSucceeded.formatter, succeeded)
}
if (failed.isDefined) {
assert(reporter.testFailedEventsReceived.size === 1)
val testFailed = reporter.testFailedEventsReceived(0)
assertFormattedText(testFailed.formatter, failed)
}
if (ignored.isDefined) {
assert(reporter.testIgnoredEventsReceived.size === 1)
val testIgnored = reporter.testIgnoredEventsReceived(0)
assertFormattedText(testIgnored.formatter, ignored)
}
if (pending.isDefined) {
assert(reporter.testPendingEventsReceived.size === 1)
val testPending = reporter.testPendingEventsReceived(0)
assertFormattedText(testPending.formatter, pending)
}
if (canceled.isDefined) {
assert(reporter.testCanceledEventsReceived.size === 1)
val testCanceled = reporter.testCanceledEventsReceived(0)
assertFormattedText(testCanceled.formatter, canceled)
}
}
}
// SKIP-SCALATESTJS,NATIVE-START
def spec = new ExampleSpec()
class ExampleSpec extends RefSpec {
def `test: A Succeeded Test`: Unit = {}
def `test: A Failed Test` = { fail }
@Ignore def `test: An Ignored Test`: Unit = {}
def `test: A Pending Test` = { pending }
def `test: A Canceled Test` = { cancel }
}
// SKIP-SCALATESTJS,NATIVE-END
def funSuite = new ExampleFunSuite()
class ExampleFunSuite extends FunSuite {
test("test: A Succeeded Test") {}
test("test: A Failed Test") { fail }
ignore("test: An Ignored Test") {}
test("test: A Pending Test") { pending }
test("test: A Canceled Test") { cancel }
}
def fixtureFunSuite = new ExampleFixtureFunSuite
class ExampleFixtureFunSuite extends fixture.FunSuite with StringFixture {
test("test: A Succeeded Test") { fixture => }
test("test: A Failed Test") { fixture => fail }
ignore("test: An Ignored Test") { fixture => }
test("test: A Pending Test") { fixture => pending }
test("test: A Canceled Test") { fixture => cancel }
}
def funSpec = new ExampleFunSpec
class ExampleFunSpec extends FunSpec {
describe("A Spec") {
it("test: A Succeeded Test") {}
it("test: A Failed Test") { fail }
ignore("test: An Ignored Test") {}
it("test: A Pending Test") { pending }
it("test: A Canceled Test") { cancel }
}
}
def fixtureFunSpec = new ExampleFixtureFunSpec
class ExampleFixtureFunSpec extends fixture.FunSpec with StringFixture {
describe("A Spec") {
it("test: A Succeeded Test") { fixture => }
it("test: A Failed Test") { fixture => fail }
ignore("test: An Ignored Test") { fixture => }
it("test: A Pending Test") { fixture => pending }
it("test: A Canceled Test") { fixture => cancel }
}
}
def featureSpec = new ExampleFeatureSpec
class ExampleFeatureSpec extends FeatureSpec {
Scenario("test: A Succeeded Test") {}
Scenario("test: A Failed Test") { fail }
ignore("test: An Ignored Test") {}
Scenario("test: A Pending Test") { pending }
Scenario("test: A Canceled Test") { cancel }
}
def fixtureFeatureSpec = new ExampleFixtureFeatureSpec
class ExampleFixtureFeatureSpec extends fixture.FeatureSpec with StringFixture {
Scenario("test: A Succeeded Test") { fixture => }
Scenario("test: A Failed Test") { fixture => fail }
ignore("test: An Ignored Test") { fixture => }
Scenario("test: A Pending Test") { fixture => pending }
Scenario("test: A Canceled Test") { fixture => cancel }
}
def flatSpec = new ExampleFlatSpec
class ExampleFlatSpec extends FlatSpec {
"A Scope" should "test: A Succeeded Test" in {}
"A Scope" should "test: A Failed Test" in { fail }
"A Scope" should "test: An Ignored Test" ignore {}
"A Scope" should "test: A Pending Test" in { pending }
"A Scope" should "test: A Canceled Test" in { cancel }
}
def fixtureFlatSpec = new ExampleFixtureFlatSpec
class ExampleFixtureFlatSpec extends fixture.FlatSpec with StringFixture {
"A Scope" should "test: A Succeeded Test" in { fixture => }
"A Scope" should "test: A Failed Test" in { fixture => fail }
"A Scope" should "test: An Ignored Test" ignore { fixture => }
"A Scope" should "test: A Pending Test" in { fixture => pending }
"A Scope" should "test: A Canceled Test" in { fixture => cancel }
}
def freeSpec = new ExampleFreeSpec
class ExampleFreeSpec extends FreeSpec {
"A Scope" - {
"test: A Succeeded Test" in {}
"test: A Failed Test" in { fail }
"test: An Ignored Test" ignore {}
"test: A Pending Test" in { pending }
"test: A Canceled Test" in { cancel }
}
}
def fixtureFreeSpec = new ExampleFixtureFreeSpec
class ExampleFixtureFreeSpec extends fixture.FreeSpec with StringFixture {
"A Scope" - {
"test: A Succeeded Test" in { fixture => }
"test: A Failed Test" in { fixture => fail }
"test: An Ignored Test" ignore { fixture => }
"test: A Pending Test" in { fixture => pending }
"test: A Canceled Test" in { fixture => cancel }
}
}
def propSpec = new ExamplePropSpec
class ExamplePropSpec extends PropSpec {
property("test: A Succeeded Test") {}
property("test: A Failed Test") { fail }
ignore("test: An Ignored Test") {}
property("test: A Pending Test") { pending }
property("test: A Canceled Test") { cancel }
}
def fixturePropSpec = new ExampleFixturePropSpec
class ExampleFixturePropSpec extends fixture.PropSpec with StringFixture {
property("test: A Succeeded Test") { fixture => }
property("test: A Failed Test") { fixture => fail }
ignore("test: An Ignored Test") { fixture => }
property("test: A Pending Test") { fixture => pending }
property("test: A Canceled Test") { fixture => cancel }
}
def wordSpec = new ExampleWordSpec
class ExampleWordSpec extends WordSpec {
"A Scope" should {
"test: A Succeeded Test" in {}
"test: A Failed Test" in { fail }
"test: An Ignored Test" ignore {}
"test: A Pending Test" in { pending }
"test: A Canceled Test" in { cancel }
}
}
def fixtureWordSpec = new ExampleFixtureWordSpec
class ExampleFixtureWordSpec extends fixture.WordSpec with StringFixture {
"A Scope" should {
"test: A Succeeded Test" in { fixture => }
"test: A Failed Test" in { fixture => fail }
"test: An Ignored Test" ignore { fixture => }
"test: A Pending Test" in { fixture => pending }
"test: A Canceled Test" in { fixture => cancel }
}
}
def pathFreeSpec = new TestColonEscapeExamplePathFreeSpec
def pathFunSpec = new TestColonEscapeExamplePathFunSpec
// SKIP-SCALATESTJS,NATIVE-START
def junit3Suite = new TestColonEscapeExampleJUnit3Suite
def junitSuite = new TestColonEscapeExampleJUnitSuite
def testngSuite = new TestColonEscapeExampleTestNGSuite
// SKIP-SCALATESTJS,NATIVE-END
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/TestColonEscapeProp.scala
|
Scala
|
apache-2.0
| 16,447 |
package com.larroy
/**
* @author piotr 17.11.14
*/
case class Path(path: Seq[Long], length_m: Double)
|
larroy/astar_scala
|
src/main/scala/Path.scala
|
Scala
|
mit
| 105 |
package io.druid.cli.spark
import com.google.common.base.Joiner
import com.google.common.collect.Lists
import com.google.inject.Inject
import com.metamx.common.logger.Logger
import io.airlift.airline.Arguments
import io.airlift.airline.Command
import io.airlift.airline.Option
import io.druid.cli.Main
import io.druid.guice.ExtensionsConfig
import io.druid.initialization.Initialization
import io.tesla.aether.internal.DefaultTeslaAether
import java.io.File
import java.lang.reflect.Method
import java.net.URL
import java.net.URLClassLoader
import java.util.{Arrays => jArrays}
import java.util.{List => jList}
/**
*/
@Command (
name = "spark",
description = "Runs the batch Spark Druid Indexer, see " +
"https://github.com/SparklineData/spark-druid-indexer for a description."
)
class CliSparkIndexer extends Runnable {
import CliSparkIndexer._
private val log: Logger = new Logger(classOf[CliSparkIndexer])
@Arguments(description = "A JSON object or the path to a file that contains a JSON object",
required = true)
private var argumentSpec: String = null
@Option(name = Array("-c", "--coordinate", "hadoopDependencies"),
description = "extra dependencies to pull down (e.g. non-default hadoop coordinates or extra hadoop jars)")
private var coordinates: java.util.List[String] = null
@Option(name = Array("--no-default-hadoop"),
description = "don't pull down the default hadoop version (currently org.apache.hadoop:hadoop-client:2.3.0)",
required = false)
var noDefaultHadoop: Boolean = false
@Inject private var extensionsConfig: ExtensionsConfig = null
@SuppressWarnings(Array("unchecked")) def run {
import scala.collection.JavaConversions._
try {
val allCoordinates: jList[String] = Lists.newArrayList[String]
if (coordinates != null) {
allCoordinates.addAll(coordinates)
}
if (!noDefaultHadoop) {
allCoordinates.add(DEFAULT_HADOOP_COORDINATES)
}
val aetherClient: DefaultTeslaAether = Initialization.getAetherClient(extensionsConfig)
val extensionURLs: jList[URL] = Lists.newArrayList[URL]
for (coordinate <- extensionsConfig.getCoordinates) {
val coordinateLoader: ClassLoader =
Initialization.getClassLoaderForCoordinates(aetherClient, coordinate,
extensionsConfig.getDefaultVersion)
extensionURLs.addAll(jArrays.asList(
(coordinateLoader.asInstanceOf[URLClassLoader]).getURLs:_*))
}
val nonHadoopURLs: jList[URL] = Lists.newArrayList[URL]
nonHadoopURLs.addAll(jArrays.asList((
classOf[CliSparkIndexer].getClassLoader.asInstanceOf[URLClassLoader]).getURLs:_*))
nonHadoopURLs.addAll(jArrays.asList((
classOf[Main].getClassLoader.asInstanceOf[URLClassLoader]).getURLs:_*))
val driverURLs: jList[URL] = Lists.newArrayList[URL]
driverURLs.addAll(nonHadoopURLs)
// put hadoop dependencies last to avoid jets3t & apache.httpcore version conflicts
import scala.collection.JavaConversions._
for (coordinate <- allCoordinates) {
val hadoopLoader: ClassLoader = Initialization.getClassLoaderForCoordinates(aetherClient,
coordinate, extensionsConfig.getDefaultVersion)
driverURLs.addAll(jArrays.asList((hadoopLoader.asInstanceOf[URLClassLoader]).getURLs:_*))
}
val loader: URLClassLoader = new URLClassLoader(
driverURLs.toArray(new Array[URL](driverURLs.size)), null)
Thread.currentThread.setContextClassLoader(loader)
val jobUrls: jList[URL] = Lists.newArrayList[URL]
jobUrls.addAll(nonHadoopURLs)
jobUrls.addAll(extensionURLs)
System.setProperty("druid.hadoop.internal.classpath",
Joiner.on(File.pathSeparator).join(jobUrls))
val mainClass: Class[_] = loader.loadClass(classOf[Main].getName)
val mainMethod: Method = mainClass.getMethod("main", classOf[Array[String]])
val args: Array[String] = Array[String]("internal", "hadoop-indexer", argumentSpec)
mainMethod.invoke(null, args)
}
catch {
case e: Exception => {
log.error(e, "failure!!!!")
System.exit(1)
}
}
}
}
object CliSparkIndexer {
private val DEFAULT_HADOOP_COORDINATES: String = "org.apache.hadoop:hadoop-client:2.3.0"
}
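// Illustrative invocation (a sketch; assumes this command is registered with Druid's Main, as the
// @Command annotation above suggests, and that the class path is already set up):
//   java -cp <druid-classpath> io.druid.cli.Main spark spec.json \
//        -c org.apache.hadoop:hadoop-client:2.4.0 --no-default-hadoop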
|
SparklineData/spark-druid-indexer
|
src/main/scala/io/druid/cli/spark/CliSparkIndexer.scala
|
Scala
|
apache-2.0
| 4,291 |
package net.yoshinorin.gitbucket.monitoring.controllers
import scala.jdk.CollectionConverters._
import scala.util.{Failure, Success}
import gitbucket.core.controller.ControllerBase
import gitbucket.core.util.AdminAuthenticator
import net.yoshinorin.gitbucket.monitoring.services._
import net.yoshinorin.gitbucket.monitoring.information._
import net.yoshinorin.gitbucket.monitoring.models.{Cpu, LoadAverage, Memory, Swap, Tasks}
import net.yoshinorin.gitbucket.monitoring.utils.Error
class MonitoringController extends ControllerBase with AdminAuthenticator {
private val instance = net.yoshinorin.gitbucket.monitoring.services.operatingsystem.OperatingSystem.getInstance
get("/admin/monitoring")(adminOnly {
redirect(s"/admin/monitoring/systeminformation")
})
get("/admin/monitoring/systeminformation")(adminOnly {
val upTime: Either[Error, UpTime] = instance.getUpTime match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
html.system(
instance.timeZone.toString,
instance.getCurrentTime.toString,
instance.getZoneOffset.toString,
instance.getDayOfWeek.toString,
instance.onDocker,
upTime
)
})
get("/admin/monitoring/environmentvariable")(adminOnly {
html.environmentVariable(System.getenv().asScala.toMap)
})
get("/admin/monitoring/machineresources")(adminOnly {
val cpu: Either[Error, Cpu] = instance.getCpu match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
val swap: Either[Error, Swap] = instance.getSwap match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
val memory: Either[Error, Memory] = instance.getMemory match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
html.resources(
instance.cpuCore,
cpu,
memory,
swap,
instance.getDiskSpace
)
})
get("/admin/monitoring/process")(adminOnly {
val tasks: Either[Error, Tasks] = instance.getTasks match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
val loadAve: Either[Error, LoadAverage] = instance.getLoadAverage match {
case Success(s) =>
s match {
case Some(s) => Right(s)
case None => Left(Error.NOTSUPPORTED)
}
case Failure(f) => Left(Error.FAILURE)
}
html.process(
tasks,
loadAve
)
})
get("/admin/monitoring/java")(adminOnly {
redirect(s"/admin/monitoring/java/systemproperties")
})
get("/admin/monitoring/java/systemproperties")(adminOnly {
java.html.systemproperties(Java.getSystemProperties)
})
get("/admin/monitoring/java/memory")(adminOnly {
java.html.memory(Java.getMemoryInfo)
})
}
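// The Try[Option[T]] => Either[Error, T] conversions above all share the same shape; a possible
// helper (sketch only, not part of the plugin) could factor out the repetition:
//   private def toEither[T](result: scala.util.Try[Option[T]]): Either[Error, T] = result match {
//     case Success(Some(v)) => Right(v)
//     case Success(None)    => Left(Error.NOTSUPPORTED)
//     case Failure(_)       => Left(Error.FAILURE)
//   }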
|
YoshinoriN/gitbucket-monitoring-plugin
|
src/main/scala/net/yoshinorin/gitbucket/monitoring/controllers/MonitoringController.scala
|
Scala
|
apache-2.0
| 3,315 |
/* Title: Pure/Admin/afp.scala
Author: Makarius
Administrative support for the Archive of Formal Proofs.
*/
package isabelle
import java.time.LocalDate
import scala.collection.immutable.SortedMap
object AFP
{
val repos_source = "https://isabelle.sketis.net/repos/afp-devel"
val groups: Map[String, String] =
Map("large" -> "full 64-bit memory model or word arithmetic required",
"slow" -> "CPU time much higher than 60min (on mid-range hardware)",
"very_slow" -> "elapsed time of many hours (on high-end hardware)")
def groups_bulky: List[String] = List("large", "slow")
def init(options: Options, base_dir: Path = Path.explode("$AFP_BASE")): AFP =
new AFP(options, base_dir)
/* entries */
def parse_date(s: String): Date =
{
val t = Date.Formatter.pattern("uuuu-MM-dd").parse(s)
Date(LocalDate.from(t).atStartOfDay(Date.timezone_berlin))
}
def trim_mail(s: String): String = s.replaceAll("<[^>]*>", "").trim
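  /* Illustrative behaviour of the two helpers above (examples, not in the original file):
     parse_date("2019-06-04") yields that day at midnight, Berlin time;
     trim_mail("Jane Doe <jane@example.org>") yields "Jane Doe". */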
sealed case class Entry(name: String, metadata: Properties.T, sessions: List[String])
{
def get(prop: String): Option[String] = Properties.get(metadata, prop)
def get_string(prop: String): String = get(prop).getOrElse("")
def get_strings(prop: String): List[String] =
space_explode(',', get_string(prop)).map(_.trim).filter(_.nonEmpty)
def title: String = get_string("title")
def authors: List[String] = get_strings("author")
def date: Date =
parse_date(get("date").getOrElse(error("Missing date for entry " + quote(name))))
def topics: List[String] = get_strings("topic")
def `abstract`: String = get_string("abstract").trim
def maintainers: List[String] = get_strings("notify")
def contributors: List[String] = get_strings("contributors")
def license: String = get("license").getOrElse("BSD")
def rdf_meta_data: Properties.T =
RDF.meta_data(
proper_string(title).map(Markup.META_TITLE -> _).toList :::
authors.map(Markup.META_CREATOR -> _) :::
contributors.map(Markup.META_CONTRIBUTOR -> _) :::
List(Markup.META_DATE -> RDF.date_format(date)) :::
List(Markup.META_LICENSE -> license) :::
proper_string(`abstract`).map(Markup.META_DESCRIPTION -> _).toList)
}
}
class AFP private(options: Options, val base_dir: Path)
{
override def toString: String = base_dir.expand.toString
val main_dir: Path = base_dir + Path.explode("thys")
/* metadata */
private val entry_metadata: Map[String, Properties.T] =
{
val metadata_file = base_dir + Path.explode("metadata/metadata")
var result = Map.empty[String, Properties.T]
var section = ""
var props = List.empty[Properties.Entry]
    val Section = """^\[(\S+)\]\s*$""".r
    val Property = """^(\S+)\s*=(.*)$""".r
    val Extra_Line = """^\s+(.*)$""".r
    val Blank_Line = """^\s*$""".r
def flush()
{
if (section != "") result += (section -> props.reverse.filter(p => p._2.nonEmpty))
section = ""
props = Nil
}
for ((line, i) <- split_lines(File.read(metadata_file)).zipWithIndex)
{
def err(msg: String): Nothing =
error(msg + Position.here(Position.Line_File(i + 1, metadata_file.expand.implode)))
line match {
case Section(name) => flush(); section = name
case Property(a, b) =>
if (section == "") err("Property without a section")
props = (a -> b.trim) :: props
case Extra_Line(line) =>
props match {
case Nil => err("Extra line without a property")
            case (a, b) :: rest => props = (a, b + "\n" + line.trim) :: rest
}
case Blank_Line() =>
case _ => err("Bad input")
}
}
flush()
result
}
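  /* The metadata file parsed above is a sequence of sections of the following shape
     (illustrative example, not taken from the actual file):
       [Example_Entry]
       title = An Example Entry
       author = Jane Doe <jane@example.org>
       abstract = First line,
         continued on an indented line
     where indented lines extend the previous property and blank lines are ignored. */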
/* entries */
val entries_map: SortedMap[String, AFP.Entry] =
{
val entries =
for (name <- Sessions.parse_roots(main_dir + Sessions.ROOTS)) yield {
val metadata =
entry_metadata.getOrElse(name, error("Entry without metadata: " + quote(name)))
val sessions =
Sessions.parse_root_entries(main_dir + Path.explode(name) + Sessions.ROOT).map(_.name)
AFP.Entry(name, metadata, sessions)
}
val entries_map =
(SortedMap.empty[String, AFP.Entry] /: entries)({ case (m, e) => m + (e.name -> e) })
val extra_metadata =
(for ((name, _) <- entry_metadata.iterator if !entries_map.isDefinedAt(name)) yield name).
toList.sorted
if (extra_metadata.nonEmpty)
error("Meta data without entry: " + commas_quote(extra_metadata))
entries_map
}
val entries: List[AFP.Entry] = entries_map.toList.map(_._2)
/* sessions */
val sessions_map: SortedMap[String, AFP.Entry] =
(SortedMap.empty[String, AFP.Entry] /: entries)(
{ case (m1, e) => (m1 /: e.sessions)({ case (m2, s) => m2 + (s -> e) }) })
val sessions: List[String] = entries.flatMap(_.sessions)
val sessions_structure: Sessions.Structure =
Sessions.load_structure(options, dirs = List(main_dir)).
selection(Sessions.Selection(sessions = sessions.toList))
/* dependency graph */
private def sessions_deps(entry: AFP.Entry): List[String] =
entry.sessions.flatMap(sessions_structure.imports_graph.imm_preds(_)).distinct.sorted
lazy val entries_graph: Graph[String, Unit] =
{
val session_entries =
(Map.empty[String, String] /: entries) {
case (m1, e) => (m1 /: e.sessions) { case (m2, s) => m2 + (s -> e.name) }
}
(Graph.empty[String, Unit] /: entries) { case (g, entry) =>
val e1 = entry.name
(g.default_node(e1, ()) /: sessions_deps(entry)) { case (g1, s) =>
(g1 /: session_entries.get(s).filterNot(_ == e1)) { case (g2, e2) =>
try { g2.default_node(e2, ()).add_edge_acyclic(e2, e1) }
catch {
case exn: Graph.Cycles[_] =>
error(cat_lines(exn.cycles.map(cycle =>
"Cyclic dependency of " + cycle.map(c => quote(c.toString)).mkString(" via ") +
" due to session " + quote(s))))
}
}
}
}
}
def entries_graph_display: Graph_Display.Graph =
Graph_Display.make_graph(entries_graph)
def entries_json_text: String =
(for (entry <- entries.iterator) yield {
val distrib_deps = sessions_deps(entry).filterNot(sessions.contains(_))
val afp_deps = entries_graph.imm_preds(entry.name).toList
"""
{""" + JSON.Format(entry.name) + """:
{"distrib_deps": """ + JSON.Format(distrib_deps) + """,
"afp_deps": """ + JSON.Format(afp_deps) + """
}
}"""
    }).mkString("[", ", ", "\n]\n")
/* partition sessions */
val force_partition1: List[String] = List("Category3", "HOL-ODE")
def partition(n: Int): List[String] =
n match {
case 0 => Nil
case 1 | 2 =>
val graph = sessions_structure.build_graph.restrict(sessions.toSet)
val force_part1 =
graph.all_preds(graph.all_succs(force_partition1.filter(graph.defined(_)))).toSet
val (part1, part2) = graph.keys.partition(a => force_part1(a) || graph.is_isolated(a))
if (n == 1) part1 else part2
case _ => error("Bad AFP partition: " + n + " (should be 0, 1, 2)")
}
}
|
larsrh/libisabelle
|
modules/pide/2019-RC4/src/main/scala/Admin/afp.scala
|
Scala
|
apache-2.0
| 7,216 |
/*
* Copyright (c) 2015 Alpine Data Labs
* All rights reserved.
*/
package com.alpine.model.pack.multiple
import com.alpine.features.FeatureDesc
import com.alpine.json.JsonTestUtil
import com.alpine.model.pack.ml.LinearRegressionModel
import com.alpine.model.{MLModel, RowModel}
import com.alpine.model.pack.preprocess.{OneHotEncodingModel, OneHotEncodingModelTest}
import org.scalatest.FunSuite
/**
* Tests serialization of various pipeline models.
*/
class PipelineRowModelTest extends FunSuite {
val oneHotEncoderModel: OneHotEncodingModel = new OneHotEncodingModelTest().oneHotEncoderModel
val liRModel = {
val coefficients = Seq[Double](0.9, 1, 5, -1)
val lirInputFeatures = oneHotEncoderModel.transformationSchema.outputFeatures.map(f => f.asInstanceOf[FeatureDesc[_ <: Number]])
LinearRegressionModel.make(coefficients, lirInputFeatures)
}
test("Serialization of the Pipeline Model should work") {
val pipelineModel = new PipelineRowModel(List[RowModel](oneHotEncoderModel))
JsonTestUtil.testJsonization(pipelineModel)
}
test("Serialization of the Pipeline Regression Model should work") {
val pipelineModel = new PipelineRegressionModel(List[RowModel](new OneHotEncodingModelTest().oneHotEncoderModel), liRModel)
JsonTestUtil.testJsonization(pipelineModel)
}
test("Should include the classes of each component model, the pipeline model and the MLModel") {
val classesForLoading = new PipelineRegressionModel(List[RowModel](new OneHotEncodingModelTest().oneHotEncoderModel), liRModel).classesForLoading
val expectedClasses = Set[Class[_]](classOf[MLModel], classOf[PipelineRegressionModel], classOf[LinearRegressionModel], classOf[OneHotEncodingModel])
assert(expectedClasses == classesForLoading)
}
}
|
holdenk/PluginSDK
|
alpine-model-pack/src/test/scala/com/alpine/model/pack/multiple/PipelineRowModelTest.scala
|
Scala
|
apache-2.0
| 1,780 |
package com.github.mdr.ascii.diagram.parser
class DiagramParserException(message: String) extends RuntimeException(message)
|
mdr/ascii-graphs
|
src/main/scala/com/github/mdr/ascii/diagram/parser/DiagramParserException.scala
|
Scala
|
mit
| 126 |
import com.raquo.domtypes.generic.keys
import outwatch.helpers.BasicStyleBuilder
package object outwatch extends ManagedSubscriptions {
type EmitterBuilder[+O, +R] = EmitterBuilderExecution[O, R, EmitterBuilder.Execution]
//TODO: invent typeclass CanBuildStyle[F[_]]
@inline implicit def StyleIsBuilder[T](style: keys.Style[T]): BasicStyleBuilder[T] = new BasicStyleBuilder[T](style.cssName)
}
|
OutWatch/outwatch
|
outwatch/src/main/scala/outwatch/package.scala
|
Scala
|
apache-2.0
| 402 |
package metal
package mutable
import scala.reflect.{classTag, ClassTag}
import spire.algebra.Order
import spire.math.QuickSort
import spire.syntax.cfor._
import util.Dummy
final class Buffer[@specialized V](var array: Array[V], var length: Int)(implicit val V: MetalTag[V], val ctV: ClassTag[V]) extends generic.Buffer[V] with mutable.Collection {
@inline final def apply(idx: Int): V = array(idx)
def toImmutable = new immutable.Buffer[V](array.clone, length) // TODO: trim the array
def toScala = toImmutable.toScala
def sort()(implicit order: Order[V]): Unit = {
QuickSort.qsort(array, 0, length.toInt - 1)(order, ctV)
}
def clear(): Unit = {
array = ctV.newArray(0)
length = 0
}
def reset(): Unit = {
cforRange(0 until length) { i =>
array(i) = null.asInstanceOf[V]
}
length = 0
}
def result() = {
val res = new metal.immutable.Buffer[V](array, length)
array = ctV.newArray(0)
length = 0
res
}
def +=(elem: V): this.type = {
ensureLength(length + 1)
array(length.toInt) = elem
length += 1
this
}
def update(idx: Long, v: V): Unit = {
array(idx.toInt) = v
}
  /** Grow the underlying array, if necessary, to accommodate at least n elements. */
def ensureLength(n: Long): Dummy[V] = {
val arrayLength: Long = array.length
if (n > arrayLength) {
var newLength: Long = spire.math.max(arrayLength.toLong * 2, 1)
while (n > newLength) newLength = newLength * 2
if (newLength > Int.MaxValue) newLength = Int.MaxValue
val newArray = ctV.newArray(newLength.toInt)
Array.copy(array, 0, newArray, 0, length.toInt)
array = newArray
}
null
}
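  // Growth behaviour (follows from the code above): the capacity at least doubles on each
  // resize, e.g. a backing array of length 8 grows to 16 for n = 9 and to 64 for n = 40,
  // capped at Int.MaxValue.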
def remove(idx: Long): V = {
val last = length.toInt - 1
if (idx < 0) throw new IndexOutOfBoundsException(idx.toString)
else if (idx < last) {
val v = array(idx.toInt)
Array.copy(array, idx.toInt + 1, array, idx.toInt, last - idx.toInt)
array(last) = null.asInstanceOf[V]
length = last
v
} else if (idx == last) {
val v = array(idx.toInt)
array(last) = null.asInstanceOf[V]
length = last
v
} else throw new IndexOutOfBoundsException(idx.toString)
}
}
object Buffer extends generic.BufferFactory {
val startSize = 8
def empty[@specialized V:ClassTag]: mutable.Buffer[V] = new mutable.Buffer[V](classTag[V].newArray(startSize), 0)
def apply[@specialized V:ClassTag](items: V*): mutable.Buffer[V] = {
val array = classTag[V].newArray(items.size)
val it = items.iterator
var i = 0
while (it.hasNext) {
array(i) = it.next
i += 1
}
new Buffer[V](array, array.length)
}
def fromArray[@specialized V:ClassTag](array: Array[V]): mutable.Buffer[V] =
new mutable.Buffer[V](array.clone, array.length)
def fromIterable[@specialized V:ClassTag](iterable: Iterable[V]): mutable.Buffer[V] = {
val array = classTag[V].newArray(iterable.size)
val it = iterable.iterator
var i = 0
while (it.hasNext) {
array(i) = it.next
i += 1
}
new Buffer[V](array, array.length)
}
}
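// Illustrative usage of the buffer above (a sketch, not part of the original file):
//   import spire.implicits._                 // supplies the Order[Int] needed by sort()
//   val buf = metal.mutable.Buffer.empty[Int]
//   buf += 3; buf += 1; buf += 2
//   buf.sort()                               // in-place quicksort of the first `length` slots
//   val frozen = buf.result()                // hands the array to an immutable.Buffer and resets this one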
|
denisrosset/ptrcoll
|
library/src/main/scala/metal/mutable/Buffer.scala
|
Scala
|
mit
| 3,121 |
package org.jetbrains.plugins.scala
package conversion
import com.intellij.codeInsight.AnnotationUtil
import com.intellij.codeInsight.editorActions.ReferenceData
import com.intellij.lang.java.JavaLanguage
import com.intellij.openapi.util.TextRange
import com.intellij.psi._
import com.intellij.psi.search.LocalSearchScope
import com.intellij.psi.search.searches.ReferencesSearch
import com.intellij.psi.util.{PsiTreeUtil, PsiUtil}
import org.jetbrains.plugins.scala.conversion.ast.ClassConstruction.ClassType
import org.jetbrains.plugins.scala.conversion.ast._
import org.jetbrains.plugins.scala.conversion.copy.AssociationHelper
import org.jetbrains.plugins.scala.conversion.visitors.SimplePrintVisitor
import org.jetbrains.plugins.scala.extensions.{PsiClassExt, PsiMemberExt}
import org.jetbrains.plugins.scala.lang.dependency.{DependencyKind, Path}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.language.postfixOps
/**
* Author: Alexander Podkhalyuzin
* Date: 23.07.2009
*/
object JavaToScala {
private val context: ThreadLocal[mutable.Stack[(Boolean, String)]] = new ThreadLocal[mutable.Stack[(Boolean, String)]] {
override def initialValue(): mutable.Stack[(Boolean, String)] = new mutable.Stack[(Boolean, String)]()
}
def findVariableUsage(elementToFind: PsiElement, elementWhereFindOption: Option[PsiElement]): Seq[PsiReferenceExpression] = {
import scala.collection.JavaConverters._
def searchReference(elementWhereFind: PsiElement) = {
ReferencesSearch.search(elementToFind, new LocalSearchScope(elementWhereFind)).findAll().asScala.toSeq
.collect { case el: PsiReferenceExpression => el }
}
elementWhereFindOption.map(elementWhereFind =>
searchReference(elementWhereFind)).getOrElse(Seq[PsiReferenceExpression]())
}
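  /* Summary of the decision below: with no write accesses the member becomes a `val`, unless it
     is public/protected and not final; with exactly one write it stays a `val` only if it is
     final; with two or more writes it always becomes a `var`. */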
def isVar(element: PsiModifierListOwner, parent: Option[PsiElement]): Boolean = {
val possibleVal = element.hasModifierProperty(PsiModifier.FINAL)
val possibleVar = element.hasModifierProperty(PsiModifier.PUBLIC) || element.hasModifierProperty(PsiModifier.PROTECTED)
val references = findVariableUsage(element, parent).filter((el: PsiReferenceExpression) => PsiUtil.isAccessedForWriting(el))
references.length match {
case 0 if possibleVal => false
case 0 if possibleVar => true
case 0 => false
case 1 if possibleVal => false
case 1 if possibleVar => true
case _ => true
}
    }
  }
trait ExternalProperties {}
case class WithReferenceExpression(yep: Boolean) extends ExternalProperties
def convertPsiToIntermdeiate(element: PsiElement, externalProperties: ExternalProperties)
(implicit associations: ListBuffer[AssociationHelper] = new ListBuffer(),
refs: Seq[ReferenceData] = Seq.empty,
withComments: Boolean = false): IntermediateNode = {
if (element == null) return LiteralExpression("")
    if (element.getLanguage != JavaLanguage.INSTANCE) return LiteralExpression("")
val result: IntermediateNode = element match {
case f: PsiFile =>
val m = MainConstruction()
m.addChildren(f.getChildren.map(convertPsiToIntermdeiate(_, externalProperties)))
m
case e: PsiExpressionStatement => convertPsiToIntermdeiate(e.getExpression, externalProperties)
case l: PsiLiteralExpression => LiteralExpression(l.getText)
case t: PsiTypeElement => TypeConstruction.createStringTypePresentation(t.getType, t.getProject)
case w: PsiWhiteSpace => LiteralExpression(w.getText)
case r: PsiReturnStatement => ReturnStatement(convertPsiToIntermdeiate(r.getReturnValue, externalProperties))
case t: PsiThrowStatement => ThrowStatement(convertPsiToIntermdeiate(t.getException, externalProperties))
case i: PsiImportStatement =>
ImportStatement(convertPsiToIntermdeiate(i.getImportReference, externalProperties), i.isOnDemand)
case i: PsiImportStaticStatement => ImportStatement(convertPsiToIntermdeiate(i.getImportReference, externalProperties), i.isOnDemand)
case i: PsiImportList => ImportStatementList(i.getAllImportStatements.map(convertPsiToIntermdeiate(_, externalProperties)))
case a: PsiAssignmentExpression =>
BinaryExpressionConstruction(convertPsiToIntermdeiate(a.getLExpression, externalProperties),
convertPsiToIntermdeiate(a.getRExpression, externalProperties), a.getOperationSign.getText)
case e: PsiExpressionListStatement =>
ExpressionListStatement(e.getExpressionList.getExpressions.map(convertPsiToIntermdeiate(_, externalProperties)))
case d: PsiDeclarationStatement => ExpressionListStatement(d.getDeclaredElements.map(convertPsiToIntermdeiate(_, externalProperties)))
case b: PsiBlockStatement => convertPsiToIntermdeiate(b.getCodeBlock, externalProperties)
case s: PsiSynchronizedStatement =>
val lock = Option(s.getLockExpression).map(convertPsiToIntermdeiate(_, externalProperties))
val body = Option(s.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
SynchronizedStatement(lock, body)
case b: PsiCodeBlock =>
BlockConstruction(b.getStatements.map(convertPsiToIntermdeiate(_, externalProperties)))
case t: PsiTypeParameter =>
TypeParameterConstruction(t.getName, t.getExtendsList.getReferenceElements.map(convertPsiToIntermdeiate(_, externalProperties)))
case i: PsiIfStatement =>
val condition = Option(i.getCondition).map(convertPsiToIntermdeiate(_, externalProperties))
val thenBranch = Option(i.getThenBranch).map(convertPsiToIntermdeiate(_, externalProperties))
val elseBranch = Option(i.getElseBranch).map(convertPsiToIntermdeiate(_, externalProperties))
IfStatement(condition, thenBranch, elseBranch)
case c: PsiConditionalExpression =>
val condition = Option(c.getCondition).map(convertPsiToIntermdeiate(_, externalProperties))
val thenBranch = Option(c.getThenExpression).map(convertPsiToIntermdeiate(_, externalProperties))
val elseBranch = Option(c.getElseExpression).map(convertPsiToIntermdeiate(_, externalProperties))
IfStatement(condition, thenBranch, elseBranch)
case w: PsiWhileStatement =>
val condition = Option(w.getCondition).map(convertPsiToIntermdeiate(_, externalProperties))
val body = Option(w.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
WhileStatement(None, condition, body, None, WhileStatement.PRE_TEST_LOOP)
case w: PsiDoWhileStatement =>
val condition = Option(w.getCondition).map(convertPsiToIntermdeiate(_, externalProperties))
val body = Option(w.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
WhileStatement(None, condition, body, None, WhileStatement.POST_TEST_LOOP)
case f: PsiForStatement =>
val initialization = Option(f.getInitialization).map(convertPsiToIntermdeiate(_, externalProperties))
val condition = Some(f.getCondition match {
case empty: PsiEmptyStatement => LiteralExpression("true")
case null => LiteralExpression("true")
case _ => convertPsiToIntermdeiate(f.getCondition, externalProperties)
})
val body = Option(f.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
val update = Option(f.getUpdate).map(convertPsiToIntermdeiate(_, externalProperties))
WhileStatement(initialization, condition, body, update, WhileStatement.PRE_TEST_LOOP)
case a: PsiAssertStatement =>
val condition = Option(a.getAssertCondition).map(convertPsiToIntermdeiate(_, externalProperties))
val description = Option(a.getAssertDescription).map(convertPsiToIntermdeiate(_, externalProperties))
AssertStatement(condition, description)
case s: PsiSwitchLabelStatement =>
val caseValue = if (s.isDefaultCase)
Some(LiteralExpression("_"))
else
Option(s.getCaseValue).map(convertPsiToIntermdeiate(_, externalProperties))
SwitchLabelStatement(caseValue, ScalaPsiUtil.functionArrow(s.getProject))
case s: PsiSwitchStatement =>
val expr = Option(s.getExpression).map(convertPsiToIntermdeiate(_, externalProperties))
val body = Option(s.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
SwitchStatemtnt(expr, body)
case p: PsiPackageStatement => PackageStatement(convertPsiToIntermdeiate(p.getPackageReference, externalProperties))
case f: PsiForeachStatement =>
val tp = Option(f.getIteratedValue).flatMap((e: PsiExpression) => Option(e.getType))
val isJavaCollection = if (tp.isEmpty) true else !tp.get.isInstanceOf[PsiArrayType]
val iteratedValue = Option(f.getIteratedValue).map(convertPsiToIntermdeiate(_, externalProperties))
val body = Option(f.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
ForeachStatement(f.getIterationParameter.getName, iteratedValue, body, isJavaCollection)
case r: PsiReferenceExpression =>
val args = Option(r.getParameterList).map(convertPsiToIntermdeiate(_, externalProperties))
val refName = if (externalProperties.isInstanceOf[WithReferenceExpression]) {
fieldParamaterMap.getOrElse(r.getReferenceName, r.getReferenceName)
} else r.getReferenceName
if (r.getQualifierExpression != null) {
val t = Option(r.getQualifierExpression).map(convertPsiToIntermdeiate(_, externalProperties))
return JavaCodeReferenceStatement(t, args, refName)
} else {
r.resolve() match {
case f: PsiMember
if f.hasModifierProperty("static") =>
val clazz = f.containingClass
if (clazz != null && context.get().contains((false, clazz.qualifiedName))) {
return JavaCodeReferenceStatement(Some(LiteralExpression(clazz.getName)), args, refName)
}
case _ =>
}
}
JavaCodeReferenceStatement(None, args, refName)
case p: PsiJavaCodeReferenceElement =>
val qualifier = Option(p.getQualifier).map(convertPsiToIntermdeiate(_, externalProperties))
val args = Option(p.getParameterList).map(convertPsiToIntermdeiate(_, externalProperties))
JavaCodeReferenceStatement(qualifier, args, p.getReferenceName)
case be: PsiBinaryExpression =>
def isOk: Boolean = {
if (be.getLOperand.getType.isInstanceOf[PsiPrimitiveType]) return false
be.getROperand match {
case l: PsiLiteralExpression if l.getText == "null" => return false
case _ =>
}
true
}
val operation = be.getOperationSign.getText match {
case "==" if isOk => "eq"
case "!=" if isOk => "ne"
case x => x
}
BinaryExpressionConstruction(
convertPsiToIntermdeiate(be.getLOperand, externalProperties),
convertPsiToIntermdeiate(be.getROperand, externalProperties),
operation)
case c: PsiTypeCastExpression =>
ClassCast(
convertPsiToIntermdeiate(c.getOperand, externalProperties), convertPsiToIntermdeiate(c.getCastType, externalProperties),
c.getCastType.getType.isInstanceOf[PsiPrimitiveType] && c.getOperand.getType.isInstanceOf[PsiPrimitiveType])
case a: PsiArrayAccessExpression =>
ArrayAccess(
convertPsiToIntermdeiate(a.getArrayExpression, externalProperties),
convertPsiToIntermdeiate(a.getIndexExpression, externalProperties))
case a: PsiArrayInitializerExpression =>
ArrayInitializer(a.getInitializers.map(convertPsiToIntermdeiate(_, externalProperties)))
case c: PsiClassObjectAccessExpression => ClassObjectAccess(convertPsiToIntermdeiate(c.getOperand, externalProperties))
case i: PsiInstanceOfExpression =>
InstanceOfConstruction(
convertPsiToIntermdeiate(i.getOperand, externalProperties),
convertPsiToIntermdeiate(i.getCheckType, externalProperties))
case m: PsiMethodCallExpression =>
m.getMethodExpression.resolve() match {
case method: PsiMethod if method.getName == "parseInt" && m.getArgumentList.getExpressions.length == 1 &&
method.getContainingClass != null && method.getContainingClass.qualifiedName == "java.lang.Integer" =>
ClassCast(convertPsiToIntermdeiate(m.getArgumentList.getExpressions.apply(0), externalProperties),
TypeConstruction("Int"), isPrimitive = true)
case method: PsiMethod if method.getName == "parseDouble" && m.getArgumentList.getExpressions.length == 1 &&
method.getContainingClass != null && method.getContainingClass.qualifiedName == "java.lang.Double" =>
ClassCast(convertPsiToIntermdeiate(m.getArgumentList.getExpressions.apply(0), externalProperties),
TypeConstruction("Double"), isPrimitive = true)
case method: PsiMethod if method.getName == "round" && m.getArgumentList.getExpressions.length == 1 &&
method.getContainingClass != null && method.getContainingClass.qualifiedName == "java.lang.Math" =>
MethodCallExpression.build(
convertPsiToIntermdeiate(m.getArgumentList.getExpressions.apply(0), externalProperties), ".round", null)
case method: PsiMethod if method.getName == "equals" && m.getTypeArguments.isEmpty
&& m.getArgumentList.getExpressions.length == 1 =>
MethodCallExpression.build(
Option(m.getMethodExpression.getQualifierExpression).map(convertPsiToIntermdeiate(_, externalProperties))
.getOrElse(LiteralExpression("this")),
" == ", convertPsiToIntermdeiate(m.getArgumentList.getExpressions.apply(0), externalProperties))
case _ =>
MethodCallExpression(m.getMethodExpression.getQualifiedName,
convertPsiToIntermdeiate(m.getMethodExpression, externalProperties),
convertPsiToIntermdeiate(m.getArgumentList, externalProperties))
}
case t: PsiThisExpression =>
ThisExpression(Option(t.getQualifier).map(convertPsiToIntermdeiate(_, externalProperties)))
case s: PsiSuperExpression =>
SuperExpression(Option(s.getQualifier).map(convertPsiToIntermdeiate(_, externalProperties)))
case e: PsiExpressionList =>
ExpressionList(e.getExpressions.map(convertPsiToIntermdeiate(_, externalProperties)))
case l: PsiLocalVariable =>
val parent = Option(PsiTreeUtil.getParentOfType(l, classOf[PsiCodeBlock], classOf[PsiBlockStatement]))
val needVar = if (parent.isEmpty) false else isVar(l, parent)
val initalizer = Option(l.getInitializer).map(convertPsiToIntermdeiate(_, externalProperties))
LocalVariable(handleModifierList(l), l.getName, convertPsiToIntermdeiate(l.getTypeElement, externalProperties),
needVar, initalizer)
case f: PsiField =>
val modifiers = handleModifierList(f)
val needVar = isVar(f, Option(f.getContainingClass))
val initalizer = Option(f.getInitializer).map(convertPsiToIntermdeiate(_, externalProperties))
FieldConstruction(modifiers, f.getName, convertPsiToIntermdeiate(f.getTypeElement, externalProperties),
needVar, initalizer)
case p: PsiParameterList =>
ParameterListConstruction(p.getParameters.map(convertPsiToIntermdeiate(_, externalProperties)))
case m: PsiMethod =>
def body: Option[IntermediateNode] = {
if (m.isConstructor) {
getFirstStatement(m).map(_.getExpression).flatMap {
case mc: PsiMethodCallExpression if mc.getMethodExpression.getQualifiedName == "this" =>
Some(convertPsiToIntermdeiate(m.getBody, externalProperties))
case _ =>
getStatements(m).map(statements => BlockConstruction(LiteralExpression("this()")
+: statements.map(convertPsiToIntermdeiate(_, externalProperties))))
}
} else {
Option(m.getBody).map(convertPsiToIntermdeiate(_, externalProperties))
}
}
if (m.isConstructor) {
ConstructorSimply(handleModifierList(m), m.getTypeParameters.map(convertPsiToIntermdeiate(_, externalProperties)),
convertPsiToIntermdeiate(m.getParameterList, externalProperties), body)
} else {
MethodConstruction(handleModifierList(m), m.getName, m.getTypeParameters.map(convertPsiToIntermdeiate(_, externalProperties)),
convertPsiToIntermdeiate(m.getParameterList, externalProperties), body,
if (m.getReturnType != PsiType.VOID) convertPsiToIntermdeiate(m.getReturnTypeElement, externalProperties) else null)
}
case c: PsiClass => createClass(c, externalProperties)
case p: PsiParenthesizedExpression =>
val expr = Option(p.getExpression).map(convertPsiToIntermdeiate(_, externalProperties))
ParenthesizedExpression(expr)
case v: PsiArrayInitializerMemberValue =>
ArrayInitializer(v.getInitializers.map(convertPsiToIntermdeiate(_, externalProperties)).toSeq)
case annot: PsiAnnotation =>
def isArrayAnnotationParameter(pair: PsiNameValuePair): Boolean = {
AnnotationUtil.getAnnotationMethod(pair) match {
case method: PsiMethod =>
val returnType = method.getReturnType
returnType != null && returnType.isInstanceOf[PsiArrayType]
case _ => false
}
}
val attributes = annot.getParameterList.getAttributes
val attrResult = new ArrayBuffer[(Option[String], Option[IntermediateNode])]()
for (attribute <- attributes) {
val value = Option(attribute.getValue) match {
case Some(v: PsiAnnotationMemberValue) if isArrayAnnotationParameter(attribute) =>
ArrayInitializer(Seq(convertPsiToIntermdeiate(v, externalProperties)))
case Some(_) => convertPsiToIntermdeiate(attribute.getValue, externalProperties)
case _ => null
}
attrResult += ((Option(attribute.getName), Option(value)))
}
val inAnnotation = PsiTreeUtil.getParentOfType(annot, classOf[PsiAnnotation]) != null
val name = Option(annot.getNameReferenceElement).map(convertPsiToIntermdeiate(_, externalProperties))
AnnotaionConstruction(inAnnotation, attrResult, name)
case p: PsiParameter =>
val modifiers = handleModifierList(p)
val name = p.getName
if (p.isVarArgs) {
p.getTypeElement.getType match {
case at: PsiArrayType =>
val scCompType = TypeConstruction.createStringTypePresentation(at.getComponentType, p.getProject)
ParameterConstruction(modifiers, name, scCompType, isArray = true)
case _ => ParameterConstruction(modifiers, name, convertPsiToIntermdeiate(p.getTypeElement, externalProperties), isArray = false) // should not happen
}
} else ParameterConstruction(modifiers, name, convertPsiToIntermdeiate(p.getTypeElement, externalProperties), isArray = false)
case n: PsiNewExpression =>
if (n.getAnonymousClass != null) {
return AnonymousClassExpression(convertPsiToIntermdeiate(n.getAnonymousClass, externalProperties))
}
val mtype = TypeConstruction.createStringTypePresentation(n.getType, n.getProject)
if (n.getArrayInitializer != null) {
NewExpression(mtype, n.getArrayInitializer.getInitializers.map(convertPsiToIntermdeiate(_, externalProperties)),
withArrayInitalizer = true)
} else if (n.getArrayDimensions.nonEmpty) {
NewExpression(mtype, n.getArrayDimensions.map(convertPsiToIntermdeiate(_, externalProperties)),
withArrayInitalizer = false)
} else {
val argList: Seq[IntermediateNode] = if (n.getArgumentList != null) {
if (n.getArgumentList.getExpressions.isEmpty) {
n.getParent match {
case r: PsiJavaCodeReferenceElement if n == r.getQualifier => Seq(LiteralExpression("()"))
case _ => null
}
} else {
Seq(convertPsiToIntermdeiate(n.getArgumentList, externalProperties))
}
} else null
NewExpression(mtype, argList, withArrayInitalizer = false)
}
case t: PsiTryStatement =>
val resourceList = Option(t.getResourceList)
val resourcesVariables = new ArrayBuffer[(String, IntermediateNode)]()
if (resourceList.isDefined) {
val it = resourceList.get.iterator
while (it.hasNext) {
val next = it.next()
next match {
case varible: PsiResourceVariable =>
resourcesVariables += ((varible.getName, convertPsiToIntermdeiate(varible, externalProperties)))
case _ =>
}
}
}
val tryBlock = Option(t.getTryBlock).map((c: PsiCodeBlock) => convertPsiToIntermdeiate(c, externalProperties))
val catches = t.getCatchSections.map((cb: PsiCatchSection) =>
(convertPsiToIntermdeiate(cb.getParameter, externalProperties), convertPsiToIntermdeiate(cb.getCatchBlock, externalProperties)))
val finallys = Option(t.getFinallyBlock).map((f: PsiCodeBlock) => f.getStatements.map(convertPsiToIntermdeiate(_, externalProperties)).toSeq)
TryCatchStatement(resourcesVariables, tryBlock, catches, finallys, ScalaPsiUtil.functionArrow(t.getProject))
case p: PsiPrefixExpression =>
PrefixExpression(convertPsiToIntermdeiate(p.getOperand, externalProperties), p.getOperationSign.getText, canBeSimpified(p))
case p: PsiPostfixExpression =>
PostfixExpression(convertPsiToIntermdeiate(p.getOperand, externalProperties), p.getOperationSign.getText, canBeSimpified(p))
case p: PsiPolyadicExpression =>
val tokenValue = if (p.getOperands.nonEmpty) {
p.getTokenBeforeOperand(p.getOperands.apply(1)).getText
} else ""
PolyadicExpression(p.getOperands.map(convertPsiToIntermdeiate(_, externalProperties)), tokenValue)
case r: PsiReferenceParameterList => TypeParameters(r.getTypeParameterElements.map(convertPsiToIntermdeiate(_, externalProperties)))
case b: PsiBreakStatement =>
if (b.getLabelIdentifier != null)
NotSupported(None, "break " + b.getLabelIdentifier.getText + "// todo: label break is not supported")
else NotSupported(None, "break //todo: break is not supported")
case c: PsiContinueStatement =>
if (c.getLabelIdentifier != null)
NotSupported(None, "continue " + c.getLabelIdentifier.getText + " //todo: continue is not supported")
else NotSupported(None, "continue //todo: continue is not supported")
case s: PsiLabeledStatement =>
val statements = Option(s.getStatement).map(convertPsiToIntermdeiate(_, externalProperties))
NotSupported(statements, s.getLabelIdentifier.getText + " //todo: labels is not supported")
case e: PsiEmptyStatement => EmptyConstruction()
case e: PsiErrorElement => EmptyConstruction()
case e => LiteralExpression(e.getText)
}
hanldleAssociations(element, result)
result
}
def hanldleAssociations(element: PsiElement, result: IntermediateNode)
(implicit associations: ListBuffer[AssociationHelper] = new ListBuffer(),
refs: Seq[ReferenceData] = Seq.empty,
withComments: Boolean = false) = {
element match {
case expression: PsiNewExpression if expression.getClassReference != null =>
associations ++= associationFor(expression.getClassReference)
case e: PsiElement => associations ++= associationFor(e)
case _ =>
}
result match {
case parametrizedConstruction: ParametrizedConstruction =>
associations ++= parametrizedConstruction.getAssociations.map {
case (node, path) => AssociationHelper(DependencyKind.Reference, node, Path(path))
}
case arrayConstruction: ArrayConstruction =>
associations ++= arrayConstruction.getAssociations.map {
case (node, path) => AssociationHelper(DependencyKind.Reference, node, Path(path))
}
case _ =>
}
def associationFor(range: PsiElement): Option[AssociationHelper] = {
refs.find(ref => new TextRange(ref.startOffset, ref.endOffset) == range.getTextRange).map {
ref =>
if (ref.staticMemberName == null) {
AssociationHelper(DependencyKind.Reference, result, Path(ref.qClassName))
} else {
AssociationHelper(DependencyKind.Reference, result, Path(ref.qClassName, ref.staticMemberName))
}
}
}
}
val fieldParamaterMap = new mutable.HashMap[String, String]()
def createClass(inClass: PsiClass, externalProperties: ExternalProperties)
(implicit associations: ListBuffer[AssociationHelper] = new ListBuffer(),
refs: Seq[ReferenceData] = Seq.empty,
withComments: Boolean = false): IntermediateNode = {
def extendList: Seq[PsiJavaCodeReferenceElement] = {
val typez = new ArrayBuffer[PsiJavaCodeReferenceElement]
if (inClass.getExtendsList != null) typez ++= inClass.getExtendsList.getReferenceElements
if (inClass.getImplementsList != null) typez ++= inClass.getImplementsList.getReferenceElements
typez
}
def collectClassObjectMembers(): (Seq[PsiMember], Seq[PsiMember]) = {
var forClass = new ArrayBuffer[PsiMember]()
var forObject = new ArrayBuffer[PsiMember]()
for (method <- inClass.getMethods) {
if (method.hasModifierProperty("static")) {
forObject += method
} else forClass += method
}
val serialVersionUID = serialVersion(inClass)
for (field <- inClass.getFields if !serialVersionUID.contains(field)) {
if (field.hasModifierProperty("static")) {
forObject += field
} else forClass += field
}
for (clazz <- inClass.getInnerClasses) {
if (clazz.hasModifierProperty("static")) {
forObject += clazz
} else forClass += clazz
}
forClass = forClass.sortBy(_.getTextOffset)
forObject = forObject.sortBy(_.getTextOffset)
(forClass, forObject)
}
def handleObject(objectMembers: Seq[PsiMember]): IntermediateNode = {
def handleAsEnum(modifiers: IntermediateNode): IntermediateNode = {
Enum(inClass.getName, modifiers,
objectMembers.filter(_.isInstanceOf[PsiEnumConstant]).map((el: PsiMember) => el.getName))
}
def handleAsObject(modifiers: IntermediateNode): IntermediateNode = {
val membersOut = objectMembers.filter(!_.isInstanceOf[PsiEnumConstant]).map(convertPsiToIntermdeiate(_, externalProperties))
val initializers = inClass.getInitializers.map((x: PsiClassInitializer) => convertPsiToIntermdeiate(x.getBody, externalProperties))
val primaryConstructor = None
val typeParams = None
val companionObject = EmptyConstruction()
ClassConstruction(inClass.getName, primaryConstructor, membersOut, modifiers,
typeParams, Some(initializers), ClassType.OBJECT, companionObject, None)
}
if (objectMembers.nonEmpty && !inClass.isInstanceOf[PsiAnonymousClass]) {
context.get().push((true, inClass.qualifiedName))
try {
val modifiers = handleModifierList(inClass)
val updatedModifiers = modifiers.asInstanceOf[ModifiersConstruction].without(ModifierType.ABSTRACT)
if (inClass.isEnum)
handleAsEnum(updatedModifiers)
else
handleAsObject(updatedModifiers)
} finally {
context.get().pop()
}
} else {
EmptyConstruction()
}
}
def handleAsClass(classMembers: Seq[PsiMember], objectMembers: Seq[PsiMember],
companionObject: IntermediateNode, extendList: Seq[PsiJavaCodeReferenceElement]): IntermediateNode = {
def handleAnonymousClass(clazz: PsiAnonymousClass): IntermediateNode = {
val tp = TypeConstruction.createStringTypePresentation(clazz.getBaseClassType, clazz.getProject)
val argList = convertPsiToIntermdeiate(clazz.getArgumentList, externalProperties)
AnonymousClass(tp, argList, classMembers.map(convertPsiToIntermdeiate(_, externalProperties)),
extendList.map(convertPsiToIntermdeiate(_, externalProperties)))
}
def sortMembers(): Seq[PsiMember] = {
def isConstructor(member: PsiMember): Boolean =
member match {
case m: PsiMethod if m.isConstructor => true
case _ => false
}
def sort(targetMap: mutable.HashMap[PsiMethod, PsiMethod]): Seq[PsiMember] = {
def compareAsConstructors(left: PsiMethod, right: PsiMethod) = {
val rightFromMap = targetMap.get(left)
if (rightFromMap.isDefined && rightFromMap.get == right) {
              false // the right constructor must be placed above the left one
} else {
val leftFromMap = targetMap.get(right)
if (leftFromMap.isDefined && leftFromMap.get == left) {
true
} else {
compareByOrder(right, left)
}
}
}
def compareByOrder(left: PsiMember, right: PsiMember): Boolean =
classMembers.indexOf(left) > classMembers.indexOf(right)
if (targetMap.isEmpty)
classMembers
else classMembers.sortWith {
(left, right) =>
if (isConstructor(left) && isConstructor(right)) {
compareAsConstructors(left.asInstanceOf[PsiMethod], right.asInstanceOf[PsiMethod])
} else {
compareByOrder(right, left)
}
}
}
val constructorsCallMap = buildConstructorTargetMap(inClass.getConstructors.sortBy(_.getTextOffset))
sort(constructorsCallMap)
}
def updateMembersAndConvert(dropMembers: Option[Seq[PsiMember]]): Seq[IntermediateNode] = {
val sortedMembers = sortMembers()
val updatedMembers = dropMembers.map(el => sortedMembers.filter(!el.contains(_))).getOrElse(sortedMembers)
updatedMembers.map(convertPsiToIntermdeiate(_, externalProperties))
}
if (classMembers.nonEmpty || objectMembers.isEmpty) {
context.get().push((false, inClass.qualifiedName))
try {
inClass match {
case clazz: PsiAnonymousClass => handleAnonymousClass(clazz)
case _ =>
val typeParams = inClass.getTypeParameters.map(convertPsiToIntermdeiate(_, externalProperties))
val modifiers = handleModifierList(inClass)
val (dropMembers, primaryConstructor) = handlePrimaryConstructor(inClass.getConstructors)
val classType = if (inClass.isInterface) ClassType.INTERFACE else ClassType.CLASS
val members = updateMembersAndConvert(dropMembers)
ClassConstruction(inClass.getName, primaryConstructor, members, modifiers, Some(typeParams),
None, classType, companionObject, Some(extendList.map(convertPsiToIntermdeiate(_, externalProperties))))
}
} finally {
context.get().pop()
}
} else {
companionObject
}
}
val (classMembers, objectMembers) = collectClassObjectMembers()
val companionObject = handleObject(objectMembers)
handleAsClass(classMembers, objectMembers, companionObject, extendList)
}
def getFirstStatement(constructor: PsiMethod): Option[PsiExpressionStatement] = {
Option(constructor.getBody).map(_.getStatements)
.flatMap(_.headOption).collect { case exp: PsiExpressionStatement => exp }
}
// builds a map from each constructor to the constructor it delegates to via this(...)
def buildConstructorTargetMap(constructors: Seq[PsiMethod]): mutable.HashMap[PsiMethod, PsiMethod] = {
val toTargetConstructorMap = new mutable.HashMap[PsiMethod, PsiMethod]()
for (constructor <- constructors) {
val refExpr = getFirstStatement(constructor).map(_.getExpression).flatMap {
case mc: PsiMethodCallExpression if mc.getMethodExpression.getQualifiedName == "this" =>
Some(mc.getMethodExpression)
case _ => None
}
refExpr.foreach { expr =>
val target = Option(expr.resolve()).flatMap {
case m: PsiMethod => Some(m)
case _ => None
}
if (target.isDefined && target.get.isConstructor) {
val finalTarget: PsiMethod = toTargetConstructorMap.getOrElse(target.get, target.get)
toTargetConstructorMap.put(constructor, finalTarget)
}
}
}
toTargetConstructorMap
}
// a primary constructor can be extracted only when a single constructor with parameters can be chosen
def handlePrimaryConstructor(constructors: Seq[PsiMethod])
(implicit associations: ListBuffer[AssociationHelper] = new ListBuffer(),
refs: Seq[ReferenceData] = Seq.empty,
withComments: Boolean = false): (Option[Seq[PsiMember]], Option[PrimaryConstruction]) = {
val dropFields = new ArrayBuffer[PsiField]()
def createPrimaryConstructor(constructor: PsiMethod): PrimaryConstruction = {
def notContains(statement: PsiStatement, where: Seq[PsiExpressionStatement]): Boolean = {
!statement.isInstanceOf[PsiExpressionStatement] ||
(statement.isInstanceOf[PsiExpressionStatement] && !where.contains(statement))
}
def getSuperCall(dropStatements: ArrayBuffer[PsiExpressionStatement]): IntermediateNode = {
val firstStatement = getFirstStatement(constructor)
val isSuper = firstStatement.map(_.getExpression).flatMap {
case mc: PsiMethodCallExpression if mc.getMethodExpression.getQualifiedName == "super" =>
Some(mc)
case _ => None
}
if (isSuper.isDefined) {
dropStatements += firstStatement.get
convertPsiToIntermdeiate(isSuper.get.getArgumentList, null)
} else {
null
}
}
def getCorrespondedFieldInfo(param: PsiParameter): Seq[(PsiField, PsiExpressionStatement)] = {
val dropInfo = new ArrayBuffer[(PsiField, PsiExpressionStatement)]()
val usages = findVariableUsage(param, Option(constructor.getBody))
for (usage <- usages) {
val parent = Option(usage.getParent)
val leftPart = parent.flatMap {
case ae: PsiAssignmentExpression if (ae.getOperationSign.getTokenType == JavaTokenType.EQ)
&& ae.getLExpression.isInstanceOf[PsiReferenceExpression] =>
Some(ae.getLExpression.asInstanceOf[PsiReferenceExpression])
case _ => None
}
val field = if (leftPart.isDefined) leftPart.get.resolve() match {
case f: PsiField if f.getContainingClass == constructor.getContainingClass && f.getInitializer == null =>
Some(f)
case _ => None
} else None
var statement: Option[PsiExpressionStatement] =
if (field.isDefined && parent.isDefined && parent.get.getParent.isInstanceOf[PsiExpressionStatement]) {
Some(parent.get.getParent.asInstanceOf[PsiExpressionStatement])
} else None
if (statement.isDefined && statement.get.getParent != constructor.getBody) {
statement = None
}
if (field.isDefined && statement.isDefined) {
dropInfo += ((field.get, statement.get))
if (field.get.getName != param.getName)
fieldParamaterMap += ((param.getName, field.get.getName))
}
}
dropInfo
}
def createConstructor: PrimaryConstruction = {
val params = constructor.getParameterList.getParameters
val updatedParams = new ArrayBuffer[(String, IntermediateNode, Boolean)]()
val dropStatements = new ArrayBuffer[PsiExpressionStatement]()
for (param <- params) {
val fieldInfo = getCorrespondedFieldInfo(param)
val updatedField = if (fieldInfo.isEmpty) {
val p = convertPsiToIntermdeiate(param, null).asInstanceOf[ParameterConstruction]
(p.name, p.scCompType, false)
} else {
fieldInfo.foreach {
case (field, statement) =>
dropFields += field
dropStatements += statement
}
val p = convertPsiToIntermdeiate(fieldInfo.head._1, WithReferenceExpression(true)).asInstanceOf[FieldConstruction]
(p.name, p.ftype, p.isVar)
}
updatedParams += updatedField
}
val superCall = getSuperCall(dropStatements)
getStatements(constructor).map {
statements =>
PrimaryConstruction(updatedParams, superCall,
statements.filter(notContains(_, dropStatements))
.map(convertPsiToIntermdeiate(_, WithReferenceExpression(true))), handleModifierList(constructor))
}.orNull
}
createConstructor
}
// If a single primary constructor cannot be chosen, returns null
def getComplexPrimaryConstructor(): PsiMethod = {
val possibleConstructors = buildConstructorTargetMap(constructors)
val candidates = constructors.filter(!possibleConstructors.contains(_))
def tryFindWithoutParamConstructor(): PsiMethod = {
val emptyParamsConstructors = constructors.filter(_.getParameterList.getParametersCount == 0)
emptyParamsConstructors.length match {
case 1 => emptyParamsConstructors.head
case _ => null
}
}
// we expect exactly one primary constructor candidate;
// otherwise, when other constructors exist, fall back to the
// constructor with an empty parameter list if exactly one is defined
candidates.length match {
case 1 => candidates.head
case _ => tryFindWithoutParamConstructor()
}
}
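// Example (illustrative): given constructors A(x: Int) and A(x: Int, y: Int), where
// A(x) delegates via this(x, 0), only A(x, y) is absent from the delegation map, so it
// is the single candidate and is chosen as the primary constructor.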
constructors.length match {
case 0 => (None, None)
case 1 =>
val updatedConstructor = createPrimaryConstructor(constructors.head)
(Some(constructors.head +: dropFields), Some(updatedConstructor))
case _ =>
val pc = getComplexPrimaryConstructor()
if (pc != null) {
val updatedConstructor = createPrimaryConstructor(pc)
(Some(pc +: dropFields), Some(updatedConstructor))
} else (None, None)
}
}
val SIMPLE_MODIFIERS_MAP = Map(
(PsiModifier.VOLATILE, ModifierType.VOLATILE),
(PsiModifier.PRIVATE, ModifierType.PRIVATE),
(PsiModifier.PROTECTED, ModifierType.PROTECTED),
(PsiModifier.TRANSIENT, ModifierType.TRANSIENT),
(PsiModifier.NATIVE, ModifierType.NATIVE)
)
def handleModifierList(owner: PsiModifierListOwner)
(implicit associations: ListBuffer[AssociationHelper] = new ListBuffer(),
refs: Seq[ReferenceData] = Seq.empty,
withComments: Boolean = false): IntermediateNode = {
val annotationDropList = Seq("java.lang.Override", "org.jetbrains.annotations.Nullable",
"org.jetbrains.annotations.NotNull", "org.jetbrains.annotations.NonNls")
def handleAnnotations: Seq[IntermediateNode] = {
val annotations = new ArrayBuffer[IntermediateNode]()
for {
a <- owner.getModifierList.getAnnotations
optValue = Option(a.getQualifiedName).map(annotationDropList.contains(_))
if optValue.isDefined && !optValue.get
} {
annotations.append(convertPsiToIntermdeiate(a, null))
}
annotations
}
def handleModifiers: Seq[IntermediateNode] = {
val modifiers = new ArrayBuffer[IntermediateNode]()
val simpleList = SIMPLE_MODIFIERS_MAP.filter {
case (psiType, el) => owner.hasModifierProperty(psiType)
}.values
modifiers ++= simpleList.map(SimpleModifier)
owner match {
case method: PsiMethod =>
val references = method.getThrowsList.getReferenceElements
for (ref <- references) {
modifiers.append(ModifierWithExpression(ModifierType.THROW, convertPsiToIntermdeiate(ref, null)))
}
if (method.findSuperMethods.exists(!_.hasModifierProperty("abstract")))
modifiers.append(SimpleModifier(ModifierType.OVERRIDE))
case c: PsiClass =>
serialVersion(c) match {
case Some(f) =>
modifiers.append(ModifierWithExpression(ModifierType.SerialVersionUID, convertPsiToIntermdeiate(f.getInitializer, null)))
case _ =>
}
if ((!c.isInterface) && c.hasModifierProperty(PsiModifier.ABSTRACT))
modifiers.append(SimpleModifier(ModifierType.ABSTRACT))
case _ =>
}
if (!owner.hasModifierProperty(PsiModifier.PUBLIC) &&
!owner.hasModifierProperty(PsiModifier.PRIVATE) &&
!owner.hasModifierProperty(PsiModifier.PROTECTED) &&
owner.getParent != null && owner.getParent.isInstanceOf[PsiClass]) {
val packageName: String = owner.getContainingFile.asInstanceOf[PsiClassOwner].getPackageName
if (packageName != "")
modifiers.append(ModifierWithExpression(ModifierType.PRIVATE,
LiteralExpression(packageName.substring(packageName.lastIndexOf(".") + 1))))
}
if (owner.hasModifierProperty(PsiModifier.FINAL) && context.get.nonEmpty && !context.get.top._1) {
owner match {
case _: PsiLocalVariable =>
case _: PsiParameter =>
case _ =>
modifiers.append(SimpleModifier(ModifierType.FINAL)) //only to classes, not objects
}
}
modifiers
}
ModifiersConstruction(handleAnnotations, handleModifiers)
}
def convertPsisToText(elements: Array[PsiElement]): String = {
val resultNode = new MainConstruction
for (part <- elements) {
resultNode.addChild(convertPsiToIntermdeiate(part, null))
}
val visitor = new SimplePrintVisitor
visitor.visit(resultNode)
visitor.stringResult
}
def convertPsiToText(element: PsiElement): String = {
val visitor = new SimplePrintVisitor
visitor.visit(convertPsiToIntermdeiate(element, null))
visitor.stringResult
}
private def getStatements(m: PsiMethod): Option[Array[PsiStatement]] = Option(m.getBody).map(_.getStatements)
private def serialVersion(c: PsiClass): Option[PsiField] = {
val serialField = c.findFieldByName("serialVersionUID", false)
if (serialField != null && serialField.getType.isAssignableFrom(PsiType.LONG) &&
serialField.hasModifierProperty("static") && serialField.hasModifierProperty("final") &&
serialField.hasInitializer) {
Some(serialField)
} else None
}
/**
* @param expr prefix or postfix expression
* @return true if this expression is a standalone statement directly inside a block
*/
private def canBeSimpified(expr: PsiExpression): Boolean = {
expr.getParent match {
case b: PsiExpressionStatement =>
b.getParent match {
case b: PsiBlockStatement => true
case b: PsiCodeBlock => true
case _ => false
}
case _ => false
}
}
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/conversion/JavaToScala.scala
|
Scala
|
apache-2.0
| 43,598 |
package org.machine.engine.flow
import scala.concurrent.{ExecutionContext, Future}
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.model.ws.{Message, BinaryMessage, TextMessage}
import akka.stream.{ActorMaterializer, FlowShape, Inlet, Outlet, SourceShape}
import akka.stream.scaladsl.{Broadcast, Flow, GraphDSL, Keep, Merge, MergePreferred, Partition, RunnableGraph, Source, Sink}
object WebSocketFlow{
def flow:Flow[Message, Message, _] = {
val graph = GraphDSL.create(){ implicit b: GraphDSL.Builder[NotUsed] =>
import GraphDSL.Implicits._
val deadend = b.add(Sink.ignore)
// val logMsg = b.add(Flow[Message].map[Message](m => {println(m); m}))
val partitionByMsgType = b.add(Partition[Message](2, msg => partition(msg)))
val msgToClientMsg = b.add(Flow.fromFunction[Message, ClientMessage](transform))
// val clientMsgToEngMsg = b.add(Flow.fromFunction[ClientMessage, EngineMessage](transform2))
val coreFlow = b.add(CoreFlow.flow)
val engMsgToTxtMsg = b.add(Flow.fromFunction[EngineMessage, Message](transform3))
partitionByMsgType.out(0) ~> msgToClientMsg ~> coreFlow ~> engMsgToTxtMsg
partitionByMsgType.out(1) ~> deadend
FlowShape(partitionByMsgType.in, engMsgToTxtMsg.out)
}.named("ws-flow")
return Flow.fromGraph(graph);
}
def partition(message: Message):Int = {
return message match{
case t: TextMessage => 0
case b: BinaryMessage => 1
}
}
def transform(message: Message):ClientMessage = {
val msg = message.asInstanceOf[TextMessage.Strict].text
return new ClientMessageBase(msg)
}
def transform2(msg: ClientMessage):EngineMessage = {
return new EngineMessageBase(
"hard coded ID",
EngineCapsuleStatuses.Ok.name,
EngineMessageTypes.CmdResult.name,
msg.payload
)
}
def transform3(msg: EngineMessage):TextMessage = {
val json = EngineMessage.toJSON(msg)
return TextMessage.apply(json)
}
}
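// Illustrative wiring (a sketch; the surrounding akka-http route is an assumption, not
// defined in this file):
//   path("ws") { handleWebSocketMessages(WebSocketFlow.flow) }
// Text frames are converted to ClientMessages, processed by CoreFlow and answered with
// TextMessage replies; binary frames are partitioned into Sink.ignore and get no response.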
|
sholloway/graph-engine
|
src/main/scala/org/machine/engine/flow/WebSocketFlow.scala
|
Scala
|
mit
| 2,016 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import scala.collection.JavaConverters._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s._
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.util.Utils
class DriverCommandFeatureStepSuite extends SparkFunSuite {
test("java resource") {
val mainResource = "local:///main.jar"
val spec = applyFeatureStep(
JavaMainAppResource(Some(mainResource)),
appArgs = Array("5", "7"))
assert(spec.pod.container.getArgs.asScala === List(
"driver",
"--properties-file", SPARK_CONF_PATH,
"--class", KubernetesTestConf.MAIN_CLASS,
"spark-internal", "5", "7"))
val jars = Utils.stringToSeq(spec.systemProperties("spark.jars"))
assert(jars.toSet === Set(mainResource))
}
test("python resource with no extra files") {
val mainResource = "local:///main.py"
val sparkConf = new SparkConf(false)
.set(PYSPARK_MAJOR_PYTHON_VERSION, "3")
val spec = applyFeatureStep(
PythonMainAppResource(mainResource),
conf = sparkConf)
assert(spec.pod.container.getArgs.asScala === List(
"driver",
"--properties-file", SPARK_CONF_PATH,
"--class", KubernetesTestConf.MAIN_CLASS,
"/main.py"))
val envs = spec.pod.container.getEnv.asScala
.map { env => (env.getName, env.getValue) }
.toMap
assert(envs(ENV_PYSPARK_MAJOR_PYTHON_VERSION) === "3")
val files = Utils.stringToSeq(spec.systemProperties("spark.files"))
assert(files.toSet === Set(mainResource))
}
test("python resource with extra files") {
val expectedMainResource = "/main.py"
val expectedPySparkFiles = "/example2.py:/example3.py"
val filesInConf = Set("local:///example.py")
val mainResource = s"local://$expectedMainResource"
val pyFiles = Seq("local:///example2.py", "local:///example3.py")
val sparkConf = new SparkConf(false)
.set("spark.files", filesInConf.mkString(","))
.set(PYSPARK_MAJOR_PYTHON_VERSION, "2")
val spec = applyFeatureStep(
PythonMainAppResource(mainResource),
conf = sparkConf,
appArgs = Array("5", "7", "9"),
pyFiles = pyFiles)
assert(spec.pod.container.getArgs.asScala === List(
"driver",
"--properties-file", SPARK_CONF_PATH,
"--class", KubernetesTestConf.MAIN_CLASS,
"/main.py", "5", "7", "9"))
val envs = spec.pod.container.getEnv.asScala
.map { env => (env.getName, env.getValue) }
.toMap
val expected = Map(
ENV_PYSPARK_FILES -> expectedPySparkFiles,
ENV_PYSPARK_MAJOR_PYTHON_VERSION -> "2")
assert(envs === expected)
val files = Utils.stringToSeq(spec.systemProperties("spark.files"))
assert(files.toSet === pyFiles.toSet ++ filesInConf ++ Set(mainResource))
}
test("R resource") {
val expectedMainResource = "/main.R"
val mainResource = s"local://$expectedMainResource"
val spec = applyFeatureStep(
RMainAppResource(mainResource),
appArgs = Array("5", "7", "9"))
assert(spec.pod.container.getArgs.asScala === List(
"driver",
"--properties-file", SPARK_CONF_PATH,
"--class", KubernetesTestConf.MAIN_CLASS,
"/main.R", "5", "7", "9"))
}
private def applyFeatureStep(
resource: MainAppResource,
conf: SparkConf = new SparkConf(false),
appArgs: Array[String] = Array(),
pyFiles: Seq[String] = Nil): KubernetesDriverSpec = {
val kubernetesConf = KubernetesTestConf.createDriverConf(
sparkConf = conf,
mainAppResource = resource,
appArgs = appArgs,
pyFiles = pyFiles)
val step = new DriverCommandFeatureStep(kubernetesConf)
val pod = step.configurePod(SparkPod.initialPod())
val props = step.getAdditionalPodSystemProperties()
KubernetesDriverSpec(pod, Nil, props)
}
}
|
WindCanDie/spark
|
resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStepSuite.scala
|
Scala
|
apache-2.0
| 4,745 |
/**
* This file is part of nMix.
* Copyright (C) 2015-2016-2017 Agora Voting SL <[email protected]>
* nMix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License.
* nMix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with nMix. If not, see <http://www.gnu.org/licenses/>.
**/
package org.nvotes.mix
import org.nvotes.libmix._
/** The configuration for a protocol run, typically for an election
*
* The trustees and ballotbox field are public keys. They must be formatted without
* spaces, using \\n as markers for newlines. Read by Crypto.ReadPublicRSA
*
* Generate files with this format with the GenConfig Command
*/
case class Config(id: String, name: String, modulus: String, generator: String,
items: Int, ballotbox: String, trustees: Seq[String]) {
override def toString() = s"Config($id $name $items)"
}
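// Illustrative instance (a sketch; every value below is hypothetical):
//   Config(id = "election-2017", name = "Board election",
//     modulus = "167199...", generator = "2", items = 2,
//     ballotbox = "-----BEGIN PUBLIC KEY-----\\n...",
//     trustees = Seq("-----BEGIN PUBLIC KEY-----\\n...", "-----BEGIN PUBLIC KEY-----\\n..."))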
/** A share of the distributed key.
*
* The public part is stored as an nMix EncryptionKeyShareDTO , which
* contains the share and the proof of knowledge.
*
* The private part is aes encrypted by the authority.
*/
case class Share(share: EncryptionKeyShareDTO, encryptedPrivateKey: String, aesIV: String)
/** Permutation data resulting from offline phase of mixing
*
* In the current implementation this data is only stored locally
* in memory. For this reason
*
* 1. The data does not need to be encrypted.
* 2. The data does not need to be serialized.
*
* Changing the implementation to store this remotely _must_
* include encryption of permutation data.
*/
case class PreShuffleData(proof: PermutationProofDTO, pData: PermutationData)
/** Ballots provided by the ballotbox in unicrypt format. Encrypted */
case class Ballots(ballots: Seq[String])
/** Plaintexts jointly encrypted by authorities after mixing, in unicrypt format */
case class Plaintexts(plaintexts: Seq[String])
/** Convenience class to pass around relevant data */
case class Context(config: Config, section: BoardSectionInterface, trusteeCfg: TrusteeConfig,
position: Int, cSettings: CryptoSettings)
|
nVotesOrg/nMix
|
src/main/scala/Models.scala
|
Scala
|
agpl-3.0
| 2,465 |
package tshrdlu.util.index
/**
* Copyright 2013 Nick Wilson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File
import java.io.{ByteArrayInputStream,ByteArrayOutputStream}
import java.io.{ObjectInputStream,ObjectOutputStream}
import org.apache.commons.codec.binary.Base64
import org.apache.lucene.analysis.Analyzer
import org.apache.lucene.analysis.en.EnglishAnalyzer
import org.apache.lucene.document.{Document,StoredField}
import org.apache.lucene.store.{Directory,SimpleFSDirectory}
import org.apache.lucene.util.Version
/**
* Provides common functionality for reader/writer factories.
*
* @tparam T the type of object in the index (must be serializable)
*/
abstract class ReaderOrWriterFactory[T] {
/**
* Finds and uses an index by name. See
* [[tshrdlu.util.index.Settings.BaseIndexPath]] for a description of where
* the index is located on disk.
*
* @param indexName the name of the index
*/
def apply(indexName: String): T = {
apply(new File(Settings.BaseIndexPath + "/index-" + indexName))
}
/**
* Uses an index in the specified directory.
*
* @param indexDirectory the path to the index
*/
def apply(indexDirectory: File): T = {
val index = new SimpleFSDirectory(indexDirectory)
apply(index)
}
/**
* Uses an existing object representing a Lucene index.
*
* @param index the index
*/
def apply(index: Directory): T
}
/**
* Converts between Lucene <code>Document</code>s and objects to index.
* Objects are serialized and written to a field in the <code>Document</code>.
* If the concrete implementation of this class provides a unique ID for each
* object, the ID is also written to a field.
*
* @param idFieldName the field name to use for the ID
* @param serializedFieldName the field name to use for the serialized object
*/
abstract class ObjectToDocument[T](
val idFieldName: String = "_object_id_",
val serializedFieldName: String = "_base64_object_")
extends (T => Document) {
/**
* Convert an object of type <code>T</code> to a Lucene <code>Document</code>.
*
* @param theObject the object to convert to a <code>Document</code>
*/
def apply(theObject: T): Document = {
val document = new Document()
getId(theObject) collect {
case id => document.add(new StoredField(idFieldName, id))
}
document.add(new StoredField(serializedFieldName, serialize(theObject)))
addFields(document, theObject)
document
}
/**
* Converts a Lucene <code>Document</code> to an object of type
* <code>T</code>.
*
* @param document the <code>Document</code> to convert to a object
*/
def unapply(document: Document): Option[T] = {
try {
Some(deserialize(document.get(serializedFieldName)))
} catch {
case e: Exception => None
}
}
/**
* When converting to a <code>Document</code>, adds fields to the index that
* can be used from search queries.
*
* @param document the document to add fields to
* @param theObject the object used to populate the field values
*/
def addFields(document: Document, theObject: T)
/**
* Gets a unique ID for the object if desired, otherwise <code>None</code>.
* If an ID is returned, it is added to the index.
*
* @param theObject the object to extract an ID from
* @return <code>Some(id)</code> or <code>None</code>
*/
def getId(theObject: T): Option[Long]
private def serialize(theObject: T): String = {
val baos = new ByteArrayOutputStream()
val oas = new ObjectOutputStream(baos)
oas.writeObject(theObject)
Base64.encodeBase64String(baos.toByteArray())
}
private def deserialize(serializedObject: String): T = {
val b = Base64.decodeBase64(serializedObject)
val bi = new ByteArrayInputStream(b)
val si = new ObjectInputStream(bi)
si.readObject().asInstanceOf[T]
}
}
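/**
* A minimal illustrative subclass (hypothetical sketch): indexes a simple tweet-like
* object so that its text is searchable while the full object still round-trips
* through the serialized field. The ExampleTweet type and field names are assumptions.
*/
case class ExampleTweet(id: Long, text: String)

class ExampleTweetToDocument extends ObjectToDocument[ExampleTweet] {
  // Add a tokenized, searchable field alongside the serialized object.
  def addFields(document: Document, theObject: ExampleTweet) {
    document.add(new org.apache.lucene.document.TextField(
      "text", theObject.text, org.apache.lucene.document.Field.Store.NO))
  }

  // The tweet id doubles as the unique document id stored in the index.
  def getId(theObject: ExampleTweet): Option[Long] = Some(theObject.id)
}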
/**
* Creates a standard English analyzer for tokenizing text.
*/
object EnglishAnalyzerCreator extends (Version => Analyzer) {
def apply(luceneVersion: Version): Analyzer = new EnglishAnalyzer(luceneVersion)
}
/**
* Miscellaneous settings.
*/
object Settings {
/**
* The version of the Lucene index format to use.
*/
val LuceneVersion = Version.LUCENE_42
/**
* The directory where an index is stored if it is referenced by name. This
* value is set to "<code>[base]/index-[indexName]/</code>" where:
*
* <ul>
* <li><code>[base]</code> is the value of the
* <code>TSHRDLU_INDEX_DIR</code> environment variable if set,
* otherwise the temp directory, in the dir "tshrdlu".
* <li><code>[indexName]</code> is the name of the index</li>
* </ul>
*/
val BaseIndexPath: String = {
val result = Option(System.getenv("TSHRDLU_INDEX_DIR")).getOrElse(
new File(new File(System.getProperty("java.io.tmpdir")), "tshrdlu")).toString
new File(result).mkdirs()
result
}
}
|
utcompling/tshrdlu
|
src/main/scala/tshrdlu/util/index/Common.scala
|
Scala
|
apache-2.0
| 5,470 |
package parser.px.analyzers.structured
import scala.collection.mutable.HashMap
/**
* Utility class for referencing a field by its full path.
* The class keeps track of all intermediary objects.
*/
protected class Binder(val parent:Binder, val fld:Field) {
def ->>(name:String) = Binder(this,name,0)
def ->>(name:(String,Int)) = Binder(this,name._1,name._2)
def ->>(idx:Int) = Binder(this,fld.name,idx)
/** Inserts a field at the current field position ; the current field (and following) are moved */
def <==(fld:Field):Unit = parent.fld.insertAt(this.fld,fld)
/** Inserts a field after the current field ; the next fields are moved */
def <=+(fld:Field):Unit = parent.fld.insertAfter(this.fld,fld)
/** Replaces the current field */
def <<=(fld:Field):Unit = parent.fld.replace(this.fld,fld)
/** Deletes the field */
def delete():Unit = parent.fld.delete(fld)
override def toString = fld.toString
}
object Binder {
import scala.language.implicitConversions
implicit def toInt(b:Binder) = b.fld.toInt
implicit def toDouble(b:Binder) = b.fld.toDouble
implicit def toBool(b:Binder) = b.fld.toBool
implicit def toFld(b:Binder) = b.fld
implicit def toBinder(f:Container) = this(f)
implicit def toCouple(s:String) = new { def apply(idx:Int)=(s,idx) }
def apply(root:Container):Binder =
new Binder(null,root.asInstanceOf[Field])
protected def apply(parent:Binder, name:String, idx:Int):Binder = parent.fld match {
case c:Container => new Binder(parent,c(name,idx))
case x => throw new IllegalStateException("cannot fetch field <"+name+"> from leaf field <"+x.name+">")
}
}
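// Illustrative navigation (a sketch; the field names are hypothetical): for a parsed
// Container `root`, `root ->> "header" ->> ("row", 2) ->> "value"` walks to the "row"
// child at index 2 under "header" and yields a Binder on its "value" field; `<<=` then
// replaces that field in place, and `delete()` removes it from its parent.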
|
Y-P-/data-processing-binding
|
Parser/src/parser/px/analyzers/structured/Binder.scala
|
Scala
|
gpl-3.0
| 1,728 |
class DummyTest
|
matheshar/simple-build-tool
|
src/sbt-test/tests/scalacheck-a/src/test/scala/DummyTest.scala
|
Scala
|
bsd-3-clause
| 17 |
package breeze
import java.util.zip._
import java.io._
import scala.collection.generic._
import scala.collection.mutable
import java.util.BitSet
/**
*
* @author dlwh
*/
package object util {
/**
* Deserializes an object using java serialization
*/
def readObject[T](loc: File): T = readObject(loc, false)
/**
* Deserializes an object using java serialization
*/
def readObject[T](loc: File, ignoreSerialVersionUID: Boolean) = {
val stream = new BufferedInputStream(new GZIPInputStream(new FileInputStream(loc)))
val oin = nonstupidObjectInputStream(stream, ignoreSerialVersionUID)
try {
oin.readObject().asInstanceOf[T]
} finally {
oin.close()
}
}
/**
* For reasons that are best described as asinine, ObjectInputStream does not take into account
* Thread.currentThread.getContextClassLoader. This fixes that.
*
* @param stream
* @param ignoreSerialVersionUID this is not a safe thing to do, but sometimes...
* @return
*/
def nonstupidObjectInputStream(stream: InputStream, ignoreSerialVersionUID: Boolean = false):ObjectInputStream = {
new ObjectInputStream(stream) with SerializableLogging {
@throws[IOException]
@throws[ClassNotFoundException]
override def resolveClass(desc: ObjectStreamClass): Class[_] = {
try {
val currentTccl: ClassLoader = Thread.currentThread.getContextClassLoader
currentTccl.loadClass(desc.getName)
} catch {
case e: Exception =>
super.resolveClass(desc)
}
}
// from http://stackoverflow.com/questions/1816559/make-java-runtime-ignore-serialversionuids
override protected def readClassDescriptor(): ObjectStreamClass = {
var resultClassDescriptor = super.readClassDescriptor(); // initially streams descriptor
if(ignoreSerialVersionUID) {
var localClass: Class[_] = null; // the class in the local JVM that this descriptor represents.
try {
localClass = Class.forName(resultClassDescriptor.getName)
} catch {
case e: ClassNotFoundException =>
logger.error("No local class for " + resultClassDescriptor.getName, e)
return resultClassDescriptor
}
val localClassDescriptor = ObjectStreamClass.lookup(localClass)
if (localClassDescriptor != null) { // only if class implements serializable
val localSUID = localClassDescriptor.getSerialVersionUID
val streamSUID = resultClassDescriptor.getSerialVersionUID
if (streamSUID != localSUID) { // check for serialVersionUID mismatch.
val s = new StringBuffer("Overriding serialized class version mismatch: ")
s.append("local serialVersionUID = ").append(localSUID)
s.append(" stream serialVersionUID = ").append(streamSUID)
val e = new InvalidClassException(s.toString())
logger.error("Potentially Fatal Deserialization Operation.", e);
resultClassDescriptor = localClassDescriptor; // Use local class descriptor for deserialization
}
}
}
resultClassDescriptor
}
}
}
/**
* Serializes an object using java serialization
*/
def writeObject[T](out: File, parser: T): Unit = {
val stream = new ObjectOutputStream(new BufferedOutputStream(new GZIPOutputStream(new FileOutputStream(out))))
stream.writeObject(parser)
stream.close()
}
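// Illustrative round trip (a sketch; the path and model type are hypothetical):
//   writeObject(new File("/tmp/model.ser.gz"), model)
//   val restored = readObject[Model](new File("/tmp/model.ser.gz"))
// Both helpers use gzip-compressed java serialization, so they must be used as a pair.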
/**
* You can write TODO in your code, and get an exception at runtime for any expression.
*/
def TODO = sys.error("TODO (Not implemented)")
/**
* You can write XXX in your code and get an exception at runtime for any expression.
*/
def XXX = sys.error("XXX Not Implemented")
/**
* Similar to the TODO expression, except this one is for types.
*/
type TODO = Nothing
/**
* Computes the current source file and line number.
*/
@noinline def LOCATION = {
val e = new Exception().getStackTrace()(1)
e.getFileName() + ":" + e.getLineNumber()
}
/**
* Computes the source file location of the nth parent.
* 0 is equivalent to LOCATION
*/
@noinline def CALLER(nth : Int) = {
val e = new Exception().getStackTrace()(nth+1)
e.getFileName() + ":" + e.getLineNumber()
}
/**
* Returns a string with info about the available and used space.
*/
def memoryString = {
val r = Runtime.getRuntime
val free = r.freeMemory / (1024 * 1024)
val total = r.totalMemory / (1024 * 1024)
((total - free) + "M used; " + free + "M free; " + total + "M total")
}
/**
* prints a and returns it.
*/
def trace[T](a: T) = {println(a); a}
// this should be a separate trait but Scala is freaking out
class SeqExtras[T](s: Seq[T]) {
def argmax(implicit ordering: Ordering[T]) = {
s.zipWithIndex.reduceLeft( (a,b) => if(ordering.gt(a._1,b._1)) a else b)._2
}
def argmin(implicit ordering: Ordering[T]) = {
s.zipWithIndex.reduceLeft( (a,b) => if(ordering.lt(a._1,b._1)) a else b)._2
}
def unfold[U,To](init: U)(f: (U,T)=>U)(implicit cbf: CanBuildFrom[Seq[T], U, To]) = {
val builder = cbf.apply(s)
builder.sizeHint(s.size + 1)
var u = init
builder += u
for( t <- s) {
u = f(u,t)
builder += u
}
builder.result()
}
}
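// Illustrative behaviour (a sketch), via the implicit SeqExtras wrapper:
//   Seq(3, 1, 2).argmax == 0
//   Seq(3, 1, 2).unfold(0)(_ + _) == Seq(0, 3, 4, 6)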
implicit def seqExtras[T](s: Seq[T]) = new SeqExtras(s)
implicit def arraySeqExtras[T](s: Array[T]) = new SeqExtras(s)
implicit class AwesomeBitSet(val bs: java.util.BitSet) extends AnyVal {
def apply(r: Int) = bs.get(r)
def iterator:Iterator[Int] = new BSIterator(bs)
def map[U, C](f: Int=>U)(implicit cbf: CanBuildFrom[java.util.BitSet, U, C]) = {
val r: mutable.Builder[U, C] = cbf(bs)
r.sizeHint(bs.size)
iterator foreach { i =>
r += f(i)
}
r.result()
}
def foreach[U](f: Int=>U) {
var i = bs.nextSetBit(0)
while(i != -1) {
f(i)
i = bs.nextSetBit(i+1)
}
}
def &=(other: BitSet) = {
bs and other
bs
}
def &~=(other: BitSet) = {
bs andNot other
bs
}
def |=(other: BitSet)= {
bs or other
bs
}
def ^=(other: BitSet) = {
bs xor other
bs
}
def |(other: BitSet) = {
copy |= other
}
def &~(other: BitSet) = {
copy &~= other
}
def &(other: BitSet) = {
copy &= other
}
def ^(other: BitSet) = {
copy ^= other
}
def copy = bs.clone().asInstanceOf[java.util.BitSet]
def nonEmpty = !bs.isEmpty
def +=(i: Int) = {
bs.set(i)
bs
}
}
private class BSIterator(bs: java.util.BitSet) extends Iterator[Int] {
var currentBit = bs.nextSetBit(0)
def hasNext: Boolean = currentBit != -1
def next() = {
assert(currentBit != -1)
val cur = currentBit
currentBit = bs.nextSetBit(cur+1)
cur
}
}
implicit def _bitsetcbf[U]:CanBuildFrom[java.util.BitSet, U, Set[U]] = new CanBuildFrom[java.util.BitSet, U, Set[U]] {
def apply(from: BitSet): mutable.Builder[U, Set[U]] = Set.newBuilder[U]
def apply(): mutable.Builder[U, Set[U]] = Set.newBuilder[U]
}
implicit class AwesomeScalaBitSet(val bs: scala.collection.BitSet) extends AnyVal {
def toJavaBitSet = {
val jbs = new java.util.BitSet(bs.lastOption.getOrElse(0) + 1)
bs.foreach(jbs.set(_))
jbs
}
}
}
|
wstcpyt/breeze
|
math/src/main/scala/breeze/util/package.scala
|
Scala
|
apache-2.0
| 7,520 |
package uk.org.lidalia.lang.collection
import org.scalatest.PropSpec
import org.scalatest.prop.TableDrivenPropertyChecks
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class FlatMapZeroToZeroAsOptionTests extends PropSpec with TableDrivenPropertyChecks {
val zeroAsOption: ?[Int] = Zero.asInstanceOf[?[Int]]
property("Flat Mapping a Zero to a Zero as ?") {
val mapped: Zero.type = Zero.flatMap { _: Nothing => zeroAsOption }
assert(mapped === Zero)
}
// property("Flat Mapping a Zero as a ? to a Zero as ?") {
// val initialZero: ?[Int] = Zero
// val mapped: ?[Int] = initialZero.flatMap { i: Int => zeroAsOption }
// assert(mapped === Zero)
// }
property("Flat Mapping a Zero as an Iterable to a Zero as ?") {
val initialZero: Iterable[Int] = Zero
val mapped: Iterable[Int] = initialZero.flatMap { i: Int => zeroAsOption }
assert(mapped === Zero)
}
// property("Flat Mapping a Zero as a List to a Zero as ?") {
// val initialZero: List[Int] = Zero
// val mapped: List[Int] = initialZero.flatMap { i: Int => zeroAsOption }
// assert(mapped === Zero)
// }
// property("Flat Mapping a Zero as a Set to a Zero as ?") {
// val initialZero: Set[Int] = Zero
// val mapped: Set[Int] = initialZero.flatMap { i: Int => zeroAsOption }
// assert(mapped === Zero)
// }
}
|
Mahoney/collections
|
src/test/scala/uk/org/lidalia/lang/collection/FlatMapZeroToZeroAsOptionTests.scala
|
Scala
|
mit
| 1,387 |
package demesne.module
import akka.cluster.sharding.ShardRegion
import demesne.{ AggregateRootType, DomainModel }
/**
* Created by rolfsd on 8/31/16.
*/
sealed trait AggregateEnvironment
object AggregateEnvironment {
trait Resolver extends (DomainModel => AggregateEnvironment )
object Resolver {
val local: Resolver = (m: DomainModel) => LocalAggregate
val clustered: Resolver = (m: DomainModel) => {
ClusteredAggregate(
toExtractEntityId = (rt: AggregateRootType) => rt.aggregateIdFor,
toExtractShardId = (rt: AggregateRootType) => rt.shardIdFor
)
}
}
}
case class ClusteredAggregate(
toExtractEntityId: AggregateRootType => ShardRegion.ExtractEntityId = (rt: AggregateRootType) =>
rt.aggregateIdFor,
toExtractShardId: AggregateRootType => ShardRegion.ExtractShardId = (rt: AggregateRootType) =>
rt.shardIdFor
) extends AggregateEnvironment
case object LocalAggregate extends AggregateEnvironment
|
dmrolfs/demesne
|
core/src/main/scala/demesne/module/AggregateEnvironment.scala
|
Scala
|
apache-2.0
| 968 |
package dao
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import javax.inject.Inject
import play.api.db.slick.DatabaseConfigProvider
import play.api.db.slick.HasDatabaseConfigProvider
import slick.driver.JdbcProfile
import models._
import scala.concurrent.Future
import org.joda.time.DateTime
import com.github.tototoshi.slick.H2JodaSupport._
import play.api.Logger
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.Props
/**
* @author golubotsky
*/
class UpdatesDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider) extends HasDatabaseConfigProvider[JdbcProfile] with Actor with ActorLogging {
import driver.api._
import UpdatesDAO._
private val Updates = TableQuery[UpdatesQuery]
private val UpdatesInserter = TableQuery[UpdatesQuery] returning TableQuery[UpdatesQuery].map(_.updateId)
def all(): Future[Seq[Update]] = db.run(Updates.result)
def insert(update: Update): Future[Long] = {
val result: Future[Long] = db.run(UpdatesInserter += update)
result onFailure {case err => Logger.error(s"Error: ${err.getMessage}")}
result
}
def update(update: Update): Unit = db.run(Updates.filter( _.updateId === update.updateId ).update(update))
def getUpdate(id: Long): Future[Option[Update]] = db.run(Updates.filter( _.updateId === id ).result.headOption)
private class UpdatesQuery(tag: Tag) extends Table[Update](tag, "UPDATES") {
def updateId = column[Long]("update_id", O.PrimaryKey)
def message = column[String]("message")
def isProcessed = column[Boolean]("is_processed")
def received = column[Option[DateTime]]("received")
def * = (updateId, message, isProcessed, received) <> ((Update.apply _).tupled , Update.unapply)
}
def receive = {
case InsertUpdate(update) => sender ! this.insert(update)
//this.insert(update).map { updateId => sender ! updateId }
case UpdateUpdate(update) => this.update(update)
case GetUpdate(id) => {
this.getUpdate(id).map {
case Some(update) => sender ! update
case None => log.info("update not found for id: %d".format(id))
}
}
case GetAllUpdates => {
this.all.map {
updates => sender ! updates
}
}
}
}
object UpdatesDAO {
val proprs = Props[UpdatesDAO]
case class InsertUpdate(update: Update)
case class UpdateUpdate(update: Update)
case class GetUpdate(id: Long)
case class GetAllUpdates()
}
|
timonag/ya-money-bot
|
app/dao/UpdatesDAO.scala
|
Scala
|
mit
| 2,476 |
object CallbackTo {
extension [A](self: CallbackTo[Option[A]]) {
inline def asOption: Option[A] =
self.toScalaFn()
}
}
final class CallbackTo[A] (val x: List[A]) {
inline def runNow(): A =
x.head
inline def toScalaFn: () => A =
() => runNow()
def map[B](f: A => B): CallbackTo[B] =
???
def toOption: Option[A] = {
val x = map[Option[A]](Some(_))
val y = x: CallbackTo[Option[A]] // ok: type is what we expect
y.asOption // error
}
}
|
lampepfl/dotty
|
tests/pos/i11894b.scala
|
Scala
|
apache-2.0
| 485 |
package jgo.tools.compiler.parser.combinatorExten
import scala.util.parsing.combinator.Parsers
trait TracePrintingParsers extends FancyParsers {
override def nameize[T](p: Parser[T], name: String) =
log(super.nameize(p, name))(name)
override def log[T](p: => Parser[T])(name: String): Parser[T] = Parser{ in =>
println("trying "+ name +" at\\n"+
in.pos.longString.linesWithSeparators.map("| " + _).mkString)
val r = p(in)
println(name +" --> "+ r)
r
}
}
|
thomasmodeneis/jgo
|
src/src/main/scala/jgo/tools/compiler/parser/combinatorExten/TracePrintingParsers.scala
|
Scala
|
gpl-3.0
| 489 |
package mist.api
import mist.api.internal.{ArgCombiner, FnForTuple}
import mist.api.data.JsMap
import scala.util._
trait ArgDef[A] { self =>
def extract(ctx: FnContext): Extraction[A]
def describe(): Seq[ArgInfo]
private[api] def validate(params: JsMap): Extraction[Unit]
final def combine[B](other: ArgDef[B])(implicit cmb: ArgCombiner[A, B]): ArgDef[cmb.Out] = cmb(self, other)
final def &[B](other: ArgDef[B])(implicit cmb: ArgCombiner[A, B]): ArgDef[cmb.Out] = cmb(self, other)
final def map[B](f: A => B): ArgDef[B] = {
new ArgDef[B] {
override def describe(): Seq[ArgInfo] = self.describe()
override def extract(ctx: FnContext): Extraction[B] = self.extract(ctx).map(f)
override def validate(params: JsMap): Extraction[Unit] = self.validate(params)
}
}
final def andThen[B](f: A => Extraction[B]): ArgDef[B] = {
new ArgDef[B] {
override def describe(): Seq[ArgInfo] = self.describe()
override def extract(ctx: FnContext): Extraction[B] = self.extract(ctx).flatMap(f)
override def validate(params: JsMap): Extraction[Unit] = self.validate(params)
}
}
final def apply[F, R, RR <: R](f: F)(implicit fnT: FnForTuple.Aux[A, F, RR]): RawHandle[R] = {
new RawHandle[R] {
override def invoke(ctx: FnContext): Try[R] = self.extract(ctx) match {
case Extracted(a) => Try(fnT(f, a))
case f: Failed => Failure(new IllegalArgumentException(s"Arguments do not conform to job [$f]"))
}
override def describe(): Seq[ArgInfo] = self.describe()
override def validate(params: JsMap): Extraction[Unit] = self.validate(params)
}
}
}
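// Illustrative usage (a sketch; `arg[Int]("n")`-style constructors are defined elsewhere
// in mist-lib and are an assumption here):
//   val handle = (arg[Int]("n") & arg[String]("name")) { (n, name) => name * n }
// `&` merges the two ArgDefs into an ArgDef over the combined tuple, and `apply` lifts
// the function into a RawHandle whose invoke() extracts both arguments from the FnContext.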
|
Hydrospheredata/mist
|
mist-lib/src/main/scala/mist/api/ArgDef.scala
|
Scala
|
apache-2.0
| 1,651 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.parser
import scala.collection.mutable
import scala.language.implicitConversions
import org.apache.spark.sql.{CarbonToSparkAdapter, DeleteRecords, UpdateTable}
import org.apache.spark.sql.catalyst.{CarbonDDLSqlParser, TableIdentifier}
import org.apache.spark.sql.catalyst.CarbonTableIdentifierImplicit._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.execution.command.datamap.{CarbonCreateDataMapCommand, CarbonDataMapRebuildCommand, CarbonDataMapShowCommand, CarbonDropDataMapCommand}
import org.apache.spark.sql.execution.command.management._
import org.apache.spark.sql.execution.command.partition.{CarbonAlterTableDropPartitionCommand, CarbonAlterTableSplitPartitionCommand}
import org.apache.spark.sql.execution.command.schema.{CarbonAlterTableAddColumnCommand, CarbonAlterTableColRenameDataTypeChangeCommand, CarbonAlterTableDropColumnCommand}
import org.apache.spark.sql.execution.command.table.CarbonCreateTableCommand
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.CarbonExpressions.CarbonUnresolvedRelation
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.execution.command.stream.{CarbonCreateStreamCommand, CarbonDropStreamCommand, CarbonShowStreamsCommand}
import org.apache.spark.sql.util.CarbonException
import org.apache.spark.util.CarbonReflectionUtils
import org.apache.carbondata.api.CarbonStore.LOGGER
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.spark.CarbonOption
import org.apache.carbondata.spark.util.{CarbonScalaUtil, CommonUtil}
/**
* TODO remove the duplicate code and add the common methods to a common class.
* Parser for All Carbon DDL, DML cases in Unified context
*/
class CarbonSpark2SqlParser extends CarbonDDLSqlParser {
override def parse(input: String): LogicalPlan = {
synchronized {
// Initialize the Keywords.
initLexical
phrase(start)(new lexical.Scanner(input)) match {
case Success(plan, _) =>
CarbonScalaUtil.cleanParserThreadLocals()
plan match {
case x: CarbonLoadDataCommand =>
x.inputSqlString = input
x
case x: CarbonAlterTableCompactionCommand =>
x.alterTableModel.alterSql = input
x
case logicalPlan => logicalPlan
}
case failureOrError =>
CarbonScalaUtil.cleanParserThreadLocals()
CarbonException.analysisException(failureOrError.toString)
}
}
}
protected lazy val start: Parser[LogicalPlan] = explainPlan | startCommand
protected lazy val startCommand: Parser[LogicalPlan] =
loadManagement | showLoads | alterTable | restructure | updateTable | deleteRecords |
alterPartition | datamapManagement | alterTableFinishStreaming | stream | cli
protected lazy val loadManagement: Parser[LogicalPlan] =
deleteLoadsByID | deleteLoadsByLoadDate | cleanFiles | loadDataNew
protected lazy val restructure: Parser[LogicalPlan] =
alterTableColumnRenameAndModifyDataType | alterTableDropColumn | alterTableAddColumns
protected lazy val alterPartition: Parser[LogicalPlan] =
alterAddPartition | alterSplitPartition | alterDropPartition
protected lazy val datamapManagement: Parser[LogicalPlan] =
createDataMap | dropDataMap | showDataMap | refreshDataMap
protected lazy val stream: Parser[LogicalPlan] =
createStream | dropStream | showStreams
protected lazy val alterAddPartition: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (ADD ~> PARTITION ~>
"(" ~> repsep(stringLit, ",") <~ ")") <~ opt(";") ^^ {
case dbName ~ table ~ addInfo =>
val alterTableAddPartitionModel = AlterTableSplitPartitionModel(dbName, table, "0", addInfo)
CarbonAlterTableSplitPartitionCommand(alterTableAddPartitionModel)
}
protected lazy val alterSplitPartition: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (SPLIT ~> PARTITION ~>
"(" ~> numericLit <~ ")") ~ (INTO ~> "(" ~> repsep(stringLit, ",") <~ ")") <~ opt(";") ^^ {
case dbName ~ table ~ partitionId ~ splitInfo =>
val alterTableSplitPartitionModel =
AlterTableSplitPartitionModel(dbName, table, partitionId, splitInfo)
if (partitionId == "0") {
sys.error("Please use [Alter Table Add Partition] statement to split default partition!")
}
CarbonAlterTableSplitPartitionCommand(alterTableSplitPartitionModel)
}
protected lazy val alterDropPartition: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (DROP ~> PARTITION ~>
"(" ~> numericLit <~ ")") ~ (WITH ~> DATA).? <~ opt(";") ^^ {
case dbName ~ table ~ partitionId ~ withData =>
val dropWithData = withData.getOrElse("NO") match {
case "NO" => false
case _ => true
}
val alterTableDropPartitionModel =
AlterTableDropPartitionModel(dbName, table, partitionId, dropWithData)
CarbonAlterTableDropPartitionCommand(alterTableDropPartitionModel)
}
protected lazy val alterTable: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ (COMPACT ~ stringLit) ~
(WHERE ~> (SEGMENT ~ "." ~ ID) ~> IN ~> "(" ~> repsep(segmentId, ",") <~ ")").? <~
opt(";") ^^ {
case dbName ~ table ~ (compact ~ compactType) ~ segs =>
val altertablemodel =
AlterTableModel(convertDbNameToLowerCase(dbName), table, None, compactType,
Some(System.currentTimeMillis()), null, segs)
CarbonAlterTableCompactionCommand(altertablemodel)
}
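// Accepted input (illustrative): ALTER TABLE db.t COMPACT 'MAJOR'
// optionally restricted to segments, e.g. ALTER TABLE db.t COMPACT 'CUSTOM' WHERE SEGMENT.ID IN (1,2)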
/**
* The below syntax is used to change the status of the segment
* from "streaming" to "streaming finish".
* ALTER TABLE tableName FINISH STREAMING
*/
protected lazy val alterTableFinishStreaming: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident <~ FINISH <~ STREAMING <~ opt(";") ^^ {
case dbName ~ table =>
CarbonAlterTableFinishStreaming(dbName, table)
}
/**
* The syntax of CREATE STREAM
* CREATE STREAM [IF NOT EXISTS] streamName ON TABLE [dbName.]tableName
* [STMPROPERTIES('KEY'='VALUE')]
* AS SELECT COUNT(COL1) FROM tableName
*/
protected lazy val createStream: Parser[LogicalPlan] =
CREATE ~> STREAM ~> opt(IF ~> NOT ~> EXISTS) ~ ident ~
(ON ~> TABLE ~> (ident <~ ".").?) ~ ident ~
(STMPROPERTIES ~> "(" ~> repsep(loadOptions, ",") <~ ")").? ~
(AS ~> restInput) <~ opt(";") ^^ {
case ifNotExists ~ streamName ~ dbName ~ tableName ~ options ~ query =>
val optionMap = options.getOrElse(List[(String, String)]()).toMap[String, String]
CarbonCreateStreamCommand(
streamName, dbName, tableName, ifNotExists.isDefined, optionMap, query)
}
/**
* The syntax of DROP STREAM
* DROP STREAM [IF EXISTS] streamName
*/
protected lazy val dropStream: Parser[LogicalPlan] =
DROP ~> STREAM ~> opt(IF ~> EXISTS) ~ ident <~ opt(";") ^^ {
case ifExists ~ streamName =>
CarbonDropStreamCommand(streamName, ifExists.isDefined)
}
/**
* The syntax of SHOW STREAMS
* SHOW STREAMS [ON TABLE dbName.tableName]
*/
protected lazy val showStreams: Parser[LogicalPlan] =
SHOW ~> STREAMS ~> opt(ontable) <~ opt(";") ^^ {
case tableIdent =>
CarbonShowStreamsCommand(tableIdent)
}
/**
* The syntax of datamap creation is as follows.
* CREATE DATAMAP IF NOT EXISTS datamapName [ON TABLE tableName]
* USING 'DataMapProviderName'
* [WITH DEFERRED REBUILD]
* DMPROPERTIES('KEY'='VALUE') AS SELECT COUNT(COL1) FROM tableName
*/
protected lazy val createDataMap: Parser[LogicalPlan] =
CREATE ~> DATAMAP ~> opt(IF ~> NOT ~> EXISTS) ~ ident ~
opt(ontable) ~
(USING ~> stringLit) ~
opt(WITH ~> DEFERRED ~> REBUILD) ~
(DMPROPERTIES ~> "(" ~> repsep(loadOptions, ",") <~ ")").? ~
(AS ~> restInput).? <~ opt(";") ^^ {
case ifnotexists ~ dmname ~ tableIdent ~ dmProviderName ~ deferred ~ dmprops ~ query =>
val map = dmprops.getOrElse(List[(String, String)]()).toMap[String, String]
CarbonCreateDataMapCommand(dmname, tableIdent, dmProviderName, map, query,
ifnotexists.isDefined, deferred.isDefined)
}
protected lazy val ontable: Parser[TableIdentifier] =
ON ~> TABLE ~> (ident <~ ".").? ~ ident ^^ {
case dbName ~ tableName =>
TableIdentifier(tableName, dbName)
}
/**
* The below syntax is used to drop the datamap.
* DROP DATAMAP IF EXISTS datamapName ON TABLE tablename
*/
protected lazy val dropDataMap: Parser[LogicalPlan] =
DROP ~> DATAMAP ~> opt(IF ~> EXISTS) ~ ident ~ opt(ontable) <~ opt(";") ^^ {
case ifexists ~ dmname ~ tableIdent =>
CarbonDropDataMapCommand(dmname, ifexists.isDefined, tableIdent)
}
/**
* The syntax of show datamap is used to show datamaps on the table
* SHOW DATAMAP ON TABLE tableName
*/
protected lazy val showDataMap: Parser[LogicalPlan] =
SHOW ~> DATAMAP ~> opt(ontable) <~ opt(";") ^^ {
case tableIdent =>
CarbonDataMapShowCommand(tableIdent)
}
/**
* The syntax of show datamap is used to show datamaps on the table
* REBUILD DATAMAP datamapname [ON TABLE] tableName
*/
protected lazy val refreshDataMap: Parser[LogicalPlan] =
REBUILD ~> DATAMAP ~> ident ~ opt(ontable) <~ opt(";") ^^ {
case datamap ~ tableIdent =>
CarbonDataMapRebuildCommand(datamap, tableIdent)
}
protected lazy val deleteRecords: Parser[LogicalPlan] =
(DELETE ~> FROM ~> aliasTable) ~ restInput.? <~ opt(";") ^^ {
case table ~ rest =>
val tableName = getTableName(table._2)
val relation: LogicalPlan = table._3 match {
case Some(a) =>
DeleteRecords(
"select tupleId from " + tableName + " " + table._3.getOrElse("")
+ rest.getOrElse(""),
Some(table._3.get),
table._1)
case None =>
DeleteRecords(
"select tupleId from " + tableName + " " + rest.getOrElse(""),
None,
table._1)
}
relation
}
protected lazy val updateTable: Parser[LogicalPlan] =
UPDATE ~> aliasTable ~
(SET ~> "(" ~> repsep(element, ",") <~ ")") ~
("=" ~> restInput) <~ opt(";") ^^ {
case tab ~ columns ~ rest =>
val (sel, where) = splitQuery(rest)
val (selectStmt, relation) =
if (!sel.toLowerCase.startsWith("select ")) {
if (sel.trim.isEmpty) {
sys.error("At least one source column has to be specified ")
}
// only list of expression are given, need to convert that list of expressions into
// select statement on destination table
val relation : UnresolvedRelation = tab._1 match {
case r@CarbonUnresolvedRelation(tableIdentifier) =>
tab._3 match {
case Some(a) => updateRelation(r, tableIdentifier, tab._4, Some(tab._3.get))
case None => updateRelation(r, tableIdentifier, tab._4, None)
}
case _ => tab._1
}
tab._3 match {
case Some(a) =>
("select " + sel + " from " + getTableName(tab._2) + " " + tab._3.get, relation)
case None =>
("select " + sel + " from " + getTableName(tab._2), relation)
}
} else {
(sel, updateRelation(tab._1, tab._2, tab._4, tab._3))
}
val rel = tab._3 match {
case Some(a) => UpdateTable(relation, columns, selectStmt, Some(tab._3.get), where)
case None => UpdateTable(relation,
columns,
selectStmt,
Some(tab._1.tableIdentifier.table),
where)
}
rel
}
private def updateRelation(
r: UnresolvedRelation,
tableIdent: Seq[String],
tableIdentifier: TableIdentifier,
alias: Option[String]): UnresolvedRelation = {
alias match {
case Some(_) => r
case _ =>
val tableAlias = tableIdent match {
case Seq(dbName, tableName) => Some(tableName)
case Seq(tableName) => Some(tableName)
}
// Use Reflection to choose between Spark2.1 and Spark2.2
// Move UnresolvedRelation(tableIdentifier, tableAlias) to reflection.
CarbonReflectionUtils.getUnresolvedRelation(tableIdentifier, tableAlias)
}
}
protected lazy val element: Parser[String] =
(ident <~ ".").? ~ ident ^^ {
case table ~ column => column.toLowerCase
}
protected lazy val table: Parser[UnresolvedRelation] = {
rep1sep(attributeName, ".") ~ opt(ident) ^^ {
case tableIdent ~ alias => UnresolvedRelation(tableIdent)
}
}
protected lazy val aliasTable: Parser[(UnresolvedRelation, List[String], Option[String],
TableIdentifier)] = {
rep1sep(attributeName, ".") ~ opt(ident) ^^ {
case tableIdent ~ alias =>
val tableIdentifier: TableIdentifier = toTableIdentifier(tableIdent)
// Use Reflection to choose between Spark2.1 and Spark2.2
// Move (UnresolvedRelation(tableIdent, alias), tableIdent, alias) to reflection.
val unresolvedRelation = CarbonReflectionUtils.getUnresolvedRelation(tableIdentifier, alias)
(unresolvedRelation, tableIdent, alias, tableIdentifier)
}
}
private def splitQuery(query: String): (String, String) = {
val stack = scala.collection.mutable.Stack[Char]()
var foundSingleQuotes = false
var foundDoubleQuotes = false
var foundEscapeChar = false
var ignoreChar = false
var stop = false
var bracketCount = 0
val (selectStatement, where) = query.span {
ch => {
if (stop) {
false
} else {
ignoreChar = false
if (foundEscapeChar && (ch == '\'' || ch == '\"' || ch == '\\')) {
foundEscapeChar = false
ignoreChar = true
}
// If escaped single or double quotes found, no need to consider
if (!ignoreChar) {
if (ch == '\\') {
foundEscapeChar = true
} else if (ch == '\'') {
foundSingleQuotes = !foundSingleQuotes
} else if (ch == '\"') {
foundDoubleQuotes = !foundDoubleQuotes
}
else if (ch == '(' && !foundSingleQuotes && !foundDoubleQuotes) {
bracketCount = bracketCount + 1
stack.push(ch)
} else if (ch == ')' && !foundSingleQuotes && !foundDoubleQuotes) {
bracketCount = bracketCount + 1
stack.pop()
if (0 == stack.size) {
stop = true
}
}
}
true
}
}
}
if (bracketCount == 0 || bracketCount % 2 != 0) {
sys.error("Parsing error, missing bracket ")
}
val select = selectStatement.trim
select.substring(1, select.length - 1).trim -> where.trim
}
protected lazy val attributeName: Parser[String] = acceptMatch("attribute name", {
case lexical.Identifier(str) => str.toLowerCase
case lexical.Keyword(str) if !lexical.delimiters.contains(str) => str.toLowerCase
})
private def getTableName(tableIdentifier: Seq[String]): String = {
if (tableIdentifier.size > 1) {
tableIdentifier.head + "." + tableIdentifier(1)
} else {
tableIdentifier.head
}
}
protected lazy val loadDataNew: Parser[LogicalPlan] =
LOAD ~> DATA ~> opt(LOCAL) ~> INPATH ~> stringLit ~ opt(OVERWRITE) ~
(INTO ~> TABLE ~> (ident <~ ".").? ~ ident) ~
(PARTITION ~>"("~> repsep(partitions, ",") <~ ")").? ~
(OPTIONS ~> "(" ~> repsep(loadOptions, ",") <~ ")").? <~ opt(";") ^^ {
case filePath ~ isOverwrite ~ table ~ partitions ~ optionsList =>
val (databaseNameOp, tableName) = table match {
case databaseName ~ tableName => (databaseName, tableName.toLowerCase())
}
if (optionsList.isDefined) {
validateOptions(optionsList)
}
val optionsMap = optionsList.getOrElse(List.empty[(String, String)]).toMap
val partitionSpec = partitions.getOrElse(List.empty[(String, Option[String])]).toMap
CarbonLoadDataCommand(
databaseNameOp = convertDbNameToLowerCase(databaseNameOp),
tableName = tableName,
factPathFromUser = filePath,
dimFilesPath = Seq(),
options = optionsMap,
isOverwriteTable = isOverwrite.isDefined,
inputSqlString = null,
dataFrame = None,
updateModel = None,
tableInfoOp = None,
internalOptions = Map.empty,
partition = partitionSpec)
}
protected lazy val deleteLoadsByID: Parser[LogicalPlan] =
DELETE ~> FROM ~ TABLE ~> (ident <~ ".").? ~ ident ~
(WHERE ~> (SEGMENT ~ "." ~ ID) ~> IN ~> "(" ~> repsep(segmentId, ",")) <~ ")" ~
opt(";") ^^ {
case dbName ~ tableName ~ loadids =>
CarbonDeleteLoadByIdCommand(loadids, dbName, tableName.toLowerCase())
}
protected lazy val deleteLoadsByLoadDate: Parser[LogicalPlan] =
DELETE ~> FROM ~> TABLE ~> (ident <~ ".").? ~ ident ~
(WHERE ~> (SEGMENT ~ "." ~ STARTTIME ~> BEFORE) ~ stringLit) <~
opt(";") ^^ {
case database ~ table ~ condition =>
condition match {
case dateField ~ dateValue =>
CarbonDeleteLoadByLoadDateCommand(convertDbNameToLowerCase(database),
table.toLowerCase(),
dateField,
dateValue)
}
}
protected lazy val cleanFiles: Parser[LogicalPlan] =
CLEAN ~> FILES ~> FOR ~> TABLE ~> (ident <~ ".").? ~ ident <~ opt(";") ^^ {
case databaseName ~ tableName =>
CarbonCleanFilesCommand(
convertDbNameToLowerCase(databaseName),
Option(tableName.toLowerCase()))
}
protected lazy val explainPlan: Parser[LogicalPlan] =
(EXPLAIN ~> opt(EXTENDED)) ~ startCommand ^^ {
case isExtended ~ logicalPlan =>
logicalPlan match {
case _: CarbonCreateTableCommand =>
ExplainCommand(logicalPlan, extended = isExtended.isDefined)
case _ => CarbonToSparkAdapter.getExplainCommandObj
}
}
protected lazy val showLoads: Parser[LogicalPlan] =
(SHOW ~> opt(HISTORY) <~ SEGMENTS <~ FOR <~ TABLE) ~ (ident <~ ".").? ~ ident ~
(LIMIT ~> numericLit).? <~
opt(";") ^^ {
case showHistory ~ databaseName ~ tableName ~ limit =>
CarbonShowLoadsCommand(
convertDbNameToLowerCase(databaseName), tableName.toLowerCase(), limit,
showHistory.isDefined)
}
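  // Hedged examples (table names are illustrative):
  //   SHOW SEGMENTS FOR TABLE db1.sales LIMIT 10
  //   SHOW HISTORY SEGMENTS FOR TABLE db1.sales
  // The optional HISTORY keyword sets showHistory = true in CarbonShowLoadsCommand.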
protected lazy val cli: Parser[LogicalPlan] =
(CARBONCLI ~> FOR ~> TABLE) ~> (ident <~ ".").? ~ ident ~
(OPTIONS ~> "(" ~> commandOptions <~ ")").? <~
opt(";") ^^ {
case databaseName ~ tableName ~ commandList =>
var commandOptions: String = null
if (commandList.isDefined) {
commandOptions = commandList.get
}
CarbonCliCommand(
convertDbNameToLowerCase(databaseName),
tableName.toLowerCase(),
commandOptions)
}
protected lazy val alterTableColumnRenameAndModifyDataType: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ CHANGE ~ ident ~ ident ~
ident ~ opt("(" ~> rep1sep(valueOptions, ",") <~ ")") <~ opt(";") ^^ {
case dbName ~ table ~ change ~ columnName ~ columnNameCopy ~ dataType ~ values =>
var isColumnRename = false
        // If the two column names are not the same, then it is a column rename request
if (!columnName.equalsIgnoreCase(columnNameCopy)) {
isColumnRename = true
}
val alterTableColRenameAndDataTypeChangeModel =
AlterTableDataTypeChangeModel(parseDataType(dataType.toLowerCase,
values,
isColumnRename),
convertDbNameToLowerCase(dbName),
table.toLowerCase,
columnName.toLowerCase,
columnNameCopy.toLowerCase,
isColumnRename)
CarbonAlterTableColRenameDataTypeChangeCommand(alterTableColRenameAndDataTypeChangeModel)
}
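  // Hedged examples (column and table names are illustrative):
  //   ALTER TABLE db1.sales CHANGE amount amount BIGINT        -- data type change only
  //   ALTER TABLE db1.sales CHANGE amount total_amount BIGINT  -- rename plus data type change
  // The rename flag is derived purely from whether the two column identifiers differ.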
protected lazy val alterTableAddColumns: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~
(ADD ~> COLUMNS ~> "(" ~> repsep(anyFieldDef, ",") <~ ")") ~
(TBLPROPERTIES ~> "(" ~> repsep(loadOptions, ",") <~ ")").? <~ opt(";") ^^ {
case dbName ~ table ~ fields ~ tblProp =>
fields.foreach{ f =>
if (isComplexDimDictionaryExclude(f.dataType.get)) {
throw new MalformedCarbonCommandException(
s"Add column is unsupported for complex datatype column: ${f.column}")
}
}
val tableProps = if (tblProp.isDefined) {
          tblProp.get.groupBy(_._1.toLowerCase).foreach(f =>
            if (f._2.size > 1) {
              val name = f._1.toLowerCase
              // check startsWith before taking the substring so that a duplicate
              // non-"default.value." property cannot trigger a StringIndexOutOfBoundsException
              if (name.startsWith("default.value.")) {
                val colName = name.substring(14)
                if (fields.count(p => p.column.equalsIgnoreCase(colName)) == 1) {
                  sys.error(s"Duplicate default value exists for new column: ${ colName }")
                }
              }
            }
          )
// default value should not be converted to lower case
val tblProps = tblProp.get
.map(f => if (CarbonCommonConstants.TABLE_BLOCKSIZE.equalsIgnoreCase(f._1) ||
CarbonCommonConstants.SORT_COLUMNS.equalsIgnoreCase(f._1) ||
CarbonCommonConstants.LOCAL_DICTIONARY_ENABLE.equalsIgnoreCase(f._1) ||
CarbonCommonConstants.LOCAL_DICTIONARY_THRESHOLD.equalsIgnoreCase(f._1)) {
throw new MalformedCarbonCommandException(
s"Unsupported Table property in add column: ${ f._1 }")
} else if (f._1.toLowerCase.startsWith("default.value.")) {
if (fields.count(field => checkFieldDefaultValue(field.column,
f._1.toLowerCase)) == 1) {
f._1 -> f._2
} else {
throw new MalformedCarbonCommandException(
s"Default.value property does not matches with the columns in ALTER command. " +
s"Column name in property is: ${ f._1}")
}
} else {
f._1 -> f._2.toLowerCase
})
scala.collection.mutable.Map(tblProps: _*)
} else {
scala.collection.mutable.Map.empty[String, String]
}
val tableModel = prepareTableModel (false,
convertDbNameToLowerCase(dbName),
table.toLowerCase,
fields.map(convertFieldNamesToLowercase),
Seq.empty,
tableProps,
None,
true)
val alterTableAddColumnsModel = AlterTableAddColumnsModel(
convertDbNameToLowerCase(dbName),
table,
tableProps.toMap,
tableModel.dimCols,
tableModel.msrCols,
tableModel.highcardinalitydims.getOrElse(Seq.empty))
CarbonAlterTableAddColumnCommand(alterTableAddColumnsModel)
}
private def checkFieldDefaultValue(fieldName: String, defaultValueColumnName: String): Boolean = {
defaultValueColumnName.equalsIgnoreCase("default.value." + fieldName)
}
private def convertFieldNamesToLowercase(field: Field): Field = {
val name = field.column.toLowerCase
field.copy(column = name, name = Some(name))
}
protected lazy val alterTableDropColumn: Parser[LogicalPlan] =
ALTER ~> TABLE ~> (ident <~ ".").? ~ ident ~ DROP ~ COLUMNS ~
("(" ~> rep1sep(ident, ",") <~ ")") <~ opt(";") ^^ {
case dbName ~ table ~ drop ~ columns ~ values =>
// validate that same column name is not repeated
values.map(_.toLowerCase).groupBy(identity).collect {
case (x, ys) if ys.lengthCompare(1) > 0 =>
throw new MalformedCarbonCommandException(s"$x is duplicate. Duplicate columns not " +
s"allowed")
}
val alterTableDropColumnModel = AlterTableDropColumnModel(convertDbNameToLowerCase(dbName),
table.toLowerCase,
values.map(_.toLowerCase))
CarbonAlterTableDropColumnCommand(alterTableDropColumnModel)
}
def getFields(schema: Seq[StructField]): Seq[Field] = {
schema.map { col =>
var columnComment: String = ""
var plainComment: String = ""
if (col.getComment().isDefined) {
columnComment = " comment \"" + col.getComment().get + "\""
plainComment = col.getComment().get
}
val x =
if (col.dataType.catalogString == "float") {
'`' + col.name + '`' + " double" + columnComment
} else {
'`' + col.name + '`' + ' ' + col.dataType.catalogString + columnComment
}
      val f: Field = anyFieldDef(new lexical.Scanner(x.toLowerCase)) match {
case Success(field, _) => field.asInstanceOf[Field]
case failureOrError => throw new MalformedCarbonCommandException(
s"Unsupported data type: ${ col.dataType }")
}
// the data type of the decimal type will be like decimal(10,0)
// so checking the start of the string and taking the precision and scale.
// resetting the data type with decimal
if (f.dataType.getOrElse("").startsWith("decimal")) {
val (precision, scale) = CommonUtil.getScaleAndPrecision(col.dataType.catalogString)
f.precision = precision
f.scale = scale
f.dataType = Some("decimal")
}
if (f.dataType.getOrElse("").startsWith("char")) {
f.dataType = Some("char")
      } else if (f.dataType.getOrElse("").startsWith("float")) {
f.dataType = Some("double")
}
f.rawSchema = x
f.columnComment = plainComment
f
}
}
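  // Hedged sketch of the mapping performed above (the StructField values are illustrative):
  //   StructField("price", DecimalType(10, 2)) -> Field with dataType Some("decimal"),
  //     precision 10, scale 2, rawSchema "`price` decimal(10,2)"
  //   StructField("ratio", FloatType)          -> Field with dataType Some("double")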
def addPreAggFunction(sql: String): String = {
addPreAgg(new lexical.Scanner(sql.toLowerCase)) match {
case Success(query, _) => query
case _ =>
throw new MalformedCarbonCommandException(s"Unsupported query")
}
}
def addPreAggLoadFunction(sql: String): String = {
addPreAggLoad(new lexical.Scanner(sql.toLowerCase)) match {
case Success(query, _) => query
case _ =>
throw new MalformedCarbonCommandException(s"Unsupported query")
}
}
def getBucketFields(
properties: mutable.Map[String, String],
fields: Seq[Field],
options: CarbonOption): Option[BucketFields] = {
if (!CommonUtil.validateTblProperties(properties,
fields)) {
throw new MalformedCarbonCommandException("Invalid table properties")
}
if (options.isBucketingEnabled) {
if (options.bucketNumber.toString.contains("-") ||
options.bucketNumber.toString.contains("+") || options.bucketNumber == 0) {
throw new MalformedCarbonCommandException("INVALID NUMBER OF BUCKETS SPECIFIED")
      } else {
Some(BucketFields(options.bucketColumns.toLowerCase.split(",").map(_.trim),
options.bucketNumber))
}
} else {
None
}
}
}
|
manishgupta88/carbondata
|
integration/spark2/src/main/scala/org/apache/spark/sql/parser/CarbonSpark2SqlParser.scala
|
Scala
|
apache-2.0
| 28,245 |
package be.mygod.text
import org.xml.sax.SAXException
/**
* @author Mygod
*/
final class AttributeMissingException(attributeNames: String) extends SAXException(attributeNames + " required.") { }
|
Mygod/MygodSpeechSynthesizer-android
|
src/main/scala/be/mygod/text/AttributeMissingException.scala
|
Scala
|
gpl-3.0
| 199 |
package nest.sparkle.loader.kafka
import kafka.message.MessageAndMetadata
import org.scalatest.{ FunSuite, Matchers }
import org.scalatest.prop.PropertyChecks
import nest.sparkle.loader.kafka.KafkaTestUtil.{ withTestEncodedTopic, withTestReader }
class TestEncodedKafka extends FunSuite with Matchers with PropertyChecks with KafkaTestConfig {
val stringSerde = KafkaTestUtil.stringSerde
test("read/write a few encoded elements from the kafka queue"){
forAll(MinSuccessful(5), MinSize(1)) { records: List[String] =>
withTestEncodedTopic[String, Unit](rootConfig, stringSerde) { testTopic =>
testTopic.writer.write(records)
withTestReader(testTopic, stringSerde) { reader =>
val iter = reader.messageAndMetaDataIterator()
val results: List[MessageAndMetadata[String, String]] = iter.take(records.length).toList
records zip results foreach {
case (record, result) =>
record shouldBe result.message()
val msg = result.message()
record shouldBe msg
}
}
}
}
}
}
|
mighdoll/sparkle
|
kafka/src/it/scala/nest/sparkle/loader/kafka/TestEncodedKafka.scala
|
Scala
|
apache-2.0
| 1,108 |
package com.twitter.finagle.util
import com.twitter.finagle.benchmark.StdBenchAnnotations
import com.twitter.finagle.core.util.InetAddressUtil
import java.net.InetAddress
import org.openjdk.jmh.annotations._
@State(Scope.Benchmark)
@Threads(1)
class InetAddressUtilBench extends StdBenchAnnotations {
val ip = "69.55.236.117"
@Benchmark
def timeOldInetAddressGetByName(): InetAddress = {
InetAddress.getByName(ip)
}
@Benchmark
def timeNewInetAddressGetByName(): InetAddress = {
InetAddressUtil.getByName(ip)
}
}
|
twitter/finagle
|
finagle-benchmark/src/main/scala/com/twitter/finagle/util/InetAddressUtilBench.scala
|
Scala
|
apache-2.0
| 538 |
package firesoft.util
import java.nio.file.{Paths, Files}
import java.nio.charset.StandardCharsets
object FileWriter
{
def write(data : String, path : String)= {
Files.write(Paths.get(path), data.getBytes(StandardCharsets.UTF_8))
}
}
|
firephil/scala-math-problems
|
src/firesoft/util/FileWriter.scala
|
Scala
|
mit
| 244 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors.mocks
import config.AuthClientConnector
import org.mockito.ArgumentMatchers
import org.mockito.Mockito.when
import org.mockito.stubbing.OngoingStubbing
import org.scalatestplus.mockito.MockitoSugar
import uk.gov.hmrc.auth.core.AffinityGroup.{Agent, Individual, Organisation}
import uk.gov.hmrc.auth.core.retrieve.v2.Retrievals
import uk.gov.hmrc.auth.core.retrieve.{Retrieval, ~}
import uk.gov.hmrc.auth.core.{AffinityGroup, Enrolment, Enrolments, InsufficientConfidenceLevel, InvalidBearerToken}
import scala.concurrent.Future
trait AuthMock {
this: MockitoSugar =>
lazy val mockAuthClientConnector: AuthClientConnector = mock[AuthClientConnector]
def agentEnrolment(arn: String) = Enrolment("HMRC-AS-AGENT").withIdentifier("AgentReferenceNumber", arn)
def mockAuthenticatedBasic: OngoingStubbing[Future[Unit]] =
when(mockAuthClientConnector.authorise[Unit](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future.successful({}))
def mockAuthenticated(arn: Option[String] = None): OngoingStubbing[Future[Enrolments]] = {
val enrolments = arn.map(ref => Set(agentEnrolment(ref))).getOrElse(Set())
when(
mockAuthClientConnector.authorise(ArgumentMatchers.any(), ArgumentMatchers.eq(Retrievals.allEnrolments))(ArgumentMatchers.any(), ArgumentMatchers.any())
).thenReturn(Future.successful(Enrolments(enrolments)))
}
def mockNotAuthenticated(): OngoingStubbing[Future[Unit]] = {
when(mockAuthClientConnector.authorise[Unit](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future.failed(new InsufficientConfidenceLevel))
}
def mockNoActiveSession(): OngoingStubbing[Future[Unit]] = {
when(mockAuthClientConnector.authorise[Unit](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any()))
.thenReturn(Future.failed(new InvalidBearerToken))
}
def mockAuthenticatedOrg(): OngoingStubbing[Future[Option[AffinityGroup]]] = {
when(
mockAuthClientConnector.authorise[Option[AffinityGroup]](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any())
) thenReturn Future.successful(Some(Organisation))
}
def mockAuthenticatedAgent(): OngoingStubbing[Future[Option[AffinityGroup]]] = {
when(
mockAuthClientConnector.authorise[Option[AffinityGroup]](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any())
) thenReturn Future.successful(Some(Agent))
}
def mockAuthenticatedIndividual(): OngoingStubbing[Future[Option[AffinityGroup]]] = {
when(
mockAuthClientConnector.authorise[Option[AffinityGroup]](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any())
) thenReturn Future.successful(Some(Individual))
}
def mockAuthenticatedInternalId(internalId: Option[String]): OngoingStubbing[Future[Option[String]]] = {
when(
mockAuthClientConnector.authorise[Option[String]](ArgumentMatchers.any(), ArgumentMatchers.any())(ArgumentMatchers.any(), ArgumentMatchers.any())
) thenReturn Future.successful(internalId)
}
}
|
hmrc/vat-registration-frontend
|
test/connectors/mocks/AuthMock.scala
|
Scala
|
apache-2.0
| 3,849 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.precondition
import com.krux.hyperion.action.Action
import com.krux.hyperion.adt.{ HDuration, HString, HInt }
import com.krux.hyperion.aws.{ AdpRef, AdpPrecondition }
import com.krux.hyperion.common.{ NamedPipelineObject, PipelineObject }
import com.krux.hyperion.HyperionContext
/**
* The base trait of all preconditions.
*
* A precondition is a condition that must be met before the object can run. The activity cannot run
* until all its conditions are met.
*/
trait Precondition extends NamedPipelineObject {
type Self <: Precondition
def preconditionFields: PreconditionFields
def updatePreconditionFields(fields: PreconditionFields): Self
/**
* The IAM role to use for this precondition.
*/
def role: HString = preconditionFields.role
def withRole(role: HString) = updatePreconditionFields(
preconditionFields.copy(role = role)
)
/**
* The precondition will be retried until the retryTimeout with a gap of retryDelay between attempts.
* Time period; for example, "1 hour".
*/
def preconditionTimeout: Option[HDuration] = preconditionFields.preconditionTimeout
def withPreconditionTimeout(timeout: HDuration) = updatePreconditionFields(
preconditionFields.copy(preconditionTimeout = Option(timeout))
)
def maximumRetries = preconditionFields.maximumRetries
def withMaximumRetries(retries: HInt) = updatePreconditionFields(
preconditionFields.copy(maximumRetries = Option(retries))
)
def onFail = preconditionFields.onFail
def onFail(actions: Action*) = updatePreconditionFields(
preconditionFields.copy(onFail = preconditionFields.onFail ++ actions)
)
def onLateAction = preconditionFields.onLateAction
def onLateAction(actions: Action*) = updatePreconditionFields(
preconditionFields.copy(onLateAction = preconditionFields.onLateAction ++ actions)
)
def onSuccess = preconditionFields.onSuccess
def onSuccess(actions: Action*) = updatePreconditionFields(
preconditionFields.copy(onSuccess = preconditionFields.onSuccess ++ actions)
)
def serialize: AdpPrecondition
def ref: AdpRef[AdpPrecondition] = AdpRef(serialize)
def objects: Iterable[PipelineObject] = onFail.toSeq ++ onLateAction.toSeq ++ onSuccess.toSeq
}
object Precondition {
def defaultPreconditionFields(implicit hc: HyperionContext) = PreconditionFields(
role = hc.role,
preconditionTimeout = None
)
}
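// Hedged usage sketch (`myPrecondition` stands for any concrete Precondition implementation,
// `alarmAction` is a hypothetical Action, and the literal conversions to HDuration/HInt are
// assumed to be provided by hyperion's implicit conversions):
//   val configured = myPrecondition
//     .withPreconditionTimeout("1 hour")
//     .withMaximumRetries(3)
//     .onFail(alarmAction)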
|
realstraw/hyperion
|
core/src/main/scala/com/krux/hyperion/precondition/Precondition.scala
|
Scala
|
bsd-3-clause
| 2,651 |
package edu.rice.habanero.benchmarks.bitonicsort
import edu.rice.habanero.actors.{JumiActor, JumiActorState, JumiPool}
import edu.rice.habanero.benchmarks.philosopher.PhilosopherAkkaActorBenchmark.ExitMessage
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner, PseudoRandom}
import scala.collection.mutable.ListBuffer
/**
* @author <a href="http://shams.web.rice.edu/">Shams Imam</a> ([email protected])
*/
object BitonicSortJumiActorBenchmark {
def main(args: Array[String]) {
BenchmarkRunner.runBenchmark(args, new BitonicSortJumiActorBenchmark)
}
private final class BitonicSortJumiActorBenchmark extends Benchmark {
def initialize(args: Array[String]) {
BitonicSortConfig.parseArgs(args)
}
def printArgInfo() {
BitonicSortConfig.printArgs()
}
def runIteration() {
val validationActor = new ValidationActor(BitonicSortConfig.N)
validationActor.start()
val adapterActor = new DataValueAdapterActor(validationActor)
adapterActor.start()
val kernelActor = new BitonicSortKernelActor(BitonicSortConfig.N, true, adapterActor)
kernelActor.start()
val sourceActor = new IntSourceActor(BitonicSortConfig.N, BitonicSortConfig.M, BitonicSortConfig.S, kernelActor)
sourceActor.start()
sourceActor.send(StartMessage())
JumiActorState.awaitTermination()
}
def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double): Unit = {
if (lastIteration) {
JumiPool.shutdown()
}
}
}
private case class NextActorMessage(actor: JumiActor[AnyRef])
private case class ValueMessage(value: Long)
private case class DataMessage(orderId: Int, value: Long)
private case class StartMessage()
private class ValueDataAdapterActor(orderId: Int, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
nextActor.send(new DataMessage(orderId, vm.value))
case dm: DataMessage =>
nextActor.send(dm)
case em: ExitMessage =>
nextActor.send(em)
exit()
}
}
}
private class DataValueAdapterActor(nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
nextActor.send(vm)
case dm: DataMessage =>
nextActor.send(new ValueMessage(dm.value))
case em: ExitMessage =>
nextActor.send(em)
exit()
}
}
}
private class RoundRobinSplitterActor(name: String, length: Int, receivers: Array[JumiActor[AnyRef]]) extends JumiActor[AnyRef] {
private var receiverIndex = 0
private var currentRun = 0
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
receivers(receiverIndex).send(vm)
currentRun += 1
if (currentRun == length) {
receiverIndex = (receiverIndex + 1) % receivers.length
currentRun = 0
}
case em: ExitMessage =>
receivers.foreach(loopActor => loopActor.send(em))
exit()
}
}
}
private class RoundRobinJoinerActor(name: String, length: Int, numJoiners: Int, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
private val receivedData = Array.tabulate[ListBuffer[DataMessage]](numJoiners)(i => new ListBuffer[DataMessage]())
private var forwardIndex = 0
private var currentRun = 0
private var exitsReceived = 0
override def process(msg: AnyRef) {
msg match {
case dm: DataMessage =>
receivedData(dm.orderId).append(dm)
tryForwardMessages(dm)
case em: ExitMessage =>
exitsReceived += 1
if (exitsReceived == numJoiners) {
nextActor.send(em)
exit()
}
}
}
def tryForwardMessages(dm: DataMessage) {
while (receivedData(forwardIndex).nonEmpty) {
val dm = receivedData(forwardIndex).remove(0)
val vm = new ValueMessage(dm.value)
nextActor.send(vm)
currentRun += 1
if (currentRun == length) {
forwardIndex = (forwardIndex + 1) % numJoiners
currentRun = 0
}
}
}
}
/**
* Compares the two input keys and exchanges their order if they are not sorted.
*
* sortDirection determines if the sort is nondecreasing (UP) [true] or nonincreasing (DOWN) [false].
*/
private class CompareExchangeActor(orderId: Int, sortDirection: Boolean, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
private var k1: Long = 0
private var valueAvailable = false
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
if (!valueAvailable) {
valueAvailable = true
k1 = vm.value
} else {
valueAvailable = false
val k2 = vm.value
val (minK, maxK) = if (k1 <= k2) (k1, k2) else (k2, k1)
if (sortDirection) {
// UP sort
nextActor.send(DataMessage(orderId, minK))
nextActor.send(DataMessage(orderId, maxK))
} else {
// DOWN sort
nextActor.send(DataMessage(orderId, maxK))
nextActor.send(DataMessage(orderId, minK))
}
}
case em: ExitMessage =>
nextActor.send(em)
exit()
}
}
}
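  // Hedged example (values are illustrative): with sortDirection = true (UP), receiving
  // ValueMessage(5) and then ValueMessage(3) makes the actor emit DataMessage(orderId, 3)
  // followed by DataMessage(orderId, 5); with sortDirection = false (DOWN) the order is reversed.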
/**
* Partition the input bitonic sequence of length L into two bitonic sequences of length L/2,
* with all numbers in the first sequence <= all numbers in the second sequence if sortdir is UP (similar case for DOWN sortdir)
*
 * Graphically, it is a group of CompareExchanges with the same sortdir, clustered together in the sort network at a particular step (of some merge stage).
*/
private class PartitionBitonicSequenceActor(orderId: Int, length: Int, sortDir: Boolean, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val halfLength = length / 2
val forwardActor = {
val actor = new ValueDataAdapterActor(orderId, nextActor)
actor.start()
actor
}
val joinerActor = {
val actor = new RoundRobinJoinerActor("Partition-" + orderId, 1, halfLength, forwardActor)
actor.start()
actor
}
val workerActors = Array.tabulate[JumiActor[AnyRef]](halfLength)(i => {
val actor = new CompareExchangeActor(i, sortDir, joinerActor)
actor.start()
actor
})
val splitterActor = {
val actor = new RoundRobinSplitterActor("Partition-" + orderId, 1, workerActors)
actor.start()
actor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
splitterActor.send(vm)
case em: ExitMessage =>
splitterActor.send(em)
exit()
}
}
}
/**
* One step of a particular merge stage (used by all merge stages except the last)
*
 * directionCounter determines which step of the current merge stage we are in (which in turn is determined by <L, numSeqPartitions>)
*/
private class StepOfMergeActor(orderId: Int, length: Int, numSeqPartitions: Int, directionCounter: Int, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val forwardActor = {
val actor = new DataValueAdapterActor(nextActor)
actor.start()
actor
}
val joinerActor = {
val actor = new RoundRobinJoinerActor("StepOfMerge-" + orderId + ":" + length, length, numSeqPartitions, forwardActor)
actor.start()
actor
}
val workerActors = Array.tabulate[JumiActor[AnyRef]](numSeqPartitions)(i => {
// finding out the currentDirection is a bit tricky -
// the direction depends only on the subsequence number during the FIRST step.
      // So to determine the FIRST step subsequence to which this sequence belongs, divide this sequence's number i by directionCounter
      // (because 'directionCounter' tells how many subsequences of the current step make up one subsequence of the FIRST step).
// Then, test if that result is even or odd to determine if currentDirection is UP or DOWN respectively.
val currentDirection = (i / directionCounter) % 2 == 0
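      // Worked example (illustrative numbers): with directionCounter = 2 the workers
      // i = 0, 1, 2, 3 compute i / 2 = 0, 0, 1, 1, so their directions alternate in
      // pairs: UP, UP, DOWN, DOWN.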
// The last step needs special care to avoid split-joins with just one branch.
if (length > 2) {
val actor = new PartitionBitonicSequenceActor(i, length, currentDirection, joinerActor)
actor.start()
actor
} else {
// PartitionBitonicSequence of the last step (L=2) is simply a CompareExchange
val actor = new CompareExchangeActor(i, currentDirection, joinerActor)
actor.start()
actor
}
})
val splitterActor = {
val actor = new RoundRobinSplitterActor("StepOfMerge-" + orderId + ":" + length, length, workerActors)
actor.start()
actor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
splitterActor.send(vm)
case em: ExitMessage =>
splitterActor.send(em)
exit()
}
}
}
/**
* One step of the last merge stage
*
 * Main difference from StepOfMerge is the direction of the sort.
* It is always in the same direction - sortdir.
*/
private class StepOfLastMergeActor(length: Int, numSeqPartitions: Int, sortDirection: Boolean, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val joinerActor = {
val actor = new RoundRobinJoinerActor("StepOfLastMerge-" + length, length, numSeqPartitions, nextActor)
actor.start()
actor
}
val workerActors = Array.tabulate[JumiActor[AnyRef]](numSeqPartitions)(i => {
// The last step needs special care to avoid split-joins with just one branch.
if (length > 2) {
val actor = new PartitionBitonicSequenceActor(i, length, sortDirection, joinerActor)
actor.start()
actor
} else {
// PartitionBitonicSequence of the last step (L=2) is simply a CompareExchange
val actor = new CompareExchangeActor(i, sortDirection, joinerActor)
actor.start()
actor
}
})
val splitterActor = {
val actor = new RoundRobinSplitterActor("StepOfLastMerge-" + length, length, workerActors)
actor.start()
actor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
splitterActor.send(vm)
case em: ExitMessage =>
splitterActor.send(em)
exit()
}
}
}
/**
* Divide the input sequence of length N into subsequences of length P and sort each of them
* (either UP or DOWN depending on what subsequence number [0 to N/P-1] they get.
* All even subsequences are sorted UP and all odd subsequences are sorted DOWN).
* In short, a MergeStage is N/P Bitonic Sorters of order P each.
* But, this MergeStage is implemented *iteratively* as logP STEPS.
*/
private class MergeStageActor(P: Int, N: Int, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val forwardActor = {
var loopActor: JumiActor[AnyRef] = nextActor
      // for each of the logP steps (except the last step) of this merge stage
var i = P / 2
while (i >= 1) {
// length of each sequence for the current step - goes like P, P/2, ..., 2.
val L = P / i
// numSeqPartitions is the number of PartitionBitonicSequence-rs in this step
val numSeqPartitions = (N / P) * i
val directionCounter = i
val tempActor = new StepOfMergeActor(i, L, numSeqPartitions, directionCounter, loopActor)
tempActor.start()
loopActor = tempActor
i /= 2
}
loopActor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
forwardActor.send(vm)
case em: ExitMessage =>
forwardActor.send(em)
exit()
}
}
}
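  // Worked example (sizes are illustrative): for N = 16 and P = 8 the loop above creates,
  // in message-flow order, steps with L = 8 (numSeqPartitions = 2), L = 4 (4) and L = 2 (8);
  // the step built last (i = 1) is the first to receive messages because each new
  // StepOfMergeActor wraps the previously created loopActor.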
/**
* The LastMergeStage is basically one Bitonic Sorter of order N i.e.,
* it takes the bitonic sequence produced by the previous merge stages
* and applies a bitonic merge on it to produce the final sorted sequence.
*
* This is implemented iteratively as logN steps.
*/
private class LastMergeStageActor(N: Int, sortDirection: Boolean, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val forwardActor = {
var loopActor: JumiActor[AnyRef] = nextActor
      // for each of the logN steps (except the last step) of this merge stage
var i = N / 2
while (i >= 1) {
// length of each sequence for the current step - goes like N, N/2, ..., 2.
val L = N / i
// numSeqPartitions is the number of PartitionBitonicSequence-rs in this step
val numSeqPartitions = i
val tempActor = new StepOfLastMergeActor(L, numSeqPartitions, sortDirection, loopActor)
tempActor.start()
loopActor = tempActor
i /= 2
}
loopActor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
forwardActor.send(vm)
case em: ExitMessage =>
forwardActor.send(em)
exit()
}
}
}
/**
* The top-level kernel of bitonic-sort (iterative version) -
 * It has logN merge stages, and all merge stages except the last progressively build a bitonic sequence out of the input sequence.
* The last merge stage acts on the resultant bitonic sequence to produce the final sorted sequence (sortdir determines if it is UP or DOWN).
*/
private class BitonicSortKernelActor(N: Int, sortDirection: Boolean, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
val forwardActor = {
var loopActor: JumiActor[AnyRef] = nextActor
{
val tempActor = new LastMergeStageActor(N, sortDirection, loopActor)
tempActor.start()
loopActor = tempActor
}
var i = N / 2
while (i >= 2) {
val tempActor = new MergeStageActor(i, N, loopActor)
tempActor.start()
loopActor = tempActor
i /= 2
}
loopActor
}
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
forwardActor.send(vm)
case em: ExitMessage =>
forwardActor.send(em)
exit()
}
}
}
private class IntSourceActor(numValues: Int, maxValue: Long, seed: Long, nextActor: JumiActor[AnyRef]) extends JumiActor[AnyRef] {
private val random = new PseudoRandom(seed)
private val sb = new StringBuilder()
override def process(msg: AnyRef) {
msg match {
case nm: StartMessage =>
var i = 0
while (i < numValues) {
val candidate = Math.abs(random.nextLong()) % maxValue
if (BitonicSortConfig.debug) {
sb.append(candidate + " ")
}
val message = new ValueMessage(candidate)
nextActor.send(message)
i += 1
}
if (BitonicSortConfig.debug) {
println(" SOURCE: " + sb)
}
nextActor.send(ExitMessage())
exit()
}
}
}
private class ValidationActor(numValues: Int) extends JumiActor[AnyRef] {
private var sumSoFar = 0.0
private var valuesSoFar = 0
private var prevValue = 0L
private var errorValue = (-1L, -1)
private val sb = new StringBuilder()
override def process(msg: AnyRef) {
msg match {
case vm: ValueMessage =>
valuesSoFar += 1
if (BitonicSortConfig.debug) {
sb.append(vm.value + " ")
}
if (vm.value < prevValue && errorValue._1 < 0) {
errorValue = (vm.value, valuesSoFar - 1)
}
prevValue = vm.value
sumSoFar += prevValue
case em: ExitMessage =>
if (valuesSoFar == numValues) {
if (BitonicSortConfig.debug) {
println(" OUTPUT: " + sb)
}
if (errorValue._1 >= 0) {
println(" ERROR: Value out of place: " + errorValue._1 + " at index " + errorValue._2)
} else {
println(" Elements sum: " + sumSoFar)
}
} else {
println(" ERROR: early exit triggered, received only " + valuesSoFar + " values!")
}
exit()
}
}
}
}
|
shamsmahmood/savina
|
src/main/scala/edu/rice/habanero/benchmarks/bitonicsort/BitonicSortJumiActorBenchmark.scala
|
Scala
|
gpl-2.0
| 16,519 |
package io.muvr.exercise
import akka.actor.{ActorLogging, Props}
import akka.contrib.pattern.ShardRegion
import akka.persistence.PersistentActor
import io.muvr.{UserId, UserMessage}
import scalaz.\/
/**
* Companion for the ``UserExerciseProcessor`` actor. Defines the fields and messages.
*/
object UserExerciseProcessor {
val shardName = "user-exercise-processor"
val props: Props = Props[UserExerciseProcessor]
val idExtractor: ShardRegion.IdExtractor = {
    case ExerciseSubmitEntireResistanceExerciseSession(userId, eres) ⇒ (userId.toString, eres)
    case ExerciseSubmitSuggestions(userId, suggestions) ⇒ (userId.toString, suggestions)
}
val shardResolver: ShardRegion.ShardResolver = {
    case x: UserMessage ⇒ x.shardRegion()
}
/**
* Submit the entire resistance exercise session
*
* @param userId the user identity
* @param eres the entire session
*/
case class ExerciseSubmitEntireResistanceExerciseSession(userId: UserId, eres: EntireResistanceExerciseSession) extends UserMessage
/**
* Submit exercise suggestions
* @param userId the user identity
* @param suggestions the suggestions
*/
case class ExerciseSubmitSuggestions(userId: UserId, suggestions: Suggestions) extends UserMessage
}
/**
* The cluster-sharded persistent actor that processes the user messages
*/
class UserExerciseProcessor extends PersistentActor with ActorLogging {
// user reference and notifier
private val userId = UserId(self.path.name)
// per-user actor
override val persistenceId: String = UserExerciseProcessorPersistenceId(userId).persistenceId
// no recovery behaviour just yet
override def receiveRecover: Receive = {
    case x ⇒ println(x)
}
// only deal with sessions
override def receiveCommand: Receive = {
    case eres@EntireResistanceExerciseSession(id, session, examples) ⇒
      println(eres)
      persist(eres) { _ ⇒
        sender() ! \/.right(id)
      }
    case Suggestions(suggestions) ⇒
println(s"Got $suggestions")
}
}
|
boonhero/muvr-server
|
exercise/src/main/scala/io/muvr/exercise/UserExerciseProcessor.scala
|
Scala
|
bsd-3-clause
| 2,052 |
package com.wavesplatform.lagonaki.unit
import java.io.File
import java.nio.file.Files
import cats.syntax.option._
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.settings.WalletSettings
import com.wavesplatform.test.FunSuite
import com.wavesplatform.wallet.Wallet
class WalletSpecification extends FunSuite {
private val walletSize = 10
val w = Wallet(WalletSettings(None, "cookies".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))
test("wallet - acc creation") {
w.generateNewAccounts(walletSize)
w.privateKeyAccounts.size shouldBe walletSize
w.privateKeyAccounts.map(_.toAddress.toString) shouldBe Seq(
"3MqMwwHW4v2nSEDHVWoh8RCQL8QrsWLkkeB",
"3MuwVgJA8EXHukxo6rcakT5tD6FpvACtitG",
"3MuAvUG4EAsG9RP9jaWjewCVmggaQD2t39B",
"3MqoX4A3UGBYU7cX2JPs6BCzntNC8K8FBR4",
"3N1Q9VVVQtY3GqhwHtJDEyHb3oWBcerZL8X",
"3NARifVFHthMDnCwBacXijPB2szAgNTeBCz",
"3N6dsnfD88j5yKgpnEavaaJDzAVSRBRVbMY",
"3MufvXKZxLuNn5SHcEgGc2Vo7nLWnKVskfJ",
"3Myt4tocZmj7o3d1gnuWRrnQWcoxvx5G7Ac",
"3N3keodUiS8WLEw9W4BKDNxgNdUpwSnpb3K"
)
}
test("wallet - acc deletion") {
val head = w.privateKeyAccounts.head
w.deleteAccount(head)
assert(w.privateKeyAccounts.lengthCompare(walletSize - 1) == 0)
w.deleteAccount(w.privateKeyAccounts.head)
assert(w.privateKeyAccounts.lengthCompare(walletSize - 2) == 0)
w.privateKeyAccounts.foreach(w.deleteAccount)
assert(w.privateKeyAccounts.isEmpty)
}
test("reopening") {
val walletFile = Some(createTestTemporaryFile("wallet", ".dat"))
val w1 = Wallet(WalletSettings(walletFile, "cookies".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))
w1.generateNewAccounts(10)
val w1PrivateKeys = w1.privateKeyAccounts
val w1nonce = w1.nonce
val w2 = Wallet(WalletSettings(walletFile, "cookies".some, None))
w2.privateKeyAccounts.nonEmpty shouldBe true
w2.privateKeyAccounts shouldEqual w1PrivateKeys
w2.nonce shouldBe w1nonce
val seedError = intercept[IllegalArgumentException](Wallet(WalletSettings(walletFile, "cookies".some, ByteStr.decodeBase58("fake").toOption)))
seedError.getMessage should include("Seed from config doesn't match the actual seed")
}
test("reopen with incorrect password") {
val file = Some(createTestTemporaryFile("wallet", ".dat"))
val w1 = Wallet(WalletSettings(file, "password".some, ByteStr.decodeBase58("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption))
w1.generateNewAccounts(3)
assertThrows[IllegalArgumentException] {
Wallet(WalletSettings(file, "incorrect password".some, None))
}
}
def createTestTemporaryFile(name: String, ext: String): File = {
val file = Files.createTempFile(name, ext).toFile
file.deleteOnExit()
file
}
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/lagonaki/unit/WalletSpecification.scala
|
Scala
|
mit
| 2,903 |
package reactivemongo.api.commands.bson
import scala.util.{ Failure, Success }
import reactivemongo.api.commands._
import reactivemongo.bson._
import reactivemongo.core.errors.GenericDriverException
object BSONDropDatabaseImplicits {
implicit object DropDatabaseWriter
extends BSONDocumentWriter[DropDatabase.type] {
val command = BSONDocument("dropDatabase" -> 1)
def write(dd: DropDatabase.type): BSONDocument = command
}
}
object BSONListCollectionNamesImplicits {
implicit object ListCollectionNamesWriter
extends BSONDocumentWriter[ListCollectionNames.type] {
val command = BSONDocument("listCollections" -> 1)
def write(ls: ListCollectionNames.type): BSONDocument = command
}
implicit object BSONCollectionNameReaders
extends BSONDocumentReader[CollectionNames] {
def read(doc: BSONDocument): CollectionNames = (for {
_ <- doc.getAs[BSONNumberLike]("ok").map(_.toInt).filter(_ == 1)
cr <- doc.getAs[BSONDocument]("cursor")
fb <- cr.getAs[List[BSONDocument]]("firstBatch")
ns <- wtColNames(fb, Nil)
} yield CollectionNames(ns)).getOrElse[CollectionNames](
throw GenericDriverException("fails to read collection names"))
}
@annotation.tailrec
private def wtColNames(meta: List[BSONDocument], ns: List[String]): Option[List[String]] = meta match {
case d :: ds => d.getAs[String]("name") match {
case Some(n) => wtColNames(ds, n :: ns)
case _ => None // error
}
case _ => Some(ns.reverse)
}
}
@deprecated("Use [[BSONDropCollectionImplicits]]", "0.12.0")
object BSONDropImplicits {
implicit object DropWriter extends BSONDocumentWriter[ResolvedCollectionCommand[Drop.type]] {
def write(command: ResolvedCollectionCommand[Drop.type]): BSONDocument =
BSONDocument("drop" -> command.collection)
}
}
object BSONDropCollectionImplicits {
implicit object DropCollectionWriter extends BSONDocumentWriter[ResolvedCollectionCommand[DropCollection.type]] {
def write(command: ResolvedCollectionCommand[DropCollection.type]): BSONDocument = BSONDocument("drop" -> command.collection)
}
implicit object DropCollectionResultReader
extends BSONDocumentReader[DropCollectionResult] {
def read(doc: BSONDocument): DropCollectionResult =
CommonImplicits.UnitBoxReader.readTry(doc).transform(
{ _ => Success(true) }, { error =>
if (doc.getAs[BSONNumberLike]("code"). // code == 26
map(_.toInt).exists(_ == 26) ||
doc.getAs[String]("errmsg").
exists(_ startsWith "ns not found")) {
Success(false) // code not avail. before 3.x
} else Failure(error)
}).map(DropCollectionResult(_)).get
}
}
object BSONEmptyCappedImplicits {
implicit object EmptyCappedWriter extends BSONDocumentWriter[ResolvedCollectionCommand[EmptyCapped.type]] {
def write(command: ResolvedCollectionCommand[EmptyCapped.type]): BSONDocument = BSONDocument("emptyCapped" -> command.collection)
}
}
object BSONRenameCollectionImplicits {
implicit object RenameCollectionWriter extends BSONDocumentWriter[RenameCollection] {
def write(command: RenameCollection): BSONDocument =
BSONDocument(
"renameCollection" -> command.fullyQualifiedCollectionName,
"to" -> command.fullyQualifiedTargetName,
"dropTarget" -> command.dropTarget)
}
}
object BSONCreateImplicits {
implicit object CappedWriter extends BSONDocumentWriter[Capped] {
def write(capped: Capped): BSONDocument =
BSONDocument(
"size" -> capped.size,
"max" -> capped.max)
}
implicit object CreateWriter extends BSONDocumentWriter[ResolvedCollectionCommand[Create]] {
def write(command: ResolvedCollectionCommand[Create]): BSONDocument =
BSONDocument(
"create" -> command.collection,
"autoIndexId" -> command.command.autoIndexId) ++ command.command.capped.map(capped =>
CappedWriter.write(capped) ++ ("capped" -> true)).getOrElse(BSONDocument())
}
}
object BSONCollStatsImplicits {
implicit object CollStatsWriter extends BSONDocumentWriter[ResolvedCollectionCommand[CollStats]] {
def write(command: ResolvedCollectionCommand[CollStats]): BSONDocument =
BSONDocument(
"collStats" -> command.collection, "scale" -> command.command.scale)
}
implicit object CollStatsResultReader extends DealingWithGenericCommandErrorsReader[CollStatsResult] {
def readResult(doc: BSONDocument): CollStatsResult = CollStatsResult(
doc.getAs[String]("ns").get,
doc.getAs[BSONNumberLike]("count").map(_.toInt).get,
doc.getAs[BSONNumberLike]("size").map(_.toDouble).get,
doc.getAs[BSONNumberLike]("avgObjSize").map(_.toDouble),
doc.getAs[BSONNumberLike]("storageSize").map(_.toDouble).get,
doc.getAs[BSONNumberLike]("numExtents").map(_.toInt),
doc.getAs[BSONNumberLike]("nindexes").map(_.toInt).get,
doc.getAs[BSONNumberLike]("lastExtentSize").map(_.toInt),
doc.getAs[BSONNumberLike]("paddingFactor").map(_.toDouble),
doc.getAs[BSONNumberLike]("systemFlags").map(_.toInt),
doc.getAs[BSONNumberLike]("userFlags").map(_.toInt),
doc.getAs[BSONNumberLike]("totalIndexSize").map(_.toInt).get,
{
val indexSizes = doc.getAs[BSONDocument]("indexSizes").get
(for (kv <- indexSizes.elements) yield kv._1 -> kv._2.asInstanceOf[BSONInteger].value).toArray
},
doc.getAs[BSONBooleanLike]("capped").fold(false)(_.toBoolean),
doc.getAs[BSONNumberLike]("max").map(_.toLong),
doc.getAs[BSONNumberLike]("maxSize").map(_.toDouble))
}
}
object BSONConvertToCappedImplicits {
implicit object ConvertToCappedWriter extends BSONDocumentWriter[ResolvedCollectionCommand[ConvertToCapped]] {
def write(command: ResolvedCollectionCommand[ConvertToCapped]): BSONDocument =
BSONDocument("convertToCapped" -> command.collection) ++ BSONCreateImplicits.CappedWriter.write(command.command.capped)
}
}
object BSONDropIndexesImplicits {
implicit object BSONDropIndexesWriter extends BSONDocumentWriter[ResolvedCollectionCommand[DropIndexes]] {
def write(command: ResolvedCollectionCommand[DropIndexes]): BSONDocument =
BSONDocument(
"dropIndexes" -> command.collection,
"index" -> command.command.index)
}
implicit object BSONDropIndexesReader extends DealingWithGenericCommandErrorsReader[DropIndexesResult] {
def readResult(doc: BSONDocument): DropIndexesResult =
DropIndexesResult(doc.getAs[BSONNumberLike]("nIndexesWas").map(_.toInt).getOrElse(0))
}
}
object BSONListIndexesImplicits {
import scala.util.{ Failure, Success, Try }
import reactivemongo.api.indexes.{ Index, IndexesManager }
implicit object BSONListIndexesWriter extends BSONDocumentWriter[ResolvedCollectionCommand[ListIndexes]] {
def write(command: ResolvedCollectionCommand[ListIndexes]): BSONDocument =
BSONDocument("listIndexes" -> command.collection)
}
implicit object BSONIndexListReader extends BSONDocumentReader[List[Index]] {
@annotation.tailrec
def readBatch(batch: List[BSONDocument], indexes: List[Index]): Try[List[Index]] = batch match {
case d :: ds => d.asTry[Index](IndexesManager.IndexReader) match {
case Success(i) => readBatch(ds, i :: indexes)
case Failure(e) => Failure(e)
}
case _ => Success(indexes)
}
implicit object LastErrorReader extends BSONDocumentReader[WriteResult] {
def read(doc: BSONDocument): WriteResult = (for {
ok <- doc.getAs[BSONBooleanLike]("ok").map(_.toBoolean)
n = doc.getAs[BSONNumberLike]("n").fold(0)(_.toInt)
msg <- doc.getAs[String]("errmsg")
code <- doc.getAs[BSONNumberLike]("code").map(_.toInt)
} yield DefaultWriteResult(
ok, n, Nil, None, Some(code), Some(msg))).get
}
def read(doc: BSONDocument): List[Index] = (for {
_ <- doc.getAs[BSONNumberLike]("ok").fold[Option[Unit]](
throw GenericDriverException(
"the result of listIndexes must be ok")) { ok =>
if (ok.toInt == 1) Some(()) else {
throw doc.asOpt[WriteResult].
flatMap[Exception](WriteResult.lastError).
getOrElse(new GenericDriverException(
s"fails to create index: ${BSONDocument pretty doc}"))
}
}
a <- doc.getAs[BSONDocument]("cursor")
b <- a.getAs[List[BSONDocument]]("firstBatch")
} yield b).fold[List[Index]](throw GenericDriverException(
"the cursor and firstBatch must be defined"))(readBatch(_, Nil).get)
}
}
object BSONCreateIndexesImplicits {
import reactivemongo.api.commands.WriteResult
implicit object BSONCreateIndexesWriter extends BSONDocumentWriter[ResolvedCollectionCommand[CreateIndexes]] {
import reactivemongo.api.indexes.{ IndexesManager, NSIndex }
implicit val nsIndexWriter = IndexesManager.NSIndexWriter
def write(cmd: ResolvedCollectionCommand[CreateIndexes]): BSONDocument = {
BSONDocument("createIndexes" -> cmd.collection,
"indexes" -> cmd.command.indexes.map(NSIndex(
cmd.command.db + "." + cmd.collection, _)))
}
}
implicit object BSONCreateIndexesResultReader
extends BSONDocumentReader[WriteResult] {
import reactivemongo.api.commands.DefaultWriteResult
def read(doc: BSONDocument): WriteResult =
doc.getAs[BSONNumberLike]("ok").map(_.toInt).fold[WriteResult](
throw GenericDriverException("the count must be defined")) { n =>
doc.getAs[String]("errmsg").fold[WriteResult](
DefaultWriteResult(true, n, Nil, None, None, None))(
err => DefaultWriteResult(false, n, Nil, None, None, Some(err)))
}
}
}
/**
* {{{
* import reactivemongo.api.commands.ReplSetGetStatus
* import reactivemongo.api.commands.bson.BSONReplSetGetStatusImplicits._
*
* adminDb.runCommand(ReplSetGetStatus)
* }}}
*/
object BSONReplSetGetStatusImplicits {
implicit object ReplSetGetStatusWriter
extends BSONDocumentWriter[ReplSetGetStatus.type] {
val bsonCmd = BSONDocument("replSetGetStatus" -> 1)
def write(command: ReplSetGetStatus.type): BSONDocument = bsonCmd
}
implicit object ReplSetMemberReader
extends BSONDocumentReader[ReplSetMember] {
def read(doc: BSONDocument): ReplSetMember = (for {
id <- doc.getAs[BSONNumberLike]("_id").map(_.toLong)
name <- doc.getAs[String]("name")
health <- doc.getAs[BSONNumberLike]("health").map(_.toInt)
state <- doc.getAs[BSONNumberLike]("state").map(_.toInt)
stateStr <- doc.getAs[String]("stateStr")
uptime <- doc.getAs[BSONNumberLike]("uptime").map(_.toLong)
optime <- doc.getAs[BSONNumberLike]("optimeDate").map(_.toLong)
lastHeartbeat <- doc.getAs[BSONNumberLike]("lastHeartbeat").map(_.toLong)
lastHeartbeatRecv <- doc.getAs[BSONNumberLike](
"lastHeartbeatRecv").map(_.toLong)
} yield ReplSetMember(id, name, health, state, stateStr, uptime, optime,
lastHeartbeat, lastHeartbeatRecv,
doc.getAs[String]("lastHeartbeatMessage"),
doc.getAs[BSONNumberLike]("electionTime").map(_.toLong),
doc.getAs[BSONBooleanLike]("self").fold(false)(_.toBoolean),
doc.getAs[BSONNumberLike]("pingMs").map(_.toLong),
doc.getAs[String]("syncingTo"),
doc.getAs[BSONNumberLike]("configVersion").map(_.toInt))).get
}
implicit object ReplSetStatusReader
extends DealingWithGenericCommandErrorsReader[ReplSetStatus] {
def readResult(doc: BSONDocument): ReplSetStatus = (for {
name <- doc.getAs[String]("set")
time <- doc.getAs[BSONNumberLike]("date").map(_.toLong)
myState <- doc.getAs[BSONNumberLike]("myState").map(_.toInt)
members <- doc.getAs[List[ReplSetMember]]("members")
} yield ReplSetStatus(name, time, myState, members)).get
}
}
/**
* {{{
* import reactivemongo.api.commands.ServerStatus
* import reactivemongo.api.commands.bson.BSONServerStatusImplicits._
*
* db.runCommand(ServerStatus)
* }}}
*/
object BSONServerStatusImplicits {
implicit object BSONServerStatusWriter
extends BSONDocumentWriter[ServerStatus.type] {
val bsonCmd = BSONDocument("serverStatus" -> 1)
def write(command: ServerStatus.type) = bsonCmd
}
implicit object BSONServerStatusResultReader
extends DealingWithGenericCommandErrorsReader[ServerStatusResult] {
def readResult(doc: BSONDocument): ServerStatusResult = (for {
host <- doc.getAs[String]("host")
version <- doc.getAs[String]("version")
process <- doc.getAs[String]("process").map[ServerProcess] {
case "mongos" => MongosProcess
case _ => MongodProcess
}
pid <- doc.getAs[BSONNumberLike]("pid").map(_.toLong)
uptime <- doc.getAs[BSONNumberLike]("uptime").map(_.toLong)
uptimeMillis <- doc.getAs[BSONNumberLike]("uptimeMillis").map(_.toLong)
uptimeEstimate <- doc.getAs[BSONNumberLike](
"uptimeEstimate").map(_.toLong)
localTime <- doc.getAs[BSONNumberLike]("localTime").map(_.toLong)
} yield ServerStatusResult(host, version, process, pid,
uptime, uptimeMillis, uptimeEstimate, localTime)).get
}
}
/**
* {{{
* import reactivemongo.api.commands.Resync
* import reactivemongo.api.commands.bson.BSONResyncImplicits._
*
* db.runCommand(Resync)
* }}}
*/
object BSONResyncImplicits {
private val logger =
reactivemongo.util.LazyLogger("reactivemongo.api.commands.bson.Resync")
implicit object ResyncReader extends BSONDocumentReader[ResyncResult.type] {
@inline def notDeadWarn(err: BSONCommandError) =
err.code.exists(_ == 125) || err.errmsg.exists(_ startsWith "not dead")
def read(doc: BSONDocument): ResyncResult.type = try {
CommonImplicits.UnitBoxReader.read(doc)
ResyncResult
} catch {
case err: BSONCommandError if (notDeadWarn(err)) => {
logger.warn(s"no resync done: ${err.errmsg mkString ""}")
ResyncResult
}
case error: Throwable => throw error
}
}
implicit object ResyncWriter extends BSONDocumentWriter[Resync.type] {
val command = BSONDocument("resync" -> 1)
def write(dd: Resync.type): BSONDocument = command
}
}
/**
*
*/
object BSONReplSetMaintenanceImplicits {
implicit val ReplSetMaintenanceReader = CommonImplicits.UnitBoxReader
implicit object ReplSetMaintenanceWriter
extends BSONDocumentWriter[ReplSetMaintenance] {
def write(command: ReplSetMaintenance) =
BSONDocument("replSetMaintenance" -> command.enable)
}
}
|
charleskubicek/ReactiveMongo
|
driver/src/main/scala/api/commands/bson/instanceadministration.scala
|
Scala
|
apache-2.0
| 14,572 |
package uk.gov.gds.common.mongo.migration
import uk.gov.gds.common.mongo.repository.IdentityBasedMongoRepository
import uk.gov.gds.common.logging.Logging
import uk.gov.gds.common.mongo.MongoDatabaseManager
abstract class ChangeLogRepository(databaseManager: MongoDatabaseManager)
extends IdentityBasedMongoRepository[ChangeScriptAudit]
with Logging {
def databaseChangeScripts: List[ChangeScript]
protected lazy val collection = databaseManager("changelog")
protected val databaseIdProperty = "name"
startup()
override def startup() {
super.startup()
databaseChangeScripts.foreach(applyChangeScript(_))
}
override def deleteAll() {
logger.warn("Deleting ALL changescripts from repository. I hope you knew what you were doing!")
super.deleteAll()
}
def appliedChangeScripts = all
def changeScriptAuditFor(changeScriptName: String) = load(changeScriptName)
def changeScriptAuditFor(changeScript: ChangeScript) = load(changeScript.name)
private def applyChangeScript(changeScript: ChangeScript) {
changeScriptAuditFor(changeScript) match {
case Some(audit) if (ChangeScriptStatus.ok.equals(audit.status)) =>
logger.debug("Change script " + changeScript.name + " has already been applied")
case _ => {
logger.info("Applying change script " + changeScript.name)
try {
changeScript.applyToDatabase()
safeInsert(SuccessfulChangeScriptAudit(changeScript))
}
catch {
case e: Exception =>
safeInsert(FailedChangeScriptAudit(changeScript))
logger.error("Change script failed to apply " + changeScript.shortName, e)
throw new ChangeScriptFailedException(
"Change script failed to apply " + changeScript.shortName + " [" + e.getMessage + "]", e)
}
}
}
}
}
|
alphagov/gds-scala-common
|
mongo-utils/src/main/scala/uk/gov/gds/common/mongo/migration/ChangeLogRepository.scala
|
Scala
|
mit
| 1,856 |
package io.github.databob.generators
import io.github.databob.{Databob, Generator, GeneratorType}
import org.json4s.reflect.TypeInfo
class TypeMatchingGenerator[A: Manifest](mk: Databob => A) extends Generator[A] {
val Class = implicitly[Manifest[A]].runtimeClass
def pf(databob: Databob): PartialFunction[GeneratorType, A] = {
case GeneratorType(TypeInfo(Class, _), _, _) => mk(databob)
}
}
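// Hedged usage sketch (the target type and the produced value are illustrative assumptions):
//   val localDateGenerator =
//     new TypeMatchingGenerator[java.time.LocalDate](_ => java.time.LocalDate.now())
// The partial function above then matches any GeneratorType whose runtime class is LocalDate.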
|
daviddenton/databob.scala
|
src/main/scala/io/github/databob/generators/TypeMatchingGenerator.scala
|
Scala
|
apache-2.0
| 410 |
/*
* Copyright (c) 2015 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl
import org.eclipse.collections.api.list.ListIterable
import org.eclipse.collections.impl.Prelude._
import org.eclipse.collections.impl.block.procedure.CollectionAddProcedure
import org.eclipse.collections.impl.list.mutable.FastList
import org.junit.{Assert, Test}
trait ListIterableTestTrait extends InternalIterableTestTrait
{
val classUnderTest: ListIterable[String]
@Test
abstract override def forEach
{
super.forEach
val result = FastList.newList[String]
classUnderTest.forEach(CollectionAddProcedure.on(result))
Assert.assertEquals(FastList.newListWith("1", "2", "3"), result)
}
@Test
abstract override def forEachWithIndex
{
super.forEachWithIndex
var count = 0
classUnderTest.forEachWithIndex
{
(each: String, index: Int) =>
Assert.assertEquals(index, count)
count += 1
Assert.assertEquals(String.valueOf(count), each)
}
Assert.assertEquals(3, count)
}
@Test
abstract override def forEachWith
{
super.forEachWith
val unique = new AnyRef
var count = 0
classUnderTest.forEachWith(
(each: String, parameter: AnyRef) =>
{
count += 1
Assert.assertEquals(String.valueOf(count), each)
Assert.assertSame(unique, parameter)
()
},
unique)
Assert.assertEquals(3, count)
}
}
|
g-votte/eclipse-collections
|
scala-unit-tests/src/test/scala/org/eclipse/collections/impl/ListIterableTestTrait.scala
|
Scala
|
bsd-3-clause
| 2,023 |
package com.jakway.gnucash.io
import autogen.buildinfo.BuildInfo
import com.jakway.gnucash.error.ValidationError
import com.jakway.util.StackTraceString
object ErrorPrinter {
def format(validationError: ValidationError, includeIssueTrackerMessage: Boolean = true): String = {
val suppressedExceptions = {
val suppressed = validationError.getSuppressed()
if(suppressed.isEmpty) {
"\\t\\tNone"
} else {
suppressed
.map(formatThrowable)
          .foldLeft("\t\t")(_ + _)
}
}
val issueTrackerMessage =
if(includeIssueTrackerMessage) {
s"Please report the following information to ${BuildInfo.issueTracker}:\\n"
} else {
""
}
s"An error of type ${validationError.getClass.getCanonicalName} occurred with message " +
s"${validationError.msg}\\n\\n" +
issueTrackerMessage +
"\\tStack Trace: \\n" +
"\\t\\t" + validationError.stackTrace +
"\\tSuppressed Exceptions: \\n" + suppressedExceptions
}
private def formatThrowable(t: Throwable): String =
s"Throwable of type ${t.getClass().getCanonicalName}" +
" with message " + t.getMessage + " with stack trace " +
StackTraceString.stackTraceToString(t)
}
|
tjakway/Gnucash-Regex-Importer
|
src/main/scala/com/jakway/gnucash/io/ErrorPrinter.scala
|
Scala
|
gpl-2.0
| 1,241 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import minitest.TestSuite
import monix.execution.Ack.Continue
import monix.execution.schedulers.TestScheduler
import monix.execution.exceptions.DummyException
import monix.reactive.{Observable, Observer}
import scala.concurrent.Promise
object EndWithErrorSuite extends TestSuite[TestScheduler] {
def setup() = TestScheduler()
def tearDown(s: TestScheduler) = {
assert(s.state.tasks.isEmpty, "TestScheduler should have no pending tasks")
}
test("should end in the specified error") { implicit s =>
var received = 0
var wasThrown: Throwable = null
val p = Promise[Continue.type]()
val source = Observable
.now(1000)
.endWithError(DummyException("dummy"))
source.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = {
received = elem
p.future
}
def onComplete() = ()
def onError(ex: Throwable) = {
wasThrown = ex
}
})
assertEquals(received, 1000)
assertEquals(wasThrown, DummyException("dummy"))
p.success(Continue)
s.tick()
}
test("can end in another unforeseen error") { implicit s =>
var wasThrown: Throwable = null
val source = Observable
.raiseError(DummyException("unforeseen"))
.endWithError(DummyException("expected"))
source.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = Continue
def onComplete() = ()
def onError(ex: Throwable) = {
wasThrown = ex
}
})
assertEquals(wasThrown, DummyException("unforeseen"))
}
}
|
monix/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/EndWithErrorSuite.scala
|
Scala
|
apache-2.0
| 2,257 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import cascading.tuple.{ TupleEntry, Tuple => CTuple }
import org.scalatest.{ Matchers, WordSpec }
class TupleTest extends WordSpec with Matchers {
def get[T](ctup: CTuple)(implicit tc: TupleConverter[T]) = tc(new TupleEntry(ctup))
def set[T](t: T)(implicit ts: TupleSetter[T]): CTuple = ts(t)
def arityConvMatches[T](t: T, ar: Int)(implicit tc: TupleConverter[T]): Boolean = {
tc.arity == ar
}
def aritySetMatches[T](t: T, ar: Int)(implicit tc: TupleSetter[T]): Boolean = {
tc.arity == ar
}
def roundTrip[T](t: T)(implicit tc: TupleConverter[T], ts: TupleSetter[T]): Boolean = {
tc(new TupleEntry(ts(t))) == t
}
"TupleConverters" should {
"TupleGetter should work as a type-class" in {
val emptyTup = new CTuple
val ctup = new CTuple("hey", new java.lang.Long(2), new java.lang.Integer(3), emptyTup)
TupleGetter.get[String](ctup, 0) shouldBe "hey"
TupleGetter.get[Long](ctup, 1) shouldBe 2L
TupleGetter.get[Int](ctup, 2) shouldBe 3
TupleGetter.get[CTuple](ctup, 3) shouldBe emptyTup
}
"get primitives out of cascading tuples" in {
val ctup = new CTuple("hey", new java.lang.Long(2), new java.lang.Integer(3))
get[(String, Long, Int)](ctup) shouldBe ("hey", 2L, 3)
roundTrip[Int](3) shouldBe true
arityConvMatches(3, 1) shouldBe true
aritySetMatches(3, 1) shouldBe true
roundTrip[Long](42L) shouldBe true
arityConvMatches(42L, 1) shouldBe true
aritySetMatches(42L, 1) shouldBe true
roundTrip[String]("hey") shouldBe true
arityConvMatches("hey", 1) shouldBe true
aritySetMatches("hey", 1) shouldBe true
roundTrip[(Int, Int)]((4, 2)) shouldBe true
arityConvMatches((2, 3), 2) shouldBe true
aritySetMatches((2, 3), 2) shouldBe true
}
"get non-primitives out of cascading tuples" in {
val ctup = new CTuple(None, List(1, 2, 3), 1 -> 2)
get[(Option[Int], List[Int], (Int, Int))](ctup) shouldBe (None, List(1, 2, 3), 1 -> 2)
roundTrip[(Option[Int], List[Int])]((Some(1), List())) shouldBe true
arityConvMatches((None, Nil), 2) shouldBe true
aritySetMatches((None, Nil), 2) shouldBe true
arityConvMatches(None, 1) shouldBe true
aritySetMatches(None, 1) shouldBe true
arityConvMatches(List(1, 2, 3), 1) shouldBe true
aritySetMatches(List(1, 2, 3), 1) shouldBe true
}
"deal with AnyRef" in {
val ctup = new CTuple(None, List(1, 2, 3), 1 -> 2)
get[(AnyRef, AnyRef, AnyRef)](ctup) shouldBe (None, List(1, 2, 3), 1 -> 2)
get[AnyRef](new CTuple("you")) shouldBe "you"
roundTrip[AnyRef]("hey") shouldBe true
roundTrip[(AnyRef, AnyRef)]((Nil, Nil)) shouldBe true
arityConvMatches[(AnyRef, AnyRef)](("hey", "you"), 2) shouldBe true
aritySetMatches[(AnyRef, AnyRef)](("hey", "you"), 2) shouldBe true
}
}
}
|
sriramkrishnan/scalding
|
scalding-core/src/test/scala/com/twitter/scalding/TupleTest.scala
|
Scala
|
apache-2.0
| 3,466 |
/**
* A person has a name and an age.
*/
case class Person(name: String, age: Int)
abstract class Vertical extends CaseJeu
case class Haut(a: Int) extends Vertical
case class Bas(name: String, b: Double) extends Vertical
sealed trait Ior[+A, +B]
case class Left[A](a: A) extends Ior[A, Nothing]
case class Right[B](b: B) extends Ior[Nothing, B]
case class Both[A, B](a: A, b: B) extends Ior[A, B]
trait Functor[F[_]] {
def map[A, B](fa: F[A], f: A => B): F[B]
}
// beware Int.MinValue
def absoluteValue(n: Int): Int =
if (n < 0) -n else n
def interp(n: Int): String =
s"there are $n ${color} balloons.\\n"
type ξ[A] = (A, A)
trait Hist { lhs =>
  def ⊕(rhs: Hist): Hist
}
def gsum[A: Ring](as: Seq[A]): A =
as.foldLeft(Ring[A].zero)(_ + _)
val actions: List[Symbol] =
'init :: 'read :: 'write :: 'close :: Nil
trait Cake {
type T;
type Q
val things: Seq[T]
abstract class Spindler
def spindle(s: Spindler, ts: Seq[T], reversed: Boolean = false): Seq[Q]
}
val colors = Map(
"red" -> 0xFF0000,
"turquoise" -> 0x00FFFF,
"black" -> 0x000000,
"orange" -> 0xFF8040,
"brown" -> 0x804000)
lazy val ns = for {
x <- 0 until 100
y <- 0 until 100
} yield (x + y) * 33.33
|
SoftwareHeritage/swh-web-ui
|
swh/web/tests/resources/contents/code/extensions/test.scala
|
Scala
|
agpl-3.0
| 1,227 |