code stringlengths 5–1M | repo_name stringlengths 5–109 | path stringlengths 6–208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5–1M |
---|---|---|---|---|---|
package skutek_examples.sat_solver
import skutek.abstraction._
import skutek.std_effects._
object Parser {
case object ErrorFx extends Except[(String, Int)]
def apply(source: String): AST !! ErrorFx.type =
(for {
ast <- parseExpr
c <- getChar
_ <- if (c == EOF) Return else fail("Expected end of input")
} yield ast)
.handleWith[ErrorFx.type](StFx.handler(ParserState(0, source)).dropState)
case object StFx extends State[ParserState]
type Parser[T] = T !! StFx.type with ErrorFx.type
case class ParserState(position: Int, source: String)
val EOF = '\u0000'
val getChar: Parser[Char] = for {
ps <- StFx.Get
_ <- StFx.Put(ps.copy(position = ps.position + 1))
c <-
if (!ps.source.isDefinedAt(ps.position)) Return(EOF) else {
val c = ps.source(ps.position)
if (c.isWhitespace) getChar else Return(c)
}
} yield c
val ungetChar = StFx.Mod { case ParserState(i, cs) => ParserState(i - 1, cs) }
def fail(msg: String) = StFx.Get.flatMap { case ParserState(i, _) => ErrorFx.Raise((msg, i - 1)) }
def parseExpr: Parser[AST] =
parseBinaryOrElse('=', Equiv)(
parseBinaryOrElse('>', Imply)(
parseBinaryOrElse('|', Or)(
parseBinaryOrElse('&', And)(
parseOther))))
def parseBinaryOrElse(Op: Char, cons: (AST, AST) => AST)(parseArg: Parser[AST]): Parser[AST] = {
def loop: Parser[AST] = for {
lhs <- parseArg
c <- getChar
expr <- c match {
case Op => loop.map(rhs => cons(lhs, rhs))
case _ => ungetChar *>! Return(lhs)
}
} yield expr
loop
}
def parseOther: Parser[AST] = for {
char <- getChar
expr <- char match {
case '!' => parseOther.map(Not)
case c if c.isLetter => Return(Var(c.toString))
case '(' => for {
expr <- parseExpr
c <- getChar
_ <- c match {
case ')' => Return
case _ => fail("Expected closing brace")
}
} yield expr
case ')' => fail("Missing opening brace")
case EOF => fail("Unexpected end of input")
case _ => fail("Expected start of expression")
}
} yield expr
}
| marcinzh/skutek | modules/examples/src/main/scala/skutek_examples/sat_solver/Parser.scala | Scala | mit | 2,163 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
/**
* A fast hash map implementation for primitive, non-null keys. This hash map supports
* insertions and updates, but not deletions. This map is about an order of magnitude
* faster than java.util.HashMap, while using much less space overhead.
*
* Under the hood, it uses our OpenHashSet implementation.
*/
private[spark]
class PrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassManifest,
@specialized(Long, Int, Double) V: ClassManifest](
initialCapacity: Int)
extends Iterable[(K, V)]
with Serializable {
def this() = this(64)
require(classManifest[K] == classManifest[Long] || classManifest[K] == classManifest[Int])
// Init in constructor (instead of in declaration) to work around a Scala compiler specialization
// bug that would generate two arrays (one for Object and one for specialized T).
protected var _keySet: OpenHashSet[K] = _
private var _values: Array[V] = _
_keySet = new OpenHashSet[K](initialCapacity)
_values = new Array[V](_keySet.capacity)
private var _oldValues: Array[V] = null
override def size = _keySet.size
/** Get the value for a given key */
def apply(k: K): V = {
val pos = _keySet.getPos(k)
_values(pos)
}
/** Get the value for a given key, or return elseValue if it doesn't exist. */
def getOrElse(k: K, elseValue: V): V = {
val pos = _keySet.getPos(k)
if (pos >= 0) _values(pos) else elseValue
}
/** Set the value for a key */
def update(k: K, v: V) {
val pos = _keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
_values(pos) = v
_keySet.rehashIfNeeded(k, grow, move)
_oldValues = null
}
/**
* If the key doesn't exist yet in the hash map, set its value to defaultValue; otherwise,
* set its value to mergeValue(oldValue).
*
* @return the newly updated value.
*/
def changeValue(k: K, defaultValue: => V, mergeValue: (V) => V): V = {
val pos = _keySet.addWithoutResize(k)
if ((pos & OpenHashSet.NONEXISTENCE_MASK) != 0) {
val newValue = defaultValue
_values(pos & OpenHashSet.POSITION_MASK) = newValue
_keySet.rehashIfNeeded(k, grow, move)
newValue
} else {
_values(pos) = mergeValue(_values(pos))
_values(pos)
}
}
override def iterator = new Iterator[(K, V)] {
var pos = 0
var nextPair: (K, V) = computeNextPair()
/** Get the next value we should return from next(), or null if we're finished iterating */
def computeNextPair(): (K, V) = {
pos = _keySet.nextPos(pos)
if (pos >= 0) {
val ret = (_keySet.getValue(pos), _values(pos))
pos += 1
ret
} else {
null
}
}
def hasNext = nextPair != null
def next() = {
val pair = nextPair
nextPair = computeNextPair()
pair
}
}
// The following member variables are declared as protected instead of private for the
// specialization to work (specialized class extends the unspecialized one and needs access
// to the "private" variables).
// They also should have been val's. We use var's because there is a Scala compiler bug that
// would throw illegal access error at runtime if they are declared as val's.
protected var grow = (newCapacity: Int) => {
_oldValues = _values
_values = new Array[V](newCapacity)
}
protected var move = (oldPos: Int, newPos: Int) => {
_values(newPos) = _oldValues(oldPos)
}
}
| windeye/spark | core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala | Scala | apache-2.0 | 4,282 |
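A minimal, REPL-style usage sketch for the PrimitiveKeyOpenHashMap sample above, using only methods defined in that file; the keys and values are illustrative, and it assumes the companion OpenHashSet class from the same package is on the classpath.
// Hypothetical usage of the map defined above (illustrative keys/values).
val counts = new PrimitiveKeyOpenHashMap[Long, Int](16)
counts.update(42L, 1)                            // insert or overwrite
counts.changeValue(42L, 1, _ + 1)                // merge with the existing value
val absent = counts.getOrElse(7L, 0)             // read with a default, no insertion
counts.iterator.foreach { case (k, v) => println(s"$k -> $v") }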
package brique.bench
import java.util.concurrent.TimeUnit
import algebra.Eq
import brique.ConsList
import brique.bench.input._
import org.openjdk.jmh.annotations._
import scala.collection.immutable.Range
import scala.{Array, Boolean, Exception, Int, List, Option}
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
class LongMapBench {
@Benchmark def lookupLongMap(in: LongMapInput): Option[Int] =
in.longMap.lookup(in.validIndex)
@Benchmark def lookupStdLongMap(in: StdLongMapInput): Option[Int] =
in.longMap.get(in.validIndex)
}
| julien-truffaut/brique | bench/src/main/scala/brique/bench/LongMapBench.scala | Scala | mit | 602 |
package com.productfoundry.akka.cqrs.process
import akka.util.Timeout
import com.productfoundry.akka.cqrs.EntityIdResolution._
import com.productfoundry.akka.cqrs._
import com.productfoundry.akka.cqrs.publish.EventPublication
import com.productfoundry.support.EntityTestSupport
import scala.concurrent.duration._
class ProcessManagerRegistrySpec
extends EntityTestSupport
with Fixtures {
val registry = ProcessManagerRegistry(system, entityContext)
trait DummyEvent extends AggregateEvent {
override type Id = DummyId
}
case class EventWithResolution(id: DummyId) extends DummyEvent
case class EventToIgnore(id: DummyId) extends DummyEvent
implicit object DummyProcessManagerIdResolution extends EntityIdResolution[DummyProcessManager] {
private def entityIdFromEvent: EntityIdResolver = {
case EventWithResolution(id) => id
}
override def entityIdResolver: EntityIdResolver = {
case publication: EventPublication if entityIdFromEvent.isDefinedAt(publication.eventRecord.event) =>
entityIdFromEvent(publication.eventRecord.event)
}
}
implicit def DummyProcessManagerFactory = DummyProcessManager.factory()
val duration = 5.seconds
implicit val timeout = Timeout(duration)
"Process manager registry" must {
"forward events" in new fixture {
val event = EventWithResolution(DummyId.generate())
registry.actor ! createPublication(event)
expectMsgType[AggregateEvent] should be(event)
}
"ignore events" in new fixture {
val event = EventToIgnore(DummyId.generate())
registry.actor ! createPublication(event)
expectNoMsg()
}
}
trait fixture {
registry.register[DummyProcessManager]
system.eventStream.subscribe(self, classOf[Any])
def createPublication(event: AggregateEvent): EventPublication = {
EventPublication(
AggregateEventRecord(
AggregateTag("", event.id.toString, AggregateRevision.Initial),
None,
event
)
)
}
}
}
| Product-Foundry/akka-cqrs | core/src/test/scala/com/productfoundry/akka/cqrs/process/ProcessManagerRegistrySpec.scala | Scala | apache-2.0 | 2,039 |
package org.catapult.sa.fulgurite.integration
import java.io.{File, FileInputStream}
import com.github.jaiimageio.plugins.tiff.BaselineTIFFTagSet
import org.apache.commons.io.{FileUtils, IOUtils}
import org.catapult.sa.fulgurite.geotiff.{GeoTiffMeta, Index}
import org.catapult.sa.fulgurite.spark.GeoSparkUtils
import org.junit.Assert._
import org.junit.Test
import scala.util.Random
/**
* NOTE: On Windows this test requires the Hadoop win binary
*/
class TestWriting {
@Test
def testBasicWrite() : Unit = {
val outputName = FileUtils.getTempDirectoryPath + "/tmp" + Random.nextInt()
new File(outputName).deleteOnExit()
val sc = getSparkContext("TestWriting", "local[2]")
val metaData = GeoTiffMeta(11L, 11L, 3, Array(8, 8, 8), 0, 0, Array(0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
Array(1.2, 1.2, 0.0), 2, 1, Array.empty[Int], Array(1, 1, 1), "WGS 84 / UTM zone 30N|WGS 84|",
Array(1L, 1L), Array(1L, 1L), BaselineTIFFTagSet.COMPRESSION_NONE,
Array(1, 1, 0, 7, 1024, 0, 1, 1, 1025, 0, 1, 1, 1026, 34737, 7, 22, 2054, 0, 1, 9102, 3072, 0, 1, 32630, 3076, 0, 1, 9001)
)
val input = sc.parallelize((0 until 11).flatMap(y => (0 until 11).flatMap(x => List(Index(x, y, 0) -> 255, Index(x, y, 1) -> 128, Index(x, y, 2) -> 0))))
GeoSparkUtils.saveGeoTiff(input, metaData, outputName, 10)
GeoSparkUtils.joinOutputFiles( outputName + "/header.tiff", outputName + "/", outputName + "/data.tif")
val result = new Array[Byte](4000)
val expected = new Array[Byte](4000)
IOUtils.read(new FileInputStream(outputName + "/data.tif"), result)
IOUtils.read(new FileInputStream("src/test/resources/data_chunked.tif"), expected)
assertArrayEquals(expected, result)
}
@Test
def testWritePlanar() : Unit = {
val outputName = FileUtils.getTempDirectoryPath + "/tmp" + Random.nextInt()
new File(outputName).deleteOnExit()
val sc = getSparkContext("TestWriting", "local[2]")
val metaData = GeoTiffMeta(11L, 11L, 3, Array(8, 8, 8), 0, 0, Array(0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
Array(1.2, 1.2, 0.0), 2, 1, Array.empty[Int], Array(1, 1, 1), "WGS 84 / UTM zone 30N|WGS 84|",
Array(1L, 1L), Array(1L, 1L), BaselineTIFFTagSet.COMPRESSION_NONE,
Array(1, 1, 0, 7, 1024, 0, 1, 1, 1025, 0, 1, 1, 1026, 34737, 7, 22, 2054, 0, 1, 9102, 3072, 0, 1, 32630, 3076, 0, 1, 9001)
)
metaData.planarConfiguration = BaselineTIFFTagSet.PLANAR_CONFIGURATION_PLANAR
val input = for {
band <- 0 until 3
y <- 0 until 11
x <- 0 until 11
} yield Index(x, y, band) -> band
val inputRDD = sc.parallelize(input)
GeoSparkUtils.saveGeoTiff(inputRDD, metaData, outputName, 10)
GeoSparkUtils.joinOutputFiles( outputName + "/header.tiff", outputName + "/", outputName + "/data.tif")
val result = new Array[Byte](4000)
val expected = new Array[Byte](4000)
IOUtils.read(new FileInputStream(outputName + "/data.tif"), result)
IOUtils.read(new FileInputStream("src/test/resources/data_planar.tif"), expected)
assertArrayEquals(expected, result)
}
@Test
def addingTransparentSample() : Unit = {
val outputName = FileUtils.getTempDirectoryPath + "/tmp" + Random.nextInt()
new File(outputName).deleteOnExit()
val sc = getSparkContext("TestWriting", "local[2]")
val metaData = GeoTiffMeta(11L, 11L, 3, Array(8, 8, 8), 0, 0, Array(0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
Array(1.2, 1.2, 0.0), 2, 1, Array.empty[Int], Array(1, 1, 1), "WGS 84 / UTM zone 30N|WGS 84|",
Array(1L, 1L), Array(1L, 1L), BaselineTIFFTagSet.COMPRESSION_NONE,
Array(1, 1, 0, 7, 1024, 0, 1, 1, 1025, 0, 1, 1, 1026, 34737, 7, 22, 2054, 0, 1, 9102, 3072, 0, 1, 32630, 3076, 0, 1, 9001)
)
metaData.planarConfiguration = BaselineTIFFTagSet.PLANAR_CONFIGURATION_PLANAR
metaData.samplesPerPixel = metaData.samplesPerPixel + 1 // One extra sample
metaData.bitsPerSample = (8 :: metaData.bitsPerSample.toList).toArray // that is 8 bits
metaData.extraSamples = Array(BaselineTIFFTagSet.EXTRA_SAMPLES_UNASSOCIATED_ALPHA)
metaData.sampleFormat = (BaselineTIFFTagSet.SAMPLE_FORMAT_UNSIGNED_INTEGER :: metaData.sampleFormat.toList).toArray
val input = for {
band <- 0 until 4
y <- 0 until 11
x <- 0 until 11
} yield Index(x, y, band) -> (if (band == 3) 255 else band)
val inputRDD = sc.parallelize(input)
GeoSparkUtils.saveGeoTiff(inputRDD, metaData, outputName, 10)
GeoSparkUtils.joinOutputFiles( outputName + "/header.tiff", outputName + "/", outputName + "/data.tif")
val result = new Array[Byte](4000)
val expected = new Array[Byte](4000)
IOUtils.read(new FileInputStream(outputName + "/data.tif"), result)
IOUtils.read(new FileInputStream("src/test/resources/data_planar_transparent.tif"), expected)
assertArrayEquals(expected, result)
}
@Test(expected = classOf[IllegalArgumentException])
def failCompressedWriting() : Unit = {
val sc = getSparkContext("TestWriting", "local[2]")
val metaData = GeoTiffMeta(11L, 11L, 3, Array(8, 8, 8), 0, 0, Array(0.0, 0.0, 0.0, 0.0, 0.0, 0.0),
Array(1.2, 1.2, 0.0), 2, 1, Array.empty[Int], Array(1, 1, 1), "WGS 84 / UTM zone 30N|WGS 84|",
Array(1L, 1L), Array(1L, 1L), BaselineTIFFTagSet.COMPRESSION_JPEG,
Array(1, 1, 0, 7, 1024, 0, 1, 1, 1025, 0, 1, 1, 1026, 34737, 7, 22, 2054, 0, 1, 9102, 3072, 0, 1, 32630, 3076, 0, 1, 9001)
)
val inputRDD = sc.emptyRDD[(Index, Int)]
GeoSparkUtils.saveGeoTiff(inputRDD, metaData, "wibble", 10)
}
}
| SatelliteApplicationsCatapult/fulgurite | fulgurite-core/src/test/scala/org/catapult/sa/fulgurite/integration/TestWriting.scala | Scala | lgpl-3.0 | 5,676 |
package com.snapswap.telesign.unmarshaller
trait UnMarshallerVerify
extends CommonUnMarshaller
with VerifyResponseUnMarshaller
with PhoneIdResponseUnMarshaller
object UnMarshallerVerify extends UnMarshallerVerify
| snap-swap/telesign-api-client | src/main/scala/com/snapswap/telesign/unmarshaller/UnMarshallerVerify.scala | Scala | mit | 224 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import java.math.{BigDecimal, BigInteger}
import java.nio.ByteOrder
import java.time.{ZoneId, ZoneOffset}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.parquet.column.Dictionary
import org.apache.parquet.io.api.{Binary, Converter, GroupConverter, PrimitiveConverter}
import org.apache.parquet.schema.{GroupType, OriginalType, Type}
import org.apache.parquet.schema.OriginalType.LIST
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.{BINARY, FIXED_LEN_BYTE_ARRAY, INT32, INT64, INT96}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, CaseInsensitiveMap, DateTimeUtils, GenericArrayData}
import org.apache.spark.sql.execution.datasources.DataSourceUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
* A [[ParentContainerUpdater]] is used by a Parquet converter to set converted values to some
* corresponding parent container. For example, a converter for a `StructType` field may set
* converted values to a [[InternalRow]]; or a converter for array elements may append converted
* values to an [[ArrayBuffer]].
*/
private[parquet] trait ParentContainerUpdater {
/** Called before a record field is being converted */
def start(): Unit = ()
/** Called after a record field is being converted */
def end(): Unit = ()
def set(value: Any): Unit = ()
def setBoolean(value: Boolean): Unit = set(value)
def setByte(value: Byte): Unit = set(value)
def setShort(value: Short): Unit = set(value)
def setInt(value: Int): Unit = set(value)
def setLong(value: Long): Unit = set(value)
def setFloat(value: Float): Unit = set(value)
def setDouble(value: Double): Unit = set(value)
}
/** A no-op updater used for root converter (who doesn't have a parent). */
private[parquet] object NoopUpdater extends ParentContainerUpdater
private[parquet] trait HasParentContainerUpdater {
def updater: ParentContainerUpdater
}
/**
* A convenient converter class for Parquet group types with a [[HasParentContainerUpdater]].
*/
private[parquet] abstract class ParquetGroupConverter(val updater: ParentContainerUpdater)
extends GroupConverter with HasParentContainerUpdater
/**
* Parquet converter for Parquet primitive types. Note that not all Spark SQL atomic types
* are handled by this converter. Parquet primitive types are only a subset of those of Spark
* SQL. For example, BYTE, SHORT, and INT in Spark SQL are all covered by INT32 in Parquet.
*/
private[parquet] class ParquetPrimitiveConverter(val updater: ParentContainerUpdater)
extends PrimitiveConverter with HasParentContainerUpdater {
override def addBoolean(value: Boolean): Unit = updater.setBoolean(value)
override def addInt(value: Int): Unit = updater.setInt(value)
override def addLong(value: Long): Unit = updater.setLong(value)
override def addFloat(value: Float): Unit = updater.setFloat(value)
override def addDouble(value: Double): Unit = updater.setDouble(value)
override def addBinary(value: Binary): Unit = updater.set(value.getBytes)
}
/**
* A [[ParquetRowConverter]] is used to convert Parquet records into Catalyst [[InternalRow]]s.
* Since Catalyst `StructType` is also a Parquet record, this converter can be used as root
* converter. Take the following Parquet type as an example:
* {{{
* message root {
* required int32 f1;
* optional group f2 {
* required double f21;
* optional binary f22 (utf8);
* }
* }
* }}}
* 5 converters will be created:
*
* - a root [[ParquetRowConverter]] for [[org.apache.parquet.schema.MessageType]] `root`,
* which contains:
* - a [[ParquetPrimitiveConverter]] for required
* [[org.apache.parquet.schema.OriginalType.INT_32]] field `f1`, and
* - a nested [[ParquetRowConverter]] for optional [[GroupType]] `f2`, which contains:
* - a [[ParquetPrimitiveConverter]] for required
* [[org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE]] field `f21`, and
* - a [[ParquetStringConverter]] for optional [[org.apache.parquet.schema.OriginalType.UTF8]]
* string field `f22`
*
* When used as a root converter, [[NoopUpdater]] should be used since root converters don't have
* any "parent" container.
*
* @param schemaConverter A utility converter used to convert Parquet types to Catalyst types.
* @param parquetType Parquet schema of Parquet records
* @param catalystType Spark SQL schema that corresponds to the Parquet record type. User-defined
* types should have been expanded.
* @param convertTz the optional time zone to convert to int96 data
* @param datetimeRebaseMode the mode of rebasing date/timestamp from Julian to Proleptic Gregorian
* calendar
* @param int96RebaseMode the mode of rebasing INT96 timestamp from Julian to Proleptic Gregorian
* calendar
* @param updater An updater which propagates converted field values to the parent container
*/
private[parquet] class ParquetRowConverter(
schemaConverter: ParquetToSparkSchemaConverter,
parquetType: GroupType,
catalystType: StructType,
convertTz: Option[ZoneId],
datetimeRebaseMode: LegacyBehaviorPolicy.Value,
int96RebaseMode: LegacyBehaviorPolicy.Value,
updater: ParentContainerUpdater)
extends ParquetGroupConverter(updater) with Logging {
assert(
parquetType.getFieldCount <= catalystType.length,
s"""Field count of the Parquet schema is greater than the field count of the Catalyst schema:
|
|Parquet schema:
|$parquetType
|Catalyst schema:
|${catalystType.prettyJson}
""".stripMargin)
assert(
!catalystType.existsRecursively(_.isInstanceOf[UserDefinedType[_]]),
s"""User-defined types in Catalyst schema should have already been expanded:
|${catalystType.prettyJson}
""".stripMargin)
logDebug(
s"""Building row converter for the following schema:
|
|Parquet form:
|$parquetType
|Catalyst form:
|${catalystType.prettyJson}
""".stripMargin)
/**
* Updater used together with field converters within a [[ParquetRowConverter]]. It propagates
* converted field values to the `ordinal`-th cell in `currentRow`.
*/
private final class RowUpdater(row: InternalRow, ordinal: Int) extends ParentContainerUpdater {
override def set(value: Any): Unit = row(ordinal) = value
override def setBoolean(value: Boolean): Unit = row.setBoolean(ordinal, value)
override def setByte(value: Byte): Unit = row.setByte(ordinal, value)
override def setShort(value: Short): Unit = row.setShort(ordinal, value)
override def setInt(value: Int): Unit = row.setInt(ordinal, value)
override def setLong(value: Long): Unit = row.setLong(ordinal, value)
override def setDouble(value: Double): Unit = row.setDouble(ordinal, value)
override def setFloat(value: Float): Unit = row.setFloat(ordinal, value)
}
private[this] val currentRow = new SpecificInternalRow(catalystType.map(_.dataType))
/**
* The [[InternalRow]] converted from an entire Parquet record.
*/
def currentRecord: InternalRow = currentRow
private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
datetimeRebaseMode, "Parquet")
private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
int96RebaseMode, "Parquet INT96")
// Converters for each field.
private[this] val fieldConverters: Array[Converter with HasParentContainerUpdater] = {
// (SPARK-31116) Use case insensitive map if spark.sql.caseSensitive is false
// to prevent throwing IllegalArgumentException when searching catalyst type's field index
val catalystFieldNameToIndex = if (SQLConf.get.caseSensitiveAnalysis) {
catalystType.fieldNames.zipWithIndex.toMap
} else {
CaseInsensitiveMap(catalystType.fieldNames.zipWithIndex.toMap)
}
parquetType.getFields.asScala.map { parquetField =>
val fieldIndex = catalystFieldNameToIndex(parquetField.getName)
val catalystField = catalystType(fieldIndex)
// Converted field value should be set to the `fieldIndex`-th cell of `currentRow`
newConverter(parquetField, catalystField.dataType, new RowUpdater(currentRow, fieldIndex))
}.toArray
}
// Updaters for each field.
private[this] val fieldUpdaters: Array[ParentContainerUpdater] = fieldConverters.map(_.updater)
override def getConverter(fieldIndex: Int): Converter = fieldConverters(fieldIndex)
override def end(): Unit = {
var i = 0
while (i < fieldUpdaters.length) {
fieldUpdaters(i).end()
i += 1
}
updater.set(currentRow)
}
override def start(): Unit = {
var i = 0
val numFields = currentRow.numFields
while (i < numFields) {
currentRow.setNullAt(i)
i += 1
}
i = 0
while (i < fieldUpdaters.length) {
fieldUpdaters(i).start()
i += 1
}
}
/**
* Creates a converter for the given Parquet type `parquetType` and Spark SQL data type
* `catalystType`. Converted values are handled by `updater`.
*/
private def newConverter(
parquetType: Type,
catalystType: DataType,
updater: ParentContainerUpdater): Converter with HasParentContainerUpdater = {
catalystType match {
case BooleanType | IntegerType | LongType | FloatType | DoubleType | BinaryType =>
new ParquetPrimitiveConverter(updater)
case ByteType =>
new ParquetPrimitiveConverter(updater) {
override def addInt(value: Int): Unit =
updater.setByte(value.asInstanceOf[ByteType#InternalType])
}
case ShortType =>
new ParquetPrimitiveConverter(updater) {
override def addInt(value: Int): Unit =
updater.setShort(value.asInstanceOf[ShortType#InternalType])
}
// For INT32 backed decimals
case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT32 =>
val metadata = parquetType.asPrimitiveType().getDecimalMetadata
if (metadata == null) {
// If the column is a plain INT32, we should pick the precision that can host the largest
// INT32 value.
new ParquetIntDictionaryAwareDecimalConverter(
DecimalType.IntDecimal.precision, 0, updater)
} else {
new ParquetIntDictionaryAwareDecimalConverter(
metadata.getPrecision, metadata.getScale, updater)
}
// For INT64 backed decimals
case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT64 =>
val metadata = parquetType.asPrimitiveType().getDecimalMetadata
if (metadata == null) {
// If the column is a plain INT64, we should pick the precision that can host the largest
// INT64 value.
new ParquetLongDictionaryAwareDecimalConverter(
DecimalType.LongDecimal.precision, 0, updater)
} else {
new ParquetLongDictionaryAwareDecimalConverter(
metadata.getPrecision, metadata.getScale, updater)
}
// For BINARY and FIXED_LEN_BYTE_ARRAY backed decimals
case t: DecimalType
if parquetType.asPrimitiveType().getPrimitiveTypeName == FIXED_LEN_BYTE_ARRAY ||
parquetType.asPrimitiveType().getPrimitiveTypeName == BINARY =>
val metadata = parquetType.asPrimitiveType().getDecimalMetadata
if (metadata == null) {
throw new RuntimeException(s"Unable to create Parquet converter for ${t.typeName} " +
s"whose Parquet type is $parquetType without decimal metadata. Please read this " +
"column/field as Spark BINARY type." )
} else {
new ParquetBinaryDictionaryAwareDecimalConverter(
metadata.getPrecision, metadata.getScale, updater)
}
case t: DecimalType =>
throw new RuntimeException(
s"Unable to create Parquet converter for decimal type ${t.json} whose Parquet type is " +
s"$parquetType. Parquet DECIMAL type can only be backed by INT32, INT64, " +
"FIXED_LEN_BYTE_ARRAY, or BINARY.")
case StringType =>
new ParquetStringConverter(updater)
case TimestampType if parquetType.getOriginalType == OriginalType.TIMESTAMP_MICROS =>
new ParquetPrimitiveConverter(updater) {
override def addLong(value: Long): Unit = {
updater.setLong(timestampRebaseFunc(value))
}
}
case TimestampType if parquetType.getOriginalType == OriginalType.TIMESTAMP_MILLIS =>
new ParquetPrimitiveConverter(updater) {
override def addLong(value: Long): Unit = {
val micros = DateTimeUtils.millisToMicros(value)
updater.setLong(timestampRebaseFunc(micros))
}
}
// INT96 timestamp doesn't have a logical type, here we check the physical type instead.
case TimestampType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT96 =>
new ParquetPrimitiveConverter(updater) {
// Converts nanosecond timestamps stored as INT96
override def addBinary(value: Binary): Unit = {
val julianMicros = ParquetRowConverter.binaryToSQLTimestamp(value)
val gregorianMicros = int96RebaseFunc(julianMicros)
val adjTime = convertTz.map(DateTimeUtils.convertTz(gregorianMicros, _, ZoneOffset.UTC))
.getOrElse(gregorianMicros)
updater.setLong(adjTime)
}
}
case DateType =>
new ParquetPrimitiveConverter(updater) {
override def addInt(value: Int): Unit = {
updater.set(dateRebaseFunc(value))
}
}
// A repeated field that is neither contained by a `LIST`- or `MAP`-annotated group nor
// annotated by `LIST` or `MAP` should be interpreted as a required list of required
// elements where the element type is the type of the field.
case t: ArrayType if parquetType.getOriginalType != LIST =>
if (parquetType.isPrimitive) {
new RepeatedPrimitiveConverter(parquetType, t.elementType, updater)
} else {
new RepeatedGroupConverter(parquetType, t.elementType, updater)
}
case t: ArrayType =>
new ParquetArrayConverter(parquetType.asGroupType(), t, updater)
case t: MapType =>
new ParquetMapConverter(parquetType.asGroupType(), t, updater)
case t: StructType =>
val wrappedUpdater = {
// SPARK-30338: avoid unnecessary InternalRow copying for nested structs:
// There are two cases to handle here:
//
// 1. Parent container is a map or array: we must make a deep copy of the mutable row
// because this converter may be invoked multiple times per Parquet input record
// (if the map or array contains multiple elements).
//
// 2. Parent container is a struct: we don't need to copy the row here because either:
//
// (a) all ancestors are structs and therefore no copying is required because this
// converter will only be invoked once per Parquet input record, or
// (b) some ancestor is struct that is nested in a map or array and that ancestor's
// converter will perform deep-copying (which will recursively copy this row).
if (updater.isInstanceOf[RowUpdater]) {
// `updater` is a RowUpdater, implying that the parent container is a struct.
updater
} else {
// `updater` is NOT a RowUpdater, implying that the parent container is a map or array.
new ParentContainerUpdater {
override def set(value: Any): Unit = {
updater.set(value.asInstanceOf[SpecificInternalRow].copy()) // deep copy
}
}
}
}
new ParquetRowConverter(
schemaConverter,
parquetType.asGroupType(),
t,
convertTz,
datetimeRebaseMode,
int96RebaseMode,
wrappedUpdater)
case t =>
throw new RuntimeException(
s"Unable to create Parquet converter for data type ${t.json} " +
s"whose Parquet type is $parquetType")
}
}
/**
* Parquet converter for strings. A dictionary is used to minimize string decoding cost.
*/
private final class ParquetStringConverter(updater: ParentContainerUpdater)
extends ParquetPrimitiveConverter(updater) {
private var expandedDictionary: Array[UTF8String] = null
override def hasDictionarySupport: Boolean = true
override def setDictionary(dictionary: Dictionary): Unit = {
this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { i =>
UTF8String.fromBytes(dictionary.decodeToBinary(i).getBytes)
}
}
override def addValueFromDictionary(dictionaryId: Int): Unit = {
updater.set(expandedDictionary(dictionaryId))
}
override def addBinary(value: Binary): Unit = {
// The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here we
// are using `Binary.toByteBuffer.array()` to steal the underlying byte array without copying
// it.
val buffer = value.toByteBuffer
val offset = buffer.arrayOffset() + buffer.position()
val numBytes = buffer.remaining()
updater.set(UTF8String.fromBytes(buffer.array(), offset, numBytes))
}
}
/**
* Parquet converter for fixed-precision decimals.
*/
private abstract class ParquetDecimalConverter(
precision: Int, scale: Int, updater: ParentContainerUpdater)
extends ParquetPrimitiveConverter(updater) {
protected var expandedDictionary: Array[Decimal] = _
override def hasDictionarySupport: Boolean = true
override def addValueFromDictionary(dictionaryId: Int): Unit = {
updater.set(expandedDictionary(dictionaryId))
}
// Converts decimals stored as INT32
override def addInt(value: Int): Unit = {
addLong(value: Long)
}
// Converts decimals stored as INT64
override def addLong(value: Long): Unit = {
updater.set(decimalFromLong(value))
}
// Converts decimals stored as either FIXED_LENGTH_BYTE_ARRAY or BINARY
override def addBinary(value: Binary): Unit = {
updater.set(decimalFromBinary(value))
}
protected def decimalFromLong(value: Long): Decimal = {
Decimal(value, precision, scale)
}
protected def decimalFromBinary(value: Binary): Decimal = {
if (precision <= Decimal.MAX_LONG_DIGITS) {
// Constructs a `Decimal` with an unscaled `Long` value if possible.
val unscaled = ParquetRowConverter.binaryToUnscaledLong(value)
Decimal(unscaled, precision, scale)
} else {
// Otherwise, resorts to an unscaled `BigInteger` instead.
Decimal(new BigDecimal(new BigInteger(value.getBytes), scale), precision, scale)
}
}
}
private class ParquetIntDictionaryAwareDecimalConverter(
precision: Int, scale: Int, updater: ParentContainerUpdater)
extends ParquetDecimalConverter(precision, scale, updater) {
override def setDictionary(dictionary: Dictionary): Unit = {
this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
decimalFromLong(dictionary.decodeToInt(id).toLong)
}
}
}
private class ParquetLongDictionaryAwareDecimalConverter(
precision: Int, scale: Int, updater: ParentContainerUpdater)
extends ParquetDecimalConverter(precision, scale, updater) {
override def setDictionary(dictionary: Dictionary): Unit = {
this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
decimalFromLong(dictionary.decodeToLong(id))
}
}
}
private class ParquetBinaryDictionaryAwareDecimalConverter(
precision: Int, scale: Int, updater: ParentContainerUpdater)
extends ParquetDecimalConverter(precision, scale, updater) {
override def setDictionary(dictionary: Dictionary): Unit = {
this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
decimalFromBinary(dictionary.decodeToBinary(id))
}
}
}
/**
* Parquet converter for arrays. Spark SQL arrays are represented as Parquet lists. Standard
* Parquet lists are represented as a 3-level group annotated by `LIST`:
* {{{
* <list-repetition> group <name> (LIST) { <-- parquetSchema points here
* repeated group list {
* <element-repetition> <element-type> element;
* }
* }
* }}}
* The `parquetSchema` constructor argument points to the outermost group.
*
* However, before this representation was standardized, some Parquet libraries/tools also used some
* non-standard formats to represent list-like structures. Backwards-compatibility rules for
* handling these cases are described in Parquet format spec.
*
* @see https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists
*/
private final class ParquetArrayConverter(
parquetSchema: GroupType,
catalystSchema: ArrayType,
updater: ParentContainerUpdater)
extends ParquetGroupConverter(updater) {
private[this] val currentArray = ArrayBuffer.empty[Any]
private[this] val elementConverter: Converter = {
val repeatedType = parquetSchema.getType(0)
val elementType = catalystSchema.elementType
// At this stage, we're not sure whether the repeated field maps to the element type or is
// just the syntactic repeated group of the 3-level standard LIST layout. Take the following
// Parquet LIST-annotated group type as an example:
//
// optional group f (LIST) {
// repeated group list {
// optional group element {
// optional int32 element;
// }
// }
// }
//
// This type is ambiguous:
//
// 1. When interpreted as a standard 3-level layout, the `list` field is just the syntactic
// group, and the entire type should be translated to:
//
// ARRAY<STRUCT<element: INT>>
//
// 2. On the other hand, when interpreted as a non-standard 2-level layout, the `list` field
// represents the element type, and the entire type should be translated to:
//
// ARRAY<STRUCT<element: STRUCT<element: INT>>>
//
// Here we try to convert field `list` into a Catalyst type to see whether the converted type
// matches the Catalyst array element type. If it doesn't match, then it's case 1; otherwise,
// it's case 2.
val guessedElementType = schemaConverter.convertField(repeatedType)
if (DataType.equalsIgnoreCompatibleNullability(guessedElementType, elementType)) {
// If the repeated field corresponds to the element type, creates a new converter using the
// type of the repeated field.
newConverter(repeatedType, elementType, new ParentContainerUpdater {
override def set(value: Any): Unit = currentArray += value
})
} else {
// If the repeated field corresponds to the syntactic group in the standard 3-level Parquet
// LIST layout, creates a new converter using the only child field of the repeated field.
assert(!repeatedType.isPrimitive && repeatedType.asGroupType().getFieldCount == 1)
new ElementConverter(repeatedType.asGroupType().getType(0), elementType)
}
}
override def getConverter(fieldIndex: Int): Converter = elementConverter
override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray))
override def start(): Unit = currentArray.clear()
/** Array element converter */
private final class ElementConverter(parquetType: Type, catalystType: DataType)
extends GroupConverter {
private var currentElement: Any = _
private[this] val converter =
newConverter(parquetType, catalystType, new ParentContainerUpdater {
override def set(value: Any): Unit = currentElement = value
})
override def getConverter(fieldIndex: Int): Converter = converter
override def end(): Unit = currentArray += currentElement
override def start(): Unit = currentElement = null
}
}
/** Parquet converter for maps */
private final class ParquetMapConverter(
parquetType: GroupType,
catalystType: MapType,
updater: ParentContainerUpdater)
extends ParquetGroupConverter(updater) {
private[this] val currentKeys = ArrayBuffer.empty[Any]
private[this] val currentValues = ArrayBuffer.empty[Any]
private[this] val keyValueConverter = {
val repeatedType = parquetType.getType(0).asGroupType()
new KeyValueConverter(
repeatedType.getType(0),
repeatedType.getType(1),
catalystType.keyType,
catalystType.valueType)
}
override def getConverter(fieldIndex: Int): Converter = keyValueConverter
override def end(): Unit = {
// The parquet map may contain null or duplicated map keys. When it happens, the behavior is
// undefined.
// TODO (SPARK-26174): disallow it with a config.
updater.set(
new ArrayBasedMapData(
new GenericArrayData(currentKeys.toArray),
new GenericArrayData(currentValues.toArray)))
}
override def start(): Unit = {
currentKeys.clear()
currentValues.clear()
}
/** Parquet converter for key-value pairs within the map. */
private final class KeyValueConverter(
parquetKeyType: Type,
parquetValueType: Type,
catalystKeyType: DataType,
catalystValueType: DataType)
extends GroupConverter {
private var currentKey: Any = _
private var currentValue: Any = _
private[this] val converters = Array(
// Converter for keys
newConverter(parquetKeyType, catalystKeyType, new ParentContainerUpdater {
override def set(value: Any): Unit = currentKey = value
}),
// Converter for values
newConverter(parquetValueType, catalystValueType, new ParentContainerUpdater {
override def set(value: Any): Unit = currentValue = value
}))
override def getConverter(fieldIndex: Int): Converter = converters(fieldIndex)
override def end(): Unit = {
currentKeys += currentKey
currentValues += currentValue
}
override def start(): Unit = {
currentKey = null
currentValue = null
}
}
}
private trait RepeatedConverter {
private[this] val currentArray = ArrayBuffer.empty[Any]
protected def newArrayUpdater(updater: ParentContainerUpdater) = new ParentContainerUpdater {
override def start(): Unit = currentArray.clear()
override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray))
override def set(value: Any): Unit = currentArray += value
}
}
/**
* A primitive converter for converting unannotated repeated primitive values to required arrays
* of required primitive values.
*/
private final class RepeatedPrimitiveConverter(
parquetType: Type,
catalystType: DataType,
parentUpdater: ParentContainerUpdater)
extends PrimitiveConverter with RepeatedConverter with HasParentContainerUpdater {
val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater)
private[this] val elementConverter: PrimitiveConverter =
newConverter(parquetType, catalystType, updater).asPrimitiveConverter()
override def addBoolean(value: Boolean): Unit = elementConverter.addBoolean(value)
override def addInt(value: Int): Unit = elementConverter.addInt(value)
override def addLong(value: Long): Unit = elementConverter.addLong(value)
override def addFloat(value: Float): Unit = elementConverter.addFloat(value)
override def addDouble(value: Double): Unit = elementConverter.addDouble(value)
override def addBinary(value: Binary): Unit = elementConverter.addBinary(value)
override def setDictionary(dict: Dictionary): Unit = elementConverter.setDictionary(dict)
override def hasDictionarySupport: Boolean = elementConverter.hasDictionarySupport
override def addValueFromDictionary(id: Int): Unit = elementConverter.addValueFromDictionary(id)
}
/**
* A group converter for converting unannotated repeated group values to required arrays of
* required struct values.
*/
private final class RepeatedGroupConverter(
parquetType: Type,
catalystType: DataType,
parentUpdater: ParentContainerUpdater)
extends GroupConverter with HasParentContainerUpdater with RepeatedConverter {
val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater)
private[this] val elementConverter: GroupConverter =
newConverter(parquetType, catalystType, updater).asGroupConverter()
override def getConverter(field: Int): Converter = elementConverter.getConverter(field)
override def end(): Unit = elementConverter.end()
override def start(): Unit = elementConverter.start()
}
}
private[parquet] object ParquetRowConverter {
def binaryToUnscaledLong(binary: Binary): Long = {
// The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here
// we are using `Binary.toByteBuffer.array()` to steal the underlying byte array without
// copying it.
val buffer = binary.toByteBuffer
val bytes = buffer.array()
val start = buffer.arrayOffset() + buffer.position()
val end = buffer.arrayOffset() + buffer.limit()
var unscaled = 0L
var i = start
while (i < end) {
unscaled = (unscaled << 8) | (bytes(i) & 0xff)
i += 1
}
val bits = 8 * (end - start)
unscaled = (unscaled << (64 - bits)) >> (64 - bits)
unscaled
}
def binaryToSQLTimestamp(binary: Binary): Long = {
assert(binary.length() == 12, s"Timestamps (with nanoseconds) are expected to be stored in" +
s" 12-byte long binaries. Found a ${binary.length()}-byte binary instead.")
val buffer = binary.toByteBuffer.order(ByteOrder.LITTLE_ENDIAN)
val timeOfDayNanos = buffer.getLong
val julianDay = buffer.getInt
DateTimeUtils.fromJulianDay(julianDay, timeOfDayNanos)
}
}
| witgo/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala | Scala | apache-2.0 | 31,694 |
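The binaryToUnscaledLong helper at the end of the ParquetRowConverter sample above rebuilds a signed unscaled decimal from a big-endian byte array and then sign-extends it with a left/right shift pair. Below is a standalone sketch of that trick, independent of Parquet's Binary type; the method name unscaledFromBytes is illustrative.
// Sign-extends big-endian two's-complement bytes into a Long, mirroring the
// shift trick used by ParquetRowConverter.binaryToUnscaledLong above.
def unscaledFromBytes(bytes: Array[Byte]): Long = {
  var unscaled = 0L
  bytes.foreach(b => unscaled = (unscaled << 8) | (b & 0xff))
  val bits = 8 * bytes.length
  (unscaled << (64 - bits)) >> (64 - bits)   // propagate the sign bit of the shorter value
}
// unscaledFromBytes(Array(0xff.toByte))              == -1L
// unscaledFromBytes(Array(0x01.toByte, 0x00.toByte)) == 256L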
package at.ac.tuwien.ifs.ir.evaluation.pool
import at.ac.tuwien.ifs.ir.evaluation.pool.HedgeBasedPool.rnd
import at.ac.tuwien.ifs.ir.model._
import scala.collection.parallel.{ParMap, ParSeq}
/**
* Created by aldo on 31/08/16.
*/
class FusionBasedPool(method: String, poolSize: Int, lRuns: List[Runs], gT: QRels) extends FixedSizePool(poolSize, lRuns, gT) {
override def getName:String = FusionBasedPool.getName(method, poolSize)
override lazy val qRels: QRels = PoolConverter.repoolToFusionBased(method, poolSize, lRuns, gT)
override def getNewInstance(lRuns: List[Runs]): Pool = FusionBasedPool(method, poolSize, lRuns, gT)
}
object FusionBasedPool {
def apply(method: String, poolSize: Int, lRuns: List[Runs], gT: QRels) = new FusionBasedPool(method, poolSize, lRuns, gT)
def getName(method: String, poolSize: Int):String = "fusionbased_" + method + ":" + poolSize
def getPooledDocuments(method: String, nDs: Map[Int, Int], lRuns: List[Runs], qRels: QRels)(topicId: Int): Set[Document] = {
def normalize(rr: List[RunRecord]): List[RunRecord] = {
if (rr.nonEmpty) {
val max = rr.maxBy(_.score).score
val min = rr.minBy(_.score).score
if (max != min) {
rr.map(e => RunRecord(e.iteration, e.document, e.rank, (e.score - min) / (max - min)))
} else {
rr
}
} else {
rr
}
}
val nLRuns = lRuns.filter(_.selectByTopicIdOrNil(topicId) != Nil).map(runs => {
val run = runs.selectByTopicIdOrNil(topicId)
new Runs(runs.id, List(new Run(run.id, normalize(run.runRecords))))
})
def w(rr: RunRecord, sizeRun: Int): Float =
if(rr == null) 0f else rr.score
def min(l: Seq[Float]): Float =
l.min
def max(l: Seq[Float]): Float =
l.max
def sum(l: Seq[Float]): Float =
l.sum
def anz(l: Seq[Float]): Float =
sum(l) / l.count(_ > 0f)
def mnz(l: Seq[Float]): Float =
sum(l) * l.count(_ > 0f)
def med(l: Seq[Float]) = {
val sl = l.sorted
if (sl.size % 2 == 0) {
val i = sl.size / 2
sl(i - 1) / 2f + sl(i) / 2f
} else
sl((sl.size - 1) / 2)
}
NonAdaptiveBasedPool.getPooledAlsoNullDocumentsWithStat(w,
method match {
case "combmin" => min
case "combmax" => max
case "combsum" => sum
case "combanz" => anz
case "combmnz" => mnz
case "combmed" => med
case _ => throw new Exception("Method " + method + " not recognized!")
}, nDs, nLRuns, qRels)(topicId)
}
}
| aldolipani/PoolBiasEstimators | src/main/scala/at/ac/tuwien/ifs/ir/evaluation/pool/FusionBasedPool.scala | Scala | apache-2.0 | 2,568 |
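The comb* aggregators selected in FusionBasedPool.getPooledDocuments above reduce one document's run-normalized scores, where a score of 0 marks a run that did not retrieve the document. A small REPL-style worked sketch with made-up scores:
// Illustrative normalized scores for one document across three runs.
val scores = Seq(0.8f, 0.0f, 0.6f)
val combMin = scores.min                           // 0.0
val combMax = scores.max                           // 0.8
val combSum = scores.sum                           // ~1.4
val combAnz = scores.sum / scores.count(_ > 0f)    // ~1.4 / 2 = ~0.7
val combMnz = scores.sum * scores.count(_ > 0f)    // ~1.4 * 2 = ~2.8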
package generators.v22
import generators.Helper._
import org.specs2.mutable.Specification
import utils.WithApplication
import scala.xml.XML
/**
* Tests to verify the generation of HTML
*
*
*/
class HtmlFunctionalCasesSpec extends Specification {
val version = "/0.22"
"The HTML generator should generate HTML from C3" should {
"Claim Functional cases" in new WithApplication {
for (i <- 1 to 15) {
val fileLocation = s"functionalTestCase${i}_testGeneratorResultIsSuccess.html"
val source = getClass.getResource(s"$version/claim/c3_functional$i.xml")
deleteFile(fileLocation)
generateHTML(fileLocation, XML.load(source))
}
}
"Change of circumstances Functional cases" in new WithApplication {
for (i <- 1 to 13) {
val fileLocation = s"functionalTestCase${i}_circs_testGeneratorResultIsSuccess.html"
val source = getClass.getResource(s"$version/circs/c3_functional${i}_circs.xml")
deleteFile(fileLocation)
generateHTML(fileLocation, XML.load(source))
}
for (i <- 20 to 28) {
val fileLocation = s"functionalTestCase${i}_circs_testGeneratorResultIsSuccess.html"
val source = getClass.getResource(s"$version/circs/c3_functional${i}_circs.xml")
deleteFile(fileLocation)
generateHTML(fileLocation, XML.load(source))
}
}
}
}
| Department-for-Work-and-Pensions/RenderingService | test/generators/v22/HtmlFunctionalCasesSpec.scala | Scala | mit | 1,396 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.util
package object option {
implicit class RichOption[T](o: Option[T]) {
def getOrThrow(msg: String): T =
o.getOrElse(throw new IllegalArgumentException(msg))
}
}
| ensime/ensime-server | util/src/main/scala/org/ensime/util/option.scala | Scala | gpl-3.0 | 327 |
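A REPL-style usage sketch for the RichOption enrichment above; the value and message are illustrative.
import org.ensime.util.option._
// getOrThrow unwraps a Some or fails fast with a descriptive IllegalArgumentException.
val port: Int = Some(9002).getOrThrow("port not configured")   // 9002
// None.getOrThrow("port not configured")                      // would throw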
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package types
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.statements.{Dcl, Def, EmptyDcl}
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
/*
* RefineStat ::= Dcl
* | 'type' TypeDef
*/
object RefineStat extends RefineStat {
override protected def `def` = Def
override protected def dcl = Dcl
override protected def emptyDcl = EmptyDcl
}
trait RefineStat {
protected def `def`: Def
protected def dcl: Dcl
protected def emptyDcl: EmptyDcl
def parse(builder: ScalaPsiBuilder): Boolean = {
builder.getTokenType match {
case ScalaTokenTypes.kTYPE =>
if (!`def`.parse(builder, isMod = false)) {
if (!dcl.parse(builder, isMod = false)) {
emptyDcl.parse(builder, isMod = false)
}
}
return true
case ScalaTokenTypes.kVAR | ScalaTokenTypes.kVAL
| ScalaTokenTypes.kDEF =>
if (dcl.parse(builder, isMod = false)) {
return true
}
else {
emptyDcl.parse(builder, isMod = false)
return true
}
case _ =>
return false
}
}
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/types/RefineStat.scala | Scala | apache-2.0 | 1,346 |
package controllers
import javax.inject.Inject
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.exceptions.ProviderException
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator
import com.mohiva.play.silhouette.impl.providers._
import models.User
import models.services.UserService
import play.api.i18n.{ MessagesApi, Messages }
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc.Action
import scala.concurrent.Future
/**
* The social auth controller.
*
* @param messagesApi The Play messages API.
* @param env The Silhouette environment.
* @param userService The user service implementation.
* @param authInfoRepository The auth info service implementation.
* @param socialProviderRegistry The social provider registry.
*/
class SocialAuthController @Inject() (
val messagesApi: MessagesApi,
val env: Environment[User, CookieAuthenticator],
userService: UserService,
authInfoRepository: AuthInfoRepository,
socialProviderRegistry: SocialProviderRegistry)
extends Silhouette[User, CookieAuthenticator] with Logger {
/**
* Authenticates a user against a social provider.
*
* @param provider The ID of the provider to authenticate against.
* @return The result to display.
*/
def authenticate(provider: String) = Action.async { implicit request =>
(socialProviderRegistry.get[SocialProvider](provider) match {
case Some(p: SocialProvider with CommonSocialProfileBuilder) =>
p.authenticate().flatMap {
case Left(result) => Future.successful(result)
case Right(authInfo) => for {
profile <- p.retrieveProfile(authInfo)
user <- userService.create(profile)
authInfo <- authInfoRepository.save(profile.loginInfo, authInfo)
authenticator <- env.authenticatorService.create(profile.loginInfo)
value <- env.authenticatorService.init(authenticator)
result <- env.authenticatorService.embed(value, Redirect(routes.ApplicationController.admin()))
} yield {
env.eventBus.publish(LoginEvent(user, request, request2Messages))
result
}
}
case _ => Future.failed(new ProviderException(s"Cannot authenticate with unexpected social provider $provider"))
}).recover {
case e: ProviderException =>
logger.error("Unexpected provider error", e)
Redirect(routes.ApplicationController.signIn()).flashing("error" -> Messages("could.not.authenticate"))
}
}
}
| renexdev/Play-Auth-Slick-Seed-Load-Schema | app/controllers/SocialAuthController.scala | Scala | apache-2.0 | 2,619 |
package com.atomist.rug.runtime.plans
import com.atomist.rug.spi.Handlers.Instruction.{Detail, Edit}
import com.atomist.rug.spi.Handlers.Status.{Failure, Success}
import com.atomist.rug.spi.Handlers._
import org.scalatest.{DiagrammedAssertions, FunSpec, Matchers, OneInstancePerTest}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class PlanResultInterpreterTest extends FunSpec with Matchers with DiagrammedAssertions with OneInstancePerTest {
val successfulInstructionResult = InstructionResult(Edit(Detail("edit1", None, Nil, None)), Response(Success))
val failureInstructionResult = InstructionResult(Edit(Detail("edit2", None, Nil, None)), Response(Failure))
val errorInstructionResult = InstructionError(Edit(Detail("edit3", None, Nil, None)), new IllegalStateException("doh!"))
val successfulNestedPlan = NestedPlanRun(Plan(None,Nil,Nil, Nil), PlanResult(Seq(successfulInstructionResult)))
val failureNestedPlan = NestedPlanRun(Plan(None,Nil, Nil, Nil), PlanResult(Seq(failureInstructionResult)))
val errorNestedPlan = NestedPlanRun(Plan(None,Nil, Nil, Nil), PlanResult(Seq(errorInstructionResult)))
it ("should interpret empty plan result as success") {
val planResult = PlanResult(Nil)
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Success)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with successful instruction responses as success") {
val planResult = PlanResult(Seq(
successfulInstructionResult
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Success)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with failure response as failure") {
val planResult = PlanResult(Seq(
failureInstructionResult
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with exception as failure") {
val planResult = PlanResult(Seq(
errorInstructionResult
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with success and failure responses as failure") {
val planResult = PlanResult(Seq(
successfulInstructionResult,
failureInstructionResult
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with success response and exception as failure") {
val planResult = PlanResult(Seq(
successfulInstructionResult,
errorInstructionResult
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with successful nested plan as success") {
val planResult = PlanResult(Seq(
successfulInstructionResult,
successfulNestedPlan
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Success)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with failure nested plan as failure") {
val planResult = PlanResult(Seq(
successfulInstructionResult,
failureNestedPlan
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
it ("should interpret plan result with error nested plan as failure") {
val planResult = PlanResult(Seq(
successfulInstructionResult,
errorNestedPlan
))
val actualResponse = PlanResultInterpreter.interpret(planResult)
val expectedResponse = Response(Failure)
assert (actualResponse == expectedResponse)
}
}
| atomist/rug | src/test/scala/com/atomist/rug/runtime/plans/PlanResultInterpreterTest.scala | Scala | gpl-3.0 | 4,151 |
package maker.project
import org.scalatest.FunSuite
import maker.utils.FileUtils._
import maker.utils.os.Command
class JavaCompileTests extends FunSuite with TestUtils {
test("Java module fails when expected and stays failed"){
withTempDir{
root =>
val proj = new TestModule(root, "JavaCompileTests")
proj.writeSrc(
"src/foo/Foo.java",
"""
package foo;
class Foo {
public int baz() { return 1; }
}
"""
)
proj.writeSrc(
"src/foo/Bar.java",
"""
package foo;
xclass Bar {
public int baz() { return 1; }
}
"""
)
proj.clean
assert(proj.compilePhase.classFiles.size === 0)
assert(proj.compile.failed, "Compilation should have failed")
assert(!file(proj.compilePhase.outputDir, "foo", "Bar.class").exists, "Bar.class should not exist")
sleepToNextSecond
assert(proj.compile.failed, "Compilation should have failed")
}
}
test("Java can compile 1.6 output"){
withTempDir{
root =>
val proj = new TestModule(root, "JavaCompileTests"){
override def javacOptions = List("-source", "1.6", "-target", "1.6")
}
proj.writeSrc(
"src/foo/Foo.java",
"""
package foo;
class Foo {
public int baz() { return 1; }
}
"""
)
proj.compile
assert(proj.compilePhase.classFiles.size === 1)
val classfile = file(proj.compilePhase.outputDir, "foo", "Foo.class")
assert(classfile.exists, "Foo.class should exist")
val cmd = Command("file", classfile.getAbsolutePath).withNoOutput.withSavedOutput
cmd.exec
assert(cmd.savedOutput.contains("compiled Java class data, version 50.0 (Java 1.6)"))
}
}
}
| syl20bnr/maker | maker/tests/maker/project/JavaCompileTests.scala | Scala | bsd-2-clause | 1,932 |
package com.caibowen.prma.api
import scala.concurrent.Future
/**
* @author BowenCai
* @since 05/12/2014.
*/
trait SimpleCache[K,V]{
def contains(key: K): Boolean
def clear(): Unit
/**
* keys currently available
* @return
*/
def keys: Set[K]
/**
* put value.
* behavior undefined if there is old value
* @param key
* @param value
*/
def put(key: K, value: V): Unit
/**
* put only if this key does not exist
* @param key
* @param value
*/
def putIfAbsent(key: K, value: V): Unit
/**
* put all values.
* behavior undefined if there is old value
* @param vals
*/
def putAll(vals: Map[K, V]): Unit
/**
* update, return old value if requested
* behavior undefined if there is no old value
*
* @param key
* @param value
* @param retrieve
* @return old value
*/
def doUpdate(key: K, value: V, retrieve: Boolean): Option[V]
/**
* update if old value exists, return old value if requested
* or put new value and return None
*
* @param key
* @param value
* @param retrieve
* @return old value, if exists and requested
*/
  def update(key: K, value: V, retrieve: Boolean): Option[V] = {
    if (get(key).isDefined) {
      val old = doUpdate(key, value, retrieve)
      if (old.isDefined && retrieve) old else None
    } else {
      put(key, value)
      None
    }
  }
def get(key: K): Option[V]
  def apply(key: K): V = get(key).getOrElse(null.asInstanceOf[V])
/**
* @return a map view
*/
def toMap(): Map[K, V]
/**
* approximate number of cache entries
* @return
*/
def count: Int
/**
* @param key
* @param returnVal
* @return old value, if exists and requested
*/
def remove(key: K, returnVal: Boolean): Option[V]
def removeAll(keys: Iterable[K]): Unit
import scala.concurrent.ExecutionContext.Implicits.global
def containsAsync(key: K): Future[Boolean] = Future {
contains(key)
}
def clearAsync(): Future[Unit] = Future {
clear
}
def keysAsync(): Future[Set[K]] = Future {
keys
}
def putAsync(key: K, value: V): Future[Unit] = Future {
put(key, value)
}
def putIfAbsentAsync(key: K, value: V): Future[Unit] = Future{
putIfAbsent(key, value)
}
def putAllAsync(vals: Map[K, V]): Future[Unit] = Future{
putAll(vals)
}
def getAsync(key: K): Future[Option[V]] = Future {
get(key)
}
def removeAsync(key: K, returnVal: Boolean): Future[Option[V]] = Future{
remove(key, returnVal)
}
def toMapAsync(): Future[Map[K, V]] = Future {
this.toMap()
}
def removeAllAsync(keys: Iterable[K]): Future[Unit] = Future {
removeAll(keys)
}
def updateAsync(key: K, value: V, retrieve: Boolean): Future[Option[V]] = Future {
update(key, value, retrieve)
}
}
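// Illustrative sketch (not part of the original source): a minimal, thread-safe in-memory
// implementation of SimpleCache backed by a TrieMap. doUpdate simply overwrites and returns the
// previous value; the retrieve flag is honoured by the trait's update method.
class InMemoryCache[K, V] extends SimpleCache[K, V] {
  private val store = scala.collection.concurrent.TrieMap.empty[K, V]
  def contains(key: K): Boolean = store.contains(key)
  def clear(): Unit = store.clear()
  def keys: Set[K] = store.keySet.toSet
  def put(key: K, value: V): Unit = store.put(key, value)
  def putIfAbsent(key: K, value: V): Unit = store.putIfAbsent(key, value)
  def putAll(vals: Map[K, V]): Unit = store ++= vals
  def doUpdate(key: K, value: V, retrieve: Boolean): Option[V] = store.put(key, value)
  def get(key: K): Option[V] = store.get(key)
  def toMap(): Map[K, V] = store.toMap
  def count: Int = store.size
  def remove(key: K, returnVal: Boolean): Option[V] = store.remove(key)
  def removeAll(keys: Iterable[K]): Unit = keys.foreach(store.remove)
}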
|
xkommando/PRMA
|
api/src/main/scala/com/caibowen/prma/api/SimpleCache.scala
|
Scala
|
lgpl-3.0
| 2,899 |
/*
* Copyright © 2016 - 2020 Schlichtherle IT Services
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package global.namespace.neuron.di.scala
private trait LowPriorityMacroAnnotation { self: MacroAnnotation =>
import c.universe._
lazy val NamedArg: AssignOrNamedArgExtractor = AssignOrNamedArg
}
|
christian-schlichtherle/neuron-di
|
core-scala/src/main/scala-2.12/global/namespace/neuron/di/scala/LowPriorityMacroAnnotation.scala
|
Scala
|
apache-2.0
| 822 |
package propel.lib
import propel.core.ExprTree._
import BooleanLib._
import CompareLib._
import CollectionLib.Filter
import DecimalLib._
import SampleLib._
object SampleExprs {
val trueOrFalse = Or (True (), False ())
val oneEqualsZero = Equal (One (), Zero ())
val oneMinusOne = Subtract (One (), One())
val oneMinusOneIsZero = Equal (oneMinusOne, Zero ())
val oneMinusOneIsZeroWithLet =
Let (
decls = Map ("oneMinusOne" -> oneMinusOne),
in = Equal (Lookup("oneMinusOne"), Zero ()))
val recursiveLet =
Let (
decls = Map (
"a" -> Add (Lookup ("b"), One ()),
"b" -> Subtract (Lookup ("a"), One())),
in = Equal (Lookup("a"), Zero ()))
val zeroIsLessThanOne = LessThan (Zero (), One())
val allThePeople = People ()
val peopleOver1 = Filter (People (), Lambda (p => LessThan (One (), Age (p))))
}
|
ozmi/propel
|
src/test/scala/propel/lib/SampleExprs.scala
|
Scala
|
mit
| 1,008 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.referer
import io.gatling.core.session.{ Session, SessionPrivateAttributes }
import io.gatling.http.util.HttpHelper.{ isAjax, isHtml }
import io.gatling.http.protocol.HttpProtocol
import io.gatling.http.response.Response
import org.asynchttpclient.Request
object RefererHandling {
private val RefererAttributeName = SessionPrivateAttributes.PrivateAttributePrefix + "http.referer"
def getStoredReferer(session: Session): Option[String] = session(RefererAttributeName).asOption[String]
def storeReferer(request: Request, response: Response, protocol: HttpProtocol): Session => Session =
if (protocol.requestPart.autoReferer && !isAjax(request.getHeaders) && isHtml(response.headers))
_.set(RefererAttributeName, request.getUrl)
else
Session.Identity
}
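// Illustrative usage (not from the original source): storeReferer returns a Session => Session,
// so callers apply it to the current session, e.g.
//   val updated: Session = RefererHandling.storeReferer(request, response, protocol)(session)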
|
wiacekm/gatling
|
gatling-http/src/main/scala/io/gatling/http/referer/RefererHandling.scala
|
Scala
|
apache-2.0
| 1,425 |
/*
* Copyright 2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import Keys._
import net.liftweb.sbt.LiftBuildPlugin.{crossMapped, defaultOrMapped}
object Dependencies {
type ModuleMap = String => ModuleID
lazy val CVMapping2911 = crossMapped("2.9.2" -> "2.9.1", "2.9.1-1" -> "2.9.1")
lazy val CVMappingAll = crossMapped("2.9.2" -> "2.9.1", "2.9.1-1" -> "2.9.1")
lazy val slf4jVersion = "1.6.4"
lazy val scalazGroup = defaultOrMapped("org.scalaz")
lazy val scalazVersion = defaultOrMapped("6.0.4", "2.9.0" -> "6.0.RC2")
// Compile scope:
// Scope available in all classpath, transitive by default.
lazy val commons_codec = "commons-codec" % "commons-codec" % "1.6"
lazy val commons_fileupload = "commons-fileupload" % "commons-fileupload" % "1.2.2"
lazy val commons_httpclient = "commons-httpclient" % "commons-httpclient" % "3.1"
lazy val dispatch_http = "net.databinder" % "dispatch-http" % "0.7.8" cross CVMapping2911
lazy val javamail = "javax.mail" % "mail" % "1.4.4"
lazy val joda_time = "joda-time" % "joda-time" % "1.6.2" // TODO: 2.1
lazy val htmlparser = "nu.validator.htmlparser" % "htmlparser" % "1.4"
lazy val mongo_java_driver = "org.mongodb" % "mongo-java-driver" % "2.7.3"
lazy val paranamer = "com.thoughtworks.paranamer" % "paranamer" % "2.4.1"
lazy val scalajpa = "org.scala-libs" % "scalajpa" % "1.4" cross CVMappingAll
lazy val scalap: ModuleMap = "org.scala-lang" % "scalap" % _
lazy val scala_compiler: ModuleMap = "org.scala-lang" % "scala-compiler" % _
lazy val scalaz_core: ModuleMap = sv => scalazGroup(sv) % "scalaz-core" % scalazVersion(sv) cross CVMappingAll
lazy val slf4j_api = "org.slf4j" % "slf4j-api" % slf4jVersion
lazy val squeryl = "org.squeryl" % "squeryl" % "0.9.5-1" cross crossMapped("2.9.1-1" -> "2.9.1", "2.8.2" -> "2.8.1")
// Aliases
lazy val mongo_driver = mongo_java_driver
lazy val scalaz = scalaz_core
// Provided scope:
// Scope provided by container, available only in compile and test classpath, non-transitive by default.
lazy val logback = "ch.qos.logback" % "logback-classic" % "1.0.1" % "provided"
lazy val log4j = "log4j" % "log4j" % "1.2.16" % "provided"
lazy val slf4j_log4j12 = "org.slf4j" % "slf4j-log4j12" % slf4jVersion % "provided"
lazy val persistence_api = "javax.persistence" % "persistence-api" % "1.0" % "provided"
lazy val servlet_api = "javax.servlet" % "servlet-api" % "2.5" % "provided"
// Runtime scope:
// Scope provided in runtime, available only in runtime and test classpath, not compile classpath, non-transitive by default.
lazy val derby = "org.apache.derby" % "derby" % "10.7.1.1" % "test" //% "optional"
lazy val h2database = "com.h2database" % "h2" % "1.2.147" % "test" //% "optional"
// Aliases
lazy val h2 = h2database
// Test scope:
// Scope available only in test classpath, non-transitive by default.
// TODO: See if something alternative with lesser footprint can be used instead of mega heavy apacheds
lazy val apacheds = "org.apache.directory.server" % "apacheds-server-integ" % "1.5.5" % "test" // TODO: 1.5.7
lazy val jetty6 = "org.mortbay.jetty" % "jetty" % "6.1.26" % "test"
lazy val jwebunit = "net.sourceforge.jwebunit" % "jwebunit-htmlunit-plugin" % "2.5" % "test"
lazy val mockito_all = "org.mockito" % "mockito-all" % "1.9.0" % "test"
lazy val scalacheck = "org.scalacheck" % "scalacheck" % "1.10.0" % "test" cross CVMappingAll
lazy val specs2 = "org.specs2" % "specs2" % "1.11" % "test" cross CVMappingAll
}
|
pbrant/framework
|
project/Dependencies.scala
|
Scala
|
apache-2.0
| 4,780 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class UniqueProcessTest extends Specification {
val process = new UniqueProcess
val sft = SimpleFeatureTypes.createType("unique", "track:String,dtg:Date,*geom:Point:srid=4326")
val fc = new ListFeatureCollection(sft)
val features = (0 until 10).map { i =>
val sf = new ScalaSimpleFeature(sft, i.toString)
sf.setAttribute(0, s"t-${i % 2}")
sf.setAttribute(1, s"2017-05-24T00:00:0$i.000Z")
sf.setAttribute(2, s"POINT(45 5$i)")
sf
}
step {
features.foreach(fc.add)
}
"UniqueProcess" should {
"manually visit a feature collection" in {
val result = SelfClosingIterator(process.execute(fc, "track", null, null, null, null, null).features).toSeq
foreach(result)(_.getAttributeCount mustEqual 1)
result.map(_.getAttribute(0)) must containTheSameElementsAs(Seq("t-0", "t-1"))
}
"manually visit a feature collection with counts" in {
val result = SelfClosingIterator(process.execute(fc, "track", null, true, null, null, null).features).toSeq
foreach(result)(_.getAttributeCount mustEqual 2)
result.map(_.getAttribute(0)) must containTheSameElementsAs(Seq("t-0", "t-1"))
result.map(_.getAttribute(1)) must containTheSameElementsAs(Seq(5L, 5L))
}
"manually visit a feature collection with a filter" in {
val result = SelfClosingIterator(process.execute(fc, "track", ECQL.toFilter("dtg before 2017-05-24T00:00:05.001Z"), true, null, null, null).features).toSeq
foreach(result)(_.getAttributeCount mustEqual 2)
result.map(_.getAttribute(0)) must containTheSameElementsAs(Seq("t-0", "t-1"))
result.map(_.getAttribute(1)) must containTheSameElementsAs(Seq(3L, 3L))
}
}
}
|
ronq/geomesa
|
geomesa-process/geomesa-process-vector/src/test/scala/org/locationtech/geomesa/process/analytic/UniqueProcessTest.scala
|
Scala
|
apache-2.0
| 2,646 |
package com.whitepages.cloudmanager.client
import java.text.SimpleDateFormat
import java.util.Locale
import org.apache.solr.common.util.NamedList
import scala.collection.JavaConverters._
import scala.concurrent.duration._
trait SolrResponseHelper {
implicit val rsp: NamedList[AnyRef]
// solr response objects are annoying.
// TODO: Use NamedList.findRecursive?
def walk(directions: String*): Option[String] = walk(directions.toList)
def walk(directions: List[String])(implicit node: NamedList[AnyRef]): Option[String] = {
directions.length match {
case 0 => throw new RuntimeException("Recursed one too many times")
case 1 => {
val destination = node.get(directions.head)
if (destination == null)
None
else
Some(destination.toString)
}
case _ => {
val step = node.get(directions.head)
if (step == null)
None
else
walk(directions.tail)(step.asInstanceOf[NamedList[AnyRef]])
}
}
}
def get(key: String) = rsp.get(key)
lazy val status = walk("responseHeader", "status").getOrElse("-100")
}
case class GenericSolrResponse(rsp: NamedList[AnyRef]) extends SolrResponseHelper
case class ReplicationStateResponse(rsp: NamedList[AnyRef]) extends SolrResponseHelper {
private val backupDateFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy", Locale.ROOT)
lazy val replicating = walk("details", "slave", "isReplicating")
lazy val replicationTimeRemaining =
walk("details", "slave", "timeRemaining").map(_.replace("s", "").toInt.seconds)
lazy val generation = walk("details", "generation")
lazy val indexVersion = walk("details", "indexVersion")
lazy val lastBackupSucceeded = walk("details", "backup", "status").map(_.toLowerCase == "success")
lazy val lastBackup = walk("details", "backup", "snapshotCompletedAt").map(backupDateFormat.parse)
lazy val indexPath = walk("details", "indexPath")
}
case class LukeStateResponse(rsp: NamedList[AnyRef]) extends SolrResponseHelper {
lazy val numDocs = walk("index", "numDocs").map(_.toInt)
lazy val version = walk("index", "version")
lazy val current = walk("index", "current").map(s => if (s == "true") true else false)
}
case class SystemStateResponse(rsp: NamedList[AnyRef]) extends SolrResponseHelper {
lazy val solrVersion = walk("lucene", "solr-spec-version").map(SolrCloudVersion(_)).getOrElse(SolrCloudVersion.unknown)
lazy val zkHost =
walk("zkHost") // solr 5.x
.orElse(findCmdLineArg("-DzkHost=")) // solr 4.x
def jmxNode = Option(rsp.findRecursive("jvm", "jmx")).map(_.asInstanceOf[NamedList[AnyRef]])
def findCmdLineArg(argPrefix: String) = {
val argsOpt = jmxNode.flatMap(n => Option(n.get("commandLineArgs"))).map(_.asInstanceOf[java.util.List[String]].asScala)
argsOpt.flatMap(arg => arg.find(_.startsWith(argPrefix))).map(_.replace(argPrefix, ""))
}
}
case class RestoreStateResponse(rsp: NamedList[AnyRef]) extends SolrResponseHelper {
lazy val restoreStatus = walk("restorestatus", "status")
// note: could be neither failed nor success (particularly, "No restore actions in progress")
lazy val restoreSuccess = restoreStatus.exists(_.toLowerCase == "success")
lazy val restoreFailure = restoreStatus.exists(_.toLowerCase == "failed")
lazy val restoreSnapshotName = walk("restorestatus", "snapshotName")
}
object SolrCloudVersion {
def parseVersion(version: String): SolrCloudVersion = {
    val cleanVersion = version.trim.replaceAll("""\s""", "").replaceAll("-.*$", "")
val versions = cleanVersion.split('.')
val major = if (versions.length > 0) versions(0).toInt else 0
val minor = if (versions.length > 1) versions(1).toInt else 0
val patch = if (versions.length > 2) versions(2).toInt else 0
SolrCloudVersion(major, minor, patch)
}
def apply(version: String): SolrCloudVersion = parseVersion(version)
val unknown = SolrCloudVersion(0,0,0)
}
case class SolrCloudVersion(major: Int, minor: Int, patch: Int = 0) extends Ordered[SolrCloudVersion] {
override def compare(that: SolrCloudVersion): Int = {
if (major != that.major) major.compareTo(that.major)
else if (minor != that.minor) minor.compareTo(that.minor)
else if (patch != that.patch) patch.compareTo(that.patch)
else 0
}
override val toString = List(major, minor, patch).mkString(".")
}
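// Illustrative sketch (not from the original source): SolrCloudVersion strips whitespace and any
// "-SUFFIX" before parsing, and orders versions by major, then minor, then patch.
object SolrCloudVersionExamples {
  val parsed = SolrCloudVersion("5.4.1-SNAPSHOT") // SolrCloudVersion(5, 4, 1)
  val newer = parsed > SolrCloudVersion(5, 4)     // true: the patch level breaks the tie
  val label = parsed.toString                     // "5.4.1"
}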
|
randomstatistic/solrcloud_manager
|
src/main/scala/com/whitepages/cloudmanager/client/SolrResponseHelper.scala
|
Scala
|
apache-2.0
| 4,383 |
/*******************************************************************************
Copyright (c) 2013, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.DOMCore
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
object DOMImplementationSource extends DOM {
private val name = "DOMImplementationSource"
  /* predefined locations */
val loc_cons = newSystemRecentLoc(name + "Cons")
val loc_proto = newSystemRecentLoc(name + "Proto")
/* constructor or object*/
private val prop_cons: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(BoolTrue))),
("@hasinstance", AbsConstValue(PropValue(Value(NullTop)))),
("length", AbsConstValue(PropValue(ObjectValue(Value(AbsNumber.alpha(0)), F, F, F)))),
("prototype", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F))))
)
  /* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(BoolTrue))),
("getDOMImplementation", AbsBuiltinFunc("DOMImplementationSource.getDOMImplementation", 1)),
("getDOMImplementationList", AbsBuiltinFunc("DOMImplementationSource.getDOMImplementationList", 1))
)
/* global */
private val prop_global: List[(String, AbsProperty)] = List(
(name, AbsConstValue(PropValue(ObjectValue(loc_cons, T, F, T))))
)
def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_cons, prop_cons), (loc_proto, prop_proto), (GlobalLoc, prop_global)
)
def getSemanticMap(): Map[String, SemanticFun] = {
Map(
//TODO: not yet implemented
//case "DOMImplementationSource.getDOMImplementation" => ((h,ctx),(he,ctxe))
//case "DOMImplementationSource.getDOMImplementationList" => ((h,ctx),(he,ctxe))
)
}
def getPreSemanticMap(): Map[String, SemanticFun] = {
Map(
//TODO: not yet implemented
//case "DOMImplementationSource.getDOMImplementation" => ((h,ctx),(he,ctxe))
//case "DOMImplementationSource.getDOMImplementationList" => ((h,ctx),(he,ctxe))
)
}
def getDefMap(): Map[String, AccessFun] = {
Map(
//TODO: not yet implemented
//case "DOMImplementationSource.getDOMImplementation" => ((h,ctx),(he,ctxe))
//case "DOMImplementationSource.getDOMImplementationList" => ((h,ctx),(he,ctxe))
)
}
def getUseMap(): Map[String, AccessFun] = {
Map(
//TODO: not yet implemented
//case "DOMImplementationSource.getDOMImplementation" => ((h,ctx),(he,ctxe))
//case "DOMImplementationSource.getDOMImplementationList" => ((h,ctx),(he,ctxe))
)
}
/* instance */
//def instantiate() = Unit // not yet implemented
  // instance of DOMImplementationSource should have no properties
}
|
daejunpark/jsaf
|
src/kr/ac/kaist/jsaf/analysis/typing/models/DOMCore/DOMImplementationSource.scala
|
Scala
|
bsd-3-clause
| 3,578 |
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperables.spark.wrappers.estimators
import io.deepsense.deeplang.doperables.spark.wrappers.params.common.OptionalQuantilesColumnChoice
import io.deepsense.deeplang.params.ParamPair
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
class AFTSurvivalRegressionSmokeTest
extends AbstractEstimatorModelWrapperSmokeTest {
override def className: String = "AFTSurvivalRegression"
override val estimator = new AFTSurvivalRegression()
import estimator._
val optionalQuantilesChoice = OptionalQuantilesColumnChoice.QuantilesColumnNoOption()
override val estimatorParams: Seq[ParamPair[_]] = Seq(
censorColumn -> NameSingleColumnSelection("myCensor"),
fitIntercept -> true,
maxIterations -> 2.0,
tolerance -> 0.01,
featuresColumn -> NameSingleColumnSelection("myStandardizedFeatures"),
labelColumn -> NameSingleColumnSelection("myNoZeroLabel"),
predictionColumn -> "pred",
optionalQuantilesColumn -> optionalQuantilesChoice,
quantileProbabilities -> Array(0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99)
)
}
|
deepsense-io/seahorse-workflow-executor
|
deeplang/src/it/scala/io/deepsense/deeplang/doperables/spark/wrappers/estimators/AFTSurvivalRegressionSmokeTest.scala
|
Scala
|
apache-2.0
| 1,711 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate
package jade
import java.io.{ StringWriter, PrintWriter, File }
import org.fusesource.scalate.scaml.ScamlTestSupport
/**
* <p>
* </p>
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
class JadeTestSupport extends ScamlTestSupport {
override def render(name: String, content: String): String = {
val buffer = new StringWriter()
val out = new PrintWriter(buffer)
val uri = "/org/fusesource/scalate/jade/test" + name
val context = new DefaultRenderContext(uri, engine, out) {
val name = "Hiram"
val title = "MyPage"
val href = "http://scalate.fusesource.org"
val quality = "scrumptious"
}
engine.bindings = List(Binding("context", context.getClass.getName, true))
val testIdx = testCounter.incrementAndGet
val dir = new File("target/JadeTest")
dir.mkdirs
engine.workingDirectory = dir
context.attributes("context") = context
context.attributes("bean") = Bean("red", 10)
context.attributes("label") = "Scalate"
val template = compileJade(uri, content)
template.render(context)
out.close
buffer.toString
}
}
|
maslovalex/scalate
|
scalate-core/src/test/scala/org/fusesource/scalate/jade/JadeTestSupport.scala
|
Scala
|
apache-2.0
| 1,899 |
package org.littlewings.javaee7.scoped
import javax.enterprise.context.ApplicationScoped
class Book
trait Shop[T]
@ApplicationScoped
class Business
@ApplicationScoped
class BookShop extends Business with Shop[Book]
|
kazuhira-r/javaee7-scala-examples
|
cdi-typed/src/main/scala/org/littlewings/javaee7/scoped/BookShop.scala
|
Scala
|
mit
| 220 |
package org.jetbrains.plugins.scala.projectHighlighting
import com.intellij.openapi.util.TextRange
import com.intellij.pom.java.LanguageLevel
import org.jetbrains.plugins.scala.HighlightingTests
import org.junit.experimental.categories.Category
@Category(Array(classOf[HighlightingTests]))
class MeerkatProjectHighlightingTest extends GithubSbtAllProjectHighlightingTest {
override def jdkLanguageLevel: LanguageLevel = LanguageLevel.JDK_1_8
override def githubUsername = "niktrop"
override def githubRepoName = "Meerkat"
override def revision = "5013864a9cbcdb43f92d1d57200352743d412235"
override def filesWithProblems: Map[String, Set[TextRange]] = Map(
"src/test/scala/org/meerkat/parsers/examples/Example12.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example10.scala" -> Set(),
"src/main/scala/org/meerkat/parsers/package.scala" -> Set((4162, 4227),(4125, 4155)),
"src/test/scala/org/meerkat/parsers/examples/Example4.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example14.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example13.scala" -> Set((1771, 1774),(1850, 1851),(1856, 1859),(1862, 1863)),
"src/test/scala/org/meerkat/parsers/examples/Example8.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example9.scala" -> Set(),
"src/main/scala/org/meerkat/parsers/Parsers.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example1.scala" -> Set(),
"src/main/scala/org/meerkat/parsers/AbstractOperatorParsers.scala" -> Set(),
"src/main/scala/org/meerkat/parsers/OperatorParsers.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example11.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example3.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example2.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example5.scala" -> Set((1654, 1915),(1640, 1651)),
"src/test/scala/org/meerkat/parsers/examples/Example15.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example6.scala" -> Set(),
"src/main/scala/org/meerkat/parsers/AbstractParsers.scala" -> Set(),
"src/test/scala/org/meerkat/parsers/examples/Example7.scala" -> Set()
)
}
|
JetBrains/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/projectHighlighting/MeerkatProjectHighlightingTest.scala
|
Scala
|
apache-2.0
| 2,267 |
package HackerRank.Training.BasicProgramming
import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter}
import java.util.InputMismatchException
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
import scala.reflect.ClassTag
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <[email protected]> on 8/11/2017
*/
private[this] object LisaWorkbook {
import Reader._
import Writer._
private[this] val TEST_INPUT: Option[String] = None
//------------------------------------------------------------------------------------------//
// Solution
//------------------------------------------------------------------------------------------//
private[this] def solve(): Unit = {
val n = next[Int]()
val k = next[Int]()
val book = Book(k, next[Int, Vector](n))
println(book.countSpecialProblems)
}
private[this] final class Book(val pages: Seq[Page]) extends AnyVal {
def countSpecialProblems: Int = {
pages.count(page => page.problems.contains(page.pageNumber))
}
override def toString: String = pages.toString
}
private[this] object Book {
def apply(pageSize: Int, problems: Seq[Int]): Book = {
var pageNumber = 1
new Book(
problems.zipWithIndex.flatMap { case (numberOfProblems, chapterNumber) =>
val chapterBuilder = Vector.newBuilder[Page]
for (pageStartProblem <- 1 to numberOfProblems by pageSize) {
val isPageNotFullyPopulated = pageStartProblem > numberOfProblems - pageSize + 1
val pageLength = (if (isPageNotFullyPopulated) numberOfProblems % pageSize else pageSize) - 1
chapterBuilder +=
Page(
pageNumber,
chapterNumber + 1,
pageStartProblem to pageStartProblem + pageLength
)
pageNumber += 1
}
chapterBuilder.result()
}
)
}
}
private[this] final case class Page(pageNumber: Int, chapter: Int, problems: Range)
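  // Worked example (illustrative, not part of the original source): with pageSize k = 3 and
  // chapter sizes Vector(4, 2), Book(3, Vector(4, 2)) lays pages out as
  //   Page(1, chapter 1, problems 1..3), Page(2, chapter 1, problems 4..4),
  //   Page(3, chapter 2, problems 1..2)
  // so countSpecialProblems returns 1: only page 1 holds a problem number (1) equal to its page number.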
//------------------------------------------------------------------------------------------//
// Run
//------------------------------------------------------------------------------------------//
@throws[Exception]
def main(args: Array[String]): Unit = {
val s = System.currentTimeMillis
solve()
flush()
if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms")
}
//------------------------------------------------------------------------------------------//
// Input
//------------------------------------------------------------------------------------------//
private[this] final object Reader {
private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes))
def next[T: ClassTag](): T = {
implicitly[ClassTag[T]].runtimeClass match {
case java.lang.Integer.TYPE => nextInt().asInstanceOf[T]
case java.lang.Long.TYPE => nextLong().asInstanceOf[T]
case java.lang.Double.TYPE => nextDouble().asInstanceOf[T]
case java.lang.Character.TYPE => nextChar().asInstanceOf[T]
case s if Class.forName("java.lang.String") == s => nextString().asInstanceOf[T]
case b if Class.forName("scala.math.BigInt") == b => BigInt(nextString()).asInstanceOf[T]
case b if Class.forName("scala.math.BigDecimal") == b => BigDecimal(nextString()).asInstanceOf[T]
case _ => throw new RuntimeException("Unsupported input type.")
}
}
def next[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += reader
}
builder.result()
}
def nextWithIndex[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((reader, i))
}
builder.result()
}
def next[T: ClassTag, Coll[_]](n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += next[T]()
}
builder.result()
}
def nextWithIndex[T: ClassTag, Coll[_]](n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((next[T](), i))
}
builder.result()
}
def nextMultiLine[T: ClassTag](n: Int, m: Int): Seq[Seq[T]] = {
val map = Vector.newBuilder[Vector[T]]
var i = 0
while (i < n) {
map += next[T, Vector](m)
i += 1
}
map.result()
}
private[this] def nextDouble(): Double = nextString().toDouble
private[this] def nextChar(): Char = skip.toChar
private[this] def nextString(): String = {
var b = skip
val sb = new java.lang.StringBuilder
while (!isSpaceChar(b)) {
sb.appendCodePoint(b)
b = readByte().toInt
}
sb.toString
}
private[this] def nextInt(): Int = {
var num = 0
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Int")
}
private[this] def nextLong(): Long = {
var num = 0L
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Long")
}
private[this] val inputBuffer = new Array[Byte](1024)
private[this] var lenBuffer = 0
private[this] var ptrBuffer = 0
private[this] def readByte()(implicit in: java.io.InputStream): Byte = {
if (lenBuffer == -1) throw new InputMismatchException
if (ptrBuffer >= lenBuffer) {
ptrBuffer = 0
try {
lenBuffer = in.read(inputBuffer)
} catch {
case _: IOException =>
throw new InputMismatchException
}
if (lenBuffer <= 0) return -1
}
inputBuffer({
ptrBuffer += 1
ptrBuffer - 1
})
}
private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126)
private[this] def skip = {
var b = 0
while ( {
b = readByte().toInt
b != -1 && isSpaceChar(b)
}) {}
b
}
}
//------------------------------------------------------------------------------------------//
// Output
//------------------------------------------------------------------------------------------//
private[this] final object Writer {
private[this] val out = new PrintWriter(System.out)
def flush(): Unit = out.flush()
def println(x: Any): Unit = out.println(x)
def print(x: Any): Unit = out.print(x)
}
}
|
robertoFischer/hackerrank
|
src/main/scala/HackerRank/Training/BasicProgramming/LisaWorkbook.scala
|
Scala
|
mit
| 7,798 |
package com.twitter.finagle.redis.protocol
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
import com.twitter.finagle.redis.ClientError
import com.twitter.finagle.redis.util._
import Commands.trimList
case class LLen(key: ChannelBuffer) extends StrictKeyCommand {
val command = Commands.LLEN
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.LLEN, key))
}
object LLen {
def apply(args: Seq[Array[Byte]]): LLen = {
LLen(GetMonadArg(args, CommandBytes.LLEN))
}
}
case class LIndex(key: ChannelBuffer, index: Long) extends StrictKeyCommand {
val command = Commands.LINDEX
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.LINDEX, key, StringToChannelBuffer(index.toString)))
}
object LIndex {
def apply(args: Seq[Array[Byte]]): LIndex = {
val list = trimList(args, 2, Commands.LINDEX)
val index = RequireClientProtocol.safe {
NumberFormat.toInt(BytesToString(list(1)))
}
LIndex(ChannelBuffers.wrappedBuffer(list(0)), index)
}
}
case class LInsert(
key: ChannelBuffer,
relativePosition: String,
pivot: ChannelBuffer,
value: ChannelBuffer)
extends StrictKeyCommand
with StrictValueCommand
{
val command = Commands.LINSERT
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.LINSERT, key, StringToChannelBuffer(relativePosition),
pivot, value))
}
object LInsert {
def apply(args: Seq[Array[Byte]]): LInsert = {
val list = trimList(args, 4, Commands.LINSERT)
LInsert(ChannelBuffers.wrappedBuffer(list(0)),
BytesToString(list(1)),
ChannelBuffers.wrappedBuffer(list(2)),
ChannelBuffers.wrappedBuffer(list(3)))
}
}
case class LPop(key: ChannelBuffer) extends StrictKeyCommand {
val command = Commands.LPOP
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.LPOP, key))
}
object LPop {
def apply(args: Seq[Array[Byte]]): LPop = {
LPop(GetMonadArg(args, CommandBytes.LPOP))
}
}
case class LPush(key: ChannelBuffer, values: Seq[ChannelBuffer]) extends StrictKeyCommand {
val command = Commands.LPUSH
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.LPUSH, key) ++ values)
}
object LPush {
def apply(args: List[Array[Byte]]): LPush = args match {
case head :: tail =>
LPush(ChannelBuffers.wrappedBuffer(head), tail map ChannelBuffers.wrappedBuffer)
case _ => throw ClientError("Invalid use of LPush")
}
}
case class LRem(key: ChannelBuffer, count: Long, value: ChannelBuffer)
extends StrictKeyCommand
with StrictValueCommand
{
val command = Commands.LREM
override def toChannelBuffer = {
val commandArgs = Seq(CommandBytes.LREM, key, StringToChannelBuffer(count.toString), value)
RedisCodec.toUnifiedFormat(commandArgs)
}
}
object LRem {
def apply(args: List[Array[Byte]]): LRem = {
val list = trimList(args, 3, Commands.LREM)
val count = RequireClientProtocol.safe {
NumberFormat.toInt(BytesToString(list(1)))
}
LRem(ChannelBuffers.wrappedBuffer(list(0)), count, ChannelBuffers.wrappedBuffer(list(2)))
}
}
case class LSet(key: ChannelBuffer, index: Long, value: ChannelBuffer)
extends StrictKeyCommand
with StrictValueCommand
{
val command = Commands.LSET
override def toChannelBuffer = {
val commandArgs = List(CommandBytes.LSET, key, StringToChannelBuffer(index.toString), value)
RedisCodec.toUnifiedFormat(commandArgs)
}
}
object LSet {
def apply(args: List[Array[Byte]]): LSet = {
val list = trimList(args, 3, Commands.LSET)
val index = RequireClientProtocol.safe {
NumberFormat.toInt(BytesToString(list(1)))
}
LSet(ChannelBuffers.wrappedBuffer(list(0)), index, ChannelBuffers.wrappedBuffer(list(2)))
}
}
case class LRange(key: ChannelBuffer, start: Long, end: Long) extends ListRangeCommand {
override val command = Commands.LRANGE
}
object LRange {
def apply(args: Seq[Array[Byte]]): LRange = {
val list = trimList(args, 3, Commands.LRANGE)
val (start, end) = RequireClientProtocol.safe {
Tuple2(NumberFormat.toInt(BytesToString(list(1))), NumberFormat.toInt(BytesToString(list(2))))
}
LRange(ChannelBuffers.wrappedBuffer(list(0)), start, end)
}
}
case class RPop(key: ChannelBuffer) extends StrictKeyCommand {
val command = Commands.RPOP
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.RPOP, key))
}
object RPop {
def apply(args: List[Array[Byte]]): RPop = {
RPop(GetMonadArg(args, CommandBytes.RPOP))
}
}
case class RPush(key: ChannelBuffer, values: List[ChannelBuffer]) extends StrictKeyCommand {
val command = Commands.RPUSH
override def toChannelBuffer =
RedisCodec.toUnifiedFormat(Seq(CommandBytes.RPUSH, key) ++ values)
}
object RPush {
def apply(args: List[Array[Byte]]): RPush = args match {
case head :: tail =>
RPush(ChannelBuffers.wrappedBuffer(head), tail map ChannelBuffers.wrappedBuffer)
case _ => throw ClientError("Invalid use of RPush")
}
}
case class LTrim(key: ChannelBuffer, start: Long, end: Long) extends ListRangeCommand {
val command = Commands.LTRIM
}
object LTrim {
def apply(args: Seq[Array[Byte]]): LTrim = {
val list = trimList(args, 3, Commands.LTRIM)
val (start, end) = RequireClientProtocol.safe {
Tuple2(NumberFormat.toInt(BytesToString(list(1))), NumberFormat.toInt(BytesToString(list(2))))
}
LTrim(ChannelBuffers.wrappedBuffer(list(0)), start, end)
}
}
trait ListRangeCommand extends StrictKeyCommand {
val start: Long
val end: Long
val command: String
override def toChannelBuffer = {
RedisCodec.toUnifiedFormat(
Seq(
StringToChannelBuffer(command),
key,
StringToChannelBuffer(start.toString),
StringToChannelBuffer(end.toString)))
}
}
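// Illustrative sketch (not from the original source): constructing list commands and rendering
// them in the unified Redis wire format via toChannelBuffer.
object ListCommandExamples {
  val key = StringToChannelBuffer("mylist")
  val push = LPush(key, Seq(StringToChannelBuffer("a"), StringToChannelBuffer("b")))
  val range = LRange(key, 0, -1)
  val pushBytes: ChannelBuffer = push.toChannelBuffer
  val rangeBytes: ChannelBuffer = range.toChannelBuffer
}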
|
travisbrown/finagle
|
finagle-redis/src/main/scala/com/twitter/finagle/redis/protocol/commands/Lists.scala
|
Scala
|
apache-2.0
| 5,859 |
package name.abhijitsarkar.akka
import java.nio.file.{DirectoryStream, Path}
import akka.actor.{Actor, Props}
import akka.stream.scaladsl.Source
import scala.collection.immutable.{List => ImmutableList}
/**
* @author Abhijit Sarkar
*/
case class Message(dir: DirectoryStream[Path], text: String)
import scala.collection.JavaConverters._
class Transformer extends Actor {
def receive = {
case Message(dir, text) => {
val flow = Source(dir.asScala.toList).map(p => {
val lines = io.Source.fromFile(p.toFile).getLines().filter(_.contains(text)).map(_.trim).to[ImmutableList]
(p.toAbsolutePath.toString, lines)
}).filter(!_._2.isEmpty)
sender ! flow
}
}
}
object Transformer {
def props = Props(new Transformer)
}
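// Illustrative sketch (not from the original source): querying the Transformer with the ask
// pattern; the reply is a Source of (absolute path, matching lines) pairs. The directory path
// below is hypothetical.
object TransformerUsage {
  import akka.actor.ActorSystem
  import akka.pattern.ask
  import akka.util.Timeout
  import java.nio.file.{Files, Paths}
  import scala.concurrent.duration._
  def grepFor(system: ActorSystem, text: String) = {
    implicit val timeout: Timeout = Timeout(5.seconds)
    val transformer = system.actorOf(Transformer.props)
    val dir = Files.newDirectoryStream(Paths.get("/tmp/logs"))
    (transformer ? Message(dir, text)).mapTo[Source[(String, ImmutableList[String]), Any]]
  }
}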
|
asarkar/akka
|
akka-streams-learning/weather-streaming/src/main/scala/name/abhijitsarkar/akka/Transformer.scala
|
Scala
|
gpl-3.0
| 771 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.docgen
import java.io.{File, PrintWriter}
import scala.reflect.runtime.universe.typeTag
import io.deepsense.deeplang.doperables.Transformer
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperations.{EstimatorAsFactory, EstimatorAsOperation, EvaluatorAsFactory, TransformerAsOperation}
import io.deepsense.deeplang.params._
import io.deepsense.deeplang.params.choice.{AbstractChoiceParam, Choice, ChoiceParam, MultipleChoiceParam}
import io.deepsense.deeplang.{DOperation, DOperation1To2}
trait DocPageCreator {
/**
* @return number of pages created
*/
def createDocPages(
sparkOperations: Seq[OperationWithSparkClassName],
forceUpdate: Boolean): Int = {
sparkOperations.map { case OperationWithSparkClassName(operation, sparkClassName) =>
val sparkPageFile = new File(
"docs/operations/" + DocUtils.underscorize(operation.name) + ".md")
if(!sparkPageFile.exists() || forceUpdate) {
createDocPage(sparkPageFile, operation, sparkClassName)
1
} else {
0
}
}.sum
}
// scalastyle:off println
private def createDocPage(sparkPageFile: File, operation: DocumentedOperation, sparkClassName: String) = {
val writer = new PrintWriter(sparkPageFile)
writer.println(header(operation))
writer.println(description(operation))
writer.println()
writer.println(sparkDocLink(operation, sparkClassName))
writer.println()
writer.println(sinceSeahorseVersion(operation))
writer.println()
writer.println(input(operation))
writer.println()
writer.println(output(operation))
writer.println()
writer.println(parameters(operation))
appendExamplesSectionIfNecessary(writer, operation)
writer.flush()
writer.close()
println("Created doc page for " + operation.name)
}
// scalastyle:on println
private def header(operation: DocumentedOperation): String = {
s"""---
|layout: global
|displayTitle: ${operation.name}
|title: ${operation.name}
|description: ${operation.name}
|usesMathJax: true
|includeOperationsMenu: true
|---""".stripMargin
}
private def description(operation: DocumentedOperation): String = {
DocUtils.forceDotAtEnd(operation.description)
}
private def sparkDocLink(operation: DocumentedOperation, sparkClassName: String) = {
val scalaDocUrl = SparkOperationsDocGenerator.scalaDocPrefix + sparkClassName
val additionalDocs = operation.generateDocs match {
case None => ""
case Some(docs) => docs
}
s"""|This operation is ported from Spark ML.
|
|
|$additionalDocs
|
|
|For scala docs details, see
|<a target="_blank" href="$scalaDocUrl">$sparkClassName documentation</a>.""".stripMargin
}
private def sinceSeahorseVersion(operation: DocumentedOperation): String = {
s"**Since**: Seahorse ${operation.since.humanReadable}"
}
private def input(operation: DocumentedOperation): String = {
val inputTable = operation match {
case (t: TransformerAsOperation[_]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/dataframe.html\">DataFrame</a></code>",
"The input <code>DataFrame</code>.")
))
case (es: EstimatorAsOperation[_, _]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/dataframe.html\">DataFrame</a></code>",
"The input <code>DataFrame</code>.")
))
case (e: EstimatorAsFactory[_]) =>
"This operation does not take any input."
case (ev: EvaluatorAsFactory[_]) =>
"This operation does not take any input."
    }
    "## Input\n\n" + inputTable
}
private def output(operation: DocumentedOperation): String = {
val outputTable = operation match {
case (t: TransformerAsOperation[_]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/dataframe.html\">DataFrame</a></code>",
            "The output <code>DataFrame</code>."),
          ("<code><a href=\"../classes/transformer.html\">Transformer</a></code>",
            "A <code>Transformer</code> that allows to apply the operation on other" +
              " <code>DataFrames</code> using a <a href=\"transform.html\">Transform</a>.")
))
case (eso: EstimatorAsOperation[_, _]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/dataframe.html\">DataFrame</a></code>",
            "The output <code>DataFrame</code>."),
          ("<code><a href=\"../classes/transformer.html\">Transformer</a></code>",
            "A <code>Transformer</code> that allows to apply the operation on other" +
              " <code>DataFrames</code> using a <a href=\"transform.html\">Transform</a>.")
))
case (e: EstimatorAsFactory[_]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/estimator.html\">Estimator</a></code>",
            "An <code>Estimator</code> that can be used in " +
              "a <a href=\"fit.html\">Fit</a> operation.")
))
case (ev: EvaluatorAsFactory[_]) =>
        inputOutputTable(Seq(
          ("<code><a href=\"../classes/evaluator.html\">Evaluator</a></code>",
            "An <code>Evaluator</code> that can be used in " +
              "an <a href=\"evaluate.html\">Evaluate</a> operation.")
))
    }
    "## Output\n\n" + outputTable
}
/**
* @param data Sequence of tuples (typeQualifier, description)
*/
private def inputOutputTable(data: Seq[(String, String)]): String = {
"""
|<table>
|<thead>
|<tr>
|<th style="width:15%">Port</th>
|<th style="width:15%">Type Qualifier</th>
|<th style="width:70%">Description</th>
|</tr>
|</thead>
|<tbody>
""".stripMargin + tableRows(data) +
"""
|</tbody>
|</table>
|""".stripMargin
}
private def tableRows(data: Seq[(String, String)]): String = {
data.zipWithIndex.map(_ match {
case ((typeQualifier, description), index) =>
s"<tr><td><code>$index</code></td><td>$typeQualifier</td><td>$description</td></tr>"
}).reduce((s1, s2) => s1 + s2)
}
  private def parameters(operation: DocumentedOperation): String = {
    "## Parameters\n\n" + parametersTable(operation)
}
private def parametersTable(operation: DocumentedOperation): String = {
"""
|<table class="table">
|<thead>
|<tr>
|<th style="width:15%">Name</th>
|<th style="width:15%">Type</th>
|<th style="width:70%">Description</th>
|</tr>
|</thead>
|<tbody>
|""".stripMargin + extractParameters(operation) +
"""
|</tbody>
|</table>
|""".stripMargin
}
private def extractParameters(operation: DocumentedOperation): String = {
operation.params.map(param =>
ParameterDescription(
param.name,
sparkParamType(param),
param.description.map(desc => DocUtils.forceDotAtEnd(desc)).getOrElse("")
+ extraDescription(param)))
.map(paramDescription => parameterTableEntry(paramDescription))
.reduce((s1, s2) => s1 + s2)
}
private def sparkParamType(param: Param[_]): String = {
param match {
case (p: IOColumnsParam) => "InputOutputColumnSelector"
case (p: BooleanParam) => "Boolean"
case (p: ChoiceParam[_]) => "SingleChoice"
case (p: ColumnSelectorParam) => "MultipleColumnSelector"
case (p: NumericParam) => "Numeric"
case (p: MultipleNumericParam) => "MultipleNumeric"
case (p: MultipleChoiceParam[_]) => "MultipleChoice"
case (p: PrefixBasedColumnCreatorParam) => "String"
case (p: SingleColumnCreatorParam) => "String"
case (p: SingleColumnSelectorParam) => "SingleColumnSelector"
case (p: StringParam) => "String"
case _ => throw new RuntimeException(
"Unexpected parameter of class " + param.getClass.getSimpleName)
}
}
private def parameterTableEntry(paramDescription: ParameterDescription): String = {
val paramType = paramDescription.paramType
val anchor = paramTypeAnchor(paramType)
s"""
|<tr>
|<td><code>${paramDescription.name}</code></td>
|<td><code><a href="../parameter_types.html#$anchor">${paramType}</a></code></td>
|<td>${paramDescription.description}</td>
|</tr>
|""".stripMargin
}
private def paramTypeAnchor(paramType: String) = {
paramType.replaceAll("(.)([A-Z])", "$1-$2").toLowerCase
}
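  // For example (illustrative): paramTypeAnchor("SingleColumnSelector") yields
  // "single-column-selector", the anchor used in the parameter_types.html link above.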
private def extraDescription(param: Param[_]): String = {
param match {
case (p: IOColumnsParam) => ""
case (p: AbstractChoiceParam[_, _]) => " Possible values: " + choiceValues(p.choiceInstances)
case _ => ""
}
}
  private def choiceValues(choices: Seq[Choice]): String =
    "<code>[" + choices.map("\"" + _.name + "\"").mkString(", ") + "]</code>"
private case class ParameterDescription(
name: String,
paramType: String,
description: String)
private def appendExamplesSectionIfNecessary(writer: PrintWriter, operation: DocumentedOperation): Unit = {
val createExamplesSection: Boolean = operation match {
// It is impossible to match DOperation1To2[DataFrame, DataFrame, Transformer] in match-case
case op: DOperation1To2[_, _, _] =>
(op.tTagTI_0.tpe <:< typeTag[DataFrame].tpe) &&
(op.tTagTO_0.tpe <:< typeTag[DataFrame].tpe) &&
(op.tTagTO_1.tpe <:< typeTag[Transformer].tpe)
case op =>
false
}
if (createExamplesSection) {
// scalastyle:off println
      println("\t\tAdding 'Example' section for " + operation.name)
writer.println()
writer.println(examples(operation))
// scalastyle:on println
}
}
private def examples(operation: DocumentedOperation): String = {
"{% markdown operations/examples/" + operation.getClass.getSimpleName + ".md %}"
}
}
|
deepsense-io/seahorse-workflow-executor
|
docgen/src/main/scala/io/deepsense/docgen/DocPageCreator.scala
|
Scala
|
apache-2.0
| 10,566 |
package com.rasterfoundry.datamodel
import io.circe._
import cats.syntax.either._
sealed abstract class JobStatus(val repr: String) {
override def toString = repr
}
object JobStatus {
case object Uploading extends JobStatus("UPLOADING")
case object Success extends JobStatus("SUCCESS")
case object Failure extends JobStatus("FAILURE")
case object PartialFailure extends JobStatus("PARTIALFAILURE")
case object Queued extends JobStatus("QUEUED")
case object Processing extends JobStatus("PROCESSING")
def fromString(s: String): JobStatus = s.toUpperCase match {
case "UPLOADING" => Uploading
case "SUCCESS" => Success
case "FAILURE" => Failure
case "PARTIALFAILURE" => PartialFailure
case "QUEUED" => Queued
case "PROCESSING" => Processing
case _ => throw new Exception(s"Invalid string: $s")
}
implicit val jobStatusEncoder: Encoder[JobStatus] =
Encoder.encodeString.contramap[JobStatus](_.toString)
implicit val jobStatusDecoder: Decoder[JobStatus] =
Decoder.decodeString.emap { str =>
Either.catchNonFatal(fromString(str)).leftMap(_ => "JobStatus")
}
}
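// Illustrative sketch (not from the original source): round-tripping JobStatus through JSON with
// the circe codecs defined above.
object JobStatusExamples {
  import io.circe.syntax._
  val encoded: Json = (JobStatus.Success: JobStatus).asJson // "SUCCESS"
  val decoded = Decoder[JobStatus].decodeJson(encoded)      // Right(Success)
  val parsed = JobStatus.fromString("queued")               // Queued (case-insensitive)
}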
|
azavea/raster-foundry
|
app-backend/datamodel/src/main/scala/JobStatus.scala
|
Scala
|
apache-2.0
| 1,175 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.runtime
import scala.runtime.ClassValueCompat._
private[scala] abstract class ClassValueCompat[T] extends ClassValueInterface[T] { self =>
private val instance: ClassValueInterface[T] =
if (classValueAvailable) new JavaClassValue()
else new FallbackClassValue()
private class JavaClassValue extends ClassValue[T] with ClassValueInterface[T] {
override def computeValue(cls: Class[_]): T = self.computeValue(cls)
}
private class FallbackClassValue extends ClassValueInterface[T] {
override def get(cls: Class[_]): T = self.computeValue(cls)
override def remove(cls: Class[_]): Unit = {}
}
def get(cls: Class[_]): T = instance.get(cls)
def remove(cls: Class[_]): Unit = instance.remove(cls)
protected def computeValue(cls: Class[_]): T
}
private[scala] object ClassValueCompat {
trait ClassValueInterface[T] {
def get(cls: Class[_]): T
def remove(cls: Class[_]): Unit
}
private val classValueAvailable: Boolean = try {
Class.forName("java.lang.ClassValue", false, classOf[Object].getClassLoader)
true
} catch {
case _: ClassNotFoundException => false
}
}
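// Illustrative sketch (not part of the original source): a per-class cache that degrades to
// recomputation on JVMs without java.lang.ClassValue.
private[scala] object SimpleNameCache extends ClassValueCompat[String] {
  override protected def computeValue(cls: Class[_]): String = cls.getSimpleName
  // SimpleNameCache.get(classOf[String]) == "String"
}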
|
scala/scala
|
src/library/scala/runtime/ClassValueCompat.scala
|
Scala
|
apache-2.0
| 1,435 |
package org.zouzias.qclocktwo.phrases
import scala.collection.mutable.ArrayBuffer
/**
* Word phrases as an array of words
*/
class TimePhrases {
type WordPhrase = Array[String]
private val phrases_ : ArrayBuffer[WordPhrase] = new ArrayBuffer()
/**
* Add a new phrase
*
* @param phrase Phrase as an array of words
*/
def addPhrase(phrase: WordPhrase): Unit = {
phrases_.append(phrase)
}
/**
* Return all phrases
*
* @return Array of phrases
*/
def phrases(): Array[WordPhrase] = {
phrases_.toArray
}
}
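// Illustrative usage (not from the original source): collecting phrases word by word.
object TimePhrasesExample {
  val phrases = new TimePhrases
  phrases.addPhrase(Array("IT", "IS", "HALF", "PAST", "NINE"))
  phrases.addPhrase(Array("IT", "IS", "TEN", "TO", "FOUR"))
  val all: Array[Array[String]] = phrases.phrases()
}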
|
zouzias/qlocktwo-grid-generator
|
src/main/scala/org/zouzias/qclocktwo/phrases/TimePhrases.scala
|
Scala
|
apache-2.0
| 559 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript
import slamdata.Predef._
import quasar.{RenderTree, NonTerminal, RenderTreeT}, RenderTree.ops._
import quasar.common.JoinType
import quasar.contrib.matryoshka._
import quasar.fp._
import matryoshka._
import matryoshka.data._
import monocle.macros.Lenses
import scalaz._, Scalaz._
/** Applies a function across two datasets, in the cases where the JoinFunc
* evaluates to true. The branches represent the divergent operations applied
* to some common src. Each branch references the src exactly once. (Since no
* constructor has more than one recursive component, it’s guaranteed that
* neither side references the src _more_ than once.)
*
* This case represents a full θJoin, but we could have an algebra that
* rewrites it as
* Filter(_, EquiJoin(...))
* to simplify behavior for the backend.
*/
@Lenses final case class ThetaJoin[T[_[_]], A](
src: A,
lBranch: FreeQS[T],
rBranch: FreeQS[T],
on: JoinFunc[T],
f: JoinType,
combine: JoinFunc[T])
object ThetaJoin {
implicit def equal[T[_[_]]: BirecursiveT: EqualT]: Delay[Equal, ThetaJoin[T, ?]] =
new Delay[Equal, ThetaJoin[T, ?]] {
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def apply[A](eq: Equal[A]) =
Equal.equal {
case (ThetaJoin(a1, l1, r1, o1, f1, c1), ThetaJoin(a2, l2, r2, o2, f2, c2)) =>
eq.equal(a1, a2) && l1 ≟ l2 && r1 ≟ r2 && o1 ≟ o2 && f1 ≟ f2 && c1 ≟ c2
}
}
implicit def traverse[T[_[_]]]: Traverse[ThetaJoin[T, ?]] =
new Traverse[ThetaJoin[T, ?]] {
def traverseImpl[G[_]: Applicative, A, B](
fa: ThetaJoin[T, A])(
f: A => G[B]) =
f(fa.src) ∘ (ThetaJoin(_, fa.lBranch, fa.rBranch, fa.on, fa.f, fa.combine))
}
implicit def show[T[_[_]]: ShowT]: Delay[Show, ThetaJoin[T, ?]] =
new Delay[Show, ThetaJoin[T, ?]] {
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def apply[A](showA: Show[A]): Show[ThetaJoin[T, A]] = Show.show {
case ThetaJoin(src, lBr, rBr, on, f, combine) =>
Cord("ThetaJoin(") ++
showA.show(src) ++ Cord(",") ++
lBr.show ++ Cord(",") ++
rBr.show ++ Cord(",") ++
on.show ++ Cord(",") ++
f.show ++ Cord(",") ++
combine.show ++ Cord(")")
}
}
implicit def renderTree[T[_[_]]: RenderTreeT: ShowT]: Delay[RenderTree, ThetaJoin[T, ?]] =
new Delay[RenderTree, ThetaJoin[T, ?]] {
val nt = List("ThetaJoin")
@SuppressWarnings(Array("org.wartremover.warts.Recursion"))
def apply[A](r: RenderTree[A]): RenderTree[ThetaJoin[T, A]] = RenderTree.make {
case ThetaJoin(src, lBr, rBr, on, f, combine) =>
NonTerminal(nt, None, List(
r.render(src),
lBr.render,
rBr.render,
on.render,
f.render,
combine.render))
}
}
implicit def mergeable[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT]
: Mergeable.Aux[T, ThetaJoin[T, ?]] =
new Mergeable[ThetaJoin[T, ?]] {
type IT[F[_]] = T[F]
val merge = new Merge[IT]
val rewrite = new Rewrite[IT]
def mergeSrcs(
left: Mergeable.MergeSide[IT, ThetaJoin[T, ?]],
right: Mergeable.MergeSide[IT, ThetaJoin[T, ?]]) =
(left.source, right.source) match {
case (ThetaJoin(s1, l1, r1, o1, f1, c1), ThetaJoin(_, l2, r2, o2, f2, c2)) if f1 ≟ f2 => {
val left1 = rebaseBranch(l1, left.access)
val right1 = rebaseBranch(r1, left.access)
val left2 = rebaseBranch(l2, right.access)
val right2 = rebaseBranch(r2, right.access)
def updateJoin(func: JoinFunc[IT], l: FreeMap[IT], r: FreeMap[IT]): JoinFunc[IT] =
func.flatMap {
case LeftSide => l.as(LeftSide)
case RightSide => r.as(RightSide)
}
merge.tryMergeBranches(rewrite)(left1, right1, left2, right2).toOption.map {
case (resL, resR) => {
val onL: JoinFunc[IT] = updateJoin(o1, resL.lval, resR.lval)
val onR: JoinFunc[IT] = updateJoin(o2, resL.rval, resR.rval)
// The implication here is that
// `(l join r on X) as lj inner join (l join r on Y) as rj on lj = rj`
// is equivalent to
// `l join r on X and Y`
val on: JoinFunc[IT] = (onL ≟ onR).fold(onL, Free.roll(MFC(MapFuncsCore.And(onL, onR))))
val cL: JoinFunc[IT] = updateJoin(c1, resL.lval, resR.lval)
val cR: JoinFunc[IT] = updateJoin(c2, resL.rval, resR.rval)
val (cond, l, r) = concat(cL, cR)
SrcMerge(ThetaJoin(s1, resL.src, resR.src, on, f1, cond), l, r)
}
}
}
case (_, _) => None
}
}
}
|
drostron/quasar
|
connector/src/main/scala/quasar/qscript/ThetaJoin.scala
|
Scala
|
apache-2.0
| 5,508 |
package io.aigar.controller
import io.aigar.controller.response.{ErrorResponse}
import org.scalatra._
import scalate.ScalateSupport
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json._
trait AigarStack
extends ScalatraServlet
with ScalateSupport
with JacksonJsonSupport
with ContentEncodingSupport
{
protected implicit val jsonFormats: Formats = DefaultFormats
before() {
contentType = formats("json")
}
trap(400) {
returnError(400, "invalid request")
}
trap(403) {
returnError(403, "forbidden")
}
trap(404) {
returnError(404, "not found")
}
trap(422) {
returnError(422, "unprocessable entity")
}
trap(500) {
returnError(500, "internal server error")
}
def returnError(statusCode: Int, message: String): ErrorResponse = {
status = statusCode
contentType = formats("json")
ErrorResponse(message)
}
}
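// Illustrative sketch (not part of the original source): a controller mixes in
// AigarStack to inherit the JSON content type and the error traps above.
// `LeaderboardController` and its route body are hypothetical.
//
//   class LeaderboardController extends AigarStack {
//     get("/") {
//       Map("status" -> "ok")
//     }
//   }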
|
DrPandemic/aigar.io
|
game/src/main/scala/io/aigar/controller/AigarStack.scala
|
Scala
|
mit
| 901 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence
import akka.Done
import akka.event.Logging
import akka.stream.scaladsl.Flow
import scala.concurrent.Future
import akka.NotUsed
import akka.persistence.query.NoOffset
import akka.persistence.query.Offset
object ReadSideProcessor {
/**
   * A read side offset processor.
*
* This is responsible for the actual read side handling, including handling offsets and the events themselves.
*/
abstract class ReadSideHandler[Event <: AggregateEvent[Event]] {
/**
* Prepare the database for all processors.
*
* This will be invoked at system startup. It is guaranteed to only be invoked once at a time across the entire
* cluster, and so is safe to be used to perform actions like creating tables, that could cause problems if
* done from multiple nodes.
*
* It will be invoked again if it fails, and it may be invoked multiple times as nodes of the cluster go up or
* down. Unless the entire system is restarted, there is no way to guarantee that it will be invoked at a
* particular time - in particular, it should not be used for doing upgrades unless the entire system is
* restarted and a new cluster built from scratch.
*
* @return A `Future` that is redeemed when preparation is finished.
*/
def globalPrepare(): Future[Done] =
Future.successful(Done)
/**
* Prepare this processor.
*
* The primary purpose of this method is to load the last offset that was processed, so that read side
* processing can continue from that offset.
*
* This also provides an opportunity for processors to do any initialisation activities, such as creating or
* updating database tables, or migrating data.
*
* This will be invoked at least once for each tag, and may be invoked multiple times, such as in the event of
* failure.
*
* @param tag The tag to get the offset for.
* @return A `Future` that is redeemed when preparation is finished.
*/
def prepare(tag: AggregateEventTag[Event]): Future[Offset] =
      Future.successful(NoOffset)
/**
* Flow to handle the events.
*
* If the handler does any blocking, this flow should be configured to use a dispatcher that is configured to
* allow for that blocking.
*/
def handle(): Flow[EventStreamElement[Event], Done, NotUsed]
}
}
/**
* A read side processor.
*
* Read side processors consume events produced by [[com.lightbend.lagom.scaladsl.persistence.PersistentEntity]]
* instances, and update some read side data store that is optimized for queries.
*
* The events they consume must be tagged, and a read side is able to consume events of one or more tags. Events are
* usually tagged according to some supertype of event, for example, events may be tagged as <code>Order</code> events.
* They may also be tagged according to a hash of the ID of the entity associated with the event - this allows read
* side event handling to be sharded across many nodes. Tagging is done using
* [[com.lightbend.lagom.scaladsl.persistence.AggregateEventTag]].
*
* Read side processors are responsible for tracking what events they have already seen. This is done using offsets,
* which are sequential values associated with each event. Note that end users typically will not need to handle
 * offsets themselves; this is provided by the Lagom support specific to the read side datastore, so end users can
* just focus on handling the events themselves.
*/
abstract class ReadSideProcessor[Event <: AggregateEvent[Event]] {
/**
* Return a [[ReadSideProcessor#ReadSideHandler]] for the given offset type.
*
* @return The offset processor.
*/
def buildHandler(): ReadSideProcessor.ReadSideHandler[Event]
/**
* The tags to aggregate.
*
* This must return at least one tag to aggregate. Read side processors will be sharded over the cluster by these
* tags, so if events are tagged by a shard key, the read side processing load can be distributed across the
* cluster.
*
* @return The tags to aggregate.
*/
def aggregateTags: Set[AggregateEventTag[Event]]
/**
* The name of this read side.
*
 * This name should be unique among the read sides and entity types of the service. By default it uses the
* short class name of the concrete `ReadSideProcessor` class. Subclasses may override to define other type names.
* It is wise to override and retain the original name when the class name is changed because this name is used to
* identify read sides throughout the cluster.
*/
def readSideName: String =
Logging.simpleName(getClass)
}
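// Illustrative sketch (not part of the original source): a minimal concrete
// processor. `OrderEvent` and `OrderEvent.Tag` are hypothetical, and a real
// handler would normally come from the datastore-specific read side support
// (which also persists offsets) rather than being built by hand.
//
//   class OrderReadSideProcessor extends ReadSideProcessor[OrderEvent] {
//     override def aggregateTags: Set[AggregateEventTag[OrderEvent]] = Set(OrderEvent.Tag)
//     override def buildHandler(): ReadSideProcessor.ReadSideHandler[OrderEvent] =
//       new ReadSideProcessor.ReadSideHandler[OrderEvent] {
//         override def handle(): Flow[EventStreamElement[OrderEvent], Done, NotUsed] =
//           Flow[EventStreamElement[OrderEvent]].map { _ => /* update a read model */ Done }
//       }
//   }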
|
rcavalcanti/lagom
|
persistence/scaladsl/src/main/scala/com/lightbend/lagom/scaladsl/persistence/ReadSideProcessor.scala
|
Scala
|
apache-2.0
| 4,786 |
package com.twitter.io
import com.twitter.io.Writer.ClosableWriter
import com.twitter.util.{Future, FuturePool, Promise, Return, Throw, Time}
import java.io.OutputStream
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
/**
* Construct a Writer from a given OutputStream.
*/
private[io]
class OutputStreamWriter(out: OutputStream, bufsize: Int) extends ClosableWriter {
import com.twitter.io.OutputStreamWriter._
private[this] val done = new Promise[Unit]
private[this] val writeOp = new AtomicReference[Buf => Future[Unit]](doWrite)
// Byte array reused on each write to avoid multiple allocations.
private[this] val bytes = new Array[Byte](bufsize)
@tailrec
private[this] def drain(buf: Buf): Unit = {
if (buf.isEmpty) out.flush() else {
// The source length is min(buf.length, bytes.length).
val b = buf.slice(0, bytes.length)
// Copy from the source to byte array.
b.write(bytes, 0)
// Write the bytes that were copied.
out.write(bytes, 0, b.length)
// Recurse on the remainder.
drain(buf.slice(bytes.length, Int.MaxValue))
}
}
private[this] def doWrite: Buf => Future[Unit] = buf =>
FuturePool.interruptibleUnboundedPool { drain(buf) }
def write(buf: Buf): Future[Unit] =
if (done.isDefined) done else (
done or writeOp.getAndSet(_ => Future.exception(WriteExc))(buf)
) transform {
case Return(_) =>
writeOp.set(doWrite)
Future.Done
case Throw(cause) =>
// We don't need to wait for the close, we care only that it is called.
if (cause != WriteExc) close()
Future.exception(cause)
}
def fail(cause: Throwable): Unit =
done.updateIfEmpty(Throw(cause))
def close(deadline: Time): Future[Unit] =
if (done.updateIfEmpty(Throw(CloseExc))) FuturePool.unboundedPool {
out.close()
} else Future.Done
}
private object OutputStreamWriter {
val WriteExc = new IllegalStateException("write while writing")
val CloseExc = new IllegalStateException("write after close")
}
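// Illustrative usage sketch (not part of the original source): this class is
// private[io], so callers normally go through the factory on Writer, assuming
// this version exposes it. `out` is any java.io.OutputStream.
//
//   val writer = Writer.fromOutputStream(out, 4096)
//   writer.write(Buf.Utf8("hello")).before(writer.close())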
|
travisbrown/util
|
util-core/src/main/scala/com/twitter/io/OutputStreamWriter.scala
|
Scala
|
apache-2.0
| 2,085 |
/*
* Copyright (c) 2015 Elder Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eri.viz.gui.jfx.monadic
import scala.language.{postfixOps, reflectiveCalls}
/**
* Test rig for constructing chained parent/child bindings using functional idioms.
*
* @author <a href="mailto:[email protected]">Simeon H.K. Fitch</a>
* @since 9/30/15
*/
class MonadicBindingTest extends MonadicJFXTestSpec {
describe("monadic laws") {
import oovm.pure
it("should fulfill associative law") {
new Fixture {
val mb = propA
val f = (a: A) ⇒ a.propD
val g = (a: A) ⇒ a.propB
assert(mb.flatMap(f).flatMap(g).map(_.toUpperCase).get ===
mb.flatMap(a ⇒ f(a).flatMap(g)).map(_.toUpperCase).get)
}
()
}
it("should fulfill left identity law") {
new Fixture {
val mb = propA
val f = (a: A) ⇒ a.propD
assert(pure(mb.get).flatMap(f).get === f(mb.get).get)
}
()
}
it("should fulfill right identity law") {
new Fixture {
val mb = propA
assert(mb.flatMap(pure).get === mb.get)
}
()
}
}
describe("null semantics") {
it("should map null") {
new Fixture {
val mb = prop0
assert(mb.map(_.stat).get === null)
assert(mb.map(_.stat).map(_.toUpperCase).get === null)
}
()
}
it("should flatmap null") {
new Fixture {
val mb = prop0
assert(mb.flatMap(_.propC).get === null)
assert(mb.flatMap(_.propC).map(_._1).get === null)
}
()
}
}
describe("change propagation") {
it("should propagate first order map changes") {
new Fixture {
val mb = propA
val der = mb.map(_.stat)
der.addListener(countingListener)
propA.set(new A("somethingelse"))
assert(counter.get === 1)
assert(der.get === "somethingelse")
propA.set(null)
assert(counter.get === 2)
assert(der.get === null)
}
()
}
it("should propagate first order flatMap changes") {
new Fixture {
val mb = propA
val der = mb.flatMap(_.propB)
der.addListener(countingListener)
propA.get.propB.set("somethingelse")
assert(counter.get() === 1)
propA.set(new A())
assert(counter.get() === 2)
propA.set(null)
assert(counter.get() === 3)
assert(der.get === null)
}
()
}
it("should propagate second order flatMap changes") {
new Fixture {
val mb = propA
val der = mb.flatMap(_.propD).flatMap(_.propB)
der.addListener(countingListener)
propA.get.propD.set(new A())
assert(counter.get() === 1)
propA.get.propD.get.propB.set("somethingelse")
assert(counter.get() === 2)
propA.set(new A())
assert(counter.get() === 3)
}
()
}
}
}
|
ElderResearch/monadic-jfx
|
src/test/scala/eri/viz/gui/jfx/monadic/MonadicBindingTest.scala
|
Scala
|
apache-2.0
| 3,471 |
package com.clinkle.sql
import java.sql.{ResultSet, Statement, Connection}
trait Execable[T] extends Node { query =>
def exec(implicit executor: Executor): Stream[T]
def tryFirst(implicit executor: Executor): Option[T] = exec.headOption
def first(implicit executor: Executor): T = tryFirst.get
def tryOnly(implicit executor: Executor): Option[T] = {
val all = exec
assert(all.isEmpty || all.tail.isEmpty)
all.headOption
}
def only(implicit executor: Executor): T = tryOnly.get
}
trait UpdateExec extends Node { query =>
def exec(implicit executor: Executor): Int = executor.executeUpdate(query)
def updateAtMostOne(implicit executor: Executor): Int = {
    val numUpdated = exec
    assert(numUpdated <= 1, s"Query ${ query.serial } updated more than one row.")
    numUpdated
  }
  def updateOne(implicit executor: Executor): Int = {
    val numUpdated = updateAtMostOne
    assert(numUpdated >= 1, s"Query ${ query.serial } updated fewer than one row.")
    numUpdated
}
}
trait OnDuplicateKeyUpdateExec extends Node { query =>
// For normal inserts returns the number of rows inserted.
// When used with `ON DUPLICATE KEY UPDATE` returns 1 if a new row was inserted and 2 if the row was updated.
def exec(implicit executor: Executor): Int = executor.executeUpdate(query)
}
trait InsertExec extends OnDuplicateKeyUpdateExec { query =>
def only(implicit executor: Executor): Int = {
val inserted = exec
    assert(inserted == 1, s"Query ${ query.serial } did not insert exactly one row.")
inserted
}
def insertAtMostOne(implicit executor: Executor): Int = {
val inserted = exec
assert(inserted <= 1, s"Query ${ query.serial } inserted $inserted rows.")
inserted
}
def keys[T](implicit executor: Executor, primitive: Primitive[T]): Stream[T] = {
val rows = executor.executeKeys(query)
def next: Stream[T] = {
if (!rows.next())
Stream.empty
else
primitive.extract(rows, 1) #:: next
}
next
}
def key[T](implicit executor: Executor, primitive: Primitive[T]): T = {
val allKeys = keys
assert(allKeys.nonEmpty, s"Query ${ query.serial } did not insert any keys.")
assert(allKeys.tail.isEmpty, s"Query ${ query.serial } inserted more than one key.")
allKeys.head
}
}
trait Executor {
def executeQuery(query: Node): ResultSet
def executeUpdate(query: Node): Int
def executeKeys(query: Node): ResultSet
}
object Executor {
implicit def ConnectionExecutor(implicit conn: Connection): Executor = new Executor {
override def executeQuery(query: Node): ResultSet = {
val sql = query.sql
val stmt = conn.prepareStatement(sql)
query.setParams(stmt, 1)
stmt.executeQuery()
}
override def executeUpdate(query: Node): Int = {
val sql = query.sql
val stmt = conn.prepareStatement(sql, Statement.NO_GENERATED_KEYS)
query.setParams(stmt, 1)
stmt.executeUpdate()
}
override def executeKeys(query: Node): ResultSet = {
val sql = query.sql
val stmt = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)
query.setParams(stmt, 1)
stmt.executeUpdate()
stmt.getGeneratedKeys
}
}
}
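// Illustrative usage sketch (not part of the original source): with a
// java.sql.Connection in implicit scope, the ConnectionExecutor above is derived
// automatically, so query nodes can be executed directly. `selectQuery`,
// `insertQuery` and `dataSource` are hypothetical.
//
//   implicit val conn: Connection = dataSource.getConnection
//   val rows = selectQuery.exec      // Stream[T] via Execable
//   val inserted = insertQuery.only  // asserts exactly one row was inserted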
|
Clinkle/stilts
|
src/com/clinkle/sql/Exec.scala
|
Scala
|
apache-2.0
| 3,226 |
/*
* Original implementation (C) 2009-2011 Debasish Ghosh
* Adapted and extended in 2011 by Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.spray.json
trait CollectionFormats {
/**
* Supplies the JsonFormat for Lists.
*/
implicit def listFormat[T :JsonFormat] = new RootJsonFormat[List[T]] {
def write(list: List[T]) = JsArray(list.map(_.toJson))
def read(value: JsValue) = value match {
case JsArray(elements) => elements.map(_.convertTo[T])
case x => deserializationError("Expected List as JsArray, but got " + x)
}
}
/**
* Supplies the JsonFormat for Arrays.
*/
implicit def arrayFormat[T :JsonFormat :ClassManifest] = new RootJsonFormat[Array[T]] {
def write(array: Array[T]) = JsArray(array.map(_.toJson).toList)
def read(value: JsValue) = value match {
case JsArray(elements) => elements.map(_.convertTo[T]).toArray[T]
case x => deserializationError("Expected Array as JsArray, but got " + x)
}
}
/**
* Supplies the JsonFormat for Maps. The implicitly available JsonFormat for the key type K must
* always write JsStrings, otherwise a [[cc.spray.json.SerializationException]] will be thrown.
*/
implicit def mapFormat[K :JsonFormat, V :JsonFormat] = new RootJsonFormat[Map[K, V]] {
def write(m: Map[K, V]) = JsObject {
m.map { field =>
field._1.toJson match {
case JsString(x) => x -> field._2.toJson
case x => throw new SerializationException("Map key must be formatted as JsString, not '" + x + "'")
}
}
}
def read(value: JsValue) = value match {
case x: JsObject => x.fields.map { field =>
(JsString(field._1).convertTo[K], field._2.convertTo[V])
} (collection.breakOut)
case x => deserializationError("Expected Map as JsObject, but got " + x)
}
}
import collection.{immutable => imm}
implicit def immIterableFormat[T :JsonFormat] = viaList[imm.Iterable[T], T](list => imm.Iterable(list :_*))
implicit def immSeqFormat[T :JsonFormat] = viaList[imm.Seq[T], T](list => imm.Seq(list :_*))
implicit def immIndexedSeqFormat[T :JsonFormat] = viaList[imm.IndexedSeq[T], T](list => imm.IndexedSeq(list :_*))
implicit def immLinearSeqFormat[T :JsonFormat] = viaList[imm.LinearSeq[T], T](list => imm.LinearSeq(list :_*))
implicit def immSetFormat[T :JsonFormat] = viaList[imm.Set[T], T](list => imm.Set(list :_*))
implicit def vectorFormat[T :JsonFormat] = viaList[Vector[T], T](list => Vector(list :_*))
import collection._
implicit def iterableFormat[T :JsonFormat] = viaList[Iterable[T], T](list => Iterable(list :_*))
implicit def seqFormat[T :JsonFormat] = viaList[Seq[T], T](list => Seq(list :_*))
implicit def indexedSeqFormat[T :JsonFormat] = viaList[IndexedSeq[T], T](list => IndexedSeq(list :_*))
implicit def linearSeqFormat[T :JsonFormat] = viaList[LinearSeq[T], T](list => LinearSeq(list :_*))
implicit def setFormat[T :JsonFormat] = viaList[Set[T], T](list => Set(list :_*))
/**
* A JsonFormat construction helper that creates a JsonFormat for an Iterable type I from a builder function
* List => I.
*/
def viaList[I <: Iterable[T], T :JsonFormat](f: List[T] => I): RootJsonFormat[I] = new RootJsonFormat[I] {
def write(iterable: I) = JsArray(iterable.map(_.toJson).toList)
def read(value: JsValue) = value match {
case JsArray(elements) => f(elements.map(_.convertTo[T]))
case x => deserializationError("Expected Collection as JsArray, but got " + x)
}
}
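  // Illustrative sketch (not part of the original source): viaList also makes it
  // straightforward to support further collection types, e.g. an immutable Queue:
  //
  //   implicit def queueFormat[T :JsonFormat] =
  //     viaList[imm.Queue[T], T](list => imm.Queue(list :_*))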
}
|
beamly/spray-json
|
src/main/scala/cc/spray/json/CollectionFormats.scala
|
Scala
|
apache-2.0
| 4,113 |
/*
* Copyright 2014 Adam Rosenberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nalloc.bitb.kcits.sandbox.map
import org.nalloc.bitb.kcits.optional._
import org.nalloc.bitb.kcits.sandbox.Inspectable
class BlockInlineLambda extends Inspectable {
private[this] val bInlineComplex = b.map { x =>
val y = x + 5
y * 3
}
private[this] val sInlineComplex = s.map { x =>
val y = x + 5
y * 3
}
private[this] val iInlineComplex = i.map { x =>
val y = x + 5
y * 3
}
private[this] val lInlineComplex = l.map { x =>
val y = x + 5
y * 3
}
private[this] val fInlineComplex = f.map { x =>
val y = x + 5
y * 3
}
private[this] val dInlineComplex = d.map { x =>
val y = x + 5
y * 3
}
private[this] val stInlineComplex = st.map { x =>
val y = x + x
y + y
}
}
|
arosenberger/nalloc_2.10
|
sandbox/src/main/scala/org/nalloc/bitb/kcits/sandbox/map/BlockInlineLambda.scala
|
Scala
|
apache-2.0
| 1,355 |
package rescala.operator
import rescala.compat.FlattenCollectionCompat
import scala.annotation.implicitNotFound
import scala.reflect.ClassTag
import rescala.interface.RescalaInterface
trait FlattenApi extends FlattenCollectionCompat {
self: RescalaInterface =>
@implicitNotFound(msg =
"Could not flatten ${A}. Try to select a specific flatten strategy from rescala.reactives.Flatten.")
trait Flatten[-A, R] {
def apply(sig: A): R
}
/** Flatten a Signal[Signal[B]\] into a Signal[B] that changes whenever the outer or inner signal changes. */
implicit def flattenImplicitForsignal[B](implicit
ticket: CreationTicket
): Flatten[Signal[Signal[B]], Signal[B]] =
new Flatten[Signal[Signal[B]], Signal[B]] {
def apply(sig: Signal[Signal[B]]): Signal[B] =
Signals.dynamic(sig) { t => t.depend(t.depend(sig).resource) }
}
/** Flatten a Signal[Array[Signal[B]\]\] into a Signal[Array[B]\] where the new Signal updates whenever any of the inner or the outer signal updates */
implicit def flattenImplicitForarraySignals[B: ClassTag, Sig[U] <: Signal[U]](implicit
ticket: CreationTicket
): Flatten[Signal[Array[Sig[B]]], Signal[Array[B]]] =
new Flatten[Signal[Array[Sig[B]]], Signal[Array[B]]] {
def apply(sig: Signal[Array[Sig[B]]]): Signal[Array[B]] =
Signals.dynamic(sig) { t => t.depend(sig) map { (r: Signal[B]) => t.depend(r) } }
}
/** Flatten a Signal[Option[Signal[B]\]\] into a Signal[Option[B]\] where the new Signal updates whenever any of the inner or the outer signal updates */
implicit def flattenImplicitForoptionSignal[B, Sig[U] <: Signal[U]](implicit
ticket: CreationTicket
): Flatten[Signal[Option[Sig[B]]], Signal[Option[B]]] =
new Flatten[Signal[Option[Sig[B]]], Signal[Option[B]]] {
def apply(sig: Signal[Option[Sig[B]]]): Signal[Option[B]] =
Signals.dynamic(sig) { t => t.depend(sig) map { (r: Signal[B]) => t.depend(r) } }
}
  /** Flatten a Signal[Event[B]\] into an Event[B] where the new Event fires whenever the current inner event fires */
implicit def flattenImplicitForevent[A, B, Evnt[A1] <: Event[A1]](implicit
ticket: CreationTicket
): Flatten[Signal[Evnt[B]], Event[B]] =
new Flatten[Signal[Evnt[B]], Event[B]] {
def apply(sig: Signal[Evnt[B]]): Event[B] = Events.dynamic(sig) { t => t.depend(t.depend(sig)) }
}
  /** Flatten an Event[Option[B]\] into an Event[B] that fires whenever the inner option is defined. */
implicit def flattenImplicitForoption[A, B](implicit
ticket: CreationTicket
): Flatten[Event[Option[B]], Event[B]] =
new Flatten[Event[Option[B]], Event[B]] {
def apply(event: Event[Option[B]]): Event[B] =
Events.static(event) { t => t.dependStatic(event).flatten }
}
}
|
guidosalva/REScala
|
Code/Main/shared/src/main/scala/rescala/operator/FlattenApi.scala
|
Scala
|
apache-2.0
| 2,790 |
package io.swagger.client.model
import io.swagger.client.core.ApiModel
import org.joda.time.DateTime
case class Update (
/* id */
id: Option[Int],
/* user_id */
userId: Int,
/* connector_id */
connectorId: Int,
/* number_of_measurements */
numberOfMeasurements: Int,
/* success */
success: Boolean,
/* message */
message: String,
/* created_at */
createdAt: Option[DateTime],
/* updated_at */
updatedAt: Option[DateTime])
extends ApiModel
|
QuantiModo/QuantiModo-SDK-Akka-Scala
|
src/main/scala/io/swagger/client/model/Update.scala
|
Scala
|
gpl-2.0
| 479 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.util
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.stats.Cardinality
import org.locationtech.geomesa.utils.text.KVPairParser
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AccumuloSchemaBuilderTest extends Specification {
import scala.collection.JavaConverters._
"AccumuloSchemaBuilder" should {
"allow join indices" >> {
val spec = AccumuloSchemaBuilder.builder()
.addString("foo").withJoinIndex()
.addInt("bar").withJoinIndex(Cardinality.HIGH)
.spec
spec mustEqual "foo:String:index=join,bar:Int:index=join:cardinality=high"
}
"configure table splitters as strings" >> {
val config = Map("id.type" -> "digit", "fmt" ->"%02d", "min" -> "0", "max" -> "99")
val sft1 = AccumuloSchemaBuilder.builder()
.addInt("i")
.addLong("l")
.userData
.splits(config)
.build("test")
// better - uses class directly (or at least less annoying)
val sft2 = AccumuloSchemaBuilder.builder()
.userData
.splits(config)
.addInt("i")
.addLong("l")
.build("test")
def test(sft: SimpleFeatureType) = {
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs.TableSplitterOpts
sft.getAttributeCount mustEqual 2
sft.getAttributeDescriptors.asScala.map(_.getLocalName) must containAllOf(List("i", "l"))
val opts = KVPairParser.parse(sft.getUserData.get(TableSplitterOpts).asInstanceOf[String])
opts.toSeq must containTheSameElementsAs(config.toSeq)
}
List(sft1, sft2) forall test
}
}
}
|
locationtech/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/util/AccumuloSchemaBuilderTest.scala
|
Scala
|
apache-2.0
| 2,283 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.attribute.Attribute
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.support.requisite._
/**
 * Mobile Spellcasting (Passive): characters normally move at half their normal movement speed while
 * casting; with Mobile Spellcasting, a character can cast while moving at full speed.
 *
 * Requires: Combat Casting, Dexterity 13, ability to cast 2nd level spells.
 * Level 3: Artificer, Cleric, Druid, Wizard; Level 4: Bard, Favored Soul, Sorcerer; Level 7: Paladin, Ranger
*
* @todo
* Add ability to cast 2nd level spells req
*/
protected[feats] trait MobileSpellcasting
extends FeatRequisiteImpl with ClassRequisiteImpl with Passive with RequiresAllOfFeat
with AttributeRequisiteImpl with RequiresAllOfAttribute with RequiresAnyOfClass {
self: GeneralFeat =>
override def allOfFeats: Seq[GeneralFeat] = List(GeneralFeat.CombatCasting)
override def anyOfClass: Seq[(HeroicCharacterClass, Int)] =
List(
(HeroicCharacterClass.Artificer, 3),
(HeroicCharacterClass.Bard, 4),
(HeroicCharacterClass.Cleric, 3),
(HeroicCharacterClass.Druid, 3),
(HeroicCharacterClass.FavoredSoul, 4),
(HeroicCharacterClass.Sorcerer, 4),
(HeroicCharacterClass.Wizard, 3),
(HeroicCharacterClass.Paladin, 7),
(HeroicCharacterClass.Ranger, 7)
)
override def allOfAttributes: Seq[(Attribute, Int)] = Seq((Attribute.Dexterity, 13))
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/MobileSpellcasting.scala
|
Scala
|
apache-2.0
| 2,193 |
package me.frmr.kafka.testtool
import scala.math
import scala.util.Random
object MessageBatcher {
private[this] def sliding[T](
collection: Seq[T],
slotSize: Int,
unevenProbability: Double,
maximumUnevenDist: Double,
unevenEnabled: Boolean
): Seq[Seq[T]] = {
def takeMore = Random.nextDouble() < 0.5
    // Cap the per-slot adjustment at maximumUnevenDist (a fraction of the slot size).
    def adjustTakeBy = (slotSize * math.min(Random.nextDouble(), maximumUnevenDist)).toInt
var numberTaken = 0
val slotCount = collection.size / slotSize
for (slot <- (0 to slotCount)) yield {
if (slot == slotCount) {
collection.drop(numberTaken)
} else if (unevenEnabled && Random.nextDouble() < unevenProbability) {
val numToTake = if (takeMore) {
slotSize + adjustTakeBy
} else {
slotSize - adjustTakeBy
}
val nextSlot = collection.drop(numberTaken).take(numToTake)
numberTaken = numberTaken + numToTake
nextSlot
} else {
val nextSlot = collection.drop(numberTaken).take(slotSize)
numberTaken = numberTaken + slotSize
nextSlot
}
}
}
def batch(
messages: Seq[(Array[Byte], Array[Byte])],
duration: Int,
lagProbability: Double,
minimumLagMs: Int,
maximumLagMs: Int,
lagEnabled: Boolean,
unevenDistributionProbability: Double,
maximumDistributionVariance: Double,
unevenEnabled: Boolean
): Seq[MessageBatch] = {
val numberOfMessages = messages.length
val messagesPerSecond = messages.length / duration
val batchedMessages = sliding(
messages,
messagesPerSecond,
unevenDistributionProbability,
maximumDistributionVariance,
unevenEnabled
)
    val baseDistanceMs = 1000 // one second
for (batch <- batchedMessages) yield {
if (lagEnabled && Random.nextDouble() < lagProbability) {
val lag = Random.nextInt(maximumLagMs - minimumLagMs) + minimumLagMs
MessageBatch(baseDistanceMs+lag, batch)
} else {
MessageBatch(baseDistanceMs, batch)
}
}
}
}
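// Illustrative usage sketch (not part of the original source): spread a set of
// key/value messages evenly over 10 seconds with artificial lag and uneven
// distribution disabled; `messages` is hypothetical.
//
//   val batches: Seq[MessageBatch] = MessageBatcher.batch(
//     messages = messages,
//     duration = 10,
//     lagProbability = 0.0, minimumLagMs = 0, maximumLagMs = 0, lagEnabled = false,
//     unevenDistributionProbability = 0.0, maximumDistributionVariance = 0.0,
//     unevenEnabled = false)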
|
farmdawgnation/kafka-detective
|
testtool/src/main/scala/me/frmr/kafka/testtool/MessageBatcher.scala
|
Scala
|
apache-2.0
| 2,055 |
import java.util.UUID
import akka.actor.{ ActorIdentity, ActorPath, ActorSystem, Identify, Props }
import akka.pattern.ask
import akka.persistence.journal.leveldb.{ SharedLeveldbJournal, SharedLeveldbStore }
import akka.util.Timeout
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
trait SharedJournalSupport {
implicit val timeout = Timeout(10 seconds)
def startupSharedJournal(startStore: Boolean, path: ActorPath)(implicit system: ActorSystem, ctx: ExecutionContext): Unit = {
if (startStore)
system.actorOf(Props[SharedLeveldbStore], "store")
val actorSelection = system.actorSelection(path)
val future = actorSelection ? Identify(UUID.randomUUID())
future.onSuccess {
case ActorIdentity(_, Some(ref)) =>
SharedLeveldbJournal.setStore(ref, system)
      case _ ⇒
system.log.error("Shared journal not started at {}", path)
system.terminate()
}
future.onFailure {
case _ ⇒
system.log.error("Lookup of shared journal at {} timed out", path)
system.terminate()
}
}
}
|
j5ik2o/spetstore-cqrs-es-akka
|
play2-application/app/SharedJournalSupport.scala
|
Scala
|
mit
| 1,094 |
import org.scalatest.{FunSuite, Matchers}
/**
* Created by inieto on 27/04/15.
*/
class _34_TypeSignatures extends FunSuite with Matchers {
test("") {
}
}
|
inieto/scala-47deg
|
ScalaExercises/src/test/scala-2.11/_34_TypeSignatures.scala
|
Scala
|
mit
| 164 |
package cilib
final class MultiEval[A] private (objectives: List[Eval[A]]) {
def eval(xs: List[A]): List[Objective[A]] =
objectives.map(_.eval(xs))
}
object MultiEval {
  // The varargs encoding here is awkward
def apply[A](a: Eval[A], b: Eval[A], rest: Eval[A]*) =
new MultiEval(List(a, b) ++ rest.toList)
}
|
robgarden/cilib
|
moo/src/main/scala/cilib/MultiEval.scala
|
Scala
|
gpl-3.0
| 305 |
package org.awong.sorting.pq
import java.util.NoSuchElementException
/**
 * For an example of a purely functional PQ, see http://amitdev.github.io/coding/2014/03/06/Priority-Queue/
*/
abstract class PriorityQueue[K](implicit val ord: Ordering[K]) {
import ord._
import collection.mutable.ArrayBuffer
  // heap-ordered complete binary tree; logical indices 1..N map to buffer indices 0..N-1
var pq = ArrayBuffer[K]()
var n: Int = 0
def insert(v: K): Unit = {
n = n + 1
pq = pq :+ v
swim(size)
}
def enqueue(v: K): Unit
def dequeue(): K
def isEmpty: Boolean = {
n == 0
}
def size: Int = {
n
}
/**
* AKA a bottom-up reheapify
*/
protected def swim(in: Int): Unit = {
var k = in
while (k > 1 && cmp(k/2, k)) {
exch(k, k/2)
k = k/2
}
}
/**
* AKA a top-down reheapify
*/
protected def sink(in: Int): Unit = {
var k = in
var isDone = false
while (2*k <= size && !isDone) {
var j = 2*k
if (j < size && cmp(j, j+1)) {
j = j + 1
}
if (!cmp(k,j)) {
isDone = true
} else {
exch(k,j)
k = j
}
}
}
protected def cmp(i: Int, j: Int): Boolean
  // Logical heap index i (1-based) maps to buffer index i - 1.
  protected def less(i: Int, j: Int): Boolean = {
    pq(i - 1) < pq(j - 1)
  }
  protected def greater(i: Int, j: Int): Boolean = {
    pq(i - 1) > pq(j - 1)
  }
  protected def exch(i: Int, j: Int): Unit = {
    val swap = pq(i - 1)
    pq(i - 1) = pq(j - 1)
    pq(j - 1) = swap
  }
}
/**
* @see http://algs4.cs.princeton.edu/24pq/MaxPQ.java.html
*/
class MaxPQ[K <: Ordered[K]] extends PriorityQueue[K] {
import ord._
protected def cmp(i: Int, j: Int): Boolean = {
less(i,j)
}
// return the largest key
def max: K = {
if (isEmpty) {
throw new NoSuchElementException("PQ underflow")
}
    pq.head // logical index 1 lives at buffer index 0
}
// return and remove the largest key
def delMax(): K = {
if (isEmpty) {
throw new NoSuchElementException("PQ underflow")
}
    val maxKey = max
    exch(1, n)       // move the last key into the root position
    n = n - 1
    pq.remove(n)     // drop the old maximum (now at the end) to avoid loitering and aid GC
    sink(1)
    maxKey
}
def dequeue(): K = delMax()
def enqueue(v: K): Unit = insert(v)
private def isMaxHeap(k: Int): Boolean = {
if (k > size) true
else {
val left = 2*k
val right = 2*k + 1
if (left <= size && cmp(k,left)) false
else if (right <= size && cmp(k, right)) false
else isMaxHeap(left) && isMaxHeap(right)
}
}
}
/**
* @see http://algs4.cs.princeton.edu/24pq/MinPQ.java.html
*/
class MinPQ[K <: Ordered[K]] extends PriorityQueue[K] {
protected def cmp(i: Int, j: Int): Boolean = {
greater(i,j)
}
// return the smallest key
def min: K = {
if (isEmpty) {
throw new NoSuchElementException("PQ underflow")
}
pq.head
}
// return and remove the smallest key
def delMin(): K = {
if (isEmpty) {
throw new NoSuchElementException("PQ underflow")
}
    val minKey = min
    exch(1, n)       // move the last key into the root position
    n = n - 1
    pq.remove(n)     // drop the old minimum (now at the end) to avoid loitering and aid GC
    sink(1)
    minKey
}
def dequeue(): K = delMin()
def enqueue(v: K): Unit = insert(v)
private def isMinHeap(k: Int): Boolean = {
if (k > size) true
else {
val left = 2*k
val right = 2*k + 1
if (left <= size && cmp(k,left)) false
else if (right <= size && cmp(k, right)) false
else isMinHeap(left) && isMinHeap(right)
}
}
}
object MinPQ {
}
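// Illustrative usage sketch (not part of the original source). `Key` is a
// hypothetical Ordered wrapper; the implicit Ordering[Key] required by
// PriorityQueue is derived from Ordered via scala.math.Ordering.ordered.
//
//   case class Key(v: Int) extends Ordered[Key] { def compare(that: Key): Int = v compare that.v }
//   val pq = new MinPQ[Key]
//   List(3, 1, 2).foreach(k => pq.enqueue(Key(k)))
//   pq.dequeue() // Key(1) -- the smallest key comes out first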
|
alanktwong/algorithms-scala
|
sorting/src/main/scala/org/awong/sorting/pq/PriorityQueue.scala
|
Scala
|
mit
| 3,408 |
// This function will be used while invoking "Summation" to compute
// The area under the curve.
def f(coefficients:List[Int],powers:List[Int],x:Double):Double =
{
coefficients.zip(powers).map(arg => math.pow(x, arg._2) * arg._1).sum
}
// This function will be used while invoking "Summation" to compute
// The Volume of revolution of the curve around the X-Axis
// The 'Area' referred to here is the area of the circle obtained
// By rotating the point on the curve (x,f(x)) around the X-Axis
def area(coefficients:List[Int],powers:List[Int],x:Double):Double =
{
math.pow(f(coefficients, powers, x), 2) * math.Pi
}
// This is the part where the series is summed up
// This function is invoked once with func = f to compute the area // under the curve
// Then it is invoked again with func = area to compute the volume
// of revolution of the curve
def summation(func:(List[Int],List[Int],Double)=>Double,upperLimit:Int,lowerLimit:Int,coefficients:List[Int],powers:List[Int]):Double =
{
(lowerLimit*1d to upperLimit*1d by 0.001d).map(0.001d*func(coefficients,powers,_)).sum
}
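// Worked example (illustrative, not part of the original source): for f(x) = x,
// i.e. coefficients = List(1) and powers = List(1), on [0, 1] the Riemann sum
// summation(f, 1, 0, List(1), List(1)) evaluates to roughly 0.5, matching the
// exact integral of x from 0 to 1.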
// The Input-Output functions will be handled by us. You only need to concentrate your effort on the function bodies above.
|
franklingu/HackerRank
|
functional-programming/introduction/area-under-curves-and-volume-of-revolving-a-curve/area_under_curves_and_volume_of_revolving_a_curve.scala
|
Scala
|
mit
| 1,277 |
object Main extends App {
val source = scala.io.Source.fromFile(args(0))
val lines = source.getLines.filter(_.length > 0)
for (l <- lines) {
val m = l.split(" ").foldLeft("")((x: String, y: String) => if (y.length > x.length) y else x)
val r = for (i <- 0 to (m.length - 1)) yield "*" * i + m(i)
println(r.mkString(" "))
}
}
|
nikai3d/ce-challenges
|
easy/stepwise_word.scala
|
Scala
|
bsd-3-clause
| 346 |
package demo.components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import scalacss.ProdDefaults._
object ReactTagsInputInfo {
object Style extends StyleSheet.Inline {
import dsl._
val content = style(
textAlign.center,
fontSize(30.px),
paddingTop(40.px)
)
}
val component = ScalaComponent
.builder[Unit]("ReactTagsInputInfo")
.render(P => {
InfoTemplate(componentFilePath = "textfields/ReactTagsInput.scala")(
<.div(
<.h3("React Tags Input "),
<.p(
"scalajs-react wrapper for ",
RedLink("tags input", "https://github.com/olahol/react-tagsinput")
),
<.div(
<.h4("Supported Version :"),
<.span("3.0.3")
),
<.div(
<.h4("How To Use :"),
<.p("Follow the installation guide from :",
RedLink("here", "https://github.com/olahol/react-tagsinput#install"))
)
)
)
})
.build
def apply() = component()
}
|
chandu0101/scalajs-react-components
|
demo/src/main/scala/demo/components/ReactTagsInputInfo.scala
|
Scala
|
apache-2.0
| 1,072 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.mesos.config
class MesosSchedulerBackendUtilSuite extends SparkFunSuite {
test("ContainerInfo fails to parse invalid docker parameters") {
val conf = new SparkConf()
conf.set("spark.mesos.executor.docker.parameters", "a,b")
conf.set("spark.mesos.executor.docker.image", "test")
val containerInfo = MesosSchedulerBackendUtil.buildContainerInfo(
conf)
val params = containerInfo.getDocker.getParametersList
assert(params.size() == 0)
}
test("ContainerInfo parses docker parameters") {
val conf = new SparkConf()
conf.set("spark.mesos.executor.docker.parameters", "a=1,b=2,c=3")
conf.set("spark.mesos.executor.docker.image", "test")
val containerInfo = MesosSchedulerBackendUtil.buildContainerInfo(
conf)
val params = containerInfo.getDocker.getParametersList
assert(params.size() == 3)
assert(params.get(0).getKey == "a")
assert(params.get(0).getValue == "1")
assert(params.get(1).getKey == "b")
assert(params.get(1).getValue == "2")
assert(params.get(2).getKey == "c")
assert(params.get(2).getValue == "3")
}
}
|
bravo-zhang/spark
|
resource-managers/mesos/src/test/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackendUtilSuite.scala
|
Scala
|
apache-2.0
| 2,042 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kafka.index
import java.util.concurrent._
import com.github.benmanes.caffeine.cache.Ticker
import com.typesafe.scalalogging.StrictLogging
import org.locationtech.geomesa.filter.factory.FastFilterFactory
import org.locationtech.geomesa.filter.index.{BucketIndexSupport, SizeSeparatedBucketIndexSupport}
import org.locationtech.geomesa.kafka.data.KafkaDataStore.IndexConfig
import org.locationtech.geomesa.kafka.index.FeatureStateFactory.{FeatureExpiration, FeatureState}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import scala.concurrent.duration.Duration
/**
* Feature cache implementation
*
* @param sft simple feature type
* @param config index config
*/
class KafkaFeatureCacheImpl(sft: SimpleFeatureType, config: IndexConfig)
extends KafkaFeatureCache with FeatureExpiration with StrictLogging {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
// keeps location and expiry keyed by feature ID (we need a way to retrieve a feature based on ID for
// update/delete operations). to reduce contention, we never iterate over this map
private val state = new ConcurrentHashMap[String, FeatureState]
// note: CQEngine handles points vs non-points internally
private val support = if (config.cqAttributes.nonEmpty) {
KafkaFeatureCache.cqIndexSupport(sft, config)
} else if (sft.isPoints) {
BucketIndexSupport(sft, config.resolutionX, config.resolutionY)
} else {
SizeSeparatedBucketIndexSupport(sft, config.ssiTiers, config.resolutionX / 360d, config.resolutionY / 180d)
}
private val factory = {
val expiry = if (config.expiry == Duration.Inf) { None } else {
val (executor, ticker) = config.executor.getOrElse {
val ex = new ScheduledThreadPoolExecutor(2)
// don't keep running scheduled tasks after shutdown
ex.setExecuteExistingDelayedTasksAfterShutdownPolicy(false)
// remove tasks when canceled, otherwise they will only be removed from the task queue
// when they would be executed. we expect frequent cancellations due to feature updates
ex.setRemoveOnCancelPolicy(true)
(ex, Ticker.systemTicker())
}
Some((this, executor, ticker, config.expiry.toMillis))
}
val eventTime = config.eventTime.map(e => (FastFilterFactory.toExpression(sft, e.expression), e.ordering))
FeatureStateFactory(support.index, expiry, eventTime, sft.getGeomIndex)
}
/**
* Note: this method is not thread-safe. The `state` and `index` can get out of sync if the same feature
* is updated simultaneously from two different threads
*
* In our usage, this isn't a problem, as a given feature ID is always operated on by a single thread
* due to kafka consumer partitioning
*/
override def put(feature: SimpleFeature): Unit = {
val featureState = factory.createState(feature)
logger.trace(s"${featureState.id} adding feature $featureState")
val old = state.put(featureState.id, featureState)
if (old == null) {
featureState.insertIntoIndex()
} else if (old.time <= featureState.time) {
logger.trace(s"${featureState.id} removing old feature")
old.removeFromIndex()
featureState.insertIntoIndex()
} else {
logger.trace(s"${featureState.id} ignoring out of sequence feature")
if (!state.replace(featureState.id, featureState, old)) {
logger.warn(s"${featureState.id} detected inconsistent state... spatial index may be incorrect")
old.removeFromIndex()
}
}
logger.trace(s"Current index size: ${state.size()}/${support.index.size()}")
}
/**
* Note: this method is not thread-safe. The `state` and `index` can get out of sync if the same feature
* is updated simultaneously from two different threads
*
* In our usage, this isn't a problem, as a given feature ID is always operated on by a single thread
* due to kafka consumer partitioning
*/
override def remove(id: String): Unit = {
logger.trace(s"$id removing feature")
val old = state.remove(id)
if (old != null) {
old.removeFromIndex()
}
logger.trace(s"Current index size: ${state.size()}/${support.index.size()}")
}
override def expire(featureState: FeatureState): Unit = {
logger.trace(s"${featureState.id} expiring from index")
if (state.remove(featureState.id, featureState)) {
featureState.removeFromIndex()
}
logger.trace(s"Current index size: ${state.size()}/${support.index.size()}")
}
override def clear(): Unit = {
logger.trace("Clearing index")
state.clear()
support.index.clear()
}
override def size(): Int = state.size()
// optimized for filter.include
override def size(f: Filter): Int = if (f == Filter.INCLUDE) { size() } else { query(f).length }
override def query(id: String): Option[SimpleFeature] =
Option(state.get(id)).flatMap(f => Option(f.retrieveFromIndex()))
override def query(filter: Filter): Iterator[SimpleFeature] = support.query(filter)
override def close(): Unit = factory.close()
}
|
aheyne/geomesa
|
geomesa-kafka/geomesa-kafka-datastore/src/main/scala/org/locationtech/geomesa/kafka/index/KafkaFeatureCacheImpl.scala
|
Scala
|
apache-2.0
| 5,629 |
package com.biosimilarity.spirograph
import ru.circumflex._, core._, web._
import org.scalatest._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers.MustMatchers
@RunWith(classOf[JUnitRunner])
class MySpec
extends FreeSpec
with BeforeAndAfter
with MustMatchers {
before {
cx("cx.router") = classOf[Main]
MockApp.start()
}
after {
MockApp.stop()
}
"My application" - {
"test itself" in {
MockApp.get("/test").execute().content must equal ("I'm fine, thanks!")
}
}
}
|
leithaus/SpliciousRoadmap
|
src/test/scala/specs.scala
|
Scala
|
artistic-2.0
| 565 |
package io.jfc
import cats.data.Xor
/**
* A zipper that represents a position in a JSON document and supports
* navigation and modification.
*
* The `focus` represents the current position of the cursor; it may be updated
* with `withFocus` or changed using navigation methods like `left` and `right`.
*
* jfc includes three kinds of cursors. [[Cursor]] is the simplest: it doesn't
* keep track of its history. [[HCursor]] is a cursor that does keep track of
* its history, but does not represent the possibility that an navigation or
* modification operation has failed. [[ACursor]] is the richest cursor, since
* it both tracks history through an underlying [[HCursor]] and can represent
* failed operations.
*
* [[GenericCursor]] is an abstraction over these three types, and it has
* several abstract type members that are required in order to represent the
 * different roles of the three cursor types. The `C` type parameter is the specific type
 * of the cursor, `Focus` is a type constructor that represents the context in
* which the focus is available, `Result` is the type that is returned by all
* navigation and modification operations, and `M` is a type class that includes
* the operations that we need for `withFocusM`.
*
* @groupname TypeMembers Type members
* @groupprio TypeMembers 0
* @groupname Access Access and navigation
* @groupprio Access 2
* @groupname Modification Modification
* @groupprio Modification 3
* @groupname ArrayAccess Array access
* @groupprio ArrayAccess 4
* @groupname ObjectAccess Object access
* @groupprio ObjectAccess 5
* @groupname ArrayNavigation Array navigation
* @groupprio ArrayNavigation 6
* @groupname ObjectNavigation Object navigation
* @groupprio ObjectNavigation 7
* @groupname ArrayModification Array modification
* @groupprio ArrayModification 8
* @groupname ObjectModification Object modification
* @groupprio ObjectModification 9
* @groupname Decoding Decoding
* @groupprio Decoding 10
*
* @author Travis Brown
*/
trait GenericCursor[C <: GenericCursor[C]] {
/**
* The context that the cursor is available in.
*
* @group TypeMembers
*/
type Focus[_]
/**
* The type returned by navigation and modifications operations.
*
* @group TypeMembers
*/
type Result
/**
* The type class including the operations needed for `withFocusM`.
*
* @group TypeMembers
*/
type M[_[_]]
/**
* The current location in the document.
*
* @group Access
*/
def focus: Focus[Json]
/**
* Return to the root of the document.
*
* @group Access
*/
def top: Focus[Json]
/**
* Move the focus to the parent.
*
* @group Access
*/
def up: Result
/**
* Delete the focus and move to its parent.
*
* @group Modification
*/
def delete: Result
/**
* Modify the focus using the given function.
*
* @group Modification
*/
def withFocus(f: Json => Json): C
/**
* Modify the focus in a context using the given function.
*
* @group Modification
*/
def withFocusM[F[_]: M](f: Json => F[Json]): F[C]
/**
* Replace the focus.
*
* @group Modification
*/
def set(j: Json): C = withFocus(_ => j)
/**
* If the focus is a JSON array, return the elements to the left.
*
* @group ArrayAccess
*/
def lefts: Option[List[Json]]
/**
* If the focus is a JSON array, return the elements to the right.
*
* @group ArrayAccess
*/
def rights: Option[List[Json]]
/**
* If the focus is a JSON object, return its field names in a set.
*
* @group ObjectAccess
*/
def fieldSet: Option[Set[String]]
/**
* If the focus is a JSON object, return its field names in their original
* order.
*
* @group ObjectAccess
*/
def fields: Option[List[String]]
/**
* If the focus is an element in a JSON array, move to the left.
*
* @group ArrayNavigation
*/
def left: Result
/**
* If the focus is an element in a JSON array, move to the right.
*
* @group ArrayNavigation
*/
def right: Result
/**
* If the focus is an element in a JSON array, move to the first element.
*
* @group ArrayNavigation
*/
def first: Result
/**
* If the focus is an element in a JSON array, move to the last element.
*
* @group ArrayNavigation
*/
def last: Result
/**
* If the focus is an element in JSON array, move to the left the given number
* of times. A negative value will move the cursor right.
*
* @group ArrayNavigation
*/
def leftN(n: Int): Result
/**
* If the focus is an element in JSON array, move to the right the given
* number of times. A negative value will move the cursor left.
*
* @group ArrayNavigation
*/
def rightN(n: Int): Result
/**
* If the focus is an element in a JSON array, move to the left until the
* given predicate matches the new focus.
*
* @group ArrayNavigation
*/
def leftAt(p: Json => Boolean): Result
/**
* If the focus is an element in a JSON array, move to the right until the
* given predicate matches the new focus.
*
* @group ArrayNavigation
*/
def rightAt(p: Json => Boolean): Result
/**
* If the focus is an element in a JSON array, find the first element at or to
* its right that matches the given predicate.
*
* @group ArrayNavigation
*/
def find(p: Json => Boolean): Result
/**
* If the focus is a JSON array, move to its first element.
*
* @group ArrayNavigation
*/
def downArray: Result
/**
* If the focus is a JSON array, move to the first element that satisfies the
* given predicate.
*
* @group ArrayNavigation
*/
def downAt(p: Json => Boolean): Result
/**
* If the focus is a JSON array, move to the element at the given index.
*
* @group ArrayNavigation
*/
def downN(n: Int): Result
/**
* If the focus is a value in a JSON object, move to a sibling with the given
* key.
*
* @group ObjectNavigation
*/
def field(k: String): Result
/**
* If the focus is a JSON object, move to the value of the given key.
*
* @group ObjectNavigation
*/
def downField(k: String): Result
/**
* Delete the focus and move to the left in a JSON array.
*
* @group ArrayModification
*/
def deleteGoLeft: Result
/**
* Delete the focus and move to the right in a JSON array.
*
* @group ArrayModification
*/
def deleteGoRight: Result
/**
* Delete the focus and move to the first element in a JSON array.
*
* @group ArrayModification
*/
def deleteGoFirst: Result
/**
* Delete the focus and move to the last element in a JSON array.
*
* @group ArrayModification
*/
def deleteGoLast: Result
/**
* Delete all values to the left of the focus in a JSON array.
*
* @group ArrayModification
*/
def deleteLefts: Result
/**
* Delete all values to the right of the focus in a JSON array.
*
* @group ArrayModification
*/
def deleteRights: Result
/**
* Replace all values to the left of the focus in a JSON array.
*
* @group ArrayModification
*/
def setLefts(x: List[Json]): Result
/**
* Replace all values to the right of the focus in a JSON array.
*
* @group ArrayModification
*/
def setRights(x: List[Json]): Result
/**
* Delete the focus and move to the sibling with the given key in a JSON
* object.
*
* @group ObjectModification
*/
def deleteGoField(k: String): Result
/**
* Attempt to decode the focus as an `A`.
*
* @group Decoding
*/
def as[A](implicit decode: Decode[A]): Xor[DecodeFailure, A]
/**
* Attempt to decode the value at the given key in a JSON object as an `A`.
*
* @group Decoding
*/
def get[A](k: String)(implicit decode: Decode[A]): Xor[DecodeFailure, A]
}
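// Illustrative usage sketch (not part of the original source). Assuming a concrete
// cursor such as HCursor (obtained from a Json value) whose navigation methods
// return ACursor, decoding a nested field composes like this; the JSON shape and
// field names are hypothetical:
//
//   val age: Xor[DecodeFailure, Int] = cursor.downField("user").downField("age").as[Int]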
|
non/circe
|
core/src/main/scala/io/jfc/GenericCursor.scala
|
Scala
|
apache-2.0
| 7,876 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.pipeline.api
import org.orbeon.oxf.processor.pipeline.PipelineFunctionLibrary
/**
* Function library to use from XSLT and XPL.
*/
object FunctionLibrary extends PipelineFunctionLibrary {
// For Java callers
def instance = this
}
|
wesley1001/orbeon-forms
|
src/main/scala/org/orbeon/oxf/pipeline/api/FunctionLibrary.scala
|
Scala
|
lgpl-2.1
| 914 |
trait DeliteDSL {
abstract class <~<[-From, +To] extends (From => To)
implicit def trivial[A]: A <~< A = new (A <~< A) {def apply(x: A) = x}
trait Forcible[T]
object Forcible {
def factory[T](f: T => Forcible[T]) = new (T <~< Forcible[T]){def apply(x: T) = f(x)}
}
case class DeliteInt(x: Int) extends Forcible[Int]
implicit val forcibleInt = Forcible.factory(DeliteInt(_: Int))
class DeliteCollection[T](val xs: Iterable[T]) {
// must use existential in bound of P, instead of T itself, because we cannot both have:
// Test.x below: DeliteCollection[T=Int] -> P=DeliteInt <: Forcible[T=Int], as T=Int <~< P=DeliteInt
// Test.xAlready below: DeliteCollection[T=DeliteInt] -> P=DeliteInt <: Forcible[T=DeliteInt], as T=DeliteInt <~< P=DeliteInt
    // this would require DeliteInt <: Forcible[Int] with Forcible[DeliteInt]
def headProxy[P <: Forcible[_]](implicit w: T <~< P): P = xs.head
}
// If T is already a proxy (it is forcible), the compiler should use
// forcibleIdentity to deduce that P=T. If T is Int, the compiler
// should use intToForcible to deduce that P=DeliteInt.
//
  // Without this feature, the user must write 'xs.headProxy[DeliteInt]';
  // with the feature they can write 'xs.headProxy', which is shorter and
// avoids exposing internal DELITE types to the world.
object Test {
val x = new DeliteCollection(List(1,2,3)).headProxy
// inferred: val x: Forcible[Int] = new DeliteCollection[Int](List.apply[Int](1, 2, 3)).headProxy[Forcible[Int]](forcibleInt);
val xAlready = new DeliteCollection(List(DeliteInt(1),DeliteInt(2),DeliteInt(3))).headProxy
// inferred: val xAlready: DeliteInt = new DeliteCollection[DeliteInt](List.apply[DeliteInt](DeliteInt(1), DeliteInt(2), DeliteInt(3))).headProxy[DeliteInt](trivial[DeliteInt]);
}
}
|
scala/scala
|
test/files/pos/t2421_delitedsl.scala
|
Scala
|
apache-2.0
| 1,848 |
package org.jetbrains.plugins.scala.lang.psi.light
import com.intellij.psi.impl.light.LightMethod
import com.intellij.psi.{PsiElement, PsiMethod, JavaPsiFacade}
import org.jetbrains.plugins.scala.lang.psi.types.result.{TypingContext, Success}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
/**
* @author Alefas
* @since 28.02.12
*/
class StaticTraitScFunctionWrapper(val function: ScFunction, containingClass: PsiClassWrapper) extends {
val elementFactory = JavaPsiFacade.getInstance(function.getProject).getElementFactory
val methodText = StaticTraitScFunctionWrapper.methodText(function, containingClass: PsiClassWrapper)
val method: PsiMethod = {
try {
elementFactory.createMethodFromText(methodText, containingClass)
} catch {
case e: Exception => elementFactory.createMethodFromText("public void FAILED_TO_DECOMPILE_METHOD() {}", containingClass)
}
}
} with LightMethodAdapter(function.getManager, method, containingClass) with LightScalaMethod {
override def getNavigationElement: PsiElement = function
override def canNavigate: Boolean = function.canNavigate
override def canNavigateToSource: Boolean = function.canNavigateToSource
override def getParent: PsiElement = containingClass
}
object StaticTraitScFunctionWrapper {
def methodText(function: ScFunction, containingClass: PsiClassWrapper): String = {
val builder = new StringBuilder
builder.append(JavaConversionUtil.modifiers(function, true))
if (!function.isConstructor) {
function.returnType match {
case Success(tp, _) => builder.append(JavaConversionUtil.typeText(tp, function.getProject, function.getResolveScope))
case _ => builder.append("java.lang.Object")
}
}
builder.append(" ")
val name = if (!function.isConstructor) function.getName else function.containingClass.getName
builder.append(name)
val qualName = containingClass.getQualifiedName
builder.append(((qualName.substring(0, qualName.length() - 6) + " This") +: function.parameters.map { case param =>
val builder = new StringBuilder
param.getRealParameterType(TypingContext.empty) match {
case Success(tp, _) =>
if (param.isCallByNameParameter) builder.append("scala.Function0<")
builder.append(JavaConversionUtil.typeText(tp, function.getProject, function.getResolveScope))
if (param.isCallByNameParameter) builder.append(">")
case _ => builder.append("java.lang.Object")
}
builder.append(" ").append(param.getName)
builder.toString()
}).mkString("(", ", ", ")"))
builder.append(LightUtil.getThrowsSection(function))
builder.append(" {}")
builder.toString()
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/psi/light/StaticTraitScFunctionWrapper.scala
|
Scala
|
apache-2.0
| 2,735 |
package unfiltered.request
/** Accepts request header extractor */
object Accepts {
trait Accepting {
def contentType: String
def ext: String
def unapply[T](r: HttpRequest[T]) = {
val pathSuffix = Path(r).split("[.]").lastOption
r match {
case Accept(values) =>
if(values.exists { _.equalsIgnoreCase(contentType) })
Some(r)
else if (values.contains("*/*") && pathSuffix.exists { ext == _ })
Some(r)
else None
case _ => pathSuffix match {
case Some(pathSuffix) if(pathSuffix == ext) => Some(r)
case _ => None
}
}
}
}
object Json extends Accepting {
val contentType = "application/json"
val ext = "json"
}
/** Lenient matcher for application/javascript and text/javascript */
object JavaScript extends Accepting {
val contentType = "text/javascript"
val ext = "js"
override def unapply[T](r: HttpRequest[T]) =
AppJavaScript.unapply(r) orElse {super.unapply(r)}
}
object AppJavaScript extends Accepting {
val contentType = "application/javascript"
val ext = "js"
}
/** Lenient matcher for application/json, application/javascript, and text/javascript */
object Jsonp {
def unapply[T](r: HttpRequest[T]) =
Json.unapply(r) orElse {JavaScript.unapply(r)}
}
object Xml extends Accepting {
val contentType = "text/xml"
val ext = "xml"
}
object Html extends Accepting {
val contentType = "text/html"
val ext = "html"
}
object Csv extends Accepting {
val contentType = "text/csv"
val ext = "csv"
}
}
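/** Usage sketch (illustration only, not part of the library): the extractors above are meant to
  * be used in ordinary pattern matches on a request; producing a response is left to the caller. */
private[request] object AcceptsUsageSketch {
  def describe[T](req: HttpRequest[T]): String = req match {
    case Accepts.Json(_)  => "client accepts JSON"
    case Accepts.Jsonp(_) => "client accepts JSON or JavaScript"
    case Accepts.Xml(_)   => "client accepts XML"
    case _                => "no matching Accept header or path extension"
  }
}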
|
hamnis/unfiltered
|
library/src/main/scala/request/accepts.scala
|
Scala
|
mit
| 1,638 |
package com.github.ldaniels528.commons.helpers
import java.lang.reflect.{Field, Method}
import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
/**
* Scala Bean Copy Utilities
* @author [email protected]
*/
class ScalaBeanUtil() {
/**
* Copies values from the source instance to the destination instance
* via the destination's mutator methods.
*/
def copy[A, B](src: A, dest: B): B = {
// get the source and destination fields
val srcMethods = extractMethods(src.getClass)
val destMethods = extractMethods(dest.getClass, isTarget = true)
// for each field that's found in both source and destination,
// copy its value
destMethods foreach {
dm =>
// find the matching method
srcMethods.find(_.getName == asGetter(dm)) match {
case Some(sm) =>
val value = getValue(src, sm)
Try(setValue(dest, dm, value)) match {
case Success(_) =>
case Failure(e) =>
System.err.println(s"ScalaBeanUtil: Error setting method '${sm.getName}' with '$value'")
}
case None =>
}
}
dest
}
/**
* Creates a new case class (parameterized) instance setting its values
* from the source object(s)
*/
def caseCopy[S](sources: Any*)(implicit m: Manifest[S]): S = {
// get the source properties
val srcProps = Map(sources flatMap (src =>
extractMethods(src.getClass) flatMap { m =>
val (k, v) = (m.getName, m.invoke(src))
if (v != getDefaultValue(m.getReturnType)) Some((k, v)) else None
}): _*)
// get the destination class
val destClass = m.runtimeClass.asInstanceOf[Class[S]]
// lookup the default constructor
val cons = destClass.getConstructors()(0)
// if the construct has no parameters, set the mutators
if (cons.getParameterTypes.length == 0) {
// create the destination instance
val dest = cons.newInstance().asInstanceOf[S]
// copy the values
extractMethods(destClass, isTarget = true) map (m =>
setValue(dest, m, srcProps.getOrElse(asGetter(m), getDefaultValue(m.getReturnType))))
dest
} else {
// build the destination properties
val destProps = extractMethods(destClass) map { m =>
val name = m.getName
(name, srcProps.getOrElse(name, getDefaultValue(m.getReturnType)))
}
// create the destination case class
cons.newInstance(destProps map (_._2): _*).asInstanceOf[S]
}
}
/**
* Retrieves the value from the bean's property
*/
protected def getValue[A](bean: A, m: Method): Object = m.invoke(bean)
/**
* Sets the value of the bean's property
*/
protected def setValue[B](bean: B, m: Method, value: Object): Object = {
m.invoke(bean, getTypedValue(m.getReturnType, value))
}
/**
* Returns the default value of the method's return type
*/
protected def getDefaultValue(returnType: Class[_]): Object = {
returnType match {
case c if c.isArray => createTypedArray(returnType.getComponentType, Array.empty)
case c if c == classOf[Option[_]] => None
case c if c == classOf[List[_]] => Nil
case c if c == classOf[Seq[_]] => Seq.empty
case c if c == classOf[Set[_]] => Set.empty
case c if c == classOf[java.util.Collection[_]] => java.util.Collections.emptyList
case c if c == classOf[java.util.List[_]] => java.util.Collections.emptyList
case c if c == classOf[java.util.Set[_]] => java.util.Collections.emptySet
case _ => null
}
}
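  /**
   * Coerces the given value into an instance compatible with the target return type,
   * substituting the type's default value when the value is null
   */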
protected def getTypedValue(returnType: Class[_], value: Object): Object = {
import scala.collection.JavaConversions._
// look for exception cases by value type
value match {
// if the value is null ...
case v if v == null => getDefaultValue(returnType)
// if the value is an Option[T] ...
case o: Option[_] if returnType != classOf[Option[_]] => o.map(_.asInstanceOf[Object]).orNull
// look for exception cases by return type
case _ =>
returnType match {
case c if c.isArray => getTypedArray(returnType, value)
case c if c == classOf[Option[_]] => Option(value)
case c if c == classOf[List[_]] => getTypedArray(returnType, value).toList
case c if c == classOf[Seq[_]] => getTypedArray(returnType, value).toSeq
case c if c == classOf[Set[_]] => getTypedArray(returnType, value).toSet
case c if c == classOf[java.util.Collection[_]] => seqAsJavaList(getTypedArray(returnType, value).toList)
case c if c == classOf[java.util.List[_]] => seqAsJavaList(getTypedArray(returnType, value).toList)
case c if c == classOf[java.util.Set[_]] => getTypedArray(returnType, value).toSet.asJava
case _ => value
}
}
}
protected def getTypedArray(returnType: Class[_], value: Object): Array[Any] = {
// determine the type of the array
val arrayType = returnType.getComponentType
// convert the value into an array
value match {
case a if a == null => createTypedArray(arrayType, Array.empty)
case a: Array[_] => createTypedArray(arrayType, a.toArray)
case l: List[_] => createTypedArray(arrayType, l.toArray)
case s: Seq[_] => createTypedArray(arrayType, s.toArray)
case c: java.util.Collection[_] => createTypedArray(arrayType, c.toArray.asInstanceOf[Array[Any]])
case x =>
throw new IllegalArgumentException(s"Cannot convert type ${x.getClass.getName} into an array")
}
}
protected def createTypedArray(arrayType: Class[_], items: Array[Any]): Array[Any] = {
val array = java.lang.reflect.Array.newInstance(arrayType, items.length).asInstanceOf[Array[Any]]
System.arraycopy(items, 0, array, 0, items.length)
array
}
protected def asGetter(m: Method): String = m.getName.replaceAllLiterally("_$eq", "")
protected def extractMethods[A](beanClass: Class[A], isTarget: Boolean = false): Seq[Method] = {
if (isTarget) {
beanClass.getDeclaredMethods filter (_.getName.endsWith("_$eq"))
} else {
beanClass.getDeclaredFields filterNot unwantedFields map (f => beanClass.getMethod(f.getName))
}
}
/**
* Eliminates reflection artifacts
*/
protected def unwantedFields(f: Field) = Set("$outer", "serialVersionUID").contains(f.getName)
}
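/**
 * Usage sketch (illustration only): `Person` and `PersonRecord` are hypothetical examples,
 * not part of this library.
 */
private object ScalaBeanUtilUsageSketch {
  class Person { var name: String = "Ada"; var age: Int = 36 }
  case class PersonRecord(name: String = "", age: Int = 0)

  // Builds a PersonRecord from the Person bean's accessor values via caseCopy.
  def demo(): PersonRecord = new ScalaBeanUtil().caseCopy[PersonRecord](new Person)
}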
|
ldaniels528/commons-helpers
|
src/main/scala/com/github/ldaniels528/commons/helpers/ScalaBeanUtil.scala
|
Scala
|
apache-2.0
| 6,410 |
// scalac: -Werror
class Test {
import MyEnum._
def f(e: MyEnum) = e match {
case ONE => println("one")
case TWO => println("two")
// missing case --> exhaustivity warning!
}
import MySecondEnum._
def g(e: MySecondEnum) = e match {
case RED => println("red")
// missing case --> exhaustivity warning!
}
}
|
lrytz/scala
|
test/files/neg/t2442/t2442.scala
|
Scala
|
apache-2.0
| 339 |
package org.bone.ircballoon
import org.eclipse.swt.widgets.{List => SWTList, _}
import org.eclipse.swt.layout._
import org.eclipse.swt.events._
import org.eclipse.swt.graphics._
import org.eclipse.swt.custom._
import org.eclipse.swt._
import scala.math._
import scala.collection.JavaConversions._
import I18N.i18n._
import ImageUtil._
import org.eclipse.swt.widgets.Listener
/*
 * Prevents the notification block from being closed from the taskbar,
 * but still allows it to be closed via the "Connect" button on MainWindow.
 */
object NotificationBlockListener extends Listener{
  var closable = false
  def closable(option: Boolean) {
    closable = option
  }
def handleEvent (event: Event) {
event.doit = closable
}
}
case class NotificationBlock(size: (Int, Int), location: (Int, Int),
borderColor: Color, bgColor: Color, alpha: Int,
fontColor: Color, font: Font,
nicknameColor: Color, nicknameFont: Font,
messageSize: Int, hasScrollBar: Boolean,
onTop: Boolean,
backgroundImage: Option[String] = None) extends Notification
with MessageIcon
with NotificationTheme
with NotificationWindow
with SWTHelper
{
val display = Display.getDefault
val shell_display = onTop match{
case true => SWT.NO_TRIM|SWT.ON_TOP|SWT.RESIZE
case false => SWT.MODELESS|SWT.NO_TRIM|SWT.RESIZE
}
val shell = new Shell(display, shell_display)
shell.addListener (SWT.Close, NotificationBlockListener)
val label = createContentLabel()
var messages: List[IRCMessage] = Nil
val (inputLabel, inputText) = createChatInputBox()
shell.setText(tr("IRCBalloon Chatroom"))
def createChatInputBox() =
{
val label = new Label(shell, SWT.LEFT)
val text = new Text(shell, SWT.BORDER)
label.setText(tr("Chat:"))
label.setFont(font)
label.setForeground(fontColor)
val layoutData = new GridData(SWT.FILL, SWT.NONE, true, false)
text.setBackground(bgColor)
text.setForeground(fontColor)
text.setFont(font)
text.setLayoutData(layoutData)
text.addTraverseListener(new TraverseListener() {
override def keyTraversed(e: TraverseEvent) {
if (e.detail == SWT.TRAVERSE_RETURN && text.getText.trim.length > 0) {
val message = text.getText.trim()
MainWindow.getIRCBot.foreach { bot =>
bot.getChannels.foreach { channel =>
val nickname = MainWindow.getNickname
val user = bot.getUser(nickname)
val isOP = user.getChannelsOpIn.contains(channel)
bot.sendMessage(channel, message)
NotificationBlock.this.addMessage(
ChatMessage(nickname, isOP, message)
)
}
}
text.setText("")
}
}
})
(label, text)
}
def createContentLabel() =
{
val layoutData = new GridData(SWT.FILL, SWT.FILL, true, true)
val style = hasScrollBar match {
case true => SWT.MULTI|SWT.WRAP|SWT.READ_ONLY|SWT.V_SCROLL|SWT.NO_FOCUS
case false => SWT.MULTI|SWT.WRAP|SWT.READ_ONLY|SWT.NO_FOCUS
}
val label = new StyledText(shell, style)
layoutData.horizontalSpan = 2
label.setBackgroundMode(SWT.INHERIT_FORCE)
label.setLayoutData(layoutData)
label.addPaintObjectListener(new PaintObjectListener() {
override def paintObject(event: PaintObjectEvent) {
event.style.data match {
case image: Image =>
val x = event.x
val y = event.y + event.ascent - event.style.metrics.ascent
event.gc.drawImage(image, x, y)
case _ =>
}
}
})
label
}
override def onTrayIconClicked(): Unit =
{
shell.setVisible(!shell.isVisible)
}
def addMessage(newMessage: IRCMessage)
{
messages = (newMessage :: messages).take(messageSize)
updateMessages()
}
def updateMessages()
{
display.syncExec (new Runnable {
override def run ()
{
if (!shell.isDisposed) {
val message = messages.take(messageSize).
                         reverse.map(_.toString).mkString("\n")
label.setText(message)
val styles = nicknameStyles(message, nicknameColor, nicknameFont) ++
opStyles(message) ++
emoteStyles(message) ++
avatarStyles(message)
styles.foreach(label.setStyleRange)
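          // scroll to the bottom so the newest message stays visible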
label.setTopPixel(Int.MaxValue)
}
}
})
}
def setLayout()
{
val layout = new GridLayout(2, false)
layout.marginLeft = 5
layout.marginRight = 5
layout.marginTop = 5
layout.marginBottom = 5
shell.setLayout(layout)
label.setFont(font)
label.setForeground(fontColor)
label.setLineSpacing(1)
}
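  // The shell is created with SWT.NO_TRIM, so moving and resizing are implemented by hand:
  // the bottom-right 20x20 px corner acts as a resize grip, dragging anywhere else moves the window.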
def setMoveAndResize()
{
var isResize = false
var offsetX = 0
var offsetY = 0
shell.addMouseListener(new MouseAdapter() {
override def mouseDown(e: MouseEvent) {
offsetX = e.x
offsetY = e.y
isResize =
e.x >= (shell.getSize.x - 20) && e.x <= shell.getSize.x &&
e.y >= (shell.getSize.y - 20) && e.y <= shell.getSize.y
}
override def mouseUp(e: MouseEvent) {
offsetX = 0
offsetY = 0
isResize = false
}
})
shell.addMouseMoveListener(new MouseMoveListener() {
private var isResizing = false
def isCorner(e: MouseEvent) = {
e.x >= (shell.getSize.x - 20) && e.x <= shell.getSize.x &&
e.y >= (shell.getSize.y - 20) && e.y <= shell.getSize.y
}
def setResizeCursor(e: MouseEvent)
{
if (isCorner(e) && !isResizing && !display.isDisposed) {
shell.setCursor(display.getSystemCursor(SWT.CURSOR_SIZESE))
isResizing = true
} else if (isResizing && !display.isDisposed) {
shell.setCursor(display.getSystemCursor(SWT.CURSOR_ARROW))
isResizing = false
}
}
def moveWindow(e: MouseEvent)
{
val absX = shell.getLocation.x + e.x
val absY = shell.getLocation.y + e.y
shell.setLocation(absX - offsetX, absY - offsetY)
MainWindow.blockSetting.locationX.setText((absX - offsetX).toString)
MainWindow.blockSetting.locationY.setText((absY - offsetY).toString)
}
def resizeWindow(e: MouseEvent)
{
shell.setSize(e.x, e.y)
MainWindow.blockSetting.width.setText(e.x.toString)
MainWindow.blockSetting.height.setText(e.y.toString)
}
override def mouseMove(e: MouseEvent)
{
val isDrag = (e.stateMask & SWT.BUTTON1) != 0
val shouldMove = isDrag && !isResize
val shouldResize = isDrag && isResize
setResizeCursor(e)
(shouldResize, shouldMove) match {
case (true, _) => resizeWindow(e)
case (_, true) => moveWindow(e)
case (_, _) => // Do nothing
}
}
})
}
def open()
{
val optionBGImage = backgroundImage.flatMap { file => loadFromFile(file) }
optionBGImage match {
case None => setBackground()
case Some(null) => setBackground()
case Some(image) => shell.setBackgroundImage(image)
}
shell.setBackgroundMode(SWT.INHERIT_FORCE)
setLayout()
setMoveAndResize()
shell.setAlpha(alpha)
shell.setSize(size._1, size._2)
shell.setLocation(location._1, location._2)
shell.open()
updateMessages()
}
def close()
{
if (!shell.isDisposed) {
shell.close()
}
}
}
|
fuunkaosekai/IRCBalloonJ
|
src/main/scala/notification/NotificationBlock.scala
|
Scala
|
gpl-3.0
| 9,013 |
package dao
import scala.concurrent.Future
import models.{ Category, PostCategory }
import play.api.Play
import play.api.db.slick.DatabaseConfigProvider
import play.api.db.slick.HasDatabaseConfig
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import slick.driver.JdbcProfile
import java.sql.Timestamp
trait CategoriesComponent { self: HasDatabaseConfig[JdbcProfile] =>
import driver.api._
class Categories(tag: Tag) extends Table[Category](tag, "blog_blogcategory") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def name = column[String]("title")
def slug = column[String]("slug")
def * = (id, name, slug) <> (Category.tupled, Category.unapply _)
}
class PostCategories(tag: Tag) extends Table[PostCategory](tag, "blog_blogpost_categories") {
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
def postId = column[Long]("blogpost_id")
def categoryId = column[Long]("blogcategory_id")
def * = (id, postId, categoryId) <> (PostCategory.tupled, PostCategory.unapply _)
}
}
class CategoriesDAO extends CategoriesComponent with HasDatabaseConfig[JdbcProfile] {
protected val dbConfig = DatabaseConfigProvider.get[JdbcProfile](Play.current)
import driver.api._
val categories = TableQuery[Categories]
/** Retrieve a category from the id. */
def findById(id: Long): Future[Option[Category]] =
db.run(categories.filter(_.id === id).result.headOption)
/** Retrieve a category from the slug. */
def findBySlug(slug: String): Future[Option[Category]] =
db.run(categories.filter(_.slug === slug).result.headOption)
/** Count all categories. */
def count(): Future[Int] =
db.run(categories.length.result)
}
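/** Usage sketch (illustration only, not part of the DAO): resolve a category name by slug. */
private[dao] object CategoriesDAOUsageSketch {
  def nameFor(dao: CategoriesDAO, slug: String): Future[String] =
    dao.findBySlug(slug).map(_.fold("uncategorized")(_.name))
}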
|
vjousse/play2-blog
|
app/dao/CategoriesDAO.scala
|
Scala
|
mit
| 1,714 |
/***********************************************************************
* Copyright (c) 2017 IBM
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.cassandra.index
import java.nio.ByteBuffer
import com.datastax.driver.core._
import com.datastax.driver.core.querybuilder.QueryBuilder
import com.typesafe.scalalogging.LazyLogging
import org.geotools.factory.Hints
import org.locationtech.geomesa.cassandra._
import org.locationtech.geomesa.cassandra.data._
import org.locationtech.geomesa.index.index.ClientSideFiltering.RowAndValue
import org.locationtech.geomesa.index.index.{ClientSideFiltering, IndexAdapter}
import org.locationtech.geomesa.index.utils.Explainer
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
object CassandraFeatureIndex extends CassandraIndexManagerType {
// note: keep in priority order for running full table scans
override val AllIndices: Seq[CassandraFeatureIndex] =
Seq(CassandraZ3Index, CassandraXZ3Index, CassandraZ2Index, CassandraXZ2Index, CassandraIdIndex, CassandraAttributeIndex)
override val CurrentIndices: Seq[CassandraFeatureIndex] = AllIndices
implicit class RichByteArray(val array: Array[Byte]) extends AnyVal {
def getOrElse(i: Int, default: Byte): Byte = if (array.length > i) { array(i) } else { default }
}
}
trait CassandraFeatureIndex extends CassandraFeatureIndexType
with IndexAdapter[CassandraDataStore, CassandraFeature, Seq[RowValue], Seq[RowRange]]
with ClientSideFiltering[Row] with LazyLogging {
private val sfts = new ThreadLocal[SimpleFeatureType]
private val FeatureColumn = NamedColumn("sf", -1, "blob", classOf[ByteBuffer])
protected def columns: Seq[NamedColumn]
protected def rowToColumns(sft: SimpleFeatureType, row: Array[Byte]): Seq[RowValue]
protected def columnsToRow(columns: Seq[RowValue]): Array[Byte]
override def configure(sft: SimpleFeatureType, ds: CassandraDataStore): Unit = {
super.configure(sft, ds)
val tableName = getTableName(sft.getTypeName, ds)
val cluster = ds.session.getCluster
val table = cluster.getMetadata.getKeyspace(ds.session.getLoggedKeyspace).getTable(tableName)
if (table == null) {
val (partitions, pks) = columns.partition(_.partition)
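      // partition columns form the Cassandra partition key; any remaining key columns become clustering columns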
val create = s"CREATE TABLE $tableName (${columns.map(c => s"${c.name} ${c.cType}").mkString(", ")}, sf blob, " +
s"PRIMARY KEY (${partitions.map(_.name).mkString("(", ", ", ")")}" +
s"${if (pks.nonEmpty) { pks.map(_.name).mkString(", ", ", ", "")} else { "" }}))"
logger.info(create)
ds.session.execute(create)
}
}
override def delete(sft: SimpleFeatureType, ds: CassandraDataStore, shared: Boolean): Unit = {
if (shared) {
throw new NotImplementedError() // TODO
} else {
val tableName = getTableName(sft.getTypeName, ds)
val delete = s"drop table if exists $tableName"
logger.info(delete)
ds.session.execute(delete)
}
}
abstract override def getQueryPlan(sft: SimpleFeatureType,
ds: CassandraDataStore,
filter: CassandraFilterStrategyType,
hints: Hints,
explain: Explainer): CassandraQueryPlanType = {
sfts.set(sft)
try {
super.getQueryPlan(sft, ds, filter, hints, explain)
} finally {
sfts.remove()
}
}
override protected def createInsert(row: Array[Byte], cf: CassandraFeature): Seq[RowValue] =
rowToColumns(cf.feature.getFeatureType, row) :+ RowValue(FeatureColumn, ByteBuffer.wrap(cf.fullValue))
override protected def createDelete(row: Array[Byte], cf: CassandraFeature): Seq[RowValue] =
rowToColumns(cf.feature.getFeatureType, row)
override protected def scanPlan(sft: SimpleFeatureType,
ds: CassandraDataStore,
filter: CassandraFilterStrategyType,
hints: Hints,
ranges: Seq[Seq[RowRange]],
ecql: Option[Filter]): CassandraQueryPlanType = {
import org.locationtech.geomesa.index.conf.QueryHints.RichHints
if (ranges.isEmpty) {
EmptyPlan(filter)
} else {
val ks = ds.session.getLoggedKeyspace
val tableName = getTableName(sft.getTypeName, ds)
val toFeatures = resultsToFeatures(sft, ecql, hints.getTransform)
val statements = ranges.map { criteria =>
val select = QueryBuilder.select.all.from(ks, tableName)
criteria.foreach { c =>
if (c.start == c.end) {
if (c.start != null) {
select.where(QueryBuilder.eq(c.column.name, c.start))
}
} else {
if (c.start != null) {
select.where(QueryBuilder.gte(c.column.name, c.start))
}
if (c.end != null) {
select.where(QueryBuilder.lt(c.column.name, c.end))
}
}
}
select
}
QueryPlan(filter, tableName, statements, ds.config.queryThreads, ecql, toFeatures)
}
}
override protected def range(start: Array[Byte], end: Array[Byte]): Seq[RowRange] = {
val sft = sfts.get
val startValues = rowToColumns(sft, start)
val endValues = rowToColumns(sft, end)
// TODO avoid zip...
startValues.zip(endValues).map { case (s, e) => RowRange(s.column, s.value, e.value) }
}
override protected def rangeExact(row: Array[Byte]): Seq[RowRange] =
rowToColumns(sfts.get, row).map { case RowValue(col, v) => RowRange(col, v, v) }
override def rowAndValue(result: Row): RowAndValue = {
val values = columns.map(c => RowValue(c, result.get(c.i, c.jType).asInstanceOf[AnyRef]))
val sf = result.getBytes("sf")
val row = columnsToRow(values)
val value = Array.ofDim[Byte](sf.limit())
sf.get(value)
RowAndValue(row, 0, row.length, value, 0, value.length)
}
}
|
MutahirKazmi/geomesa
|
geomesa-cassandra/geomesa-cassandra-datastore/src/main/scala/org/locationtech/geomesa/cassandra/index/CassandraFeatureIndex.scala
|
Scala
|
apache-2.0
| 6,358 |
package dotty.tools.dotc.printing
import dotty.tools.dotc.core.Constants
import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Contexts._
import dotty.tools.dotc.core.Flags._
import dotty.tools.dotc.core.NameOps._
import dotty.tools.dotc.core.Names.Name
import dotty.tools.dotc.core.Symbols._
import dotty.tools.dotc.core.Types._
import dotty.tools.dotc.printing.Texts._
class ReplPrinter(_ctx: Context) extends DecompilerPrinter(_ctx) {
val debugPrint = _ctx.settings.YprintDebug.value
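  // REPL assignment names carry a synthetic `$`-suffix; print only the user-visible prefix (e.g. `res0`).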
override def nameString(name: Name): String =
if (name.isReplAssignName) name.decode.toString.takeWhile(_ != '$')
else super.nameString(name)
override protected def exprToText(tp: ExprType): Text =
if (debugPrint) super.exprToText(tp)
else ": " ~ toText(tp.resType)
override def toText(sym: Symbol): Text =
if (sym.name.isReplAssignName) nameString(sym.name)
else if (debugPrint) super.toText(sym)
else keyString(sym) ~~ nameString(sym.name.stripModuleClassSuffix)
inline private val qSc = '"';
override def toText(const: Constant): Text =
if (debugPrint) super.toText(const)
else if (const.tag == Constants.StringTag) Str(s"${qSc}${const.value}$qSc")
else Str(const.value.toString)
override def dclText(sym: Symbol): Text = if (debugPrint) super.dclText(sym) else
("lazy": Text).provided(sym.is(Lazy)) ~~
toText(sym) ~ {
if (sym.is(Method)) toText(sym.info)
else if (sym.isType && sym.info.isTypeAlias) toText(sym.info)
else if (sym.isType || sym.isClass) ""
else ":" ~~ toText(sym.info)
}
override def toTextSingleton(tp: SingletonType): Text =
if (debugPrint)
super.toTextSingleton(tp)
else
tp match {
case ConstantType(const) => toText(const)
case _ => toTextRef(tp) ~ ".type"
}
// We don't want the colors coming from RefinedPrinter as the REPL uses its
// own syntax coloring mechanism.
override def coloredStr(text: String, color: String): String = text
override def coloredText(text: Text, color: String): Text = text
}
|
som-snytt/dotty
|
compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala
|
Scala
|
apache-2.0
| 2,110 |
/**
* Created by Variant on 16/3/26.
*/
class P[+T](val first: T, val second: T) {
  // def replceFirst(newfirst: T) = new P[T](newfirst, second)
  // contravariant position
  def replceFirst[R >: T](newfirst: R) = new P[R](newfirst, second)
}
object Variant_Position {
def main(args: Array[String]) {
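    // Usage sketch (hypothetical Person/Student types): with the lower bound R >: T,
    // new P(student, student).replceFirst(person) widens the result to P[Person].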
}
}
|
sparkLiwei/ProgrammingNote
|
scalaLearning/scalaTypeParameteriza/Variant_Position.scala
|
Scala
|
cc0-1.0
| 299 |
package controllers
import javax.inject.{ Inject, Named, Singleton }
import akka.actor.{ ActorRef, ActorSystem }
import akka.stream.Materializer
import akka.util.Timeout
import com.github.j5ik2o.spetstore.adaptor.http.{ CreateItemTypeJson, ItemTypeSupport }
import com.github.j5ik2o.spetstore.usecase.ItemTypeUseCase
import com.github.tototoshi.play2.json4s.Json4s
import org.json4s._
import play.api.mvc.{ Action, Controller }
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
@Singleton
class ItemTypeController @Inject() (json4s: Json4s, @Named("item-type-aggregate") itemTypeAggregate: ActorRef)(implicit exec: ExecutionContext, actorSystem: ActorSystem, materializer: Materializer)
extends Controller with ItemTypeSupport {
import json4s._
implicit val formats = DefaultFormats
implicit val timeout = Timeout(10 seconds)
override val itemTypeUseCase: ItemTypeUseCase = ItemTypeUseCase(itemTypeAggregate)
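  // Extracts a CreateItemTypeJson from the request body, runs the item-type creation graph,
  // and renders the result back as JSON.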
def create: Action[JValue] = Action.async(json) { implicit request =>
val createItemTypeJson = request.body.extract[CreateItemTypeJson]
createItemTypeGraph(createItemTypeJson).run().map(e => Ok(Extraction.decompose(e)))
}
}
|
j5ik2o/spetstore-cqrs-es-akka
|
play2-application/app/controllers/ItemTypeController.scala
|
Scala
|
mit
| 1,191 |
package ru.stachek66.tools
import java.io.File
import java.net.URL
import org.junit.runner.RunWith
import org.scalatest.{Ignore, FunSuite}
import org.scalatest.junit.JUnitRunner
/**
* alexeyev
* 07.09.14.
*/
@Ignore
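// These tests download from external hosts, so the whole suite is ignored by default.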
class Downloader$Test extends FunSuite {
test("downloading-something") {
val hello = new File("hello-test.html")
val mystem = new File("atmta.binary")
Downloader.downloadBinaryFile(new URL("http://www.stachek66.ru/"), hello)
Downloader.downloadBinaryFile(
new URL("http://download.cdn.yandex.net/mystem/mystem-3.0-linux3.1-64bit.tar.gz"),
mystem
)
Downloader.downloadBinaryFile(
new URL("http://download.cdn.yandex.net/mystem/mystem-3.1-win-64bit.zip"),
mystem
)
hello.delete
mystem.delete
}
test("download-and-unpack") {
val bin = new File("atmta.binary.tar.gz")
val bin2 = new File("executable")
Decompressor.select.unpack(
Downloader.downloadBinaryFile(
new URL("http://download.cdn.yandex.net/mystem/mystem-3.0-linux3.1-64bit.tar.gz"),
bin),
bin2
)
bin.delete
bin2.delete
}
}
|
alexeyev/mystem-scala
|
src/test/scala/ru/stachek66/tools/Downloader$Test.scala
|
Scala
|
mit
| 1,132 |
package spec
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import java.util.TimeZone
import org.slf4j.{LoggerFactory, MDC}
import scribe.handler.LogHandler
import scribe.output.LogOutput
import scribe.output.format.{ASCIIOutputFormat, OutputFormat}
import scribe.util.Time
import scribe.format._
import scribe.format
import scribe.writer.Writer
import scribe.{Level, LogRecord, Logger}
class SLF4JSpec extends AnyWordSpec with Matchers {
TimeZone.setDefault(TimeZone.getTimeZone("UTC"))
private var logs: List[LogRecord[_]] = Nil
private var logOutput: List[String] = Nil
private val writer = new Writer {
override def write[M](record: LogRecord[M], output: LogOutput, outputFormat: OutputFormat): Unit = {
logs = record :: logs
logOutput = output.plainText :: logOutput
}
}
private val recordHolder = LogHandler(
writer = writer,
minimumLevel = Some(Level.Info),
formatter = formatter"$dateFull ${string("[")}$levelColoredPaddedRight${string("]")} ${green(position)} - ${format.messages}$mdc"
)
"SLF4J" should {
"set the time to an arbitrary value" in {
OutputFormat.default = ASCIIOutputFormat
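      // freeze the clock so the timestamps asserted below are deterministic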
Time.function = () => 1542376191920L
}
"remove existing handlers from Root" in {
Logger.root.clearHandlers().replace()
}
"add a testing handler" in {
Logger.root.withHandler(recordHolder).replace()
}
"verify not records are in the RecordHolder" in {
logs.isEmpty should be(true)
}
"log to Scribe" in {
val logger = LoggerFactory.getLogger(getClass)
logger.info("Hello World!")
}
"verify Scribe received the record" in {
logs.size should be(1)
val r = logs.head
r.level should be(Level.Info)
r.logOutput.plainText should be("Hello World!")
r.className should be("spec.SLF4JSpec")
logs = Nil
}
"verify Scribe wrote value" in {
logOutput.size should be(1)
val s = logOutput.head
s should be("2018.11.16 13:49:51:920 [INFO ] spec.SLF4JSpec - Hello World!")
}
"use MDC" in {
MDC.put("name", "John Doe")
val logger = LoggerFactory.getLogger(getClass)
logger.info("A generic name")
logOutput.head should be("2018.11.16 13:49:51:920 [INFO ] spec.SLF4JSpec - A generic name (name: John Doe)")
}
"clear MDC" in {
MDC.clear()
val logger = LoggerFactory.getLogger(getClass)
logger.info("MDC cleared")
logOutput.head should be("2018.11.16 13:49:51:920 [INFO ] spec.SLF4JSpec - MDC cleared")
}
"make sure logging nulls doesn't error" in {
val logger = LoggerFactory.getLogger(getClass)
logger.error(null)
logs.length should be(3)
logOutput.head should be("2018.11.16 13:49:51:920 [ERROR] spec.SLF4JSpec - null")
}
}
}
|
outr/scribe
|
slf4j2/src/test/scala/spec/SLF4JSpec.scala
|
Scala
|
mit
| 2,844 |
// Generated by <a href="http://scalaxb.org/">scalaxb</a>.
package eveapi.xml.account.char.SkillInTraining
case class Eveapi(currentTime: String,
result: eveapi.xml.account.char.SkillInTraining.Result,
cachedUntil: String,
attributes: Map[String, scalaxb.DataRecord[Any]] = Map()) {
lazy val version = attributes("@version").as[BigInt]
}
case class Result(currentTQTime: eveapi.xml.account.char.SkillInTraining.CurrentTQTime,
trainingEndTime: String,
trainingStartTime: String,
trainingTypeID: BigInt,
trainingStartSP: BigInt,
trainingDestinationSP: BigInt,
trainingToLevel: BigInt,
skillInTraining: BigInt)
case class CurrentTQTime(mixed: Seq[scalaxb.DataRecord[Any]] = Nil,
attributes: Map[String, scalaxb.DataRecord[Any]] = Map()) {
lazy val offset = attributes("@offset").as[BigInt]
}
|
scala-eveapi/eveapi
|
xml/src/main/scala/eveapi/xml/char/SkillInTraining/SkillInTraining.scala
|
Scala
|
mit
| 1,001 |
package com.twitter.finagle.builder
import com.twitter.concurrent.AsyncSemaphore
import com.twitter.finagle.filter.{MaskCancelFilter, RequestSemaphoreFilter}
import com.twitter.finagle.netty3.channel.IdleConnectionFilter
import com.twitter.finagle.netty3.channel.OpenConnectionsThresholds
import com.twitter.finagle.netty3.Netty3Listener
import com.twitter.finagle.param.ProtocolLibrary
import com.twitter.finagle.server.{StackBasedServer, Listener, StackServer, StdStackServer}
import com.twitter.finagle.service.{ExpiringService, TimeoutFilter}
import com.twitter.finagle.ssl.{Ssl, Engine}
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.tracing.TraceInitializerFilter
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util._
import com.twitter.finagle.{Server => FinagleServer, _}
import com.twitter.util
import com.twitter.util.{CloseAwaitably, Duration, Future, NullMonitor, Time}
import java.net.SocketAddress
import javax.net.ssl.SSLEngine
import org.jboss.netty.channel.ServerChannelFactory
import scala.annotation.implicitNotFound
/**
* A listening server. This is for compatibility with older code that is
* using builder.Server. New code should use the ListeningServer trait.
*/
trait Server extends ListeningServer {
/**
* When a server is bound to an ephemeral port, gets back the address
* with concrete listening port picked.
*/
@deprecated("Use boundAddress", "2014-12-19")
final def localAddress: SocketAddress = boundAddress
}
/**
* Factory for [[com.twitter.finagle.builder.ServerBuilder]] instances
*/
object ServerBuilder {
type Complete[Req, Rep] = ServerBuilder[
Req, Rep, ServerConfig.Yes,
ServerConfig.Yes, ServerConfig.Yes]
def apply() = new ServerBuilder()
def get() = apply()
/**
* Provides a typesafe `build` for Java.
*/
def safeBuild[Req, Rep](service: Service[Req, Rep], builder: Complete[Req, Rep]): Server =
builder.build(service)(ServerConfigEvidence.FullyConfigured)
/**
* Provides a typesafe `build` for Java.
*/
def safeBuild[Req, Rep](
serviceFactory: ServiceFactory[Req, Rep],
builder: Complete[Req, Rep]
): Server =
builder.build(serviceFactory)(ServerConfigEvidence.FullyConfigured)
}
object ServerConfig {
sealed trait Yes
type FullySpecified[Req, Rep] = ServerConfig[Req, Rep, Yes, Yes, Yes]
def nilServer[Req, Rep] = new FinagleServer[Req, Rep] {
def serve(addr: SocketAddress, service: ServiceFactory[Req, Rep]): ListeningServer =
NullServer
}
// params specific to ServerBuilder
private[builder] case class BindTo(addr: SocketAddress) {
def mk(): (BindTo, Stack.Param[BindTo]) =
(this, BindTo.param)
}
private[builder] object BindTo {
implicit val param = Stack.Param(BindTo(new SocketAddress {
override val toString = "unknown"
}))
}
private[builder] case class MonitorFactory(mFactory: (String, SocketAddress) => util.Monitor) {
def mk(): (MonitorFactory, Stack.Param[MonitorFactory]) =
(this, MonitorFactory.param)
}
private[builder] object MonitorFactory {
implicit val param = Stack.Param(MonitorFactory((_, _) => NullMonitor))
}
private[builder] case class Daemonize(onOrOff: Boolean) {
def mk(): (Daemonize, Stack.Param[Daemonize]) =
(this, Daemonize.param)
}
private[builder] object Daemonize {
implicit val param = Stack.Param(Daemonize(false))
}
}
@implicitNotFound("Builder is not fully configured: Codec: ${HasCodec}, BindTo: ${HasBindTo}, Name: ${HasName}")
trait ServerConfigEvidence[HasCodec, HasBindTo, HasName]
private[builder] object ServerConfigEvidence {
implicit object FullyConfigured extends ServerConfigEvidence[ServerConfig.Yes, ServerConfig.Yes, ServerConfig.Yes]
}
/**
* A configuration object that represents what shall be built.
*/
private[builder] final class ServerConfig[Req, Rep, HasCodec, HasBindTo, HasName]
/**
* A handy Builder for constructing Servers (i.e., binding Services to
* a port). This class is subclassable. Override copy() and build()
* to do your own dirty work.
*
 * The main class to use is [[com.twitter.finagle.builder.ServerBuilder]], like so
* {{{
* ServerBuilder()
* .codec(Http)
* .hostConnectionMaxLifeTime(5.minutes)
* .readTimeout(2.minutes)
* .name("servicename")
* .bindTo(new InetSocketAddress(serverPort))
* .build(plusOneService)
* }}}
*
* The `ServerBuilder` requires the definition of `codec`, `bindTo`
* and `name`. In Scala, these are statically type
* checked, and in Java the lack of any of the above causes a runtime
* error.
*
* The `build` method uses an implicit argument to statically
* typecheck the builder (to ensure completeness, see above). The Java
* compiler cannot provide such implicit, so we provide a separate
* function in Java to accomplish this. Thus, the Java code for the
* above is
*
* {{{
* ServerBuilder.safeBuild(
* plusOneService,
* ServerBuilder.get()
* .codec(Http)
* .hostConnectionMaxLifeTime(5.minutes)
* .readTimeout(2.minutes)
* .name("servicename")
* .bindTo(new InetSocketAddress(serverPort)));
* }}}
*
* Alternatively, using the `unsafeBuild` method on `ServerBuilder`
* verifies the builder dynamically, resulting in a runtime error
* instead of a compiler error.
*
* =Defaults=
*
* The following defaults are applied to servers constructed via ServerBuilder,
* unless overridden with the corresponding method. These defaults were chosen
* carefully so as to work well for most use cases. Before changing any of them,
* make sure that you know exactly how they will affect your application --
* these options are typically only changed by expert users.
*
* - `openConnectionsThresholds`: None
* - `maxConcurrentRequests`: Int.MaxValue
* - `backlog`: OS-defined default value
*/
class ServerBuilder[Req, Rep, HasCodec, HasBindTo, HasName] private[builder](
val params: Stack.Params,
mk: Stack.Params => FinagleServer[Req, Rep]
) {
import ServerConfig._
import com.twitter.finagle.param._
// Convenient aliases.
type FullySpecifiedConfig = FullySpecified[Req, Rep]
type ThisConfig = ServerConfig[Req, Rep, HasCodec, HasBindTo, HasName]
type This = ServerBuilder[Req, Rep, HasCodec, HasBindTo, HasName]
private[builder] def this() = this(Stack.Params.empty, Function.const(ServerConfig.nilServer)_)
override def toString() = "ServerBuilder(%s)".format(params)
protected def copy[Req1, Rep1, HasCodec1, HasBindTo1, HasName1](
ps: Stack.Params,
newServer: Stack.Params => FinagleServer[Req1, Rep1]
): ServerBuilder[Req1, Rep1, HasCodec1, HasBindTo1, HasName1] =
new ServerBuilder(ps, newServer)
protected def configured[P: Stack.Param, HasCodec1, HasBindTo1, HasName1](
param: P
): ServerBuilder[Req, Rep, HasCodec1, HasBindTo1, HasName1] =
copy(params + param, mk)
def codec[Req1, Rep1](
codec: Codec[Req1, Rep1]
): ServerBuilder[Req1, Rep1, Yes, HasBindTo, HasName] =
this.codec((_: ServerCodecConfig) => codec)
.configured(ProtocolLibrary(codec.protocolLibraryName))
def codec[Req1, Rep1](
codecFactory: CodecFactory[Req1, Rep1]
): ServerBuilder[Req1, Rep1, Yes, HasBindTo, HasName] =
this.codec(codecFactory.server)
.configured(ProtocolLibrary(codecFactory.protocolLibraryName))
def codec[Req1, Rep1](
codecFactory: CodecFactory[Req1, Rep1]#Server
): ServerBuilder[Req1, Rep1, Yes, HasBindTo, HasName] =
stack({ ps =>
val Label(label) = ps[Label]
val BindTo(addr) = ps[BindTo]
val codec = codecFactory(ServerCodecConfig(label, addr))
val newStack = StackServer.newStack[Req1, Rep1].replace(
StackServer.Role.preparer, (next: ServiceFactory[Req1, Rep1]) =>
codec.prepareConnFactory(next)
).replace(TraceInitializerFilter.role, codec.newTraceInitializer)
case class Server(
stack: Stack[ServiceFactory[Req1, Rep1]] = newStack,
params: Stack.Params = ps
) extends StdStackServer[Req1, Rep1, Server] {
protected type In = Any
protected type Out = Any
protected def copy1(
stack: Stack[ServiceFactory[Req1, Rep1]] = this.stack,
params: Stack.Params = this.params
) = copy(stack, params)
protected def newListener(): Listener[Any, Any] =
Netty3Listener(codec.pipelineFactory, params)
protected def newDispatcher(transport: Transport[In, Out], service: Service[Req1, Rep1]) = {
// TODO: Expiration logic should be installed using ExpiringService
// in StackServer#newStack. Then we can thread through "closes"
// via ClientConnection.
val Timer(timer) = params[Timer]
val ExpiringService.Param(idleTime, lifeTime) = params[ExpiringService.Param]
val Stats(sr) = params[Stats]
val idle = if (idleTime.isFinite) Some(idleTime) else None
val life = if (lifeTime.isFinite) Some(lifeTime) else None
val dispatcher = codec.newServerDispatcher(transport, service)
(idle, life) match {
case (None, None) => dispatcher
case _ =>
new ExpiringService(service, idle, life, timer, sr.scope("expired")) {
protected def onExpire() { dispatcher.close(Time.now) }
}
}
}
}
val proto = params[ProtocolLibrary]
val serverParams =
if (proto != ProtocolLibrary.param.default) ps
else ps + ProtocolLibrary(codec.protocolLibraryName)
Server(
stack = newStack,
params = serverParams
)
})
/**
* Overrides the stack and [[com.twitter.finagle.Server]] that will be used
* by this builder.
*
* @param mk A function that materializes a `Server` from a set of `Params`.
* `mk` is passed the state of configuration when `build` is called. There is
* no guarantee that all the builder parameters will be used by the server
* created by `mk`; it is up to the discretion of the server and protocol
* implementation.
*/
@deprecated("Use stack(server: StackBasedServer)", "7.0.0")
def stack[Req1, Rep1](
mk: Stack.Params => FinagleServer[Req1, Rep1]
): ServerBuilder[Req1, Rep1, Yes, HasBindTo, HasName] =
copy(params, mk)
/**
* Overrides the stack and [[com.twitter.finagle.Server]] that will be used
* by this builder.
*
* @param server A [[StackBasedServer]] representation of a
* [[com.twitter.finagle.Server]]. `server` is materialized with the state of
* configuration when `build` is called. There is no guarantee that all
* builder parameters will be used by the resultant `Server`; it is up to the
* discretion of `server` itself and the protocol implementation.
*/
def stack[Req1, Rep1](
server: StackBasedServer[Req1, Rep1]
): ServerBuilder[Req1, Rep1, Yes, HasBindTo, HasName] = {
val withParams: Stack.Params => FinagleServer[Req1, Rep1] = { ps =>
server.withParams(server.params ++ ps)
}
copy(params, withParams)
}
def reportTo(receiver: StatsReceiver): This =
configured(Stats(receiver))
def name(value: String): ServerBuilder[Req, Rep, HasCodec, HasBindTo, Yes] =
configured(Label(value))
def sendBufferSize(value: Int): This =
configured(params[Transport.BufferSizes].copy(send = Some(value)))
def recvBufferSize(value: Int): This =
configured(params[Transport.BufferSizes].copy(recv = Some(value)))
def backlog(value: Int): This =
configured(Listener.Backlog(Some(value)))
def bindTo(address: SocketAddress): ServerBuilder[Req, Rep, HasCodec, Yes, HasName] =
configured(BindTo(address))
@deprecated("use com.twitter.finagle.netty3.numWorkers flag instead", "2015-11-18")
def channelFactory(cf: ServerChannelFactory): This =
configured(Netty3Listener.ChannelFactory(cf))
def logger(logger: java.util.logging.Logger): This =
configured(Logger(logger))
def logChannelActivity(v: Boolean): This =
configured(Transport.Verbose(v))
def tls(certificatePath: String, keyPath: String,
caCertificatePath: String = null, ciphers: String = null, nextProtos: String = null): This =
newFinagleSslEngine(() => Ssl.server(certificatePath, keyPath, caCertificatePath, ciphers, nextProtos))
/**
* Provide a raw SSL engine that is used to establish SSL sessions.
*/
def newSslEngine(newSsl: () => SSLEngine): This =
newFinagleSslEngine(() => new Engine(newSsl()))
def newFinagleSslEngine(v: () => Engine): This =
configured(Transport.TLSServerEngine(Some(v)))
/**
* Configures the maximum concurrent requests that are admitted
* by the server at any given time. If the server receives a
* burst of traffic that exceeds this limit, the burst is rejected
* with a `Failure.Rejected` exception. Note, this failure signals
* a graceful rejection which is transmitted to clients by certain
* protocols in Finagle (e.g. Http, ThriftMux).
*/
def maxConcurrentRequests(max: Int): This = {
val sem =
if (max == Int.MaxValue) None
else Some(new AsyncSemaphore(max, 0))
configured(RequestSemaphoreFilter.Param(sem))
}
def requestTimeout(howlong: Duration): This =
configured(TimeoutFilter.Param(howlong))
def keepAlive(value: Boolean): This =
configured(params[Transport.Liveness].copy(keepAlive = Some(value)))
def readTimeout(howlong: Duration): This =
configured(params[Transport.Liveness].copy(readTimeout = howlong))
def writeCompletionTimeout(howlong: Duration): This =
configured(params[Transport.Liveness].copy(writeTimeout = howlong))
def monitor(mFactory: (String, SocketAddress) => util.Monitor): This =
configured(MonitorFactory(mFactory))
@deprecated("Use tracer() instead", "7.0.0")
def tracerFactory(factory: com.twitter.finagle.tracing.Tracer.Factory): This =
tracer(factory())
// API compatibility method
@deprecated("Use tracer() instead", "7.0.0")
def tracerFactory(t: com.twitter.finagle.tracing.Tracer): This =
tracer(t)
def tracer(t: com.twitter.finagle.tracing.Tracer): This =
configured(Tracer(t))
/**
   * Cancel pending futures whenever the connection is shut down.
* This defaults to true.
*/
def cancelOnHangup(yesOrNo: Boolean): This = {
// Note: We invert `yesOrNo` as the param here because the filter's
// cancellation-masking is the inverse operation of cancelling on hangup.
configured(MaskCancelFilter.Param(!yesOrNo))
}
def hostConnectionMaxIdleTime(howlong: Duration): This =
configured(params[ExpiringService.Param].copy(idleTime = howlong))
def hostConnectionMaxLifeTime(howlong: Duration): This =
configured(params[ExpiringService.Param].copy(lifeTime = howlong))
def openConnectionsThresholds(thresholds: OpenConnectionsThresholds): This =
configured(IdleConnectionFilter.Param(Some(thresholds)))
/**
* Configures the traffic class.
*
* @see [[Listener.TrafficClass]]
*/
def trafficClass(value: Option[Int]): This =
configured(Listener.TrafficClass(value))
/**
   * When true, the server is daemonized. As with Java threads, a
   * process can exit only when all remaining servers are daemonized.
* False by default.
*/
def daemon(daemonize: Boolean): This =
configured(Daemonize(daemonize))
/**
* Provide an alternative to putting all request exceptions under
* a "failures" stat. Typical implementations may report any
   * cancellations or validation errors separately so the success rate
   * considers only valid, non-cancelled requests.
*
* @param exceptionStatsHandler function to record failure details.
*/
def exceptionCategorizer(exceptionStatsHandler: stats.ExceptionStatsHandler): This =
configured(ExceptionStatsHandler(exceptionStatsHandler))
/* Builder methods follow */
/**
* Construct the Server, given the provided Service.
*/
def build(service: Service[Req, Rep]) (
implicit THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ServerConfigEvidence[HasCodec, HasBindTo, HasName]
): Server = build(ServiceFactory.const(service))
@deprecated("Used for ABI compat", "5.0.1")
def build(service: Service[Req, Rep],
THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ThisConfig =:= FullySpecifiedConfig
): Server = build(ServiceFactory.const(service), THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION)
/**
* Construct the Server, given the provided Service factory.
*/
@deprecated("Use the ServiceFactory variant instead", "5.0.1")
def build(serviceFactory: () => Service[Req, Rep])(
implicit THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ThisConfig =:= FullySpecifiedConfig
): Server = build((_:ClientConnection) => serviceFactory())(THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION)
/**
* Construct the Server, given the provided ServiceFactory. This
* is useful if the protocol is stateful (e.g., requires authentication
* or supports transactions).
*/
@deprecated("Use the ServiceFactory variant instead", "5.0.1")
def build(serviceFactory: (ClientConnection) => Service[Req, Rep])(
implicit THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ThisConfig =:= FullySpecifiedConfig
): Server = build(new ServiceFactory[Req, Rep] {
def apply(conn: ClientConnection) = Future.value(serviceFactory(conn))
def close(deadline: Time) = Future.Done
}, THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION)
/**
* Construct the Server, given the provided ServiceFactory. This
* is useful if the protocol is stateful (e.g., requires authentication
* or supports transactions).
*/
def build(serviceFactory: ServiceFactory[Req, Rep])(
implicit THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ServerConfigEvidence[HasCodec, HasBindTo, HasName]
): Server = {
val Label(label) = params[Label]
val BindTo(addr) = params[BindTo]
val Logger(logger) = params[Logger]
val Daemonize(daemon) = params[Daemonize]
val MonitorFactory(newMonitor) = params[MonitorFactory]
val monitor = newMonitor(label, InetSocketAddressUtil.toPublic(addr)) andThen
new SourceTrackingMonitor(logger, "server")
val serverParams = params +
Monitor(monitor) +
Reporter(NullReporterFactory)
val listeningServer = mk(serverParams).serve(addr, serviceFactory)
new Server with CloseAwaitably {
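      // Non-daemon servers register an exit guard so the process stays alive until the server is closed.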
val exitGuard = if (!daemon) Some(ExitGuard.guard(s"server for '$label'")) else None
override protected def closeServer(deadline: Time): Future[Unit] = closeAwaitably {
listeningServer.close(deadline) ensure {
exitGuard.foreach(_.unguard())
}
}
override def boundAddress: SocketAddress = listeningServer.boundAddress
}
}
@deprecated("Used for ABI compat", "5.0.1")
def build(serviceFactory: ServiceFactory[Req, Rep],
THE_BUILDER_IS_NOT_FULLY_SPECIFIED_SEE_ServerBuilder_DOCUMENTATION:
ThisConfig =:= FullySpecifiedConfig
): Server = build(serviceFactory)(
new ServerConfigEvidence[HasCodec, HasBindTo, HasName]{})
/**
* Construct a Service, with runtime checks for builder
* completeness.
*/
def unsafeBuild(service: Service[Req, Rep]): Server = {
if (!params.contains[BindTo])
throw new IncompleteSpecification("No bindTo was specified")
if (!params.contains[Label])
      throw new IncompleteSpecification("No name was specified")
val sb = this.asInstanceOf[ServerBuilder[Req, Rep, Yes, Yes, Yes]]
sb.build(service)
}
}
|
a-manumohan/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/builder/ServerBuilder.scala
|
Scala
|
apache-2.0
| 19,765 |
import org.scalatest.FunSuite
import hkt._
class BasicSuite extends FunSuite {
implicit val listFunctor: Functor[List] = new Functor[List] {
def fmap[A, B](fa: List[A])(f: A => B): List[B] = fa.map(f)
}
implicit val listApplicative: Applicative[List] =
new Applicative[List] {
def point[A](a: => A): List[A] = List(a)
def apply[A,B](fa: => List[A])(f: => List[A => B]): List[B] = for {
elem <- fa
func <- f
} yield func(elem)
def fmap[A, B](fa: List[A])(f: A => B): List[B] = apply(fa)(point(f))
}
test("Functor inc test") {
def inc(list: List[Int])(implicit func: Functor[List]) = func.fmap(list)(_ + 1)
assert(inc(List(1, 2, 3)) == List(2, 3, 4))
}
test("Functor identity law test") {
val expectedList = List(1, 2, 3)
val outputList = Functor[List].fmap(expectedList)(x => x)
assert(expectedList == outputList)
}
test("Functor composition test") {
val f1 = (x: Int) => x + 1
val f2 = (x: Int) => x * 2
val inputList = List(1, 2, 3)
val compRes = Functor[List].fmap(inputList)(f2 compose f1)
val mapRes = Functor[List].fmap(Functor[List].fmap(inputList)(f1))(f2)
assert(compRes == mapRes)
}
test("Applicative add test") {
val add = (x: Int, y: Int) => x + y
val list1 = List(1, 2, 3)
val list2 = List(4, 5, 6)
val result = Applicative[List].<*>(list2)(Functor[List].fmap(list1)(add.curried))
assert(result == List(5, 6, 7, 6, 7, 8, 7, 8, 9))
}
test("Applicative identity law test") {
val expectedList = List(1, 2, 3)
val outputList = Applicative[List].<*>(expectedList)(List((x: Int) => x))
assert(expectedList == outputList)
}
test("Applicative pure composition test") {
val af = Applicative[List]
val data = 1
val inc = (x: Int) => x + 1
val res1 = af.<*>(af.pure(data))(af.pure(inc))
val res2 = af.pure(inc(data))
assert(res1 == res2)
}
test("Applicative fmap and <*> test") {
val af = Applicative[List]
val list = List(1, 2, 3)
val inc = (x: Int) => x + 1
val res1 = af.fmap(list)(inc)
val res2 = af.<*>(list)(af.pure(inc))
assert(res1 == res2)
}
test("Applicative <*> swap order test") {
val af = Applicative[List]
val data = 1
val partialAf = af.pure((x: Int) => x + 1)
val res1 = af.<*>(af.pure(data))(partialAf)
val res2 = af.<*>(partialAf)(af.pure((func: Int => Int) => func(data)))
assert(res1 == res2)
}
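  // Composition law: lifting function composition with pure and applying it once
  // must equal applying lab first and then lbc.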
test("Applicative lifting functions test") {
val af = Applicative[List]
val lab: List[Int => String] = List((x: Int) => x.toString)
val lbc: List[String => Int] = List((x: String) => x.length)
val list = List(1, 2, 3)
val comp = (bc: String => Int) => (ab: Int => String) => bc compose ab
val res1 = af.<*>( af.<*>(list)(lab) )(lbc)
val res2 = af.<*>(list)( af.<*>(lab)( af.<*>(lbc)( af.pure(comp) ) ) )
assert(res1 == res2)
}
}
// a more advanced use case & test suite
class AdvancedSuite extends FunSuite {
import scala.reflect.runtime.universe._ // for WeakTypeTag
trait Cake extends Base
with Elems
trait Base { self: Cake =>
type Rep[A]
type Def[A]
type IntRep = Rep[Int]
def toRep[A](x: A)(implicit eA: Elem[A]): Rep[A] = ??? //(s"Don't know how to create Rep for $x with element $eA")
trait Reifiable[T]
object Def {
def unapply[T](e: Rep[T]): Option[Def[T]] = def_unapply(e)
}
def def_unapply[T](e: Rep[T]): Option[Def[T]]
}
trait Elems extends Base { self: Cake =>
type Elem[A] = Element[A]
abstract class Element[A] extends Serializable {
def isEntityType: Boolean
def isBaseType: Boolean = this.isInstanceOf[BaseElem[_]]
def tag: WeakTypeTag[A]
}
def element[A](implicit ea: Elem[A]): Elem[A] = ea
class BaseElem[A](implicit val tag: WeakTypeTag[A]) extends Element[A] with Serializable {
override def isEntityType = false
}
}
trait BaseExp extends Base { self: CakeExp =>
type Rep[A] = Exp[A]
abstract class Exp[T] {
def elem: Elem[T]
def isConst: Boolean = this match {
case Def(Const(_)) => true
case _ => false
}
}
trait ReifiableExp[T] extends Reifiable[T]
type Def[A] = ReifiableExp[A]
abstract class BaseDef[T](implicit val selfType: Elem[T]) extends Def[T]
case class Const[T: Elem](x: T) extends BaseDef[T] {
def uniqueOpId = toString
}
}
trait MyDsl extends Cake
trait CakeExp extends Cake with Elems
// cake pattern in use
trait GPU extends Base { self: MyDsl =>
/** an entity for keeping data on device */
trait Device[A] extends Reifiable[Device[A]] {
/** this is pure from applicative functor, i.e. constructor */
def from(e: A): Device[A]
/** kernel launch for device */
def map[B](fn: A => B): Device[B]
}
implicit val deviceFunctor: Functor[Device] = new Functor[Device] {
def fmap[T, U](m: Device[T])(fn: T => U): Device[U] = m.map(fn)
}
}
}
|
alsam/scala-hkt-examples
|
test/hkt_test.scala
|
Scala
|
mit
| 5,106 |
class ProtobufSbt(info: sbt.ProjectInfo) extends sbt.PluginProject(info) {
/**
* Publish to a local temp repo, then rsync the files over to repo.codahale.com.
*/
override def managedStyle = sbt.ManagedStyle.Maven
val publishTo = sbt.Resolver.file("Local Cache", ("." / "target" / "repo").asFile)
def publishToLocalRepoAction = super.publishAction
override def publishAction = task {
log.info("Uploading to repo.codahale.com")
sbt.Process("rsync", "-avz" :: "target/repo/" :: "codahale.com:/home/codahale/repo.codahale.com" :: Nil) ! log
None
} describedAs("Publish binary and source JARs to repo.codahale.com") dependsOn(test, publishToLocalRepoAction)
}
|
jcarnegie/protobuf-sbt
|
project/build/ProtobufSbt.scala
|
Scala
|
mit
| 684 |
package io.circe.numbers.testing
import org.scalacheck.{ Arbitrary, Gen }
/**
* An integral string with an optional leading minus sign and between 1 and 25
* digits (inclusive).
*/
final case class IntegralString(value: String)
object IntegralString {
implicit val arbitraryIntegralString: Arbitrary[IntegralString] = Arbitrary(
for {
sign <- Gen.oneOf("", "-")
nonZero <- Gen.choose(1, 9).map(_.toString)
/**
* We want between 1 and 25 digits, with extra weight on the numbers of
* digits around the size of `Long.MaxValue`.
*/
count <- Gen.chooseNum(0, 24, 17, 18, 19)
rest <- Gen.buildableOfN[String, Char](count, Gen.numChar)
} yield IntegralString(s"$sign$nonZero$rest")
)
}
|
travisbrown/circe
|
modules/numbers-testing/src/main/scala/io/circe/numbers/testing/IntegralString.scala
|
Scala
|
apache-2.0
| 750 |
object Test {
type And[X, Y] = X & Y
val x: And[_, _] = ??? // error: unreducible
}
|
som-snytt/dotty
|
tests/neg/and-wildcard.scala
|
Scala
|
apache-2.0
| 90 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.guice
import com.google.inject.AbstractModule
import com.google.inject.Guice
import com.netflix.atlas.core.db.Database
import com.netflix.iep.guice.PreDestroyList
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import munit.FunSuite
class WebApiModuleSuite extends FunSuite {
test("load module") {
val deps = new AbstractModule {
override def configure(): Unit = {
bind(classOf[Config]).toInstance(ConfigFactory.load())
}
}
val injector = Guice.createInjector(deps, new WebApiModule, new WebApiModule)
assert(injector.getInstance(classOf[Database]) != null)
injector.getInstance(classOf[PreDestroyList]).invokeAll()
}
}
|
brharrington/atlas
|
atlas-module-webapi/src/test/scala/com/netflix/atlas/guice/WebApiModuleSuite.scala
|
Scala
|
apache-2.0
| 1,323 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package cqrs.query
import cqrs.command.DomainEvent
/**
* Represents single event that was published by some aggregate root.
*
* @param sequenceNr monotonically increasing event number for single aggregate root
*/
case class EventEnvelope(persistenceId: String, sequenceNr: Long, event: DomainEvent)
|
cqrs-endeavour/cqrs-endeavour
|
cqrs-framework/src/main/scala/cqrs/query/EventEnvelope.scala
|
Scala
|
mpl-2.0
| 512 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.exec.utils
import com.flaminem.flamy.exec.utils.Workflow.Status
import scala.collection.mutable.ListBuffer
/**
* Adds a layer to a SimpleWorkflow to keep the history of every status change.
*/
trait WorkflowHistory[T] extends Workflow[T]{
private val history: ListBuffer[(T, Status)] = ListBuffer[(T, Status)]()
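  // Each overridden transition records the (item, new status) pair before delegating to the underlying workflow.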
override def todo(v: T): Unit = {
history += v -> Workflow.Status.TODO
super.todo(v)
}
override def running(v: T): Unit = {
history += v -> Workflow.Status.RUNNING
super.running(v)
}
override def successful(v: T): Unit = {
history += v -> Workflow.Status.SUCCESSFUL
super.successful(v)
}
override def failed(v: T): Unit = {
history += v -> Workflow.Status.FAILED
super.failed(v)
}
override def skipped(v: T): Unit = {
history += v -> Workflow.Status.SKIPPED
super.skipped(v)
}
override def interrupting(v: T): Unit = {
history += v -> Workflow.Status.INTERRUPTING
super.interrupting(v)
}
override def interrupted(v: T): Unit = {
history += v -> Workflow.Status.INTERRUPTED
super.interrupted(v)
}
def getHistory: Seq[(T, Status)] = history
}
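/*
 * Hedged usage sketch (not part of the original source): assuming a concrete
 * Workflow[T] implementation such as the SimpleWorkflow mentioned in the scaladoc,
 * the history layer is stacked on via a mix-in:
 *
 *   val wf = new SimpleWorkflow[String] with WorkflowHistory[String]
 *   wf.todo("job-1"); wf.running("job-1"); wf.successful("job-1")
 *   wf.getHistory  // Seq("job-1" -> TODO, "job-1" -> RUNNING, "job-1" -> SUCCESSFUL)
 */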
|
flaminem/flamy
|
src/main/scala/com/flaminem/flamy/exec/utils/WorkflowHistory.scala
|
Scala
|
apache-2.0
| 1,754 |
import stainless.lang._
import stainless.annotation._
object PartitionExample {
sealed abstract class IList
case object Nil extends IList
case class Cons(head: BigInt, tail: IList) extends IList
def filter(l: IList, p: BigInt => Boolean): IList = {
decreases(l)
l match {
case Nil => Nil
case Cons(h, t) if p(h) => Cons(h, filter(t, p))
case Cons(_, t) => filter(t, p)
}
}
def partition(l: IList, p: BigInt => Boolean): (IList, IList) = {
decreases(l)
l match {
case Nil => (Nil, Nil)
case Cons(h, t) =>
val (l1, l2) = partition(t, p)
if (p(h)) (Cons(h, l1), l2)
else (l1, Cons(h, l2))
}
} ensuring { res =>
res._1 == filter(l, p) &&
res._2 == filter(l, (x: BigInt) => !p(x))
}
def count(l: IList, x: BigInt): BigInt = {
decreases(l)
l match {
case Nil => BigInt(0)
case Cons(h, t) => (if (h == x) BigInt(1) else BigInt(0)) + count(t, x)
}
}
def partitionMultiplicity(@induct l: IList, p: BigInt => Boolean, x: BigInt): Boolean = {
val (l1, l2) = partition(l, p)
count(l, x) == count(l1, x) + count(l2, x)
}.holds
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/PartitionExample.scala
|
Scala
|
apache-2.0
| 1,169 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller, Peng Hao
* @version 1.3
* @date Sat Nov 7 21:01:31 EST 2015
* @see LICENSE (MIT style license file).
*/
package scalation.graphalytics.mutable
import scala.collection.mutable.{Map, Set => SET}
//import scala.collection.mutable.PriorityQueue
import scalation.graphalytics.{Tree, TreeNode}
import scalation.math.ExtremeD.{MAX_VALUE, MIN_VALUE}
import scalation.graphalytics.mutable.{ExampleMGraphD => EX_GRAPH}
import scalation.util.PriorityQueue
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MinSpanningTree` class is used to build minimum cost spanning trees
* from graphs. Edge cost/weights are given by edge labels. `MinSpanningTree`
* implements Prim's algorithm.
* @see www.cse.ust.hk/~dekai/271/notes/L07/L07.pdf
* @param g the multi-digraph to build the spanning tree from
 *  @param min whether to create a minimum (true) or maximum (false) spanning tree
* @param undirected whether the graph is already undirected
*/
class MinSpanningTree (g: MGraph [Double], min: Boolean = true, undirected: Boolean = true)
extends Error
{
private val DEBUG = false // debug flag
private val size = g.size // the number of nodes for the spanning tree
private val root = new TreeNode (0, 0) // for vertex 0 in g, create a root node
private var stree: Tree = null // spanning tree built by calling span
if (! undirected) g.makeUndirected ()
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Print the spanning tree.
*/
def printSTree () { stree.printTree () }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Animate the display of the spanning tree.
*/
def aniSTree () { stree.aniTree () }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a minimum cost spanning tree for the given graph, returning true
* if a complete spanning tree connecting all of g's vertices can be created.
*/
def span (): Tree =
{
val pred = makeITree () // make an inverted tree
val el = Array.ofDim [Double] (pred.length) // copy elabel value from g into a pred elabel array
for (i <- 1 until el.length) el(i) = g.elabel(pred(i), i) // skipping root node (0)
stree = Tree (pred, el, 3.5, "st") // build spanning tree from pred array
stree
} // span
private val key = if (min) Array.fill (size)(MAX_VALUE) else Array.fill (size)(MIN_VALUE) // cost/key array
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Elem` class is used for ordering elements on a priority queue.
* @param idx the index of a node
* @param key the ordering key (based on cost) for a node
*/
case class Elem (idx: Int, key: Double)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `NodeOrder` object defines the order of node indices based on
* their 'key' value. Using -key to get "smallest first" in priority queue.
* This is for minimum spanning trees ('min' = true)
*/
object NodeOrder extends Ordering [Elem]
{
def compare (e1: Elem, e2: Elem): Int = -e1.key compare -e2.key
} // NodeOrder
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `NodeOrder` object defines the order of node indices based on
* their 'key' value. Using +key to get "largest first" in priority queue.
* This is for maximum spanning trees ('min' = false)
*/
object NodeOrder2 extends Ordering [Elem]
{
def compare (e1: Elem, e2: Elem): Int = e1.key compare e2.key
} // NodeOrder2
private val qu = PriorityQueue ()(if (min) NodeOrder else NodeOrder2) // priority queue of vertices
for (i <- 0 until size) { qu.enqueue (Elem (i, key(i))) } // put all vertices in priority queue
private val out = Array.fill (size)(true) // status of outside spanning tree
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Make an inverted tree by recording the predecessor/parent array.
 * Each node except the root will have one parent. See pseudo-code on p. 28
* @see www.cse.ust.hk/~dekai/271/notes/L07/L07.pdf
*/
def makeITree (): Array [Int] =
{
val pred = Array.fill (size)(-1) // predecessor node array
key(0) = null.asInstanceOf [Double] // start at the root (node index 0)
pred(0) = -1 // it has no predecessor/parent
while (qu.nonEmpty) { // until all vertices in spanning tree
if (DEBUG) qu.printInOrder
val pi = qu.dequeue ().idx // return and remove least cost vertex
if (DEBUG) println ("makeITree: dequeued pi = " + pi)
for (ni <- g.ch(pi)) { // iterate through its children
val cost = g.elabel (pi, ni) // get cost from edge label
if (out(ni) && (min && cost < key(ni) || !min && cost > key(ni))) {
qu.increaseKey (Elem (ni, key(ni)), Elem (ni, cost)) // reposition ni toward front in priority queue
key(ni) = cost // lower the cost for node index ni
pred(ni) = pi // set pred of ni to parent pi
} // if
} // for
out(pi) = false // now finished with pi
} // while
if (DEBUG) println ("pred = " + pred.deep)
pred
} // makeITree
} // MinSpanningTree class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MinSpanningTreeTest` object is used to test the `MinSpanningTree` class.
* > run-main scalation.graphalytics.mutable.MinSpanningTreeTest
*/
object MinSpanningTreeTest extends App
{
val g = EX_GRAPH.g2
g.printG ()
val st = new MinSpanningTree (g)
st.span ()
println ("spanning tree for graph " + g.name)
println ("-" * 60)
st.printSTree ()
  st.aniSTree ()
} // MinSpanningTreeTest object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MinSpanningTreeTest2` object is used to test the `MinSpanningTree` class.
* @see www.cse.ust.hk/~dekai/271/notes/L07/L07.pdf
* > run-main scalation.graphalytics.mutable.MinSpanningTreeTest2
*/
object MinSpanningTreeTest2 extends App
{
val g = new MGraph (Array (SET (1, 3, 4), // ch(0)
SET (2, 3), // ch(1)
SET (3, 5), // ch(2)
SET (4, 5), // ch(3)
SET (), // ch(4)
SET ()), // ch(5)
Array.fill (6)(-1.0), // vertex labels
Map ((0, 1) -> 1.0, // edge labels
(0, 3) -> 10.0,
(0, 4) -> 3.0,
(1, 2) -> 2.0,
(1, 3) -> 3.0,
(2, 3) -> 4.0,
(2, 5) -> 5.0,
(3, 4) -> 4.0,
(3, 5) -> 1.0))
g.printG ()
val st = new MinSpanningTree (g)
st.span ()
println ("spanning tree for graph " + g.name)
println ("-" * 60)
st.printSTree ()
st.aniSTree ()
} // MinSpanningTreeTest2 object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MinSpanningTreeTest3` object is used to test the `MinSpanningTree` class.
* This test the Maximum Spanning Tree option.
* @see www.cse.ust.hk/~dekai/271/notes/L07/L07.pdf
* > run-main scalation.graphalytics.mutable.MinSpanningTreeTest3
*/
object MinSpanningTreeTest3 extends App
{
val g = new MGraph (Array (SET (1, 3, 4), // ch(0)
SET (2, 3), // ch(1)
SET (3, 5), // ch(2)
SET (4, 5), // ch(3)
SET (), // ch(4)
SET ()), // ch(5)
Array.fill (6)(-1.0), // vertex labels
Map ((0, 1) -> 1.0, // edge labels
(0, 3) -> 10.0,
(0, 4) -> 3.0,
(1, 2) -> 2.0,
(1, 3) -> 3.0,
(2, 3) -> 4.0,
(2, 5) -> 5.0,
(3, 4) -> 4.0,
(3, 5) -> 1.0))
g.printG ()
val st = new MinSpanningTree (g, false)
st.span ()
println ("spanning tree for graph " + g.name)
println ("-" * 60)
st.printSTree ()
st.aniSTree ()
} // MinSpanningTreeTest3 object
|
NBKlepp/fda
|
scalation_1.3/scalation_modeling/src/main/scala/scalation/graphalytics/mutable/MinSpanningTree.scala
|
Scala
|
mit
| 9,735 |
package org.psliwa.idea.composerJson.composer.model.version
sealed trait Constraint {
def isBounded: Boolean = this match {
case SemanticConstraint(_) => true
case WildcardConstraint(None) => false
case WildcardConstraint(Some(constraint)) => constraint.isBounded
case WrappedConstraint(constraint, _, _) => constraint.isBounded
case OperatorConstraint(operator, constraint, _) => operator.isBounded && constraint.isBounded
case LogicalConstraint(constraints, operator, _) =>
operator match {
case LogicalOperator.AND => constraints.exists(_.isBounded)
case LogicalOperator.OR => constraints.forall(_.isBounded)
}
case AliasedConstraint(constraint, _, _) => constraint.isBounded
case HashConstraint(_) | HyphenRangeConstraint(_, _, _) | DateConstraint(_) => true
case DevConstraint(_) => false
case _ => false
}
def replace(f: Constraint => Option[Constraint]): Constraint = {
f(this).getOrElse(this match {
case WrappedConstraint(constraint, prefix, suffix) => WrappedConstraint(constraint.replace(f), prefix, suffix)
case WildcardConstraint(Some(constraint)) =>
constraint.replace(f) match {
case sc @ SemanticConstraint(_) => WildcardConstraint(Some(sc))
case _ => this
}
case OperatorConstraint(operator, constraint, ps) => OperatorConstraint(operator, constraint.replace(f), ps)
case AliasedConstraint(constraint, alias, ps) => AliasedConstraint(constraint.replace(f), alias, ps)
case HyphenRangeConstraint(from, to, ps) => HyphenRangeConstraint(from.replace(f), to.replace(f), ps)
case LogicalConstraint(constraints, operator, ps) =>
LogicalConstraint(constraints.map(_.replace(f)), operator, ps)
case _ => this
})
}
def contains(f: Constraint => Boolean): Boolean = {
if (f(this)) true
else
this match {
case WrappedConstraint(constraint, _, _) => constraint.contains(f)
case WildcardConstraint(Some(constraint)) => constraint.contains(f)
case OperatorConstraint(_, constraint, _) => constraint.contains(f)
case AliasedConstraint(constraint, alias, _) => constraint.contains(f) || alias.contains(f)
case HyphenRangeConstraint(from, to, _) => from.contains(f) || to.contains(f)
        case LogicalConstraint(constraints, _, _) => constraints.exists(_.contains(f))
case _ => false
}
}
def presentation: String = this match {
case SemanticConstraint(version) => version.toString
case DevConstraint(version) => "dev-" + version
case WildcardConstraint(maybeConstraint) => maybeConstraint.map(_.presentation + ".").getOrElse("") + "*"
case WrappedConstraint(constraint, prefix, suffix) =>
prefix.map(_.toString).getOrElse("") + constraint.presentation + suffix.map(_.toString).getOrElse("")
case OperatorConstraint(operator, constraint, separator) => operator.toString + separator + constraint.presentation
case DateConstraint(version) => version
case HashConstraint(version) => version
case HyphenRangeConstraint(from, to, separator) => from.presentation + separator + to.presentation
case AliasedConstraint(constraint, alias, separator) => constraint.presentation + separator + alias.presentation
case LogicalConstraint(constraints, LogicalOperator.AND, separator) =>
constraints.map(_.presentation).mkString(separator)
case LogicalConstraint(constraints, LogicalOperator.OR, separator) =>
constraints.map(_.presentation).mkString(separator)
case _ => "<unknown>"
}
}
case class SemanticConstraint(version: SemanticVersion) extends Constraint
case class WildcardConstraint(constraint: Option[SemanticConstraint]) extends Constraint
case class WrappedConstraint(constraint: Constraint, prefix: Option[String], suffix: Option[String]) extends Constraint
case class OperatorConstraint(operator: ConstraintOperator, constraint: Constraint, presentationPadding: String = "")
extends Constraint
case class LogicalConstraint(constraints: List[Constraint], operator: LogicalOperator, presentationSeparator: String)
extends Constraint
case class AliasedConstraint(constraint: Constraint, as: Constraint, presentationSeparator: String = " as ")
extends Constraint
case class HashConstraint(version: String) extends Constraint
case class DateConstraint(version: String) extends Constraint
case class DevConstraint(version: String) extends Constraint
case class HyphenRangeConstraint(from: Constraint, to: Constraint, presentationSeparator: String = " - ")
extends Constraint
sealed trait ConstraintOperator {
def isBounded = true
}
sealed trait UnboundedOperator extends ConstraintOperator {
override def isBounded = false
}
object ConstraintOperator {
case object >= extends UnboundedOperator
case object > extends UnboundedOperator
case object < extends ConstraintOperator
case object <= extends ConstraintOperator
case object != extends UnboundedOperator
case object ~ extends ConstraintOperator
case object ^ extends ConstraintOperator
val values: Set[ConstraintOperator] = Set(>=, >, <, <=, !=, ConstraintOperator.~, ^)
}
sealed trait LogicalOperator
object LogicalOperator {
case object OR extends LogicalOperator
case object AND extends LogicalOperator
}
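// Hedged usage sketch (not part of the original source; ConstraintUsageSketch is an
// illustrative name): builds the constraint ">= dev-master *" by hand and exercises
// presentation and isBounded. Only types defined in this file are used.
object ConstraintUsageSketch {
  val example: Constraint = LogicalConstraint(
    List(
      OperatorConstraint(ConstraintOperator.>=, DevConstraint("master"), " "),
      WildcardConstraint(None)
    ),
    LogicalOperator.AND,
    " "
  )
  val text: String = example.presentation // ">= dev-master *"
  // false: ">=" is an unbounded operator, dev versions and a bare wildcard are unbounded,
  // and an AND is bounded only if at least one operand is bounded.
  val bounded: Boolean = example.isBounded
}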
|
psliwa/idea-composer-plugin
|
src/main/scala/org/psliwa/idea/composerJson/composer/model/version/Constraint.scala
|
Scala
|
mit
| 5,306 |
package io.circe.tests
import io.circe.Json
import org.scalacheck.{ Arbitrary, Gen }
trait ArbitraryInstances {
private[this] def maxDepth: Int = 5
private[this] def maxSize: Int = 20
private[this] def genNull: Gen[Json] = Gen.const(Json.empty)
private[this] def genBool: Gen[Json] = Arbitrary.arbBool.arbitrary.map(Json.bool)
private[this] def genNumber: Gen[Json] = Gen.oneOf(
Arbitrary.arbLong.arbitrary.map(Json.long),
Arbitrary.arbDouble.arbitrary.map(Json.numberOrNull)
)
private[this] def genString: Gen[Json] = Arbitrary.arbString.arbitrary.map(Json.string)
private[this] def genArray(depth: Int): Gen[Json] = Gen.choose(0, maxSize).flatMap { size =>
Gen.listOfN(
size,
arbitraryJsonAtDepth(depth + 1).arbitrary
).map(Json.array)
}
private[this] def genObject(depth: Int): Gen[Json] = Gen.choose(0, maxSize).flatMap { size =>
Gen.listOfN(
size,
for {
k <- Arbitrary.arbString.arbitrary
v <- arbitraryJsonAtDepth(depth + 1).arbitrary
} yield k -> v
).map(Json.obj)
}
private[this] def arbitraryJsonAtDepth(depth: Int): Arbitrary[Json] = {
val genJsons = List( genNumber, genString) ++ (
if (depth < maxDepth) List(genArray(depth), genObject(depth)) else Nil
)
Arbitrary(Gen.oneOf(genNull, genBool, genJsons: _*))
}
implicit def arbitraryJson: Arbitrary[Json] = arbitraryJsonAtDepth(0)
}
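// Hedged usage sketch (not part of the original source; ArbitraryInstancesSketch is an
// illustrative name): mixing in the trait brings the implicit Arbitrary[Json] into scope,
// so ScalaCheck can sample random documents bounded by maxDepth/maxSize.
object ArbitraryInstancesSketch extends ArbitraryInstances {
  // Gen.sample may return None when a generator gives up, hence the Option.
  val sampled: Option[Json] = implicitly[Arbitrary[Json]].arbitrary.sample
}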
|
ktoso/circe
|
tests/shared/src/main/scala/io/circe/tests/ArbitraryInstances.scala
|
Scala
|
apache-2.0
| 1,419 |
package filodb.prometheus.ast
import filodb.query.BinaryOperator
/*
* The following label matching operators exist:
 * =:  Select labels that are exactly equal to the provided string.
 * !=: Select labels that are not equal to the provided string.
 * =~: Select labels that regex-match the provided string (or substring).
 * !~: Select labels that do not regex-match the provided string (or substring).
 *
* The following binary comparison operators exist in Prometheus:
* == (equal)
* != (not-equal)
* > (greater-than)
* < (less-than)
* >= (greater-or-equal)
* <= (less-or-equal)
*
* Set Operators are Or, And and Unless
*/
trait Operators {
sealed trait PromToken
sealed trait Operator extends PromToken {
def getPlanOperator: BinaryOperator
}
case object EqualMatch extends Operator {
override def getPlanOperator: BinaryOperator = BinaryOperator.EQL
}
case object RegexMatch extends Operator {
override def getPlanOperator: BinaryOperator = BinaryOperator.EQLRegex
}
case object NotRegexMatch extends Operator {
override def getPlanOperator: BinaryOperator = BinaryOperator.NEQRegex
}
sealed trait Comparision extends Operator {
def isBool: Boolean
}
case class NotEqual(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.NEQ else BinaryOperator.NEQ_BOOL
}
case class Eq(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.EQL else BinaryOperator.EQL_BOOL
}
case class Gt(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.GTR else BinaryOperator.GTR_BOOL
}
case class Gte(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.GTE else BinaryOperator.GTE_BOOL
}
case class Lt(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.LSS else BinaryOperator.LSS_BOOL
}
case class Lte(isBool: Boolean) extends Comparision {
override def getPlanOperator: BinaryOperator = if (!isBool) BinaryOperator.LTE else BinaryOperator.LTE_BOOL
}
case class LabelMatch(label: String, labelMatchOp: Operator, value: String) extends PromToken
sealed trait ArithmeticOp extends Operator
case object Add extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.ADD
}
case object Sub extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.SUB
}
case object Mul extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.MUL
}
case object Div extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.DIV
}
case object Mod extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.MOD
}
case object Pow extends ArithmeticOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.POW
}
sealed trait SetOp extends Operator
case object And extends SetOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.LAND
}
case object Or extends SetOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.LOR
}
case object Unless extends SetOp {
override def getPlanOperator: BinaryOperator = BinaryOperator.LUnless
}
}
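// Hedged usage sketch (not part of the original source; OperatorsUsageSketch is an
// illustrative name): the tokens live inside the Operators trait, so client code mixes
// it in (as the parser does) and maps each token to its query-plan operator.
object OperatorsUsageSketch extends Operators {
  val boolGreater: BinaryOperator = Gt(isBool = true).getPlanOperator // BinaryOperator.GTR_BOOL
  val regexSelector: LabelMatch = LabelMatch("job", RegexMatch, "api.*")
}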
|
tuplejump/FiloDB
|
prometheus/src/main/scala/filodb/prometheus/ast/Operators.scala
|
Scala
|
apache-2.0
| 3,557 |
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2
import java.nio.ByteBuffer
import scala.collection.immutable.BitSet
import shark.memstore2.column.ColumnIterator
import shark.memstore2.column.ColumnIteratorFactory
/**
* An iterator for a partition of data. Each element returns a ColumnarStruct
* that can be read by a ColumnarStructObjectInspector.
*
* @param numRows: total number of rows in this partition.
* @param columnIterators: iterators for all columns.
 * @param columnUsed: an optional bitmap indicating whether a column is used.
*/
class TablePartitionIterator(
val numRows: Long,
val columnIterators: Array[ColumnIterator],
val columnUsed: BitSet = null)
extends Iterator[ColumnarStruct] {
private val _struct = new ColumnarStruct(columnIterators)
private var _position: Long = 0
def hasNext(): Boolean = _position < numRows
def next(): ColumnarStruct = {
_position += 1
var i = 0
while (i < _columnIteratorsToAdvance.size) {
_columnIteratorsToAdvance(i).next
i += 1
}
_struct
}
// Track the list of columns we need to call next on.
private val _columnIteratorsToAdvance: Array[ColumnIterator] = {
if (columnUsed == null) {
columnIterators
} else {
columnUsed.map(colId => columnIterators(colId)).toArray
}
}
}
|
sameeragarwal/blinkdb_dev
|
src/main/scala/shark/memstore2/TablePartitionIterator.scala
|
Scala
|
apache-2.0
| 1,956 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.param.shared
import org.apache.spark.ml.param._
/**
* Created by dirceu on 14/04/16.
*/
trait HasTimeCol extends Params {
/**
   * Param for time series column name.
*
* @group param
*/
final val timeCol: Param[Option[String]] =
new Param[Option[String]](this, "timeCol", "time series column name")
setDefault(timeCol, None)
/** @group getParam */
final def getTimeCol: Option[String] = $(timeCol)
}
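/**
 * Hedged usage sketch (not part of the original source; TimeAwareParams is a hypothetical
 * name): a concrete Params implementation mixes in HasTimeCol and exposes a setter.
 * set(...) and defaultCopy(...) are the protected helpers inherited from Params.
 */
class TimeAwareParams(override val uid: String) extends Params with HasTimeCol {
  def this() = this(org.apache.spark.ml.util.Identifiable.randomUID("timeAware"))
  /** Hypothetical convenience setter for the time series column. */
  def setTimeCol(value: String): this.type = set(timeCol, Some(value))
  override def copy(extra: ParamMap): TimeAwareParams = defaultCopy(extra)
}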
|
eleflow/uberdata
|
iuberdata_core/src/main/scala/org/apache/spark/ml/param/shared/HasTimeCol.scala
|
Scala
|
apache-2.0
| 1,054 |
package com.lunatic.mlx.kddcup99.mllib.transformers
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
/**
* Remove the columns in the given list
*/
case class ColumnRemover[T: ClassTag](removableColumns: Option[List[Int]] = None) extends Transformer[Array[T], Array[T]] {
//class ColumnRemover[T: ClassTag] extends Transformer[Array[T], Array[T], ColumnRemover[T]] {
override def transform(input: RDD[Array[T]]): RDD[Array[T]] =
removableColumns.map(rc =>
input.map { v => v.zipWithIndex.filterNot(x => rc.contains(x._2)).map(_._1) }
).getOrElse(input)
override def transform(input: Array[T]): Array[T] = {
removableColumns.map(rc =>
input.zipWithIndex.filterNot(x => rc.contains(x._2)).map(_._1)
).getOrElse(input)
}
// type TransParams <: ColumnRemovedParams
//
// override def transform(input: RDD[Array[T]], params: Option[TransParams] = None): RDD[Array[T]] =
// params.map(rc =>
// input.map { v => v.zipWithIndex.filterNot(x => rc.removableColumns.contains(x._2)).map(_._1) }
// ).getOrElse(input)
//
// override def transform(input: Array[T], params: Option[TransParams] = None): Array[T] = {
// params.map(rc =>
// input.zipWithIndex.filterNot(x => rc.removableColumns.contains(x._2)).map(_._1)
// ).getOrElse(input)
// }
}
//case class ColumnRemovedParams[T: ClassTag](removableColumns: List[Int]) extends Params[ColumnRemover[T]]
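// Hedged usage sketch (not part of the original source; ColumnRemoverSketch is an
// illustrative name): the Array overload can be exercised without a SparkContext;
// the columns at indices 1 and 3 are dropped.
object ColumnRemoverSketch {
  def main(args: Array[String]): Unit = {
    val remover = ColumnRemover[String](Some(List(1, 3)))
    val row = Array("a", "b", "c", "d", "e")
    println(remover.transform(row).mkString(",")) // prints: a,c,e
  }
}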
|
tupol/sparx-mllib
|
src/main/scala/com/lunatic/mlx/kddcup99/mllib/transformers/ColumnRemover.scala
|
Scala
|
apache-2.0
| 1,421 |
package cloudcmd.common.util
import cloudcmd.common.FileUtil
import org.json.JSONArray
import org.json.JSONException
import org.json.JSONObject
import java.io._
import scala.collection.mutable
object JsonUtil {
def toJsonArray(data: List[String]) : JSONArray = {
val arr = new JSONArray()
data.foreach(arr.put)
arr
}
def toJsonArray(data: Set[String]) : JSONArray = {
val arr = new JSONArray()
data.foreach(arr.put)
arr
}
def toStringMap(obj: JSONObject) : Map[String, String] = {
import scala.collection.JavaConversions._
Map() ++ obj.keys().flatMap { key =>
Map(key.asInstanceOf[String] -> obj.getString(key.asInstanceOf[String]))
}
}
def mergeLeft(dest: JSONObject, src: JSONObject) {
val keys = src.keys
while (keys.hasNext) {
val key = keys.next.asInstanceOf[String]
dest.put(key, src.get(key))
}
}
def createJsonObject(args: AnyRef*): JSONObject = {
if (args.length % 2 != 0) throw new IllegalArgumentException("missing last value: args require key/value pairs")
val obj = new JSONObject
(0 until args.length by 2).foreach(i => obj.put(args(i).toString, args(i + 1)))
obj
}
/***
* For convenience, THIS METHOD CLOSES THE inputstream
*/
def loadJsonArray(load: InputStream): JSONArray = {
var dis : DataInputStream = null
var isr : InputStreamReader = null
var br : BufferedReader = null
try {
dis = new DataInputStream(load)
isr = new InputStreamReader(dis)
br = new BufferedReader(isr)
new JSONArray(br.readLine)
}
finally {
FileUtil.SafeClose(br)
FileUtil.SafeClose(isr)
FileUtil.SafeClose(dis)
FileUtil.SafeClose(load)
}
}
def loadJson(is: InputStream): JSONObject = {
var dis : DataInputStream = null
var isr : InputStreamReader = null
var br : BufferedReader = null
try {
dis = new DataInputStream(is)
isr = new InputStreamReader(dis)
br = new BufferedReader(isr)
new JSONObject(br.readLine)
}
finally {
FileUtil.SafeClose(br)
FileUtil.SafeClose(isr)
FileUtil.SafeClose(dis)
FileUtil.SafeClose(is)
}
}
def createSet(array: JSONArray): Set[String] = {
// TODO: what's the fastest way to do this?
(0 until array.length).map(array.getString).filter(_.length > 0).toSet
}
def createSet(str: String, delimiter: String): Set[String] = {
str.split(delimiter).filter(_.length > 0).toSet
}
def prettyToString(jsonObject: JSONObject): String = {
try {
jsonObject.toString(2)
}
catch {
case e: JSONException => jsonObject.toString
}
}
}
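// Hedged usage sketch (not part of the original source; JsonUtilSketch is an illustrative
// name): builds two objects and merges the second into the first, so the later "version"
// value wins. Key order in the printed JSON is not guaranteed by org.json.
object JsonUtilSketch {
  def main(args: Array[String]): Unit = {
    val dest = JsonUtil.createJsonObject("name", "cloudcmd", "version", "1.0")
    val src = JsonUtil.createJsonObject("version", "1.1")
    JsonUtil.mergeLeft(dest, src)
    println(JsonUtil.prettyToString(dest)) // e.g. {"version": "1.1", "name": "cloudcmd"}
  }
}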
|
briangu/cloudcmd
|
common/src/main/scala/cloudcmd/common/util/JsonUtil.scala
|
Scala
|
apache-2.0
| 2,670 |
/*
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.apollo.cluster
import dto._
import org.fusesource.hawtdispatch._
import java.lang.String
import org.apache.activemq.apollo.broker._
import org.apache.activemq.apollo.util._
import org.fusesource.hawtbuf.proto.MessageBuffer
import org.fusesource.fabric.apollo.cluster.model._
import org.fusesource.hawtbuf._
import scala.collection.mutable.{HashMap, HashSet}
import org.fusesource.hawtbuf.Buffer._
import org.apache.activemq.apollo.broker.protocol.ProtocolFactory
import org.apache.activemq.apollo.broker.store.MessageRecord
import org.fusesource.fabric.apollo.cluster.protocol.{ClusterProtocolConstants, ClusterProtocolCodec, ClusterProtocolHandler}
import ClusterProtocolConstants._
import org.apache.activemq.apollo.dto._
object Peer extends Log
/**
 * A peer is a remote broker which is part of the local broker's cluster.
* Multiple connections can exist to a peer broker and this class tracks
* both the inbound and outbound connections to that peer.
*/
class Peer(cluster_connector:ClusterConnector, val id:String) extends Dispatched {
import Peer._
val channel_window_size: Int = 64*1024
implicit def to_buffer(value:Long):Buffer = {
val os = new DataByteArrayOutputStream
os.writeVarLong(value)
os.toBuffer
}
val dispatch_queue = createQueue("peer:"+id)
var outbound:Queue = _
var joined_cluster_at = System.currentTimeMillis
var left_cluster_at = 0L
var peer_info:ClusterNodeDTO = _
///////////////////////////////////////////////////////////////////////////////
//
// Connection Lifecycle. These occur for both inbound and outbound connections.
//
///////////////////////////////////////////////////////////////////////////////
var connecting = false
var handlers = HashSet[ClusterProtocolHandler]()
var primary:ClusterProtocolHandler = _
var session_manager:SessionSinkMux[Frame] = _
var connection_sink:Sink[Frame] = _
private def connection_send(command:Int, data:MessageBuffer[_,_]):Unit = connection_send(command, data.toFramedBuffer)
private def connection_send(command:Int, data:Buffer):Unit = connection_send(Frame(command, data))
private def connection_send(frame:Frame):Unit = dispatch_queue {
if( connection_sink!=null ) {
val accepted = connection_sink.offer(frame)
assert(accepted)
}
}
def on_client_connected(handler:ClusterProtocolHandler) = {
val accepted = handler.connection.transport.offer(create_hello_frame(handler))
assert(accepted)
}
def on_client_hello(handler:ClusterProtocolHandler, hello:ProtocolHello.Buffer) = {
// This is the hello sent from the client.. so we are the server.
on_hello(handler, hello)
// we need to send the client a hello now..
handler.dispatch_queue {
val accepted = handler.connection.transport.offer(create_hello_frame(handler))
assert(accepted)
}
}
def on_server_hello(handler:ClusterProtocolHandler, hello:ProtocolHello.Buffer) = {
// Server just responded to the client hello we sent in on_client_connected method.
on_hello(handler, hello)
}
private def on_hello(handler:ClusterProtocolHandler, hello:ProtocolHello.Buffer) = {
if( hello.getVersion != PROTOCOL_VERSION ) {
handler.die("Unsupported protocol version: "+hello.getVersion)
}
if( id != hello.getId ) {
handler.die("Peer's id does not match what was expected.")
}
val local_tokens:Set[String] = {
import collection.JavaConversions._
collectionAsScalaIterable(cluster_connector.config.security_tokens).toSet
}
// does the client need to give us a matching token?
if( !local_tokens.isEmpty ) {
// We just need one token to match. Nodes may be configured with multiple tokens
// when a token is getting changed across a cluster.
val remote_tokens:Set[String] = collection.JavaConversions.collectionAsScalaIterable(hello.getSecurityTokensList).toSet
val intersection = local_tokens.intersect( remote_tokens )
if( intersection.isEmpty ) {
handler.die("Peer did not supply a valid security token.")
}
}
dispatch_queue {
handlers += handler
connecting = false
if( primary == null ) {
make_primary(handler)
}
}
}
def create_hello_frame(handler:ClusterProtocolHandler) = {
val hello = new ProtocolHello.Bean
hello.setVersion(PROTOCOL_VERSION)
hello.setId(cluster_connector.node_id)
hello.setAddress(handler.connection.transport.getRemoteAddress.toString)
hello.addAllSecurityTokens(cluster_connector.config.security_tokens)
Frame(COMMAND_HELLO, hello.freeze.toFramedBuffer)
}
def on_peer_disconnected(handler:ClusterProtocolHandler) = dispatch_queue {
handlers -= handler
if( handler == primary ) {
outbound_channels.values.foreach( _.peer_disconnected )
primary = null
if( !handlers.isEmpty ) {
make_primary(handlers.head)
}
}
}
def make_primary(handler: ClusterProtocolHandler): Unit = {
assert_executing
primary = handler
session_manager = new SessionSinkMux[Frame](handler.connection.transport_sink.map(x => x), handler.dispatch_queue, Frame)
connection_sink = new OverflowSink(session_manager.open(dispatch_queue));
// resend the consumer infos...
exported_consumers.values.foreach { consumer =>
connection_send(COMMAND_ADD_CONSUMER, consumer.consumer_info)
}
outbound_channels.values.foreach(_.peer_connected)
}
//
// Lets try to maintain at least one connection up for now.. we might
// want to relax this in big clusters.
//
def check() = dispatch_queue {
// Should we try to connect to the peer?
if( !connecting && handlers.isEmpty && peer_info.cluster_address!=null ) {
connecting = true
cluster_connector.connect(peer_info.cluster_address) {
case Success(connection) =>
connection.transport.setProtocolCodec(new ClusterProtocolCodec)
connection.protocol_handler = new ClusterProtocolHandler(this)
connection.protocol_handler.connection = connection
connection.protocol_handler.on_transport_connected
case Failure(error) =>
connecting = false
}
}
}
def close() = {
handlers.foreach{ handler=>
if( handler == primary ) {
outbound_channels.values.foreach( _.peer_disconnected )
primary = null
}
handlers -= handler
handler.connection.stop(NOOP)
}
}
//////////////////////////////////////////////////////////////////////////////
//
// Internal support methods.
//
//////////////////////////////////////////////////////////////////////////////
private def get_virtual_host(host:AsciiBuffer) = cluster_connector.broker.get_virtual_host(host).asInstanceOf[ClusterVirtualHostDTO]
///////////////////////////////////////////////////////////////////////////////
//
// Handle events from the connections.
//
///////////////////////////////////////////////////////////////////////////////
def on_frame(source:ClusterProtocolHandler, frame:Frame) = dispatch_queue {
trace(id+" got "+to_string(frame))
frame.command match {
case COMMAND_ADD_CONSUMER =>
val consumer = ConsumerInfo.FACTORY.parseFramed(frame.data)
on_add_consumer(consumer)
case COMMAND_REMOVE_CONSUMER =>
val consumer = ConsumerInfo.FACTORY.parseFramed(frame.data)
on_remove_consumer(consumer)
case COMMAND_CHANNEL_OPEN =>
val producer = ChannelOpen.FACTORY.parseFramed(frame.data)
on_channel_open(producer)
case COMMAND_CHANNEL_SEND =>
val delivery = ChannelDelivery.FACTORY.parseFramed(frame.data)
on_channel_send(delivery)
case COMMAND_CHANNEL_ACK =>
val ack = ChannelAck.FACTORY.parseFramed(frame.data)
on_channel_ack(ack)
case COMMAND_CHANNEL_CLOSE =>
on_channel_close(frame.data.buffer.bigEndianEditor.readVarInt)
case value =>
        source.die("Unknown command value: "+value)
}
}
def to_string(frame:Frame) = {
frame.command match {
case COMMAND_ADD_CONSUMER =>
"COMMAND_ADD_CONSUMER("+ConsumerInfo.FACTORY.parseFramed(frame.data)+")"
case COMMAND_REMOVE_CONSUMER =>
"COMMAND_REMOVE_CONSUMER("+ConsumerInfo.FACTORY.parseFramed(frame.data)+")"
case COMMAND_CHANNEL_OPEN =>
"COMMAND_CHANNEL_OPEN("+ChannelOpen.FACTORY.parseFramed(frame.data)+")"
case COMMAND_CHANNEL_SEND =>
"COMMAND_CHANNEL_SEND("+ChannelDelivery.FACTORY.parseFramed(frame.data)+")"
case COMMAND_CHANNEL_ACK =>
"COMMAND_CHANNEL_ACK("+ChannelAck.FACTORY.parseFramed(frame.data)+")"
case COMMAND_CHANNEL_CLOSE =>
"COMMAND_CHANNEL_CLOSE("+frame.data.buffer.bigEndianEditor.readVarInt+")"
case value =>
"UNKNOWN"
}
}
def create_connection_status(source:ClusterProtocolHandler) = {
var rc = new ClusterConnectionStatusDTO
rc.waiting_on = source.waiting_on()
rc.node_id = id
rc.exported_consumer_count = exported_consumers.size
rc.imported_consumer_count = imported_consumers.size
outbound_channels.foreach { case (key,value)=>
val s = new ChannelStatusDTO
s.id = key
s.byte_credits = value.byte_credits
s.delivery_credits = value.delivery_credits
s.connected = value.connected
rc.outbound_channels.add(s)
}
inbound_channels.foreach { case (key,value)=>
val s = new ChannelStatusDTO
s.id = key
s.byte_credits = value.byte_credits
s.delivery_credits = value.delivery_credits
s.connected = value.connected
rc.outbound_channels.add(s)
}
rc
}
///////////////////////////////////////////////////////////////////////////////
//
// Consumer management. Allows master brokers to know about consumers
// on remote peers.
//
///////////////////////////////////////////////////////////////////////////////
var next_consumer_id = 0L
val exported_consumers = HashMap[Long, ExportedConsumer]()
val imported_consumers = HashMap[Long, ClusterDeliveryConsumer]()
def add_cluster_consumer( bean:ConsumerInfo.Bean, consumer:DeliveryConsumer) = {
dispatch_queue.assertExecuting()
val consumer_id = next_consumer_id
bean.setConsumerId(consumer_id)
next_consumer_id += 1
val exported = new ExportedConsumer(bean.freeze, consumer)
exported_consumers.put(consumer_id, exported)
connection_send(COMMAND_ADD_CONSUMER, exported.consumer_info)
exported
}
case class ExportedConsumer(consumer_info:ConsumerInfo.Buffer, consumer:DeliveryConsumer) {
def close() = {
assert_executing
exported_consumers -= consumer_info.getConsumerId.longValue
connection_send(COMMAND_REMOVE_CONSUMER, consumer_info)
}
}
private def unit = {}
def on_add_consumer(consumer_info:ConsumerInfo.Buffer) = {
assert_executing
val consumer_id = consumer_info.getConsumerId.longValue
if( !imported_consumers.contains(consumer_id) ) {
val consumer = new ClusterDeliveryConsumer(consumer_info)
imported_consumers.put(consumer_id, consumer)
cluster_connector.broker.dispatch_queue {
val host = cluster_connector.broker.get_virtual_host(consumer_info.getVirtualHost)
// assert(host!=null, "Unknown virtual host: "+consumer_info.getVirtualHost)
val router = host.router.asInstanceOf[ClusterRouter]
router.bind(consumer.destinations, consumer, null)
}
}
}
def on_remove_consumer(info:ConsumerInfo.Buffer) = {
assert_executing
imported_consumers.remove(info.getConsumerId.longValue).foreach { consumer=>
cluster_connector.broker.dispatch_queue {
val host = cluster_connector.broker.get_virtual_host(consumer.info.getVirtualHost)
if( host!=null ) {
val router = host.router.asInstanceOf[ClusterRouter]
router.unbind(consumer.destinations, consumer, false, null)
}
}
}
}
class ClusterDeliveryConsumer(val info:ConsumerInfo.Buffer) extends BaseRetained with DeliveryConsumer {
import collection.JavaConversions._
def consumer_id = info.getConsumerId.longValue
def destinations:Array[BindAddress] = info.getDestinationList.toSeq.toArray.
map(x=> XmlCodec.decode(classOf[DestinationDTO], new ByteArrayInputStream(x))).map { x=>
val rc:BindAddress = x match {
case x:TopicDestinationDTO=> SimpleAddress("topic", DestinationAddress.decode_path(x.name))
case x:QueueDestinationDTO=> SimpleAddress("queue", DestinationAddress.decode_path(x.name))
case x:DurableSubscriptionDestinationDTO=>
if( x.is_direct() ) {
SimpleAddress("dsub", DestinationAddress.decode_path(x.name))
} else {
SubscriptionAddress(DestinationAddress.decode_path(x.name), x.selector, x.topics.toSeq.toArray.map{ topic=>
SimpleAddress("topic", DestinationAddress.decode_path(topic.name))
})
}
}
rc
}
def matches(message: Delivery): Boolean = true
def is_persistent: Boolean = false
def dispatch_queue: DispatchQueue = Peer.this.dispatch_queue
def connect(p: DeliveryProducer): DeliverySession = {
val open = new ChannelOpen.Bean
open.setConsumerId(consumer_id)
new MutableSink[Delivery] with DeliverySession {
var closed = false
val channel = open_channel(p.dispatch_queue, open)
if( !closed ) {
downstream = Some(channel)
} else {
channel.close
}
def close: Unit = {
if( !closed ) {
closed = true
downstream.foreach(_.asInstanceOf[Peer#OutboundChannelSink].close)
}
}
def producer: DeliveryProducer = p
def consumer: DeliveryConsumer = ClusterDeliveryConsumer.this
def remaining_capacity = downstream.map(_.asInstanceOf[OutboundChannelSink].remaining_capacity).getOrElse(0)
@volatile
var enqueue_item_counter = 0L
@volatile
var enqueue_size_counter = 0L
@volatile
var enqueue_ts = 0L
override def offer(value: Delivery) = {
if( super.offer(value) ){
enqueue_item_counter += 1
enqueue_size_counter += value.size
enqueue_ts = now
true
} else {
false
}
}
}
}
}
def now = this.cluster_connector.broker.now
///////////////////////////////////////////////////////////////////////////////
//
// Channel Management: A channel provides a flow controlled message
// delivery window between the brokers. Used for both producer to destination
// and destination to consumer deliveries.
//
///////////////////////////////////////////////////////////////////////////////
var next_channel_id = 0L
val outbound_channels = HashMap[Long, OutboundChannelSink]()
def open_channel(q:DispatchQueue, open:ChannelOpen.Bean) = {
dispatch_queue.assertExecuting()
open.setChannel(next_channel_id)
next_channel_id += 1
val channel = new OutboundChannelSink(q, open.freeze)
outbound_channels.put(channel.id, channel)
debug("opening channel %d to peer %s", channel.id, id)
connection_send(COMMAND_CHANNEL_OPEN, channel.open_command)
channel
}
class OutboundChannelSink(val producer_queue:DispatchQueue, val open_command:ChannelOpen.Buffer) extends Sink[Delivery] with SinkFilter[Frame] {
def id = open_command.getChannel.longValue
// The deliveries waiting for acks..
val waiting_for_ack = HashMap[Long, Delivery]()
var last_seq_id = 0L
// Messages overflow.. while the peer is disconnected.
val sink_switcher = new MutableSink[Frame]()
// On the next delivery, we will tell the consumer what was the
// last ack from him that we saw, so that he can discard data
// needed to do duplicate/redelivery detection.
var next_ack_seq:Option[Long] = None
def remaining_capacity = {
if ( byte_credits > 0 ) {
byte_credits
} else {
delivery_credits
}
}
def byte_credits = session.byte_credits
def delivery_credits = session.delivery_credits
def connected = sink_switcher.downstream.isDefined
val session = new OverflowSink[Frame](sink_switcher) {
var byte_credits = 0
var delivery_credits = 0
override def full: Boolean = super.full || ( byte_credits <= 0 && delivery_credits <= 0 )
override def offer(frame: Frame): Boolean = {
byte_credits -= frame.data.length
delivery_credits -= 1
trace("outbound-channel %d: decreased credits (%d,%d) ... window (%d,%d)".format(open_command.getChannel.longValue, 1, frame.data.length, delivery_credits, byte_credits));
super.offer(frame)
}
def on_channel_ack(ack:ChannelAck.Buffer) = {
val was_full = full
if( ack.hasByteCredits ) {
byte_credits += ack.getByteCredits
}
if( ack.hasDeliveryCredits ) {
delivery_credits += ack.getDeliveryCredits
}
trace("outbound-channel %d: increased credits (%d,%d) ... window (%d,%d)".format(open_command.getChannel.longValue, ack.getDeliveryCredits, ack.getByteCredits, delivery_credits, byte_credits));
if( !full && was_full ) {
drain
}
}
}
if( primary!=null ) {
peer_connected
}
def peer_connected = {
producer_queue {
sink_switcher.downstream = Some(session_manager.open(producer_queue))
// Queue up the re-deliveries...
waiting_for_ack.foreach { case (seq, delivery) =>
session.offer(to_frame(Some(seq), delivery))
}
}
}
def peer_disconnected = {
sink_switcher.downstream = None
next_ack_seq = None
session.clear
}
def close() = {
outbound_channels.remove(id)
connection_send(COMMAND_CHANNEL_CLOSE, open_command.getChannel)
}
def next_seq_id = {
val rc = last_seq_id
last_seq_id += 1
rc
}
def to_frame(seq:Option[Long], value: Delivery):Frame = {
val record = value.createMessageRecord
val bean = new ChannelDelivery.Bean
bean.setChannel(open_command.getChannel)
seq.foreach(bean.setSeq(_))
bean.setProtocol(record.protocol)
bean.setData(record.buffer)
next_ack_seq.foreach{x=>
bean.setAckSeq(x)
next_ack_seq = None
}
Frame(COMMAND_CHANNEL_SEND, bean.freeze.toFramedBuffer)
}
def downstream = session
def offer(delivery: Delivery): Boolean = {
if( full ) {
false
} else {
// deliveries only get a seq if they need an ack..
val seq = Option(delivery.ack).map { x=>
val seq = next_seq_id
waiting_for_ack.put(seq, delivery)
seq
}
session.offer(to_frame(seq, delivery))
}
}
def on_channel_ack(ack:ChannelAck.Buffer) = producer_queue {
session.on_channel_ack(ack)
if( ack.hasDeliverySeq ) {
import collection.JavaConversions._
val l = collectionAsScalaIterable(ack.getDeliverySeqList)
next_ack_seq = Some(l.last.longValue)
l.foreach { seq =>
val delivery = waiting_for_ack.remove(seq.longValue)
assert(delivery.isDefined)
delivery.get.ack(Consumed, null)
}
}
}
}
def on_channel_ack(ack:ChannelAck.Buffer) = {
outbound_channels.get(ack.getChannel.longValue).foreach(
_.on_channel_ack(ack)
)
}
val inbound_channels = HashMap[Long, InboundChannelSink]()
class InboundChannelSink(val open_command:ChannelOpen.Buffer) extends Sink[ChannelDelivery.Buffer] with SinkFilter[ChannelDelivery.Buffer] {
def channel_id = open_command.getChannel.longValue
def byte_credits = sink_switcher.byte_credits
def delivery_credits = sink_switcher.delivery_credits
def connected = sink_switcher.downstream.isDefined
// We use a source to merge multiple ack events into a single ack message.
val ack_source = createSource(new EventAggregator[ChannelAck.Bean, ChannelAck.Bean]() {
def mergeEvents(p1: ChannelAck.Bean, p2: ChannelAck.Bean): ChannelAck.Bean = mergeEvent(p1,p2)
def mergeEvent(previous: ChannelAck.Bean, event: ChannelAck.Bean): ChannelAck.Bean = {
if( previous == null ) {
event
} else {
if(event.hasByteCredits) {
previous.setByteCredits(previous.getByteCredits + event.getByteCredits)
}
if(event.hasDeliveryCredits) {
previous.setDeliveryCredits(previous.getDeliveryCredits + event.getDeliveryCredits)
}
if(event.hasDeliverySeq) {
previous.addAllDeliverySeq(event.getDeliverySeqList)
}
previous
}
}
}, dispatch_queue)
ack_source.onEvent {
val ack = ack_source.getData
trace("inbound-channel %d: sending credits (%d,%d) ... window (%d,%d)".format(open_command.getChannel.longValue, ack.getDeliveryCredits, ack.getByteCredits, sink_switcher.delivery_credits, sink_switcher.byte_credits));
ack.setChannel(channel_id)
connection_send(COMMAND_CHANNEL_ACK, ack.freeze)
}
ack_source.resume
// Messages overflow.. while the peer is disconnected.
val sink_switcher = new MutableSink[ChannelDelivery.Buffer]() {
var byte_credits = 0
var delivery_credits = 0
// our downstream sink is an overflow.. so he can handle buffering up
// any extra we give him.
override def full: Boolean = byte_credits <= 0 && delivery_credits <= 0
override def offer(value: ChannelDelivery.Buffer): Boolean = {
trace("inbound-channel %d: reducing credits (%d,%d) ... window (%d,%d)".format(open_command.getChannel.longValue, 1, value.serializedSizeFramed, delivery_credits, byte_credits));
byte_credits -= value.serializedSizeFramed
delivery_credits -= 1
super.offer(value)
}
}
// flow control is maintained using a credit window. We
// allow internal overflow as long as the remote peer does not
// violate his credit window.
override def full = false
def downstream = sink_switcher
def offer(value: ChannelDelivery.Buffer): Boolean = {
if( value.hasProtocol ) {
if( sink_switcher.full ) {
// oh oh.. sender violated the credit window.
warn("channel: %d sender violated the flow control window", open_command.getChannel.longValue)
val ack = new ChannelAck.Bean()
ack.setError(ascii("flow control window violation"))
ack.setChannel(channel_id)
connection_send(COMMAND_CHANNEL_ACK, ack.freeze)
false
} else {
sink_switcher.offer(value)
}
}
if( value.hasAckSeq ) {
// TODO:
}
true
}
def to_delivery(value: ChannelDelivery.Buffer):Delivery = {
val message_record = new MessageRecord
message_record.buffer = value.getData
message_record.protocol = value.getProtocol
val delivery = new Delivery
delivery.message = ProtocolFactory.get(value.getProtocol.toString).get.decode(message_record)
delivery.ack = if( value.hasSeq ) {
(x, uow)=> {
// message was acked.. now we need to post it back.
// TODO: use an event source to coalesce multiple events.
val ack = new ChannelAck.Bean()
ack.addDeliverySeq(value.getSeq)
ack_source.merge(ack)
}
} else {
null
}
delivery
}
def send_credit(delivery_credits:Int, byte_credits:Int) = {
val ack = new ChannelAck.Bean()
if( delivery_credits != 0 ) {
sink_switcher.delivery_credits += delivery_credits
ack.setDeliveryCredits(delivery_credits)
}
if( byte_credits!=0 ) {
sink_switcher.byte_credits += byte_credits
ack.setByteCredits(byte_credits)
}
ack_source.merge(ack)
}
def set_producer_route(route:ClusterDeliveryProducerRoute) = {
val mapped:Sink[ChannelDelivery.Buffer] = route.map(to_delivery(_))
sink_switcher.downstream = Some(new OverflowSink(mapped) {
override protected def onDelivered(value: ChannelDelivery.Buffer) = {
send_credit(1, value.serializedSizeFramed)
}
})
send_credit(1, channel_window_size)
}
def close = {
// TODO:
}
}
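  /*
   * Hedged illustration (not part of the original source): credit flow between the two
   * channel ends as implemented above. The inbound side grants (1 delivery,
   * channel_window_size bytes) when a producer route is attached, then returns
   * (1, serializedSizeFramed) after each delivery it hands downstream; the outbound side
   * decrements its local (delivery, byte) credits on every offer and only drains again
   * once a COMMAND_CHANNEL_ACK restores them.
   */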
class ClusterDeliveryProducerRoute(router:ClusterRouter, val info:ChannelOpen.Buffer) extends DeliveryProducerRoute(router) {
import collection.JavaConversions._
def consumer_id = info.getConsumerId.longValue
def destinations:Array[SimpleAddress] = info.getDestinationList.toSeq.toArray.
map(x=>XmlCodec.decode(classOf[DestinationDTO], new ByteArrayInputStream(x))).map { _ match {
case x:TopicDestinationDTO=> SimpleAddress("topic", DestinationAddress.decode_path(x.name))
case x:QueueDestinationDTO=> SimpleAddress("queue", DestinationAddress.decode_path(x.name))
case x:DurableSubscriptionDestinationDTO=> SimpleAddress("dsub", DestinationAddress.decode_path(x.name))
}}
info.getDestinationList.toSeq.toArray.map { x=>
XmlCodec.decode(classOf[DestinationDTO], new ByteArrayInputStream(x))
}
override def dispatch_queue: DispatchQueue = Peer.this.dispatch_queue
}
def on_channel_open(open:ChannelOpen.Buffer) = {
val channel_id = open.getChannel.longValue
val existing = inbound_channels.get(channel_id)
if( !existing.isDefined || open!=existing.get.open_command ) {
if( open.hasConsumerId ) {
debug("Peer %s opened channel %d to consumer %d".format(id, channel_id, open.getConsumerId))
exported_consumers.get(open.getConsumerId) match {
case Some(target) =>
val route = new ClusterDeliveryProducerRoute(null, open)
route.bind(target.consumer::Nil)
val sink = new InboundChannelSink(open)
inbound_channels.put(channel_id, sink)
sink.set_producer_route(route)
case None =>
val ack = new ChannelAck.Bean()
ack.setChannel(channel_id)
ack.setError(ascii("consumer does not exist"))
connection_send(COMMAND_CHANNEL_ACK, ack.freeze)
}
} else {
// If the channel is not sending to a consumer, it must be sending
// to a destination.
assert( open.hasVirtualHost && open.hasDestination )
debug("Peer %s opened channel %d to destinations %s on host %s".format(id, channel_id, open.getDestinationList, open.getVirtualHost))
val sink = new InboundChannelSink(open)
inbound_channels.put(channel_id, sink)
cluster_connector.broker.dispatch_queue {
val host = cluster_connector.broker.get_virtual_host(open.getVirtualHost)
// if( host==null ) {
//
// // TODO: perhaps cluster config is not consistent across all the nodes.
// warn("Unknown virtual host: %s", open.getVirtualHost)
// inbound_channels.remove(channel_id)
//
// } else {
val router = host.router.asInstanceOf[ClusterRouter]
val producer = new ClusterDeliveryProducerRoute(router, open)
router.connect(producer.destinations, producer, null)
sink.set_producer_route(producer)
// }
}
}
}
}
def on_channel_close(channel:Long) = {
inbound_channels.remove(channel).foreach { channel =>
channel.close
}
}
def on_channel_send(delivery:ChannelDelivery.Buffer) = {
inbound_channels.get(delivery.getChannel.longValue) match {
case Some(channel) =>
channel.offer(delivery)
case None =>
val ack = new ChannelAck.Bean()
ack.setChannel(delivery.getChannel)
ack.setError(ascii("channel does not exist"))
connection_send(COMMAND_CHANNEL_ACK, ack.freeze)
}
}
}
|
fusesource/fuse-extra
|
fusemq-apollo/fusemq-apollo-cluster/src/main/scala/org/fusesource/fabric/apollo/cluster/Peer.scala
|
Scala
|
apache-2.0
| 28,761 |
package org.monkeynuthead.monkeybarrel.glue
object MicroPickleSerializers extends Serializers {
import Messages._
import upickle.default._
implicit val messageSerializer = new Serializer[Message] {
override def serialize(a: Message): String = write(a)
override def deserialize(s: String): Message = read[Message](s)
}
}
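/*
 * Hedged usage sketch (not part of the original source): given some concrete
 * Messages.Message value `msg`, the implicit serializer round-trips it through
 * upickle JSON:
 *
 *   import MicroPickleSerializers._
 *   val json = implicitly[Serializer[Message]].serialize(msg)
 *   val back = implicitly[Serializer[Message]].deserialize(json)   // == msg
 */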
|
georgenicoll/monkey-barrel
|
glue/shared/src/main/scala/org/monkeynuthead/monkeybarrel/glue/MicroPickleSerializers.scala
|
Scala
|
gpl-2.0
| 357 |
import java.io.File
import java.nio.file._
import Process._
import Modes._
import ScaladocGeneration._
import com.jsuereth.sbtpgp.PgpKeys
import sbt.Keys._
import sbt._
import complete.DefaultParsers._
import pl.project13.scala.sbt.JmhPlugin
import pl.project13.scala.sbt.JmhPlugin.JmhKeys.Jmh
import sbt.Package.ManifestAttributes
import sbt.plugins.SbtPlugin
import sbt.ScriptedPlugin.autoImport._
import xerial.sbt.pack.PackPlugin
import xerial.sbt.pack.PackPlugin.autoImport._
import xerial.sbt.Sonatype.autoImport._
import com.typesafe.tools.mima.plugin.MimaPlugin.autoImport._
import org.scalajs.sbtplugin.ScalaJSPlugin
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import sbtbuildinfo.BuildInfoPlugin
import sbtbuildinfo.BuildInfoPlugin.autoImport._
import scala.util.Properties.isJavaAtLeast
import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
import org.scalajs.linker.interface.ModuleInitializer
object DottyJSPlugin extends AutoPlugin {
import Build._
override def requires: Plugins = ScalaJSPlugin
override def projectSettings: Seq[Setting[_]] = Def.settings(
commonBootstrappedSettings,
/* #11709 Remove the dependency on scala3-library that ScalaJSPlugin adds.
* Instead, in this build, we use `.dependsOn` relationships to depend on
* the appropriate, locally-defined, scala3-library-bootstrappedJS.
*/
libraryDependencies ~= {
_.filter(!_.name.startsWith("scala3-library_sjs1"))
},
// Replace the JVM JUnit dependency by the Scala.js one
libraryDependencies ~= {
_.filter(!_.name.startsWith("junit-interface"))
},
libraryDependencies +=
("org.scala-js" %% "scalajs-junit-test-runtime" % scalaJSVersion % "test").cross(CrossVersion.for3Use2_13),
// Typecheck the Scala.js IR found on the classpath
scalaJSLinkerConfig ~= (_.withCheckIR(true)),
)
}
object Build {
import ScaladocConfigs._
val referenceVersion = "3.1.2-RC1"
val baseVersion = "3.2.0-RC1"
// Versions used by the vscode extension to create a new project
// This should be the latest published releases.
// TODO: Have the vscode extension fetch these numbers from the Internet
// instead of hardcoding them ?
val publishedDottyVersion = referenceVersion
val sbtDottyVersion = "0.5.5"
/** Version against which we check binary compatibility.
*
* This must be the latest published release in the same versioning line.
* For example, if the next version is going to be 3.1.4, then this must be
* set to 3.1.3. If it is going to be 3.1.0, it must be set to the latest
* 3.0.x release.
*/
val previousDottyVersion = "3.1.1"
object CompatMode {
final val BinaryCompatible = 0
final val SourceAndBinaryCompatible = 1
}
val compatMode = {
    val VersionRE = """^\d+\.(\d+).(\d+).*""".r
baseVersion match {
case VersionRE(_, "0") => CompatMode.BinaryCompatible
case _ => CompatMode.SourceAndBinaryCompatible
}
}
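  // Illustrative note (not in the original build): with VersionRE above, a baseVersion
  // like "3.2.0-RC1" captures minor "2" and patch "0", selecting CompatMode.BinaryCompatible,
  // while a patch release like "3.1.2" captures patch "2" and falls through to
  // CompatMode.SourceAndBinaryCompatible.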
/** scala-library version required to compile Dotty.
*
* Both the non-bootstrapped and bootstrapped version should match, unless
* we're in the process of upgrading to a new major version of
* scala-library.
*/
def stdlibVersion(implicit mode: Mode): String = mode match {
case NonBootstrapped => "2.13.8"
case Bootstrapped => "2.13.8"
}
val dottyOrganization = "org.scala-lang"
val dottyGithubUrl = "https://github.com/lampepfl/dotty"
val dottyGithubRawUserContentUrl = "https://raw.githubusercontent.com/lampepfl/dotty"
val isRelease = sys.env.get("RELEASEBUILD") == Some("yes")
val dottyVersion = {
def isNightly = sys.env.get("NIGHTLYBUILD") == Some("yes")
if (isRelease)
baseVersion
else if (isNightly)
baseVersion + "-bin-" + VersionUtil.commitDate + "-" + VersionUtil.gitHash + "-NIGHTLY"
else
baseVersion + "-bin-SNAPSHOT"
}
val dottyNonBootstrappedVersion = {
// Make sure sbt always computes the scalaBinaryVersion correctly
val bin = if (!dottyVersion.contains("-bin")) "-bin" else ""
dottyVersion + bin + "-nonbootstrapped"
}
val sbtCommunityBuildVersion = "0.1.0-SNAPSHOT"
val agentOptions = List(
// "-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005"
// "-agentpath:/home/dark/opt/yjp-2013-build-13072/bin/linux-x86-64/libyjpagent.so"
// "-agentpath:/Applications/YourKit_Java_Profiler_2015_build_15052.app/Contents/Resources/bin/mac/libyjpagent.jnilib",
// "-XX:+HeapDumpOnOutOfMemoryError", "-Xmx1g", "-Xss2m"
)
// Packages all subprojects to their jars
val packageAll = taskKey[Map[String, String]]("Package everything needed to run tests")
// Run tests with filter through vulpix test suite
val testCompilation = inputKey[Unit]("runs integration test with the supplied filter")
// Used to compile files similar to ./bin/scalac script
val scalac = inputKey[Unit]("run the compiler using the correct classpath, or the user supplied classpath")
// Used to run binaries similar to ./bin/scala script
val scala = inputKey[Unit]("run compiled binary using the correct classpath, or the user supplied classpath")
val repl = taskKey[Unit]("spawns a repl with the correct classpath")
// Compiles the documentation and static site
val genDocs = inputKey[Unit]("run scaladoc to generate static documentation site")
// Settings used to configure the test language server
val ideTestsCompilerVersion = taskKey[String]("Compiler version to use in IDE tests")
val ideTestsCompilerArguments = taskKey[Seq[String]]("Compiler arguments to use in IDE tests")
val ideTestsDependencyClasspath = taskKey[Seq[File]]("Dependency classpath to use in IDE tests")
val fetchScalaJSSource = taskKey[File]("Fetch the sources of Scala.js")
lazy val SourceDeps = config("sourcedeps")
// Settings shared by the build (scoped in ThisBuild). Used in build.sbt
lazy val thisBuildSettings = Def.settings(
organization := dottyOrganization,
organizationName := "LAMP/EPFL",
organizationHomepage := Some(url("http://lamp.epfl.ch")),
scalacOptions ++= Seq(
"-feature",
"-deprecation",
"-unchecked",
"-Xfatal-warnings",
"-encoding", "UTF8",
"-language:existentials,higherKinds,implicitConversions,postfixOps"
),
(Compile / compile / javacOptions) ++= Seq("-Xlint:unchecked", "-Xlint:deprecation"),
// Avoid various sbt craziness involving classloaders and parallelism
run / fork := true,
Test / fork := true,
Test / parallelExecution := false,
outputStrategy := Some(StdoutOutput),
// enable verbose exception messages for JUnit
(Test / testOptions) += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-s"),
)
// Settings shared globally (scoped in Global). Used in build.sbt
lazy val globalSettings = Def.settings(
onLoad := (Global / onLoad).value andThen { state =>
def exists(submodule: String) = {
val path = Paths.get(submodule)
Files.exists(path) && {
val fileStream = Files.list(path)
try fileStream.iterator().hasNext
finally fileStream.close()
}
}
// Copy default configuration from .vscode-template/ unless configuration files already exist in .vscode/
sbt.IO.copyDirectory(new File(".vscode-template/"), new File(".vscode/"), overwrite = false)
state
},
// I find supershell more distracting than helpful
useSuperShell := false,
// Credentials to release to Sonatype
credentials ++= (
for {
username <- sys.env.get("SONATYPE_USER")
password <- sys.env.get("SONATYPE_PW")
} yield Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", username, password)
).toList,
PgpKeys.pgpPassphrase := sys.env.get("PGP_PW").map(_.toCharArray()),
PgpKeys.useGpgPinentry := true,
javaOptions ++= {
val ciOptions = // propagate if this is a CI build
sys.props.get("dotty.drone.mem") match {
case Some(prop) => List("-Xmx" + prop)
case _ => List()
}
// Do not cut off the bottom of large stack traces (default is 1024)
"-XX:MaxJavaStackTraceDepth=1000000" :: agentOptions ::: ciOptions
},
excludeLintKeys ++= Set(
// We set these settings in `commonSettings`, if a project
// uses `commonSettings` but overrides `unmanagedSourceDirectories`,
// sbt will complain if we don't exclude them here.
Keys.scalaSource, Keys.javaSource
),
)
lazy val disableDocSetting =
    // This is a legacy setting; we should reevaluate generating javadocs
Compile / doc / sources := Seq()
lazy val commonSettings = publishSettings ++ Seq(
(Compile / scalaSource) := baseDirectory.value / "src",
(Test / scalaSource) := baseDirectory.value / "test",
(Compile / javaSource) := baseDirectory.value / "src",
(Test / javaSource) := baseDirectory.value / "test",
(Compile / resourceDirectory) := baseDirectory.value / "resources",
(Test / resourceDirectory) := baseDirectory.value / "test-resources",
// Prevent sbt from rewriting our dependencies
scalaModuleInfo ~= (_.map(_.withOverrideScalaVersion(false))),
libraryDependencies += "com.novocode" % "junit-interface" % "0.11" % Test,
// If someone puts a source file at the root (e.g., for manual testing),
// don't pick it up as part of any project.
sourcesInBase := false,
// For compatibility with Java 9+ module system;
    // without Automatic-Module-Name, the module name is derived from the jar file name, which is invalid because of the _3 suffix.
Compile / packageBin / packageOptions +=
Package.ManifestAttributes(
"Automatic-Module-Name" -> s"${dottyOrganization.replaceAll("-",".")}.${moduleName.value.replaceAll("-",".")}"
)
)
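  // Illustration of the manifest attribute computed above: for a module named
  // "scala3-library" published under dottyOrganization, the manifest would contain
  //   Automatic-Module-Name: org.scala.lang.scala3.library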
// Settings used for projects compiled only with Java
lazy val commonJavaSettings = commonSettings ++ Seq(
version := dottyVersion,
scalaVersion := referenceVersion,
// Do not append Scala versions to the generated artifacts
crossPaths := false,
// Do not depend on the Scala library
autoScalaLibrary := false,
disableDocSetting
)
// Settings used when compiling dotty (both non-bootstrapped and bootstrapped)
lazy val commonDottySettings = commonSettings ++ Seq(
// Manually set the standard library to use
autoScalaLibrary := false,
classpathOptions ~= (old =>
old
.withAutoBoot(false) // no library on the compiler bootclasspath - we may need a more recent version
.withFilterLibrary(false) // ...instead, we put it on the compiler classpath
),
)
lazy val commonScala2Settings = commonSettings ++ Seq(
scalaVersion := stdlibVersion(Bootstrapped),
moduleName ~= { _.stripSuffix("-scala2") },
version := dottyVersion,
target := baseDirectory.value / ".." / "out" / "scala-2" / name.value,
disableDocSetting
)
// Settings used when compiling dotty with the reference compiler
lazy val commonNonBootstrappedSettings = commonDottySettings ++ Seq(
(Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-non-bootstrapped",
version := dottyNonBootstrappedVersion,
scalaVersion := referenceVersion,
disableDocSetting
)
private lazy val currentYear: String = java.util.Calendar.getInstance().get(java.util.Calendar.YEAR).toString
def scalacOptionsDocSettings(includeExternalMappings: Boolean = true) = {
val extMap = Seq("-external-mappings:" +
(if (includeExternalMappings) ".*scala/.*::scaladoc3::https://dotty.epfl.ch/api/," else "") +
".*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/")
Seq(
"-skip-by-regex:.+\\\\.internal($|\\\\..+)",
"-skip-by-regex:.+\\\\.impl($|\\\\..+)",
"-project-logo", "docs/_assets/images/logo.svg",
"-social-links:" +
"github::https://github.com/lampepfl/dotty," +
"discord::https://discord.com/invite/scala," +
"twitter::https://twitter.com/scala_lang",
// contains special definitions which are "transplanted" elsewhere
// and which therefore confuse Scaladoc when accessed from this pkg
"-skip-by-id:scala.runtime.stdLibPatches",
// MatchCase is a special type that represents match type cases,
// Reflect doesn't expect to see it as a standalone definition
// and therefore it's easier just not to document it
"-skip-by-id:scala.runtime.MatchCase",
"-project-footer", s"Copyright (c) 2002-$currentYear, LAMP/EPFL",
"-author",
"-groups",
"-default-template", "static-site-main"
) ++ extMap
}
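  // For illustration, with includeExternalMappings = true the option above expands to
  //   -external-mappings:.*scala/.*::scaladoc3::https://dotty.epfl.ch/api/,.*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/
  // so that references to scala.* and java.* symbols link to their external documentation.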
// Settings used when compiling dotty with a non-bootstrapped dotty
lazy val commonBootstrappedSettings = commonDottySettings ++ NoBloopExport.settings ++ Seq(
bspEnabled := false,
(Compile / unmanagedSourceDirectories) += baseDirectory.value / "src-bootstrapped",
version := dottyVersion,
scalaVersion := dottyNonBootstrappedVersion,
scalaCompilerBridgeBinaryJar := {
Some((`scala3-sbt-bridge` / Compile / packageBin).value)
},
// Use the same name as the non-bootstrapped projects for the artifacts.
// Remove the `js` suffix because JS artifacts are published using their special crossVersion.
// The order of the two `stripSuffix`es is important, so that
// scala3-library-bootstrappedjs becomes scala3-library.
moduleName ~= { _.stripSuffix("js").stripSuffix("-bootstrapped") },
// Enforce that the only Scala 2 classfiles we unpickle come from scala-library
/*
scalacOptions ++= {
val cp = (dependencyClasspath in `scala3-library` in Compile).value
val scalaLib = findArtifactPath(cp, "scala-library")
Seq("-Yscala2-unpickler", scalaLib)
},
*/
// sbt gets very unhappy if two projects use the same target
target := baseDirectory.value / ".." / "out" / "bootstrap" / name.value,
// Compile using the non-bootstrapped and non-published dotty
managedScalaInstance := false,
scalaInstance := {
val externalLibraryDeps = (`scala3-library` / Compile / externalDependencyClasspath).value.map(_.data).toSet
val externalCompilerDeps = (`scala3-compiler` / Compile / externalDependencyClasspath).value.map(_.data).toSet
// IMPORTANT: We need to use actual jars to form the ScalaInstance and not
// just directories containing classfiles because sbt maintains a cache of
      // compiler instances. This cache is invalidated based on timestamps;
      // however, this is only implemented for jars, directories are never
// invalidated.
val tastyCore = (`tasty-core` / Compile / packageBin).value
val scala3Library = (`scala3-library` / Compile / packageBin).value
val scala3Interfaces = (`scala3-interfaces` / Compile / packageBin).value
val scala3Compiler = (`scala3-compiler` / Compile / packageBin).value
val libraryJars = Array(scala3Library) ++ externalLibraryDeps
val compilerJars = Seq(tastyCore, scala3Interfaces, scala3Compiler) ++ (externalCompilerDeps -- externalLibraryDeps)
Defaults.makeScalaInstance(
scalaVersion.value,
libraryJars = libraryJars,
allCompilerJars = compilerJars,
allDocJars = Seq.empty,
state.value,
scalaInstanceTopLoader.value
)
},
// We cannot include scaladoc in the regular `scalaInstance` task because
// it's a bootstrapped-only project, so we would run into a loop since we
// need the output of that task to compile scaladoc. But we can include it
// in the `scalaInstance` of the `doc` task which allows us to run
// `scala3-library-bootstrapped/doc` for example.
doc / scalaInstance := {
val externalDeps = (LocalProject("scaladoc") / Compile / externalDependencyClasspath).value.map(_.data)
val scalaDoc = (LocalProject("scaladoc") / Compile / packageBin).value
val docJars = Array(scalaDoc) ++ externalDeps
val base = scalaInstance.value
val docScalaInstance = Defaults.makeScalaInstance(
version = base.version,
libraryJars = base.libraryJars,
allCompilerJars = base.compilerJars,
allDocJars = docJars,
state.value,
scalaInstanceTopLoader.value
)
// assert that sbt reuses the same compiler class loader
assert(docScalaInstance.loaderCompilerOnly == base.loaderCompilerOnly)
docScalaInstance
},
Compile / doc / scalacOptions ++= scalacOptionsDocSettings()
)
lazy val commonBenchmarkSettings = Seq(
Jmh / bspEnabled := false,
Jmh / run / mainClass := Some("dotty.tools.benchmarks.Bench"), // custom main for jmh:run
javaOptions += "-DBENCH_COMPILER_CLASS_PATH=" + Attributed.data((`scala3-bootstrapped` / Compile / fullClasspath).value).mkString("", File.pathSeparator, ""),
javaOptions += "-DBENCH_CLASS_PATH=" + Attributed.data((`scala3-library-bootstrapped` / Compile / fullClasspath).value).mkString("", File.pathSeparator, "")
)
lazy val commonMiMaSettings = Def.settings(
mimaPreviousArtifacts += {
val thisProjectID = projectID.value
val crossedName = thisProjectID.crossVersion match {
case cv: Disabled => thisProjectID.name
case cv: Binary => s"${thisProjectID.name}_${cv.prefix}3${cv.suffix}"
}
(thisProjectID.organization % crossedName % previousDottyVersion)
},
mimaCheckDirection := (compatMode match {
case CompatMode.BinaryCompatible => "backward"
case CompatMode.SourceAndBinaryCompatible => "both"
}),
)
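  // Example of the cross-versioned artifact name computed above: a project named
  // "scala3-library" with the standard Binary cross-version is checked against
  // "scala3-library_3" at previousDottyVersion; with Disabled cross-versioning the
  // plain project name is used.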
/** Projects -------------------------------------------------------------- */
val dottyCompilerBootstrappedRef = LocalProject("scala3-compiler-bootstrapped")
/** External dependencies we may want to put on the compiler classpath. */
def externalCompilerClasspathTask: Def.Initialize[Task[Def.Classpath]] =
// Even if we're running the non-bootstrapped compiler, we want the
// dependencies of the bootstrapped compiler since we want to put them on
// the compiler classpath, not the JVM classpath.
(dottyCompilerBootstrappedRef / Runtime / externalDependencyClasspath)
// The root project:
// - aggregates other projects so that "compile", "test", etc are run on all projects at once.
// - publishes its own empty artifact "dotty" that depends on "scala3-library" and "scala3-compiler",
// this is only necessary for compatibility with sbt which currently hardcodes the "dotty" artifact name
lazy val scala3 = project.in(file(".")).asDottyRoot(NonBootstrapped)
lazy val `scala3-bootstrapped` = project.asDottyRoot(Bootstrapped)
lazy val `scala3-interfaces` = project.in(file("interfaces")).
settings(commonJavaSettings).
settings(commonMiMaSettings).
settings(
versionScheme := Some("semver-spec")
)
/** Find an artifact with the given `name` in `classpath` */
def findArtifact(classpath: Def.Classpath, name: String): File = classpath
.find(_.get(artifact.key).exists(_.name == name))
.getOrElse(throw new MessageOnlyException(s"Artifact for $name not found in $classpath"))
.data
/** Like `findArtifact` but returns the absolute path of the entry as a string */
def findArtifactPath(classpath: Def.Classpath, name: String): String =
findArtifact(classpath, name).getAbsolutePath
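  // Usage sketch (names are illustrative): given the external compiler classpath,
  //   findArtifactPath(externalDeps, "scala-library")
  // returns the absolute path of the scala-library jar, or throws a
  // MessageOnlyException if no such artifact is on the classpath.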
/** Insert UnsafeNulls Import after package */
def insertUnsafeNullsImport(lines: Seq[String]): Seq[String] = {
def recur(ls: Seq[String], foundPackage: Boolean): Seq[String] = ls match {
case Seq(l, rest @ _*) =>
val lt = l.trim()
if (foundPackage) {
if (!(lt.isEmpty || lt.startsWith("package ")))
"import scala.language.unsafeNulls" +: ls
else l +: recur(rest, foundPackage)
} else {
if (lt.startsWith("package ")) l +: recur(rest, true)
else l +: recur(rest, foundPackage)
}
case _ => ls
}
recur(lines, false)
}
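  // Example of the rewrite performed above, on illustrative input (not actual sources):
  //   Seq("package org.scalajs.ir", "object Trees")
  // becomes
  //   Seq("package org.scalajs.ir", "import scala.language.unsafeNulls", "object Trees")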
// Settings shared between scala3-compiler and scala3-compiler-bootstrapped
lazy val commonDottyCompilerSettings = Seq(
// Generate compiler.properties, used by sbt
(Compile / resourceGenerators) += Def.task {
import java.util._
import java.text._
val file = (Compile / resourceManaged).value / "compiler.properties"
val dateFormat = new SimpleDateFormat("yyyyMMdd-HHmmss")
dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"))
val contents = //2.11.11.v20170413-090219-8a413ba7cc
s"""version.number=${version.value}
|maven.version.number=${version.value}
|git.hash=${VersionUtil.gitHash}
|copyright.string=Copyright 2002-$currentYear, LAMP/EPFL
""".stripMargin
if (!(file.exists && IO.read(file) == contents)) {
IO.write(file, contents)
}
Seq(file)
}.taskValue,
// get libraries onboard
libraryDependencies ++= Seq(
"org.scala-lang.modules" % "scala-asm" % "9.1.0-scala-1", // used by the backend
Dependencies.oldCompilerInterface, // we stick to the old version to avoid deprecation warnings
"org.jline" % "jline-reader" % "3.19.0", // used by the REPL
"org.jline" % "jline-terminal" % "3.19.0",
"org.jline" % "jline-terminal-jna" % "3.19.0", // needed for Windows
("io.get-coursier" %% "coursier" % "2.0.16" % Test).cross(CrossVersion.for3Use2_13),
),
// For convenience, change the baseDirectory when running the compiler
Compile / forkOptions := (Compile / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value),
Compile / run / forkOptions := (Compile / run / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value),
// And when running the tests
Test / forkOptions := (Test / forkOptions).value.withWorkingDirectory((ThisBuild / baseDirectory).value),
Test / test := {
// Exclude VulpixMetaTests
(Test / testOnly).toTask(" -- --exclude-categories=dotty.VulpixMetaTests").value
},
(Test / testOptions) += Tests.Argument(
TestFrameworks.JUnit,
"--run-listener=dotty.tools.ContextEscapeDetector",
),
// Spawn new JVM in run and test
    // Add the git hash used to package the distribution to the manifest, so it is known at runtime and can be reported in the REPL
packageOptions += ManifestAttributes(("Git-Hash", VersionUtil.gitHash)),
javaOptions ++= {
val managedSrcDir = {
// Populate the directory
(Compile / managedSources).value
(Compile / sourceManaged).value
}
val externalDeps = externalCompilerClasspathTask.value
val jars = packageAll.value
Seq(
"-Ddotty.tests.dottyCompilerManagedSources=" + managedSrcDir,
"-Ddotty.tests.classes.dottyInterfaces=" + jars("scala3-interfaces"),
"-Ddotty.tests.classes.dottyLibrary=" + jars("scala3-library"),
"-Ddotty.tests.classes.dottyCompiler=" + jars("scala3-compiler"),
"-Ddotty.tests.classes.tastyCore=" + jars("tasty-core"),
"-Ddotty.tests.classes.compilerInterface=" + findArtifactPath(externalDeps, "compiler-interface"),
"-Ddotty.tests.classes.scalaLibrary=" + findArtifactPath(externalDeps, "scala-library"),
"-Ddotty.tests.classes.scalaAsm=" + findArtifactPath(externalDeps, "scala-asm"),
"-Ddotty.tests.classes.jlineTerminal=" + findArtifactPath(externalDeps, "jline-terminal"),
"-Ddotty.tests.classes.jlineReader=" + findArtifactPath(externalDeps, "jline-reader"),
)
},
javaOptions += (
s"-Ddotty.tools.dotc.semanticdb.test=${(ThisBuild / baseDirectory).value/"tests"/"semanticdb"}"
),
testCompilation := Def.inputTaskDyn {
val args = spaceDelimited("<arg>").parsed
if (args.contains("--help")) {
println(
s"""
|usage: testCompilation [--help] [--from-tasty] [--update-checkfiles] [<filter>]
|
|By default runs tests in dotty.tools.dotc.*CompilationTests excluding tests tagged with dotty.SlowTests.
|
| --help show this message
| --from-tasty runs tests in dotty.tools.dotc.FromTastyTests
| --update-checkfiles override the checkfiles that did not match with the current output
| <filter> substring of the path of the tests file
|
""".stripMargin
)
(Test / testOnly).toTask(" not.a.test")
}
else {
val updateCheckfile = args.contains("--update-checkfiles")
val fromTasty = args.contains("--from-tasty")
val args1 = if (updateCheckfile | fromTasty) args.filter(x => x != "--update-checkfiles" && x != "--from-tasty") else args
val test = if (fromTasty) "dotty.tools.dotc.FromTastyTests" else "dotty.tools.dotc.*CompilationTests"
val cmd = s" $test -- --exclude-categories=dotty.SlowTests" +
(if (updateCheckfile) " -Ddotty.tests.updateCheckfiles=TRUE" else "") +
(if (args1.nonEmpty) " -Ddotty.tests.filter=" + args1.mkString(" ") else "")
(Test / testOnly).toTask(cmd)
}
}.evaluated,
Compile / mainClass := Some("dotty.tools.dotc.Main"),
scala := {
val args: List[String] = spaceDelimited("<arg>").parsed.toList
val externalDeps = externalCompilerClasspathTask.value
val jars = packageAll.value
val scalaLib = findArtifactPath(externalDeps, "scala-library")
val dottyLib = jars("scala3-library")
def run(args: List[String]): Unit = {
val fullArgs = insertClasspathInArgs(args, List(".", dottyLib, scalaLib).mkString(File.pathSeparator))
runProcess("java" :: fullArgs, wait = true)
}
if (args.isEmpty) {
println("Couldn't run `scala` without args. Use `repl` to run the repl or add args to run the dotty application")
} else if (scalaLib == "") {
println("Couldn't find scala-library on classpath, please run using script in bin dir instead")
} else if (args.contains("-with-compiler")) {
val args1 = args.filter(_ != "-with-compiler")
val asm = findArtifactPath(externalDeps, "scala-asm")
val dottyCompiler = jars("scala3-compiler")
val dottyStaging = jars("scala3-staging")
val dottyTastyInspector = jars("scala3-tasty-inspector")
val dottyInterfaces = jars("scala3-interfaces")
val tastyCore = jars("tasty-core")
run(insertClasspathInArgs(args1, List(dottyCompiler, dottyInterfaces, asm, dottyStaging, dottyTastyInspector, tastyCore).mkString(File.pathSeparator)))
} else run(args)
},
run := scalac.evaluated,
scalac := Def.inputTaskDyn {
val log = streams.value.log
val externalDeps = externalCompilerClasspathTask.value
val jars = packageAll.value
val scalaLib = findArtifactPath(externalDeps, "scala-library")
val dottyLib = jars("scala3-library")
val dottyCompiler = jars("scala3-compiler")
val args0: List[String] = spaceDelimited("<arg>").parsed.toList
val decompile = args0.contains("-decompile")
val printTasty = args0.contains("-print-tasty")
val debugFromTasty = args0.contains("-Ythrough-tasty")
val args = args0.filter(arg => arg != "-repl" && arg != "-decompile" &&
arg != "-with-compiler" && arg != "-Ythrough-tasty" && arg != "-print-tasty")
val main =
if (decompile) "dotty.tools.dotc.decompiler.Main"
else if (printTasty) "dotty.tools.dotc.core.tasty.TastyPrinter"
else if (debugFromTasty) "dotty.tools.dotc.fromtasty.Debug"
else "dotty.tools.dotc.Main"
var extraClasspath = Seq(scalaLib, dottyLib)
if (decompile && !args.contains("-classpath"))
extraClasspath ++= Seq(".")
if (args0.contains("-with-compiler")) {
if (scalaVersion.value == referenceVersion) {
log.error("-with-compiler should only be used with a bootstrapped compiler")
}
val dottyInterfaces = jars("scala3-interfaces")
val dottyStaging = jars("scala3-staging")
val dottyTastyInspector = jars("scala3-tasty-inspector")
val tastyCore = jars("tasty-core")
val asm = findArtifactPath(externalDeps, "scala-asm")
extraClasspath ++= Seq(dottyCompiler, dottyInterfaces, asm, dottyStaging, dottyTastyInspector, tastyCore)
}
val fullArgs = main :: (if (printTasty) args else insertClasspathInArgs(args, extraClasspath.mkString(File.pathSeparator)))
(Compile / runMain).toTask(fullArgs.mkString(" ", " ", ""))
}.evaluated,
/* Add the sources of scalajs-ir.
* To guarantee that dotty can bootstrap without depending on a version
* of scalajs-ir built with a different Scala compiler, we add its
* sources instead of depending on the binaries.
*/
ivyConfigurations += SourceDeps.hide,
transitiveClassifiers := Seq("sources"),
libraryDependencies +=
("org.scala-js" %% "scalajs-ir" % scalaJSVersion % "sourcedeps").cross(CrossVersion.for3Use2_13),
(Compile / sourceGenerators) += Def.task {
val s = streams.value
val cacheDir = s.cacheDirectory
val trgDir = (Compile / sourceManaged).value / "scalajs-ir-src"
val report = updateClassifiers.value
val scalaJSIRSourcesJar = report.select(
configuration = configurationFilter("sourcedeps"),
module = (_: ModuleID).name.startsWith("scalajs-ir_"),
artifact = artifactFilter(`type` = "src")).headOption.getOrElse {
sys.error(s"Could not fetch scalajs-ir sources")
}
FileFunction.cached(cacheDir / s"fetchScalaJSIRSource",
FilesInfo.lastModified, FilesInfo.exists) { dependencies =>
s.log.info(s"Unpacking scalajs-ir sources to $trgDir...")
if (trgDir.exists)
IO.delete(trgDir)
IO.createDirectory(trgDir)
IO.unzip(scalaJSIRSourcesJar, trgDir)
val sjsSources = (trgDir ** "*.scala").get.toSet
sjsSources.foreach(f => {
val lines = IO.readLines(f)
IO.writeLines(f, insertUnsafeNullsImport(lines))
})
sjsSources
} (Set(scalaJSIRSourcesJar)).toSeq
}.taskValue,
)
def insertClasspathInArgs(args: List[String], cp: String): List[String] = {
val (beforeCp, fromCp) = args.span(_ != "-classpath")
val classpath = fromCp.drop(1).headOption.fold(cp)(_ + File.pathSeparator + cp)
"-classpath" :: classpath :: beforeCp ::: fromCp.drop(2)
}
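  // Example (using the platform path separator, shown here as ':'):
  //   insertClasspathInArgs(List("Foo.scala", "-classpath", "lib.jar"), "out")
  // yields List("-classpath", "lib.jar:out", "Foo.scala"), i.e. a user-supplied
  // -classpath entry is kept and the extra entries are appended to it; without one,
  // the given classpath is used on its own.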
lazy val nonBootstrapedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq(
    // packageAll packages all subprojects and returns a map from artifact name to its absolute location
packageAll := Def.taskDyn { // Use a dynamic task to avoid loops when loading the settings
Def.task {
Map(
"scala3-interfaces" -> (`scala3-interfaces` / Compile / packageBin).value,
"scala3-compiler" -> (Compile / packageBin).value,
"tasty-core" -> (`tasty-core` / Compile / packageBin).value,
// NOTE: Using scala3-library-bootstrapped here is intentional: when
// running the compiler, we should always have the bootstrapped
// library on the compiler classpath since the non-bootstrapped one
// may not be binary-compatible.
"scala3-library" -> (`scala3-library-bootstrapped` / Compile / packageBin).value
).mapValues(_.getAbsolutePath)
}
}.value,
(Test / testOptions) += Tests.Argument(
TestFrameworks.JUnit,
"--exclude-categories=dotty.BootstrappedOnlyTests",
),
// increase stack size for non-bootstrapped compiler, because some code
// is only tail-recursive after bootstrap
(Test / javaOptions) += "-Xss2m"
)
lazy val bootstrapedDottyCompilerSettings = commonDottyCompilerSettings ++ Seq(
javaOptions ++= {
val jars = packageAll.value
Seq(
"-Ddotty.tests.classes.dottyStaging=" + jars("scala3-staging"),
"-Ddotty.tests.classes.dottyTastyInspector=" + jars("scala3-tasty-inspector"),
)
},
packageAll := {
(`scala3-compiler` / packageAll).value ++ Seq(
"scala3-compiler" -> (Compile / packageBin).value.getAbsolutePath,
"scala3-staging" -> (LocalProject("scala3-staging") / Compile / packageBin).value.getAbsolutePath,
"scala3-tasty-inspector" -> (LocalProject("scala3-tasty-inspector") / Compile / packageBin).value.getAbsolutePath,
"tasty-core" -> (LocalProject("tasty-core-bootstrapped") / Compile / packageBin).value.getAbsolutePath,
)
},
Compile / scalacOptions ++= Seq("-Yexplicit-nulls"),
repl := (Compile / console).value,
Compile / console / scalacOptions := Nil, // reset so that we get stock REPL behaviour! E.g. avoid -unchecked being enabled
)
def dottyCompilerSettings(implicit mode: Mode): sbt.Def.SettingsDefinition =
if (mode == NonBootstrapped) nonBootstrapedDottyCompilerSettings else bootstrapedDottyCompilerSettings
lazy val `scala3-compiler` = project.in(file("compiler")).asDottyCompiler(NonBootstrapped)
lazy val Scala3CompilerCoursierTest = config("scala3CompilerCoursierTest") extend Test
lazy val `scala3-compiler-bootstrapped` = project.in(file("compiler")).asDottyCompiler(Bootstrapped)
.configs(Scala3CompilerCoursierTest)
.settings(
inConfig(Scala3CompilerCoursierTest)(Defaults.testSettings),
Scala3CompilerCoursierTest / scalaSource := baseDirectory.value / "test-coursier",
Scala3CompilerCoursierTest / fork := true,
Scala3CompilerCoursierTest / envVars := Map("DOTTY_BOOTSTRAPPED_VERSION" -> dottyVersion),
Scala3CompilerCoursierTest / unmanagedClasspath += (Scala3CompilerCoursierTest / scalaSource).value,
Scala3CompilerCoursierTest / test := ((Scala3CompilerCoursierTest / test) dependsOn (
        publishLocal, // Had to enumerate all deps since calling `scala3-bootstrapped` / publishLocal would lead to a recursive dependency => stack overflow
`scala3-interfaces` / publishLocal,
dottyLibrary(Bootstrapped) / publishLocal,
tastyCore(Bootstrapped) / publishLocal,
),
).value,
)
def dottyCompiler(implicit mode: Mode): Project = mode match {
case NonBootstrapped => `scala3-compiler`
case Bootstrapped => `scala3-compiler-bootstrapped`
}
// Settings shared between scala3-library, scala3-library-bootstrapped and scala3-library-bootstrappedJS
lazy val dottyLibrarySettings = Seq(
(Compile / scalacOptions) ++= Seq(
// Needed so that the library sources are visible when `dotty.tools.dotc.core.Definitions#init` is called
"-sourcepath", (Compile / sourceDirectories).value.map(_.getAbsolutePath).distinct.mkString(File.pathSeparator),
"-Yexplicit-nulls",
),
)
lazy val `scala3-library` = project.in(file("library")).asDottyLibrary(NonBootstrapped)
lazy val `scala3-library-bootstrapped`: Project = project.in(file("library")).asDottyLibrary(Bootstrapped)
def dottyLibrary(implicit mode: Mode): Project = mode match {
case NonBootstrapped => `scala3-library`
case Bootstrapped => `scala3-library-bootstrapped`
}
/** The dotty standard library compiled with the Scala.js back-end, to produce
* the corresponding .sjsir files.
*
   * This artifact must be on the classpath of every "Dotty.js" project.
*
* Currently, only a very small fraction of the dotty library is actually
* included in this project, and hence available to Dotty.js projects. More
* will be added in the future as things are confirmed to be supported.
*/
lazy val `scala3-library-bootstrappedJS`: Project = project.in(file("library-js")).
asDottyLibrary(Bootstrapped).
enablePlugins(DottyJSPlugin).
settings(
libraryDependencies +=
("org.scala-js" %% "scalajs-library" % scalaJSVersion).cross(CrossVersion.for3Use2_13),
Compile / unmanagedSourceDirectories ++=
(`scala3-library-bootstrapped` / Compile / unmanagedSourceDirectories).value,
// Configure the source maps to point to GitHub for releases
scalacOptions ++= {
if (isRelease) {
val baseURI = (LocalRootProject / baseDirectory).value.toURI
val dottyVersion = version.value
Seq(s"-scalajs-mapSourceURI:$baseURI->$dottyGithubRawUserContentUrl/$dottyVersion/")
} else {
Nil
}
},
// Make sure `scala3-bootstrapped/test` doesn't fail on this project for no reason
Test / test := {},
Test / testOnly := {},
)
lazy val tastyCoreSettings = Seq(
scalacOptions += "-source:3.0-migration"
)
lazy val `tasty-core` = project.in(file("tasty")).asTastyCore(NonBootstrapped)
lazy val `tasty-core-bootstrapped`: Project = project.in(file("tasty")).asTastyCore(Bootstrapped)
lazy val `tasty-core-scala2`: Project = project.in(file("tasty")).asTastyCoreScala2
def tastyCore(implicit mode: Mode): Project = mode match {
case NonBootstrapped => `tasty-core`
case Bootstrapped => `tasty-core-bootstrapped`
}
lazy val `scala3-staging` = project.in(file("staging")).
withCommonSettings(Bootstrapped).
// We want the compiler to be present in the compiler classpath when compiling this project but not
// when compiling a project that depends on scala3-staging (see sbt-test/sbt-dotty/quoted-example-project),
// but we always need it to be present on the JVM classpath at runtime.
dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
settings(
javaOptions := (`scala3-compiler-bootstrapped` / javaOptions).value
)
lazy val `scala3-tasty-inspector` = project.in(file("tasty-inspector")).
withCommonSettings(Bootstrapped).
// We want the compiler to be present in the compiler classpath when compiling this project but not
// when compiling a project that depends on scala3-tasty-inspector (see sbt-test/sbt-dotty/tasty-inspector-example-project),
// but we always need it to be present on the JVM classpath at runtime.
dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
settings(
javaOptions := (`scala3-compiler-bootstrapped` / javaOptions).value
)
/** Scala library compiled by dotty using the latest published sources of the library */
lazy val `stdlib-bootstrapped` = project.in(file("stdlib-bootstrapped")).
withCommonSettings(Bootstrapped).
dependsOn(dottyCompiler(Bootstrapped) % "provided; compile->runtime; test->test").
dependsOn(`scala3-tasty-inspector` % "test->test").
settings(commonBootstrappedSettings).
settings(
moduleName := "scala-library",
javaOptions := (`scala3-compiler-bootstrapped` / javaOptions).value,
Compile/scalacOptions += "-Yerased-terms",
Compile/scalacOptions ++= {
Seq(
"-sourcepath",
Seq(
(Compile/sourceManaged).value / "scala-library-src",
(Compile/sourceManaged).value / "dotty-library-src",
).mkString(File.pathSeparator),
)
},
Compile / doc / scalacOptions += "-Ydocument-synthetic-types",
scalacOptions -= "-Xfatal-warnings",
ivyConfigurations += SourceDeps.hide,
transitiveClassifiers := Seq("sources"),
libraryDependencies +=
("org.scala-lang" % "scala-library" % stdlibVersion(Bootstrapped) % "sourcedeps"),
(Compile / sourceGenerators) += Def.task {
val s = streams.value
val cacheDir = s.cacheDirectory
val trgDir = (Compile / sourceManaged).value / "scala-library-src"
val report = updateClassifiers.value
val scalaLibrarySourcesJar = report.select(
configuration = configurationFilter("sourcedeps"),
module = (_: ModuleID).name == "scala-library",
artifact = artifactFilter(`type` = "src")).headOption.getOrElse {
sys.error(s"Could not fetch scala-library sources")
}
FileFunction.cached(cacheDir / s"fetchScalaLibrarySrc",
FilesInfo.lastModified, FilesInfo.exists) { dependencies =>
s.log.info(s"Unpacking scala-library sources to $trgDir...")
if (trgDir.exists)
IO.delete(trgDir)
IO.createDirectory(trgDir)
IO.unzip(scalaLibrarySourcesJar, trgDir)
((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet
} (Set(scalaLibrarySourcesJar)).toSeq
}.taskValue,
(Compile / sourceGenerators) += Def.task {
val s = streams.value
val cacheDir = s.cacheDirectory
val trgDir = (Compile / sourceManaged).value / "dotty-library-src"
// NOTE `sourceDirectory` is used for actual copying,
// but `sources` are used as cache keys
val dottyLibSourceDirs = (`scala3-library-bootstrapped`/Compile/unmanagedSourceDirectories).value
def dottyLibSources = dottyLibSourceDirs.foldLeft(PathFinder.empty) { (pf, dir) =>
if (!dir.exists) pf else pf +++ (dir ** "*.scala") +++ (dir ** "*.java")
}
val cachedFun = FileFunction.cached(
cacheDir / s"copyDottyLibrarySrc",
FilesInfo.lastModified,
FilesInfo.exists,
) { _ =>
if (trgDir.exists) IO.delete(trgDir)
dottyLibSourceDirs.foreach { dir =>
if (dir.exists) {
s.log.info(s"Copying scala3-library sources from $dir to $trgDir...")
IO.copyDirectory(dir, trgDir)
}
}
((trgDir ** "*.scala") +++ (trgDir ** "*.java")).get.toSet
}
cachedFun(dottyLibSources.get.toSet).toSeq
}.taskValue,
(Compile / sources) ~= (_.filterNot(file =>
// sources from https://github.com/scala/scala/tree/2.13.x/src/library-aux
file.getPath.endsWith("scala-library-src/scala/Any.scala") ||
file.getPath.endsWith("scala-library-src/scala/AnyVal.scala") ||
file.getPath.endsWith("scala-library-src/scala/AnyRef.scala") ||
file.getPath.endsWith("scala-library-src/scala/Nothing.scala") ||
file.getPath.endsWith("scala-library-src/scala/Null.scala") ||
file.getPath.endsWith("scala-library-src/scala/Singleton.scala"))),
(Test / managedClasspath) ~= {
_.filterNot(file => file.data.getName == s"scala-library-${stdlibVersion(Bootstrapped)}.jar")
},
)
/** Test the tasty generated by `stdlib-bootstrapped`
*
   * The tests are run with the bootstrapped compiler and the tasty inspector on the classpath.
* The classpath has the default `scala-library` and not `stdlib-bootstrapped`.
*
   * The jar of `stdlib-bootstrapped` is provided to the tests.
* - inspector: test that we can load the contents of the jar using the tasty inspector
* - from-tasty: test that we can recompile the contents of the jar using `dotc -from-tasty`
*/
lazy val `stdlib-bootstrapped-tasty-tests` = project.in(file("stdlib-bootstrapped-tasty-tests")).
withCommonSettings(Bootstrapped).
dependsOn(`scala3-tasty-inspector` % "test->test").
settings(commonBootstrappedSettings).
settings(
javaOptions := (`scala3-compiler-bootstrapped` / javaOptions).value,
javaOptions += "-Ddotty.scala.library=" + (`stdlib-bootstrapped` / Compile / packageBin).value.getAbsolutePath
)
lazy val `scala3-sbt-bridge` = project.in(file("sbt-bridge/src")).
// We cannot depend on any bootstrapped project to compile the bridge, since the
// bridge is needed to compile these projects.
dependsOn(`scala3-compiler` % Provided).
settings(commonJavaSettings).
settings(
description := "sbt compiler bridge for Dotty",
Test / sources := Seq(),
Compile / scalaSource := baseDirectory.value,
Compile / javaSource := baseDirectory.value,
Compile / resourceDirectory := baseDirectory.value.getParentFile / "resources",
// Referring to the other project using a string avoids an infinite loop
// when sbt reads the settings.
Test / test := (LocalProject("scala3-sbt-bridge-tests") / Test / test).value,
// The `newCompilerInterface` is backward compatible with the `oldCompilerInterface`
libraryDependencies += Dependencies.newCompilerInterface % Provided
)
// We use a separate project for the bridge tests since they can only be run
// with the bootstrapped library on the classpath.
lazy val `scala3-sbt-bridge-tests` = project.in(file("sbt-bridge/test")).
dependsOn(dottyCompiler(Bootstrapped) % Test).
settings(commonBootstrappedSettings).
settings(
Compile / sources := Seq(),
Test / scalaSource := baseDirectory.value,
Test / javaSource := baseDirectory.value,
// Tests disabled until zinc-api-info cross-compiles with 2.13,
// alternatively we could just copy in sources the part of zinc-api-info we need.
Test / sources := Seq()
)
lazy val `scala3-language-server` = project.in(file("language-server")).
dependsOn(dottyCompiler(Bootstrapped)).
settings(commonBootstrappedSettings).
settings(
libraryDependencies ++= Seq(
"org.eclipse.lsp4j" % "org.eclipse.lsp4j" % "0.6.0",
Dependencies.`jackson-databind`
),
// Work around https://github.com/eclipse/lsp4j/issues/295
dependencyOverrides += "org.eclipse.xtend" % "org.eclipse.xtend.lib" % "2.16.0",
javaOptions := (`scala3-compiler-bootstrapped` / javaOptions).value,
).
settings(
ideTestsCompilerVersion := (`scala3-compiler` / version).value,
ideTestsCompilerArguments := Seq(),
ideTestsDependencyClasspath := {
val dottyLib = (`scala3-library-bootstrapped` / Compile / classDirectory).value
val scalaLib =
(`scala3-library-bootstrapped` / Compile / dependencyClasspath)
.value
.map(_.data)
          .filter(_.getName.matches("scala-library.*\\.jar"))
.toList
dottyLib :: scalaLib
},
Test / buildInfoKeys := Seq[BuildInfoKey](
ideTestsCompilerVersion,
ideTestsCompilerArguments,
ideTestsDependencyClasspath
),
Test / buildInfoPackage := "dotty.tools.languageserver.util.server",
BuildInfoPlugin.buildInfoScopedSettings(Test),
BuildInfoPlugin.buildInfoDefaultSettings
)
/** A sandbox to play with the Scala.js back-end of dotty.
*
* This sandbox is compiled with dotty with support for Scala.js. It can be
* used like any regular Scala.js project. In particular, `fastOptJS` will
* produce a .js file, and `run` will run the JavaScript code with a JS VM.
*
* Simply running `dotty/run -scalajs` without this sandbox is not very
* useful, as that would not provide the linker and JS runners.
*/
lazy val sjsSandbox = project.in(file("sandbox/scalajs")).
enablePlugins(DottyJSPlugin).
dependsOn(`scala3-library-bootstrappedJS`).
settings(
// Required to run Scala.js tests.
Test / fork := false,
scalaJSUseMainModuleInitializer := true,
)
/** Scala.js test suite.
*
* This project downloads the sources of the upstream Scala.js test suite,
* and tests them with the dotty Scala.js back-end. Currently, only a very
* small fraction of the upstream test suite is actually compiled and run.
* It will grow in the future, as more stuff is confirmed to be supported.
*/
lazy val sjsJUnitTests = project.in(file("tests/sjs-junit")).
enablePlugins(DottyJSPlugin).
dependsOn(`scala3-library-bootstrappedJS`).
settings(
scalacOptions --= Seq("-Xfatal-warnings", "-deprecation"),
// Required to run Scala.js tests.
Test / fork := false,
fetchScalaJSSource / sourceDirectory := target.value / s"scala-js-src-$scalaJSVersion",
fetchScalaJSSource := {
import org.eclipse.jgit.api._
import org.eclipse.jgit.lib._
val s = streams.value
val ver = scalaJSVersion
val trgDir = (fetchScalaJSSource / sourceDirectory).value
if (!trgDir.exists) {
s.log.info(s"Fetching Scala.js source version $ver")
IO.createDirectory(trgDir)
new CloneCommand()
.setDirectory(trgDir)
.setURI("https://github.com/scala-js/scala-js.git")
.setNoCheckout(true)
.call()
}
// Checkout proper ref. We do this anyway so we fail if something is wrong
val git = Git.open(trgDir)
s.log.info(s"Checking out Scala.js source version $ver")
git.getRepository().getConfig().setEnum("core", null, "autocrlf", CoreConfig.AutoCRLF.FALSE)
git.checkout().setName(s"v$ver").call()
trgDir
},
// We need JUnit in the Compile configuration
libraryDependencies +=
("org.scala-js" %% "scalajs-junit-test-runtime" % scalaJSVersion).cross(CrossVersion.for3Use2_13),
(Compile / sourceGenerators) += Def.task {
import org.scalajs.linker.interface.CheckedBehavior
val stage = scalaJSStage.value
val linkerConfig = stage match {
case FastOptStage => (Compile / fastOptJS / scalaJSLinkerConfig).value
case FullOptStage => (Compile / fullOptJS / scalaJSLinkerConfig).value
}
val moduleKind = linkerConfig.moduleKind
val sems = linkerConfig.semantics
ConstantHolderGenerator.generate(
(Compile / sourceManaged).value,
"org.scalajs.testsuite.utils.BuildInfo",
"scalaVersion" -> scalaVersion.value,
"hasSourceMaps" -> false, //DottyJSPlugin.wantSourceMaps.value,
"isNoModule" -> (moduleKind == ModuleKind.NoModule),
"isESModule" -> (moduleKind == ModuleKind.ESModule),
"isCommonJSModule" -> (moduleKind == ModuleKind.CommonJSModule),
"isFullOpt" -> (stage == FullOptStage),
"compliantAsInstanceOfs" -> (sems.asInstanceOfs == CheckedBehavior.Compliant),
"compliantArrayIndexOutOfBounds" -> (sems.arrayIndexOutOfBounds == CheckedBehavior.Compliant),
"compliantModuleInit" -> (sems.moduleInit == CheckedBehavior.Compliant),
"strictFloats" -> sems.strictFloats,
"productionMode" -> sems.productionMode,
"esVersion" -> linkerConfig.esFeatures.esVersion.edition,
"useECMAScript2015Semantics" -> linkerConfig.esFeatures.useECMAScript2015Semantics,
)
}.taskValue,
(Test / scalacOptions) += "-scalajs-genStaticForwardersForNonTopLevelObjects",
scalaJSLinkerConfig ~= { _.withSemantics(build.TestSuiteLinkerOptions.semantics _) },
(Test / scalaJSModuleInitializers) ++= build.TestSuiteLinkerOptions.moduleInitializers,
// Perform Ycheck after the Scala.js-specific transformation phases
scalacOptions += "-Ycheck:prepjsinterop,explicitJSClasses,addLocalJSFakeNews",
Test / jsEnvInput := {
val resourceDir = fetchScalaJSSource.value / "test-suite/js/src/test/resources"
val f = (resourceDir / "NonNativeJSTypeTestNatives.js").toPath
org.scalajs.jsenv.Input.Script(f) +: (Test / jsEnvInput).value
},
(Compile / managedSources) ++= {
val dir = fetchScalaJSSource.value
(
(dir / "test-suite/js/src/main/scala" ** (("*.scala": FileFilter)
-- "Typechecking*.scala" // defines a Scala 2 macro
)).get
++ (dir / "junit-async/js/src/main/scala" ** "*.scala").get
)
},
// A first blacklist of tests for those that do not compile or do not link
(Test / managedSources) ++= {
val dir = fetchScalaJSSource.value / "test-suite"
(
(dir / "shared/src/test/scala" ** (("*.scala": FileFilter)
-- "ReflectiveCallTest.scala" // uses many forms of structural calls that are not allowed in Scala 3 anymore
-- "EnumerationTest.scala" // scala.Enumeration support for Scala.js is not implemented in scalac (yet)
)).get
++ (dir / "shared/src/test/require-sam" ** "*.scala").get
++ (dir / "shared/src/test/require-jdk8" ** "*.scala").get
++ (dir / "shared/src/test/require-jdk7" ** "*.scala").get
++ (dir / "js/src/test/scala" ** (("*.scala": FileFilter)
-- "StackTraceTest.scala" // would require `npm install source-map-support`
-- "UnionTypeTest.scala" // requires the Scala 2 macro defined in Typechecking*.scala
)).get
++ (dir / "js/src/test/require-2.12" ** "*.scala").get
++ (dir / "js/src/test/require-sam" ** "*.scala").get
++ (dir / "js/src/test/scala-new-collections" ** "*.scala").get
)
},
)
lazy val sjsCompilerTests = project.in(file("sjs-compiler-tests")).
dependsOn(`scala3-compiler` % "test->test").
settings(
commonNonBootstrappedSettings,
// Change the baseDirectory when running the tests
Test / baseDirectory := baseDirectory.value.getParentFile,
javaOptions ++= (`scala3-compiler` / javaOptions).value,
javaOptions ++= {
val externalJSDeps = (`scala3-library-bootstrappedJS` / Compile / externalDependencyClasspath).value
val dottyLibraryJSJar = (`scala3-library-bootstrappedJS` / Compile / packageBin).value.getAbsolutePath
Seq(
"-Ddotty.tests.classes.dottyLibraryJS=" + dottyLibraryJSJar,
"-Ddotty.tests.classes.scalaJSLibrary=" + findArtifactPath(externalJSDeps, "scalajs-library_2.13"),
)
},
)
lazy val `scala3-bench` = project.in(file("bench")).asDottyBench(NonBootstrapped)
lazy val `scala3-bench-bootstrapped` = project.in(file("bench")).asDottyBench(Bootstrapped)
lazy val `scala3-bench-run` = project.in(file("bench-run")).asDottyBench(Bootstrapped)
  val testcasesOutputDir = taskKey[Seq[String]]("Root directory where test classes are generated")
  val testcasesSourceRoot = taskKey[String]("Root directory where test sources are generated")
  val testDocumentationRoot = taskKey[String]("Root directory where test documentation is stored")
val generateSelfDocumentation = taskKey[Unit]("Generate example documentation")
// Note: the two tasks below should be one, but a bug in Tasty prevents that
val generateScalaDocumentation = inputKey[Unit]("Generate documentation for dotty lib")
  val generateTestcasesDocumentation  = taskKey[Unit]("Generate documentation for testcases, useful for debugging tests")
val generateReferenceDocumentation = taskKey[Unit]("Generate language reference documentation for Scala 3")
lazy val `scaladoc-testcases` = project.in(file("scaladoc-testcases")).
dependsOn(`scala3-compiler-bootstrapped`).
settings(commonBootstrappedSettings)
/**
   * Collection of projects building targets for scaladoc. These are:
   * - common - common module for JavaScript
   * - main - main target for the default scaladoc, producing the HTML webpage
   * - contributors - a project unrelated to any of the aforementioned modules, used for presenting contributors on the static site.
   *   Made as an independent project to be scaladoc-agnostic.
*/
lazy val `scaladoc-js-common` = project.in(file("scaladoc-js/common")).
enablePlugins(DottyJSPlugin).
dependsOn(`scala3-library-bootstrappedJS`).
settings(libraryDependencies += ("org.scala-js" %%% "scalajs-dom" % "1.1.0").cross(CrossVersion.for3Use2_13))
lazy val `scaladoc-js-main` = project.in(file("scaladoc-js/main")).
enablePlugins(DottyJSPlugin).
dependsOn(`scaladoc-js-common`).
settings(
scalaJSUseMainModuleInitializer := true,
Test / fork := false
)
lazy val `scaladoc-js-contributors` = project.in(file("scaladoc-js/contributors")).
enablePlugins(DottyJSPlugin).
dependsOn(`scala3-library-bootstrappedJS`).
settings(
Test / fork := false,
scalaJSUseMainModuleInitializer := true,
libraryDependencies += ("org.scala-js" %%% "scalajs-dom" % "1.1.0").cross(CrossVersion.for3Use2_13)
)
def generateDocumentation(configTask: Def.Initialize[Task[GenerationConfig]]) =
Def.taskDyn {
val config = configTask.value
config.get[OutputDir].foreach { outDir =>
IO.createDirectory(file(outDir.value))
}
val command = generateCommand(config)
Def.task {
(Compile / run).toTask(command).value
}
}
val SourceLinksIntegrationTest = config("sourceLinksIntegrationTest") extend Test
lazy val scaladoc = project.in(file("scaladoc")).
configs(SourceLinksIntegrationTest).
settings(commonBootstrappedSettings).
dependsOn(`scala3-compiler-bootstrapped`).
dependsOn(`scala3-tasty-inspector`).
settings(inConfig(SourceLinksIntegrationTest)(Defaults.testSettings)).
settings(
SourceLinksIntegrationTest / scalaSource := baseDirectory.value / "test-source-links",
    SourceLinksIntegrationTest / test := ((SourceLinksIntegrationTest / test) dependsOn generateScalaDocumentation.toTask("")).value,
).
settings(
Compile / resourceGenerators += Def.task {
DocumentationWebsite.generateStaticAssets(
(`scaladoc-js-contributors` / Compile / fullOptJS).value.data,
(`scaladoc-js-main` / Compile / fullOptJS).value.data,
(`scaladoc-js-contributors` / Compile / baseDirectory).value / "css",
(`scaladoc-js-common` / Compile / baseDirectory).value / "css",
(Compile / resourceManaged).value,
)
}.taskValue,
libraryDependencies ++= Dependencies.flexmarkDeps ++ Seq(
"nl.big-o" % "liqp" % "0.8.2",
"org.jsoup" % "jsoup" % "1.14.3", // Needed to process .html files for static site
Dependencies.`jackson-dataformat-yaml`,
"com.novocode" % "junit-interface" % "0.11" % "test",
),
Compile / mainClass := Some("dotty.tools.scaladoc.Main"),
Compile / buildInfoKeys := Seq[BuildInfoKey](version),
Compile / buildInfoPackage := "dotty.tools.scaladoc",
BuildInfoPlugin.buildInfoScopedSettings(Compile),
BuildInfoPlugin.buildInfoDefaultSettings,
Test / test := (Test / test).dependsOn(`scaladoc-testcases` / Compile / compile).value,
Test / testcasesOutputDir := (`scaladoc-testcases`/Compile/products).value.map(_.getAbsolutePath),
Test / testcasesSourceRoot := ((`scaladoc-testcases` / baseDirectory).value / "src").getAbsolutePath.toString,
run / baseDirectory := (ThisBuild / baseDirectory).value,
generateSelfDocumentation := Def.taskDyn {
generateDocumentation(Scaladoc)
}.value,
generateScalaDocumentation := Def.inputTaskDyn {
val majorVersion = (LocalProject("scala3-library-bootstrapped") / scalaBinaryVersion).value
val extraArgs = spaceDelimited("[<output-dir>] [--justAPI]").parsed
val outputDirOverride = extraArgs.headOption.fold(identity[GenerationConfig](_))(newDir => {
config: GenerationConfig => config.add(OutputDir(newDir))
})
val justAPIArg: Option[String] = extraArgs.drop(1).find(_ == "--justAPI")
val justAPI = justAPIArg.fold(identity[GenerationConfig](_))(_ => {
config: GenerationConfig => config.remove[SiteRoot]
})
val overrideFunc = outputDirOverride.andThen(justAPI)
val config = Def.task {
overrideFunc(Scala3.value)
}
val writeAdditionalFiles = Def.task {
val dest = file(config.value.get[OutputDir].get.value)
if (justAPIArg.isEmpty) {
IO.write(dest / "versions" / "latest-nightly-base", majorVersion)
// This file is used by GitHub Pages when the page is available in a custom domain
IO.write(dest / "CNAME", "dotty.epfl.ch")
}
}
writeAdditionalFiles.dependsOn(generateDocumentation(config))
}.evaluated,
generateTestcasesDocumentation := Def.taskDyn {
generateDocumentation(Testcases)
}.value,
generateReferenceDocumentation := Def.taskDyn {
val temp = IO.createTemporaryDirectory
IO.copyDirectory(file("docs"), temp / "docs")
IO.delete(temp / "docs" / "_blog")
IO.copyDirectory(
file("project") / "resources" / "referenceReplacements",
temp / "docs",
overwrite = true
)
val languageReferenceConfig = Def.task {
Scala3.value
.add(OutputDir("scaladoc/output/reference"))
.add(SiteRoot(s"${temp.getAbsolutePath}/docs"))
.add(ProjectName("Scala 3 Reference"))
.add(SourceLinks(List(
dottySrcLink(referenceVersion, temp.getAbsolutePath + "=")
)))
.withTargets(List("___fake___.scala"))
}
generateDocumentation(languageReferenceConfig)
}.value,
Test / buildInfoKeys := Seq[BuildInfoKey](
(Test / Build.testcasesOutputDir),
(Test / Build.testcasesSourceRoot),
Build.testDocumentationRoot,
),
testDocumentationRoot := (baseDirectory.value / "test-documentations").getAbsolutePath,
Test / buildInfoPackage := "dotty.tools.scaladoc.test",
BuildInfoPlugin.buildInfoScopedSettings(Test),
)
// various scripted sbt tests
lazy val `sbt-test` = project.in(file("sbt-test")).
enablePlugins(ScriptedPlugin).
settings(commonSettings).
settings(
sbtTestDirectory := baseDirectory.value,
target := baseDirectory.value / ".." / "out" / name.value,
// The batch mode accidentally became the default with no way to disable
// it in sbt 1.4 (https://github.com/sbt/sbt/issues/5913#issuecomment-716003195).
// We enable it explicitly here to make it clear that we're using it.
scriptedBatchExecution := true,
scriptedLaunchOpts ++= Seq(
"-Dplugin.version=" + version.value,
"-Dplugin.scalaVersion=" + dottyVersion,
"-Dplugin.scala2Version=" + stdlibVersion(Bootstrapped),
"-Dplugin.scalaJSVersion=" + scalaJSVersion,
"-Dsbt.boot.directory=" + ((ThisBuild / baseDirectory).value / ".sbt-scripted").getAbsolutePath // Workaround sbt/sbt#3469
),
// Pass along ivy home and repositories settings to sbt instances run from the tests
scriptedLaunchOpts ++= {
val repositoryPath = (io.Path.userHome / ".sbt" / "repositories").absolutePath
s"-Dsbt.repository.config=$repositoryPath" ::
ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList
},
scriptedBufferLog := true,
scripted := scripted.dependsOn(
(`scala3-sbt-bridge` / publishLocal),
(`scala3-interfaces` / publishLocal),
(`scala3-compiler-bootstrapped` / publishLocal),
(`scala3-library-bootstrapped` / publishLocal),
(`scala3-library-bootstrappedJS` / publishLocal),
(`tasty-core-bootstrapped` / publishLocal),
(`scala3-staging` / publishLocal),
(`scala3-tasty-inspector` / publishLocal),
(`scaladoc` / publishLocal),
(`scala3-bootstrapped` / publishLocal) // Needed because sbt currently hardcodes the dotty artifact
).evaluated
)
lazy val `sbt-community-build` = project.in(file("sbt-community-build")).
enablePlugins(SbtPlugin).
settings(commonSettings).
settings(
name := "sbt-community-build",
version := sbtCommunityBuildVersion,
organization := "ch.epfl.lamp",
sbtTestDirectory := baseDirectory.value / "sbt-test",
scriptedLaunchOpts ++= Seq(
"-Dplugin.version=" + version.value,
"-Dplugin.scalaVersion=" + dottyVersion,
"-Dplugin.scalaJSVersion=" + scalaJSVersion,
"-Dplugin.sbtDottyVersion=" + sbtDottyVersion,
"-Ddotty.communitybuild.dir=" + baseDirectory.value / "target",
"-Dsbt.boot.directory=" + ((ThisBuild / baseDirectory).value / ".sbt-scripted").getAbsolutePath // Workaround sbt/sbt#3469
),
// Pass along ivy home and repositories settings to sbt instances run from the tests
scriptedLaunchOpts ++= {
val repositoryPath = (io.Path.userHome / ".sbt" / "repositories").absolutePath
s"-Dsbt.repository.config=$repositoryPath" ::
ivyPaths.value.ivyHome.map("-Dsbt.ivy.home=" + _.getAbsolutePath).toList
},
scriptedBufferLog := true,
scriptedBatchExecution := true,
scripted := scripted.dependsOn(
(`scala3-sbt-bridge` / publishLocal),
(`scala3-interfaces` / publishLocal),
(`scala3-compiler-bootstrapped` / publishLocal),
(`scala3-library-bootstrapped` / publishLocal),
(`scala3-library-bootstrappedJS` / publishLocal),
(`tasty-core-bootstrapped` / publishLocal),
(`scala3-staging` / publishLocal),
(`scala3-tasty-inspector` / publishLocal),
(`scaladoc` / publishLocal),
(`scala3-bootstrapped` / publishLocal)
).evaluated
)
  val prepareCommunityBuild = taskKey[Unit]("Publish the compiler and the sbt plugin locally. Also store the versions of the locally published artefacts in two files, community-build/{scala3-bootstrapped.version,sbt-dotty-sbt}.")
lazy val `community-build` = project.in(file("community-build")).
dependsOn(dottyLibrary(Bootstrapped)).
settings(commonBootstrappedSettings).
settings(
prepareCommunityBuild := {
(`scala3-sbt-bridge` / publishLocal).value
(`scala3-interfaces` / publishLocal).value
(`tasty-core-bootstrapped` / publishLocal).value
(`scala3-library-bootstrapped` / publishLocal).value
(`scala3-tasty-inspector` / publishLocal).value
(`scaladoc` / publishLocal).value
(`scala3-compiler-bootstrapped` / publishLocal).value
(`scala3-bootstrapped` / publishLocal).value
(`scala3-library-bootstrappedJS` / publishLocal).value
(`sbt-community-build` / publishLocal).value
// (publishLocal in `scala3-staging`).value
val pluginText =
s"""updateOptions in Global ~= (_.withLatestSnapshots(false))
|addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "$sbtDottyVersion")
|addSbtPlugin("ch.epfl.lamp" % "sbt-community-build" % "$sbtCommunityBuildVersion")
|addSbtPlugin("org.scala-js" % "sbt-scalajs" % "$scalaJSVersion")""".stripMargin
IO.write(baseDirectory.value / "sbt-dotty-sbt", pluginText)
IO.write(baseDirectory.value / "scala3-bootstrapped.version", dottyVersion)
IO.delete(baseDirectory.value / "dotty-community-build-deps") // delete any stale deps file
},
(Test / testOptions) += Tests.Argument(
TestFrameworks.JUnit,
"--include-categories=dotty.communitybuild.TestCategory",
"--run-listener=dotty.communitybuild.FailureSummarizer",
),
Compile/run := (Compile/run).dependsOn(prepareCommunityBuild).evaluated,
Test / testOnly := ((Test / testOnly) dependsOn prepareCommunityBuild).evaluated,
Test / test := ((Test / test ) dependsOn prepareCommunityBuild).value,
javaOptions ++= {
// Propagate the ivy cache directory setting to the tests, which will
// then propagate it further to the sbt instances they will spawn.
val sbtProps = Option(System.getProperty("sbt.ivy.home")) match {
case Some(ivyHome) =>
Seq(s"-Dsbt.ivy.home=$ivyHome")
case _ =>
Seq()
}
sbtProps
}
)
lazy val publishSettings = Seq(
publishMavenStyle := true,
isSnapshot := version.value.contains("SNAPSHOT"),
publishTo := sonatypePublishToBundle.value,
publishConfiguration ~= (_.withOverwrite(true)),
publishLocalConfiguration ~= (_.withOverwrite(true)),
Test / publishArtifact := false,
homepage := Some(url(dottyGithubUrl)),
licenses += (("Apache-2.0",
url("https://www.apache.org/licenses/LICENSE-2.0"))),
scmInfo := Some(
ScmInfo(
url(dottyGithubUrl),
"scm:git:[email protected]:lampepfl/dotty.git"
)
),
developers := List(
Developer(
id = "odersky",
name = "Martin Odersky",
email = "[email protected]",
url = url("https://github.com/odersky")
),
Developer(
id = "DarkDimius",
name = "Dmitry Petrashko",
email = "[email protected]",
url = url("https://d-d.me")
),
Developer(
id = "smarter",
name = "Guillaume Martres",
email = "[email protected]",
url = url("http://guillaume.martres.me")
),
Developer(
id = "felixmulder",
name = "Felix Mulder",
email = "[email protected]",
url = url("http://felixmulder.com")
),
Developer(
id = "liufengyun",
name = "Liu Fengyun",
email = "[email protected]",
url = url("https://fengy.me")
),
Developer(
id = "nicolasstucki",
name = "Nicolas Stucki",
email = "[email protected]",
url = url("https://github.com/nicolasstucki")
),
Developer(
id = "OlivierBlanvillain",
name = "Olivier Blanvillain",
email = "[email protected]",
url = url("https://github.com/OlivierBlanvillain")
),
Developer(
id = "biboudis",
name = "Aggelos Biboudis",
email = "[email protected]",
url = url("http://biboudis.github.io")
),
Developer(
id = "allanrenucci",
name = "Allan Renucci",
email = "[email protected]",
url = url("https://github.com/allanrenucci")
),
Developer(
id = "Duhemm",
name = "Martin Duhem",
email = "[email protected]",
url = url("https://github.com/Duhemm")
)
)
)
lazy val commonDistSettings = Seq(
packMain := Map(),
publishArtifact := false,
packGenerateMakefile := false,
packExpandedClasspath := true,
packArchiveName := "scala3-" + dottyVersion
)
lazy val dist = project.asDist(Bootstrapped)
.settings(
packResourceDir += (baseDirectory.value / "bin" -> "bin"),
)
implicit class ProjectDefinitions(val project: Project) extends AnyVal {
// FIXME: we do not aggregate `bin` because its tests delete jars, thus breaking other tests
def asDottyRoot(implicit mode: Mode): Project = project.withCommonSettings.
aggregate(`scala3-interfaces`, dottyLibrary, dottyCompiler, tastyCore, `scala3-sbt-bridge`).
bootstrappedAggregate(`scala3-language-server`, `scala3-staging`, `scala3-tasty-inspector`,
`scala3-library-bootstrappedJS`, scaladoc).
dependsOn(tastyCore).
dependsOn(dottyCompiler).
dependsOn(dottyLibrary).
nonBootstrappedSettings(
addCommandAlias("run", "scala3-compiler/run"),
// Clean everything by default
addCommandAlias("clean", ";scala3/clean;scala3-bootstrapped/clean"),
// `publishLocal` on the non-bootstrapped compiler does not produce a
// working distribution (it can't in general, since there's no guarantee
// that the non-bootstrapped library is compatible with the
// non-bootstrapped compiler), so publish the bootstrapped one by
// default.
addCommandAlias("publishLocal", "scala3-bootstrapped/publishLocal"),
repl := (`scala3-compiler-bootstrapped` / repl).value,
).
settings(
publish / skip := true
)
def asDottyCompiler(implicit mode: Mode): Project = project.withCommonSettings.
dependsOn(`scala3-interfaces`).
dependsOn(dottyLibrary).
dependsOn(tastyCore).
settings(dottyCompilerSettings)
def asDottyLibrary(implicit mode: Mode): Project = {
val base =
project.withCommonSettings.
settings(
versionScheme := Some("semver-spec"),
libraryDependencies += "org.scala-lang" % "scala-library" % stdlibVersion,
// Make sure we do not refer to experimental features outside an experimental scope.
// In other words, disable NIGHTLY/SNAPSHOT experimental scope.
scalacOptions += "-Yno-experimental",
).
settings(dottyLibrarySettings)
if (mode == Bootstrapped) {
base.settings(
(Compile/doc) := {
// Workaround for
// [error] |object IArray cannot have the same name as object IArray in package scala
// -- cannot define object member with the same name as a object member in self reference _.
val doWork = (Compile/doc).result.value
(Compile/doc/target).value
},
commonMiMaSettings,
mimaBinaryIssueFilters ++= MiMaFilters.Library
)
} else base
}
def asTastyCore(implicit mode: Mode): Project = project.withCommonSettings.
dependsOn(dottyLibrary).
settings(tastyCoreSettings).
settings(disableDocSetting).
settings(
versionScheme := Some("semver-spec"),
if (mode == Bootstrapped) {
commonMiMaSettings
} else {
Nil
}
)
def asTastyCoreScala2: Project = project.settings(commonScala2Settings)
def asDottyBench(implicit mode: Mode): Project = project.withCommonSettings.
dependsOn(dottyCompiler).
settings(commonBenchmarkSettings).
enablePlugins(JmhPlugin)
def asDist(implicit mode: Mode): Project = project.
enablePlugins(PackPlugin).
withCommonSettings.
dependsOn(`scala3-interfaces`, dottyCompiler, dottyLibrary, tastyCore, `scala3-staging`, `scala3-tasty-inspector`, scaladoc).
settings(commonDistSettings).
bootstrappedSettings(
target := baseDirectory.value / "target" // override setting in commonBootstrappedSettings
)
def withCommonSettings(implicit mode: Mode): Project = project.settings(mode match {
case NonBootstrapped => commonNonBootstrappedSettings
case Bootstrapped => commonBootstrappedSettings
})
}
}
object ScaladocConfigs {
import Build._
private lazy val currentYear: String = java.util.Calendar.getInstance().get(java.util.Calendar.YEAR).toString
def dottyExternalMapping = ".*scala/.*::scaladoc3::https://dotty.epfl.ch/api/"
def javaExternalMapping = ".*java/.*::javadoc::https://docs.oracle.com/javase/8/docs/api/"
def scalaSrcLink(v: String, s: String) = s"${s}github://scala/scala/v$v#src/library"
def dottySrcLink(v: String, sourcesPrefix: String = "", outputPrefix: String = "") =
sys.env.get("GITHUB_SHA") match {
case Some(sha) =>
s"${sourcesPrefix}github://${sys.env("GITHUB_REPOSITORY")}/$sha$outputPrefix"
case None => s"${sourcesPrefix}github://lampepfl/dotty/$v$outputPrefix"
}
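  // For example, when the GITHUB_SHA environment variable is unset:
  //   dottySrcLink("3.0.0", "docs=", "#docs") == "docs=github://lampepfl/dotty/3.0.0#docs"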
lazy val DefaultGenerationConfig = Def.task {
def distLocation = (dist / pack).value
def projectVersion = version.value
def stdLibVersion = stdlibVersion(NonBootstrapped)
def scalaLib = findArtifactPath(externalCompilerClasspathTask.value, "scala-library")
def dottyLib = (`scala3-library` / Compile / classDirectory).value
def srcManaged(v: String, s: String) = s"out/bootstrap/stdlib-bootstrapped/scala-$v/src_managed/main/$s-library-src"
def defaultSourceLinks: SourceLinks = SourceLinks(
List(
scalaSrcLink(stdLibVersion, srcManaged(dottyNonBootstrappedVersion, "scala") + "="),
dottySrcLink(referenceVersion, srcManaged(dottyNonBootstrappedVersion, "dotty") + "=", "#library/src"),
dottySrcLink(referenceVersion),
"docs=github://lampepfl/dotty/main#docs"
)
)
def socialLinks = SocialLinks(List(
"github::https://github.com/lampepfl/dotty",
"discord::https://discord.com/invite/scala",
"twitter::https://twitter.com/scala_lang",
))
def projectLogo = ProjectLogo("docs/_assets/images/logo.svg")
    def skipByRegex = SkipByRegex(List(".+\\.internal($|\\..+)", ".+\\.impl($|\\..+)"))
def skipById = SkipById(List(
"scala.runtime.stdLibPatches",
"scala.runtime.MatchCase"
))
def projectFooter = ProjectFooter(s"Copyright (c) 2002-$currentYear, LAMP/EPFL")
def defaultTemplate = DefaultTemplate("static-site-main")
GenerationConfig(
List(),
ProjectVersion(projectVersion),
GenerateInkuire(true),
defaultSourceLinks,
skipByRegex,
skipById,
projectLogo,
socialLinks,
projectFooter,
defaultTemplate,
Author(true),
Groups(true)
)
}
lazy val Scaladoc = Def.task {
DefaultGenerationConfig.value
.add(UseJavacp(true))
.add(ProjectName("scaladoc"))
.add(OutputDir("scaladoc/output/self"))
.add(Revision(VersionUtil.gitHash))
.add(ExternalMappings(List(dottyExternalMapping, javaExternalMapping)))
.withTargets((Compile / classDirectory).value.getAbsolutePath :: Nil)
}
lazy val Testcases = Def.task {
val tastyRoots = (Test / Build.testcasesOutputDir).value
DefaultGenerationConfig.value
.add(UseJavacp(true))
.add(OutputDir("scaladoc/output/testcases"))
.add(ProjectName("scaladoc testcases"))
.add(Revision("main"))
.add(SnippetCompiler(List("scaladoc-testcases/docs=compile")))
.add(SiteRoot("scaladoc-testcases/docs"))
.add(ExternalMappings(List(dottyExternalMapping, javaExternalMapping)))
.withTargets(tastyRoots)
}
lazy val Scala3 = Def.task {
val dottyJars: Seq[java.io.File] = Seq(
(`stdlib-bootstrapped`/Compile/products).value,
(`scala3-interfaces`/Compile/products).value,
(`tasty-core-bootstrapped`/Compile/products).value,
).flatten
val roots = dottyJars.map(_.getAbsolutePath)
val managedSources =
(`stdlib-bootstrapped`/Compile/sourceManaged).value / "scala-library-src"
val projectRoot = (ThisBuild/baseDirectory).value.toPath
val stdLibRoot = projectRoot.relativize(managedSources.toPath.normalize())
val docRootFile = stdLibRoot.resolve("rootdoc.txt")
val dottyManagesSources =
(`stdlib-bootstrapped`/Compile/sourceManaged).value / "dotty-library-src"
val dottyLibRoot = projectRoot.relativize(dottyManagesSources.toPath.normalize())
DefaultGenerationConfig.value
.add(ProjectName("Scala 3"))
.add(OutputDir(file("scaladoc/output/scala3").getAbsoluteFile.getAbsolutePath))
.add(Revision("main"))
.add(ExternalMappings(List(javaExternalMapping)))
.add(DocRootContent(docRootFile.toString))
.add(CommentSyntax("wiki"))
.add(VersionsDictionaryUrl("https://scala-lang.org/api/versions.json"))
.add(DocumentSyntheticTypes(true))
.add(SnippetCompiler(List(
s"${dottyLibRoot}/scala/quoted=compile",
s"${dottyLibRoot}/scala/compiletime=compile"
)))
.add(SiteRoot("docs"))
.add(ApiSubdirectory(true))
.withTargets(roots)
}
}
|
dotty-staging/dotty
|
project/Build.scala
|
Scala
|
apache-2.0
| 79,537 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.tree
import org.apache.spark.{Logging, SparkFunSuite}
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.configuration.Algo._
import org.apache.spark.mllib.tree.configuration.{BoostingStrategy, Strategy}
import org.apache.spark.mllib.tree.impurity.Variance
import org.apache.spark.mllib.tree.loss.{AbsoluteError, SquaredError, LogLoss}
import org.apache.spark.mllib.tree.model.GradientBoostedTreesModel
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.util.Utils
/**
 * Test suite for [[GradientBoostedTrees]].
 * Gradient-boosted trees combine several decision trees to reduce noise and avoid overfitting.
 * Training proceeds one tree at a time: the current ensemble predicts every instance, the
 * negative gradient of the loss is taken as the residual, the residual replaces the label,
 * and the next tree is fit to it; this repeats until the requested number of iterations is reached.
 * GBTs support only binary classification and regression (no multiclass), and at prediction time
 * the tree outputs are summed rather than averaged as in a random forest (a toy additive-prediction
 * sketch follows the first test below).
 */
class GradientBoostedTreesSuite extends SparkFunSuite with MLlibTestSparkContext with Logging {
test("Regression with continuous features: SquaredError") {//连续特征的回归:平方误差
GradientBoostedTreesSuite.testCombinations.foreach {
//subsamplingRate学习一棵决策树使用的训练数据比例,范围[0,1]
case (numIterations, learningRate, subsamplingRate) =>
val rdd = sc.parallelize(GradientBoostedTreesSuite.data, 2)
//subsamplingRate学习一棵决策树使用的训练数据比例,范围[0,1]
val treeStrategy = new Strategy(algo = Regression, impurity = Variance, maxDepth = 2,
/**
指明特征是类别型的以及每个类别型特征对应值(类别)。
Map(0 -> 2, 4->10)表示特征0有两个特征值(0和1),特征4有10个特征值{0,1,2,3,…,9}。
注意特征索引是从0开始的,0和4表示第1和第5个特征**/
categoricalFeaturesInfo = Map.empty, subsamplingRate = subsamplingRate)
val boostingStrategy =
new BoostingStrategy(treeStrategy, SquaredError, numIterations, learningRate)
//梯度提升决策树:综合多个决策树,消除噪声,避免过拟合
val gbt = GradientBoostedTrees.train(rdd, boostingStrategy)
assert(gbt.trees.size === numIterations)
try {
EnsembleTestHelper.validateRegressor(gbt, GradientBoostedTreesSuite.data, 0.06)
} catch {
case e: java.lang.AssertionError =>
logError(s"FAILED for numIterations=$numIterations, learningRate=$learningRate," +
s" subsamplingRate=$subsamplingRate")
throw e
}
val remappedInput = rdd.map(x => new LabeledPoint((x.label * 2) - 1, x.features))
val dt = DecisionTree.train(remappedInput, treeStrategy)
        // Make sure trees are the same.
assert(gbt.trees.head.toString == dt.toString)
}
}
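  // Illustrative sketch (not part of the original suite): the scaladoc above notes that a GBT
  // prediction is the weighted sum of the individual tree outputs rather than an average. The toy
  // helper below assumes each "tree" is just a Double => Double function and only mirrors the way
  // GradientBoostedTreesModel combines trees with their weights; the helper name is ours.
  private def toyGbtPredict(trees: Seq[Double => Double], weights: Seq[Double], x: Double): Double =
    trees.zip(weights).map { case (tree, w) => w * tree(x) }.sum
  // e.g. toyGbtPredict(Seq(_ => 1.0, _ => 0.5), Seq(1.0, 0.1), x = 0.0) == 1.0 * 1.0 + 0.1 * 0.5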
  // regression with continuous features: absolute error
test("Regression with continuous features: Absolute Error") {
GradientBoostedTreesSuite.testCombinations.foreach {
case (numIterations, learningRate, subsamplingRate) =>
val rdd = sc.parallelize(GradientBoostedTreesSuite.data, 2)
        // subsamplingRate: fraction of the training data used to learn each tree, in [0, 1]
        // maxDepth: maximum tree depth; limits splitting to guard against overfitting
val treeStrategy = new Strategy(algo = Regression, impurity = Variance, maxDepth = 2,
categoricalFeaturesInfo = Map.empty, subsamplingRate = subsamplingRate)
val boostingStrategy =
new BoostingStrategy(treeStrategy, AbsoluteError, numIterations, learningRate)
        // gradient-boosted trees: combine several decision trees to reduce noise and avoid overfitting
val gbt = GradientBoostedTrees.train(rdd, boostingStrategy)
assert(gbt.trees.size === numIterations)
try {
EnsembleTestHelper.validateRegressor(gbt, GradientBoostedTreesSuite.data, 0.85, "mae")
} catch {
case e: java.lang.AssertionError =>
logError(s"FAILED for numIterations=$numIterations, learningRate=$learningRate," +
s" subsamplingRate=$subsamplingRate")
throw e
}
        // A LabeledPoint is a local vector, dense or sparse, with an associated label
val remappedInput = rdd.map(x => new LabeledPoint((x.label * 2) - 1, x.features))
val dt = DecisionTree.train(remappedInput, treeStrategy)
        // Make sure trees are the same.
assert(gbt.trees.head.toString == dt.toString)
}
}
  // binary classification with continuous features: log loss
test("Binary classification with continuous features: Log Loss") {
GradientBoostedTreesSuite.testCombinations.foreach {
case (numIterations, learningRate, subsamplingRate) =>
val rdd = sc.parallelize(GradientBoostedTreesSuite.data, 2)
        // subsamplingRate: fraction of the training data used to learn each tree, in [0, 1]
        // maxDepth: maximum tree depth; limits splitting to guard against overfitting
val treeStrategy = new Strategy(algo = Classification, impurity = Variance, maxDepth = 2,
numClasses = 2, categoricalFeaturesInfo = Map.empty,
subsamplingRate = subsamplingRate)
val boostingStrategy =
new BoostingStrategy(treeStrategy, LogLoss, numIterations, learningRate)
val gbt = GradientBoostedTrees.train(rdd, boostingStrategy)
assert(gbt.trees.size === numIterations)
try {
EnsembleTestHelper.validateClassifier(gbt, GradientBoostedTreesSuite.data, 0.9)
} catch {
case e: java.lang.AssertionError =>
logError(s"FAILED for numIterations=$numIterations, learningRate=$learningRate," +
s" subsamplingRate=$subsamplingRate")
throw e
}
        // A LabeledPoint is a local vector, dense or sparse, with an associated label
val remappedInput = rdd.map(x => new LabeledPoint((x.label * 2) - 1, x.features))
val ensembleStrategy = treeStrategy.copy
ensembleStrategy.algo = Regression
ensembleStrategy.impurity = Variance
val dt = DecisionTree.train(remappedInput, ensembleStrategy)
// Make sure trees are the same.
assert(gbt.trees.head.toString == dt.toString)
}
}
  // BoostingStrategy.defaultParams should recognize classification
test("SPARK-5496: BoostingStrategy.defaultParams should recognize Classification") {
for (algo <- Seq("classification", "Classification", "regression", "Regression")) {
BoostingStrategy.defaultParams(algo)
}
}
test("model save/load") {//模型保存/加载
val tempDir = Utils.createTempDir()
val path = tempDir.toURI.toString
val trees = Range(0, 3).map(_ => DecisionTreeSuite.createModel(Regression)).toArray
val treeWeights = Array(0.1, 0.3, 1.1)
Array(Classification, Regression).foreach { algo =>
val model = new GradientBoostedTreesModel(algo, trees, treeWeights)
      // Save model, load it back, and compare.
try {
model.save(sc, path)
val sameModel = GradientBoostedTreesModel.load(sc, path)
assert(model.algo == sameModel.algo)
model.trees.zip(sameModel.trees).foreach { case (treeA, treeB) =>
DecisionTreeSuite.checkEqual(treeA, treeB)
}
assert(model.treeWeights === sameModel.treeWeights)
} finally {
Utils.deleteRecursively(tempDir)
}
}
}
  // runWithValidation should stop early and perform better on a validation dataset
  /*test("runWithValidation stops early and performs better on a validation dataset") {
    // Set numIterations large enough so that it stops early.
    val numIterations = 20
    // gradient-boosted trees: combine several decision trees to reduce noise and avoid overfitting
val trainRdd = sc.parallelize(GradientBoostedTreesSuite.trainData, 2)
val validateRdd = sc.parallelize(GradientBoostedTreesSuite.validateData, 2)
val algos = Array(Regression, Regression, Classification)
val losses = Array(SquaredError, AbsoluteError, LogLoss)
algos.zip(losses).foreach { case (algo, loss) =>
      // maxDepth: maximum tree depth; limits splitting to guard against overfitting
val treeStrategy = new Strategy(algo = algo, impurity = Variance, maxDepth = 2,
categoricalFeaturesInfo = Map.empty)
val boostingStrategy =
new BoostingStrategy(treeStrategy, loss, numIterations, validationTol = 0.0)
val gbtValidate = new GradientBoostedTrees(boostingStrategy)
.runWithValidation(trainRdd, validateRdd)
      val numTrees = gbtValidate.numTrees // number of trees actually trained
      assert(numTrees !== numIterations)
      // Test that it performs better on the validation dataset.
val gbt = new GradientBoostedTrees(boostingStrategy).run(trainRdd)
val (errorWithoutValidation, errorWithValidation) = {
if (algo == Classification) {
val remappedRdd = validateRdd.map(x => new LabeledPoint(2 * x.label - 1, x.features))
(loss.computeError(gbt, remappedRdd), loss.computeError(gbtValidate, remappedRdd))
} else {
(loss.computeError(gbt, validateRdd), loss.computeError(gbtValidate, validateRdd))
}
}
assert(errorWithValidation <= errorWithoutValidation)
      // Test that results from evaluateEachIteration comply with runWithValidation.
      // Note that convergenceTol is set to 0.0
      val evaluationArray = gbt.evaluateEachIteration(validateRdd, loss)
      assert(evaluationArray.length === numIterations)
      // number of trees actually trained
assert(evaluationArray(numTrees) > evaluationArray(numTrees - 1))
var i = 1
while (i < numTrees) {
assert(evaluationArray(i) <= evaluationArray(i - 1))
i += 1
}
}
}*/
/* test("Checkpointing") {//检查点
val tempDir = Utils.createTempDir()
val path = tempDir.toURI.toString
sc.setCheckpointDir(path)
val rdd = sc.parallelize(GradientBoostedTreesSuite.data, 2)
    // maxDepth: maximum tree depth; limits splitting to guard against overfitting
    val treeStrategy = new Strategy(algo = Regression, impurity = Variance, maxDepth = 2,
      // checkpoint interval (>= 1), or -1 to disable checkpointing
      categoricalFeaturesInfo = Map.empty, checkpointInterval = 2)
    val boostingStrategy = new BoostingStrategy(treeStrategy, SquaredError, 5, 0.1)
    // gradient-boosted trees: combine several decision trees to reduce noise and avoid overfitting
val gbt = GradientBoostedTrees.train(rdd, boostingStrategy)
sc.checkpointDir = None
Utils.deleteRecursively(tempDir)
}*/
}
/**
 * Gradient-boosted trees combine several decision trees to reduce noise and avoid overfitting.
 * Training proceeds one tree at a time: the current ensemble predicts every instance, the
 * negative gradient of the loss is taken as the residual, the residual replaces the label,
 * and the next tree is fit to it; this repeats until the requested number of iterations is reached.
 * GBTs support only binary classification and regression (no multiclass), and at prediction time
 * the tree outputs are summed rather than averaged as in a random forest.
 */
private object GradientBoostedTreesSuite {
// Combinations for estimators, learning rates and subsamplingRate
  // subsamplingRate: fraction of the training data used to learn each tree, in [0, 1]
val testCombinations = Array((10, 1.0, 1.0), (10, 0.1, 1.0), (10, 0.5, 0.75), (10, 0.1, 0.75))
val data = EnsembleTestHelper.generateOrderedLabeledPoints(numFeatures = 10, 100)
val trainData = EnsembleTestHelper.generateOrderedLabeledPoints(numFeatures = 20, 120)
val validateData = EnsembleTestHelper.generateOrderedLabeledPoints(numFeatures = 20, 80)
}
|
tophua/spark1.52
|
mllib/src/test/scala/org/apache/spark/mllib/tree/GradientBoostedTreesSuite.scala
|
Scala
|
apache-2.0
| 12,954 |
package BIDMach.datasources
import BIDMat.{Mat,SBMat,CMat,CSMat,DMat,FMat,IMat,HMat,GMat,GIMat,GSMat,SMat,SDMat}
import BIDMat.MatFunctions._
import BIDMat.SciFunctions._
import java.io._
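/**
 * An in-memory data source that serves mini-batches by column-slicing the backing matrices:
 * `next` advances a cursor by `opts.batchSize` columns, reusing the preallocated `fullmats`
 * buffers for full blocks and `endmats` for the final, possibly smaller, block, while
 * `putBack` writes per-batch results back into the source matrices at the current cursor.
 */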
class MatSource(var mats:Array[Mat], override val opts:MatSource.Opts = new MatSource.Options) extends DataSource(opts) {
var sizeMargin = 0f
var here = 0
var there = 0
var blockSize = 0
var totalSize = 0
var umat:Mat = null;
def init = {
sizeMargin = opts.sizeMargin
blockSize = opts.batchSize
if (opts.addConstFeat) {
mats(0) = mats(0) on sparse(ones(1, mats(0).ncols))
}
if (opts.featType == 0) {
mats(0).contents.set(1)
}
here = -blockSize
totalSize = mats(0).ncols
omats = new Array[Mat](mats.length)
endmats = new Array[Mat](mats.length)
fullmats = new Array[Mat](mats.length)
}
def nmats = omats.length
def reset = {
here = -blockSize
}
def next:Array[Mat] = {
here = math.min(here+blockSize, mats(0).ncols)
there = math.min(here+blockSize, mats(0).ncols)
for (i <- 0 until mats.length) {
if (there - here == blockSize) {
fullmats(i) = mats(i).colslice(here, there, fullmats(i))
omats(i) = fullmats(i)
} else {
endmats(i) = mats(i).colslice(here, there, endmats(i))
omats(i) = endmats(i)
}
}
omats
}
def hasNext:Boolean = {
here + blockSize < mats(0).ncols
}
override def setupPutBack(n:Int, dim:Int):Unit = {
if (mats.length <= n || mats(n).asInstanceOf[AnyRef] == null || mats(n).nrows != dim) {
val newmats = new Array[Mat](n+1)
for (i <- 0 until mats.length) {
newmats(i) = mats(i)
}
for (i <- mats.length until n+1) {
newmats(i) = zeros(dim, mats(0).ncols)
}
mats = newmats
}
}
override def putBack(tmats:Array[Mat],n:Int):Unit = {
for (i <- 1 to n)
tmats(i).colslice(0, tmats(i).ncols, mats(i), here, true);
}
def progress = {
math.min((here+blockSize)*1f/totalSize, 1f)
}
}
object MatSource {
trait Opts extends DataSource.Opts {
}
class Options extends Opts {
}
}
|
jamesjia94/BIDMach
|
src/main/scala/BIDMach/datasources/MatSource.scala
|
Scala
|
bsd-3-clause
| 2,256 |
package test006
import org.scalatest._
import scalikejdbc._
import scalikejdbc.scalatest.AutoRollback
import skinny.dbmigration.DBSeeds
import skinny.orm._
trait Connection {
Class.forName("org.h2.Driver")
ConnectionPool.add(Symbol("test006"), "jdbc:h2:mem:test006;MODE=PostgreSQL", "sa", "sa")
}
trait CreateTables extends DBSeeds { self: Connection =>
override val dbSeedsAutoSession = NamedAutoSession(Symbol("test006"))
addSeedSQL(sql"create table summary (id bigserial not null, name varchar(100) not null)")
runIfFailed(sql"select count(1) from summary")
}
class Spec extends fixture.FunSpec with Matchers with Connection with CreateTables with AutoRollback {
override def db(): DB = NamedDB(Symbol("test006")).toDB()
var (_beforeCreate, _beforeUpdateBy, _beforeDeleteBy, _afterCreate, _afterDeleteBy, _afterUpdateBy) =
(0, 0, 0, 0, 0, 0)
case class Summary(id: Long, name: String)
object Summary extends SkinnyCRUDMapper[Summary] {
override val connectionPoolName = Symbol("test006")
override def defaultAlias = createAlias("s")
beforeCreate((session: DBSession, namedValues: Seq[(SQLSyntax, Any)]) => {
_beforeCreate += 1
})
afterCreate((session: DBSession, namedValues: Seq[(SQLSyntax, Any)], generatedId: Option[Long]) => {
_afterCreate += 1
})
beforeUpdateBy((s: DBSession, where: SQLSyntax, params: Seq[(SQLSyntax, Any)]) => {
_beforeUpdateBy += 1
})
afterUpdateBy((s: DBSession, where: SQLSyntax, params: Seq[(SQLSyntax, Any)], count: Int) => {
_afterUpdateBy += 1
})
beforeDeleteBy((s: DBSession, where: SQLSyntax) => {
_beforeDeleteBy += 1
})
afterDeleteBy((s: DBSession, where: SQLSyntax, deletedCount: Int) => {
_afterDeleteBy += 1
})
beforeCreate((session: DBSession, namedValues: Seq[(SQLSyntax, Any)]) => {
_beforeCreate += 1
})
afterCreate((session: DBSession, namedValues: Seq[(SQLSyntax, Any)], generatedId: Option[Long]) => {
_afterCreate += 1
})
beforeUpdateBy((s: DBSession, where: SQLSyntax, params: Seq[(SQLSyntax, Any)]) => {
_beforeUpdateBy += 1
})
afterUpdateBy((s: DBSession, where: SQLSyntax, params: Seq[(SQLSyntax, Any)], count: Int) => {
_afterUpdateBy += 1
})
beforeDeleteBy((s: DBSession, where: SQLSyntax) => {
_beforeDeleteBy += 1
})
afterDeleteBy((s: DBSession, where: SQLSyntax, deletedCount: Int) => {
_afterDeleteBy += 1
})
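    // Each callback above is intentionally registered twice, so a single create/update/delete
    // bumps its counter by 2, which is exactly what the assertions in the spec below expect.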
override def extract(rs: WrappedResultSet, rn: ResultName[Summary]) = autoConstruct(rs, rn)
}
describe("before/after") {
it("should work") { implicit session =>
_beforeCreate should equal(0)
_afterCreate should equal(0)
_beforeUpdateBy should equal(0)
_afterUpdateBy should equal(0)
_beforeDeleteBy should equal(0)
_afterDeleteBy should equal(0)
val id = Summary.createWithAttributes(Symbol("name") -> "Sample")
Summary.updateById(id).withAttributes(Symbol("name") -> "Sample2")
Summary.deleteById(id)
_beforeCreate should equal(2)
_afterCreate should equal(2)
_beforeUpdateBy should equal(2)
_afterUpdateBy should equal(2)
_beforeDeleteBy should equal(2)
_afterDeleteBy should equal(2)
}
}
}
|
skinny-framework/skinny-framework
|
orm/src/test/scala/test006/Spec.scala
|
Scala
|
mit
| 3,289 |
package poly.collection
import poly.collection.exception._
import poly.collection.node._
/**
* Represents a bidirectional sequence, i.e. a sequence that supports
* fast access to the last element as well as fast reversed traversal.
*
* @author Tongfei Chen
* @since 0.1.0
*/
trait BidiSeq[+T] extends Seq[T] with BidiIterable[T] { self =>
import BidiSeq._
/**
* Returns a dummy node whose next node is the head of this sequence,
* and whose previous node is the last of this sequence.
*/
override def dummy: BidiSeqNode[T] = new BidiSeqNode[T] {
def prev = lastNode
def next = headNode
def data = throw new DummyNodeException
def isDummy = true
}
def newReverseIterator = reverse.newIterator
def headNode: BidiSeqNode[T]
/** Returns the last node of this sequence. */
def lastNode: BidiSeqNode[T]
//region MONADIC OPS
override def map[U](f: T => U): BidiSeq[U] = {
class MappedNode(outer: BidiSeqNode[T]) extends BidiSeqNode[U] {
def prev = new MappedNode(outer.prev)
def next = new MappedNode(outer.next)
def data = f(outer.data)
def isDummy = outer.isDummy
}
ofDummyNode(new MappedNode(self.dummy))
}
//endregion
override def tail = ofHeadAndLastNode(headNode.next, lastNode)
override def init = ofHeadAndLastNode(headNode, lastNode.prev)
override def last = lastNode.data
override def suffixes = {
class From(val n: BidiSeqNode[T]) extends BidiSeqNode[BidiSeq[T]] {
def data = ofHeadAndLastNode(n, self.lastNode)
def next = new From(n.next)
def prev = new From(n.prev)
def isDummy = n.isDummy
}
ofHeadAndLastNode(new From(self.headNode), new From(self.lastNode))
}
override def prefixes = {
class Until(val n: BidiSeqNode[T]) extends BidiSeqNode[BidiSeq[T]] {
def data = ofHeadAndLastNode(self.headNode, n)
def next = new Until(n.next)
def prev = new Until(n.prev)
def isDummy = n.isDummy
}
ofHeadAndLastNode(new Until(self.headNode), new Until(self.lastNode))
}
override def slidingPairsWith[U](f: (T, T) => U): BidiSeq[U] = {
class ConsecutiveNode(val n0: BidiSeqNode[T], val n1: BidiSeqNode[T]) extends BidiSeqNode[U] {
def data = f(n0.data, n1.data)
def next = new ConsecutiveNode(n1, n1.next)
def prev = new ConsecutiveNode(n0.prev, n0)
def isDummy = n0.isDummy || n1.isDummy
}
ofDummyNode {
new BidiSeqNode[U] {
def data = throw new DummyNodeException
def next = new ConsecutiveNode(self.headNode, self.headNode.next)
def prev = new ConsecutiveNode(self.lastNode.prev, self.lastNode)
def isDummy = true
}
}
}
override def slidingPairs = slidingPairsWith { (x, y) => (x, y) }
/**
* Reverses this collection. $LAZY
*/
override def reverse: BidiSeq[T] = new BidiSeqT.Reversed(self)
def asBiSeq: BidiSeq[T] = new AbstractBidiSeq[T] {
def headNode = self.headNode
def lastNode = self.lastNode
}
}
object BidiSeq {
def ofDummyNode[T](d: BidiSeqNode[T]): BidiSeq[T] = new AbstractBidiSeq[T] {
override def dummy = d
def headNode = d.next
def lastNode = d.prev
}
def ofHeadAndLastNode[T](hn: BidiSeqNode[T], ln: BidiSeqNode[T]): BidiSeq[T] = {
class ConstrainedNode(val node: BidiSeqNode[T]) extends BidiSeqNode[T] {
def prev = if (node == hn) BidiSeqNode.dummy else new ConstrainedNode(node.prev)
def next = if (node == ln) BidiSeqNode.dummy else new ConstrainedNode(node.next)
def data = node.data
def isDummy = node.isDummy
}
ofDummyNode(new BidiSeqNode[T] {
def next = new ConstrainedNode(hn)
def prev = new ConstrainedNode(ln)
def data = throw new NoSuchElementException
def isDummy = true
})
}
object empty extends BidiSeq[Nothing] {
override def dummy: BidiSeqNode[Nothing] = BidiSeqNode.dummy
def headNode = dummy
def lastNode = dummy
}
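  /**
   * Illustrative sketch (a hypothetical helper, not part of the library API): a one-element
   * [[BidiSeq]] wired up by hand, with a dummy node whose `next` and `prev` both point at the
   * single data node, as required by the contract documented on `dummy` above.
   */
  def singletonExample[T](x: T): BidiSeq[T] = {
    lazy val dataNode: BidiSeqNode[T] = new BidiSeqNode[T] {
      def data = x
      def prev = dummyNode
      def next = dummyNode
      def isDummy = false
    }
    lazy val dummyNode: BidiSeqNode[T] = new BidiSeqNode[T] {
      def data = throw new DummyNodeException
      def prev = dataNode
      def next = dataNode
      def isDummy = true
    }
    ofDummyNode(dummyNode)
  }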
}
abstract class AbstractBidiSeq[+T] extends AbstractSeq[T] with BidiSeq[T]
private[poly] object BidiSeqT {
class Reversed[T](self: BidiSeq[T]) extends AbstractBidiSeq[T] {
def headNode = self.lastNode.reverse
def lastNode = self.headNode.reverse
override def reverse = self
}
}
|
ctongfei/poly-collection
|
core/src/main/scala/poly/collection/BidiSeq.scala
|
Scala
|
mit
| 4,258 |
package com.twitter.finagle.mux
import com.twitter.finagle.mux.transport.Message
import com.twitter.finagle.transport.{Transport, TransportProxy}
import com.twitter.finagle.{Failure, Status}
import com.twitter.util.{Future, Return, Throw, Time}
import java.net.SocketAddress
import java.security.cert.Certificate
import java.util.concurrent.atomic.AtomicBoolean
import org.jboss.netty.buffer.ChannelBuffer
/**
* Implements mux session negotiation. The mux spec allows for (re)negotiation
* to happen arbitrarily throughout a session, but for simplicity, our
* implementation assumes it happens at the start of a session. It is implemented
* in terms of a [[Transport]] so that negotiation can sit transparently below
* client and server dispatchers and easily install features based on the
* exchanged version and headers.
*/
private[finagle] object Handshake {
type Headers = Seq[(ChannelBuffer, ChannelBuffer)]
/**
* A function which transforms or installs features atop a transport based
* on a session's headers. Note, the input exposes the framed byte stream rather
* than mux `Message` types to more easily allow for features that need to
* operate on the raw byte frame (e.g. compression, checksums, etc).
*/
type Negotiator = (Headers, Transport[ChannelBuffer, ChannelBuffer]) =>
Transport[Message, Message]
/**
* Returns Some(value) if `key` exists in `headers`, otherwise None.
*/
def valueOf(key: ChannelBuffer, headers: Headers): Option[ChannelBuffer] = {
val iter = headers.iterator
while (iter.hasNext) {
val (k, v) = iter.next()
if (k == key) return Some(v)
}
None
}
/**
* We can assign tag 1 without worry of any tag conflicts because we gate
* all messages until the handshake is complete (or fails).
*/
val TinitTag = 1
/**
* Unfortunately, `Rerr` messages don't have error codes in the current
* version of mux. This means that we need to match strings to distinguish
* between important `Rerr` messages. This one is particularly important
* because it allows us to roll out handshakes without a coordinated
* upgrade path.
*/
val CanTinitMsg = "tinit check"
/**
* A noop negotiator returns a transport that ignores the headers and
* encodes / decodes mux messages.
*/
val NoopNegotiator: Negotiator = (_, trans) => {
trans.map(Message.encode, Message.decode)
}
/**
* In order to simplify the rollout of handshakes, we need to make
* sure that our remote can understand Tinits before sending them.
* This is a hack since we didn't launch mux with handshakes.
*
* 1. Send an Rerr which we are certain can be interpreted by the first
* implementations of mux.
*
* 2. If we receive a marker Rerr which echos back our message, we know
* we can Tinit.
*/
def canTinit(trans: Transport[Message, Message]): Future[Boolean] =
trans.write(Message.Rerr(TinitTag, CanTinitMsg)).before {
trans.read().transform {
case Return(Message.Rerr(`TinitTag`, `CanTinitMsg`)) =>
Future.True
case _ =>
Future.False
}
}
/**
* Returns a [[Transport]] that handles session negotiation from a client's
* perspective. The client initiates the handshake via a `Tinit` message.
* If the server responds appropriately with an `Rinit`, `trans` is transformed
* via `negotiate` otherwise it's returned unchanged.
*
* @param trans the original transport established at the start of a mux
* session (with no messages dispatched).
*
* @param version the version the client sends to the server.
*
* @param headers the headers the client sends to the server.
*
* @param negotiate a function which furnishes a transport based on the
* the headers received from the server.
*/
def client(
trans: Transport[ChannelBuffer, ChannelBuffer],
version: Short,
headers: Headers,
negotiate: Negotiator
): Transport[Message, Message] = {
// Since the handshake happens at the start of a session, we can safely
// enc/dec messages without having to worry about any special session
// features.
val msgTrans = trans.map(Message.encode, Message.decode)
val handshake: Future[Transport[Message, Message]] =
canTinit(msgTrans).transform {
// We can start the official Tinit/Rinit handshake
case Return(true) =>
msgTrans.write(Message.Tinit(TinitTag, version, headers)).before {
msgTrans.read().transform {
case Return(Message.Rinit(_, v, serverHeaders)) if v == version =>
Future(negotiate(serverHeaders, trans))
case Return(Message.Rerr(_, msg)) =>
Future.exception(Failure(msg))
case t@Throw(_) =>
Future.const(t.cast[Transport[Message, Message]])
}
}
// If we can't init, we return the session as is and assume that we
// can speak mux pre version 1 and pre handshaking. Any subsequent
// failures will be handled by the layers above (i.e. the dispatcher).
// This is a workaround since our initial implementation of mux didn't
// implement handshaking.
case Return(false) => Future.value(msgTrans)
case t@Throw(_) =>
Future.const(t.cast[Transport[Message, Message]])
}
handshake.onFailure { _ => msgTrans.close() }
new DeferredTransport(msgTrans, handshake)
}
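  /**
   * Illustrative sketch (a hypothetical helper, not in the original source): the smallest
   * client-side wiring, using the no-op negotiator. The version number and the empty header
   * list are placeholder values for illustration, not what finagle-mux actually negotiates.
   */
  private def exampleClientNegotiation(
    rawTrans: Transport[ChannelBuffer, ChannelBuffer]
  ): Transport[Message, Message] =
    client(rawTrans, version = 1, headers = Seq.empty, negotiate = NoopNegotiator)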
/**
* Returns a [[Transport]] that handles session negotiation from a server's
* perspective. It reads the first message from the `trans` and if it is
* an `Rinit`, transforms the transport via `negotiate`. If the client doesn't
* support handshakes, the original `trans` is returned, making sure to replace
* any messages we eagerly read from the transport.
*
* @param trans the original transport established at the start of a mux
* session (with no outstanding messages).
*
* @param version the version sent to the client.
*
* @param headers a function which resolves the server headers with respect
* to the client headers. This is structured this way since the headers the
* server responds with are typically based on the clients.
*
* @param negotiate a function which transforms `trans` based on the
* negotiated headers.
*/
def server(
trans: Transport[ChannelBuffer, ChannelBuffer],
version: Short,
headers: Headers => Headers,
negotiate: Negotiator
): Transport[Message, Message] = {
// Since the handshake happens at the start of a session, we can safely enc/dec
// messages without having to worry about any special features (e.g. fragments).
val msgTrans = trans.map(Message.encode, Message.decode)
val handshake: Future[Transport[Message, Message]] =
msgTrans.read().transform {
// A Tinit with a matching version
case Return(Message.Tinit(tag, ver, clientHeaders)) if ver == version =>
val serverHeaders = headers(clientHeaders)
msgTrans.write(Message.Rinit(tag, version, serverHeaders)).before {
Future(negotiate(clientHeaders, trans))
}
// A Tinit with a version mismatch. Write an Rerr and then return
// a failed future.
case Return(Message.Tinit(tag, ver, _)) =>
val msg = s"unsupported version $ver, expected $version"
msgTrans.write(Message.Rerr(tag, msg))
.before { Future.exception(Failure(msg)) }
// A marker Rerr that queries whether or not we can do handshaking.
// Echo back the Rerr message to indicate that we can and recurse
// so we can be ready to handshake again.
        case Return(rerr @ Message.Rerr(tag, msg)) =>
msgTrans.write(rerr).before {
Future.value(server(trans, version, headers, negotiate))
}
// Client did not start a session with handshaking but we've consumed
// a message from the transport. Replace the message and return the
// original transport.
case Return(msg) => Future.value(new TransportProxy(msgTrans) {
private[this] val first = new AtomicBoolean(true)
def read(): Future[Message] =
if (first.compareAndSet(true, false)) Future.value(msg)
else msgTrans.read()
def write(req: Message): Future[Unit] = msgTrans.write(req)
})
case Throw(_) => Future.value(msgTrans)
}
handshake.onFailure { _ => msgTrans.close() }
new DeferredTransport(msgTrans, handshake)
}
}
/**
* Implements a [[Transport]] in terms of a future transport. All async
* operations are composed via future composition and callers can safely
* interrupt the returned futures without affecting the result of `underlying`.
*
* @param init the transport to proxy synchronous operations to.
*
* @param underlying the transport which will be used once its containing future
* is satisfied.
*/
private class DeferredTransport(
init: Transport[Message, Message],
underlying: Future[Transport[Message, Message]])
extends Transport[Message, Message] {
// we create a derivative promise while `underlying` is not defined
// because the transport is multiplexed and interrupting on one
// stream shouldn't affect the result of the handshake.
private[this] def gate() = underlying.interruptible()
def write(msg: Message): Future[Unit] = gate().flatMap(_.write(msg))
private[this] val read0: Transport[Message, Message] => Future[Message] = _.read()
def read(): Future[Message] = gate().flatMap(read0)
def status: Status = underlying.poll match {
case Some(Return(t)) => t.status
case None => Status.Busy
case _ => Status.Closed
}
val onClose: Future[Throwable] = gate().flatMap(_.onClose)
def localAddress: SocketAddress = init.localAddress
def remoteAddress: SocketAddress = init.remoteAddress
def peerCertificate: Option[Certificate] = init.peerCertificate
def close(deadline: Time): Future[Unit] = gate().flatMap(_.close(deadline))
}
|
lukiano/finagle
|
finagle-mux/src/main/scala/com/twitter/finagle/mux/Handshake.scala
|
Scala
|
apache-2.0
| 10,091 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tree.impl
import scala.annotation.tailrec
import scala.collection.mutable
import scala.language.implicitConversions
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.classification.DecisionTreeClassificationModel
import org.apache.spark.ml.feature.{Instance, LabeledPoint}
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.tree._
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.mllib.tree.{DecisionTreeSuite => OldDTSuite, EnsembleTestHelper}
import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo, QuantileStrategy, Strategy => OldStrategy}
import org.apache.spark.mllib.tree.impurity.{Entropy, Gini, GiniCalculator, Variance}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.util.collection.OpenHashMap
/**
* Test suite for [[RandomForest]].
*/
class RandomForestSuite extends SparkFunSuite with MLlibTestSparkContext {
import RandomForestSuite.mapToVec
/////////////////////////////////////////////////////////////////////////////
// Tests for split calculation
/////////////////////////////////////////////////////////////////////////////
test("Binary classification with continuous features: split calculation") {
val arr = OldDTSuite.generateOrderedLabeledPointsWithLabel1().map(_.asML.toInstance)
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(OldAlgo.Classification, Gini, 3, 2, 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
val splits = RandomForest.findSplits(rdd, metadata, seed = 42)
assert(splits.length === 2)
assert(splits(0).length === 99)
}
test("Binary classification with binary (ordered) categorical features: split calculation") {
val arr = OldDTSuite.generateCategoricalDataPoints().map(_.asML.toInstance)
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(OldAlgo.Classification, Gini, maxDepth = 2, numClasses = 2,
maxBins = 100, categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
val splits = RandomForest.findSplits(rdd, metadata, seed = 42)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
assert(splits.length === 2)
// no splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
}
test("Binary classification with 3-ary (ordered) categorical features," +
" with no samples for one category: split calculation") {
val arr = OldDTSuite.generateCategoricalDataPoints().map(_.asML.toInstance)
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(OldAlgo.Classification, Gini, maxDepth = 2, numClasses = 2,
maxBins = 100, categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val splits = RandomForest.findSplits(rdd, metadata, seed = 42)
assert(splits.length === 2)
// no splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
}
test("find splits for a continuous feature") {
// find splits for normal case
{
val fakeMetadata = new DecisionTreeMetadata(1, 200000, 200000.0, 0, 0,
Map(), Set(),
Array(6), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamples = Array.fill(10000)((1.0, math.random)).filter(_._2 != 0.0)
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits.length === 5)
assert(fakeMetadata.numSplits(0) === 5)
assert(fakeMetadata.numBins(0) === 6)
// check returned splits are distinct
assert(splits.distinct.length === splits.length)
}
// SPARK-16957: Use midpoints for split values.
{
val fakeMetadata = new DecisionTreeMetadata(1, 8, 8.0, 0, 0,
Map(), Set(),
Array(3), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
// possibleSplits <= numSplits
{
val featureSamples = Array(0, 1, 0, 0, 1, 0, 1, 1)
.map(x => (1.0, x.toDouble)).filter(_._2 != 0.0)
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
val expectedSplits = Array((0.0 + 1.0) / 2)
assert(splits === expectedSplits)
}
// possibleSplits > numSplits
{
val featureSamples = Array(0, 0, 1, 1, 2, 2, 3, 3)
.map(x => (1.0, x.toDouble)).filter(_._2 != 0.0)
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
val expectedSplits = Array((0.0 + 1.0) / 2, (2.0 + 3.0) / 2)
assert(splits === expectedSplits)
}
}
// find splits should not return identical splits
// when there are not enough split candidates, reduce the number of splits in metadata
{
val fakeMetadata = new DecisionTreeMetadata(1, 12, 12.0, 0, 0,
Map(), Set(),
Array(5), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamples = Array(1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3).map(x => (1.0, x.toDouble))
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
val expectedSplits = Array((1.0 + 2.0) / 2, (2.0 + 3.0) / 2)
assert(splits === expectedSplits)
// check returned splits are distinct
assert(splits.distinct.length === splits.length)
}
// find splits when most samples close to the minimum
{
val fakeMetadata = new DecisionTreeMetadata(1, 18, 18.0, 0, 0,
Map(), Set(),
Array(3), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamples =
Array(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5).map(x => (1.0, x.toDouble))
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
val expectedSplits = Array((2.0 + 3.0) / 2, (3.0 + 4.0) / 2)
assert(splits === expectedSplits)
}
// find splits when most samples close to the maximum
{
val fakeMetadata = new DecisionTreeMetadata(1, 17, 17.0, 0, 0,
Map(), Set(),
Array(2), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamples =
Array(0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2).map(x => (1.0, x.toDouble))
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
val expectedSplits = Array((1.0 + 2.0) / 2)
assert(splits === expectedSplits)
}
// find splits for arbitrarily scaled data
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0.0, 0, 0,
Map(), Set(),
Array(6), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamplesUnitWeight = Array.fill(10)((1.0, math.random))
val featureSamplesSmallWeight = featureSamplesUnitWeight.map { case (w, x) => (w * 0.001, x)}
val featureSamplesLargeWeight = featureSamplesUnitWeight.map { case (w, x) => (w * 1000, x)}
val splitsUnitWeight = RandomForest
.findSplitsForContinuousFeature(featureSamplesUnitWeight, fakeMetadata, 0)
val splitsSmallWeight = RandomForest
.findSplitsForContinuousFeature(featureSamplesSmallWeight, fakeMetadata, 0)
val splitsLargeWeight = RandomForest
.findSplitsForContinuousFeature(featureSamplesLargeWeight, fakeMetadata, 0)
assert(splitsUnitWeight === splitsSmallWeight)
assert(splitsUnitWeight === splitsLargeWeight)
}
// find splits when most weight is close to the minimum
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0.0, 0, 0,
Map(), Set(),
Array(3), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0.0, 0, 0
)
val featureSamples = Array((10, 1), (1, 2), (1, 3), (1, 4), (1, 5), (1, 6)).map {
case (w, x) => (w.toDouble, x.toDouble)
}
val splits = RandomForest.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits === Array(1.5, 2.5, 3.5, 4.5, 5.5))
}
}
test("train with empty arrays") {
val lp = LabeledPoint(1.0, Vectors.dense(Array.empty[Double])).toInstance
val data = Array.fill(5)(lp)
val rdd = sc.parallelize(data)
val strategy = new OldStrategy(OldAlgo.Regression, Gini, maxDepth = 2,
maxBins = 5)
withClue("DecisionTree requires number of features > 0," +
" but was given an empty features vector") {
intercept[IllegalArgumentException] {
RandomForest.run(rdd, strategy, 1, "all", 42L, instr = None)
}
}
}
test("train with constant features") {
val instance = LabeledPoint(1.0, Vectors.dense(0.0, 0.0, 0.0)).toInstance
val data = Array.fill(5)(instance)
val rdd = sc.parallelize(data)
val strategy = new OldStrategy(
OldAlgo.Classification,
Gini,
maxDepth = 2,
numClasses = 2,
maxBins = 5,
categoricalFeaturesInfo = Map(0 -> 1, 1 -> 5))
val Array(tree) = RandomForest.run(rdd, strategy, 1, "all", 42L, instr = None)
assert(tree.rootNode.impurity === -1.0)
assert(tree.depth === 0)
assert(tree.rootNode.prediction === instance.label)
// Test with no categorical features
val strategy2 = new OldStrategy(
OldAlgo.Regression,
Variance,
maxDepth = 2,
maxBins = 5)
val Array(tree2) = RandomForest.run(rdd, strategy2, 1, "all", 42L, instr = None)
assert(tree2.rootNode.impurity === -1.0)
assert(tree2.depth === 0)
assert(tree2.rootNode.prediction === instance.label)
}
test("Multiclass classification with unordered categorical features: split calculations") {
val arr = OldDTSuite.generateCategoricalDataPoints().map(_.asML.toInstance)
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(
OldAlgo.Classification,
Gini,
maxDepth = 2,
numClasses = 100,
maxBins = 100,
categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(metadata.isUnordered(featureIndex = 0))
assert(metadata.isUnordered(featureIndex = 1))
val splits = RandomForest.findSplits(rdd, metadata, seed = 42)
assert(splits.length === 2)
assert(splits(0).length === 3)
assert(metadata.numSplits(0) === 3)
assert(metadata.numBins(0) === 3)
assert(metadata.numSplits(1) === 3)
assert(metadata.numBins(1) === 3)
// Expecting 2^2 - 1 = 3 splits per feature
def checkCategoricalSplit(s: Split, featureIndex: Int, leftCategories: Array[Double]): Unit = {
assert(s.featureIndex === featureIndex)
assert(s.isInstanceOf[CategoricalSplit])
val s0 = s.asInstanceOf[CategoricalSplit]
assert(s0.leftCategories === leftCategories)
assert(s0.numCategories === 3) // for this unit test
}
// Feature 0
checkCategoricalSplit(splits(0)(0), 0, Array(0.0))
checkCategoricalSplit(splits(0)(1), 0, Array(1.0))
checkCategoricalSplit(splits(0)(2), 0, Array(0.0, 1.0))
// Feature 1
checkCategoricalSplit(splits(1)(0), 1, Array(0.0))
checkCategoricalSplit(splits(1)(1), 1, Array(1.0))
checkCategoricalSplit(splits(1)(2), 1, Array(0.0, 1.0))
}
test("Multiclass classification with ordered categorical features: split calculations") {
val arr = OldDTSuite.generateCategoricalDataPointsForMulticlassForOrderedFeatures()
.map(_.asML.toInstance)
assert(arr.length === 3000)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(OldAlgo.Classification, Gini, maxDepth = 2, numClasses = 100,
maxBins = 100, categoricalFeaturesInfo = Map(0 -> 10, 1 -> 10))
// 2^(10-1) - 1 > 100, so categorical features will be ordered
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val splits = RandomForest.findSplits(rdd, metadata, seed = 42)
assert(splits.length === 2)
// no splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
}
/////////////////////////////////////////////////////////////////////////////
// Tests of other algorithm internals
/////////////////////////////////////////////////////////////////////////////
test("extract categories from a number for multiclass classification") {
val l = RandomForest.extractMultiClassCategories(13, 10)
assert(l.length === 3)
assert(Seq(3.0, 2.0, 0.0) === l)
}
test("Avoid aggregation on the last level") {
val arr = Array(
LabeledPoint(0.0, Vectors.dense(1.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 1.0, 1.0)),
LabeledPoint(0.0, Vectors.dense(2.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 2.0, 1.0)))
val input = sc.parallelize(arr.map(_.toInstance))
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 1,
numClasses = 2, categoricalFeaturesInfo = Map(0 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(input, strategy)
val splits = RandomForest.findSplits(input, metadata, seed = 42)
val treeInput = TreePoint.convertToTreeRDD(input, splits, metadata)
val baggedInput = BaggedPoint.convertToBaggedRDD(treeInput, 1.0, 1, withReplacement = false)
val topNode = LearningNode.emptyNode(nodeIndex = 1)
assert(topNode.isLeaf === false)
assert(topNode.stats === null)
val nodesForGroup = Map(0 -> Array(topNode))
val treeToNodeToIndexInfo = Map(0 -> Map(
topNode.id -> new RandomForest.NodeIndexInfo(0, None)
))
val nodeStack = new mutable.ListBuffer[(Int, LearningNode)]
RandomForest.findBestSplits(baggedInput, metadata, Map(0 -> topNode),
nodesForGroup, treeToNodeToIndexInfo, splits, nodeStack)
// don't enqueue leaf nodes into node queue
assert(nodeStack.isEmpty)
// set impurity and predict for topNode
assert(topNode.stats !== null)
assert(topNode.stats.impurity > 0.0)
// set impurity and predict for child nodes
assert(topNode.leftChild.get.toNode.prediction === 0.0)
assert(topNode.rightChild.get.toNode.prediction === 1.0)
assert(topNode.leftChild.get.stats.impurity === 0.0)
assert(topNode.rightChild.get.stats.impurity === 0.0)
}
test("Avoid aggregation if impurity is 0.0") {
val arr = Array(
LabeledPoint(0.0, Vectors.dense(1.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 1.0, 1.0)),
LabeledPoint(0.0, Vectors.dense(2.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 2.0, 1.0)))
val input = sc.parallelize(arr.map(_.toInstance))
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 5,
numClasses = 2, categoricalFeaturesInfo = Map(0 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(input, strategy)
val splits = RandomForest.findSplits(input, metadata, seed = 42)
val treeInput = TreePoint.convertToTreeRDD(input, splits, metadata)
val baggedInput = BaggedPoint.convertToBaggedRDD(treeInput, 1.0, 1, withReplacement = false)
val topNode = LearningNode.emptyNode(nodeIndex = 1)
assert(topNode.isLeaf === false)
assert(topNode.stats === null)
val nodesForGroup = Map(0 -> Array(topNode))
val treeToNodeToIndexInfo = Map(0 -> Map(
topNode.id -> new RandomForest.NodeIndexInfo(0, None)
))
val nodeStack = new mutable.ListBuffer[(Int, LearningNode)]
RandomForest.findBestSplits(baggedInput, metadata, Map(0 -> topNode),
nodesForGroup, treeToNodeToIndexInfo, splits, nodeStack)
// don't enqueue a node into node queue if its impurity is 0.0
assert(nodeStack.isEmpty)
// set impurity and predict for topNode
assert(topNode.stats !== null)
assert(topNode.stats.impurity > 0.0)
// set impurity and predict for child nodes
assert(topNode.leftChild.get.toNode.prediction === 0.0)
assert(topNode.rightChild.get.toNode.prediction === 1.0)
assert(topNode.leftChild.get.stats.impurity === 0.0)
assert(topNode.rightChild.get.stats.impurity === 0.0)
}
test("Use soft prediction for binary classification with ordered categorical features") {
// The following dataset is set up such that the best split is {1} vs. {0, 2}.
// If the hard prediction is used to order the categories, then {0} vs. {1, 2} is chosen.
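    // Tally of the data below: per category the class-1 frequencies are 0 -> 1/4, 1 -> 0/4 and
    // 2 -> 1/4, so ordering categories by the soft prediction (class-1 probability) isolates
    // {1} on one side, which is the split asserted at the end of this test.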
val arr = Array(
LabeledPoint(0.0, Vectors.dense(0.0)),
LabeledPoint(0.0, Vectors.dense(0.0)),
LabeledPoint(0.0, Vectors.dense(0.0)),
LabeledPoint(1.0, Vectors.dense(0.0)),
LabeledPoint(0.0, Vectors.dense(1.0)),
LabeledPoint(0.0, Vectors.dense(1.0)),
LabeledPoint(0.0, Vectors.dense(1.0)),
LabeledPoint(0.0, Vectors.dense(1.0)),
LabeledPoint(0.0, Vectors.dense(2.0)),
LabeledPoint(0.0, Vectors.dense(2.0)),
LabeledPoint(0.0, Vectors.dense(2.0)),
LabeledPoint(1.0, Vectors.dense(2.0)))
val input = sc.parallelize(arr.map(_.toInstance))
// Must set maxBins s.t. the feature will be treated as an ordered categorical feature.
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 1,
numClasses = 2, categoricalFeaturesInfo = Map(0 -> 3), maxBins = 3)
val model = RandomForest.run(input, strategy, numTrees = 1, featureSubsetStrategy = "all",
seed = 42, instr = None, prune = false).head
model.rootNode match {
case n: InternalNode => n.split match {
case s: CategoricalSplit =>
assert(s.leftCategories === Array(1.0))
case _ => fail("model.rootNode.split was not a CategoricalSplit")
}
case _ => fail("model.rootNode was not an InternalNode")
}
}
test("Second level node building with vs. without groups") {
val arr = OldDTSuite.generateOrderedLabeledPoints().map(_.asML.toInstance)
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
// For tree with 1 group
val strategy1 =
new OldStrategy(OldAlgo.Classification, Entropy, 3, 2, 100, maxMemoryInMB = 1000)
// For tree with multiple groups
val strategy2 =
new OldStrategy(OldAlgo.Classification, Entropy, 3, 2, 100, maxMemoryInMB = 0)
val tree1 = RandomForest.run(rdd, strategy1, numTrees = 1, featureSubsetStrategy = "all",
seed = 42, instr = None).head
val tree2 = RandomForest.run(rdd, strategy2, numTrees = 1, featureSubsetStrategy = "all",
seed = 42, instr = None).head
def getChildren(rootNode: Node): Array[InternalNode] = rootNode match {
case n: InternalNode =>
assert(n.leftChild.isInstanceOf[InternalNode])
assert(n.rightChild.isInstanceOf[InternalNode])
Array(n.leftChild.asInstanceOf[InternalNode], n.rightChild.asInstanceOf[InternalNode])
case _ => fail("rootNode was not an InternalNode")
}
// Single group second level tree construction.
val children1 = getChildren(tree1.rootNode)
val children2 = getChildren(tree2.rootNode)
// Verify whether the splits obtained using single group and multiple group level
// construction strategies are the same.
for (i <- 0 until 2) {
assert(children1(i).gain > 0)
assert(children2(i).gain > 0)
assert(children1(i).split === children2(i).split)
assert(children1(i).impurity === children2(i).impurity)
assert(children1(i).impurityStats.stats === children2(i).impurityStats.stats)
assert(children1(i).leftChild.impurity === children2(i).leftChild.impurity)
assert(children1(i).rightChild.impurity === children2(i).rightChild.impurity)
assert(children1(i).prediction === children2(i).prediction)
}
}
def binaryClassificationTestWithContinuousFeaturesAndSubsampledFeatures(strategy: OldStrategy) {
val numFeatures = 50
val arr = EnsembleTestHelper.generateOrderedLabeledPoints(numFeatures, 1000)
val rdd = sc.parallelize(arr).map(_.asML.toInstance)
    // Select feature subset for top nodes and check that it matches the expected strategy.
def checkFeatureSubsetStrategy(
numTrees: Int,
featureSubsetStrategy: String,
numFeaturesPerNode: Int): Unit = {
val seeds = Array(123, 5354, 230, 349867, 23987)
val maxMemoryUsage: Long = 128 * 1024L * 1024L
val metadata =
DecisionTreeMetadata.buildMetadata(rdd, strategy, numTrees, featureSubsetStrategy)
seeds.foreach { seed =>
val failString = s"Failed on test with:" +
s"numTrees=$numTrees, featureSubsetStrategy=$featureSubsetStrategy," +
s" numFeaturesPerNode=$numFeaturesPerNode, seed=$seed"
val nodeStack = new mutable.ListBuffer[(Int, LearningNode)]
val topNodes: Array[LearningNode] = new Array[LearningNode](numTrees)
Range(0, numTrees).foreach { treeIndex =>
topNodes(treeIndex) = LearningNode.emptyNode(nodeIndex = 1)
nodeStack.prepend((treeIndex, topNodes(treeIndex)))
}
val rng = new scala.util.Random(seed = seed)
val (nodesForGroup: Map[Int, Array[LearningNode]],
treeToNodeToIndexInfo: Map[Int, Map[Int, RandomForest.NodeIndexInfo]]) =
RandomForest.selectNodesToSplit(nodeStack, maxMemoryUsage, metadata, rng)
assert(nodesForGroup.size === numTrees, failString)
assert(nodesForGroup.values.forall(_.length == 1), failString) // 1 node per tree
if (numFeaturesPerNode == numFeatures) {
// featureSubset values should all be None
assert(treeToNodeToIndexInfo.values.forall(_.values.forall(_.featureSubset.isEmpty)),
failString)
} else {
// Check number of features.
assert(treeToNodeToIndexInfo.values.forall(_.values.forall(
_.featureSubset.get.length === numFeaturesPerNode)), failString)
}
}
}
checkFeatureSubsetStrategy(numTrees = 1, "auto", numFeatures)
checkFeatureSubsetStrategy(numTrees = 1, "all", numFeatures)
checkFeatureSubsetStrategy(numTrees = 1, "sqrt", math.sqrt(numFeatures).ceil.toInt)
checkFeatureSubsetStrategy(numTrees = 1, "log2",
(math.log(numFeatures) / math.log(2)).ceil.toInt)
checkFeatureSubsetStrategy(numTrees = 1, "onethird", (numFeatures / 3.0).ceil.toInt)
val realStrategies = Array(".1", ".10", "0.10", "0.1", "0.9", "1.0")
for (strategy <- realStrategies) {
val expected = (strategy.toDouble * numFeatures).ceil.toInt
checkFeatureSubsetStrategy(numTrees = 1, strategy, expected)
}
val integerStrategies = Array("1", "10", "100", "1000", "10000")
for (strategy <- integerStrategies) {
val expected = if (strategy.toInt < numFeatures) strategy.toInt else numFeatures
checkFeatureSubsetStrategy(numTrees = 1, strategy, expected)
}
val invalidStrategies = Array("-.1", "-.10", "-0.10", ".0", "0.0", "1.1", "0")
for (invalidStrategy <- invalidStrategies) {
intercept[IllegalArgumentException]{
val metadata =
DecisionTreeMetadata.buildMetadata(rdd, strategy, numTrees = 1, invalidStrategy)
}
}
checkFeatureSubsetStrategy(numTrees = 2, "all", numFeatures)
checkFeatureSubsetStrategy(numTrees = 2, "auto", math.sqrt(numFeatures).ceil.toInt)
checkFeatureSubsetStrategy(numTrees = 2, "sqrt", math.sqrt(numFeatures).ceil.toInt)
checkFeatureSubsetStrategy(numTrees = 2, "log2",
(math.log(numFeatures) / math.log(2)).ceil.toInt)
checkFeatureSubsetStrategy(numTrees = 2, "onethird", (numFeatures / 3.0).ceil.toInt)
for (strategy <- realStrategies) {
val expected = (strategy.toDouble * numFeatures).ceil.toInt
checkFeatureSubsetStrategy(numTrees = 2, strategy, expected)
}
for (strategy <- integerStrategies) {
val expected = if (strategy.toInt < numFeatures) strategy.toInt else numFeatures
checkFeatureSubsetStrategy(numTrees = 2, strategy, expected)
}
for (invalidStrategy <- invalidStrategies) {
      intercept[IllegalArgumentException] {
val metadata =
DecisionTreeMetadata.buildMetadata(rdd, strategy, numTrees = 2, invalidStrategy)
}
}
}
test("Binary classification with continuous features: subsampling features") {
val categoricalFeaturesInfo = Map.empty[Int, Int]
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 2,
numClasses = 2, categoricalFeaturesInfo = categoricalFeaturesInfo)
binaryClassificationTestWithContinuousFeaturesAndSubsampledFeatures(strategy)
}
test("Binary classification with continuous features and node Id cache: subsampling features") {
val categoricalFeaturesInfo = Map.empty[Int, Int]
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 2,
numClasses = 2, categoricalFeaturesInfo = categoricalFeaturesInfo,
useNodeIdCache = true)
binaryClassificationTestWithContinuousFeaturesAndSubsampledFeatures(strategy)
}
test("computeFeatureImportance, featureImportances") {
/* Build tree for testing, with this structure:
grandParent
left2 parent
left right
*/
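    // computeFeatureImportance accumulates, for each internal node splitting on feature f:
    //   impurity(node) * count(node)
    //     - (impurity(left) * count(left) + impurity(right) * count(right)),
    // as computed explicitly for feature0importance and feature1importance below.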
val leftImp = new GiniCalculator(Array(3.0, 2.0, 1.0), 6L)
val left = new LeafNode(0.0, leftImp.calculate(), leftImp)
val rightImp = new GiniCalculator(Array(1.0, 2.0, 5.0), 8L)
val right = new LeafNode(2.0, rightImp.calculate(), rightImp)
val parent = TreeTests.buildParentNode(left, right, new ContinuousSplit(0, 0.5))
val parentImp = parent.impurityStats
val left2Imp = new GiniCalculator(Array(1.0, 6.0, 1.0), 8L)
val left2 = new LeafNode(0.0, left2Imp.calculate(), left2Imp)
val grandParent = TreeTests.buildParentNode(left2, parent, new ContinuousSplit(1, 1.0))
val grandImp = grandParent.impurityStats
// Test feature importance computed at different subtrees.
def testNode(node: Node, expected: Map[Int, Double]): Unit = {
val map = new OpenHashMap[Int, Double]()
TreeEnsembleModel.computeFeatureImportance(node, map)
assert(mapToVec(map.toMap) ~== mapToVec(expected) relTol 0.01)
}
// Leaf node
testNode(left, Map.empty[Int, Double])
// Internal node with 2 leaf children
val feature0importance = parentImp.calculate() * parentImp.count -
(leftImp.calculate() * leftImp.count + rightImp.calculate() * rightImp.count)
testNode(parent, Map(0 -> feature0importance))
// Full tree
val feature1importance = grandImp.calculate() * grandImp.count -
(left2Imp.calculate() * left2Imp.count + parentImp.calculate() * parentImp.count)
testNode(grandParent, Map(0 -> feature0importance, 1 -> feature1importance))
// Forest consisting of (full tree) + (internal node with 2 leafs)
val trees = Array(parent, grandParent).map { root =>
new DecisionTreeClassificationModel(root, numFeatures = 2, numClasses = 3)
.asInstanceOf[DecisionTreeModel]
}
val importances: Vector = TreeEnsembleModel.featureImportances(trees, 2)
val tree2norm = feature0importance + feature1importance
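    // Per-tree importances are normalized before averaging: tree 1 (parent) puts all of its
    // importance on feature 0 and contributes (1.0, 0.0), while tree 2 (grandParent) contributes
    // (feature0importance / tree2norm, feature1importance / tree2norm); the forest importance is
    // the average of the two, matching `expected` below.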
val expected = Vectors.dense((1.0 + feature0importance / tree2norm) / 2.0,
(feature1importance / tree2norm) / 2.0)
assert(importances ~== expected relTol 0.01)
}
test("normalizeMapValues") {
val map = new OpenHashMap[Int, Double]()
map(0) = 1.0
map(2) = 2.0
TreeEnsembleModel.normalizeMapValues(map)
val expected = Map(0 -> 1.0 / 3.0, 2 -> 2.0 / 3.0)
assert(mapToVec(map.toMap) ~== mapToVec(expected) relTol 0.01)
}
///////////////////////////////////////////////////////////////////////////////
// Tests for pruning of redundant subtrees (generated by a split improving the
// impurity measure, but always leading to the same prediction).
///////////////////////////////////////////////////////////////////////////////
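  // Note: RandomForest.run prunes by default, so a split whose children would all yield the same
  // prediction is expected to be collapsed into a single leaf; the tests below compare node counts
  // of the default (pruned) trees against trees built with prune = false.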
test("SPARK-3159 tree model redundancy - classification") {
// The following dataset is set up such that splitting over feature_1 for points having
    // feature_0 = 0 improves the impurity measure, even though the prediction will always be 0
// in both branches.
val arr = Array(
Instance(0.0, 1.0, Vectors.dense(0.0, 1.0)),
Instance(1.0, 1.0, Vectors.dense(0.0, 1.0)),
Instance(0.0, 1.0, Vectors.dense(0.0, 0.0)),
Instance(1.0, 1.0, Vectors.dense(1.0, 0.0)),
Instance(0.0, 1.0, Vectors.dense(1.0, 0.0)),
Instance(1.0, 1.0, Vectors.dense(1.0, 1.0))
)
val rdd = sc.parallelize(arr)
val numClasses = 2
val strategy = new OldStrategy(algo = OldAlgo.Classification, impurity = Gini, maxDepth = 4,
numClasses = numClasses, maxBins = 32)
val prunedTree = RandomForest.run(rdd, strategy, numTrees = 1, featureSubsetStrategy = "auto",
seed = 42, instr = None).head
val unprunedTree = RandomForest.run(rdd, strategy, numTrees = 1, featureSubsetStrategy = "auto",
seed = 42, instr = None, prune = false).head
assert(prunedTree.numNodes === 5)
assert(unprunedTree.numNodes === 7)
assert(RandomForestSuite.getSumLeafCounters(List(prunedTree.rootNode)) === arr.size)
}
test("SPARK-3159 tree model redundancy - regression") {
// The following dataset is set up such that splitting over feature_0 for points having
    // feature_1 = 1 improves the impurity measure, even though the prediction will always be 0.5
// in both branches.
val arr = Array(
Instance(0.0, 1.0, Vectors.dense(0.0, 1.0)),
Instance(1.0, 1.0, Vectors.dense(0.0, 1.0)),
Instance(0.0, 1.0, Vectors.dense(0.0, 0.0)),
Instance(0.0, 1.0, Vectors.dense(1.0, 0.0)),
Instance(1.0, 1.0, Vectors.dense(1.0, 1.0)),
Instance(0.0, 1.0, Vectors.dense(1.0, 1.0)),
Instance(0.5, 1.0, Vectors.dense(1.0, 1.0))
)
val rdd = sc.parallelize(arr)
val strategy = new OldStrategy(algo = OldAlgo.Regression, impurity = Variance, maxDepth = 4,
numClasses = 0, maxBins = 32)
val prunedTree = RandomForest.run(rdd, strategy, numTrees = 1, featureSubsetStrategy = "auto",
seed = 42, instr = None).head
val unprunedTree = RandomForest.run(rdd, strategy, numTrees = 1, featureSubsetStrategy = "auto",
seed = 42, instr = None, prune = false).head
assert(prunedTree.numNodes === 3)
assert(unprunedTree.numNodes === 5)
assert(RandomForestSuite.getSumLeafCounters(List(prunedTree.rootNode)) === arr.size)
}
test("weights at arbitrary scale") {
val arr = EnsembleTestHelper.generateOrderedLabeledPoints(3, 10)
val rddWithUnitWeights = sc.parallelize(arr.map(_.asML.toInstance))
val rddWithSmallWeights = rddWithUnitWeights.map { inst =>
Instance(inst.label, 0.001, inst.features)
}
val rddWithBigWeights = rddWithUnitWeights.map { inst =>
Instance(inst.label, 1000, inst.features)
}
val strategy = new OldStrategy(OldAlgo.Classification, Gini, 3, 2)
val unitWeightTrees = RandomForest.run(rddWithUnitWeights, strategy, 3, "all", 42L, None)
val smallWeightTrees = RandomForest.run(rddWithSmallWeights, strategy, 3, "all", 42L, None)
unitWeightTrees.zip(smallWeightTrees).foreach { case (unitTree, smallWeightTree) =>
TreeTests.checkEqual(unitTree, smallWeightTree)
}
val bigWeightTrees = RandomForest.run(rddWithBigWeights, strategy, 3, "all", 42L, None)
unitWeightTrees.zip(bigWeightTrees).foreach { case (unitTree, bigWeightTree) =>
TreeTests.checkEqual(unitTree, bigWeightTree)
}
}
test("minWeightFraction and minInstancesPerNode") {
val data = Array(
Instance(0.0, 1.0, Vectors.dense(0.0)),
Instance(0.0, 1.0, Vectors.dense(0.0)),
Instance(0.0, 1.0, Vectors.dense(0.0)),
Instance(0.0, 1.0, Vectors.dense(0.0)),
Instance(1.0, 0.1, Vectors.dense(1.0))
)
val rdd = sc.parallelize(data)
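    // Total weight is 4.1. A split would isolate the single class-1 instance with weight 0.1
    // (fraction ~0.024), so minWeightFractionPerNode = 0.5 blocks the split (depth 0) while 0.0
    // allows it (depth 1); similarly minInstancesPerNode = 2 blocks it and 1 allows it.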
val strategy = new OldStrategy(OldAlgo.Classification, Gini, 3, 2,
minWeightFractionPerNode = 0.5)
val Array(tree1) = RandomForest.run(rdd, strategy, 1, "all", 42L, None)
assert(tree1.depth === 0)
strategy.minWeightFractionPerNode = 0.0
val Array(tree2) = RandomForest.run(rdd, strategy, 1, "all", 42L, None)
assert(tree2.depth === 1)
strategy.minInstancesPerNode = 2
val Array(tree3) = RandomForest.run(rdd, strategy, 1, "all", 42L, None)
assert(tree3.depth === 0)
strategy.minInstancesPerNode = 1
val Array(tree4) = RandomForest.run(rdd, strategy, 1, "all", 42L, None)
assert(tree4.depth === 1)
}
}
private object RandomForestSuite {
def mapToVec(map: Map[Int, Double]): Vector = {
val size = (map.keys.toSeq :+ 0).max + 1
val (indices, values) = map.toSeq.sortBy(_._1).unzip
Vectors.sparse(size, indices.toArray, values.toArray)
}
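  // Depth-first traversal summing the raw (unweighted) instance count stored in each leaf's
  // impurity stats; used above to check that a pruned tree still accounts for every instance.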
@tailrec
private def getSumLeafCounters(nodes: List[Node], acc: Long = 0): Long = {
if (nodes.isEmpty) {
acc
}
else {
nodes.head match {
case i: InternalNode => getSumLeafCounters(i.leftChild :: i.rightChild :: nodes.tail, acc)
case l: LeafNode => getSumLeafCounters(nodes.tail, acc + l.impurityStats.rawCount)
}
}
}
}
|
pgandhi999/spark
|
mllib/src/test/scala/org/apache/spark/ml/tree/impl/RandomForestSuite.scala
|
Scala
|
apache-2.0
| 34,293 |
package com.twitter.zk
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
import org.apache.zookeeper.ZooKeeper
import com.twitter.logging.Logger
import com.twitter.util.Future
trait Connector {
import Connector.EventHandler
val name = "zk-connector"
protected lazy val log = Logger.get(name)
private[this] val listeners = new AtomicReference[List[EventHandler]](Nil)
protected[this] val sessionBroker: EventBroker = new EventBroker
  // a broker may only be used for 1:1 communication, so we fan out event notifications
sessionBroker.recv foreach { event =>
val listening = listeners.get()
log.debug("propagating event to %d listeners %s", listening.size, event)
val stateEvent = StateEvent(event)
listening.foreach { listener =>
if (listener.isDefinedAt(stateEvent)) {
try {
listener(stateEvent)
} catch {
case e: Throwable => log.error(e, "Exception in connection event listener")
}
} else log.debug("listener does not handle %s", event)
}
}
@tailrec
  final def onSessionEvent(f: EventHandler): Unit = {
val list = listeners.get()
if (!listeners.compareAndSet(list, f :: list)) onSessionEvent(f)
}
/** Connect to a ZooKeeper cluster and yield a handle once the connection is complete. */
def apply(): Future[ZooKeeper]
/** Disconnect from the ZooKeeper server. */
def release(): Future[Unit]
}
object Connector {
type EventHandler = PartialFunction[StateEvent, Unit]
/**
* Dispatches requests across several connectors.
*
   * Session events from all Connectors are published on the session broker; however consumers
* of these events cannot know which specific connection the event was fired on.
*/
case class RoundRobin(connectors: Connector*) extends Connector {
require(connectors.length > 0)
override val name = "round-robin-zk-connector:%d".format(connectors.length)
private[this] var index = 0
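    // Round-robin selection: the shared index is advanced under `synchronized` and reset before it
    // can overflow Int.MaxValue, so the modulo below always yields a valid connector index.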
protected[this] def nextConnector() = {
val i = synchronized {
        if (index == Int.MaxValue) {
index = 0
}
index = index + 1
index % connectors.length
}
log.trace("connector %d of %d", i+1, connectors.length)
connectors(i)
}
connectors foreach {
_ onSessionEvent { case event => sessionBroker.send(event()).sync() }
}
def apply(): Future[ZooKeeper] = nextConnector().apply()
/** Disconnect from all ZooKeeper servers. */
def release(): Future[Unit] = Future.join {
log.trace("release")
connectors map { _.release() }
}
}
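  // A minimal usage sketch (hypothetical: `zk1` and `zk2` stand for previously constructed
  // Connector instances):
  //
  //   val connector = Connector.RoundRobin(zk1, zk2)
  //   connector.onSessionEvent { case event => log.info("session event: %s", event) }
  //   val zk: Future[ZooKeeper] = connector()   // each call is dispatched to the next connector
  //   connector.release()                       // releases every underlying connector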
}
|
edombowsky/util
|
util-zk/src/main/scala/com/twitter/zk/Connector.scala
|
Scala
|
apache-2.0
| 2,628 |
package sri.universal.navigation
import chandu0101.macros.tojs.JSMacro
import sri.core._
import sri.universal.ReactUniversal
import sri.universal.components._
import sri.universal.all._
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
import scala.scalajs.js.{UndefOr => U, undefined => undefined}
case class NavigationHeader(key: U[String] = undefined,
style: U[js.Any] = undefined,
navigationProps: U[NavigationSceneRendererProps] = undefined,
                            renderTitleComponent: U[(NavigationSceneRendererProps, NavigationScene) => ReactElement] = undefined,
                            renderLeftComponent: U[(NavigationSceneRendererProps, NavigationScene) => ReactElement] = undefined,
                            renderRightComponent: U[(NavigationSceneRendererProps, NavigationScene) => ReactElement] = undefined,
ref: U[NavigationHeaderM => _] = undefined) {
def apply() = {
val props = JSMacro[NavigationHeader](this)
React.createElement(ReactUniversal.NavigationExperimental.Header, props)
}
}
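// A hedged usage sketch (assumes `sceneProps: NavigationSceneRendererProps` and a
// `titleElement: ReactElement` are supplied by the enclosing NavigationExperimental renderer):
//
//   NavigationHeader(
//     navigationProps = sceneProps,
//     renderTitleComponent = (props, scene) => titleElement
//   )()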
object NavigationHeader {
  lazy val APPBAR_HEIGHT: Double = ReactUniversal.NavigationExperimental.Header.asInstanceOf[js.Dynamic].APPBAR_HEIGHT.asInstanceOf[Double]
  lazy val STATUSBAR_HEIGHT: Double = ReactUniversal.NavigationExperimental.Header.asInstanceOf[js.Dynamic].STATUSBAR_HEIGHT.asInstanceOf[Double]
}
@js.native
trait NavigationHeaderM extends js.Object
|
hamazy/sri
|
universal/src/main/scala/sri/universal/navigation/NavigationHeader.scala
|
Scala
|
apache-2.0
| 1,512 |