code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M)
---|---|---|---|---|---
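Each row below pairs a multi-line code cell with five metadata cells in the header's order (repo_name, path, language, license, size). As a minimal sketch of that layout in the dataset's own language, here is a record type mirroring the columns; the type name, the helper, and reading size as a Long are assumptions for illustration, not part of the dataset itself:

case class CodeRow(
  code: String,     // full source text of the file
  repoName: String, // e.g. "bvenners/equality-integration-demo"
  path: String,     // file path inside the repository
  language: String, // always "Scala" in this slice
  license: String,  // e.g. "apache-2.0", "mit", "bsd-3-clause", "gpl-3.0"
  size: Long        // value of the size column (int64)
)

object CodeRow {
  // Hypothetical helper: the size cells below are rendered with thousands separators (e.g. "3,882").
  def parseSize(cell: String): Long = cell.trim.replace(",", "").toLong
}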
package equalitydemo
import org.scalatest._
trait SpireAssertions extends Assertions with SpireEquality
object SpireAssertions extends SpireAssertions
bvenners/equality-integration-demo | src/test/scala/equalitydemo/SpireAssertions.scala | Scala | apache-2.0 | 154
// Scala
import annotation.targetName
class A_1 {
@targetName("bar") def foo(): Int = 1
}
dotty-staging/dotty | tests/pos/alpha-override/A_1.scala | Scala | apache-2.0 | 92
/*
* Copyright (c) 2012-2013 SnowPlow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.hadoop.hive
// Specs2
import org.specs2.mutable.Specification
// Deserializer
import test.SnowPlowDeserializer
class BadQsFieldTest extends Specification {
// Toggle if tests are failing and you want to inspect the struct contents
implicit val _DEBUG = false
// Contains a bad querystring field - "referer" not "refr"
val badField = "2012-05-21\\t07:14:47\\tFRA2\\t3343\\t83.4.209.35\\tGET\\td3t05xllj8hhgj.cloudfront.net\\t/ice.png\\t200\\thttps://test.psybazaar.com/shop/checkout/\\tMozilla/5.0%20(X11;%20Ubuntu;%20Linux%20x86_64;%20rv:11.0)%20Gecko/20100101%20Firefox/11.0\\t&ev_ca=ecomm&ev_ac=checkout&ev_la=id_email&ev_pr=ERROR&tid=236095&referer=http%253A%252F%252Ftest.psybazaar.com%252F&duid=135f6b7536aff045&lang=en-US&vid=5&f_pdf=0&f_qt=1&f_realp=0&f_wma=1&f_dir=0&f_fla=1&f_java=1&f_gears=0&f_ag=0&res=1920x1080&cookie=1"
"A SnowPlow querystring with an incorrectly named field (\\"referer\\" not \\"refr\\")" should {
"not return a <<null>> record" in {
SnowPlowDeserializer.deserialize(badField).dt must not beNull
}
}
// Contains an extra querystring field - "future"
val extraField = "2012-05-21\\t07:14:47\\tFRA2\\t3343\\t83.4.209.35\\tGET\\td3t05xllj8hhgj.cloudfront.net\\t/ice.png\\t200\\thttps://test.psybazaar.com/shop/checkout/\\tMozilla/5.0%20(X11;%20Ubuntu;%20Linux%20x86_64;%20rv:11.0)%20Gecko/20100101%20Firefox/11.0\\t&ev_ca=ecomm&ev_ac=checkout&ev_la=id_email&ev_pr=ERROR&tid=236095&future=1&refr=http%253A%252F%252Ftest.psybazaar.com%252F&duid=135f6b7536aff045&lang=en-US&vid=5&f_pdf=0&f_qt=1&f_realp=0&f_wma=1&f_dir=0&f_fla=1&f_java=1&f_gears=0&f_ag=0&res=1920x1080&cookie=1"
"A SnowPlow querystring with an extra field (\\"future\\")" should {
"not return a <<null>> record" in {
SnowPlowDeserializer.deserialize(extraField).dt must not beNull
}
}
// Contains an unescaped referer
// Happens in case of tracker malfunction
val unescapedField = "2012-12-10 03:05:09 LHR5 3703 207.189.121.44 GET d10wr4jwvp55f9.cloudfront.net /ice.png 200 - Mozilla/5.0%20(Windows;%20U;%20Windows%20NT%205.1;%20en-US;%20rv:1.9.2.8)%20Gecko/20100721%20Firefox/3.6.8 page=Publisher:%20Piatnik%20-%20Psychic%20Bazaar&tid=128078&duid=ea4bcf975f101eec&vid=2&lang=en-gb&refr=http://www.google.co.uk/search?hl=en&tbo=d&site=&source=hp&q=piatnik+tarot&oq=piatnik+tarot&gs_l=mobile-gws-hp.1.0.0j0i30l2j0i8i10i30j0i8i30.11955.26264.0.29732.17.14.2.1.1.0.1060.4823.2j2j5j0j2j1j1j1.14.0.les%253B..0.0...1ac.1.vP9Vltg2PPw&f_pdf=0&f_qt=1&f_realp=0&f_wma=0&f_dir=0&f_fla=0&f_java=0&f_gears=0&f_ag=0&res=320x568&cookie=1&url=http://www.psychicbazaar.com/publisher/8_piatnik?utm_source=GoogleSearch&utm_medium=cpc&utm_campaign=uk-piatnik&utm_term=piatnik%2520tarot%2520cards&utm_content=29271604528&gclid=CL3159nCjLQCFe7MtAod83wACA - RefreshHit 9rjpHm7OYpiNE7bURuP3cGbsem974NNrIoxgdQ6XuQm6Ils0d6_mUQ=="
"A SnowPlow querystring with an unescaped field (\\"refr\\")" should {
val actual = SnowPlowDeserializer.deserialize(unescapedField)
"not return a <<null>> record" in {
actual.dt must not beNull
}
"have a <<null>> event field" in {
actual.event must beNull
}
}
}
richo/snowplow | 3-etl/hive-etl/snowplow-log-deserializers/src/test/scala/com/snowplowanalytics/snowplow/hadoop/hive/BadQsFieldTest.scala | Scala | apache-2.0 | 3,882
package pl.combosolutions.backup.psm.elevation
import java.rmi.RemoteException
import java.rmi.registry.Registry
import org.specs2.matcher.Scope
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import pl.combosolutions.backup.test.Tags.UnitTest
class ElevatedExecutorSpec extends Specification with Mockito {
val notifierName = "notifier-test-name"
val serverName = "server-test-name"
val remotePort = 666
val arguments = new Array[String](3)
arguments.update(0, notifierName)
arguments.update(1, serverName)
arguments.update(2, remotePort.toString)
"ElevatedExecutor" should {
"export ElevationServer and notify about successful initialization" in new TestContext {
// given
// when
new TestElevatedExecutor(registry)(arguments)
// then
there was one(registry).bind(===(serverName), any[ElevationServer])
there was one(notifier).notifyReady
} tag UnitTest
"notify about failed initialization" in new TestContext {
// given
registry.bind(===(serverName), any[ElevationServer]) throws new RemoteException
// when
new TestElevatedExecutor(registry)(arguments)
// then
there was one(registry).bind(===(serverName), any[ElevationServer])
there was one(notifier).notifyFailure
} tag UnitTest
}
class TestElevatedExecutor(mockRegistry: Registry)(args: Array[String])
extends ElevatedExecutor(args) {
override def configureRMI = {}
override def locateRegistryFor(remotePort: Integer) = mockRegistry
override def exportServer(server: ElevationServer) = server
override def terminateOnFailure = {}
}
trait TestContext extends Scope {
val registry = mock[Registry]
val notifier = mock[ElevationReadyNotifier]
(registry lookup notifierName) returns notifier
}
}
MateuszKubuszok/BackupDSL | modules/psm/src/test/scala/pl/combosolutions/backup/psm/elevation/ElevatedExecutorSpec.scala | Scala | mit | 1,846
package org.hibernate.cache.rediscala.strategy
import org.hibernate.cache.rediscala.regions._
import org.hibernate.cache.spi.access._
import org.hibernate.cfg.Settings
class NonStrictReadWriteRedisCollectionRegionAccessStrategy(private[this] val _region: RedisCollectionRegion,
private[this] val _settings: Settings)
extends AbstractRedisAccessStrategy(_region, _settings)
with CollectionRegionAccessStrategy {
override def getRegion = region
override def get(key: Any, txTimestamp: Long): AnyRef =
region.get(key).asInstanceOf[AnyRef]
override def putFromLoad(key: Any,
value: Any,
txTimestamp: Long,
version: Any,
minimalPutOverride: Boolean): Boolean = {
if (minimalPutOverride && region.contains(key)) {
return false
}
region.put(key, value)
true
}
override def lockItem(key: Any, version: Any): SoftLock = null
override def unlockItem(key: Any, lock: SoftLock) {
region.remove(key)
}
}
class NonStrictReadWriteRedisEntityRegionAccessStrategy(private[this] val _region: RedisEntityRegion,
private[this] val _settings: Settings)
extends AbstractRedisAccessStrategy(_region, _settings)
with EntityRegionAccessStrategy {
override def getRegion = region
override def get(key: Any, txTimestamp: Long): AnyRef =
region.get(key).asInstanceOf[AnyRef]
override def putFromLoad(key: Any,
value: Any,
txTimestamp: Long,
version: Any,
minimalPutOverride: Boolean): Boolean = {
if (minimalPutOverride && region.contains(key)) {
return false
}
region.put(key, value)
true
}
override def lockItem(key: Any, version: Any): SoftLock = null
override def unlockItem(key: Any, lock: SoftLock) {
region.remove(key)
}
override def insert(key: Any, value: Any, version: Any): Boolean = false
override def afterInsert(key: Any, value: Any, version: Any): Boolean = false
override def update(key: Any, value: Any, currentVersion: Any, previousVersion: Any): Boolean = {
remove(key)
true
}
override def afterUpdate(key: Any,
value: Any,
currentVersion: Any,
previousVersion: Any,
lock: SoftLock): Boolean = {
unlockItem(key, lock)
true
}
}
class NonStrictReadWriteRedisNatualIdRegionAccessStrategy(private[this] val _region: RedisNaturalIdRegion,
private[this] val _settings: Settings)
extends AbstractRedisAccessStrategy(_region, _settings)
with NaturalIdRegionAccessStrategy {
override def getRegion = region
override def get(key: Any, txTimestamp: Long) =
region.get(key).asInstanceOf[AnyRef]
override def putFromLoad(key: Any,
value: Any,
txTimestamp: Long,
version: Any,
minimalPutOverride: Boolean): Boolean = {
if (minimalPutOverride && region.contains(key)) {
return false
}
region.put(key, value)
true
}
override def lockItem(key: Any, version: Any): SoftLock = null
override def unlockItem(key: Any, lock: SoftLock) {
region.remove(key)
}
def insert(key: Any, value: Any): Boolean = false
def afterInsert(key: Any, value: Any): Boolean = false
def update(key: Any, value: Any): Boolean = {
remove(key)
true
}
def afterUpdate(key: Any, value: Any, lock: SoftLock): Boolean = {
unlockItem(key, lock)
true
}
}
debop/debop4s | hibernate-rediscala/src/main/scala/org/hibernate/cache/rediscala/strategy/NonStrictReadWriteStrategy.scala | Scala | apache-2.0 | 3,832
package com.twitter.common.args
import com.google.common.base.Optional
import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
import com.twitter.common.args._
import tools.scalap.scalax.rules.scalasig._
import java.lang.{Class, String}
import scala.annotation.target.field
object Flags {
private[this] val Copy = "copy"
private[this] val Copy$default$ = "copy$default$"
private[this] val DefaultHelp = "no help for you"
def apply[A <: AnyRef](caseInstance: A, args: Seq[String]): A = {
val caseClass = caseInstance.getClass
val fields = fieldsFor(caseClass)
val copyMethod = copyMethodFor(caseClass)
val optionInfos = fields map { case (name, clazz) =>
val prefix = ""
val flagAnnotation = Option(caseClass.getMethod(name).getAnnotation(classOf[CmdLine]))
OptionInfo.create(
flagAnnotation.map(_.name).getOrElse(name),
flagAnnotation.map(_.name).getOrElse(DefaultHelp),
prefix,
boxType(clazz))
}
val argumentInfo = new Args.ArgumentInfo(
Optional.absent[PositionalInfo[_]],
asJavaIterable(optionInfos))
new ArgScanner().parse(argumentInfo, args)
val parametersToCopyMethod = optionInfos.zipWithIndex.map { case (optionInfo, i) =>
val arg = optionInfo.getArg
if (arg.hasAppliedValue)
arg.get.asInstanceOf[Object]
else
caseClass.getMethod(Copy$default$ + (i + 1)).invoke(caseInstance)
}
copyMethod.invoke(caseInstance, parametersToCopyMethod: _*).asInstanceOf[A]
}
private[this] def boxType(clazz: Class[_]): Class[_] = {
clazz match {
case c if c == classOf[Int] => classOf[java.lang.Integer]
case _ => clazz
}
}
private[this] def copyMethodFor(clazz: Class[_]) = {
clazz.getMethods.find(_.getName == Copy).getOrElse(
error("Cannot find copy method for class " + clazz.getName)
)
}
/**
* Portions of this code are copied from
* http://stackoverflow.com/questions/6282464/in-scala-how-can-i-programmatically-determine-the-name-of-the-fields-of-a-case
*/
private[this] def fieldsFor(clazz: Class[_]): Seq[(String, Class[_])] = {
val rootClass = {
var currentClass = clazz
while (currentClass.getEnclosingClass ne null) currentClass = currentClass.getEnclosingClass
currentClass
}
val sig = ScalaSigParser.parse(rootClass).getOrElse(
error("No ScalaSig for class " + rootClass.getName + ", make sure it is a top-level case class"))
val tableSize = sig.table.size
val classSymbolIndex = (0 until tableSize).find { i =>
sig.parseEntry(i) match {
case c @ ClassSymbol(SymbolInfo(name, _, _, _, _, _), _)
if c.isCase && c.path == clazz.getCanonicalName => true
case _ => false
}
}.getOrElse(error("Class " + rootClass.getName + " is not a case class"))
val classSymbol = sig.parseEntry(classSymbolIndex).asInstanceOf[ClassSymbol]
val copyMethod = copyMethodFor(clazz)
val copyIndex = ((classSymbolIndex + 1) until tableSize).find { i =>
sig.parseEntry(i) match {
case m @ MethodSymbol(SymbolInfo(Copy, owner, _, _, _, _), _) => owner match {
case sym: SymbolInfoSymbol if sym.index == classSymbol.index => true
case _ => false
}
case _ => false
}
}.getOrElse(error("Cannot find copy method entry in ScalaSig for class " + rootClass.getName))
val paramsListBuilder = List.newBuilder[String]
for (i <- (copyIndex + 1) until tableSize) {
sig.parseEntry(i) match {
case MethodSymbol(SymbolInfo(name, owner, _, _, _, _), _) => owner match {
case sym: SymbolInfoSymbol if sym.index == copyIndex => paramsListBuilder += name
case _ =>
}
case _ =>
}
}
paramsListBuilder.result zip copyMethod.getParameterTypes
}
}
foursquare/commons-old | src/scala/com/twitter/common/args/Flags.scala | Scala | apache-2.0 | 3,876
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import org.apache.spark.sql.{DataFrame, QueryTest}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.scalatest.BeforeAndAfterAll
// TODO ideally we should put the test suite into the package `sql`, as
// `hive` package is optional in compiling, however, `SQLContext.sql` doesn't
// support the `cube` or `rollup` yet.
class HiveDataFrameAnalyticsSuite extends QueryTest with TestHiveSingleton with BeforeAndAfterAll {
import hiveContext.implicits._
import hiveContext.sql
private var testData: DataFrame = _
override def beforeAll() {
testData = Seq((1, 2), (2, 4)).toDF("a", "b")
hiveContext.registerDataFrameAsTable(testData, "mytable")
}
override def afterAll(): Unit = {
hiveContext.dropTempTable("mytable")
}
test("rollup") {
checkAnswer(
testData.rollup($"a" + $"b", $"b").agg(sum($"a" - $"b")),
sql("select a + b, b, sum(a - b) from mytable group by a + b, b with rollup").collect()
)
checkAnswer(
testData.rollup("a", "b").agg(sum("b")),
sql("select a, b, sum(b) from mytable group by a, b with rollup").collect()
)
}
test("cube") {
checkAnswer(
testData.cube($"a" + $"b", $"b").agg(sum($"a" - $"b")),
sql("select a + b, b, sum(a - b) from mytable group by a + b, b with cube").collect()
)
checkAnswer(
testData.cube("a", "b").agg(sum("b")),
sql("select a, b, sum(b) from mytable group by a, b with cube").collect()
)
}
}
pronix/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveDataFrameAnalyticsSuite.scala | Scala | apache-2.0 | 2,346
package sc.ala.http.mock
import java.util.concurrent.ConcurrentLinkedQueue
import play.api.mvc._
final case class AccessLogQueue(queue: ConcurrentLinkedQueue[AccessLog] = new ConcurrentLinkedQueue[AccessLog]()) {
def add(request: RequestHeader): Unit = queue.add(AccessLog(request))
def add(request: RequestHeader, body: Option[ArrayByte]): Unit = queue.add(AccessLog(request, body))
def shift(): Option[AccessLog] = Option(queue.poll())
def filter(p: AccessLog => Boolean): Seq[AccessLog] = {
val array = scala.collection.mutable.ArrayBuffer[AccessLog]()
val it = queue.iterator()
while (it.hasNext) {
val log = it.next()
if (p(log))
array += log
}
array.toSeq
}
}
xuwei-k/http-mock | src/main/scala/sc/ala/http/mock/AccessLogQueue.scala | Scala | mit | 723
package com.sksamuel.elastic4s.requests.searches.queries
import com.sksamuel.elastic4s.requests.script.Script
import com.sksamuel.exts.OptionImplicits._
case class IntervalsQuery(field: String, rule: IntervalsRule) extends Query
sealed trait IntervalsRule
case class Match(query: String,
maxGaps: Option[Int] = None,
ordered: Option[Boolean] = None,
analyzer: Option[String] = None,
filter: Option[IntervalsFilter] = None,
useField: Option[String] = None) extends IntervalsRule {
override def toString = "match"
def maxGaps(maxGaps: Int): Match = copy(maxGaps = maxGaps.some)
def ordered(ordered: Boolean): Match = copy(ordered = ordered.some)
def analyzer(analyzer: String): Match = copy(analyzer = analyzer.some)
def filter(filter: IntervalsFilter): Match = copy(filter = filter.some)
def useField(useField: String): Match = copy(useField = useField.some)
}
case class Prefix(prefix: String,
analyzer: Option[String] = None,
useField: Option[String] = None) extends IntervalsRule {
override def toString = "prefix"
def analyzer(analyzer: String): Prefix = copy(analyzer = analyzer.some)
def useField(useField: String): Prefix = copy(useField = useField.some)
}
case class Wildcard(pattern: String,
analyzer: Option[String] = None,
useField: Option[String] = None) extends IntervalsRule {
override def toString = "wildcard"
def analyzer(analyzer: String): Wildcard = copy(analyzer = analyzer.some)
def useField(useField: String): Wildcard = copy(useField = useField.some)
}
case class Fuzzy(term: String,
prefixLength: Option[String] = None,
transpositions: Option[Boolean] = None,
fuzziness: Option[String] = None,
analyzer: Option[String] = None,
useField: Option[String] = None) extends IntervalsRule {
override def toString = "fuzzy"
def prefixLength(prefixLength: String): Fuzzy = copy(prefixLength = prefixLength.some) // maybe Int ?
def transpositions(transpositions: Boolean): Fuzzy = copy(transpositions = transpositions.some)
def fuzziness(fuzziness: String): Fuzzy = copy(fuzziness = fuzziness.some)
def analyzer(analyzer: String): Fuzzy = copy(analyzer = analyzer.some)
def useField(useField: String): Fuzzy = copy(useField = useField.some)
}
case class AllOf(intervals: List[IntervalsRule],
maxGaps: Option[Int] = None,
ordered: Option[Boolean] = None,
filter: Option[IntervalsFilter] = None) extends IntervalsRule {
override def toString = "all_of"
def maxGaps(maxGaps: Int): AllOf = copy(maxGaps = maxGaps.some)
def ordered(ordered: Boolean): AllOf = copy(ordered = ordered.some)
def filter(filter: IntervalsFilter): AllOf = copy(filter = filter.some)
}
case class AnyOf(intervals: List[IntervalsRule],
filter: Option[IntervalsFilter] = None) extends IntervalsRule {
override def toString = "any_of"
def filter(filter: IntervalsFilter): AnyOf = copy(filter = filter.some)
}
case class IntervalsFilter(after: Option[IntervalsRule] = None,
before: Option[IntervalsRule] = None,
containedBy: Option[IntervalsRule] = None,
containing: Option[IntervalsRule] = None,
notContainedBy: Option[IntervalsRule] = None,
notContaining: Option[IntervalsRule] = None,
notOverlapping: Option[IntervalsRule] = None,
overlapping: Option[IntervalsRule] = None,
script: Option[Script] = None) {
def after(rule: IntervalsRule): IntervalsFilter = copy(after = rule.some)
def before(rule: IntervalsRule): IntervalsFilter = copy(before = rule.some)
def containedBy(rule: IntervalsRule): IntervalsFilter = copy(containedBy = rule.some)
def containing(rule: IntervalsRule): IntervalsFilter = copy(containing = rule.some)
def notContainedBy(rule: IntervalsRule): IntervalsFilter = copy(notContainedBy = rule.some)
def notContaining(rule: IntervalsRule): IntervalsFilter = copy(notContaining = rule.some)
def notOverlapping(rule: IntervalsRule): IntervalsFilter = copy(notOverlapping = rule.some)
def overlapping(rule: IntervalsRule): IntervalsFilter = copy(overlapping = rule.some)
def script(script: Script): IntervalsFilter = copy(script = script.some)
}
stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/queries/IntervalsQuery.scala | Scala | apache-2.0 | 4,555
package scodec
package codecs
import java.util.UUID
import org.scalacheck.Arbitrary
class UuidTest extends CodecSuite {
implicit val arbitraryUuid: Arbitrary[UUID] = Arbitrary(UUID.randomUUID)
test("roundtrip") {
forAll { (u: UUID) => roundtrip(uuid, u) }
}
}
ceedubs/scodec | src/test/scala/scodec/codecs/UuidTest.scala | Scala | bsd-3-clause | 274
package com.softwaremill.scalaval.util
class InclusiveIterator[A](ia: Iterator[A]) {
def takeWhileInclusive(p: A => Boolean) = {
var done = false
val p2 = (a: A) => !done && { if (!p(a)) done=true; true }
ia.takeWhile(p2)
}
}
object InclusiveIterator {
implicit def iterator_can_include[A](ia: Iterator[A]) = new InclusiveIterator(ia)
}
mostr/scalaval | src/main/scala/com/softwaremill/scalaval/util/InclusiveIterator.scala | Scala | apache-2.0 | 357
package com.github.javifdev.survey
import twitter4j._
import scala.concurrent.duration._
import scala.collection.JavaConversions._
import scala.language.postfixOps
import scalaz.stream._
import scalaz.concurrent.Task
import scalaz.concurrent.Strategy.DefaultTimeoutScheduler
/**
* @author Javier Fuentes Sánchez
*/
object TwitterExample1 extends App with ConfigurationTwitter {
val twitter = new TwitterFactory(configurationBuilder.build()).getInstance()
implicit val _dts = DefaultTimeoutScheduler
/////////////////////////////////////////////////////
case class RespondUser(user: User, msg: String)
def processResponse(s: Status): RespondUser =
if(s.getText.indexOf("#scalazMAD") > 0)
RespondUser(s.getUser, "(╯°□°)╯︵ ┻━┻")
else
RespondUser(s.getUser, "(づ ̄ ³ ̄)づ")
// val t1 = process1 lift processResponse
def createRequest(ru: RespondUser): StatusUpdate = ru match {
case RespondUser(user, msg) =>
val updateStatus: StatusUpdate = new StatusUpdate(msg)
updateStatus.setInReplyToStatusId(user.getId)
updateStatus
}
// val t2 = process1 lift createRequest
val createResponses = process1 lift (processResponse _ andThen createRequest _)
//////////////////////////////////////////////////////
val polling = time.awakeEvery(30 seconds)
val hashtag = "ScalaMAD"
val query = new Query("#" + hashtag)
val getQuery: Process1[Any, Query] = process1 lift { _ => query }
def executeQuery(query: Query): Task[List[Status]] = Task { twitter.search(query).getTweets.toList }
val queryChannel = channel lift executeQuery
val tweets = polling pipe getQuery through queryChannel flatMap { Process emitAll _ }
//////////////////////////////////////////////////////
def executeRequest(su: StatusUpdate): Task[Unit] =
Task { twitter.updateStatus(su) }
val replySink = sink lift executeRequest
val program = tweets pipe createResponses to replySink
program.run.run
// def searchProcess: Channel[Task, Any, List[Status]] = channel lift (_ => Task { twitter.search(query).getTweets.toList })
// def reply: ((String, User)) => Task[Unit] = {
// case (str, user) =>
// val updateStatus: StatusUpdate = new StatusUpdate(str)
// updateStatus.setInReplyToStatusId(user.getId)
// Task { twitter.updateStatus(updateStatus) }
// }
// def replySink: Sink[Task, (String, User)] = sink lift reply
// def fun: Status => (String, User) = status =>
// if(status.getText.indexOf("#scalazMAD") > 0)
// ("(╯°□°)╯︵ ┻━┻", status.getUser)
// else ("(づ ̄ ³ ̄)づ", status.getUser)
// def funProcess = process1 lift fun
// time.awakeEvery(30 seconds) // Process[Task, Duration]
// .through(searchProcess) // Process[Task, List[Status]]
// .flatMap(Process.emitAll) // Process[Task, Status]
// .map(x => {println(x) ; x}) // .observe(io.stdOutLines) // Process[Task, Status]
// .pipe(funProcess) // Process[Task, (String, User)]
// .map(println)
// // .to(replySink) // Process[Task, Unit]
// .run.run
}
javierfs89/twitter-survey | src/main/scala/com/github/javifdev/twitter-survey/Phase1.scala | Scala | mit | 3,104
package com.datlinq.datafiniti.response
import com.datlinq.datafiniti.response.DatafinitiError._
import org.scalatest.FunSuite
/**
* Created by Tom Lous on 05/09/2017.
* Copyright © 2017 Datlinq B.V..
*/
class NoRedirectFromDownloadTest extends FunSuite {
val e = NoRedirectFromDownload("http://datlinq.com")
test("testException") {
assert(e.exception.getMessage.contains(e.url))
}
test("testUrl") {
assert(e.message.contains(e.url))
}
test("testToString") {
assert(e.message === e.toString)
}
}
datlinq/scalafiniti | src/test/scala/com/datlinq/datafiniti/response/NoRedirectFromDownloadTest.scala | Scala | mit | 536
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import org.apache.spark.JobExecutionStatus
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.{AccumulableInfo, TaskInfo}
import org.apache.spark.util.collection.OpenHashSet
import scala.collection.mutable
import scala.collection.mutable.HashMap
private[spark] object UIData {
class ExecutorSummary {
var taskTime : Long = 0 // task time
var failedTasks : Int = 0 // number of failed tasks
var succeededTasks : Int = 0 // number of succeeded tasks
var inputBytes : Long = 0
var inputRecords : Long = 0
var outputBytes : Long = 0
var outputRecords : Long = 0
var shuffleRead : Long = 0
var shuffleReadRecords : Long = 0
var shuffleWrite : Long = 0
var shuffleWriteRecords : Long = 0
var memoryBytesSpilled : Long = 0
var diskBytesSpilled : Long = 0
}
class JobUIData(
var jobId: Int = -1,
var submissionTime: Option[Long] = None, // submission time
var completionTime: Option[Long] = None, // completion time
var stageIds: Seq[Int] = Seq.empty,
var jobGroup: Option[String] = None,
var status: JobExecutionStatus = JobExecutionStatus.UNKNOWN,
/* Tasks */
// `numTasks` is a potential underestimate of the true number of tasks that this job will run.
// This may be an underestimate because the job start event references all of the result
// stages' transitive stage dependencies, but some of these stages might be skipped if their
// output is available from earlier runs.
// See https://github.com/apache/spark/pull/3009 for a more extensive discussion.
var numTasks: Int = 0,
var numActiveTasks: Int = 0,
var numCompletedTasks: Int = 0,
var numSkippedTasks: Int = 0,
var numFailedTasks: Int = 0,
/* Stages */
var numActiveStages: Int = 0,
// This needs to be a set instead of a simple count to prevent double-counting of rerun stages:
var completedStageIndices: mutable.HashSet[Int] = new mutable.HashSet[Int](),
var numSkippedStages: Int = 0,
var numFailedStages: Int = 0
)
class StageUIData {
var numActiveTasks: Int = _ // number of active tasks
var numCompleteTasks: Int = _ // number of completed tasks
var completedIndices = new OpenHashSet[Int]() // completed indices
var numFailedTasks: Int = _ // number of failed tasks
var executorRunTime: Long = _ // executor run time
var inputBytes: Long = _
var inputRecords: Long = _
var outputBytes: Long = _
var outputRecords: Long = _
var shuffleReadTotalBytes: Long = _
var shuffleReadRecords : Long = _
var shuffleWriteBytes: Long = _
var shuffleWriteRecords: Long = _
var memoryBytesSpilled: Long = _
var diskBytesSpilled: Long = _
var schedulingPool: String = ""
var description: Option[String] = None
var accumulables = new HashMap[Long, AccumulableInfo]
var taskData = new HashMap[Long, TaskUIData]
var executorSummary = new HashMap[String, ExecutorSummary]
def hasInput: Boolean = inputBytes > 0
def hasOutput: Boolean = outputBytes > 0
def hasShuffleRead: Boolean = shuffleReadTotalBytes > 0
def hasShuffleWrite: Boolean = shuffleWriteBytes > 0
def hasBytesSpilled: Boolean = memoryBytesSpilled > 0 && diskBytesSpilled > 0
}
/**
* These are kept mutable and reused throughout a task's lifetime to avoid excessive reallocation.
*/
case class TaskUIData(
var taskInfo: TaskInfo,
var taskMetrics: Option[TaskMetrics] = None,
var errorMessage: Option[String] = None)
case class ExecutorUIData(
val startTime: Long,
var finishTime: Option[Long] = None,
var finishReason: Option[String] = None)
}
tophua/spark1.52 | core/src/main/scala/org/apache/spark/ui/jobs/UIData.scala | Scala | apache-2.0 | 4,973
class a {
def foo() = a match {
<caret>
}
}
/*
case
*/
JetBrains/intellij-scala | scala/scala-impl/testdata/keywordCompletion/modifiers/defCase.scala | Scala | apache-2.0 | 62
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon May 19 15:52:24 EDT 2014
* @see LICENSE (MIT style license file).
*
* @see www.netlib.org/lapack/lawnspdf/lawn03.pdf
* @see www.netlib.org/lapack/lawns/lawn11.ps
* @see fortranwiki.org/fortran/show/svd
* @see www.math.pitt.edu/~sussmanm//2071Spring08/lab09/index.html
*/
package scalation.linalgebra
import scala.math.{abs, max, min, sqrt}
import scalation.math.{double_exp, sign}
import scalation.math.ExtremeD.EPSILON
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SVD_2by2` is used to solve Singular Value Decomposition for
* bidiagonal 2-by-2 matrices.
* <p>
* [ f g ]
* [ 0 h ]
* <p>
* @see fortranwiki.org/fortran/show/svd
*
* @param f the first diagonal element
* @param g the super-diagonal element
* @param h the second diagonal element
*/
class SVD_2by2 (f: Double, g: Double, h: Double)
extends SVDecomp
{
private var ssMin = 0.0 // smallest singular values
private var ssMax = 0.0 // largest singular value
private var left = (0.0, 0.0) // left singular vector
private var right = (0.0, 0.0) // right singular vector
private var lt = (0.0, 0.0) // temp left singular vector
private var rt = (0.0, 0.0) // temp right singular vector
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Factor matrix 'a' forming a diagonal matrix consisting of singular
* values and return the singular values in a vector.
*/
override def factor (): Tuple3 [MatrixD, VectorD, MatrixD] =
{
(null, null, null) // FIX
} // factor
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the two singular values (smallest first) for the bidiagonal 2-by-2
* matrix form from the elements f, g and h.
*
* @see LAPACK SUBROUTINE DLAS2 (F, G, H, SSMIN, SSMAX)
*/
def deflate (): VectorD =
{
val fa = abs (f) // absolute value of f
val ga = abs (g) // absolute value of g
val ha = abs (h) // absolute value of h
val fhmn = min (fa, ha) // minimum of fa and ha
val fhmx = max (fa, ha) // maximum of fa and ha
var as = 0.0
var at = 0.0
var au = 0.0
var c = 0.0
if (fhmn =~ 0.0) {
return VectorD (0.0,
if (fhmx =~ 0.0) ga
else max (fhmx, ga) * sqrt (1.0 + (min (fhmx, ga) / max (fhmx, ga))~^2))
} // if
if (ga < fhmx) {
as = 1.0 + fhmn / fhmx
at = (fhmx - fhmn) / fhmx
au = (ga / fhmx)~^2
c = 2.0 / ( sqrt (as * as + au) + sqrt (at * at + au))
return VectorD (fhmn * c, fhmx / c)
} // if
au = fhmx / ga
if (au =~ 0.0) {
return VectorD ((fhmn * fhmx ) / ga, ga)
} // if
as = 1.0 + fhmn / fhmx
at = (fhmx - fhmn ) / fhmx
c = 1.0 / (sqrt (1.0 + (as * au)~^2) + sqrt (1.0 + (at * au)~^2))
VectorD ((fhmn * c) * au * 2.0, ga / (c + c))
} // deflate
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the two singular values (smallest first) for the bidiagonal 2-by-2
* matrix form from the elements f, g and h. Also, return the singular
* vectors.
*
* @see LAPACK SUBROUTINE DLASV2 (F, G, H, SSMIN, SSMAX, SNR, CSR, SNL, CSL)
*/
def deflateV (): Tuple6 [Double, Double, Double, Double, Double, Double] =
{
var ft = f
var fa = abs (f)
var ht = h
var ha = abs (h)
var gt = g
var ga = abs (gt)
var tmp = 0.0
// pmax points to the maximum absolute element of matrix
// pmax = 1 if f largest in absolute values
// pmax = 2 if g largest in absolute values
// pmax = 3 if h largest in absolute values
var pmax = 1
val swap = ha > fa
if (swap) {
pmax = 3
tmp = ft; ft = ht; ht = tmp // swap ft and ht
tmp = fa; fa = ha; ha = tmp // swap fa and ha, now fa >= ha
} // if
if (ga =~ 0.0) return (ha, fa, 0.0, 1.0, 0.0, 1.0) // it's already a diagonal matrix
var gaSmal = true
if (ga > fa) {
pmax = 2
if (fa / ga < EPSILON) { // case of very large ga
gaSmal = false
ssMax = ga
ssMin = if (ha > 1.0) fa / (ga / ha) else (fa / ga) * ha
lt = (1.0, ht / gt)
rt = (1.0, ft / gt)
} // if
} // if
if (gaSmal) { // normal case
val d = fa - ha
var l = if (d == fa) 1.0 else d / fa // copes with infinite f or h (note: 0 <= L <= 1)
val m = gt / ft // note: abs (m) <= 1/macheps
var t = 2.0 - l // note: t >= 1
val mm = m * m
val tt = t * t
val s = sqrt (tt + mm) // note: 1 <= s <= 1 + 1/macheps
val r = if (l =~ 0.0) abs (m)
else sqrt (l * l + mm ) // note: 0 <= r <= 1 + 1/macheps
val a = 0.5 * (s+r) // note: 1 <= a <= 1 + abs (m)
ssMin = ha / a // initial values for singular values
ssMax = fa * a
if (mm =~ 0.0) { // note: m is very tiny
t = if (l =~ 0.0) sign (2.0, ft) * sign (1.0, gt)
else gt / sign (d, ft) + m / t
} else {
t = (m / (s + t) + m / (r + l)) * (1.0 + a)
} // if
l = sqrt (t*t + 4.0)
rt = (2.0 / l, t / l) // initial values for singular vectors
lt = ((rt._1 + rt._2 * m) / a, (ht / ft) * rt._2 / a)
} // if
if (swap) {
left = (rt._2, rt._1)
right = (lt._2, lt._1)
} else {
left = lt
right = rt
} // if
val sigv = correctSigns (pmax) // correct signs for singular values
(sigv._1, sigv._2, right._1, right._2, left._1, left._2)
} // deflateV
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Correct signs of singular values 'ssMin' and 'ssMax'.
* @param pmax
*/
private def correctSigns (pmax: Int): Tuple2 [Double, Double] =
{
val tsign = pmax match {
case 1 => sign (1.0, right._1) * sign (1.0, left._1) * sign (1.0, f)
case 2 => sign (1.0, right._2) * sign (1.0, left._1) * sign (1.0, g)
case 3 => sign (1.0, right._2) * sign (1.0, left._2) * sign (1.0, h)
} // match
(sign (ssMin, tsign * sign (1.0, f) * sign (1.0, h)), sign (ssMax, tsign))
} // correctSigns
} // SVD_2by2 class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `SVD_2by2Test` is used to test the `SVD_2by2` class.
*/
object SVD_2by2Test extends App
{
import Eigen_2by2.eigenvalues
val a = new MatrixD ((2, 2), 1.0, 1.0,
0.0, 2.0)
val svd = new SVD_2by2 (a(0, 0), a(0, 1), a(1, 1))
println ("----------------------------------------")
println ("Test SVD_2by2")
println ("----------------------------------------")
println ("a = " + a)
println ("----------------------------------------")
println ("singular values = " + svd.deflate ())
println ("----------------------------------------")
println ("singular val/vec = " + svd.deflateV ())
println ("----------------------------------------")
println ("Compare to Eigen_2by2")
println ("----------------------------------------")
println ("root of eigenvalues = " + eigenvalues (a.t * a).map (sqrt _))
} // SVD_2by2Test object
NBKlepp/fda | scalation_1.2/src/main/scala/scalation/linalgebra/SVD_2by2.scala | Scala | mit | 8,347
/**
* @author Yuuto
*/
package yuuto.inventorytools.client
import cpw.mods.fml.common.eventhandler.SubscribeEvent
import cpw.mods.fml.common.gameevent.InputEvent
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.client.Minecraft
import yuuto.inventorytools.network.MessageKeyPress
import yuuto.inventorytools.until.ITKeyBinds
import yuuto.inventorytools.util.NBTHelper
import yuuto.inventorytools.util.NBTTags
import yuuto.inventorytools.InventoryTools
import yuuto.inventorytools.client.gui.GuiScreenToolBox
import cpw.mods.fml.client.FMLClientHandler
import net.minecraft.util.ChatComponentText
import yuuto.inventorytools.proxy.ProxyCommon
object InputHandler {
@SubscribeEvent
def handleKeyPress(event:InputEvent.KeyInputEvent){
val player:EntityPlayer = Minecraft.getMinecraft().thePlayer;
if(KeyBindings.TOOL_BOX.isPressed()) {
openToolBoxGui(player);
}
if(KeyBindings.DOLLY_MODE.getIsKeyPressed){
switchDollyMode(player);
}
}
def openToolBoxGui(player:EntityPlayer){
//System.out.println("Checking fool");
if (player.inventory.getCurrentItem() == null)
return;
if (!NBTHelper.hasTag(player.inventory.getCurrentItem(), NBTTags.TOOL_BOX_INV)) {
if (player.inventory.getCurrentItem().getItem() == ProxyCommon.itemToolBox)
player.addChatComponentMessage(new ChatComponentText("No Items in Tool Box"));
return;
}
if (FMLClientHandler.instance().isGUIOpen(classOf[GuiScreenToolBox]))
return;
player.openGui(InventoryTools, 1, player.worldObj, player.posX.asInstanceOf[Int], player.posY.asInstanceOf[Int], player.posZ.asInstanceOf[Int])
}
def switchDollyMode(player:EntityPlayer){
if(player.inventory.getCurrentItem.getItem==ProxyCommon.itemDolly || player.inventory.getCurrentItem.getItem==ProxyCommon.itemDollyAdvanced) {
InventoryTools.network.sendToServer(new MessageKeyPress(ITKeyBinds.DOLLY_MODE, player.worldObj.provider.dimensionId, player.posX.toInt, player.posY.toInt, player.posZ.toInt))
}
}
}
AnimeniacYuuto/InventoryTools | src/main/scala/yuuto/inventorytools/client/InputHandler.scala | Scala | gpl-3.0 | 2,046
package models
import java.util.Date
import play.api.libs.functional.syntax._
import play.api.libs.json.{Reads, JsPath, Writes}
import scala.collection.immutable.HashMap
case class Score(
timestamp: Date,
designScore: Int,
docScore: Int,
supportScore: Int,
maturityScore: Int,
feedback: String,
karma: Int) {
def toMap(): HashMap[String, String] = {
HashMap("scoreDocumentation" -> Integer.toString(docScore),
"scoreMaturity" -> Integer.toString(maturityScore),
"scoreDesign" -> Integer.toString(designScore),
"scoreSupport" -> Integer.toString(supportScore),
"feedback" -> feedback)
}
}
object Score {
implicit val scoreWrites: Writes[Score] = (
(JsPath \ "timestamp").write[Date] and
(JsPath \ "designScore").write[Int] and
(JsPath \ "docScore").write[Int] and
(JsPath \ "supportScore").write[Int] and
(JsPath \ "maturityScore").write[Int] and
(JsPath \ "feedback").write[String] and
(JsPath \ "karma").write[Int]
)(unlift(Score.unapply))
implicit val scoreReads: Reads[Score] = (
(JsPath \ "timestamp").read[Date] and
(JsPath \ "designScore").read[Int] and
(JsPath \ "docScore").read[Int] and
(JsPath \ "supportScore").read[Int] and
(JsPath \ "maturityScore").read[Int] and
(JsPath \ "feedback").read[String] and
(JsPath \ "karma").read[Int]
)(Score.apply _)
}
case class Feedback(
user: User,
score: Score
)
gitlinks/gitrank-web | app/models/Score.scala | Scala | apache-2.0 | 1,631
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package graph
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import java.io.{ DataInput, DataOutput }
import scala.collection.JavaConversions._
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.io.Writable
import org.apache.spark.{ HashPartitioner, Partitioner, SparkConf }
import org.apache.spark.rdd.RDD
import com.asakusafw.bridge.stage.StageInfo
import com.asakusafw.lang.compiler.model.description.{ ClassDescription, ImmediateDescription }
import com.asakusafw.lang.compiler.model.graph.{ Groups, MarkerOperator }
import com.asakusafw.lang.compiler.model.testing.OperatorExtractor
import com.asakusafw.lang.compiler.planning.{ PlanBuilder, PlanMarker }
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.runtime.value.IntOption
import com.asakusafw.spark.compiler.graph.AggregateClassBuilderSpec._
import com.asakusafw.spark.compiler.planning.{ PartitionGroupInfo, SubPlanInfo, SubPlanOutputInfo }
import com.asakusafw.spark.compiler.spi.NodeCompiler
import com.asakusafw.spark.runtime._
import com.asakusafw.spark.runtime.graph.{
Broadcast,
BroadcastId,
Aggregate,
ParallelCollectionSource,
SortOrdering,
Source
}
import com.asakusafw.spark.runtime.io.WritableSerDe
import com.asakusafw.spark.runtime.rdd.{ BranchKey, ShuffleKey }
import com.asakusafw.vocabulary.flow.processor.PartialAggregation
import com.asakusafw.vocabulary.operator.Fold
@RunWith(classOf[JUnitRunner])
class AggregateClassBuilderSpecTest extends AggregateClassBuilderSpec
class AggregateClassBuilderSpec
extends FlatSpec
with ClassServerForAll
with SparkForAll
with FlowIdForEach
with UsingCompilerContext
with JobContextSugar
with RoundContextSugar {
behavior of classOf[AggregateClassBuilder].getSimpleName
override def configure(conf: SparkConf): SparkConf = {
conf.set(Props.Parallelism, 8.toString)
super.configure(conf)
}
for {
(dataSize, numPartitions) <- Seq(
(PartitionGroupInfo.DataSize.TINY, 1),
(PartitionGroupInfo.DataSize.SMALL, 4),
(PartitionGroupInfo.DataSize.REGULAR, 8),
(PartitionGroupInfo.DataSize.LARGE, 16),
(PartitionGroupInfo.DataSize.HUGE, 32))
} {
it should s"build aggregate class with DataSize.${dataSize}" in {
val foosMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
val operator = OperatorExtractor
.extract(classOf[Fold], classOf[FoldOperator], "fold")
.input("foos", ClassDescription.of(classOf[Foo]), foosMarker.getOutput)
.output("result", ClassDescription.of(classOf[Foo]))
.argument("n", ImmediateDescription.of(10))
.build()
val resultMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("result").connect(resultMarker.getInput)
val plan = PlanBuilder.from(Seq(operator))
.add(
Seq(foosMarker),
Seq(resultMarker)).build().getPlan()
assert(plan.getElements.size === 1)
val subplan = plan.getElements.head
subplan.putAttr(
new SubPlanInfo(_,
SubPlanInfo.DriverType.AGGREGATE,
Seq.empty[SubPlanInfo.DriverOption],
operator))
val foosInput = subplan.findIn(foosMarker)
subplan.findOut(resultMarker)
.putAttr(
new SubPlanOutputInfo(_,
SubPlanOutputInfo.OutputType.AGGREGATED,
Seq.empty[SubPlanOutputInfo.OutputOption],
Groups.parse(Seq("i")),
operator))
.putAttr(_ => new PartitionGroupInfo(dataSize))
implicit val context = newNodeCompilerContext(flowId, classServer.root.toFile)
context.branchKeys.getField(foosInput.getOperator.getSerialNumber)
val compiler = NodeCompiler.get(subplan)
val thisType = compiler.compile(subplan)
context.addClass(context.branchKeys)
context.addClass(context.broadcastIds)
val cls = classServer.loadClass(thisType).asSubclass(classOf[Aggregate[Foo, Foo]])
val branchKeyCls = classServer.loadClass(context.branchKeys.thisType.getClassName)
def getBranchKey(marker: MarkerOperator): BranchKey = {
val sn = subplan.getOperators.toSet
.find(_.getOriginalSerialNumber == marker.getOriginalSerialNumber).get.getSerialNumber
branchKeyCls.getField(context.branchKeys.getField(sn)).get(null).asInstanceOf[BranchKey]
}
implicit val jobContext = newJobContext(sc)
val foos =
new ParallelCollectionSource(getBranchKey(foosMarker), (0 until 10))("foos")
.map(getBranchKey(foosMarker))(Foo.intToFoo)
val aggregate = cls.getConstructor(
classOf[Seq[(Source, BranchKey)]],
classOf[Option[SortOrdering]],
classOf[Partitioner],
classOf[Map[BroadcastId, Broadcast[_]]],
classOf[JobContext])
.newInstance(
Seq((foos, getBranchKey(foosMarker))),
Option(new Foo.SortOrdering()),
new HashPartitioner(2),
Map.empty,
jobContext)
assert(aggregate.branchKeys === Set(resultMarker).map(getBranchKey))
assert(aggregate.partitioners(getBranchKey(resultMarker)).get.numPartitions === numPartitions)
val rc = newRoundContext()
val results = aggregate.compute(rc)
val result = Await.result(
results(getBranchKey(resultMarker))
.map { rddF =>
val rdd = rddF()
assert(rdd.partitions.size === numPartitions)
rdd.map {
case (_, foo: Foo) => (foo.i.get, foo.sum.get)
}.collect.toSeq.sortBy(_._1)
}, Duration.Inf)
assert(result === Seq(
(0, (0 until 10 by 2).map(i => i * 100).sum + 4 * 10),
(1, (1 until 10 by 2).map(i => i * 100).sum + 4 * 10)))
}
it should s"build aggregate class with DataSize.${dataSize} with grouping is empty" in {
implicit val jobContext = newJobContext(sc)
val foosMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
val operator = OperatorExtractor
.extract(classOf[Fold], classOf[FoldOperator], "fold")
.input("foos", ClassDescription.of(classOf[Foo]), foosMarker.getOutput)
.output("result", ClassDescription.of(classOf[Foo]))
.argument("n", ImmediateDescription.of(10))
.build()
val resultMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.CHECKPOINT).build()
operator.findOutput("result").connect(resultMarker.getInput)
val plan = PlanBuilder.from(Seq(operator))
.add(
Seq(foosMarker),
Seq(resultMarker)).build().getPlan()
assert(plan.getElements.size === 1)
val subplan = plan.getElements.head
subplan.putAttr(
new SubPlanInfo(_,
SubPlanInfo.DriverType.AGGREGATE,
Seq.empty[SubPlanInfo.DriverOption],
operator))
val foosInput = subplan.findIn(foosMarker)
subplan.findOut(resultMarker)
.putAttr(
new SubPlanOutputInfo(_,
SubPlanOutputInfo.OutputType.AGGREGATED,
Seq.empty[SubPlanOutputInfo.OutputOption],
Groups.parse(Seq.empty[String]),
operator))
.putAttr(_ => new PartitionGroupInfo(dataSize))
implicit val context = newNodeCompilerContext(flowId, classServer.root.toFile)
context.branchKeys.getField(foosInput.getOperator.getSerialNumber)
val compiler = NodeCompiler.get(subplan)
val thisType = compiler.compile(subplan)
context.addClass(context.branchKeys)
context.addClass(context.broadcastIds)
val cls = classServer.loadClass(thisType).asSubclass(classOf[Aggregate[Foo, Foo]])
val branchKeyCls = classServer.loadClass(context.branchKeys.thisType.getClassName)
def getBranchKey(marker: MarkerOperator): BranchKey = {
val sn = subplan.getOperators.toSet
.find(_.getOriginalSerialNumber == marker.getOriginalSerialNumber).get.getSerialNumber
branchKeyCls.getField(context.branchKeys.getField(sn)).get(null).asInstanceOf[BranchKey]
}
val foos =
new ParallelCollectionSource(getBranchKey(foosMarker), (0 until 10))("foos")
.map(getBranchKey(foosMarker))(Foo.intToFoo)
.map(getBranchKey(foosMarker)) {
foo: (ShuffleKey, Foo) =>
(new ShuffleKey(Array.emptyByteArray, Array.emptyByteArray), foo._2)
}
val aggregate = cls.getConstructor(
classOf[Seq[(Source, BranchKey)]],
classOf[Option[SortOrdering]],
classOf[Partitioner],
classOf[Map[BroadcastId, Broadcast[_]]],
classOf[JobContext])
.newInstance(
Seq((foos, getBranchKey(foosMarker))),
None,
new HashPartitioner(2),
Map.empty,
jobContext)
assert(aggregate.branchKeys === Set(resultMarker).map(getBranchKey))
assert(aggregate.partitioners(getBranchKey(resultMarker)).get.numPartitions === 1)
val rc = newRoundContext()
val results = aggregate.compute(rc)
val result = Await.result(
results(getBranchKey(resultMarker))
.map { rddF =>
val rdd = rddF()
assert(rdd.partitions.size === 1)
rdd.map {
case (_, foo: Foo) => (foo.i.get, foo.sum.get)
}.collect.toSeq.sortBy(_._1)
}, Duration.Inf)
assert(result.size === 1)
assert(result(0)._2 === (0 until 10).map(i => i * 100).sum + 9 * 10)
}
}
}
object AggregateClassBuilderSpec {
class Foo extends DataModel[Foo] with Writable {
val i = new IntOption()
val sum = new IntOption()
override def reset(): Unit = {
i.setNull()
sum.setNull()
}
override def copyFrom(other: Foo): Unit = {
i.copyFrom(other.i)
sum.copyFrom(other.sum)
}
override def readFields(in: DataInput): Unit = {
i.readFields(in)
sum.readFields(in)
}
override def write(out: DataOutput): Unit = {
i.write(out)
sum.write(out)
}
def getIOption: IntOption = i
def getSumOption: IntOption = sum
}
object Foo {
def intToFoo: Int => (_, Foo) = {
lazy val foo = new Foo()
{ i =>
foo.i.modify(i % 2)
foo.sum.modify(i * 100)
val shuffleKey = new ShuffleKey(
WritableSerDe.serialize(foo.i),
Array.emptyByteArray)
(shuffleKey, foo)
}
}
class SortOrdering extends Ordering[ShuffleKey] {
override def compare(x: ShuffleKey, y: ShuffleKey): Int = {
IntOption.compareBytes(x.grouping, 0, x.grouping.length, y.grouping, 0, y.grouping.length)
}
}
}
class FoldOperator {
@Fold(partialAggregation = PartialAggregation.PARTIAL)
def fold(acc: Foo, value: Foo, n: Int): Unit = {
acc.sum.add(value.sum)
acc.sum.add(n)
}
}
}
ueshin/asakusafw-spark | compiler/src/test/scala/com/asakusafw/spark/compiler/graph/AggregateClassBuilderSpec.scala | Scala | apache-2.0 | 11,957
package handlers.server
import java.nio.ByteOrder
import database.{Characters, Classes, Races}
import handlers.GameClient
import handlers.packets.{PacketWriter, ServerCodes}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
/**
* Created by franblas on 07/04/17.
*/
class CharacterOverview(realm: Int, gameClient: GameClient) {
private val characters = new Characters()
private val classes = new Classes()
private val races = new Races()
def process(): Future[Array[Byte]] = {
val writer = new PacketWriter(ServerCodes.characterOverview)
val loginName = gameClient.loginName
writer.fillString(loginName, 24)
characters.getCharacters(loginName, realm).map(result => {
if (result.isEmpty) {
writer.fill(0x0, 1880)
} else {
val firstAccountSlot = realm match {
case 0x01 => 100
case 0x02 => 200
case 0x03 => 300
}
for (i <- firstAccountSlot until firstAccountSlot+10) {
var written: Boolean = false
result.foreach(character => {
val characterAccountSlot = character.accountSlot
if (characterAccountSlot == i) {
writer.fill(0x0, 4)
writer.fillString(character.name, 24)
writer.writeByte(0x01)
writer.writeByte(character.eyeSize.toByte)
writer.writeByte(character.lipSize.toByte)
writer.writeByte(character.eyeColor.toByte)
writer.writeByte(character.hairColor.toByte)
writer.writeByte(character.faceType.toByte)
writer.writeByte(character.hairStyle.toByte)
writer.writeByte(0x0)
writer.writeByte(0x0)
writer.writeByte(character.customMode.toByte)
writer.writeByte(character.moodType.toByte)
writer.fill(0x0, 13)
// TODO
// Region reg = WorldMgr.GetRegion((ushort) characters[j].Region);
// if (reg != null)
// {
// var description = m_gameClient.GetTranslatedSpotDescription(reg, characters[j].Xpos, characters[j].Ypos, characters[j].Zpos);
// pak.FillString(description, 24);
// }
// else
// pak.Fill(0x0, 24); //No known location
writer.fill(0x0, 24) // No known location
// TODO: try to get rid of await
Await.result(classes.getCharClass(character.charClass).map(result => {
//writer.fillString(result.head.getString("char_class_name"), 24)
writer.fillString(result.head.characterClassName, 24)
}), 5000 millis)
// TODO: try to get rid of await
Await.result(races.getRace(character.race).map(result => {
//writer.fillString(result.head.getString("race_id"), 24)
writer.fillString(result.head.raceId, 24)
}), 5000 millis)
writer.writeByte(character.level.toByte)
writer.writeByte(character.charClass.toByte)
writer.writeByte(character.realm.toByte)
val flag: Int = (((character.race & 0x10) << 2) + (character.race & 0x0F)) | (character.gender << 4)
writer.writeByte(flag.toByte)
writer.writeShort(character.creationModel.toShort, ByteOrder.LITTLE_ENDIAN)
writer.writeByte(character.region.toByte)
// TODO
// if (reg == null || (int) m_gameClient.ClientType > reg.Expansion)
// pak.WriteByte(0x00);
// else
// pak.WriteByte((byte) (reg.Expansion + 1)); //0x04-Cata zone, 0x05 - DR zone
writer.writeByte(0x00)
writer.writeInt(0x00) // Internal database ID
writer.writeByte(character.strength.toByte)
writer.writeByte(character.dexterity.toByte)
writer.writeByte(character.constitution.toByte)
writer.writeByte(character.quickness.toByte)
writer.writeByte(character.intelligence.toByte)
writer.writeByte(character.piety.toByte)
writer.writeByte(character.empathy.toByte)
writer.writeByte(character.charisma.toByte)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeShort(0x0, ByteOrder.LITTLE_ENDIAN)
writer.writeByte(0xFF.toByte)
writer.writeByte(0xFF.toByte)
writer.writeByte(0x00)
writer.writeByte(character.constitution.toByte)
written = true
}
})
if (!written) writer.fill(0x0, 188)
}
}
writer.fill(0x0, 94)
writer.getFinalPacket()
})
}
}
franblas/NAOC | src/main/scala/handlers/server/CharacterOverview.scala | Scala | mit | 5,994
package io.taig.android.unit
trait implicits extends syntax.all
object implicits extends implicits
Taig/Toolbelt | unit/src/main/scala/io/taig/android/unit/implicits.scala | Scala | mit | 101
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.serialization
import com.esotericsoftware.kryo.io.{
Input,
KryoDataInput,
KryoDataOutput,
Output
}
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import it.unimi.dsi.fastutil.io.{ FastByteArrayInputStream, FastByteArrayOutputStream }
import org.apache.avro.io.{ BinaryDecoder, BinaryEncoder, DecoderFactory, EncoderFactory }
import org.apache.avro.specific.{ SpecificDatumReader, SpecificDatumWriter, SpecificRecord }
import org.apache.hadoop.io.Writable
import org.apache.spark.serializer.KryoRegistrator
import org.bdgenomics.utils.misc.Logging
import scala.reflect.ClassTag
case class InputStreamWithDecoder(size: Int) {
val buffer = new Array[Byte](size)
val stream = new FastByteArrayInputStream(buffer)
val decoder = DecoderFactory.get().directBinaryDecoder(stream, null.asInstanceOf[BinaryDecoder])
}
// NOTE: This class is not thread-safe; however, Spark guarantees that only a single thread will access it.
class AvroSerializer[T <: SpecificRecord: ClassTag] extends Serializer[T] {
val reader = new SpecificDatumReader[T](scala.reflect.classTag[T].runtimeClass.asInstanceOf[Class[T]])
val writer = new SpecificDatumWriter[T](scala.reflect.classTag[T].runtimeClass.asInstanceOf[Class[T]])
var in = InputStreamWithDecoder(1024)
val outstream = new FastByteArrayOutputStream()
val encoder = EncoderFactory.get().directBinaryEncoder(outstream, null.asInstanceOf[BinaryEncoder])
setAcceptsNull(false)
def write(kryo: Kryo, kryoOut: Output, record: T) = {
outstream.reset()
writer.write(record, encoder)
kryoOut.writeInt(outstream.array.length, true)
kryoOut.write(outstream.array)
}
def read(kryo: Kryo, kryoIn: Input, klazz: Class[T]): T = this.synchronized {
val len = kryoIn.readInt(true)
if (len > in.size) {
in = InputStreamWithDecoder(len + 1024)
}
in.stream.reset()
// Read Kryo bytes into input buffer
kryoIn.readBytes(in.buffer, 0, len)
// Read the Avro object from the buffer
reader.read(null.asInstanceOf[T], in.decoder)
}
}
/**
* A Kryo serializer for Hadoop writables.
*
* Lifted from the Apache Spark user email list
* (http://apache-spark-user-list.1001560.n3.nabble.com/Hadoop-Writable-and-Spark-serialization-td5721.html)
* which indicates that it was originally copied from Shark itself, back when
* Spark 0.9 was the state of the art.
*
* @tparam T The class to serialize, which implements the Writable interface.
*/
class WritableSerializer[T <: Writable] extends Serializer[T] {
override def write(kryo: Kryo, output: Output, writable: T) {
writable.write(new KryoDataOutput(output))
}
override def read(kryo: Kryo, input: Input, cls: java.lang.Class[T]): T = {
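    // Hadoop Writables are required to provide a no-arg constructor, so reflective instantiation is safe here.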
val writable = cls.newInstance()
writable.readFields(new KryoDataInput(input))
writable
}
}
class ADAMKryoRegistrator extends KryoRegistrator with Logging {
override def registerClasses(kryo: Kryo) {
// Register Avro classes using fully qualified class names
// Sort alphabetically and add blank lines between packages
// htsjdk.samtools
kryo.register(classOf[htsjdk.samtools.CigarElement])
kryo.register(classOf[htsjdk.samtools.CigarOperator])
kryo.register(classOf[htsjdk.samtools.Cigar])
kryo.register(classOf[htsjdk.samtools.SAMSequenceDictionary])
kryo.register(classOf[htsjdk.samtools.SAMFileHeader])
kryo.register(classOf[htsjdk.samtools.SAMSequenceRecord])
// htsjdk.variant.vcf
kryo.register(classOf[htsjdk.variant.vcf.VCFContigHeaderLine])
kryo.register(classOf[htsjdk.variant.vcf.VCFFilterHeaderLine])
kryo.register(classOf[htsjdk.variant.vcf.VCFFormatHeaderLine])
kryo.register(classOf[htsjdk.variant.vcf.VCFInfoHeaderLine])
kryo.register(classOf[htsjdk.variant.vcf.VCFHeader])
kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLine])
kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLineCount])
kryo.register(classOf[htsjdk.variant.vcf.VCFHeaderLineType])
kryo.register(Class.forName("htsjdk.variant.vcf.VCFCompoundHeaderLine$SupportedHeaderLineType"))
// java.lang
kryo.register(classOf[java.lang.Class[_]])
// java.util
kryo.register(classOf[java.util.ArrayList[_]])
kryo.register(classOf[java.util.LinkedHashMap[_, _]])
kryo.register(classOf[java.util.LinkedHashSet[_]])
kryo.register(classOf[java.util.HashMap[_, _]])
kryo.register(classOf[java.util.HashSet[_]])
// org.apache.avro
kryo.register(Class.forName("org.apache.avro.Schema$RecordSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$Field"))
kryo.register(Class.forName("org.apache.avro.Schema$Field$Order"))
kryo.register(Class.forName("org.apache.avro.Schema$UnionSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$Type"))
kryo.register(Class.forName("org.apache.avro.Schema$LockableArrayList"))
kryo.register(Class.forName("org.apache.avro.Schema$BooleanSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$NullSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$StringSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$IntSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$FloatSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$EnumSchema"))
kryo.register(Class.forName("org.apache.avro.Schema$Name"))
kryo.register(Class.forName("org.apache.avro.Schema$LongSchema"))
kryo.register(Class.forName("org.apache.avro.generic.GenericData$Array"))
// org.apache.hadoop.conf
kryo.register(classOf[org.apache.hadoop.conf.Configuration],
new WritableSerializer[org.apache.hadoop.conf.Configuration])
kryo.register(classOf[org.apache.hadoop.yarn.conf.YarnConfiguration],
new WritableSerializer[org.apache.hadoop.yarn.conf.YarnConfiguration])
// org.apache.hadoop.io
kryo.register(classOf[org.apache.hadoop.io.Text])
kryo.register(classOf[org.apache.hadoop.io.LongWritable])
// org.bdgenomics.adam.algorithms.consensus
kryo.register(classOf[org.bdgenomics.adam.algorithms.consensus.Consensus])
// org.bdgenomics.adam.converters
kryo.register(classOf[org.bdgenomics.adam.converters.FastaConverter.FastaDescriptionLine])
kryo.register(classOf[org.bdgenomics.adam.converters.FragmentCollector])
// org.bdgenomics.adam.models
kryo.register(classOf[org.bdgenomics.adam.models.Coverage])
kryo.register(classOf[org.bdgenomics.adam.models.IndelTable])
kryo.register(classOf[org.bdgenomics.adam.models.MdTag])
kryo.register(classOf[org.bdgenomics.adam.models.MultiContigNonoverlappingRegions])
kryo.register(classOf[org.bdgenomics.adam.models.NonoverlappingRegions])
kryo.register(classOf[org.bdgenomics.adam.models.RecordGroup])
kryo.register(classOf[org.bdgenomics.adam.models.RecordGroupDictionary])
kryo.register(classOf[org.bdgenomics.adam.models.ReferencePosition],
new org.bdgenomics.adam.models.ReferencePositionSerializer)
kryo.register(classOf[org.bdgenomics.adam.models.ReferenceRegion])
kryo.register(classOf[org.bdgenomics.adam.models.SAMFileHeaderWritable])
kryo.register(classOf[org.bdgenomics.adam.models.SequenceDictionary])
kryo.register(classOf[org.bdgenomics.adam.models.SequenceRecord])
kryo.register(classOf[org.bdgenomics.adam.models.SnpTable],
new org.bdgenomics.adam.models.SnpTableSerializer)
kryo.register(classOf[org.bdgenomics.adam.models.VariantContext],
new org.bdgenomics.adam.models.VariantContextSerializer)
// org.bdgenomics.adam.rdd
kryo.register(classOf[org.bdgenomics.adam.rdd.GenomeBins])
// IntervalArray registrations for org.bdgenomics.adam.rdd
kryo.register(classOf[org.bdgenomics.adam.rdd.read.AlignmentRecordArray],
new org.bdgenomics.adam.rdd.read.AlignmentRecordArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.feature.CoverageArray],
new org.bdgenomics.adam.rdd.feature.CoverageArraySerializer(kryo))
kryo.register(classOf[org.bdgenomics.adam.rdd.feature.FeatureArray],
new org.bdgenomics.adam.rdd.feature.FeatureArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.fragment.FragmentArray],
new org.bdgenomics.adam.rdd.fragment.FragmentArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.variant.GenotypeArray],
new org.bdgenomics.adam.rdd.variant.GenotypeArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.contig.NucleotideContigFragmentArray],
new org.bdgenomics.adam.rdd.contig.NucleotideContigFragmentArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.variant.VariantArray],
new org.bdgenomics.adam.rdd.variant.VariantArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.variant.VariantContextArray],
new org.bdgenomics.adam.rdd.variant.VariantContextArraySerializer)
// org.bdgenomics.adam.rdd.read
kryo.register(classOf[org.bdgenomics.adam.rdd.read.FlagStatMetrics])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.DuplicateMetrics])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.SingleReadBucket],
new org.bdgenomics.adam.rdd.read.SingleReadBucketSerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.read.ReferencePositionPair],
new org.bdgenomics.adam.rdd.read.ReferencePositionPairSerializer)
// org.bdgenomics.adam.rdd.read.realignment
kryo.register(classOf[org.bdgenomics.adam.rdd.read.realignment.IndelRealignmentTarget],
new org.bdgenomics.adam.rdd.read.realignment.IndelRealignmentTargetSerializer)
kryo.register(classOf[scala.Array[org.bdgenomics.adam.rdd.read.realignment.IndelRealignmentTarget]],
new org.bdgenomics.adam.rdd.read.realignment.IndelRealignmentTargetArraySerializer)
kryo.register(classOf[org.bdgenomics.adam.rdd.read.realignment.TargetSet],
new org.bdgenomics.adam.rdd.read.realignment.TargetSetSerializer)
// org.bdgenomics.adam.rdd.read.recalibration.
kryo.register(classOf[org.bdgenomics.adam.rdd.read.recalibration.CovariateKey])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.recalibration.CycleCovariate])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.recalibration.DinucCovariate])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.recalibration.RecalibrationTable])
kryo.register(classOf[org.bdgenomics.adam.rdd.read.recalibration.Observation])
// org.bdgenomics.adam.rich
kryo.register(classOf[org.bdgenomics.adam.rich.RichAlignmentRecord])
kryo.register(classOf[org.bdgenomics.adam.rich.RichVariant])
// org.bdgenomics.adam.util
kryo.register(classOf[org.bdgenomics.adam.util.ReferenceContigMap],
new org.bdgenomics.adam.util.ReferenceContigMapSerializer)
kryo.register(classOf[org.bdgenomics.adam.util.TwoBitFile],
new org.bdgenomics.adam.util.TwoBitFileSerializer)
// org.bdgenomics.formats.avro
kryo.register(classOf[org.bdgenomics.formats.avro.AlignmentRecord],
new AvroSerializer[org.bdgenomics.formats.avro.AlignmentRecord])
kryo.register(classOf[org.bdgenomics.formats.avro.Contig],
new AvroSerializer[org.bdgenomics.formats.avro.Contig])
kryo.register(classOf[org.bdgenomics.formats.avro.Dbxref],
new AvroSerializer[org.bdgenomics.formats.avro.Dbxref])
kryo.register(classOf[org.bdgenomics.formats.avro.Feature],
new AvroSerializer[org.bdgenomics.formats.avro.Feature])
kryo.register(classOf[org.bdgenomics.formats.avro.Fragment],
new AvroSerializer[org.bdgenomics.formats.avro.Fragment])
kryo.register(classOf[org.bdgenomics.formats.avro.Genotype],
new AvroSerializer[org.bdgenomics.formats.avro.Genotype])
kryo.register(classOf[org.bdgenomics.formats.avro.GenotypeAllele])
kryo.register(classOf[org.bdgenomics.formats.avro.GenotypeType])
kryo.register(classOf[org.bdgenomics.formats.avro.NucleotideContigFragment],
new AvroSerializer[org.bdgenomics.formats.avro.NucleotideContigFragment])
kryo.register(classOf[org.bdgenomics.formats.avro.OntologyTerm],
new AvroSerializer[org.bdgenomics.formats.avro.OntologyTerm])
kryo.register(classOf[org.bdgenomics.formats.avro.ProcessingStep],
new AvroSerializer[org.bdgenomics.formats.avro.ProcessingStep])
kryo.register(classOf[org.bdgenomics.formats.avro.Read],
new AvroSerializer[org.bdgenomics.formats.avro.Read])
kryo.register(classOf[org.bdgenomics.formats.avro.RecordGroup],
new AvroSerializer[org.bdgenomics.formats.avro.RecordGroup])
kryo.register(classOf[org.bdgenomics.formats.avro.Sample],
new AvroSerializer[org.bdgenomics.formats.avro.Sample])
kryo.register(classOf[org.bdgenomics.formats.avro.Sequence],
new AvroSerializer[org.bdgenomics.formats.avro.Sequence])
kryo.register(classOf[org.bdgenomics.formats.avro.Slice],
new AvroSerializer[org.bdgenomics.formats.avro.Slice])
kryo.register(classOf[org.bdgenomics.formats.avro.Strand])
kryo.register(classOf[org.bdgenomics.formats.avro.TranscriptEffect],
new AvroSerializer[org.bdgenomics.formats.avro.TranscriptEffect])
kryo.register(classOf[org.bdgenomics.formats.avro.Variant],
new AvroSerializer[org.bdgenomics.formats.avro.Variant])
kryo.register(classOf[org.bdgenomics.formats.avro.VariantAnnotation],
new AvroSerializer[org.bdgenomics.formats.avro.VariantAnnotation])
kryo.register(classOf[org.bdgenomics.formats.avro.VariantAnnotationMessage])
kryo.register(classOf[org.bdgenomics.formats.avro.VariantCallingAnnotations],
new AvroSerializer[org.bdgenomics.formats.avro.VariantCallingAnnotations])
// org.codehaus.jackson.node
kryo.register(classOf[org.codehaus.jackson.node.NullNode])
kryo.register(classOf[org.codehaus.jackson.node.BooleanNode])
kryo.register(classOf[org.codehaus.jackson.node.TextNode])
// org.apache.spark
try {
val cls = Class.forName("org.apache.spark.internal.io.FileCommitProtocol$TaskCommitMessage")
kryo.register(cls)
} catch {
case cnfe: java.lang.ClassNotFoundException => {
log.info("Did not find Spark internal class. This is expected for Spark 1.")
}
}
kryo.register(classOf[org.apache.spark.sql.catalyst.expressions.UnsafeRow])
kryo.register(Class.forName("org.apache.spark.sql.types.BooleanType$"))
kryo.register(Class.forName("org.apache.spark.sql.types.DoubleType$"))
kryo.register(Class.forName("org.apache.spark.sql.types.FloatType$"))
kryo.register(Class.forName("org.apache.spark.sql.types.IntegerType$"))
kryo.register(Class.forName("org.apache.spark.sql.types.LongType$"))
kryo.register(Class.forName("org.apache.spark.sql.types.StringType$"))
kryo.register(classOf[org.apache.spark.sql.types.ArrayType])
kryo.register(classOf[org.apache.spark.sql.types.MapType])
kryo.register(classOf[org.apache.spark.sql.types.Metadata])
kryo.register(classOf[org.apache.spark.sql.types.StructField])
kryo.register(classOf[org.apache.spark.sql.types.StructType])
// scala
kryo.register(classOf[scala.Array[scala.Array[Byte]]])
kryo.register(classOf[scala.Array[htsjdk.variant.vcf.VCFHeader]])
kryo.register(classOf[scala.Array[java.lang.Long]])
kryo.register(classOf[scala.Array[java.lang.Object]])
kryo.register(classOf[scala.Array[org.apache.spark.sql.catalyst.InternalRow]])
kryo.register(classOf[scala.Array[org.apache.spark.sql.types.StructField]])
kryo.register(classOf[scala.Array[org.apache.spark.sql.types.StructType]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.AlignmentRecord]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Contig]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Dbxref]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Feature]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Fragment]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Genotype]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.GenotypeAllele]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.OntologyTerm]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.NucleotideContigFragment]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Read]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.RecordGroup]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Sample]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Sequence]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Slice]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.TranscriptEffect]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.Variant]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.VariantAnnotation]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.VariantAnnotationMessage]])
kryo.register(classOf[scala.Array[org.bdgenomics.formats.avro.VariantCallingAnnotations]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.algorithms.consensus.Consensus]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.models.Coverage]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.models.ReferencePosition]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.models.ReferenceRegion]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.models.SequenceRecord]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.models.VariantContext]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.rdd.read.recalibration.CovariateKey]])
kryo.register(classOf[scala.Array[org.bdgenomics.adam.rich.RichAlignmentRecord]])
kryo.register(classOf[scala.Array[scala.collection.Seq[_]]])
kryo.register(classOf[scala.Array[Int]])
kryo.register(classOf[scala.Array[Long]])
kryo.register(classOf[scala.Array[String]])
kryo.register(classOf[scala.Array[Option[_]]])
kryo.register(Class.forName("scala.Tuple2$mcCC$sp"))
// scala.collection
kryo.register(Class.forName("scala.collection.Iterator$$anon$11"))
kryo.register(Class.forName("scala.collection.Iterator$$anonfun$toStream$1"))
// scala.collection.convert
kryo.register(Class.forName("scala.collection.convert.Wrappers$"))
// scala.collection.immutable
kryo.register(classOf[scala.collection.immutable.::[_]])
kryo.register(classOf[scala.collection.immutable.Range])
kryo.register(Class.forName("scala.collection.immutable.Stream$Cons"))
kryo.register(Class.forName("scala.collection.immutable.Stream$Empty$"))
kryo.register(Class.forName("scala.collection.immutable.Set$EmptySet$"))
// scala.collection.mutable
kryo.register(classOf[scala.collection.mutable.ArrayBuffer[_]])
kryo.register(classOf[scala.collection.mutable.ListBuffer[_]])
kryo.register(Class.forName("scala.collection.mutable.ListBuffer$$anon$1"))
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofInt])
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofLong])
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofByte])
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofChar])
kryo.register(classOf[scala.collection.mutable.WrappedArray.ofRef[_]])
// scala.math
kryo.register(scala.math.Numeric.LongIsIntegral.getClass)
// This seems to be necessary when serializing a RangePartitioner, which writes out a ClassTag:
//
// https://github.com/apache/spark/blob/v1.5.2/core/src/main/scala/org/apache/spark/Partitioner.scala#L220
//
// See also:
//
// https://mail-archives.apache.org/mod_mbox/spark-user/201504.mbox/%3CCAC95X6JgXQ3neXF6otj6a+F_MwJ9jbj9P-Ssw3Oqkf518_eT1w@mail.gmail.com%3E
kryo.register(Class.forName("scala.reflect.ClassTag$$anon$1"))
// needed for manifests
kryo.register(Class.forName("scala.reflect.ManifestFactory$ClassTypeManifest"))
// Added to Spark in 1.6.0; needed here for Spark < 1.6.0.
kryo.register(classOf[Array[Tuple1[Any]]])
kryo.register(classOf[Array[(Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(classOf[Array[(Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any)]])
kryo.register(Map.empty.getClass)
kryo.register(Nil.getClass)
kryo.register(None.getClass)
}
}
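// Illustrative sketch, not part of the original ADAM sources: how an application would
// typically enable Kryo and point Spark at this registrator. The config keys are standard
// Spark settings; the application name below is a made-up placeholder.
private[serialization] object AdamKryoConfSketch {
  import org.apache.spark.SparkConf

  def exampleConf(): SparkConf =
    new SparkConf()
      .setAppName("adam-example")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", classOf[ADAMKryoRegistrator].getName)
}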
|
laserson/adam
|
adam-core/src/main/scala/org/bdgenomics/adam/serialization/ADAMKryoRegistrator.scala
|
Scala
|
apache-2.0
| 23,064 |
package japgolly.scalajs.react.extra.router
import java.util.UUID
import java.util.regex.{Pattern, Matcher}
import scala.reflect.ClassTag
import scala.util.matching.Regex
import japgolly.scalajs.react.CallbackTo
import japgolly.scalajs.react.extra.internal.RouterMacros
import japgolly.scalajs.react.internal.identityFn
import japgolly.scalajs.react.vdom.VdomElement
import RouterConfig.Parsed
/**
* This is not meant to be imported by library-users;
* [[RouterConfigDsl]] is the entire library-user-facing facade & DSL.
*/
object StaticDsl {
private val regexEscape1 = """([-()\\[\\]{}+?*.$\\^|,:#<!\\\\])""".r
private val regexEscape2 = """\\x08""".r
/**
* Pattern.quote doesn't work in Scala.JS.
*
* http://stackoverflow.com/questions/2593637/how-to-escape-regular-expression-in-javascript
*/
def regexEscape(s: String): String = {
var r = s
r = regexEscape1.replaceAllIn(r, """\\\\$1""")
r = regexEscape2.replaceAllIn(r, """\\\\x08""")
r
}
/**
* Route builder. Allows you to specify routes like `"user" / int / "display"`.
* Once complete, [[RouteB]] will become a [[Route]].
*/
object RouteB {
trait Composition[A, B] {
type C
val ga: C => A
val gb: C => B
val gc: (A, B) => C
def apply(fa: RouteB[A], fb: RouteB[B]): RouteB[C] =
new RouteB(
fa.regex + fb.regex,
fa.matchGroups + fb.matchGroups,
g => for {a <- fa.parse(g); b <- fb.parse(i => g(i + fa.matchGroups))} yield gc(a, b),
c => fa.build(ga(c)) + fb.build(gb(c)))
}
trait Composition_PriLowest {
implicit def ***[A, B] = Composition[A, B, (A, B)](_._1, _._2, (_, _))
}
trait Composition_PriLow extends Composition_PriLowest {
// Generated by bin/gen-router
implicit def T3[A,B,C] = Composition[(A,B), C, (A,B,C)](r => (r._1,r._2), _._3, (l,r) => (l._1,l._2,r))
implicit def T4[A,B,C,D] = Composition[(A,B,C), D, (A,B,C,D)](r => (r._1,r._2,r._3), _._4, (l,r) => (l._1,l._2,l._3,r))
implicit def T5[A,B,C,D,E] = Composition[(A,B,C,D), E, (A,B,C,D,E)](r => (r._1,r._2,r._3,r._4), _._5, (l,r) => (l._1,l._2,l._3,l._4,r))
implicit def T6[A,B,C,D,E,F] = Composition[(A,B,C,D,E), F, (A,B,C,D,E,F)](r => (r._1,r._2,r._3,r._4,r._5), _._6, (l,r) => (l._1,l._2,l._3,l._4,l._5,r))
implicit def T7[A,B,C,D,E,F,G] = Composition[(A,B,C,D,E,F), G, (A,B,C,D,E,F,G)](r => (r._1,r._2,r._3,r._4,r._5,r._6), _._7, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,r))
implicit def T8[A,B,C,D,E,F,G,H] = Composition[(A,B,C,D,E,F,G), H, (A,B,C,D,E,F,G,H)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7), _._8, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,r))
implicit def T9[A,B,C,D,E,F,G,H,I] = Composition[(A,B,C,D,E,F,G,H), I, (A,B,C,D,E,F,G,H,I)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8), _._9, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,r))
implicit def T10[A,B,C,D,E,F,G,H,I,J] = Composition[(A,B,C,D,E,F,G,H,I), J, (A,B,C,D,E,F,G,H,I,J)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9), _._10, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,r))
implicit def T11[A,B,C,D,E,F,G,H,I,J,K] = Composition[(A,B,C,D,E,F,G,H,I,J), K, (A,B,C,D,E,F,G,H,I,J,K)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10), _._11, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,r))
implicit def T12[A,B,C,D,E,F,G,H,I,J,K,L] = Composition[(A,B,C,D,E,F,G,H,I,J,K), L, (A,B,C,D,E,F,G,H,I,J,K,L)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11), _._12, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,r))
implicit def T13[A,B,C,D,E,F,G,H,I,J,K,L,M] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L), M, (A,B,C,D,E,F,G,H,I,J,K,L,M)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12), _._13, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,r))
implicit def T14[A,B,C,D,E,F,G,H,I,J,K,L,M,N] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M), N, (A,B,C,D,E,F,G,H,I,J,K,L,M,N)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13), _._14, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,r))
implicit def T15[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N), O, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14), _._15, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,r))
implicit def T16[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O), P, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15), _._16, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,r))
implicit def T17[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P), Q, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16), _._17, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,r))
implicit def T18[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q), R, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16,r._17), _._18, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,l._17,r))
implicit def T19[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R), S, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16,r._17,r._18), _._19, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,l._17,l._18,r))
implicit def T20[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S), T, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16,r._17,r._18,r._19), _._20, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,l._17,l._18,l._19,r))
implicit def T21[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T), U, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16,r._17,r._18,r._19,r._20), _._21, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,l._17,l._18,l._19,l._20,r))
implicit def T22[A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V] = Composition[(A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U), V, (A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V)](r => (r._1,r._2,r._3,r._4,r._5,r._6,r._7,r._8,r._9,r._10,r._11,r._12,r._13,r._14,r._15,r._16,r._17,r._18,r._19,r._20,r._21), _._22, (l,r) => (l._1,l._2,l._3,l._4,l._5,l._6,l._7,l._8,l._9,l._10,l._11,l._12,l._13,l._14,l._15,l._16,l._17,l._18,l._19,l._20,l._21,r))
}
trait Composition_PriMed extends Composition_PriLow {
implicit def _toA[A] = Composition[Unit, A, A](_ => (), identityFn, (_, a) => a)
implicit def Ato_[A] = Composition[A, Unit, A](identityFn, _ => (), (a, _) => a)
}
object Composition extends Composition_PriMed {
implicit def _to_ = Composition[Unit, Unit, Unit](_ => (), _ => (), (_, _) => ())
type Aux[A, B, O] = Composition[A, B] {type C = O}
def apply[A, B, O](a: O => A, b: O => B, c: (A, B) => O): Aux[A, B, O] =
new Composition[A, B] {
override type C = O
val ga = a
val gb = b
val gc = c
}
}
private val someUnit = Some(())
def literal(s: String): RouteB[Unit] =
new RouteB(regexEscape(s), 0, _ => someUnit, _ => s)
val / = literal("/")
}
abstract class RouteCommon[R[X] <: RouteCommon[R, X], A] {
def parseThen(f: Option[A] => Option[A]): R[A]
/**
* Prism map.
*
     * Some values of `A` can be turned into `B`s; some fail (in which case the route is considered non-matching).
*
* All `B`s can be turned back into `A`s.
*/
def pmap[B](b: A => Option[B])(a: B => A): R[B]
/**
* Exponential map.
*
* Any `A` can be turned into a `B` and vice versa.
*/
final def xmap[B](b: A => B)(a: B => A): R[B] =
pmap(a => Some(b(a)))(a)
final def filter(f: A => Boolean): R[A] =
parseThen(_ filter f)
final def mapParsed[B <: A](f: A => B): R[B] =
xmap(f)(x => x)
final def mapInput[B >: A](f: B => A): R[B] =
xmap[B](x => x)(f)
final def const[B](b: B)(implicit ev: A =:= Unit, ev2: Unit =:= A): R[B] =
xmap(_ => b)(_ => ())
}
/**
* A fragment of a route. Can be composed with other fragments.
*
* @param matchGroups The number of matches that `regex` will capture.
*/
class RouteB[A](val regex: String,
val matchGroups: Int,
val parse: (Int => String) => Option[A],
val build: A => String) extends RouteCommon[RouteB, A] {
import RouteB.Composition
override def toString =
s"RouteB($regex)"
def ~[B](next: RouteB[B])(implicit c: Composition[A, B]): RouteB[c.C] =
c(this, next)
def /[B](next: RouteB[B])(implicit c: Composition[A, B]): RouteB[c.C] =
this ~ RouteB./ ~ next
override def parseThen(f: Option[A] => Option[A]): RouteB[A] =
new RouteB(regex, matchGroups, f compose parse, build)
override def pmap[B](b: A => Option[B])(a: B => A): RouteB[B] =
new RouteB(regex, matchGroups, parse(_) flatMap b, build compose a)
/**
   * Maps the captured values of the route to a case class.
*/
def caseClass[B]: RouteB[B] =
macro RouterMacros.quietCaseClassB[B]
/**
* Same as [[caseClass]] except the code generated by the macro is printed to stdout.
*/
def caseClassDebug[B]: RouteB[B] =
macro RouterMacros.debugCaseClassB[B]
def option: RouteB[Option[A]] =
new RouteB[Option[A]](s"($regex)?", matchGroups + 1,
g => Some(if (g(0) eq null) None else parse(i => g(i + 1))),
_.fold("")(build))
final def route: Route[A] = {
val p = Pattern.compile("^" + regex + "$")
// https://github.com/scala-js/scala-js/issues/1727
// val g = p.matcher("").groupCount
// if (g != matchGroups)
// sys.error(s"Error in regex: /${p.pattern}/. Expected $matchGroups match groups but detected $g.")
new Route(p, m => parse(i => m.group(i + 1)), a => Path(build(a)))
}
}
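  // Illustrative sketch (the case class below is assumed, not part of this file): inside a
  // RouterConfigDsl[Page] block the fragments above compose and then collapse into a Route:
  //   case class UserView(id: Int)
  //   val r: Route[UserView] = ("user" / int).caseClass[UserView].route
  //   r.pathFor(UserView(7))    // Path("user/7")
  //   r.parse(Path("user/7"))   // Some(UserView(7))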
class RouteBO[A](private val r: RouteB[Option[A]]) extends AnyVal {
/**
* Specify a default value when parsing.
*
    * Note: Unlike [[withDefault()]], path generation will still explicitly include the default value.
    *
    * E.g. if the path is like "/file[.format]" and the default is JSON, "/file" will be read as "/file.json", but
    * when generating a path with JSON this will generate "/file.json" instead of "/file".
*/
def parseDefault(default: => A): RouteB[A] =
r.xmap(_ getOrElse default)(Some(_))
/**
* Specify a default value.
*
    * Note: Unlike [[parseDefault()]], this will affect path generation too.
    *
    * E.g. if the path is like "/file[.format]" and the default is JSON, "/file" will be read as "/file.json", and
    * when generating a path with JSON this will generate "/file" instead of "/file.json".
*
* Make sure the type has a useful `.equals()` implementation.
* Example: `default == default` should be `true`.
*/
def withDefault(default: => A): RouteB[A] =
r.xmap(_ getOrElse default)(a => if (default == a) None else Some(a))
}
/**
* A complete route.
*/
final class Route[A](pattern: Pattern,
parseFn: Matcher => Option[A],
buildFn: A => Path) extends RouteCommon[Route, A] {
override def toString =
s"Route($pattern)"
override def parseThen(f: Option[A] => Option[A]): Route[A] =
new Route(pattern, f compose parseFn, buildFn)
override def pmap[B](b: A => Option[B])(a: B => A): Route[B] =
new Route(pattern, parseFn(_) flatMap b, buildFn compose a)
/**
   * Maps the captured values of the route to a case class.
*/
def caseClass[B]: Route[B] =
macro RouterMacros.quietCaseClass[B]
/**
* Same as [[caseClass]] except the code generated by the macro is printed to stdout.
*/
def caseClassDebug[B]: Route[B] =
macro RouterMacros.debugCaseClass[B]
def parse(path: Path): Option[A] = {
val m = pattern.matcher(path.value)
if (m.matches)
parseFn(m)
else
None
}
def pathFor(a: A): Path =
buildFn(a)
}
// ===================================================================================================================
object Rule {
def parseOnly[Page](parse: Path => Option[Parsed[Page]]) =
new Rule[Page](parse, _ => None, (_, _) => None)
def empty[P]: Rule[P] =
Rule(_ => None, _ => None, (_, _) => None)
}
/**
* A single routing rule. Intended to be composed with other [[Rule]]s.
* When all rules are composed, this is turned into a [[Rules]] instance.
*
* @param parse Attempt to parse a given path.
* @param path Attempt to determine the path for some page.
* @param action Attempt to determine the action when a route resolves to some page.
* @tparam Page The type of legal pages.
*/
final case class Rule[Page](parse : Path => Option[Parsed[Page]],
path : Page => Option[Path],
action: (Path, Page) => Option[Action[Page]]) {
/**
* Compose rules.
*/
def |(that: Rule[Page]): Rule[Page] =
new Rule[Page](
parse || that.parse,
path || that.path,
(u, p) => (if (path(p).isDefined) action else that.action)(u, p))
def xmap[A](f: Page => A)(g: A => Page): Rule[A] =
new Rule[A](
p => parse(p).map(_.bimap(_ map f, f)),
path compose g,
(u, p) => action(u, g(p)).map(_ map f))
def pmap[W](f: Page => W)(pf: PartialFunction[W, Page]): Rule[W] =
pmapF(f)(pf.lift)
def pmapCT[W](f: Page => W)(implicit ct: ClassTag[Page]): Rule[W] =
pmapF(f)(ct.unapply)
def pmapF[W](f: Page => W)(g: W => Option[Page]): Rule[W] =
new Rule[W](
parse(_) map (_.bimap(_ map f, f)),
g(_) flatMap path,
(path, w) => g(w) flatMap (action(path, _)) map (_ map f))
def widen[W >: Page](pf: PartialFunction[W, Page]): Rule[W] =
widenF(pf.lift)
def widenCT[W >: Page](implicit ct: ClassTag[Page]): Rule[W] =
widenF(ct.unapply)
def widenF[W >: Page](f: W => Option[Page]): Rule[W] =
pmapF[W](p => p)(f)
/** See [[autoCorrect()]]. */
def autoCorrect: Rule[Page] =
autoCorrect(Redirect.Replace)
/**
* When a route matches a page, compare its [[Path]] to what the route would generate for the same page and if they
* differ, redirect to the generated one.
*
     * Example: If a route matches `/issue/dev-23` and returns a `Page("DEV", 23)` for which the generated path would be
* `/issue/DEV-23`, this would automatically redirect `/issue/dev-23` to `/issue/DEV-23`, and process
* `/issue/DEV-23` normally using its associated action.
*/
def autoCorrect(redirectMethod: Redirect.Method): Rule[Page] =
new Rule(parse, path,
(actualPath, page) =>
path(page).flatMap(expectedPath =>
if (expectedPath == actualPath)
action(actualPath, page)
else
Some(RedirectToPath(expectedPath, redirectMethod))
)
)
/**
     * Modify the path(s) generated and parsed by this rule.
*/
def modPath(add: Path => Path, remove: Path => Option[Path]): Rule[Page] =
new Rule(
remove(_) flatMap parse,
path(_) map add,
action)
/**
     * Add a prefix to the path(s) generated and parsed by this rule.
*/
def prefixPath(prefix: String): Rule[Page] =
modPath(
p => Path(prefix + p.value),
_ removePrefix prefix)
/**
     * Add a prefix to the path(s) generated and parsed by this rule.
     *
     * Unlike [[prefixPath()]], when the suffix is non-empty a slash is added between the prefix and the suffix.
*/
def prefixPath_/(prefix: String): Rule[Page] = {
val pre = Path(prefix)
modPath(
p => if (p.isEmpty) pre else pre / p,
p => if (p.value == prefix) Some(Path.root) else p.removePrefix(prefix + "/"))
}
/**
* Prevent this rule from functioning unless some condition holds.
* When the condition doesn't hold, an alternative action may be performed.
*
* @param condUnmet Response when rule matches but condition doesn't hold.
* If response is `None` it will be as if this rule doesn't exist and will likely end in the
* route-not-found fallback behaviour.
*/
def addCondition(cond: CallbackTo[Boolean])(condUnmet: Page => Option[Action[Page]]): Rule[Page] =
addCondition(_ => cond)(condUnmet)
/**
     * Prevent this rule from functioning unless some condition holds; the requested page is
     * passed in as part of the context.
* When the condition doesn't hold, an alternative action may be performed.
*
* @param cond Function that takes the requested page and returns true if the page should be rendered.
* @param condUnmet Response when rule matches but condition doesn't hold.
* If response is `None` it will be as if this rule doesn't exist and will likely end in the
* route-not-found fallback behaviour.
*/
def addCondition(cond: Page => CallbackTo[Boolean])(condUnmet: Page => Option[Action[Page]]): Rule[Page] =
new Rule[Page](parse, path,
(u, p) => if (cond(p).runNow()) action(u, p) else condUnmet(p))
/**
* Specify behaviour when a `Page` doesn't have an associated `Path` or `Action`.
*/
def fallback(fp: Page => Path, fa: (Path, Page) => Action[Page]): Rules[Page] =
new Rules[Page](parse, path | fp, action | fa)
/**
* When a `Page` doesn't have an associated `Path` or `Action`, throw a runtime error.
*
* This is the trade-off for keeping the parsing and generation of known `Page`s in sync - compiler proof of
* `Page` exhaustiveness is sacrificed.
*
* It is recommended that you call [[RouterConfig.verify]] as a sanity-check.
*/
def noFallback: Rules[Page] =
fallback(
page => sys error s"Unspecified path for page $page.",
(path, page) => sys error s"Unspecified action for page $page at $path.")
}
object Rules {
/**
* Create routing rules all at once, with compiler proof that all `Page`s will have a `Path` and `Action`
* associated.
*
* The trade-off here is that care will need to be taken to ensure that path-parsing aligns with paths
* generated for pages. It is recommended that you call [[RouterConfig.verify]] as a sanity-check.
*/
def apply[Page](toPage: Path => Option[Parsed[Page]], fromPage: Page => (Path, Action[Page])) =
new Rules[Page](toPage, fromPage(_)._1, (_, p) => fromPage(p)._2)
}
/**
* Exhaustive routing rules. For all `Page`s there are `Path`s and `Action`s.
*/
final case class Rules[Page](parse : Path => Option[Parsed[Page]],
path : Page => Path,
action: (Path, Page) => Action[Page]) {
/**
* Specify a catch-all response to unmatched/invalid routes.
*/
def notFound(f: Path => Parsed[Page]): RouterConfig[Page] =
RouterConfig.withDefaults(parse | f, path, action)
}
// ===================================================================================================================
final class DynamicRouteB[Page, P <: Page, O](private val f: (P => Action[Page]) => O) extends AnyVal {
def ~>(g: P => Action[Page]): O = f(g)
}
final class StaticRouteB[Page, O](private val f: (=> Action[Page]) => O) extends AnyVal {
def ~>(a: => Action[Page]): O = f(a)
}
final class StaticRedirectB[Page, O](private val f: (=> Redirect[Page]) => O) extends AnyVal {
def ~>(a: => Redirect[Page]): O = f(a)
}
final class DynamicRedirectB[Page, A, O](private val f: (A => Redirect[Page]) => O) extends AnyVal {
def ~>(a: A => Redirect[Page]): O = f(a)
}
}
// =====================================================================================================================
// =====================================================================================================================
object RouterConfigDsl {
def apply[Page] =
new BuildInterface[Page]
class BuildInterface[Page] {
def use[A](f: RouterConfigDsl[Page] => A): A =
f(new RouterConfigDsl)
def buildConfig(f: RouterConfigDsl[Page] => RouterConfig[Page]): RouterConfig[Page] =
use(f)
def buildRule(f: RouterConfigDsl[Page] => StaticDsl.Rule[Page]): StaticDsl.Rule[Page] =
use(f)
}
}
/**
* DSL for creating [[RouterConfig]].
*
 * Instead of creating an instance of this yourself, use [[RouterConfigDsl.apply]].
*/
final class RouterConfigDsl[Page] {
import StaticDsl.{Rule => _, Rules => _, _}
type Action = japgolly.scalajs.react.extra.router.Action[Page]
type Renderer = japgolly.scalajs.react.extra.router.Renderer[Page]
type Redirect = japgolly.scalajs.react.extra.router.Redirect[Page]
type Parsed = RouterConfig.Parsed[Page]
// -------------------------------------------------------------------------------------------------------------------
// Route DSL
private def uuidRegex = "([A-Fa-f0-9]{8}(?:-[A-Fa-f0-9]{4}){3}-[A-Fa-f0-9]{12})"
def root = Path.root
val int = new RouteB[Int] ("(-?\\\\d+)", 1, g => Some(g(0).toInt), _.toString)
val long = new RouteB[Long]("(-?\\\\d+)", 1, g => Some(g(0).toLong), _.toString)
val uuid = new RouteB[UUID](uuidRegex, 1, g => Some(UUID fromString g(0)), _.toString)
private def __string1(regex: String): RouteB[String] =
new RouteB(regex, 1, g => Some(g(0)), identityFn)
/**
* Matches a string.
*
   * Best to use a whitelist of characters, e.g. "[a-zA-Z0-9]+".
* Do not capture groups; use "[a-z]+" instead of "([a-z]+)".
* If you need to group, use non-capturing groups like "(?:bye|hello)" instead of "(bye|hello)".
*/
def string(regex: String): RouteB[String] =
__string1("(" + regex + ")")
/** Captures the (non-empty) remaining portion of the URL path. */
def remainingPath: RouteB[String] =
__string1("(.+)$")
/** Captures the (potentially-empty) remaining portion of the URL path. */
def remainingPathOrBlank: RouteB[String] =
__string1("(.*)$")
implicit def _ops_for_routeb_option[A](r: RouteB[Option[A]]) = new RouteBO(r)
implicit def _auto_routeB_from_str(l: String) = RouteB.literal(l)
implicit def _auto_routeB_from_path(p: Path) = RouteB.literal(p.value)
implicit def _auto_route_from_routeB[A, R <% RouteB[A]](r: R) = r.route
// -------------------------------------------------------------------------------------------------------------------
// Action DSL
implicit def _auto_someAction[A <: Action](a: A): Option[A] = Some(a)
def render[A <% VdomElement](a: => A): Renderer =
Renderer(_ => a)
def renderR[A <% VdomElement](g: RouterCtl[Page] => A): Renderer =
Renderer(g(_))
def dynRender[P <: Page, A <% VdomElement](g: P => A): P => Renderer =
p => Renderer(_ => g(p))
def dynRenderR[P <: Page, A <% VdomElement](g: (P, RouterCtl[Page]) => A): P => Renderer =
p => Renderer(r => g(p, r))
def redirectToPage(page: Page)(implicit method: Redirect.Method): RedirectToPage[Page] =
RedirectToPage[Page](page, method)
def redirectToPath(path: Path)(implicit method: Redirect.Method): RedirectToPath[Page] =
RedirectToPath[Page](path, method)
def redirectToPath(path: String)(implicit method: Redirect.Method): RedirectToPath[Page] =
redirectToPath(Path(path))
// -------------------------------------------------------------------------------------------------------------------
// Rule building DSL
type Rule = StaticDsl.Rule[Page]
type Rules = StaticDsl.Rules[Page]
def Rule = StaticDsl.Rule
def emptyRule: Rule = Rule.empty
implicit def _auto_parsed_from_redirect(r: Redirect): Parsed = Left(r)
implicit def _auto_parsed_from_page (p: Page) : Parsed = Right(p)
implicit def _auto_parsedO_from_parsed [A <% Parsed](p: A) : Option[Parsed] = Some(p)
implicit def _auto_parsedO_from_parsedO[A <% Parsed](o: Option[A]): Option[Parsed] = o.map(a => a)
implicit def _auto_notFound_from_parsed [A <% Parsed](a: A) : Path => Parsed = _ => a
implicit def _auto_notFound_from_parsedF[A <% Parsed](f: Path => A): Path => Parsed = f(_)
implicit def _auto_routeParser_from_parsed [A <% Parsed](a: A) : Path => Option[Parsed] = _ => Some(a)
implicit def _auto_routeParser_from_parsedF [A <% Parsed](f: Path => A) : Path => Option[Parsed] = p => Some(f(p))
implicit def _auto_routeParser_from_parsedO [A <% Parsed](o: Option[A]) : Path => Option[Parsed] = _ => o.map(a => a)
implicit def _auto_routeParser_from_parsedFO[A <% Parsed](f: Path => Option[A]): Path => Option[Parsed] = f(_).map(a => a)
// allows dynamicRoute ~~> X to not care if X is (Action) or (P => Action)
implicit def _auto_pToAction_from_action(a: => Action): Page => Action = _ => a
implicit def _auto_rules_from_rulesB(r: Rule): Rules = r.noFallback
  // Only really aids rewritePathR but safe anyway
implicit def _auto_pattern_from_regex(r: Regex): Pattern = r.pattern
/**
* Note: Requires that `Page#equals()` be sensible.
*/
def staticRoute(r: Route[Unit], page: Page): StaticRouteB[Page, Rule] = {
val dyn = dynamicRoute(r const page){ case p if page == p => p }
new StaticRouteB(a => dyn ~> a)
}
def dynamicRoute[P <: Page](r: Route[P])(pf: PartialFunction[Page, P]): DynamicRouteB[Page, P, Rule] =
dynamicRouteF(r)(pf.lift)
def dynamicRouteF[P <: Page](r: Route[P])(op: Page => Option[P]): DynamicRouteB[Page, P, Rule] = {
def onPage[A](f: P => A)(page: Page): Option[A] =
op(page) map f
new DynamicRouteB(a => Rule(r.parse, onPage(r.pathFor), (_, p) => onPage(a)(p)))
}
def dynamicRouteCT[P <: Page](r: Route[P])(implicit ct: ClassTag[P]): DynamicRouteB[Page, P, Rule] =
dynamicRouteF(r)(ct.unapply)
def staticRedirect(r: Route[Unit]): StaticRedirectB[Page, Rule] =
new StaticRedirectB(a => rewritePathF(r.parse(_) map (_ => a)))
def dynamicRedirect[A](r: Route[A]): DynamicRedirectB[Page, A, Rule] =
new DynamicRedirectB(f => rewritePathF(r.parse(_) map f))
def rewritePath(pf: PartialFunction[Path, Redirect]): Rule =
rewritePathF(pf.lift)
def rewritePathF(f: Path => Option[Redirect]): Rule =
Rule parseOnly f
def rewritePathR(r: Pattern, f: Matcher => Option[Redirect]): Rule =
rewritePathF { p =>
val m = r.matcher(p.value)
if (m.matches) f(m) else None
}
// -------------------------------------------------------------------------------------------------------------------
// Utilities
/**
* Removes the query portion of the URL.
*
* e.g. `a/b?c=1` to `a/b`
*/
def removeQuery: Rule =
rewritePathR("^(.*?)\\\\?.*$".r, m => redirectToPath(m group 1)(Redirect.Replace))
/**
* A rule that uses a replace-state redirect to remove trailing slashes from route URLs.
*/
def removeTrailingSlashes: Rule =
rewritePathR("^(.*?)/+$".r, m => redirectToPath(m group 1)(Redirect.Replace))
/**
* A rule that uses a replace-state redirect to remove leading slashes from route URLs.
*/
def removeLeadingSlashes: Rule =
rewritePathR("^/+(.*)$".r, m => redirectToPath(m group 1)(Redirect.Replace))
/**
* A rule that uses a replace-state redirect to remove leading and trailing slashes from route URLs.
*/
def trimSlashes: Rule = (
rewritePathR("^/*(.*?)/+$".r, m => redirectToPath(m group 1)(Redirect.Replace))
| removeLeadingSlashes)
}
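// Illustrative end-to-end sketch, not part of the original file: a hypothetical Page ADT and
// the usual vdom import are assumed. It shows how the DSL above is typically put together.
//   sealed trait Page
//   case object Home          extends Page
//   case class  Item(id: Int) extends Page
//
//   val config = RouterConfigDsl[Page].buildConfig { dsl =>
//     import dsl._
//     (trimSlashes
//       | staticRoute(root, Home) ~> render(<.div("Home"))
//       | dynamicRouteCT("item" / int.caseClass[Item]) ~> dynRender(item => <.div(s"Item #${item.id}"))
//       ).notFound(redirectToPage(Home)(Redirect.Replace))
//   }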
|
matthughes/scalajs-react
|
extra/src/main/scala/japgolly/scalajs/react/extra/router/Dsl.scala
|
Scala
|
apache-2.0
| 28,697 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.{File, PrintWriter}
import java.net.URI
import java.util.TimeZone
import java.util.concurrent.TimeUnit
import scala.collection.mutable
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogColumnStat
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils
import org.apache.spark.sql.catalyst.util.DateTimeUtils.TimeZoneUTC
import org.apache.spark.sql.functions.timestamp_seconds
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.ArrayData
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* End-to-end suite testing statistics collection and use on both entire table and columns.
*/
class StatisticsCollectionSuite extends StatisticsCollectionTestBase with SharedSparkSession {
import testImplicits._
test("estimates the size of a limit 0 on outer join") {
withTempView("test") {
Seq(("one", 1), ("two", 2), ("three", 3), ("four", 4)).toDF("k", "v")
.createOrReplaceTempView("test")
val df1 = spark.table("test")
val df2 = spark.table("test").limit(0)
val df = df1.join(df2, Seq("k"), "left")
val sizes = df.queryExecution.analyzed.collect { case g: Join =>
g.stats.sizeInBytes
}
assert(sizes.size === 1, s"number of Join nodes is wrong:\\n ${df.queryExecution}")
      assert(sizes.head === BigInt(128),
        s"expected exact size 128 for table 'test', got: ${sizes.head}")
}
}
test("analyzing views is not supported") {
def assertAnalyzeUnsupported(analyzeCommand: String): Unit = {
val err = intercept[AnalysisException] {
sql(analyzeCommand)
}
assert(err.message.contains("ANALYZE TABLE is not supported"))
}
val tableName = "tbl"
withTable(tableName) {
spark.range(10).write.saveAsTable(tableName)
val viewName = "view"
withView(viewName) {
sql(s"CREATE VIEW $viewName AS SELECT * FROM $tableName")
assertAnalyzeUnsupported(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
assertAnalyzeUnsupported(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id")
}
}
}
test("statistics collection of a table with zero column") {
val table_no_cols = "table_no_cols"
withTable(table_no_cols) {
val rddNoCols = sparkContext.parallelize(1 to 10).map(_ => Row.empty)
val dfNoCols = spark.createDataFrame(rddNoCols, StructType(Seq.empty))
dfNoCols.write.format("json").saveAsTable(table_no_cols)
sql(s"ANALYZE TABLE $table_no_cols COMPUTE STATISTICS")
checkTableStats(table_no_cols, hasSizeInBytes = true, expectedRowCounts = Some(10))
}
}
test("analyze empty table") {
val table = "emptyTable"
withTable(table) {
val df = Seq.empty[Int].toDF("key")
df.write.format("json").saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS noscan")
val fetchedStats1 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetchedStats1.get.sizeInBytes == 0)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
val fetchedStats2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(0))
assert(fetchedStats2.get.sizeInBytes == 0)
val expectedColStat =
"key" -> CatalogColumnStat(Some(0), None, None, Some(0),
Some(IntegerType.defaultSize), Some(IntegerType.defaultSize))
      // There won't be a histogram for an empty column.
Seq("true", "false").foreach { histogramEnabled =>
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> histogramEnabled) {
checkColStats(df, mutable.LinkedHashMap(expectedColStat))
}
}
}
}
test("analyze column command - unsupported types and invalid columns") {
val tableName = "column_stats_test1"
withTable(tableName) {
Seq(ArrayData(Seq(1, 2, 3), Seq(Seq(1, 2, 3)))).toDF().write.saveAsTable(tableName)
// Test unsupported data types
val err1 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS FOR COLUMNS data")
}
assert(err1.message.contains("does not support statistics collection"))
// Test invalid columns
val err2 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS FOR COLUMNS some_random_column")
}
assert(err2.message.contains("does not exist"))
}
}
test("test table-level statistics for data source table") {
val tableName = "tbl"
withTable(tableName) {
sql(s"CREATE TABLE $tableName(i INT, j STRING) USING parquet")
Seq(1 -> "a", 2 -> "b").toDF("i", "j").write.mode("overwrite").insertInto(tableName)
// noscan won't count the number of rows
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS noscan")
checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = None)
// without noscan, we count the number of rows
sql(s"ANALYZE TABLE $tableName COMPUTE STATISTICS")
checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = Some(2))
}
}
test("SPARK-15392: DataFrame created from RDD should not be broadcasted") {
val rdd = sparkContext.range(1, 100).map(i => Row(i, i))
val df = spark.createDataFrame(rdd, new StructType().add("a", LongType).add("b", LongType))
assert(df.queryExecution.analyzed.stats.sizeInBytes >
spark.sessionState.conf.autoBroadcastJoinThreshold)
assert(df.selectExpr("a").queryExecution.analyzed.stats.sizeInBytes >
spark.sessionState.conf.autoBroadcastJoinThreshold)
}
test("column stats round trip serialization") {
    // Make sure that serializing and then deserializing the column stats gives back the original values.
val df = data.toDF(stats.keys.toSeq :+ "carray" : _*)
Seq(stats, statsWithHgms).foreach { s =>
s.zip(df.schema).foreach { case ((k, v), field) =>
withClue(s"column $k with type ${field.dataType}") {
val roundtrip = CatalogColumnStat.fromMap("table_is_foo", field.name, v.toMap(k))
assert(roundtrip == Some(v))
}
}
}
}
test("analyze column command - result verification") {
// (data.head.productArity - 1) because the last column does not support stats collection.
assert(stats.size == data.head.productArity - 1)
val df = data.toDF(stats.keys.toSeq :+ "carray" : _*)
checkColStats(df, stats)
// test column stats with histograms
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> "true", SQLConf.HISTOGRAM_NUM_BINS.key -> "2") {
checkColStats(df, statsWithHgms)
}
}
test("column stats collection for null columns") {
val dataTypes: Seq[(DataType, Int)] = Seq(
BooleanType, ByteType, ShortType, IntegerType, LongType,
DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT,
StringType, BinaryType, DateType, TimestampType
).zipWithIndex
val df = sql("select " + dataTypes.map { case (tpe, idx) =>
s"cast(null as ${tpe.sql}) as col$idx"
}.mkString(", "))
val expectedColStats = dataTypes.map { case (tpe, idx) =>
(s"col$idx", CatalogColumnStat(Some(0), None, None, Some(1),
Some(tpe.defaultSize.toLong), Some(tpe.defaultSize.toLong)))
}
// There won't be histograms for null columns.
Seq("true", "false").foreach { histogramEnabled =>
withSQLConf(SQLConf.HISTOGRAM_ENABLED.key -> histogramEnabled) {
checkColStats(df, mutable.LinkedHashMap(expectedColStats: _*))
}
}
}
test("SPARK-25028: column stats collection for null partitioning columns") {
val table = "analyze_partition_with_null"
withTempDir { dir =>
withTable(table) {
sql(s"""
|CREATE TABLE $table (value string, name string)
|USING PARQUET
|PARTITIONED BY (name)
|LOCATION '${dir.toURI}'""".stripMargin)
val df = Seq(("a", null), ("b", null)).toDF("value", "name")
df.write.mode("overwrite").insertInto(table)
sql(s"ANALYZE TABLE $table PARTITION (name) COMPUTE STATISTICS")
val partitions = spark.sessionState.catalog.listPartitions(TableIdentifier(table))
assert(partitions.head.stats.get.rowCount.get == 2)
}
}
}
test("number format in statistics") {
val numbers = Seq(
BigInt(0) -> (("0.0 B", "0")),
BigInt(100) -> (("100.0 B", "100")),
BigInt(2047) -> (("2047.0 B", "2.05E+3")),
BigInt(2048) -> (("2.0 KiB", "2.05E+3")),
BigInt(3333333) -> (("3.2 MiB", "3.33E+6")),
BigInt(4444444444L) -> (("4.1 GiB", "4.44E+9")),
BigInt(5555555555555L) -> (("5.1 TiB", "5.56E+12")),
BigInt(6666666666666666L) -> (("5.9 PiB", "6.67E+15")),
      BigInt(1L << 10) * (1L << 60) -> (("1024.0 EiB", "1.18E+21")),
BigInt(1L << 11) * (1L << 60) -> (("2.36E+21 B", "2.36E+21"))
)
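    // The readable size stays in raw bytes up to 2047 B, switches to IEC units from 2 KiB, and
    // falls back to scientific-notation bytes once the value exceeds the EiB range; larger row
    // counts are likewise rendered in scientific notation.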
numbers.foreach { case (input, (expectedSize, expectedRows)) =>
val stats = Statistics(sizeInBytes = input, rowCount = Some(input))
val expectedString = s"sizeInBytes=$expectedSize, rowCount=$expectedRows"
assert(stats.simpleString == expectedString)
}
}
test("change stats after truncate command") {
val table = "change_stats_truncate_table"
withTable(table) {
spark.range(100).select($"id", $"id" % 5 as "value").write.saveAsTable(table)
// analyze to get initial stats
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS id, value")
val fetched1 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(100))
assert(fetched1.get.sizeInBytes > 0)
assert(fetched1.get.colStats.size == 2)
// truncate table command
sql(s"TRUNCATE TABLE $table")
val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(0))
assert(fetched2.get.sizeInBytes == 0)
assert(fetched2.get.colStats.isEmpty)
}
}
test("change stats after set location command") {
val table = "change_stats_set_location_table"
val tableLoc = new File(spark.sessionState.catalog.defaultTablePath(TableIdentifier(table)))
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
spark.range(100).select($"id", $"id" % 5 as "value").write.saveAsTable(table)
// analyze to get initial stats
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS id, value")
val fetched1 = checkTableStats(
table, hasSizeInBytes = true, expectedRowCounts = Some(100))
assert(fetched1.get.sizeInBytes > 0)
assert(fetched1.get.colStats.size == 2)
// set location command
val initLocation = spark.sessionState.catalog.getTableMetadata(TableIdentifier(table))
.storage.locationUri.get.toString
withTempDir { newLocation =>
sql(s"ALTER TABLE $table SET LOCATION '${newLocation.toURI.toString}'")
if (autoUpdate) {
val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched2.get.sizeInBytes == 0)
assert(fetched2.get.colStats.isEmpty)
// set back to the initial location
sql(s"ALTER TABLE $table SET LOCATION '$initLocation'")
val fetched3 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched3.get.sizeInBytes == fetched1.get.sizeInBytes)
} else {
checkTableStats(table, hasSizeInBytes = false, expectedRowCounts = None)
// SPARK-19724: clean up the previous table location.
waitForTasksToFinish()
Utils.deleteRecursively(tableLoc)
}
}
}
}
}
}
test("change stats after insert command for datasource table") {
val table = "change_stats_insert_datasource_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
sql(s"CREATE TABLE $table (i int, j string) USING PARQUET")
// analyze to get initial stats
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS i, j")
val fetched1 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = Some(0))
assert(fetched1.get.sizeInBytes == 0)
assert(fetched1.get.colStats.size == 2)
// table lookup will make the table cached
spark.table(table)
assert(isTableInCatalogCache(table))
// insert into command
sql(s"INSERT INTO TABLE $table SELECT 1, 'abc'")
if (autoUpdate) {
val fetched2 = checkTableStats(table, hasSizeInBytes = true, expectedRowCounts = None)
assert(fetched2.get.sizeInBytes > 0)
assert(fetched2.get.colStats.isEmpty)
} else {
checkTableStats(table, hasSizeInBytes = false, expectedRowCounts = None)
}
// check that tableRelationCache inside the catalog was invalidated after insert
assert(!isTableInCatalogCache(table))
}
}
}
}
test("auto gather stats after insert command") {
val table = "change_stats_insert_datasource_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
sql(s"CREATE TABLE $table (i int, j string) USING PARQUET")
// insert into command
sql(s"INSERT INTO TABLE $table SELECT 1, 'abc'")
val stats = getCatalogTable(table).stats
if (autoUpdate) {
assert(stats.isDefined)
assert(stats.get.sizeInBytes >= 0)
} else {
assert(stats.isEmpty)
}
}
}
}
}
test("invalidation of tableRelationCache after inserts") {
val table = "invalidate_catalog_cache_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
spark.range(100).write.saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
spark.table(table)
val initialSizeInBytes = getTableFromCatalogCache(table).stats.sizeInBytes
spark.range(100).write.mode(SaveMode.Append).saveAsTable(table)
spark.table(table)
assert(getTableFromCatalogCache(table).stats.sizeInBytes == 2 * initialSizeInBytes)
}
}
}
}
test("invalidation of tableRelationCache after table truncation") {
val table = "invalidate_catalog_cache_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTable(table) {
spark.range(100).write.saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
spark.table(table)
sql(s"TRUNCATE TABLE $table")
spark.table(table)
assert(getTableFromCatalogCache(table).stats.sizeInBytes == 0)
}
}
}
}
test("invalidation of tableRelationCache after alter table add partition") {
val table = "invalidate_catalog_cache_table"
Seq(false, true).foreach { autoUpdate =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> autoUpdate.toString) {
withTempDir { dir =>
withTable(table) {
val path = dir.getCanonicalPath
sql(s"""
|CREATE TABLE $table (col1 int, col2 int)
|USING PARQUET
|PARTITIONED BY (col2)
|LOCATION '${dir.toURI}'""".stripMargin)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS")
spark.table(table)
assert(getTableFromCatalogCache(table).stats.sizeInBytes == 0)
spark.catalog.recoverPartitions(table)
val df = Seq((1, 2), (1, 2)).toDF("col2", "col1")
df.write.parquet(s"$path/col2=1")
sql(s"ALTER TABLE $table ADD PARTITION (col2=1) LOCATION '${dir.toURI}'")
spark.table(table)
val cachedTable = getTableFromCatalogCache(table)
val cachedTableSizeInBytes = cachedTable.stats.sizeInBytes
val defaultSizeInBytes = conf.defaultSizeInBytes
if (autoUpdate) {
assert(cachedTableSizeInBytes != defaultSizeInBytes && cachedTableSizeInBytes > 0)
} else {
assert(cachedTableSizeInBytes == defaultSizeInBytes)
}
}
}
}
}
}
test("Simple queries must be working, if CBO is turned on") {
withSQLConf(SQLConf.CBO_ENABLED.key -> "true") {
withTable("TBL1", "TBL") {
import org.apache.spark.sql.functions._
val df = spark.range(1000L).select('id,
'id * 2 as "FLD1",
'id * 12 as "FLD2",
lit("aaa") + 'id as "fld3")
df.write
.mode(SaveMode.Overwrite)
.bucketBy(10, "id", "FLD1", "FLD2")
.sortBy("id", "FLD1", "FLD2")
.saveAsTable("TBL")
sql("ANALYZE TABLE TBL COMPUTE STATISTICS ")
sql("ANALYZE TABLE TBL COMPUTE STATISTICS FOR COLUMNS ID, FLD1, FLD2, FLD3")
val df2 = spark.sql(
"""
|SELECT t1.id, t1.fld1, t1.fld2, t1.fld3
|FROM tbl t1
|JOIN tbl t2 on t1.id=t2.id
|WHERE t1.fld3 IN (-123.23,321.23)
""".stripMargin)
df2.createTempView("TBL2")
sql("SELECT * FROM tbl2 WHERE fld3 IN ('qqq', 'qwe') ").queryExecution.executedPlan
}
}
}
test("store and retrieve column stats in different time zones") {
val (start, end) = (0, TimeUnit.DAYS.toSeconds(2))
def checkTimestampStats(
t: DataType,
srcTimeZone: TimeZone,
dstTimeZone: TimeZone)(checker: ColumnStat => Unit): Unit = {
val table = "time_table"
val column = "T"
val original = TimeZone.getDefault
try {
withTable(table) {
TimeZone.setDefault(srcTimeZone)
spark.range(start, end)
.select(timestamp_seconds($"id").cast(t).as(column))
.write.saveAsTable(table)
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS $column")
TimeZone.setDefault(dstTimeZone)
val stats = getCatalogTable(table)
.stats.get.colStats(column).toPlanStat(column, t)
checker(stats)
}
} finally {
TimeZone.setDefault(original)
}
}
DateTimeTestUtils.outstandingZoneIds.foreach { zid =>
val timeZone = TimeZone.getTimeZone(zid)
checkTimestampStats(DateType, TimeZoneUTC, timeZone) { stats =>
assert(stats.min.get.asInstanceOf[Int] == TimeUnit.SECONDS.toDays(start))
assert(stats.max.get.asInstanceOf[Int] == TimeUnit.SECONDS.toDays(end - 1))
}
checkTimestampStats(TimestampType, TimeZoneUTC, timeZone) { stats =>
assert(stats.min.get.asInstanceOf[Long] == TimeUnit.SECONDS.toMicros(start))
assert(stats.max.get.asInstanceOf[Long] == TimeUnit.SECONDS.toMicros(end - 1))
}
}
}
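// Returns the names of the columns that have statistics attached in the table's optimized plan.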
def getStatAttrNames(tableName: String): Set[String] = {
val queryStats = spark.table(tableName).queryExecution.optimizedPlan.stats.attributeStats
queryStats.map(_._1.name).toSet
}
test("analyzes column statistics in cached query") {
withTempView("cachedQuery") {
sql(
"""CACHE TABLE cachedQuery AS
| SELECT c0, avg(c1) AS v1, avg(c2) AS v2
| FROM (SELECT id % 3 AS c0, id % 5 AS c1, 2 AS c2 FROM range(1, 30))
| GROUP BY c0
""".stripMargin)
// Analyzes one column in the cached logical plan
sql("ANALYZE TABLE cachedQuery COMPUTE STATISTICS FOR COLUMNS v1")
assert(getStatAttrNames("cachedQuery") === Set("v1"))
// Analyzes two more columns
sql("ANALYZE TABLE cachedQuery COMPUTE STATISTICS FOR COLUMNS c0, v2")
assert(getStatAttrNames("cachedQuery") === Set("c0", "v1", "v2"))
}
}
test("analyzes column statistics in cached local temporary view") {
withTempView("tempView") {
// Analyzes in a temporary view
sql("CREATE TEMPORARY VIEW tempView AS SELECT * FROM range(1, 30)")
val errMsg = intercept[AnalysisException] {
sql("ANALYZE TABLE tempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg.contains(s"Table or view 'tempView' not found in database 'default'"))
// Cache the view then analyze it
sql("CACHE TABLE tempView")
assert(getStatAttrNames("tempView") !== Set("id"))
sql("ANALYZE TABLE tempView COMPUTE STATISTICS FOR COLUMNS id")
assert(getStatAttrNames("tempView") === Set("id"))
}
}
test("analyzes column statistics in cached global temporary view") {
withGlobalTempView("gTempView") {
val globalTempDB = spark.sharedState.globalTempViewManager.database
val errMsg1 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg1.contains(s"Table or view not found: $globalTempDB.gTempView"))
// Analyzes in a global temporary view
sql("CREATE GLOBAL TEMP VIEW gTempView AS SELECT * FROM range(1, 30)")
val errMsg2 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(errMsg2.contains(s"Table or view 'gTempView' not found in database '$globalTempDB'"))
// Cache the view then analyze it
sql(s"CACHE TABLE $globalTempDB.gTempView")
assert(getStatAttrNames(s"$globalTempDB.gTempView") !== Set("id"))
sql(s"ANALYZE TABLE $globalTempDB.gTempView COMPUTE STATISTICS FOR COLUMNS id")
assert(getStatAttrNames(s"$globalTempDB.gTempView") === Set("id"))
}
}
test("analyzes column statistics in cached catalog view") {
withTempDatabase { database =>
sql(s"CREATE VIEW $database.v AS SELECT 1 c")
sql(s"CACHE TABLE $database.v")
assert(getStatAttrNames(s"$database.v") !== Set("c"))
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS FOR COLUMNS c")
assert(getStatAttrNames(s"$database.v") === Set("c"))
}
}
test("analyzes table statistics in cached catalog view") {
def getTableStats(tableName: String): Statistics = {
spark.table(tableName).queryExecution.optimizedPlan.stats
}
withTempDatabase { database =>
sql(s"CREATE VIEW $database.v AS SELECT 1 c")
// Cache data eagerly by default, so this operation collects table stats
sql(s"CACHE TABLE $database.v")
val stats1 = getTableStats(s"$database.v")
assert(stats1.sizeInBytes > 0)
assert(stats1.rowCount === Some(1))
sql(s"UNCACHE TABLE $database.v")
// Cache data lazily, then analyze table stats
sql(s"CACHE LAZY TABLE $database.v")
val stats2 = getTableStats(s"$database.v")
assert(stats2.sizeInBytes === OneRowRelation().computeStats().sizeInBytes)
assert(stats2.rowCount === None)
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS NOSCAN")
val stats3 = getTableStats(s"$database.v")
assert(stats3.sizeInBytes === OneRowRelation().computeStats().sizeInBytes)
assert(stats3.rowCount === None)
sql(s"ANALYZE TABLE $database.v COMPUTE STATISTICS")
val stats4 = getTableStats(s"$database.v")
assert(stats4.sizeInBytes === stats1.sizeInBytes)
assert(stats4.rowCount === Some(1))
}
}
test(s"CTAS should update statistics if ${SQLConf.AUTO_SIZE_UPDATE_ENABLED.key} is enabled") {
val tableName = "spark_27694"
Seq(false, true).foreach { updateEnabled =>
withSQLConf(SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> updateEnabled.toString) {
withTable(tableName) {
// Create a data source table using the result of a query.
sql(s"CREATE TABLE $tableName USING parquet AS SELECT 'a', 'b'")
val catalogTable = getCatalogTable(tableName)
if (updateEnabled) {
assert(catalogTable.stats.nonEmpty)
} else {
assert(catalogTable.stats.isEmpty)
}
}
}
}
}
test("Metadata files and temporary files should not be counted as data files") {
withTempDir { tempDir =>
val tableName = "t1"
val stagingDirName = ".test-staging-dir"
val tableLocation = s"${tempDir.toURI}/$tableName"
withSQLConf(
SQLConf.AUTO_SIZE_UPDATE_ENABLED.key -> "true",
"hive.exec.stagingdir" -> stagingDirName) {
withTable("t1") {
sql(s"CREATE TABLE $tableName(c1 BIGINT) USING PARQUET LOCATION '$tableLocation'")
sql(s"INSERT INTO TABLE $tableName VALUES(1)")
val staging = new File(new URI(s"$tableLocation/$stagingDirName"))
Utils.tryWithResource(new PrintWriter(staging)) { stagingWriter =>
stagingWriter.write("12")
}
val metadata = new File(new URI(s"$tableLocation/_metadata"))
Utils.tryWithResource(new PrintWriter(metadata)) { metadataWriter =>
metadataWriter.write("1234")
}
sql(s"INSERT INTO TABLE $tableName VALUES(1)")
val stagingFileSize = staging.length()
val metadataFileSize = metadata.length()
val tableLocationSize = getDataSize(new File(new URI(tableLocation)))
val stats = checkTableStats(tableName, hasSizeInBytes = true, expectedRowCounts = None)
assert(stats.get.sizeInBytes === tableLocationSize - stagingFileSize - metadataFileSize)
}
}
}
}
Seq(true, false).foreach { caseSensitive =>
test(s"SPARK-30903: Fail fast on duplicate columns when analyze columns " +
s"- caseSensitive=$caseSensitive") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive.toString) {
val table = "test_table"
withTable(table) {
sql(s"CREATE TABLE $table (value string, name string) USING PARQUET")
val dupCol = if (caseSensitive) "value" else "VaLuE"
val errorMsg = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $table COMPUTE STATISTICS FOR COLUMNS value, name, $dupCol")
}.getMessage
assert(errorMsg.contains("Found duplicate column(s)"))
}
}
}
}
}
|
shuangshuangwang/spark
|
sql/core/src/test/scala/org/apache/spark/sql/StatisticsCollectionSuite.scala
|
Scala
|
apache-2.0
| 27,421 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.rest
/**
* @version $Revision: 1.1 $
*/
trait Container[K, E] {
def get(key: K): Option[E]
def put(element: E): Unit
def key(element: E): K
def remove(element: E): Unit = removeKey(key(element))
def removeKey(key: K): Unit
}
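// Illustrative sketch (not part of the original file): a minimal mutable-map-backed
// Container, showing how the default remove(element) delegates to removeKey via key(element).
class MapContainer[K, E](keyOf: E => K) extends Container[K, E] {
  private val store = scala.collection.mutable.Map.empty[K, E]
  def get(key: K): Option[E] = store.get(key)
  def put(element: E): Unit = store.update(keyOf(element), element)
  def key(element: E): K = keyOf(element)
  def removeKey(key: K): Unit = store -= key
}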
|
maslovalex/scalate
|
scalate-jaxrs/src/main/scala/org/fusesource/scalate/rest/Container.scala
|
Scala
|
apache-2.0
| 1,012 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.accessibility
import java.io.{File, FileWriter}
import org.apache.commons.io.FileUtils
object Filesystem {
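/** Ensures `directory` exists, then writes `filename` inside it via `block` and returns the resulting file. */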
def withFileWriter(directory: String, filename: String)(block: FileWriter => Unit): File = {
val dir = new File(directory)
FileUtils.forceMkdir(dir)
val outFile = new File(dir, filename)
withFileWriter(outFile)(block)
}
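/** Opens a FileWriter on `outFile`, runs `block` against it, then flushes and closes the writer before returning the file. */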
def withFileWriter(outFile: File)(block: FileWriter => Unit): File = {
val out = new FileWriter(outFile)
block(out)
out.flush()
out.close()
outFile
}
}
|
kristapsmelderis/accessibility-driver
|
src/main/scala/uk/gov/hmrc/accessibility/Filesystem.scala
|
Scala
|
apache-2.0
| 1,153 |
package example.test
import org.scalatest.FunSpec
import example.ClassA
class ClassATest extends FunSpec {
describe("ClassA") {
it("echoes integers!") {
val classA = new ClassA
assert(1 == classA.echo(1))
}
}
}
|
MartinSnyder/scalatest-multiproject-example
|
subproject_a/src/test/scala/example/test/ClassATest.scala
|
Scala
|
mit
| 237 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.util
import scala.annotation.tailrec
import scala.collection.JavaConverters._
class LinkedList[E]() extends AbstractSequentialList[E]
with List[E] with Deque[E] with Cloneable with Serializable {
def this(c: Collection[_ <: E]) = {
this()
addAll(c)
}
import LinkedList._
private var head: Node[E] = null
private var last: Node[E] = null
/* Inner size is represented with a Double to satisfy the Collection
* size method requirement:
* If this collection contains more than Integer.MAX_VALUE elements,
* returns Integer.MAX_VALUE.
*/
private var _size: Double = 0
def getFirst(): E = {
if (isEmpty())
throw new NoSuchElementException()
else
peekFirst()
}
def getLast(): E = {
if (isEmpty())
throw new NoSuchElementException()
else
peekLast()
}
def removeFirst(): E = {
if (isEmpty())
throw new NoSuchElementException()
val oldHead = head
head = oldHead.next
if (head ne null)
head.prev = null
else
last = null
_size -= 1
oldHead.value
}
def removeLast(): E = {
if (isEmpty())
throw new NoSuchElementException()
val oldLast = last
last = oldLast.prev
if (last ne null)
last.next = null
else
head = null
_size -= 1
oldLast.value
}
def addFirst(e: E): Unit = {
val oldHead = head
head = new Node(e, next = oldHead)
_size += 1
if (oldHead ne null)
oldHead.prev = head
else
last = head
}
def addLast(e: E): Unit = {
val oldLast = last
last = new Node(e, prev = oldLast)
_size += 1
if (oldLast ne null)
oldLast.next = last
else
head = last
}
override def contains(o: Any): Boolean =
iterator().asScala.exists(_ === o)
override def size(): Int =
_size.toInt
override def add(e: E): Boolean = {
addLast(e)
true
}
override def remove(o: Any): Boolean =
_removeOccurrence(listIterator, o)
override def addAll(c: Collection[_ <: E]): Boolean = {
val iter = c.iterator
val changed = iter.hasNext()
while (iter.hasNext())
addLast(iter.next())
changed
}
override def clear(): Unit = {
head = null
last = null
_size = 0
}
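// Walks from whichever end of the list (head or tail) is closer to the requested index.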
private def getNodeAt(index: Int): Node[E] = {
if (index == 0) head
else if (index == size - 1) last
else {
var current: Node[E] = null
if (index <= size/2) {
current = head
for (_ <- 0 until index)
current = current.next
} else {
current = last
for (_ <- index until (size - 1))
current = current.prev
}
current
}
}
override def get(index: Int): E = {
checkIndexInBounds(index)
getNodeAt(index).value
}
override def set(index: Int, element: E): E = {
checkIndexInBounds(index)
val node = getNodeAt(index)
val oldValue = node.value
node.value = element
oldValue
}
private def addNode(nextNode: Node[E], e: E): Unit = {
if (nextNode eq head) addFirst(e)
else if (nextNode eq null) addLast(e)
else {
val node = new Node(e, prev = nextNode.prev, next = nextNode)
nextNode.prev.next = node
nextNode.prev = node
_size += 1
}
}
override def add(index: Int, element: E): Unit = {
checkIndexOnBounds(index)
addNode(getNodeAt(index), element)
}
private def removeNode(node: Node[E]): E = {
if (node eq head) removeFirst()
else if (node eq last) removeLast()
else {
node.prev.next = node.next
node.next.prev = node.prev
_size -= 1
node.value
}
}
override def remove(index: Int): E = {
checkIndexInBounds(index)
removeNode(getNodeAt(index))
}
def peek(): E =
peekFirst()
def element(): E =
getFirst()
def poll(): E =
pollFirst()
def remove(): E =
removeFirst()
def offer(e: E): Boolean =
offerLast(e)
def offerFirst(e: E): Boolean = {
addFirst(e)
true
}
def offerLast(e: E): Boolean = {
addLast(e)
true
}
def peekFirst(): E =
if (head eq null) null.asInstanceOf[E]
else head.value
def peekLast(): E =
if (last eq null) null.asInstanceOf[E]
else last.value
def pollFirst(): E =
if (isEmpty()) null.asInstanceOf[E]
else removeFirst()
def pollLast(): E =
if (isEmpty) null.asInstanceOf[E]
else removeLast()
def push(e: E): Unit =
addFirst(e)
def pop(): E =
removeFirst()
private def _removeOccurrence(iter: Iterator[E], o: Any): Boolean = {
var changed = false
while (iter.hasNext() && !changed) {
if (iter.next() === o) {
iter.remove()
changed = true
}
}
changed
}
def removeFirstOccurrence(o: Any): Boolean =
_removeOccurrence(iterator(), o)
def removeLastOccurrence(o: Any): Boolean =
_removeOccurrence(descendingIterator(), o)
override def listIterator(index: Int): ListIterator[E] = {
checkIndexOnBounds(index)
new ListIterator[E] {
private var last: Double = -1
private var i: Double = index
private var currentNode: Node[E] =
if (index == size) null else
getNodeAt(index)
private var lastNode: Node[E] =
if (currentNode ne null) null else
LinkedList.this.last
def hasNext(): Boolean =
i < size
def next(): E = {
if (i >= size)
throw new NoSuchElementException()
last = i
i += 1
lastNode = currentNode
currentNode = currentNode.next
lastNode.value
}
def hasPrevious(): Boolean =
i > 0
def previous(): E = {
if (!hasPrevious)
throw new NoSuchElementException()
i -= 1
last = i
if (currentNode eq null)
currentNode = LinkedList.this.last
else
currentNode = currentNode.prev
lastNode = currentNode
lastNode.value
}
def nextIndex(): Int = i.toInt
def previousIndex(): Int = (i - 1).toInt
def remove(): Unit = {
checkThatHasLast()
if (currentNode eq null) {
removeLast()
lastNode = LinkedList.this.last
} else {
removeNode(lastNode)
}
if (last < i) {
i -= 1
}
last = -1
}
def set(e: E): Unit = {
checkThatHasLast()
lastNode.value = e
}
def add(e: E): Unit = {
if (currentNode eq null) {
addLast(e)
lastNode = LinkedList.this.last
} else {
addNode(currentNode, e)
}
i += 1
last = -1
}
private def checkThatHasLast(): Unit = {
if (last == -1)
throw new IllegalStateException()
}
}
}
def descendingIterator(): Iterator[E] = {
new Iterator[E] {
private var removeEnabled = false
private var nextNode: Node[E] =
LinkedList.this.last
def hasNext(): Boolean =
nextNode ne null
def next(): E = {
if (!hasNext())
throw new NoSuchElementException()
removeEnabled = true
val ret = nextNode
nextNode = nextNode.prev
ret.value
}
def remove(): Unit = {
if (!removeEnabled)
throw new IllegalStateException()
removeEnabled = false
if (nextNode eq null)
removeFirst()
else
removeNode(nextNode.next)
}
}
}
override def clone(): AnyRef =
new LinkedList[E](this)
}
object LinkedList {
protected[LinkedList] final class Node[T](
var value: T,
var prev: Node[T] = null,
var next: Node[T] = null)
}
|
nicolasstucki/scala-js
|
javalib/src/main/scala/java/util/LinkedList.scala
|
Scala
|
apache-2.0
| 8,029 |
package net.manub.embeddedkafka.schemaregistry.streams
import net.manub.embeddedkafka.UUIDs
import net.manub.embeddedkafka.schemaregistry.EmbeddedKafkaWithSchemaRegistry.consumerConfigForSchemaRegistry
import net.manub.embeddedkafka.schemaregistry.{
EmbeddedKafkaConfigWithSchemaRegistry,
EmbeddedKafkaWithSchemaRegistry
}
import net.manub.embeddedkafka.streams.TestStreamsConfig
import org.apache.kafka.streams.{KafkaStreams, Topology}
// TODO: need to find a better way of not duplicating this code from the kafka-streams module
/** Helper trait for testing Kafka Streams.
* It creates an embedded Kafka instance for each test case.
* Use `runStreams` to execute your streams.
*/
trait EmbeddedKafkaStreamsWithSchemaRegistry
extends EmbeddedKafkaWithSchemaRegistry
with TestStreamsConfig {
/** Execute Kafka streams and pass a block of code that can
* operate while the streams are active.
* The code block can be used for publishing and consuming messages in Kafka.
*
* @param topicsToCreate the topics that should be created in Kafka before launching the streams.
* @param topology the streams topology that will be used to instantiate the streams with
* a default configuration (all state directories are different and
* in temp folders)
* @param extraConfig additional KafkaStreams configuration (overwrites existing keys in the
* default config)
* @param block the code block that will be executed while the streams are active.
* Once the block has been executed the streams will be closed.
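* @example A minimal usage sketch; `myTopology` and the topic names below are illustrative
* placeholders, and an implicit EmbeddedKafkaConfigWithSchemaRegistry is assumed to be in scope:
* {{{
* runStreams(Seq("in-topic", "out-topic"), myTopology) {
*   // publish to "in-topic" and consume from "out-topic" while the streams are running
* }
* }}}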
*/
def runStreams[T](topicsToCreate: Seq[String],
topology: Topology,
extraConfig: Map[String, AnyRef] = Map.empty)(block: => T)(
implicit config: EmbeddedKafkaConfigWithSchemaRegistry): T =
withRunningKafka {
topicsToCreate.foreach(topic => createCustomTopic(topic))
val streamId = UUIDs.newUuid().toString
val streams =
new KafkaStreams(
topology,
streamConfig(streamId,
extraConfig ++ consumerConfigForSchemaRegistry))
streams.start()
try {
block
} finally {
streams.close()
}
}(config)
}
|
manub/scalatest-embedded-kafka
|
schema-registry/src/main/scala/net.manub.embeddedkafka/schemaregistry/streams/EmbeddedKafkaStreamsWithSchemaRegistry.scala
|
Scala
|
mit
| 2,325 |
package dpla.ingestion3.mappers.providers
import dpla.ingestion3.mappers.utils._
class SdMapping extends MdlMapping with JsonMapping with JsonExtractor {
// ID minting functions
override def useProviderName: Boolean = true
override def getProviderName: Option[String] = Some("sd")
}
|
dpla/ingestion3
|
src/main/scala/dpla/ingestion3/mappers/providers/SdMapping.scala
|
Scala
|
mit
| 293 |
/*
* Copyright 2017 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Created by yegor on 10/3/16. */
package ws.epigraph.java
import ws.epigraph.compiler.{CDataType, CListType, CTypeRef, CEntityTypeDef}
import ws.epigraph.java.JavaGenNames.{jn, lqn, pn, pnq2, tt}
import ws.epigraph.java.NewlineStringInterpolator.NewlineHelper
abstract class ListGen[Type >: Null <: CListType](from: Type, ctx: GenContext) extends JavaTypeGen[Type](from, ctx)
with DatumTypeJavaGen {
/** element value type */
protected val ev: CDataType = t.elementDataType
/** element type ref */
protected val etr: CTypeRef = ev.typeRef
/** element type */
protected val et: etr.Type = etr.resolved
protected def genTypeClass(ogc: ObjectGenContext):String
override def generate: String = {
val ogc = new ObjectGenContext(ctx, pnq2(t), true)
val typeClass = genTypeClass(ogc)
/*@formatter:off*/sn"""\\
${JavaGenUtils.topLevelComment}\\
package ${pn(t)};
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
${ObjectGenUtils.genImports(ogc)}\\
/**
* Base (read) interface for `${t.name.name}` datum.
*/
${JavaGenUtils.generatedAnnotation(this)}
public interface $ln extends${JavaGenUtils.withParents(t)} ws.epigraph.data.ListDatum.Static {
$ln.Type type = $ln.Type.instance();
static @NotNull $ln.Builder create() { return $ln.Type.instance().createBuilder(); }
@Override
@NotNull $ln.Imm toImmutable();
${t.effectiveDefaultElementTagName match { // default element tag (if defined) views
case None => ""
case Some(dtn) => sn"""\\
${"/**"}
* Returns list view of element default tag datums. Elements where the tag datum is not set will be `null`.
*/
@NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}> datums();
${"/**"}
* Returns list view of element default tag values. Elements where the tag value is not set will be `null`.
*/
@NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}.Value> values();
"""
}
}\\
${et match { // element tags views (for vartypes)
case evt: CEntityTypeDef => sn"""\\
${"/**"}
* Returns list view of element data.
*/
@NotNull java.util.List<${NotNull_}? extends ${lqn(et, t)}> datas();
${
evt.effectiveTags.map { tag => sn"""\\
//
// /**
// * Returns list view of `${tag.name}` tag datums. Elements where the tag value is not set will be `null`.
// */
// @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}> ${jn(tag.name + "Datums")}();
//
// /**
// * Returns list view of `${tag.name}` tag values. Elements where the tag value is not set will be `null`.
// */
// @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}.Value> ${jn(tag.name + "Values")}();
"""
}.mkString
}\\
"""
case _ => ""
}
}\\
${t.meta match {
case Some(mt) => sn"""\\
${"/**"}
* @return meta-data instance
*/
@Nullable ${lqn(mt, t)} meta();
"""
case None => ""
}
}\\
/**
* Class for `${t.name.name}` type.
*/
$typeClass\\
/**
* Builder for `${t.name.name}` datum.
*/
final class Builder extends ws.epigraph.data.ListDatum.Builder.Static<$ln.Imm, $ln.Value.Builder> implements $ln {
Builder(@NotNull ws.epigraph.data.ListDatum.Builder.Raw raw) {
super($ln.Type.instance(), raw, $ln.Imm.Impl::new, $ln.Value.Builder::new);
}
${t.effectiveDefaultElementTagName match { // default element tag (if defined) views
case None => ""
case Some(dtn) =>
def genPrimitiveAdd(nativeType: String): String =
sn"""\\
${"/**"} Adds${vt(et, s" default `$dtn` tag", "")} datum element to the list. */
public @NotNull $ln.Builder add(@Nullable $nativeType datum) {
datas().add(${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}(
datum == null ? null : ${lqn(tt(etr, dtn), t)}.create(datum)
));
return this;
}
"""
def genNonPrimitiveAdd: String =
sn"""\\
${"/**"} Adds${vt(et, s" default `$dtn` tag", "")} datum element to the list. */
public @NotNull $ln.Builder add(@Nullable ${lqn(tt(etr, dtn), t)} datum) {
datas().add(${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}(datum));
return this;
}
"""
val add = JavaGenUtils.builtInPrimitives
.get(etr.resolved.name.name)
.map(genPrimitiveAdd)
.getOrElse(genNonPrimitiveAdd)
sn"""\\
${"/**"} Returns modifiable list view of default `$dtn` tag element datums. Elements where the tag datum is not set will be `null`. */
@Override
public @NotNull java.util.List<${lqn(Nullable_, tt(etr, dtn), t)}> datums() {
return new ws.epigraph.util.ListView<>(
datas(),
${lqn(et, t)}${vt(et, "", ".Data")}::get${vt(et, up(dtn), "")},
v -> ${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}(v)
);
}
${"/**"} Returns list view of element default tag value builders. Elements where the tag value is not set will be `null`. */
@Override
public @NotNull java.util.List<${lqn(tt(etr, dtn), t)}.${Nullable_}Value> values() {
return new ws.epigraph.util.ListView<>(
datas(),
${lqn(et, t)}${vt(et, "", ".Data")}::get${vt(et, up(dtn), "")}_,
v -> ${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}_(v)
);
}
$add\\
${"/**"} Adds${vt(et, s" default `$dtn` tag", "")} error element to the list. */
public @NotNull $ln.Builder addError(@NotNull ws.epigraph.errors.ErrorValue error) {
datas().add(${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}_Error(error));
return this;
}
${"/**"} Adds${vt(et, s" default `$dtn` tag", "")} value element to the list. */
public @NotNull $ln.Builder add_(@Nullable ${lqn(tt(etr, dtn), t)}.Value value) {
datas().add(${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(dtn), "")}_(value));
return this;
}
"""
}
}\\
${et match { // data view (for vartypes)
case evt: CEntityTypeDef => sn"""\\
${"/**"} Returns modifiable list view of element data builders. */
@Override
public @NotNull java.util.List<${lqn(NotNull_, et, t)}> datas() {
return ws.epigraph.util.Util.cast(_raw().elements());
}
${"/**"} Adds data element to the list. */
public @NotNull $ln.Builder add(@NotNull ${lqn(et, t)} data) {
datas().add(data);
return this;
}
${
evt.effectiveTags.map { tag => sn"""\\
//
// /**
// * Returns modifiable list view of elements `${tag.name}` tag datums. Elements where the tag value is not set will be `null`.
// */
// public @NotNull java.util.List<${lqn("@Nullable ", tt(etr, tag.name), t)}> ${jn(tag.name + "Datums")}() {
// return new ws.epigraph.util.ListView<>(
// datas(),
// ${lqn(et, t)}${vt(et, "", ".Data")}::get${vt(et, up(tag.name), "")},
// v -> ${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(tag.name), "")}(v)
// );
// }
//
// /**
// * Returns modifiable list view of elements `${tag.name}` tag values. Elements where the tag value is not set will be `null`.
// */
// public @NotNull java.util.List<${lqn(tt(etr, tag.name), t)}.${Nullable_}Value> ${jn(tag.name + "Values")}() {
// return new ws.epigraph.util.ListView<>(
// datas(),
// ${lqn(et, t)}${vt(et, "", ".Data")}::get${vt(et, up(tag.name), "")}_,
// v -> ${lqn(et, t)}.Type.instance().createDataBuilder().set${vt(et, up(tag.name), "")}(v)
// );
// }
"""
}.mkString
}\\
"""
case _ => sn"""\\
// method is private to not expose datas() for non-union types (so simple type can be replaced with union type while preserving backwards-compatibility)
private @NotNull java.util.List<${lqn(et, t)}.${NotNull_}Data> datas() {
return ws.epigraph.util.Util.cast(_raw().elements());
}
"""
}
}\\
${t.meta match {
case Some(mt) => sn"""\\
${"/**"}
* @return meta-data instance
*/
@Override
public @Nullable ${lqn(mt, t)} meta() {
return (${lqn(mt, t)}) _raw().meta();
}
${"/**"}
* Sets meta-data value
*
* @param meta new meta-data value
*
* @return {@code this}
*/
public @NotNull $ln.Builder setMeta(@Nullable ${lqn(mt,t)} meta) {
_raw().setMeta(meta);
return this;
}
"""
case None => ""
}
}\\
}
/**
* Immutable interface for `${t.name.name}` datum.
*/
interface Imm extends $ln,${withParents(".Imm")} ws.epigraph.data.ListDatum.Imm.Static {
${t.effectiveDefaultElementTagName match { // default element tag (if defined) views
case None => ""
case Some(dtn) => sn"""\\
${"/**"}
* Returns immutable list view of element default tag datums. Elements where the tag datum is not set will be `null`.
*/
@Override
@NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}.Imm> datums();
${"/**"}
* Returns immutable list view of element default tag values. Elements where the tag value is not set will be `null`.
*/
@Override
@NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}.Value.Imm> values();
"""
}
}\\
${et match { // element tags (for vartypes)
case evt: CEntityTypeDef => sn"""\\
${"/**"}
* Returns immutable list view of elements data.
*/
@NotNull java.util.List<${NotNull_}? extends ${lqn(et, t)}.Imm> datas();
${
evt.effectiveTags.map { tag => sn"""\\
//
// /**
// * Returns immutable list view of `${tag.name}` tag datums. Elements where the tag value is not set will be `null`.
// */
// @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}.Imm> ${jn(tag.name + "Datums")}();
//
// /**
// * Returns immutable list view of `${tag.name}` tag values. Elements where the tag value is not set will be `null`.
// */
// @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}.Value.Imm> ${jn(tag.name + "Values")}();
"""
}.mkString
}\\
"""
case _ => ""
}
}\\
${t.meta match {
case Some(mt) => sn"""\\
${"/**"}
* @return meta-data instance
*/
@Override
@Nullable ${lqn(mt, t)}.Imm meta();
"""
case None => ""
}
}\\
/** Private implementation of `$ln.Imm` interface. */
final class Impl extends ws.epigraph.data.ListDatum.Imm.Static.Impl<$ln.Imm, $ln.Value.Imm> implements $ln.Imm {
Impl(@NotNull ws.epigraph.data.ListDatum.Imm.Raw raw) { super($ln.Type.instance(), raw, $ln.Value.Imm.Impl::new); }
${t.effectiveDefaultElementTagName match { // default element tag (if defined) views
case None => ""
case Some(dtn) => sn"""\\
${"/**"}
* Returns immutable list view of element default tag datums. Elements where the tag datum is not set will be `null`.
*/
@Override
public @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}.Imm> datums() {
return new ws.epigraph.util.Unmodifiable.ListView<${lqn(et, t)}${vt(et, "", ".Data")}.Imm, ${lqn(tt(etr, dtn), t)}.Imm>(
datas(),
${lqn(et, t)}${vt(et, "", ".Data")}.Imm::get${vt(et, up(dtn), "")}
);
}
${"/**"}
* Returns immutable list view of element default tag values. Elements where the tag value is not set will be `null`.
*/
@Override
public @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, dtn), t)}.Value.Imm> values() {
return new ws.epigraph.util.Unmodifiable.ListView<${lqn(et, t)}${vt(et, "", ".Data")}.Imm, ${lqn(tt(etr, dtn), t)}.Value.Imm>(
datas(),
${lqn(et, t)}${vt(et, "", ".Data")}.Imm::get${vt(et, up(dtn), "")}_
);
}
"""
}
}\\
${et match { // element tags (for vartypes)
case evt: CEntityTypeDef => sn"""\\
${"/**"}
* Returns immutable list view of elements data.
*/
@Override
public @NotNull java.util.List<${NotNull_}? extends ${lqn(et, t)}.Imm> datas() {
return ws.epigraph.util.Util.castEx(_raw().elements());
}
${
evt.effectiveTags.map { tag => sn"""\\
//
// /**
// * Returns immutable list view of `${tag.name}` tag datums. Elements where the tag value is not set will be `null`.
// */
// public @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}.Imm> ${jn(tag.name + "Datums")}() {
// return new ws.epigraph.util.Unmodifiable.ListView<${lqn(et, t)}${vt(et, "", ".Data")}.Imm, ${lqn(tt(etr, tag.name), t)}.Imm>(
// datas(),
// ${lqn(et, t)}${vt(et, "", ".Data")}.Imm::get${vt(et, up(tag.name), "")}
// );
// }
//
// /**
// * Returns immutable list view of `${tag.name}` tag values. Elements where the tag value is not set will be `null`.
// */
// public @NotNull java.util.List<${Nullable_}? extends ${lqn(tt(etr, tag.name), t)}.Value.Imm> ${jn(tag.name + "Values")}() {
// return new ws.epigraph.util.Unmodifiable.ListView<${lqn(et, t)}${vt(et, "", ".Data")}.Imm, ${lqn(tt(etr, tag.name), t)}.Value.Imm>(
// datas(),
// ${lqn(et, t)}${vt(et, "", ".Data")}.Imm::get${vt(et, up(tag.name), "")}_
// );
// }
"""
}.mkString
}\\
"""
case _ => sn"""\\
// method is private to not expose datas() for non-union types (so simple type can be replaced with union type while preserving backwards-compatibility)
private @NotNull java.util.List<${NotNull_}? extends ${lqn(et, t)}.Data.Imm> datas() {
return ws.epigraph.util.Util.castEx(_raw().elements());
}
"""
}
}\\
${t.meta match {
case Some(mt) => sn"""\\
${"/**"}
* @return meta-data instance
*/
@Override
public @Nullable ${lqn(mt, t)}.Imm meta() {
return (${lqn(mt, t)}.Imm) _raw().meta();
}
"""
case None => ""
}
}\\
}
}
$datumValue\\
$datumData\\
}
"""/*@formatter:on*/
}
}
|
SumoLogic/epigraph
|
java/codegen/src/main/scala/ws/epigraph/java/ListGen.scala
|
Scala
|
apache-2.0
| 14,612 |
package com.kashoo.ws
import play.api.{Configuration, Logger}
import scala.concurrent.ExecutionContext
/**
* Associates a rate limit with a request matcher for applying to outgoing client requests.
*/
case class RequestRateLimit(rate: Rate, requestMatcher: RequestMatcher)(implicit val ec: ExecutionContext) {
val rateLimit: RateLimit = RateLimit(rate)(ec)
}
object RequestRateLimit {
val logger: Logger = Logger("request-rate-limit")
def apply(rateConfig: Configuration, requestLimitConfig: Configuration)
(implicit ec: ExecutionContext): RequestRateLimit = {
val rateName = requestLimitConfig.getOptional[String]("rate").getOrElse(throw new IllegalStateException("Rate is required for a request limit configuration"))
val rate = Rate(rateConfig, rateName)
val reqMatcher = RequestMatcher(requestLimitConfig)
logger.trace(s"Enabling client request rate limit against $reqMatcher with $rate, using $ec")
RequestRateLimit(rate, reqMatcher)(ec)
}
}
|
Kashoo/ws-limited
|
app/com/kashoo/ws/RequestRateLimit.scala
|
Scala
|
mit
| 995 |
package com.jsuereth.pgp
import org.specs2.mutable._
import sbt.IO
import java.io.File
class KeyGenSpec extends Specification {
PGP.init
val user = "Test User <[email protected]>"
val pw = "test-pw".toCharArray
val (pub,sec) = PGP.makeNewKeyRings(user,pw)
"Secret Key Ring" should {
"serialize and deserialze ring from file" in {
IO withTemporaryDirectory { dir =>
val secFile = new File(dir, "secring.pgp")
sec.saveToFile(secFile)
val deserialized = SecretKeyRing.loadFromFile(secFile)
deserialized must not(beNull)
def keyIds(ring: SecretKeyRing) = ring.secretKeys.map(_.keyID).toSet
keyIds(deserialized) must equalTo(keyIds(sec))
}
}
"encode and decode a message" in {
val message = "Hello from me"
val encrypted = sec.publicKey.encryptString(message)
val decrypted = sec.secretKey.decryptString(encrypted, pw)
decrypted must equalTo(message)
}
// TODO - This is failing
"sign and verify a string" in {
val message = "Hello from me"
val signature = sec.secretKey.signString(message, pw)
pub.verifySignatureString(message, signature) must beTrue
}
"sign and verify a message" in {
val message = "Hello from me"
val signedMessage = sec.secretKey.signMessageString(message, "test-message", pw)
pub.verifyMessageString(signedMessage) must equalTo(message)
}
"give nice error on invalid password" in {
sec.secretKey.signString("test", Array()) must throwAn[IncorrectPassphraseException]
}
// TODO - Handle unicode characters in passwords
}
}
|
voetha/sbt-pgp
|
gpg-library/src/test/scala/com/jsuereth/pgp/SecretKeyRingSpec.scala
|
Scala
|
bsd-3-clause
| 1,650 |
package com.wavesplatform.state.reader
import com.wavesplatform.db.WithState
import com.wavesplatform.features.BlockchainFeatures._
import com.wavesplatform.lagonaki.mocks.TestBlock.{create => block}
import com.wavesplatform.settings.TestFunctionalitySettings.Enabled
import com.wavesplatform.state.LeaseBalance
import com.wavesplatform.state.diffs._
import com.wavesplatform.test.PropSpec
import com.wavesplatform.transaction.TxHelpers
class StateReaderEffectiveBalancePropertyTest extends PropSpec with WithState {
property("No-interactions genesis account's effectiveBalance doesn't depend on depths") {
val master = TxHelpers.signer(1)
val genesis = TxHelpers.genesis(master.toAddress)
val emptyBlocksAmt = 10
val confirmations = 20
val genesisBlock = block(Seq(genesis))
val nextBlocks = List.fill(emptyBlocksAmt - 1)(block(Seq.empty))
assertDiffAndState(genesisBlock +: nextBlocks, block(Seq.empty)) { (_, newState) =>
newState.effectiveBalance(genesis.recipient, confirmations) shouldBe genesis.amount
}
}
property("Negative generating balance case") {
val fs = Enabled.copy(preActivatedFeatures = Map(SmartAccounts.id -> 0, SmartAccountTrading.id -> 0))
val Fee = 100000
val setup = {
val master = TxHelpers.signer(1)
val leaser = TxHelpers.signer(2)
val genesis = TxHelpers.genesis(master.toAddress)
val xfer1 = TxHelpers.transfer(master, leaser.toAddress, ENOUGH_AMT / 3)
val lease1 = TxHelpers.lease(leaser, master.toAddress, xfer1.amount - Fee, fee = Fee)
val xfer2 = TxHelpers.transfer(master, leaser.toAddress, ENOUGH_AMT / 3)
val lease2 = TxHelpers.lease(leaser, master.toAddress, xfer2.amount - Fee, fee = Fee)
(leaser, genesis, xfer1, lease1, xfer2, lease2)
}
val (leaser, genesis, xfer1, lease1, xfer2, lease2) = setup
assertDiffAndState(Seq(block(Seq(genesis)), block(Seq(xfer1, lease1))), block(Seq(xfer2, lease2)), fs) { (_, state) =>
val portfolio = state.wavesPortfolio(lease1.sender.toAddress)
val expectedBalance = xfer1.amount + xfer2.amount - 2 * Fee
portfolio.balance shouldBe expectedBalance
state.generatingBalance(leaser.toAddress, state.lastBlockId) shouldBe 0
portfolio.lease shouldBe LeaseBalance(0, expectedBalance)
portfolio.effectiveBalance shouldBe 0
}
}
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/state/reader/StateReaderEffectiveBalancePropertyTest.scala
|
Scala
|
mit
| 2,372 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.cli.test
import akka.http.scaladsl.model.StatusCodes.OK
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import spray.json.JsObject
import common.rest.WskRest
import common.rest.RestResult
import common.TestUtils.RunResult
/**
* Tests for the CLI "api" subcommand. Most of these tests require a deployed backend.
*/
@RunWith(classOf[JUnitRunner])
class ApiGwRestTests extends ApiGwTests {
override lazy val wsk = new WskRest
override lazy val createCode = OK.intValue
override def verifyBadCommands(rr: RunResult, badpath: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val error = RestResult.getField(apiResultRest.respBody, "error")
error should include("Error: Resource path must begin with '/'.")
}
override def verifyBadCommandsDelete(rr: RunResult, badpath: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val error = RestResult.getField(apiResultRest.respBody, "error")
error should include(s"API deletion failure: API '/basepath' does not exist")
}
override def verifyBadCommandsList(rr: RunResult, badpath: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apis = apiResultRest.getFieldListJsObject("apis")
apis.size shouldBe 0
}
override def verifyInvalidCommands(rr: RunResult, badverb: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val error = apiResultRest.getField("error")
error should include(s"Error: Resource verb '${badverb}' not supported")
}
override def verifyInvalidCommandsDelete(rr: RunResult, badverb: String): Unit = {
verifyBadCommandsDelete(rr, badverb)
}
override def verifyInvalidCommandsList(rr: RunResult, badverb: String): Unit = {
verifyBadCommandsList(rr, badverb)
}
override def verifyNonJsonSwagger(rr: RunResult, filename: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val error = apiResultRest.getField("error")
error should include(s"swagger field cannot be parsed. Ensure it is valid JSON")
}
override def verifyMissingField(rr: RunResult): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val error = apiResultRest.getField("error")
error should include(s"swagger is missing the basePath field.")
}
override def verifyApiCreated(rr: RunResult): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
apiResultRest.statusCode shouldBe OK
}
def verifyList(rr: RunResult,
namespace: String,
actionName: String,
testurlop: String,
testbasepath: String,
testrelpath: String,
testapiname: String,
newEndpoint: String = ""): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apiValue = RestResult.getFieldJsObject(apiResultRest.getFieldListJsObject("apis")(0), "value")
val apidoc = RestResult.getFieldJsObject(apiValue, "apidoc")
val basepath = RestResult.getField(apidoc, "basePath")
basepath shouldBe testbasepath
val paths = RestResult.getFieldJsObject(apidoc, "paths")
paths.fields.contains(testrelpath) shouldBe true
val info = RestResult.getFieldJsObject(apidoc, "info")
val title = RestResult.getField(info, "title")
title shouldBe testapiname
verifyPaths(paths, testrelpath, testurlop, actionName, namespace)
if (newEndpoint != "") {
verifyPaths(paths, newEndpoint, testurlop, actionName, namespace)
}
}
def verifyPaths(paths: JsObject,
testrelpath: String,
testurlop: String,
actionName: String,
namespace: String = "") = {
val relpath = RestResult.getFieldJsObject(paths, testrelpath)
val urlop = RestResult.getFieldJsObject(relpath, testurlop)
val openwhisk = RestResult.getFieldJsObject(urlop, "x-openwhisk")
val actionN = RestResult.getField(openwhisk, "action")
actionN shouldBe actionName
if (namespace != "") {
val namespaceS = RestResult.getField(openwhisk, "namespace")
namespaceS shouldBe namespace
}
}
override def verifyApiList(rr: RunResult,
clinamespace: String,
actionName: String,
testurlop: String,
testbasepath: String,
testrelpath: String,
testapiname: String): Unit = {
verifyList(rr, clinamespace, actionName, testurlop, testbasepath, testrelpath, testapiname)
}
override def verifyApiGet(rr: RunResult): Unit = {
rr.stdout should include regex (s""""operationId":"getPathWithSub_pathsInIt"""")
}
override def verifyApiFullList(rr: RunResult,
clinamespace: String,
actionName: String,
testurlop: String,
testbasepath: String,
testrelpath: String,
testapiname: String): Unit = {
verifyList(rr, clinamespace, actionName, testurlop, testbasepath, testrelpath, testapiname)
}
override def verifyApiFullListDouble(rr: RunResult,
clinamespace: String,
actionName: String,
testurlop: String,
testbasepath: String,
testrelpath: String,
testapiname: String,
newEndpoint: String): Unit = {
verifyList(rr, clinamespace, actionName, testurlop, testbasepath, testrelpath, testapiname, newEndpoint)
}
override def verifyApiDeleted(rr: RunResult): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
apiResultRest.statusCode shouldBe OK
}
override def verifyApiDeletedRelpath(rr: RunResult,
testrelpath: String,
testbasepath: String,
op: String = ""): Unit = {
verifyApiDeleted(rr)
}
override def verifyApiNameGet(rr: RunResult,
testbasepath: String,
actionName: String,
responseType: String = "json"): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apiValue = RestResult.getFieldJsObject(apiResultRest.getFieldListJsObject("apis")(0), "value")
val apidoc = RestResult.getFieldJsObject(apiValue, "apidoc")
val config = RestResult.getFieldJsObject(apidoc, "x-ibm-configuration")
val cors = RestResult.getFieldJsObject(config, "cors")
val enabled = RestResult.getFieldJsValue(cors, "enabled").toString()
enabled shouldBe "true"
val basepath = RestResult.getField(apidoc, "basePath")
basepath shouldBe testbasepath
val paths = RestResult.getFieldJsObject(apidoc, "paths")
val relpath = RestResult.getFieldJsObject(paths, "/path")
val urlop = RestResult.getFieldJsObject(relpath, "get")
val openwhisk = RestResult.getFieldJsObject(urlop, "x-openwhisk")
val actionN = RestResult.getField(openwhisk, "action")
actionN shouldBe actionName
rr.stdout should include regex (s""""target-url":".*${actionName}.${responseType}"""")
}
override def verifyInvalidSwagger(rr: RunResult): Unit = {
verifyMissingField(rr)
}
override def verifyApiOp(rr: RunResult, testurlop: String, testapiname: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apiValue = RestResult.getFieldJsObject(apiResultRest.getFieldListJsObject("apis")(0), "value")
val apidoc = RestResult.getFieldJsObject(apiValue, "apidoc")
val info = RestResult.getFieldJsObject(apidoc, "info")
val title = RestResult.getField(info, "title")
title shouldBe testapiname
val paths = RestResult.getFieldJsObject(apidoc, "paths")
val relpath = RestResult.getFieldJsObject(paths, "/")
val urlop = RestResult.getFieldJsObject(relpath, testurlop)
relpath.fields.contains(testurlop) shouldBe true
}
override def verifyApiBaseRelPath(rr: RunResult, testbasepath: String, testrelpath: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apiValue = RestResult.getFieldJsObject(apiResultRest.getFieldListJsObject("apis")(0), "value")
val apidoc = RestResult.getFieldJsObject(apiValue, "apidoc")
val basepath = RestResult.getField(apidoc, "basePath")
basepath shouldBe testbasepath
val paths = RestResult.getFieldJsObject(apidoc, "paths")
paths.fields.contains(testrelpath) shouldBe true
}
override def verifyApiOpVerb(rr: RunResult, testurlop: String): Unit = {
val apiResultRest = rr.asInstanceOf[RestResult]
val apiValue = RestResult.getFieldJsObject(apiResultRest.getFieldListJsObject("apis")(0), "value")
val apidoc = RestResult.getFieldJsObject(apiValue, "apidoc")
val paths = RestResult.getFieldJsObject(apidoc, "paths")
val relpath = RestResult.getFieldJsObject(paths, "/")
val urlop = RestResult.getFieldJsObject(relpath, testurlop)
relpath.fields.contains(testurlop) shouldBe true
}
override def verifyInvalidKey(rr: RunResult): Unit = {
rr.stderr should include("A valid auth key is required")
}
}
|
duynguyen/incubator-openwhisk
|
tests/src/test/scala/whisk/core/cli/test/ApiGwRestTests.scala
|
Scala
|
apache-2.0
| 10,287 |
package com.edinhodzic.service.repository
import com.edinhodzic.service.Paginated
import scala.util.Try
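/** A repository capable of executing a free-form query string, returning a page of matching results. */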
trait Queryable[T] {
def query(queryString: String): Try[Paginated[T]]
}
|
edinhodzic/jersey-rest-service
|
src/main/scala/com/edinhodzic/service/repository/Queryable.scala
|
Scala
|
apache-2.0
| 184 |
package test {
class Test {
def foo(): Unit = {
val r: Seq[Int] = Seq.empty
case class TestClass(polka: Int, kolka: String)
val q: TestClass = null
q.polk/*caret*/
}
}
}
/*
polka
*/
|
ilinum/intellij-scala
|
testdata/completion/Basic/LocalClassInPackage.scala
|
Scala
|
apache-2.0
| 222 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.profiler
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.slf4j.LoggerFactory
import java.io.File
import org.apache.mxnet.Profiler
import org.apache.mxnet.Context
/**
* Integration test for profiler example.
*/
class ProfilerSuite extends FunSuite with BeforeAndAfterAll {
private val logger = LoggerFactory.getLogger(classOf[ProfilerSuite])
override def beforeAll(): Unit = {
logger.info("Running profiler test...")
val eray = new ProfilerNDArray
val path = System.getProperty("java.io.tmpdir")
val kwargs = Map("file_name" -> path)
logger.info(s"profile file save to $path")
Profiler.profilerSetState("run")
}
override def afterAll(): Unit = {
Profiler.profilerSetState("stop")
}
test("Profiler Broadcast test") {
ProfilerNDArray.testBroadcast()
}
test("Profiler NDArray Saveload test") {
ProfilerNDArray.testNDArraySaveload()
}
test("Profiler NDArray Copy") {
ProfilerNDArray.testNDArrayCopy()
}
test("Profiler NDArray Negate") {
ProfilerNDArray.testNDArrayNegate()
}
test("Profiler NDArray Scalar") {
ProfilerNDArray.testNDArrayScalar()
}
test("Profiler NDArray Onehot") {
ProfilerNDArray.testNDArrayOnehot()
}
test("Profiler Clip") {
ProfilerNDArray.testClip()
}
test("Profiler Dot") {
ProfilerNDArray.testDot()
}
}
|
zhreshold/mxnet
|
scala-package/examples/src/test/scala/org/apache/mxnetexamples/profiler/ProfilerSuite.scala
|
Scala
|
apache-2.0
| 2,189 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical
import java.util.concurrent.TimeUnit
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.types.MetadataBuilder
import org.apache.spark.unsafe.types.CalendarInterval
object EventTimeWatermark {
/** The [[org.apache.spark.sql.types.Metadata]] key used to hold the eventTime watermark delay. */
val delayKey = "spark.watermarkDelayMs"
def getDelayMs(delay: CalendarInterval): Long = {
// We define month as `31 days` to simplify calculation.
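// For example (illustrative): a one-month delay therefore contributes
// 31 * 86400000 = 2678400000 milliseconds, in addition to delay.milliseconds.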
val millisPerMonth = TimeUnit.MICROSECONDS.toMillis(CalendarInterval.MICROS_PER_DAY) * 31
delay.milliseconds + delay.months * millisPerMonth
}
}
/**
* Used to mark a user-specified column as holding the event time for a row.
*/
case class EventTimeWatermark(
eventTime: Attribute,
delay: CalendarInterval,
child: LogicalPlan) extends UnaryNode {
// Update the metadata on the eventTime column to include the desired delay.
override val output: Seq[Attribute] = child.output.map { a =>
if (a semanticEquals eventTime) {
val delayMs = EventTimeWatermark.getDelayMs(delay)
val updatedMetadata = new MetadataBuilder()
.withMetadata(a.metadata)
.putLong(EventTimeWatermark.delayKey, delayMs)
.build()
a.withMetadata(updatedMetadata)
} else if (a.metadata.contains(EventTimeWatermark.delayKey)) {
// Remove existing watermark
val updatedMetadata = new MetadataBuilder()
.withMetadata(a.metadata)
.remove(EventTimeWatermark.delayKey)
.build()
a.withMetadata(updatedMetadata)
} else {
a
}
}
}
|
pgandhi999/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/EventTimeWatermark.scala
|
Scala
|
apache-2.0
| 2,465 |
package x
import a._
class f
class g( ) extends k {
def <caret>foo( ) = return true
}
/*
*/
|
JetBrains/intellij-scala
|
scala/scala-impl/testdata/keywordCompletion/generatedTests/autoTest_66.scala
|
Scala
|
apache-2.0
| 104 |
package gapt.formats.tip
package object util {
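/**
 * Splits `elements` around the first element satisfying `p`, returning
 * (prefix, matching element, suffix), or None if no element matches.
 * For example, find(Seq(1, 2, 3, 4), (x: Int) => x % 2 == 0) == Some((Seq(1), 2, Seq(3, 4))).
 */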
object find {
def apply[T](
elements: Seq[T], p: ( T ) => Boolean ): Option[( Seq[T], T, Seq[T] )] = {
val index = elements.indexWhere( p )
if ( index == -1 ) {
None
} else {
Some( (
elements.take( index ),
elements( index ),
elements.drop( index + 1 ) ) )
}
}
}
}
|
gapt/gapt
|
core/src/main/scala/gapt/formats/tip/util/package.scala
|
Scala
|
gpl-3.0
| 403 |
package com.mostlyharmlesscode.unpredicablefuture.library
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
class StringOpsFuture {
slow: SlowExecutionComponent =>
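  // Self-type annotation: concrete instances must also mix in SlowExecutionComponent, which is
  // assumed to supply the doSomethingPotentiallySlow() used by the methods below.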
def upperCase(data: String): Future[String] = Future {
val delay = doSomethingPotentiallySlow()
Performance.wasItFastEnough(delay, "upperCase", data)
data.toUpperCase
}
def reverse(data: String): Future[String] = Future {
val delay = doSomethingPotentiallySlow()
Performance.wasItFastEnough(delay, "reverse", data)
data.reverse
}
def double(data: String): Future[String] = Future {
val delay = doSomethingPotentiallySlow()
Performance.wasItFastEnough(delay, "double", data)
data * 2
}
}
|
llaakes/UnpredictableFuture
|
src/main/scala/com/mostlyharmlesscode/unpredicablefuture/library/StringOpsFuture.scala
|
Scala
|
mit
| 745 |
package dotty.tools.dotc.typer
import collection.mutable
case class Mode(val bits: Int) extends AnyVal {
import Mode._
def | (that: Mode) = Mode(bits | that.bits)
def & (that: Mode) = Mode(bits & that.bits)
def &~ (that: Mode) = Mode(bits & ~that.bits)
def is (that: Mode) = (bits & that.bits) == that.bits
def isExpr = (this & PatternOrType) == None
override def toString =
(0 until 31).filter(i => (bits & (1 << i)) != 0).map(modeName).mkString("Mode(", ",", ")")
}
object Mode {
val None = Mode(0)
private var modeName = new Array[String](32)
def newMode(bit: Int, name: String): Mode = {
modeName(bit) = name
Mode(1 << bit)
}
val Pattern = newMode(0, "Pattern")
val Type = newMode(1, "Type")
val ImplicitsEnabled = newMode(2, "ImplicitsEnabled")
val InferringReturnType = newMode(3, "InferringReturnType")
/** This mode bit is set if we collect information without reference to a valid
* context with typerstate and constraint. This is typically done when we
* cache the eligibility of implicits. Caching needs to be done across different constraints.
* Therefore, if TypevarsMissContext is set, subtyping becomes looser, and assumes
* that PolyParams can be sub- and supertypes of anything. See TypeComparer.
*/
val TypevarsMissContext = newMode(4, "TypevarsMissContext")
val CheckCyclic = newMode(5, "CheckCyclic")
val InSuperCall = newMode(6, "InSuperCall")
/** This mode bit is set if we want to allow accessing a symbol's denotation
* at a period before that symbol is first valid. An example where this is
* the case is if we want to examine the environment where an access is made.
* The computation might take place at an earlier phase (e.g. it is part of
* some completion such as unpickling), but the environment might contain
   * symbols that are not yet defined in that phase.
* If the mode bit is set, getting the denotation of a symbol at a phase
* before the symbol is defined will return the symbol's denotation at the
* first phase where it is valid, instead of throwing a NotDefinedHere error.
*/
val FutureDefsOK = newMode(7, "FutureDefsOK")
/** Allow GADTFlexType labelled types to have their bounds adjusted */
val GADTflexible = newMode(8, "GADTflexible")
/** Allow dependent functions. This is currently necessary for unpickling, because
* some dependent functions are passed through from the front end(s?), even though they
* are technically speaking illegal.
*/
val AllowDependentFunctions = newMode(9, "AllowDependentFunctions")
  /** We are currently printing something: avoid producing more logs about
* the printing
*/
val Printing = newMode(10, "Printing")
/** We are currently typechecking an ident to determine whether some implicit
* is shadowed - don't do any other shadowing tests.
*/
val ImplicitShadowing = newMode(11, "ImplicitShadowing")
/** We are currently in a `viewExists` check. In that case, ambiguous
* implicits checks are disabled and we succeed with the first implicit
* found.
*/
val ImplicitExploration = newMode(12, "ImplicitExploration")
val PatternOrType = Pattern | Type
}
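/** Hedged usage sketch (not part of dotty): how Mode values combine as bit sets. */
object ModeExample {
  import Mode._
  def main(args: Array[String]): Unit = {
    val m = Pattern | ImplicitsEnabled
    assert(m.is(Pattern))         // the Pattern bit is set
    assert(!m.is(PatternOrType))  // `is` requires every bit of its argument to be set
    assert((m &~ Pattern).isExpr) // removing Pattern leaves no Pattern/Type bit
  }
}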
|
vsalvis/dotty
|
src/dotty/tools/dotc/typer/Mode.scala
|
Scala
|
bsd-3-clause
| 3,213 |
package com.wixpress.petri.petri
import java.util.Arrays._
import com.fasterxml.jackson.databind.JsonMappingException
import com.wixpress.common.specs2.JMock
import com.wixpress.petri.experiments.domain.ExperimentSpec
import com.wixpress.petri.petri.SpecDefinition.ExperimentSpecBuilder._
import org.joda.time.{DateTime, DateTimeZone}
import org.specs2.mutable.SpecWithJUnit
/**
* @author dmitryk
* @since 17-Sep-2015
*/
class JdbcSpecsDaoTest extends SpecWithJUnit with JMock {
"JdbcScalaSpecsDao" should {
"throw on serialization error" in {
val spec = anExperimentSpec("f.q.n", DateTime.now(DateTimeZone.UTC)).withTestGroups(asList("on", "off")).build
val mockMapper = mock[PetriMapper[ExperimentSpec]]
checking {
oneOf(mockMapper).serialize(spec) willThrow new JsonMappingException("")
}
val dao = new JdbcSpecsDao(jdbcTemplate = null, mapper = mockMapper)
dao.add(spec) must throwA[FullPetriClient.PetriException]
}
}
}
|
wix/petri
|
petri-server-core/src/test/java/com/wixpress/petri/petri/JdbcSpecsDaoTest.scala
|
Scala
|
bsd-3-clause
| 994 |
package debop4s.core.utils
import debop4s.core.{AbstractCoreFunSuite, YearWeek}
/**
* HashsFunSuite
* @author Sunghyouk Bae
*/
class HashsFunSuite extends AbstractCoreFunSuite {
test("Hash 계산") {
val a = Hashs.compute(1, 2)
val b = Hashs.compute(2, 1)
a should not be b
a shouldEqual Hashs.compute(1, 2)
b shouldEqual Hashs.compute(2, 1)
Hashs.compute(1, null) should not be Hashs.compute(null, 1)
val withNull1 = Hashs.compute(YearWeek(2013, 1), null)
val withNull2 = Hashs.compute(null, YearWeek(2013, 1))
val withNull3 = Hashs.compute(YearWeek(2013, 1), null)
withNull1 should not be withNull2
withNull1 shouldEqual withNull3
}
}
|
debop/debop4s
|
debop4s-core/src/test/scala/debop4s/core/utils/HashsFunSuite.scala
|
Scala
|
apache-2.0
| 696 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.Properties
import kafka.admin.AdminUtils
import kafka.admin.AdminUtils._
import kafka.log.LogConfig._
import kafka.server.KafkaConfig.fromProps
import kafka.server.QuotaType._
import kafka.utils.TestUtils._
import kafka.utils.CoreUtils._
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.junit.Assert._
import org.junit.{After, Before, Test}
import scala.collection.JavaConverters._
/**
 * This is the main test which ensures Replication Quotas work correctly.
 *
 * The test will fail if the quota is < 1MB/s, as 1MB is the default for replica.fetch.max.bytes.
 * So with a throttle of 100KB/s, a single fetch of one partition would fill 10s of quota,
 * in turn causing the throttled broker to pause for > 10s.
*
* Anything over 100MB/s tends to fail as this is the non-throttled replication rate
*/
class ReplicationQuotasTest extends ZooKeeperTestHarness {
def percentError(percent: Int, value: Long): Long = Math.round(value * percent / 100)
val msg100KB = new Array[Byte](100000)
var brokers: Seq[KafkaServer] = null
val topic = "topic1"
var producer: KafkaProducer[Array[Byte], Array[Byte]] = null
@Before
override def setUp() {
super.setUp()
}
@After
override def tearDown() {
brokers.par.foreach(_.shutdown())
producer.close()
super.tearDown()
}
@Test
def shouldBootstrapTwoBrokersWithLeaderThrottle(): Unit = {
shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(true)
}
@Test
def shouldBootstrapTwoBrokersWithFollowerThrottle(): Unit = {
shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(false)
}
def shouldMatchQuotaReplicatingThroughAnAsymmetricTopology(leaderThrottle: Boolean): Unit = {
/**
* In short we have 8 brokers, 2 are not-started. We assign replicas for the two non-started
* brokers, so when we start them we can monitor replication from the 6 to the 2.
*
* We also have two non-throttled partitions on two of the 6 brokers, just to make sure
* regular replication works as expected.
*/
brokers = (100 to 105).map { id => createServer(fromProps(createBrokerConfig(id, zkConnect))) }
//Given six partitions, led on nodes 0,1,2,3,4,5 but with followers on node 6,7 (not started yet)
//And two extra partitions 6,7, which we don't intend on throttling.
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkUtils, topic, Map(
0 -> Seq(100, 106), //Throttled
1 -> Seq(101, 106), //Throttled
2 -> Seq(102, 106), //Throttled
3 -> Seq(103, 107), //Throttled
4 -> Seq(104, 107), //Throttled
5 -> Seq(105, 107), //Throttled
6 -> Seq(100, 106), //Not Throttled
7 -> Seq(101, 107) //Not Throttled
))
val msg = msg100KB
val msgCount = 100
val expectedDuration = 10 //Keep the test to N seconds
var throttle: Long = msgCount * msg.length / expectedDuration
if (!leaderThrottle) throttle = throttle * 3 //Follower throttle needs to replicate 3x as fast to get the same duration as there are three replicas to replicate for each of the two follower brokers
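    // Illustrative arithmetic for the values above: 100 msgs * 100,000 B / 10 s = 1,000,000 B/s (~1 MB/s)
    // for the leader throttle, or 3,000,000 B/s when the two followers are throttled instead.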
//Set the throttle limit on all 8 brokers, but only assign throttled replicas to the six leaders, or two followers
(100 to 107).foreach { brokerId =>
changeBrokerConfig(zkUtils, Seq(brokerId),
propsWith(
(DynamicConfig.Broker.LeaderReplicationThrottledRateProp, throttle.toString),
(DynamicConfig.Broker.FollowerReplicationThrottledRateProp, throttle.toString)
))
}
//Either throttle the six leaders or the two followers
if (leaderThrottle)
changeTopicConfig(zkUtils, topic, propsWith(LeaderReplicationThrottledReplicasProp, "0:100,1:101,2:102,3:103,4:104,5:105" ))
else
changeTopicConfig(zkUtils, topic, propsWith(FollowerReplicationThrottledReplicasProp, "0:106,1:106,2:106,3:107,4:107,5:107"))
//Add data equally to each partition
producer = createNewProducer(getBrokerListStrFromServers(brokers), retries = 5, acks = 1)
(0 until msgCount).foreach { _ =>
(0 to 7).foreach { partition =>
producer.send(new ProducerRecord(topic, partition, null, msg))
}
}
//Ensure data is fully written: broker 1 has partition 1, broker 2 has partition 2 etc
(0 to 5).foreach { id => waitForOffsetsToMatch(msgCount, id, 100 + id) }
//Check the non-throttled partitions too
waitForOffsetsToMatch(msgCount, 6, 100)
waitForOffsetsToMatch(msgCount, 7, 101)
val start = System.currentTimeMillis()
//When we create the 2 new, empty brokers
createBrokers(106 to 107)
//Check that throttled config correctly migrated to the new brokers
(106 to 107).foreach { brokerId =>
assertEquals(throttle, brokerFor(brokerId).quotaManagers.follower.upperBound())
}
if (!leaderThrottle) {
(0 to 2).foreach { partition => assertTrue(brokerFor(106).quotaManagers.follower.isThrottled(tp(partition))) }
(3 to 5).foreach { partition => assertTrue(brokerFor(107).quotaManagers.follower.isThrottled(tp(partition))) }
}
//Wait for non-throttled partitions to replicate first
(6 to 7).foreach { id => waitForOffsetsToMatch(msgCount, id, 100 + id) }
val unthrottledTook = System.currentTimeMillis() - start
//Wait for replicas 0,1,2,3,4,5 to fully replicated to broker 106,107
(0 to 2).foreach { id => waitForOffsetsToMatch(msgCount, id, 106) }
(3 to 5).foreach { id => waitForOffsetsToMatch(msgCount, id, 107) }
val throttledTook = System.currentTimeMillis() - start
//Check the times for throttled/unthrottled are each side of what we expect
val throttledLowerBound = expectedDuration * 1000 * 0.9
val throttledUpperBound = expectedDuration * 1000 * 3
assertTrue(s"Expected $unthrottledTook < $throttledLowerBound", unthrottledTook < throttledLowerBound)
assertTrue(s"Expected $throttledTook > $throttledLowerBound", throttledTook > throttledLowerBound)
assertTrue(s"Expected $throttledTook < $throttledUpperBound", throttledTook < throttledUpperBound)
// Check the rate metric matches what we expect.
// In a short test the brokers can be read unfairly, so assert against the average
val rateUpperBound = throttle * 1.1
val rateLowerBound = throttle * 0.5
val rate = if (leaderThrottle) avRate(LeaderReplication, 100 to 105) else avRate(FollowerReplication, 106 to 107)
assertTrue(s"Expected ${rate} < $rateUpperBound", rate < rateUpperBound)
assertTrue(s"Expected ${rate} > $rateLowerBound", rate > rateLowerBound)
}
def tp(partition: Int): TopicPartition = new TopicPartition(topic, partition)
@Test
def shouldThrottleOldSegments(): Unit = {
/**
* Simple test which ensures throttled replication works when the dataset spans many segments
*/
//2 brokers with 1MB Segment Size & 1 partition
val config: Properties = createBrokerConfig(100, zkConnect)
config.put("log.segment.bytes", (1024 * 1024).toString)
brokers = Seq(createServer(fromProps(config)))
AdminUtils.createOrUpdateTopicPartitionAssignmentPathInZK(zkUtils, topic, Map(0 -> Seq(100, 101)))
//Write 20MBs and throttle at 5MB/s
val msg = msg100KB
val msgCount: Int = 200
val expectedDuration = 4
val throttle: Long = msg.length * msgCount / expectedDuration
//Set the throttle to only limit leader
changeBrokerConfig(zkUtils, Seq(100), propsWith(DynamicConfig.Broker.LeaderReplicationThrottledRateProp, throttle.toString))
changeTopicConfig(zkUtils, topic, propsWith(LeaderReplicationThrottledReplicasProp, "0:100"))
//Add data
addData(msgCount, msg)
val start = System.currentTimeMillis()
//Start the new broker (and hence start replicating)
brokers = brokers :+ createServer(fromProps(createBrokerConfig(101, zkConnect)))
waitForOffsetsToMatch(msgCount, 0, 101)
val throttledTook = System.currentTimeMillis() - start
assertTrue((s"Throttled replication of ${throttledTook}ms should be > ${expectedDuration * 1000 * 0.9}ms"),
throttledTook > expectedDuration * 1000 * 0.9)
assertTrue((s"Throttled replication of ${throttledTook}ms should be < ${expectedDuration * 1500}ms"),
throttledTook < expectedDuration * 1000 * 1.5)
}
def addData(msgCount: Int, msg: Array[Byte]): Boolean = {
producer = createNewProducer(getBrokerListStrFromServers(brokers), retries = 5, acks = 0)
(0 until msgCount).map(_ => producer.send(new ProducerRecord(topic, msg))).foreach(_.get)
waitForOffsetsToMatch(msgCount, 0, 100)
}
private def waitForOffsetsToMatch(offset: Int, partitionId: Int, brokerId: Int): Boolean = {
waitUntilTrue(() => {
offset == brokerFor(brokerId).getLogManager.getLog(new TopicPartition(topic, partitionId))
.map(_.logEndOffset).getOrElse(0)
}, s"Offsets did not match for partition $partitionId on broker $brokerId", 60000)
}
private def brokerFor(id: Int): KafkaServer = brokers.filter(_.config.brokerId == id).head
def createBrokers(brokerIds: Seq[Int]): Unit = {
brokerIds.foreach { id =>
brokers = brokers :+ createServer(fromProps(createBrokerConfig(id, zkConnect)))
}
}
private def avRate(replicationType: QuotaType, brokers: Seq[Int]): Double = {
brokers.map(brokerFor).map(measuredRate(_, replicationType)).sum / brokers.length
}
private def measuredRate(broker: KafkaServer, repType: QuotaType): Double = {
val metricName = broker.metrics.metricName("byte-rate", repType.toString)
broker.metrics.metrics.asScala(metricName).value
}
}
|
eribeiro/kafka
|
core/src/test/scala/unit/kafka/server/ReplicationQuotasTest.scala
|
Scala
|
apache-2.0
| 10,518 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
import sbt.serialization._
object Configurations {
def config(name: String) = new Configuration(name)
def default: Seq[Configuration] = defaultMavenConfigurations
def defaultMavenConfigurations: Seq[Configuration] = Seq(Compile, Runtime, Test, Provided, Optional)
def defaultInternal: Seq[Configuration] = Seq(CompileInternal, RuntimeInternal, TestInternal)
def auxiliary: Seq[Configuration] = Seq(Sources, Docs, Pom)
def names(cs: Seq[Configuration]) = cs.map(_.name)
lazy val RuntimeInternal = optionalInternal(Runtime)
lazy val TestInternal = fullInternal(Test)
lazy val IntegrationTestInternal = fullInternal(IntegrationTest)
lazy val CompileInternal = fullInternal(Compile)
def internalMap(c: Configuration) = c match {
case Compile => CompileInternal
case Test => TestInternal
case Runtime => RuntimeInternal
case IntegrationTest => IntegrationTestInternal
case _ => c
}
def internal(base: Configuration, ext: Configuration*) = config(base.name + "-internal") extend (ext: _*) hide
def fullInternal(base: Configuration): Configuration = internal(base, base, Optional, Provided)
def optionalInternal(base: Configuration): Configuration = internal(base, base, Optional)
lazy val Default = config("default")
lazy val Compile = config("compile")
lazy val IntegrationTest = config("it") extend (Runtime)
lazy val Provided = config("provided")
lazy val Docs = config("docs")
lazy val Runtime = config("runtime") extend (Compile)
lazy val Test = config("test") extend (Runtime)
lazy val Sources = config("sources")
lazy val System = config("system")
lazy val Optional = config("optional")
lazy val Pom = config("pom")
lazy val ScalaTool = config("scala-tool") hide
lazy val CompilerPlugin = config("plugin") hide
lazy val Component = config("component") hide
private[sbt] val DefaultMavenConfiguration = defaultConfiguration(true)
private[sbt] val DefaultIvyConfiguration = defaultConfiguration(false)
private[sbt] def DefaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) DefaultMavenConfiguration else DefaultIvyConfiguration
private[sbt] def defaultConfiguration(mavenStyle: Boolean) = if (mavenStyle) Configurations.Compile else Configurations.Default
private[sbt] def removeDuplicates(configs: Iterable[Configuration]) = Set(scala.collection.mutable.Map(configs.map(config => (config.name, config)).toSeq: _*).values.toList: _*)
/** Returns true if the configuration should be under the influence of scalaVersion. */
private[sbt] def underScalaVersion(c: Configuration): Boolean =
c match {
case Default | Compile | IntegrationTest | Provided | Runtime | Test | Optional |
CompilerPlugin | CompileInternal | RuntimeInternal | TestInternal => true
case config =>
config.extendsConfigs exists underScalaVersion
}
}
/** Represents an Ivy configuration. */
final case class Configuration(name: String, description: String, isPublic: Boolean, extendsConfigs: List[Configuration], transitive: Boolean) {
require(name != null && !name.isEmpty)
require(description != null)
def this(name: String) = this(name, "", true, Nil, true)
def describedAs(newDescription: String) = Configuration(name, newDescription, isPublic, extendsConfigs, transitive)
def extend(configs: Configuration*) = Configuration(name, description, isPublic, configs.toList ::: extendsConfigs, transitive)
def notTransitive = intransitive
def intransitive = Configuration(name, description, isPublic, extendsConfigs, false)
def hide = Configuration(name, description, false, extendsConfigs, transitive)
override def toString = name
}
object Configuration {
implicit val pickler: Pickler[Configuration] with Unpickler[Configuration] = PicklerUnpickler.generate[Configuration]
}
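/** Hedged usage sketch (not part of sbt): defining a custom configuration with the combinators above. */
object ConfigurationExample {
  import Configurations._
  // A hidden configuration extending Compile, mirroring how the *Internal configurations are built.
  val Bench: Configuration = config("bench").extend(Compile).hide
  // describedAs replaces only the description field, leaving name and visibility untouched.
  val Documented: Configuration = Bench.describedAs("micro-benchmark dependencies")
}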
|
som-snytt/xsbt
|
ivy/src/main/scala/sbt/Configuration.scala
|
Scala
|
bsd-3-clause
| 3,933 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.core.bricks.blocks
import akka.actor.ActorSystem
import com.typesafe.config.Config
import mathact.core.model.config.PumpConfigLike
import mathact.core.model.enums.BlockType
import mathact.core.model.holders._
/** Provides support for and management of the Workbench.
* Created by CAB on 20.06.2016.
*/
case class BlockContext(
val blockType: BlockType,
val system: ActorSystem,
val controller: SketchControllerRef,
val userLogging: UserLoggingRef,
val layout: LayoutRef,
val plumbing: PlumbingRef,
val pumpConfig: PumpConfigLike,
val commonConfig: Config)
{
  override def toString =
    s"BlockContext(blockType = $blockType, $controller, $userLogging, $layout, $plumbing)"
}
|
AlexCAB/MathAct
|
mathact_core/src/main/scala/mathact/core/bricks/blocks/BlockContext.scala
|
Scala
|
mit
| 1,815 |
package ohnosequences.stuff
abstract class MonoidalCategory {
type On <: Category
val on: Category.is[On]
type I <: On#Objects
@infix
type ⊗[X <: On#Objects, Y <: On#Objects] <: On#Objects
def ⊗[A <: On#Objects, B <: On#Objects, C <: On#Objects, D <: On#Objects]
: (On#C[A, B] × On#C[C, D]) -> On#C[A ⊗ C, B ⊗ D]
def assoc_right[A <: On#Objects, B <: On#Objects, C <: On#Objects]
: On#C[(A ⊗ B) ⊗ C, A ⊗ (B ⊗ C)]
def assoc_left[A <: On#Objects, B <: On#Objects, C <: On#Objects]
: On#C[A ⊗ (B ⊗ C), (A ⊗ B) ⊗ C]
def unitl[A <: On#Objects]: On#C[I ⊗ A, A]
def lunit[A <: On#Objects]: On#C[A, I ⊗ A]
def unitr[A <: On#Objects]: On#C[A ⊗ I, A]
def runit[A <: On#Objects]: On#C[A, A ⊗ I]
}
object MonoidalCategory {
// NOTE we need this version when working with concrete values (sad)
@inline
final def apply[MonCat <: MonoidalCategory](monCat: MonCat)(
implicit ev: monCat.type <:< is[MonCat]): Syntax[MonCat] =
new Syntax(ev(monCat))
final class Syntax[MonCat <: MonoidalCategory](val monCat: is[MonCat]) {
// type aliases
/////////////////////////////////////////////////////////////////////////
type Objects =
MonCat#On#Objects
type Cat =
MonCat#On
@infix
type >=>[X <: Objects, Y <: Objects] =
Cat#C[X, Y]
@infix
type ⊗[X <: Objects, Y <: Objects] =
MonCat# ⊗[X, Y]
type I =
MonCat#I
// function aliases
/////////////////////////////////////////////////////////////////////////
@inline
final def id[X <: Objects]: X >=> X =
monCat.on.identity[X]
@inline
final def assr[
X <: Objects,
Y <: Objects,
Z <: Objects
]: ((X ⊗ Y) ⊗ Z) >=> (X ⊗ (Y ⊗ Z)) =
monCat.assoc_right
@inline
final def assl[
X <: Objects,
Y <: Objects,
Z <: Objects
]: (X ⊗ (Y ⊗ Z)) >=> ((X ⊗ Y) ⊗ Z) =
monCat.assoc_left
@inline
final def unitl[X <: Objects]: (I ⊗ X) >=> X =
monCat.unitl
@inline
final def lunit[X <: Objects]: X >=> (I ⊗ X) =
monCat.lunit
@inline
final def unitr[X <: Objects]: (X ⊗ I) >=> X =
monCat.unitr
@inline
final def runit[X <: Objects]: X >=> (X ⊗ I) =
monCat.runit
@inline
final def ⊗-[A <: Objects]: LeftTensor[MonCat, A] =
new LeftTensor(monCat)
@inline
final def -⊗[A <: Objects]: RightTensor[MonCat, A] =
new RightTensor(monCat)
// implicits
/////////////////////////////////////////////////////////////////////////
@inline
implicit final val _on: Category.is[Cat] =
monCat.on
@inline
implicit final val _monCat: is[MonCat] =
monCat
@inline
implicit final def categorySyntax[X <: Objects, Y <: Objects](
f: MonCat#On#C[X, Y]): Category.MorphismSyntax[MonCat#On, X, Y] =
new Category.MorphismSyntax[MonCat#On, X, Y](f)
@inline
implicit final def syntax[X <: Objects, Y <: Objects](
f: MonCat#On#C[X, Y]): MorphismSyntax[MonCat, X, Y] =
new MorphismSyntax(f)
}
final class MorphismSyntax[
MonCat <: MonoidalCategory,
A1 <: MonCat#On#Objects,
B1 <: MonCat#On#Objects
](val f: MonCat#On#C[A1, B1])
extends CompileTime {
@inline
final def ⊗[
A2 <: MonCat#On#Objects,
B2 <: MonCat#On#Objects
](g: MonCat#On#C[A2, B2])(
implicit monCat: MonoidalCategory.is[MonCat]
): MonCat#On#C[MonCat# ⊗[A1, A2], MonCat# ⊗[B1, B2]] =
monCat ⊗ (f and g)
}
final class LeftTensor[MCat <: MonoidalCategory, A <: MCat#On#Objects](
val mcat: is[MCat])
extends Functor {
type Source = MCat#On
val source = mcat.on
type Target = MCat#On
val target = mcat.on
type F[X <: Source#Objects] =
MCat# ⊗[A, X]
def at[X <: Source#Objects, Y <: Source#Objects]
: Source#C[X, Y] -> Target#C[F[X], F[Y]] =
MonoidalCategory(mcat) ⊢ { { id ⊗ _ } }
}
final class RightTensor[MCat <: MonoidalCategory, A <: MCat#On#Objects](
val mcat: is[MCat])
extends Functor {
type Source = MCat#On
val source = mcat.on
type Target = MCat#On
val target = mcat.on
type F[X <: Source#Objects] =
MCat# ⊗[X, A]
def at[X <: Source#Objects, Y <: Source#Objects]
: Source#C[X, Y] -> Target#C[F[X], F[Y]] =
MonoidalCategory(mcat) ⊢ { { _ ⊗ id } }
}
final class TensorFunctor[MCat <: MonoidalCategory](val mcat: is[MCat])
extends Functor {
type Source = Category.Product[MCat#On, MCat#On]
val source = Category.product(mcat.on and mcat.on)
type Target = MCat#On
val target = mcat.on
type F[X <: Source#Objects] =
MCat# ⊗[X#Left, X#Right]
def at[X <: Source#Objects, Y <: Source#Objects]
: Source#C[X, Y] -> Target#C[F[X], F[Y]] =
mcat ⊗
}
type is[MCat <: MonoidalCategory] =
MCat {
type On = MCat#On
type ⊗[X <: On#Objects, Y <: On#Objects] = MCat# ⊗[X, Y]
type I = MCat#I
}
}
abstract class SymmetricStructure {
type On <: MonoidalCategory
val on: MonoidalCategory.is[On]
// note that swap must be its own inverse modulo swapping at the level of types
def swap[X <: On#On#Objects, Y <: On#On#Objects]
: On#On#C[On# ⊗[X, Y], On# ⊗[Y, X]]
}
object SymmetricStructure {
type is[SS <: SymmetricStructure] =
SS {
type On = SS#On
}
}
|
ohnosequences/stuff
|
src/main/scala/monoidalCategories.scala
|
Scala
|
agpl-3.0
| 5,504 |
package org.scalatra
import org.scalatra.test.scalatest._
import skinny.engine.SkinnyEngineServlet
class UrlSupportTest extends ScalatraFunSuite {
override def contextPath = "/context"
addServlet(new SkinnyEngineServlet {
get("/") {
if (params.contains("session")) session // trigger a jsessionid
this.url(params("url"), params - "url", absolutize = false)
}
get("/option") {
this.url(params("url"), Seq("id" -> params.get("id")), absolutize = false)
}
get("/strip-context") {
this.url(params("url")) //, includeContextPath = false)
}
}, "/*")
def url(url: String, params: Map[String, String] = Map.empty) =
get("/context/", params + ("url" -> url)) { response.body }
test("a page-relative URL should not have the context path prepended") {
url("page-relative") should equal("page-relative")
}
test("a should expand an option") {
url("page-relative", Map("id" -> "the-id")) should equal("page-relative?id=the-id")
}
test("a context-relative URL should have the context path prepended") {
url("/context-relative") should equal("/context/context-relative")
}
test("an absolute URL should not have the context path prepended") {
url("http://www.example.org/") should equal("http://www.example.org/")
}
test("empty params should not generate a query string") {
url("foo", Map.empty) should equal("foo")
}
test("a '/' should come out as /context") {
get("/context/strip-context?url=/") { body should equal("/context") }
}
test("a '' should come out as /") {
get("/context/strip-context?url=") { body should equal("") }
}
test("params should be rendered as a query string") {
val params = Map("one" -> "uno", "two" -> "dos")
val result = url("en-to-es", params)
val Array(path, query) = result.split("""\\?""")
val urlParams = query.split("&")
urlParams.toSet should equal(Set("one=uno", "two=dos"))
}
test("params should url encode both keys and values in UTF-8") {
url("de-to-ru", Map("fünf" -> "пять")) should equal("de-to-ru?f%C3%BCnf=%D0%BF%D1%8F%D1%82%D1%8C")
}
test("encodes URL through response") {
session {
url("foo", Map("session" -> "session")) should include("jsessionid=")
}
}
}
|
holycattle/skinny-framework
|
engine/src/test/scala/org/scalatra/UrlSupportTest.scala
|
Scala
|
mit
| 2,273 |
package org.bfn.ninetynineprobs
import org.scalatest._
class P91Spec extends UnitSpec {
// TODO
}
|
bfontaine/99Scala
|
src/test/scala/P91Spec.scala
|
Scala
|
mit
| 105 |
package concrete
package it
import com.typesafe.scalalogging.LazyLogging
import concrete.filter.ACC
import concrete.generator.ProblemGenerator
import concrete.generator.cspompatterns.ConcretePatterns
import concrete.runner.XCSP3Concrete
import cspom.compiler.CSPOMCompiler
import org.scalatest.{FlatSpec, TryValues}
final class ProblemGeneratorTest extends FlatSpec with LazyLogging with TryValues {
"ProblemGenerator" should "generate zebra" in {
generateTest("Zebra.xml.xz");
}
it should "generate queens-12" in {
generateTest("Queens-0012-m1.xml.xz");
}
// it should "generate scen11-f12" in {
// generateTest("scen11-f12.xml.bz2");
// }
it should "generate crosswordm2" in {
generateTest("crossword-m1-debug-05-01.xml.xz");
}
// @Test
// public void fapp01_0200_0() throws CSPParseException, IOException,
// FailedGenerationException, ClassNotFoundException {
// generateTest("fapp01-0200-0.xml");
// }
private def generateTest(file: String): Unit = {
val pm = new ParameterManager()
val cspom = XCSP3Concrete
.loadCSPOMURL(classOf[ProblemGeneratorTest].getResource(file))
.get
logger.info(s"$cspom\n${cspom.referencedExpressions.size} vars, ${cspom.constraints.size} cons")
CSPOMCompiler.compile(cspom, ConcretePatterns(pm)).get
logger.info(s"$cspom\n${cspom.referencedExpressions.size} vars, ${cspom.constraints.size} cons")
val problem = new ProblemGenerator(pm).generate(cspom).get._1
// match {
// case Success((problem, _)) => problem
// case Failure(e) => fail(e)
// }
logger.info(s"$problem\n${problem.variables.length} vars, ${problem.constraints.length} cons")
new ACC(problem, pm).reduceAll(problem.initState.toState) match {
case _: Contradiction => logger.info("UNSAT")
case newState: ProblemState => logger.info(problem.toString(newState))
}
}
}
|
concrete-cp/concrete
|
src/test/scala/concrete/it/ProblemGeneratorTest.scala
|
Scala
|
lgpl-2.1
| 1,932 |
package com.salcedo.rapbot.websocket
import java.net.InetSocketAddress
import akka.actor.Status.Success
import akka.actor.{Actor, ActorLogging, Props, Terminated}
import akka.io.Tcp._
import akka.io.{IO, Tcp}
import akka.routing.{BroadcastRoutingLogic, Router}
import com.salcedo.rapbot.hub.Hub.SystemState
import com.salcedo.rapbot.snapshot.SnapshotActor.TakeSubSystemSnapshot
object WebSocketActor {
def props(port: Int): Props = Props(new WebSocketActor(port))
}
class WebSocketActor(port: Int) extends Actor with ActorLogging {
var router = Router(BroadcastRoutingLogic())
override def preStart(): Unit = {
context.system.eventStream.subscribe(self, classOf[SystemState])
IO(Tcp)(context.system) ! Bind(self, new InetSocketAddress(port))
}
override def receive: Receive = {
case Bound(_) ⇒ log.info("Started WebSocket server on http://0.0.0.0:{}/.", port)
case CommandFailed(_: Bind) => context.stop(self)
case connected: Connected => this.connect(connected)
case state: SystemState => this.broadcast(state)
case terminated: Terminated => this.terminate(terminated)
case _: TakeSubSystemSnapshot => sender() ! Success(None)
}
def connect(connected: Connected): Unit = {
val routee = context.actorOf(WebSocketConnectionActor.props(sender()))
context.watch(routee)
router = router.addRoutee(routee)
log.debug("Received connection from {}.", connected.remoteAddress)
sender() ! Register(routee)
}
def broadcast(state: SystemState): Unit = {
if (router.routees.nonEmpty) router.route(state, sender())
}
private def terminate(message: Terminated): Unit = {
router = router.removeRoutee(message.actor)
}
}
|
misalcedo/RapBot
|
Hub/src/main/scala/com/salcedo/rapbot/websocket/WebSocketActor.scala
|
Scala
|
mit
| 1,701 |
/*
* Copyright 1998-2015 Linux.org.ru
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ru.org.linux.tag
import javax.annotation.Nonnull
import javax.servlet.http.HttpServletRequest
import akka.actor.ActorSystem
import com.typesafe.scalalogging.StrictLogging
import org.apache.commons.lang3.text.WordUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.{PathVariable, RequestMapping, RequestMethod}
import org.springframework.web.context.request.async.DeferredResult
import org.springframework.web.servlet.ModelAndView
import ru.org.linux.gallery.ImageService
import ru.org.linux.group.GroupDao
import ru.org.linux.section.{Section, SectionService}
import ru.org.linux.site.Template
import ru.org.linux.topic._
import ru.org.linux.user.UserTagService
import ru.org.linux.util.RichFuture._
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
object TagPageController {
val TotalNewsCount = 21
val ForumTopicCount = 20
val GalleryCount = 3
val Timeout = 500 millis
}
@Controller
@RequestMapping(value = Array("/tag/{tag}"), params = Array("!section"))
class TagPageController @Autowired()
(tagService: TagService, prepareService: TopicPrepareService, topicListService: TopicListService,
sectionService: SectionService, groupDao: GroupDao, userTagService: UserTagService, imageService: ImageService,
actorSystem: ActorSystem) extends StrictLogging {
private implicit val akka = actorSystem
@RequestMapping(method = Array(RequestMethod.GET, RequestMethod.HEAD))
def tagPage(request: HttpServletRequest, @PathVariable tag: String): DeferredResult[ModelAndView] = {
val deadline = TagPageController.Timeout.fromNow
val tmpl = Template.getTemplate(request)
if (!TagName.isGoodTag(tag)) {
throw new TagNotFoundException
}
val countF = tagService.countTagTopics(tag)
val relatedF = {
tagService.getRelatedTags(tag) map { relatedTags ⇒
if (relatedTags.nonEmpty) {
Some("relatedTags" -> relatedTags.asJava)
} else {
None
}
}
}
val favs = if (tmpl.isSessionAuthorized) {
Seq("showFavoriteTagButton" -> !userTagService.hasFavoriteTag(tmpl.getCurrentUser, tag),
"showUnFavoriteTagButton" -> userTagService.hasFavoriteTag(tmpl.getCurrentUser, tag),
"showIgnoreTagButton" -> (!tmpl.isModeratorSession && !userTagService.hasIgnoreTag(tmpl.getCurrentUser, tag)),
"showUnIgnoreTagButton" -> (!tmpl.isModeratorSession && userTagService.hasIgnoreTag(tmpl.getCurrentUser, tag)))
} else {
Seq.empty
}
val tagInfo = tagService.getTagInfo(tag, skipZero = true)
val sections = getNewsSection(request, tag) ++ getGallerySection(tag, tagInfo.id, tmpl) ++ getForumSection(tag, tagInfo.id)
val model = Map(
"tag" -> tag,
"title" -> WordUtils.capitalize(tag),
"favsCount" -> userTagService.countFavs(tagInfo.id),
"ignoreCount" -> userTagService.countIgnore(tagInfo.id)
) ++ sections ++ favs
val safeRelatedF = relatedF withTimeout deadline.timeLeft recover {
case ex: TimeoutException ⇒
logger.warn(s"Tag related search timed out (${ex.getMessage})")
None
case ex ⇒
logger.warn("Unable to find related tags", ex)
None
}
val safeCountF = countF withTimeout deadline.timeLeft recover {
case ex: TimeoutException ⇒
logger.warn(s"Tag topics count timed out (${ex.getMessage})")
tagInfo.topicCount.toLong
case ex ⇒
logger.warn("Unable to count tag topics", ex)
tagInfo.topicCount.toLong
}
(for {
counter <- safeCountF
related <- safeRelatedF
} yield {
new ModelAndView("tag-page", (model + ("counter" -> counter) ++ related).asJava)
}) toDeferredResult
}
private def getNewsSection(request: HttpServletRequest, tag: String) = {
val tmpl = Template.getTemplate(request)
val newsSection = sectionService.getSection(Section.SECTION_NEWS)
val newsTopics = topicListService.getTopicsFeed(newsSection, null, tag, 0, null, null, TagPageController.TotalNewsCount)
val (fullNewsTopics, briefNewsTopics) = newsTopics.asScala.splitAt(1)
val fullNews = prepareService.prepareMessagesForUser(fullNewsTopics.asJava, request.isSecure, tmpl.getCurrentUser, tmpl.getProf, false)
val briefNewsByDate = TopicListTools.datePartition(briefNewsTopics)
val more = if (newsTopics.size == TagPageController.TotalNewsCount) {
Some("moreNews" -> TagTopicListController.tagListUrl(tag, newsSection))
} else {
None
}
Map(
"fullNews" -> fullNews,
"addNews" -> AddTopicController.getAddUrl(newsSection, tag),
"briefNews" -> TopicListTools.split(briefNewsByDate.map(p ⇒ p._1 -> BriefTopicRef.fromTopicNoGroup(p._2)))
) ++ more
}
private def getGallerySection(tag: String, tagId: Int, tmpl: Template) = {
val list = imageService.prepareGalleryItem(imageService.getGalleryItems(TagPageController.GalleryCount, tagId))
val section = sectionService.getSection(Section.SECTION_GALLERY)
val add = if (tmpl.isSessionAuthorized) {
Some("addGallery" -> AddTopicController.getAddUrl(section, tag))
} else {
None
}
val more = if (list.size == TagPageController.GalleryCount) {
Some("moreGallery" -> TagTopicListController.tagListUrl(tag, section))
} else {
None
}
Map(
"gallery" -> list
) ++ add ++ more
}
private def getForumSection(@Nonnull tag: String, tagId: Int) = {
val forumSection = sectionService.getSection(Section.SECTION_FORUM)
val topicListDto = new TopicListDto
topicListDto.setSection(forumSection.getId)
topicListDto.setCommitMode(TopicListDao.CommitMode.POSTMODERATED_ONLY)
topicListDto.setTag(tagId)
topicListDto.setLimit(TagPageController.ForumTopicCount)
val forumTopics = topicListService.getTopics(topicListDto)
val topicByDate = TopicListTools.datePartition(forumTopics.asScala)
val more = if (forumTopics.size == TagPageController.ForumTopicCount) {
Some("moreForum" -> TagTopicListController.tagListUrl(tag, forumSection))
} else {
None
}
Map(
"addForum" -> AddTopicController.getAddUrl(forumSection, tag),
"forum" -> TopicListTools.split(
topicByDate.map(p ⇒ p._1 -> BriefTopicRef.fromTopic(p._2, groupDao.getGroup(p._2.getGroupId).getTitle)))
) ++ more
}
}
|
ymn/lorsource
|
src/main/java/ru/org/linux/tag/TagPageController.scala
|
Scala
|
apache-2.0
| 7,183 |
package algorithms
import scala.io.StdIn
class BinarySearch {
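  /** Returns the index of `key` in the ascending-sorted sequence `a`, or -1 if it is not present. */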
def rank(key: Int, a: Seq[Int]): Int = {
var lo = 0
var hi = a.size - 1
while (lo <= hi) {
val mid = lo + (hi - lo) / 2
if (key < a(mid)) {
hi = mid - 1
}
else if (key > a(mid)) {
lo = mid + 1
} else
return mid
}
-1
}
}
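// Hedged usage sketch (not in the original file): rank assumes `a` is sorted in ascending order.
object BinarySearchExample {
  def main(args: Array[String]): Unit = {
    val bs = new BinarySearch
    println(bs.rank(7, Seq(1, 3, 5, 7, 9))) // prints 3
    println(bs.rank(4, Seq(1, 3, 5, 7, 9))) // prints -1
  }
}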
|
cricanr/AlgorithmsHackerRank
|
src/main/scala/algorithms/BinarySearch.scala
|
Scala
|
mit
| 368 |
/*
* Copyright 2014 - 2015 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package slamdata.engine.analysis
import slamdata.Predef._
import slamdata.engine.fp._
import scalaz.{Tree => ZTree, Node => _, _}
import Scalaz._
import Id.Id
import slamdata.engine.{RenderTree, Terminal, NonTerminal}
sealed trait term {
case class Term[F[_]](unFix: F[Term[F]]) {
def isLeaf(implicit F: Foldable[F]): Boolean =
!Tag.unwrap(F.foldMap(unFix)(κ(Tags.Disjunction(true))))
def children(implicit F: Foldable[F]): List[Term[F]] =
F.foldMap(unFix)(_ :: Nil)
def universe(implicit F: Foldable[F]): List[Term[F]] =
this :: children.flatMap(_.universe)
// Foldable-like operations
def all(p: Term[F] ⇒ Boolean)(implicit F: Foldable[F]): Boolean = {
def loop(z0: Boolean, term: Term[F]): Boolean =
term.unFix.foldLeft(z0 && p(term))(loop(_, _))
loop(true, this)
}
def any(p: Term[F] ⇒ Boolean)(implicit F: Foldable[F]): Boolean = {
def loop(z0: Boolean, term: Term[F]): Boolean =
term.unFix.foldLeft(z0 || p(term))(loop(_, _))
loop(false, this)
}
def contains(t: Term[F])(implicit E: EqualF[F], F: Foldable[F]): Boolean =
any(_ ≟ t)
def foldMap[Z](f: Term[F] => Z)(implicit F: Traverse[F], Z: Monoid[Z]): Z = {
(foldMapM[Free.Trampoline, Z] { (term: Term[F]) =>
f(term).pure[Free.Trampoline]
}).run
}
def foldMapM[M[_], Z](f: Term[F] => M[Z])(implicit F: Traverse[F], M: Monad[M], Z: Monoid[Z]): M[Z] = {
def loop(z0: Z, term: Term[F]): M[Z] = {
for {
z1 <- f(term)
z2 <- F.foldLeftM(term.unFix, Z.append(z0, z1))(loop(_, _))
} yield z2
}
loop(Z.zero, this)
}
def transform(f: Term[F] => Term[F])(implicit T: Traverse[F]): Term[F] =
transformM[Free.Trampoline]((v: Term[F]) => f(v).pure[Free.Trampoline]).run
def transformM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], F: Traverse[F]): M[Term[F]] = {
def loop(term: Term[F]): M[Term[F]] = {
for {
y <- F.traverse(term.unFix)(loop _)
z <- f(Term(y))
} yield z
}
loop(this)
}
def topDownTransform(f: Term[F] => Term[F])(implicit T: Traverse[F]): Term[F] = {
topDownTransformM[Free.Trampoline]((term: Term[F]) => f(term).pure[Free.Trampoline]).run
}
def topDownTransformM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], F: Traverse[F]): M[Term[F]] = {
def loop(term: Term[F]): M[Term[F]] = {
for {
x <- f(term)
y <- F.traverse(x.unFix)(loop _)
} yield Term(y)
}
loop(this)
}
def topDownCata[A](a: A)(f: (A, Term[F]) => (A, Term[F]))(implicit F: Traverse[F]): Term[F] = {
topDownCataM[Free.Trampoline, A](a)((a: A, term: Term[F]) => f(a, term).pure[Free.Trampoline]).run
}
def topDownCataM[M[_], A](a: A)(f: (A, Term[F]) => M[(A, Term[F])])(implicit M: Monad[M], F: Traverse[F]): M[Term[F]] = {
def loop(a: A, term: Term[F]): M[Term[F]] = {
for {
tuple <- f(a, term)
(a, tf) = tuple
rec <- F.traverse(tf.unFix)(loop(a, _))
} yield Term(rec)
}
loop(a, this)
}
def descend(f: Term[F] => Term[F])(implicit F: Functor[F]): Term[F] = {
Term(F.map(unFix)(f))
}
def descendM[M[_]](f: Term[F] => M[Term[F]])(implicit M: Monad[M], TraverseF: Traverse[F]): M[Term[F]] = {
TraverseF.traverse(unFix)(f).map(Term.apply _)
}
def rewrite(f: Term[F] => Option[Term[F]])(implicit T: Traverse[F]): Term[F] = {
rewriteM[Free.Trampoline]((term: Term[F]) => f(term).pure[Free.Trampoline]).run
}
def rewriteM[M[_]](f: Term[F] => M[Option[Term[F]]])(implicit M: Monad[M], TraverseF: Traverse[F]): M[Term[F]] = {
transformM[M] { term =>
for {
x <- f(term)
y <- Traverse[Option].traverse(x)(_ rewriteM f).map(_.getOrElse(term))
} yield y
}
}
def restructure[G[_]](f: F[Term[G]] => G[Term[G]])(implicit T: Traverse[F]): Term[G] = {
restructureM[Free.Trampoline, G]((term: F[Term[G]]) => f(term).pure[Free.Trampoline]).run
}
def restructureM[M[_], G[_]](f: F[Term[G]] => M[G[Term[G]]])(implicit M: Monad[M], T: Traverse[F]): M[Term[G]] = {
for {
x <- T.traverse(unFix)(_ restructureM f)
y <- f(x)
} yield Term(y)
}
def trans[G[_]](f: F ~> G)(implicit G: Functor[G]): Term[G] = Term[G](G.map(f(unFix))(_.trans(f)(G)))
def cata[A](f: F[A] => A)(implicit F: Functor[F]): A =
f(unFix.map(_.cata(f)(F)))
def cataM[M[_]: Monad, A](f: F[A] => M[A])(implicit F: Traverse[F]):
M[A] =
unFix.map(_.cataM(f)).sequence.flatMap(f)
def para[A](f: F[(Term[F], A)] => A)(implicit F: Functor[F]): A =
f(unFix.map(t => t -> t.para(f)(F)))
def gpara[W[_]: Comonad, A](
t: λ[α => F[W[α]]] ~> λ[α => W[F[α]]], f: F[EnvT[Term[F], W, A]] => A)(
implicit F: Functor[F]):
A =
gzygo[W, A, Term[F]](Term(_), t, f)
def gcata[W[_]: Comonad, A](
k: λ[α => F[W[α]]] ~> λ[α => W[F[α]]], g: F[W[A]] => A)(
implicit F: Functor[F]):
A = {
def loop(t: Term[F]): W[F[W[A]]] = k(t.unFix.map(loop(_).map(g).cojoin))
g(loop(this).copoint)
}
def zygo[A, B](f: F[B] => B, g: F[(B, A)] => A)(implicit F: Functor[F]): A =
gcata[(B, ?), A](distZygo(f), g)
def gzygo[W[_], A, B](
f: F[B] => B,
w: λ[α => F[W[α]]] ~> λ[α => W[F[α]]],
g: F[EnvT[B, W, A]] => A)(
implicit F: Functor[F], W: Comonad[W]):
A =
gcata[EnvT[B, W, ?], A](distZygoT(f, w), g)
def histo[A](f: F[Cofree[F, A]] => A)(implicit F: Functor[F]): A =
gcata[Cofree[F, ?], A](distHisto, f)
def ghisto[H[_]: Functor, A](
g: λ[α => F[H[α]]] ~> λ[α => H[F[α]]], f: F[Cofree[H, A]] => A)(
implicit F: Functor[F]):
A =
gcata[Cofree[H, ?], A](distGHisto(g), f)
def paraZygo[A, B](
f: F[(Term[F], B)] => B, g: F[(B, A)] => A)(
implicit F: Functor[F], U: Unzip[F]):
A = {
def h(t: Term[F]): (B, A) =
(t.unFix.map { x =>
val (b, a) = h(x)
((x, b), (b, a))
}).unfzip.bimap(f, g)
h(this)._2
}
override def toString = unFix.toString
}
sealed trait TermInstances {
implicit def TermRenderTree[F[_]](implicit F: Foldable[F], RF: RenderTree[F[_]]) = new RenderTree[Term[F]] {
override def render(v: Term[F]) = {
val t = RF.render(v.unFix)
NonTerminal(t.nodeType, t.label, v.children.map(render(_)))
}
}
implicit def TermEqual[F[_]](implicit F: Equal ~> λ[α => Equal[F[α]]]):
Equal[Term[F]] =
Equal.equal { (a, b) => F(TermEqual[F]).equal(a.unFix, b.unFix) }
}
object Term extends TermInstances
def distPara[F[_]: Functor]:
λ[α => F[(Term[F], α)]] ~> λ[α => (Term[F], F[α])] =
distZygo(Term(_))
def distParaT[F[_]: Functor, W[_]: Comonad](
t: λ[α => F[W[α]]] ~> λ[α => W[F[α]]]):
(λ[α => F[EnvT[Term[F], W, α]]] ~> λ[α => EnvT[Term[F], W, F[α]]]) =
distZygoT(Term(_), t)
def distCata[F[_]]: λ[α => F[Id[α]]] ~> λ[α => Id[F[α]]] =
NaturalTransformation.refl
def distZygo[F[_]: Functor, B](g: F[B] => B) =
new (λ[α => F[(B, α)]] ~> λ[α => (B, F[α])]) {
def apply[α](m: F[(B, α)]) = (g(m.map(_._1)), m.map(_._2))
}
def distZygoT[F[_], W[_], B](
g: F[B] => B, k: λ[α => F[W[α]]] ~> λ[α => W[F[α]]])(
implicit F: Functor[F], W: Comonad[W]) =
new (λ[α => F[EnvT[B, W, α]]] ~> λ[α => EnvT[B, W, F[α]]]) {
def apply[α](fe: F[EnvT[B, W, α]]) =
EnvT((
g(F.lift[EnvT[B, W, α], B](_.ask)(fe)),
k(F.lift[EnvT[B, W, α], W[α]](_.lower)(fe))))
}
def distHisto[F[_]: Functor] =
new (λ[α => F[Cofree[F, α]]] ~> λ[α => Cofree[F, F[α]]]) {
def apply[α](m: F[Cofree[F, α]]) =
distGHisto[F, F](NaturalTransformation.refl[λ[α => F[F[α]]]]).apply(m)
}
def distGHisto[F[_], H[_]](
k: λ[α => F[H[α]]] ~> λ[α => H[F[α]]])(
implicit F: Functor[F], H: Functor[H]) =
new (λ[α => F[Cofree[H, α]]] ~> λ[α => Cofree[H, F[α]]]) {
def apply[α](m: F[Cofree[H, α]]) =
Cofree.unfold(m)(as => (
F.lift[Cofree[H, α], α](_.copure)(as),
k(F.lift[Cofree[H, α], H[Cofree[H, α]]](_.tail)(as))))
}
def ana[F[_]: Functor, A](a: A)(f: A => F[A]): Term[F] =
Term(f(a).map(ana(_)(f)))
def apo[F[_]: Functor, A](a: A)(f: A => F[Term[F] \\/ A]): Term[F] =
Term(f(a).map(_.fold(ɩ, apo(_)(f))))
def postpro[F[_]: Functor, A](a: A)(e: F ~> F, g: A => F[A]): Term[F] =
Term(g(a).map(x => ana(postpro(x)(e, g))(x => e(x.unFix))))
def gpostpro[F[_]: Functor, M[_], A](
a: A)(
k: λ[α => M[F[α]]] ~> λ[α => F[M[α]]], e: F ~> F, g: A => F[M[A]])(
implicit M: Monad[M]):
Term[F] = {
def loop(ma: M[A]): Term[F] =
Term(k(M.lift(g)(ma)).map(x => ana(loop(x.join))(x => e(x.unFix))))
loop(a.point[M])
}
def hylo[F[_]: Functor, A, B](a: A)(f: F[B] => B, g: A => F[A]): B =
f(g(a).map(hylo(_)(f, g)))
def gana[M[_], F[_]: Functor, A](
a: A)(
k: λ[α => M[F[α]]] ~> λ[α => F[M[α]]], f: A => F[M[A]])(
implicit M: Monad[M]):
Term[F] = {
def loop(x: M[F[M[A]]]): Term[F] =
Term(k(x).map(x => loop(M.lift(f)(x.join))))
loop(M.point(f(a)))
}
def distAna[F[_]: Functor, A]: λ[α => Id[F[α]]] ~> λ[α => F[Id[α]]] =
NaturalTransformation.refl
def ghylo[W[_]: Comonad, F[_]: Functor, M[_], A, B](
a: A)(
w: λ[α => F[W[α]]] ~> λ[α => W[F[α]]],
m: λ[α => M[F[α]]] ~> λ[α => F[M[α]]],
f: F[W[B]] => B,
g: A => F[M[A]])(
implicit M: Monad[M]):
B = {
def h(x: M[A]): W[B] =
w(m(M.lift(g)(x)).map(y => h(y.join).cojoin)).map(f)
h(a.point[M]).copoint
}
def futu[F[_], A](a: A)(f: A => F[Free[F, A]])(implicit F: Functor[F]):
Term[F] =
gana[Free[F, ?], F, A](a)(distFutu, f)
def distFutu[F[_]: Functor] =
new (λ[α => Free[F, F[α]]] ~> λ[α => F[Free[F, α]]]) {
def apply[α](m: Free[F, F[α]]) =
distGFutu[F, F](NaturalTransformation.refl[λ[α => F[F[α]]]]).apply(m)
}
def distGFutu[H[_], F[_]](
k: λ[α => H[F[α]]] ~> λ[α => F[H[α]]])(
implicit H: Functor[H], F: Functor[F]):
(λ[α => Free[H, F[α]]] ~> λ[α => F[Free[H, α]]]) =
new (λ[α => Free[H, F[α]]] ~> λ[α => F[Free[H, α]]]) {
def apply[α](m: Free[H, F[α]]) =
m.resume.fold(
as => F.lift(Free.liftF(_: H[Free[H, α]]).join)(k(H.lift(distGFutu(k)(H, F)(_: Free[H, F[α]]))(as))),
F.lift(Free.point[H, α](_)))
}
def chrono[F[_]: Functor, A, B](
a: A)(
g: F[Cofree[F, B]] => B,
f: A => F[Free[F, A]]):
B =
ghylo[Cofree[F, ?], F, Free[F, ?], A, B](a)(distHisto, distFutu, g, f)
}
sealed trait holes {
sealed trait Hole
val Hole = new Hole{}
def holes[F[_], A](fa: F[A])(implicit F: Traverse[F]): F[(A, A => F[A])] = {
(F.mapAccumL(fa, 0) {
case (i, x) =>
val h: A => F[A] = { y =>
val g: (Int, A) => (Int, A) = (j, z) => (j + 1, if (i == j) y else z)
F.mapAccumL(fa, 0)(g)._2
}
(i + 1, (x, h))
})._2
}
def holesList[F[_]: Traverse, A](fa: F[A]): List[(A, A => F[A])] = Traverse[F].toList(holes(fa))
def builder[F[_]: Traverse, A, B](fa: F[A], children: List[B]): F[B] = {
(Traverse[F].mapAccumL(fa, children) {
case (x :: xs, _) => (xs, x)
case _ => scala.sys.error("Not enough children")
})._2
}
def project[F[_], A](index: Int, fa: F[A])(implicit F: Foldable[F]): Option[A] =
if (index < 0) None
else F.foldMap(fa)(_ :: Nil).drop(index).headOption
def sizeF[F[_]: Foldable, A](fa: F[A]): Int = Foldable[F].foldLeft(fa, 0)((a, _) => a + 1)
}
sealed trait attr extends term with holes {
def attrUnit[F[_]: Functor](term: Term[F]): Cofree[F, Unit] = attrK(term, ())
def attribute[F[_]: Functor, A](term: Term[F])(f: F[A] => A): Cofree[F, A] =
term.cata[Cofree[F, A]](fa => Cofree(f(fa.map(_.head)), fa))
def attrK[F[_]: Functor, A](term: Term[F], k: A): Cofree[F, A] = {
Cofree(k, Functor[F].map(term.unFix)(attrK(_, k)(Functor[F])))
}
def attrSelf[F[_]: Functor](term: Term[F]): Cofree[F, Term[F]] = {
Cofree(term, Functor[F].map(term.unFix)(attrSelf(_)(Functor[F])))
}
def children[F[_], A](attr: Cofree[F, A])(implicit F: Foldable[F]): List[Cofree[F, A]] =
F.foldMap(attr.tail)(_ :: Nil)
def universe[F[_], A](attr: Cofree[F, A])(implicit F: Foldable[F]): List[Cofree[F, A]] =
attr :: children(attr).flatMap(universe(_))
def forget[F[_]: Functor](attr: Cofree[F, _]): Term[F] =
Term(attr.tail.map(forget[F]))
implicit def CofreeRenderTree[F[_], A](implicit F: Foldable[F], RF: RenderTree[F[_]], RA: RenderTree[A]) = new RenderTree[Cofree[F, A]] {
override def render(attr: Cofree[F, A]) = {
val term = RF.render(attr.tail)
val ann = RA.render(attr.head)
NonTerminal(term.nodeType, term.label,
(if (ann.children.isEmpty) NonTerminal(List("Annotation"), None, ann :: Nil) else ann.copy(label=None, nodeType=List("Annotation"))) ::
children(attr).map(render(_)))
}
}
// These lifts are largely useful when you want to zip a cata (or ana) with
// some more complicated algebra.
def liftPara[F[_]: Functor, A](f: F[A] => A): F[(Term[F], A)] => A =
node => f(node.map(_._2))
def liftHisto[F[_]: Functor, A](f: F[A] => A): F[Cofree[F, A]] => A =
node => f(node.map(_.head))
def liftApo[F[_]: Functor, A](f: A => F[A]): A => F[Term[F] \\/ A] =
f(_).map(\\/-(_))
def liftFutu[F[_]: Functor, A](f: A => F[A]): A => F[Free[F, A]] =
f(_).map(Free.pure(_))
// roughly DownStar(f) *** DownStar(g)
def zipCata[F[_]: Unzip, A, B](f: F[A] => A, g: F[B] => B):
F[(A, B)] => (A, B) =
node => node.unfzip.bimap(f, g)
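  // e.g. pairing a size algebra with a depth algebra (hypothetical algebras, for illustration)
  // folds the term once and yields both results in a single pass.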
def zipPara[F[_]: Functor, A, B](f: F[(Term[F], A)] => A, g: F[(Term[F], B)] => B):
F[(Term[F], (A, B))] => (A, B) =
node => (f(node.map(({ (x: (A, B)) => x._1 }).second)), g(node.map(({ (x: (A, B)) => x._2 }).second)))
// Inherited: inherit, inherit2, inherit3, inheritM, inheritM_
def inherit[F[_], A, B](tree: Cofree[F, A], b: B)(f: (B, Cofree[F, A]) => B)(implicit F: Functor[F]): Cofree[F, B] = {
val b2 = f(b, tree)
Cofree[F, B](b2, F.map(tree.tail)(inherit(_, b2)(f)(F)))
}
// TODO: Top down folds
def transform[F[_], A](attrfa: Cofree[F, A])(f: A => Option[Cofree[F, A]])(implicit F: Functor[F]): Cofree[F, A] = {
val a = attrfa.head
f(a).map(transform(_)(f)(F))
.getOrElse(Cofree(a, F.map(attrfa.tail)(transform(_)(f)(F))))
}
def swapTransform[F[_], A, B](attrfa: Cofree[F, A])(f: A => B \\/ Cofree[F, B])(implicit F: Functor[F]): Cofree[F, B] = {
lazy val fattrfb = F.map(attrfa.tail)(swapTransform(_)(f)(F))
f(attrfa.head).fold(Cofree(_, fattrfb), ɩ)
}
def sequenceUp[F[_], G[_], A](attr: Cofree[F, G[A]])(implicit F: Traverse[F], G: Applicative[G]): G[Cofree[F, A]] = {
val ga : G[A] = attr.head
val fgattr : F[G[Cofree[F, A]]] = F.map(attr.tail)(t => sequenceUp(t)(F, G))
val gfattr : G[F[Cofree[F, A]]] = F.traverseImpl(fgattr)(ɩ)
G.apply2(gfattr, ga)((node, attr) => Cofree(attr, node))
}
def sequenceDown[F[_], G[_], A](attr: Cofree[F, G[A]])(implicit F: Traverse[F], G: Applicative[G]): G[Cofree[F, A]] = {
val ga : G[A] = attr.head
val fgattr : F[G[Cofree[F, A]]] = F.map(attr.tail)(t => sequenceDown(t)(F, G))
val gfattr : G[F[Cofree[F, A]]] = F.traverseImpl(fgattr)(ɩ)
G.apply2(ga, gfattr)(Cofree(_, _))
}
/**
* Zips two attributed nodes together. This is unsafe in the sense that the
* user is responsible for ensuring both left and right parameters have the
* same shape (i.e. represent the same tree).
*/
def unsafeZip2[F[_]: Traverse, A, B](left: Cofree[F, A], right: Cofree[F, B]): Cofree[F, (A, B)] = {
val lattr: A = left.head
val lunAnn: F[Cofree[F, A]] = left.tail
val lunAnnL: List[Cofree[F, A]] = Foldable[F].toList(lunAnn)
val rattr: B = right.head
val runAnn: F[Cofree[F, B]] = right.tail
val runAnnL: List[Cofree[F, B]] = Foldable[F].toList(runAnn)
val abs: List[Cofree[F, (A, B)]] = lunAnnL.zip(runAnnL).map { case ((a, b)) => unsafeZip2(a, b) }
val fabs : F[Cofree[F, (A, B)]] = builder(lunAnn, abs)
Cofree((lattr, rattr), fabs)
}
}
trait binding extends attr {
trait Binder[F[_]] {
type G[A]
def initial[A]: G[A]
// Extracts bindings from a node:
def bindings[A](t: F[Term[F]], b: G[A])(f: F[Term[F]] => A): G[A]
// Possibly binds a free term to its definition:
def subst[A](t: F[Term[F]], b: G[A]): Option[A]
}
def boundCata[F[_]: Functor, A](t: Term[F])(f: F[A] => A)(implicit B: Binder[F]): A = {
def loop(t: F[Term[F]], b: B.G[A]): A = {
val newB = B.bindings(t, b)(loop(_, b))
B.subst(t, newB).getOrElse(f(t.map(x => loop(x.unFix, newB))))
}
loop(t.unFix, B.initial)
}
def boundPara[F[_]: Functor, A](t: Term[F])(f: F[(Term[F], A)] => A)(implicit B: Binder[F]): A = {
def loop(t: F[Term[F]], b: B.G[A]): A = {
val newB = B.bindings(t, b)(loop(_, b))
B.subst(t, newB).getOrElse(f(t.map(x => (x, loop(x.unFix, newB)))))
}
loop(t.unFix, B.initial)
}
}
object fixplate extends binding
|
wemrysi/quasar
|
core/src/main/scala/slamdata/engine/analysis/fixplate.scala
|
Scala
|
apache-2.0
| 18,160 |
/*
* The MIT License
*
* Copyright (c) 2022 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.fasta
import com.fulcrumgenomics.FgBioDef.PathToSequenceDictionary
import com.fulcrumgenomics.cmdline.{ClpGroups, FgBioTool}
import com.fulcrumgenomics.commons.CommonsDef._
import com.fulcrumgenomics.commons.util.LazyLogging
import com.fulcrumgenomics.sopt._
import com.fulcrumgenomics.util.Io
import scala.collection.immutable.IndexedSeq
import scala.collection.mutable.{ListBuffer, Builder}
@clp(description =
"""
|Sorts a sequence dictionary file in the order of another sequence dictionary.
|
|The inputs are to two `*.dict` files. One to be sorted, and the other to provide the order for the sorting.
|
|If there is a contig in the input dictionary that is not in the sorting dictionary, that contig will be appended
|to the end of the sequence dictionary in the same relative order to other appended contigs as in the input dictionary.
|Missing contigs can be omitted by setting `--skip-missing-contigs` to true.
|
|If there is a contig in the sorting dictionary that is not in the input dictionary, that contig will be ignored.
|
|The output will be a sequence dictionary, containing the version header line and one
|line per contig. The fields of the entries in this dictionary will be the same as in input, but in the order of
|`--sort-dictionary`.
""",
group = ClpGroups.Fasta)
class SortSequenceDictionary
(@arg(flag='i', doc="Input sequence dictionary file to be sorted.") val input: PathToSequenceDictionary,
@arg(flag='d', doc="Input sequence dictionary file containing contigs in the desired sort order.") val sortDictionary: PathToSequenceDictionary,
@arg(flag='o', doc="Output sequence dictionary file.") val output: PathToSequenceDictionary,
@arg(doc="Skip input contigs that have no matching contig in the sort dictionary rather than appending to the end of the output dictionary.") val skipMissingContigs: Boolean = false,
) extends FgBioTool with LazyLogging {
Io.assertReadable(input)
Io.assertReadable(sortDictionary)
Io.assertCanWriteFile(output)
override def execute(): Unit = {
val inputDict = SequenceDictionary(input)
val sortOrderDict = SequenceDictionary(sortDictionary)
// Iterate through the sort dictionary collecting metas from the input that match by name
val metasBuilder = IndexedSeq.newBuilder[SequenceMetadata]
sortOrderDict.foreach { sortMeta =>
sortMeta.allNames.find { name => inputDict.contains(name) } match {
case Some(name) => metasBuilder += inputDict(name)
case None => logger.info(s"Contig '${sortMeta.name}' corresponded to no contig in input dictionary, skipping")
}
}
// build a dictionary from the input contigs found in the sort dictionary
val metasFoundInSortDictDict = {
val metadata = metasBuilder.result().zipWithIndex.map {
case (meta, index) => meta.copy(index=index)
}.toSeq
SequenceDictionary(metadata:_*)
}
// maybe append input contigs not found in the sort dictionary. Their index will be reset after aggregation.
inputDict.foreach { inMeta =>
if (!metasFoundInSortDictDict.contains(inMeta.name)) {
val skipBehavior = if (skipMissingContigs) "skipping." else "appending."
logger.warning(s"Contig '${inMeta.name}' was not found in sort order dictionary: $skipBehavior")
// Append if desired. The index will be reset later.
if (!skipMissingContigs) {
metasBuilder += inMeta.copy()
}
}
}
// Finally we have all the contigs, so reset the index and write out the dictionary.
val finalMetadataDict = metasBuilder.result().zipWithIndex.map {
case (meta, index) => meta.copy(index=index)
}.toSeq
SequenceDictionary(finalMetadataDict:_*).write(output)
}
}
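// A hedged usage sketch (not part of the original file): assuming the usual fgbio convention of
// invoking tools by class name, and with hypothetical file names, sorting a dictionary to match a
// reference might look like:
//
//   fgbio SortSequenceDictionary -i sample.dict -d reference.dict -o sample.sorted.dict
//
// Contigs present in sample.dict but absent from reference.dict are appended at the end of the
// output unless --skip-missing-contigs is set.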
|
fulcrumgenomics/fgbio
|
src/main/scala/com/fulcrumgenomics/fasta/SortSequenceDictionary.scala
|
Scala
|
mit
| 5,050 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.attribute._
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.MLlibTestSparkContext
class RFormulaSuite extends SparkFunSuite with MLlibTestSparkContext {
test("params") {
ParamsSuite.checkParams(new RFormula())
}
test("transform numeric data") {
val formula = new RFormula().setFormula("id ~ v1 + v2")
val original = sqlContext.createDataFrame(
Seq((0, 1.0, 3.0), (2, 2.0, 5.0))).toDF("id", "v1", "v2")
val model = formula.fit(original)
val result = model.transform(original)
val resultSchema = model.transformSchema(original.schema)
val expected = sqlContext.createDataFrame(
Seq(
(0, 1.0, 3.0, Vectors.dense(1.0, 3.0), 0.0),
(2, 2.0, 5.0, Vectors.dense(2.0, 5.0), 2.0))
).toDF("id", "v1", "v2", "features", "label")
// TODO(ekl) make schema comparisons ignore metadata, to avoid .toString
assert(result.schema.toString == resultSchema.toString)
assert(resultSchema == expected.schema)
assert(result.collect() === expected.collect())
}
test("features column already exists") {
val formula = new RFormula().setFormula("y ~ x").setFeaturesCol("x")
val original = sqlContext.createDataFrame(Seq((0, 1.0), (2, 2.0))).toDF("x", "y")
intercept[IllegalArgumentException] {
formula.fit(original)
}
intercept[IllegalArgumentException] {
formula.fit(original)
}
}
test("label column already exists") {
val formula = new RFormula().setFormula("y ~ x").setLabelCol("y")
val original = sqlContext.createDataFrame(Seq((0, 1.0), (2, 2.0))).toDF("x", "y")
val model = formula.fit(original)
val resultSchema = model.transformSchema(original.schema)
assert(resultSchema.length == 3)
assert(resultSchema.toString == model.transform(original).schema.toString)
}
test("label column already exists but is not double type") {
val formula = new RFormula().setFormula("y ~ x").setLabelCol("y")
val original = sqlContext.createDataFrame(Seq((0, 1), (2, 2))).toDF("x", "y")
val model = formula.fit(original)
intercept[IllegalArgumentException] {
model.transformSchema(original.schema)
}
intercept[IllegalArgumentException] {
model.transform(original)
}
}
test("allow missing label column for test datasets") {
val formula = new RFormula().setFormula("y ~ x").setLabelCol("label")
val original = sqlContext.createDataFrame(Seq((0, 1.0), (2, 2.0))).toDF("x", "_not_y")
val model = formula.fit(original)
val resultSchema = model.transformSchema(original.schema)
assert(resultSchema.length == 3)
assert(!resultSchema.exists(_.name == "label"))
assert(resultSchema.toString == model.transform(original).schema.toString)
}
test("encodes string terms") {
val formula = new RFormula().setFormula("id ~ a + b")
val original = sqlContext.createDataFrame(
Seq((1, "foo", 4), (2, "bar", 4), (3, "bar", 5), (4, "baz", 5))
).toDF("id", "a", "b")
val model = formula.fit(original)
val result = model.transform(original)
val resultSchema = model.transformSchema(original.schema)
val expected = sqlContext.createDataFrame(
Seq(
(1, "foo", 4, Vectors.dense(0.0, 1.0, 4.0), 1.0),
(2, "bar", 4, Vectors.dense(1.0, 0.0, 4.0), 2.0),
(3, "bar", 5, Vectors.dense(1.0, 0.0, 5.0), 3.0),
(4, "baz", 5, Vectors.dense(0.0, 0.0, 5.0), 4.0))
).toDF("id", "a", "b", "features", "label")
assert(result.schema.toString == resultSchema.toString)
assert(result.collect() === expected.collect())
}
test("attribute generation") {
val formula = new RFormula().setFormula("id ~ a + b")
val original = sqlContext.createDataFrame(
Seq((1, "foo", 4), (2, "bar", 4), (3, "bar", 5), (4, "baz", 5))
).toDF("id", "a", "b")
val model = formula.fit(original)
val result = model.transform(original)
val attrs = AttributeGroup.fromStructField(result.schema("features"))
val expectedAttrs = new AttributeGroup(
"features",
Array(
new BinaryAttribute(Some("a__bar"), Some(1)),
new BinaryAttribute(Some("a__foo"), Some(2)),
new NumericAttribute(Some("b"), Some(3))))
assert(attrs === expectedAttrs)
}
}
|
practice-vishnoi/dev-spark-1
|
mllib/src/test/scala/org/apache/spark/ml/feature/RFormulaSuite.scala
|
Scala
|
apache-2.0
| 5,209 |
// Wei Chen - Density Peak Cluster Test
// 2016-06-03
import com.scalaml.TestData._
import com.scalaml.general.MatrixFunc._
import com.scalaml.algorithm.DensityPeakCluster
import org.scalatest.funsuite.AnyFunSuite
class DensityPeakClusterSuite extends AnyFunSuite {
val dpc = new DensityPeakCluster()
test("DensityPeakCluster Test : Clustering Tiny Data") {
assert(dpc.clear())
assert(dpc.config(Map("sd" -> 1.0, "densityf" -> 1.0, "deltaf" -> 1.0)))
val result = dpc.cluster(UNLABELED_TINY_DATA)
assert(arrayequal(result, LABEL_TINY_DATA.reverse))
}
test("DensityPeakCluster Test : Clustering Small Data") {
assert(dpc.clear())
assert(dpc.config(Map("sd" -> 2.0, "densityf" -> 1.0, "deltaf" -> 2.0)))
val result = dpc.cluster(UNLABELED_SMALL_DATA)
assert(arrayequal(result, LABEL_SMALL_DATA.reverse))
}
test("DensityPeakCluster Test : Clustering Large Data") {
assert(dpc.clear())
assert(dpc.config(Map("sd" -> 3.0, "densityf" -> 12.0, "deltaf" -> 2.0)))
val result = dpc.cluster(UNLABELED_LARGE_DATA)
assert(arrayequal(result, LABEL_LARGE_DATA))
}
test("DensityPeakCluster Test : Clear") {
assert(dpc.clear())
assert(dpc.dddata.isEmpty)
val emptyArr = dpc.dddata
assert(dpc.importdd(emptyArr))
}
test("DensityPeakCluster Test : Invalid Config") {
assert(dpc.clear())
assert(!dpc.config(Map("sd" -> "test")))
}
}
|
Wei-1/Scala-Machine-Learning
|
src/test/scala/algorithm/clustering/DensityPeakClusterTest.scala
|
Scala
|
mit
| 1,509 |
/*
* Copyright (C) 2015 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.domain.modifier
import org.openmole.core.highlight.HighLight
import org.openmole.core.pluginregistry.{ PluginInfo, PluginRegistry }
import org.osgi.framework._
class Activator extends BundleActivator {
override def stop(context: BundleContext): Unit = PluginRegistry.unregister(this)
override def start(context: BundleContext): Unit = {
import org.openmole.core.highlight.HighLight._
val highLight: Vector[HighLight] =
Vector(
"take",
"group",
"sliding",
"map",
"filter",
"zipWith",
"zipWithIndex",
"zipWithName",
"sort",
"sortBy",
"shuffle"
)
PluginRegistry.register(this, nameSpaces = Vector(this.getClass.getPackage), highLight = highLight)
}
}
|
openmole/openmole
|
openmole/plugins/org.openmole.plugin.domain.modifier/src/main/scala/org/openmole/plugin/domain/modifier/Activator.scala
|
Scala
|
agpl-3.0
| 1,516 |
package definiti.core.parser.project
import definiti.common.ast._
import definiti.common.utils.StringUtils
import definiti.core.Configuration
class NamespaceBuilder(fileContent: FileContent, configuration: Configuration) {
private val imports: Map[String, String] = {
val internalImports = fileContent.elements.collect {
case aliasType: AliasType => aliasType.name -> fullName(aliasType.name)
case definedType: DefinedType => definedType.name -> fullName(definedType.name)
case enum: Enum => enum.name -> fullName(enum.name)
case namedFunction: NamedFunction => namedFunction.name -> fullName(namedFunction.name)
case verification: Verification => verification.name -> fullName(verification.name)
}
fileContent.imports ++ internalImports.toMap
}
def build(): Namespace = {
Namespace(
name = StringUtils.lastPart(fileContent.packageName),
fullName = fileContent.packageName,
elements = fileContent.elements.map(processNamespaceElement)
)
}
private def processNamespaceElement(namespaceElement: NamespaceElement): NamespaceElement = {
namespaceElement match {
case aliasType: AliasType => processAliasType(aliasType)
case definedType: DefinedType => processDefinedType(definedType)
case enum: Enum => processEnum(enum)
case namedFunction: NamedFunction => processNamedFunction(namedFunction)
case verification: Verification => processVerification(verification)
case extendedContext: ExtendedContext[_] => processExtendedContext(extendedContext)
case nativeClassDefinition: NativeClassDefinition => nativeClassDefinition
}
}
private def processAliasType(aliasType: AliasType): AliasType = {
aliasType.copy(
fullName = fullName(aliasType.name),
parameters = aliasType.parameters.map(processParameterDefinition),
alias = processTypeDeclaration(aliasType.alias),
inherited = aliasType.inherited.map(processVerificationReference),
verifications = aliasType.verifications.map(processTypeVerification)
)
}
private def processParameterDefinition(parameterDefinition: ParameterDefinition): ParameterDefinition = {
parameterDefinition.copy(
typeReference = processAbstractTypeReference(parameterDefinition.typeReference)
)
}
private def processVerificationReference(verificationReference: VerificationReference): VerificationReference = {
verificationReference.copy(
verificationName = normalizeImport(verificationReference.verificationName)
)
}
private def processTypeVerification(typeVerification: TypeVerification): TypeVerification = {
typeVerification match {
case atomicTypeVerification: AtomicTypeVerification =>
atomicTypeVerification.copy(
message = processMessage(atomicTypeVerification.message),
function = processFunction(atomicTypeVerification.function)
)
case dependentTypeVerification: DependentTypeVerification =>
dependentTypeVerification.copy(
message = processMessage(dependentTypeVerification.message),
function = processFunction(dependentTypeVerification.function)
)
}
}
private def processFunction(definedFunction: DefinedFunction): DefinedFunction = {
definedFunction.copy(
parameters = definedFunction.parameters.map(processParameterDefinition),
body = processExpression(definedFunction.body)
)
}
private def processDefinedType(definedType: DefinedType): DefinedType = {
definedType.copy(
fullName = fullName(definedType.name),
parameters = definedType.parameters.map(processParameterDefinition),
attributes = definedType.attributes.map(processAttributeDefinition),
verifications = definedType.verifications.map(processTypeVerification),
inherited = definedType.inherited.map(processVerificationReference)
)
}
private def processAttributeDefinition(attributeDefinition: AttributeDefinition): AttributeDefinition = {
attributeDefinition.copy(
typeDeclaration = processTypeDeclaration(attributeDefinition.typeDeclaration),
verifications = attributeDefinition.verifications.map(processVerificationReference)
)
}
private def processEnum(enum: Enum): Enum = {
enum.copy(
fullName = fullName(enum.name)
)
}
private def processNamedFunction(namedFunction: NamedFunction): NamedFunction = {
namedFunction.copy(
fullName = fullName(namedFunction.name),
parameters = namedFunction.parameters.map(processParameterDefinition),
body = processExpression(namedFunction.body)
)
}
private def processVerification(verification: Verification): Verification = {
verification.copy(
fullName = fullName(verification.name),
parameters = verification.parameters.map(processParameterDefinition),
message = processMessage(verification.message),
function = processFunction(verification.function)
)
}
private def processMessage(message: VerificationMessage): VerificationMessage = {
message match {
case literalMessage: LiteralMessage =>
literalMessage
case typedMessage: TypedMessage =>
typedMessage.copy(
types = typedMessage.types.map(processTypeReference)
)
}
}
private def processExpression(expression: Expression): Expression = {
expression match {
case logicalExpression: LogicalExpression =>
logicalExpression.copy(
left = processExpression(logicalExpression.left),
right = processExpression(logicalExpression.right)
)
case calculatorExpression: CalculatorExpression =>
calculatorExpression.copy(
left = processExpression(calculatorExpression.left),
right = processExpression(calculatorExpression.right)
)
case not: Not =>
not.copy(
inner = processExpression(not.inner)
)
case booleanValue: BooleanValue => booleanValue
case integerValue: IntegerValue => integerValue
case numberValue: NumberValue => numberValue
case quotedStringValue: QuotedStringValue => quotedStringValue
case reference: Reference =>
reference.copy(
name = normalizeImport(reference.name)
)
case methodCall: MethodCall =>
methodCall.copy(
expression = processExpression(methodCall.expression),
parameters = methodCall.parameters.map(processExpression),
generics = methodCall.generics.map(processTypeReference)
)
case attributeCall: AttributeCall =>
attributeCall.copy(
expression = processExpression(attributeCall.expression)
)
case combinedExpression: CombinedExpression =>
combinedExpression.copy(
parts = combinedExpression.parts.map(processExpression)
)
case condition: Condition =>
condition.copy(
condition = processExpression(condition.condition),
onTrue = processExpression(condition.onTrue),
onFalse = condition.onFalse.map(processExpression)
)
case lambdaExpression: LambdaExpression =>
lambdaExpression.copy(
parameterList = lambdaExpression.parameterList.map(processParameterDefinition),
expression = processExpression(lambdaExpression.expression)
)
case functionCall: FunctionCall =>
functionCall.copy(
name = normalizeImport(functionCall.name),
parameters = functionCall.parameters.map(processExpression),
generics = functionCall.generics.map(processTypeReference)
)
case okValue: OkValue => okValue
case koValue: KoValue =>
koValue.copy(
parameters = koValue.parameters.map(processExpression)
)
}
}
private def processExtendedContext(context: ExtendedContext[_]): ExtendedContext[_] = {
configuration.contexts
.find(_.contextName == context.name)
.map { contextPlugin =>
ExtendedContext(
name = context.name,
content = contextPlugin.parse(context.content.toString, fileContent.packageName, imports, context.innerLocation),
innerLocation = context.innerLocation,
location = context.location
)
}
.getOrElse(context)
}
private def processTypeDeclaration(typeDeclaration: TypeDeclaration): TypeDeclaration = {
typeDeclaration.copy(
typeName = normalizeImport(typeDeclaration.typeName),
genericTypes = typeDeclaration.genericTypes.map(processTypeDeclaration)
)
}
private def fullName(value: String): String = {
StringUtils.canonical(fileContent.packageName, value)
}
private def processAbstractTypeReference(abstractTypeReference: AbstractTypeReference): AbstractTypeReference = {
abstractTypeReference match {
case lambdaReference: LambdaReference =>
lambdaReference.copy(
inputTypes = lambdaReference.inputTypes.map(processTypeReference),
outputType = processTypeReference(lambdaReference.outputType)
)
case namedFunctionReference: NamedFunctionReference =>
namedFunctionReference.copy(
normalizeImport(namedFunctionReference.functionName)
)
case typeReference: TypeReference =>
processTypeReference(typeReference)
case Unset => Unset
}
}
private def processTypeReference(typeReference: TypeReference): TypeReference = {
typeReference.copy(
typeName = normalizeImport(typeReference.typeName),
genericTypes = typeReference.genericTypes.map(processTypeReference)
)
}
private def normalizeImport(name: String): String = {
name.split("\\.").toList match {
case Nil => ""
case head :: tail => (imports.getOrElse(head, head) +: tail).mkString(".")
}
}
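  // Worked example, derived from the implementation above: with a hypothetical entry
  // "CustomType" -> "my.namespace.CustomType" in `imports`, normalizeImport("CustomType.attribute")
  // returns "my.namespace.CustomType.attribute"; a name whose head has no matching import is
  // returned unchanged.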
}
|
definiti/definiti-core
|
src/main/scala/definiti/core/parser/project/NamespaceBuilder.scala
|
Scala
|
mit
| 9,852 |
package org.jetbrains.sbt.project
import com.intellij.application.options.RegistryManager
import com.intellij.notification.NotificationType
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.externalSystem.model.project.{ProjectData => ESProjectData, _}
import com.intellij.openapi.externalSystem.model.task.event._
import com.intellij.openapi.externalSystem.model.task.{ExternalSystemTaskId, ExternalSystemTaskNotificationListener}
import com.intellij.openapi.externalSystem.model.{DataNode, ExternalSystemException}
import com.intellij.openapi.externalSystem.service.project.ExternalSystemProjectResolver
import com.intellij.openapi.module.StdModuleTypes
import com.intellij.openapi.project.{Project, ProjectManager}
import com.intellij.openapi.roots.DependencyScope
import com.intellij.openapi.util.io.FileUtil
import org.jetbrains.annotations.{NonNls, Nullable, TestOnly}
import org.jetbrains.plugins.scala._
import org.jetbrains.plugins.scala.build._
import org.jetbrains.plugins.scala.compiler.data.serialization.extensions.EitherExt
import org.jetbrains.plugins.scala.project.Version
import org.jetbrains.plugins.scala.project.external.{AndroidJdk, JdkByHome, JdkByName, SdkReference}
import org.jetbrains.plugins.scala.util.ScalaNotificationGroups
import org.jetbrains.sbt.SbtUtil._
import org.jetbrains.sbt.project.SbtProjectResolver._
import org.jetbrains.sbt.project.data._
import org.jetbrains.sbt.project.module.SbtModuleType
import org.jetbrains.sbt.project.settings._
import org.jetbrains.sbt.project.structure._
import org.jetbrains.sbt.resolvers.{SbtIvyResolver, SbtMavenResolver, SbtResolver}
import org.jetbrains.sbt.structure.XmlSerializer._
import org.jetbrains.sbt.structure.{BuildData, Configuration, ConfigurationData, DependencyData, DirectoryData, JavaData, ModuleDependencyData, ModuleIdentifier, ProjectData}
import org.jetbrains.sbt.{RichBoolean, RichFile, Sbt, SbtBundle, SbtUtil, usingTempFile, structure => sbtStructure}
import java.io.{File, FileNotFoundException}
import java.util.{Locale, UUID}
import scala.collection.{MapView, mutable}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.jdk.CollectionConverters._
import scala.util.{Failure, Random, Success, Try}
import scala.xml.{Elem, XML}
/**
* @author Pavel Fatin
*/
class SbtProjectResolver extends ExternalSystemProjectResolver[SbtExecutionSettings] with ExternalSourceRootResolution {
private val log = Logger.getInstance(getClass)
@volatile private var activeProcessDumper: Option[SbtStructureDump] = None
override def resolveProjectInfo(taskId: ExternalSystemTaskId,
wrongProjectPathDontUseIt: String,
isPreview: Boolean,
settings: SbtExecutionSettings,
listener: ExternalSystemTaskNotificationListener): DataNode[ESProjectData] = {
val projectRoot = {
val file = new File(settings.realProjectPath)
if (file.isDirectory) file else file.getParentFile
}
val sbtLauncher = settings.customLauncher.getOrElse(getDefaultLauncher)
val sbtVersion = detectSbtVersion(projectRoot, sbtLauncher)
if (isPreview) dummyProject(projectRoot, settings, sbtVersion).toDataNode
else importProject(taskId, settings, projectRoot, sbtLauncher, sbtVersion, listener)
}
private def importProject(taskId: ExternalSystemTaskId,
settings: SbtExecutionSettings,
projectRoot: File,
sbtLauncher: File,
@NonNls sbtVersion: String,
notifications: ExternalSystemTaskNotificationListener): DataNode[ESProjectData] = {
@NonNls val importTaskId = s"import:${UUID.randomUUID()}"
val importTaskDescriptor =
new TaskOperationDescriptorImpl(SbtBundle.message("sbt.import.to.intellij.project.model"), System.currentTimeMillis(), "project-model-import")
val esReporter = new ExternalSystemNotificationReporter(projectRoot.getAbsolutePath, taskId, notifications)
implicit val reporter: BuildReporter = if (isUnitTestMode) {
val logReporter = new LogReporter
new CompositeReporter(esReporter, logReporter)
} else esReporter
val structureDump = dumpStructure(projectRoot, sbtLauncher, Version(sbtVersion), settings, taskId.findProject())
// side-effecty status reporting
structureDump.foreach { _ =>
val convertStartEvent = new ExternalSystemStartEventImpl(importTaskId, null, importTaskDescriptor)
val event = new ExternalSystemTaskExecutionEvent(taskId, convertStartEvent)
notifications.onStatusChange(event)
}
val conversionResult = structureDump
.map { case (elem, _) =>
val data = elem.deserialize[sbtStructure.StructureData].getRight
convert(normalizePath(projectRoot), data, settings.jdk, sbtVersion).toDataNode
}
.recoverWith {
case ImportCancelledException(cause) =>
val causeMessage = if (cause != null) cause.getMessage else SbtBundle.message("sbt.unknown.cause")
// notify user if project exists already
val projectOpt = ProjectManager.getInstance().getOpenProjects.find(p => FileUtil.pathsEqual(p.getBasePath, projectRoot.getCanonicalPath))
projectOpt.foreach { p =>
val notification = ScalaNotificationGroups.balloonGroup.createNotification(SbtBundle.message("sbt.import.cancelled", causeMessage), NotificationType.INFORMATION)
notification.notify(p)
}
log.info("sbt import cancelled", cause)
// sorry, ExternalSystem expects a null when resolving is not possible
Success(null)
case x: Exception =>
Failure(new ExternalSystemException(x))
}
// more side-effecty reporting
conversionResult.transform (
_ => Success(new SuccessResultImpl(0, System.currentTimeMillis(), true)), /* TODO starttime*/
x => Success(
new FailureResultImpl(0, System.currentTimeMillis(),
List.empty[com.intellij.openapi.externalSystem.model.task.event.Failure].asJava // TODO error list
)
)
).foreach { result =>
val convertFinishedEvent = new ExternalSystemFinishEventImpl[TaskOperationDescriptor](
importTaskId, null, importTaskDescriptor, result
)
val event = new ExternalSystemTaskExecutionEvent(taskId, convertFinishedEvent)
notifications.onStatusChange(event)
}
conversionResult.get // ok to throw here, that's the way ExternalSystem likes it
}
private def dumpStructure(projectRoot: File,
sbtLauncher: File,
sbtVersion: Version,
settings:SbtExecutionSettings,
@Nullable project: Project
)(implicit reporter: BuildReporter): Try[(Elem, BuildMessages)] = {
val useShellImport = settings.useShellForImport && shellImportSupported(sbtVersion) && project != null
val options = dumpOptions(settings)
def doDumpStructure(structureFile: File): Try[(Elem, BuildMessages)] = {
val structureFilePath = normalizePath(structureFile)
val dumper = new SbtStructureDump()
activeProcessDumper = Option(dumper)
val messageResult: Try[BuildMessages] = {
if (useShellImport) {
val messagesF = dumper.dumpFromShell(project, structureFilePath, options, reporter, settings.preferScala2)
Try(Await.result(messagesF, Duration.Inf)) // TODO some kind of timeout / cancel mechanism
}
else {
val sbtStructureJar = settings
.customSbtStructureFile
.orElse(SbtUtil.getSbtStructureJar(sbtVersion))
.getOrElse(throw new ExternalSystemException(s"Could not find sbt-structure-extractor for sbt version $sbtVersion"))
val structureFilePath = normalizePath(structureFile)
// TODO add error/warning messages during dump, report directly
dumper.dumpFromProcess(
projectRoot, structureFilePath, options,
settings.vmExecutable, settings.vmOptions, settings.environment,
sbtLauncher, sbtStructureJar, settings.preferScala2)
}
}
activeProcessDumper = None
val result: Try[(Elem, BuildMessages)] = messageResult.flatMap { messages =>
val tried = {
def failure(reason: String): Failure[(Elem, BuildMessages)] = {
val message = SbtBundle.message("sbt.import.extracting.structure.failed") + s": $reason"
Failure(new Exception(message))
}
if (messages.status != BuildMessages.OK)
failure(SbtBundle.message("sbt.import.message.build.status", messages.status))
else if (!structureFile.isFile)
failure(SbtBundle.message("sbt.import.message.structure.file.is.not.a.file", structureFile.getPath))
else if (structureFile.length <= 0)
failure(SbtBundle.message("sbt.import.message.structure.file.is.empty", structureFile.getPath))
else Try {
val elem = XML.load(structureFile.toURI.toURL)
(elem, messages)
}
}
tried.recoverWith { case error =>
val exceptions = messages.exceptions.map(_.getLocalizedMessage).mkString("\\n")
val errorMsgs = messages.errors.map(_.getMessage).mkString("\\n")
val message = error.getMessage + "\\n" +
exceptions + (if (exceptions.nonEmpty) "\\n" else "") +
errorMsgs
Failure(new Exception(message, error.getCause))
}
}
if (result.isFailure) {
val processOutput = dumper.processOutput.mkString
// exception is logged in other places
log.debug(s"failed to dump sbt structure, sbt process output:\\n$processOutput")
}
result
}
if (!sbtLauncher.isFile) {
val error = SbtBundle.message("sbt.launcher.not.found", sbtLauncher.getCanonicalPath)
Failure(new FileNotFoundException(error))
} else if (!importSupported(sbtVersion)) {
val message = SbtBundle.message("sbt.sincesbtversion.required", sinceSbtVersion)
Failure(new UnsupportedOperationException(message))
}
else {
val structureFileReused = new File(FileUtil.getTempDirectory) / s"sbt-structure-reused-${projectRoot.getName}.xml"
if (RegistryManager.getInstance().is("sbt.project.import.reuse.previous.structure.file")){
if (structureFileReused.exists()) {
log.warn(s"reused structure file: $structureFileReused")
val elem = XML.load(structureFileReused.toURI.toURL)
Try((elem, BuildMessages.empty))
}
else {
log.warn(s"reused structure file created: $structureFileReused")
doDumpStructure(structureFileReused)
}
}
else {
if (structureFileReused.exists()) {
structureFileReused.delete()
}
usingTempFile("sbt-structure", Some(".xml")) { structureFile =>
doDumpStructure(structureFile)
}
}
}
}
private def dumpOptions(settings: SbtExecutionSettings): Seq[String] = {
Seq("download") ++
settings.resolveClassifiers.seq("resolveClassifiers") ++
settings.resolveJavadocs.seq("resolveJavadocs") ++
settings.resolveSbtClassifiers.seq("resolveSbtClassifiers")
}
/**
   * Create a project preview without using sbt, since sbt import can fail and users would then have to edit the project manually.
* Also sbt boot makes the whole process way too slow.
*/
private def dummyProject(projectRoot: File, settings: SbtExecutionSettings, sbtVersion: String): Node[ESProjectData] = {
// TODO add default scala sdk and sbt libs (newest versions or so)
val projectPath = projectRoot.getAbsolutePath
val projectName = normalizeModuleId(projectRoot.getName)
val projectTmpName = projectName + "_" + Random.nextInt(10000)
val sourceDir = new File(projectRoot, "src/main/scala")
val classDir = new File(projectRoot, "target/dummy")
val dummyConfigurationData = ConfigurationData("compile", Seq(DirectoryData(sourceDir, managed = false)), Seq.empty, Seq.empty, classDir)
val dummyJavaData = JavaData(None, Seq.empty)
val dummyDependencyData = DependencyData(Seq.empty, Seq.empty, Seq.empty)
val dummyRootProject = ProjectData(
projectTmpName, projectRoot.toURI, projectTmpName, s"org.$projectName", "0.0", projectRoot, None, Seq.empty,
new File(projectRoot, "target"), Seq(dummyConfigurationData), Option(dummyJavaData), None, None,
dummyDependencyData, Set.empty, None, Seq.empty, Seq.empty, Seq.empty
)
val projects = Seq(dummyRootProject)
val projectNode = new ProjectNode(projectName, projectPath, projectPath)
val libraryNodes = Seq.empty[LibraryNode]
val moduleFilesDirectory = new File(projectPath, Sbt.ModulesDirectory)
val projectToModule = createModules(projects, libraryNodes, moduleFilesDirectory)
val dummySbtProjectData = SbtProjectData(settings.jdk.map(JdkByName), sbtVersion, projectPath)
projectNode.add(new SbtProjectNode(dummySbtProjectData))
projectNode.addAll(projectToModule.values)
val dummyBuildData = BuildData(projectRoot.toURI, Seq.empty, Seq.empty, Seq.empty, Seq.empty)
val buildModule = createBuildModule(dummyBuildData, projects, moduleFilesDirectory, None, sbtVersion)
projectNode.add(buildModule)
projectNode
}
/**
* This implementation is the same as in sbt.Project.normalizeModuleId to avoid inconsistencies in the import process.
* Normalize a String so that it is suitable for use as a dependency management module identifier.
* This is a best effort implementation, since valid characters are not documented or consistent.
*/
private def normalizeModuleId(s: String) =
s.toLowerCase(Locale.ENGLISH)
.replaceAll("""\\W+""", "-")
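  // Worked example, derived from the implementation above: normalizeModuleId("My Project!")
  // yields "my-project-", since the string is lower-cased and each run of non-word characters
  // is replaced by a single dash.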
private def convert(root: String,
data: sbtStructure.StructureData,
settingsJdk: Option[String],
sbtVersion: String): Node[ESProjectData] = {
val projects: Seq[sbtStructure.ProjectData] = data.projects
val rootProject: sbtStructure.ProjectData =
projects.find(p => FileUtil.filesEqual(p.base, new File(root)))
.orElse(projects.headOption)
.getOrElse(throw new RuntimeException("No root project found"))
val projectNode = new ProjectNode(rootProject.name, root, root)
val projectJdk = chooseJdk(rootProject, settingsJdk)
projectNode.add(new SbtProjectNode(SbtProjectData(projectJdk, data.sbtVersion, root)))
val newPlay2Data = projects.flatMap(p => p.play2.map(d => (p.id, p.base, d)))
projectNode.add(new Play2ProjectNode(Play2OldStructureAdapter(newPlay2Data)))
val libraryNodes = createLibraries(data, projects)
projectNode.addAll(libraryNodes)
val moduleFilesDirectory = new File(root, Sbt.ModulesDirectory)
val projectToModule = createModules(projects, libraryNodes, moduleFilesDirectory)
//Sort modules by id to make project imports more reproducible
    //In particular this will ease testing of `org.jetbrains.sbt.project.SbtProjectImportingTest.testSCL13600`
    //(note: the order can still differ between machines, because the id depends on the URI)
val modulesSorted: Seq[ModuleNode] = projectToModule.values.toSeq.sortBy(_.getId)
projectNode.addAll(modulesSorted)
val sharedSourceModules = createSharedSourceModules(projectToModule, libraryNodes, moduleFilesDirectory)
projectNode.addAll(sharedSourceModules)
val buildModuleForProject: BuildData => ModuleNode = createBuildModule(_, projects, moduleFilesDirectory, data.localCachePath.map(_.getCanonicalPath), sbtVersion)
val buildModules = data.builds.map(buildModuleForProject)
if (buildModules.size > 1) {
buildModules.foreach(_.setIdeModuleGroup(Array("sbt-build-modules")))
}
projectNode.addAll(buildModules)
projectNode
}
/** Choose a project jdk based on information from sbt settings and IDE.
* More specific settings from sbt are preferred over IDE settings, on the assumption that the sbt project definition
* is what is more likely to be under source control.
*/
private def chooseJdk(project: sbtStructure.ProjectData, defaultJdk: Option[String]): Option[SdkReference] = {
// TODO put some of this logic elsewhere in resolving process?
val androidSdk = project.android.map(android => AndroidJdk(android.targetVersion))
val jdkHomeInSbtProject = project.java.flatMap(_.home).map(JdkByHome)
// default either from project structure or initial import settings
val default = defaultJdk.map(JdkByName)
androidSdk
.orElse(jdkHomeInSbtProject)
.orElse(default)
}
private def createModuleDependencies(projectToModule: Map[ProjectData,ModuleNode]): Unit = {
projectToModule.foreach { case (moduleProject, moduleNode) =>
moduleProject.dependencies.projects.foreach { dependencyId =>
val dependency =
projectToModule.values
.find(_.getId == ModuleNode.combinedId(dependencyId.project, dependencyId.buildURI))
.getOrElse(throw new ExternalSystemException("Cannot find project dependency: " + dependencyId.project))
val data = new ModuleDependencyNode(moduleNode, dependency)
data.setScope(scopeFor(dependencyId.configuration))
data.setExported(true)
moduleNode.add(data)
}
}
}
private def createModules(projects: Seq[sbtStructure.ProjectData], libraryNodes: Seq[LibraryNode], moduleFilesDirectory: File): Map[ProjectData,ModuleNode] = {
val unmanagedSourcesAndDocsLibrary = libraryNodes.map(_.data).find(_.getExternalName == Sbt.UnmanagedSourcesAndDocsName)
val nameToProjects = projects.groupBy(_.name)
val namesAreUnique = nameToProjects.size == projects.size
val projectToModule = projects.map { project =>
val moduleName =
if (namesAreUnique) project.name
else project.id
val groupName =
if (nameToProjects(project.name).size > 1) Array(project.name)
else null
val moduleNode = createModule(project, moduleFilesDirectory, moduleName)
moduleNode.setIdeModuleGroup(groupName)
val contentRootNode = createContentRoot(project)
project.android.foreach(a => a.apklibs.foreach(addApklibDirs(contentRootNode, _)))
moduleNode.add(contentRootNode)
moduleNode.addAll(createLibraryDependencies(project.dependencies.modules)(moduleNode, libraryNodes.map(_.data)))
moduleNode.add(createModuleExtData(project))
moduleNode.add(new SbtModuleNode(SbtModuleData(project.id, project.buildURI)))
moduleNode.addAll(createTaskData(project))
moduleNode.addAll(createSettingData(project))
moduleNode.addAll(createCommandData(project))
moduleNode.addAll(project.android.map(createFacet).toSeq)
moduleNode.addAll(createUnmanagedDependencies(project.dependencies.jars)(moduleNode))
unmanagedSourcesAndDocsLibrary foreach { lib =>
val dependency = new LibraryDependencyNode(moduleNode, lib, LibraryLevel.MODULE)
dependency.setScope(DependencyScope.COMPILE)
moduleNode.add(dependency)
}
(project,moduleNode)
}
val projectToModuleMap = projectToModule.toMap
createModuleDependencies(projectToModuleMap)
projectToModuleMap
}
private def createLibraries(data: sbtStructure.StructureData, projects: Seq[sbtStructure.ProjectData]): Seq[LibraryNode] = {
val repositoryModules = data.repository.map(_.modules).getOrElse(Seq.empty)
val (modulesWithoutBinaries, modulesWithBinaries) = repositoryModules.partition(_.binaries.isEmpty)
val otherModuleIds = projects.flatMap(_.dependencies.modules.map(_.id)).toSet --
repositoryModules.map(_.id).toSet
val libs = modulesWithBinaries.map(createResolvedLibrary) ++ otherModuleIds.map(createUnresolvedLibrary)
val modulesWithDocumentation = modulesWithoutBinaries.filter(m => m.docs.nonEmpty || m.sources.nonEmpty)
if (modulesWithDocumentation.isEmpty) return libs
val unmanagedSourceLibrary = new LibraryNode(Sbt.UnmanagedSourcesAndDocsName, true)
unmanagedSourceLibrary.addPaths(LibraryPathType.DOC, modulesWithDocumentation.flatMap(_.docs).map(_.path))
unmanagedSourceLibrary.addPaths(LibraryPathType.SOURCE, modulesWithDocumentation.flatMap(_.sources).map(_.path))
libs :+ unmanagedSourceLibrary
}
private def createModuleExtData(project: sbtStructure.ProjectData): ModuleExtNode = {
val ProjectData(_, _, _, _, _, _, packagePrefix, basePackages, _, _, java, scala, android, _, _, _, _, _, _) = project
val sdk = android.map(_.targetVersion).map(AndroidJdk)
.orElse(java.flatMap(_.home).map(JdkByHome))
val data = SbtModuleExtData(
scalaVersion = scala.map(_.version),
scalacClasspath = scala.fold(Seq.empty[File])(_.allCompilerJars),
scaladocExtraClasspath = scala.fold(Seq.empty[File])(_.extraJars),
scalacOptions = scala.fold(Seq.empty[String])(_.options),
sdk = sdk,
javacOptions = java.fold(Seq.empty[String])(_.options),
packagePrefix = packagePrefix,
basePackage = basePackages.headOption // TODO Rename basePackages to basePackage in sbt-ide-settings?
)
new ModuleExtNode(data)
}
private def createTaskData(project: sbtStructure.ProjectData): Seq[SbtTaskNode] = {
project.tasks.map { t =>
new SbtTaskNode(SbtTaskData(t.label, t.description.getOrElse(""), t.rank))
}
}
private def createSettingData(project: sbtStructure.ProjectData): Seq[SbtSettingNode] = {
project.settings.map { s =>
// TODO use options for description, value and handle them in the UI appropriately
new SbtSettingNode(SbtSettingData(s.label, s.description.getOrElse(""), s.rank, s.stringValue.getOrElse("")))
}
}
private def createCommandData(project: sbtStructure.ProjectData) = {
project.commands.map { c =>
new SbtCommandNode(SbtCommandData(c.name, c.help))
}
}
private def createFacet(android: sbtStructure.AndroidData): AndroidFacetNode = {
new AndroidFacetNode(SbtAndroidFacetData(android.targetVersion, android.manifest, android.apk,
android.res, android.assets, android.gen, android.libs,
android.isLibrary, android.proguardConfig))
}
private def createUnresolvedLibrary(moduleId: sbtStructure.ModuleIdentifier): LibraryNode = {
val module = sbtStructure.ModuleData(moduleId, Set.empty, Set.empty, Set.empty)
createLibrary(module, resolved = false)
}
private def createResolvedLibrary(module: sbtStructure.ModuleData): LibraryNode = {
createLibrary(module, resolved = true)
}
private def createLibrary(module: sbtStructure.ModuleData, resolved: Boolean): LibraryNode = {
val result = new LibraryNode(nameFor(module.id), resolved)
result.addPaths(LibraryPathType.BINARY, module.binaries.map(_.path).toSeq)
result.addPaths(LibraryPathType.SOURCE, module.sources.map(_.path).toSeq)
result.addPaths(LibraryPathType.DOC, module.docs.map(_.path).toSeq)
result
}
private def nameFor(id: sbtStructure.ModuleIdentifier) = {
if (IJ_SDK_CLASSIFIERS.contains(id.classifier)) { // DevKit expects IJ SDK library names in certain format for some features to work
s"[${id.classifier}]${id.organization}:${id.name}:${id.revision}"
} else {
val classifierOption = if (id.classifier.isEmpty) None else Some(id.classifier)
s"${id.organization}:${id.name}:${id.revision}" + classifierOption.map(":" + _).getOrElse("") + s":${id.artifactType}"
}
}
private def createModule(project: sbtStructure.ProjectData, moduleFilesDirectory: File, moduleName: String): ModuleNode = {
// TODO use both ID and Name when related flaws in the External System will be fixed
// TODO explicit canonical path is needed until IDEA-126011 is fixed
val projectId = ModuleNode.combinedId(project.id, Option(project.buildURI))
val result = new ModuleNode(StdModuleTypes.JAVA.getId, projectId, moduleName,
moduleFilesDirectory.path, project.base.canonicalPath)
result.setInheritProjectCompileOutputPath(false)
project.configurations.find(_.id == "compile").foreach { configuration =>
result.setCompileOutputPath(ExternalSystemSourceType.SOURCE, configuration.classes.path)
}
project.configurations.find(_.id == "test").foreach { configuration =>
result.setCompileOutputPath(ExternalSystemSourceType.TEST, configuration.classes.path)
}
result
}
private def createContentRoot(project: sbtStructure.ProjectData): ContentRootNode = {
val productionSources = validRootPathsIn(project, "compile")(_.sources)
val productionResources = validRootPathsIn(project, "compile")(_.resources)
val testSources = validRootPathsIn(project, "test")(_.sources) ++ validRootPathsIn(project, "it")(_.sources)
val testResources = validRootPathsIn(project, "test")(_.resources) ++ validRootPathsIn(project, "it")(_.resources)
val result = new ContentRootNode(project.base.path)
result.storePaths(ExternalSystemSourceType.SOURCE, unmanagedDirectories(productionSources))
result.storePaths(ExternalSystemSourceType.SOURCE_GENERATED, managedDirectories(productionSources))
result.storePaths(ExternalSystemSourceType.RESOURCE, allDirectories(productionResources))
result.storePaths(ExternalSystemSourceType.TEST, unmanagedDirectories(testSources))
result.storePaths(ExternalSystemSourceType.TEST_GENERATED, managedDirectories(testSources))
result.storePaths(ExternalSystemSourceType.TEST_RESOURCE, allDirectories(testResources))
getExcludedTargetDirs(project).foreach { path =>
result.storePath(ExternalSystemSourceType.EXCLUDED, path.path)
}
result
}
private def allDirectories(dirs: Seq[sbtStructure.DirectoryData]) =
dirs.map(_.file.canonicalPath)
private def managedDirectories(dirs: Seq[sbtStructure.DirectoryData]) =
dirs.filter(_.managed).map(_.file.canonicalPath)
private def unmanagedDirectories(dirs: Seq[sbtStructure.DirectoryData]) =
dirs.filterNot(_.managed).map(_.file.canonicalPath)
// We cannot always exclude the whole ./target/ directory because of
  // the generated sources, so we resort to a heuristic.
private def getExcludedTargetDirs(project: sbtStructure.ProjectData): Seq[File] = {
val extractedExcludes = project.configurations.flatMap(_.excludes)
if (extractedExcludes.nonEmpty)
return extractedExcludes.distinct
val managedDirectories = project.configurations
.flatMap(configuration => configuration.sources ++ configuration.resources)
.filter(_.managed)
.map(_.file)
val defaultNames = Set("main", "test")
val relevantDirectories = managedDirectories.filter(file => file.exists || !defaultNames.contains(file.getName))
def isRelevant(f: File): Boolean = !relevantDirectories.forall(_.isOutsideOf(f))
if (isRelevant(project.target)) {
// If we can't exclude the target directory, go one level deeper (which may hit resolution-cache and streams)
Option(project.target.listFiles()).toList.flatten.filter {
child => child.isDirectory && !isRelevant(child)
}
} else List(project.target)
}
private def createBuildModule(build: sbtStructure.BuildData, projects: Seq[ProjectData], moduleFilesDirectory: File, localCachePath: Option[String], sbtVersion: String): ModuleNode = {
val buildBaseProject =
projects
.filter(p => p.buildURI == build.uri)
.foldLeft(None: Option[ProjectData]) {
case (None, p) => Some(p)
case (Some(p), p1) =>
val parent = if (p.base.isAncestorOf(p1.base)) p else p1
Some(parent)
}
val buildId = buildBaseProject
.map(_.name + Sbt.BuildModuleSuffix)
.getOrElse(build.uri.toString)
val buildBaseDir = buildBaseProject
.map(_.base)
.getOrElse {
if (build.uri.getScheme == "file") new File(build.uri.getPath)
else projects.head.base // this really shouldn't happen
}
val buildRoot = buildBaseDir / Sbt.ProjectDirectory
// TODO explicit canonical path is needed until IDEA-126011 is fixed
val result = new ModuleNode(SbtModuleType.instance.getId, buildId, buildId, moduleFilesDirectory.path, buildRoot.canonicalPath)
//todo: probably it should depend on sbt version?
result.add(ModuleSdkNode.inheritFromProject)
result.setInheritProjectCompileOutputPath(false)
result.setCompileOutputPath(ExternalSystemSourceType.SOURCE, (buildRoot / Sbt.TargetDirectory / "idea-classes").path)
result.setCompileOutputPath(ExternalSystemSourceType.TEST, (buildRoot / Sbt.TargetDirectory / "idea-test-classes").path)
result.add(createBuildContentRoot(buildRoot))
val library = {
val classes = build.classes.filter(_.exists).map(_.path)
val docs = build.docs.filter(_.exists).map(_.path)
val sources = build.sources.filter(_.exists).map(_.path)
createModuleLevelDependency(Sbt.BuildLibraryPrefix + sbtVersion, classes, docs, sources, DependencyScope.PROVIDED)(result)
}
result.add(library)
result.add(createSbtBuildModuleData(build, projects, localCachePath))
result
}
private def createBuildContentRoot(buildRoot: File): ContentRootNode = {
val result = new ContentRootNode(buildRoot.path)
val sourceDirs = Seq(buildRoot) // , base << 1
val excludedDirs = Seq(
buildRoot / Sbt.TargetDirectory,
buildRoot / Sbt.ProjectDirectory / Sbt.TargetDirectory)
result.storePaths(ExternalSystemSourceType.SOURCE, sourceDirs.map(_.path))
result.storePaths(ExternalSystemSourceType.EXCLUDED, excludedDirs.map(_.path))
result
}
private def createSbtBuildModuleData(build: sbtStructure.BuildData, projects: Seq[ProjectData], localCachePath: Option[String]): SbtBuildModuleNode = {
val buildProjects = projects.filter(p => p.buildURI == build.uri)
val imports = build.imports.flatMap(_.trim.substring(7).split(", "))
val projectResolvers = buildProjects.flatMap(_.resolvers)
val resolvers = projectResolvers.map { r => new SbtMavenResolver(r.name, r.root).asInstanceOf[SbtResolver] }
val resolversAll = resolvers.toSet + localCacheResolver(localCachePath)
val moduleData = SbtBuildModuleData(imports, resolversAll, build.uri)
new SbtBuildModuleNode(moduleData)
}
private def localCacheResolver(localCachePath: Option[String]): SbtResolver = {
val localCachePathFinal = localCachePath.getOrElse {
System.getProperty("user.home") + "/.ivy2/cache".replace('/', File.separatorChar)
}
new SbtIvyResolver("Local cache", localCachePathFinal, isLocal = true, SbtBundle.message("sbt.local.cache"))
}
private def validRootPathsIn(project: sbtStructure.ProjectData, scope: String)
(selector: sbtStructure.ConfigurationData => Seq[sbtStructure.DirectoryData]): Seq[sbtStructure.DirectoryData] = {
project.configurations
.find(_.id == scope)
.map(selector)
.getOrElse(Seq.empty)
.filterNot(_.file.isOutsideOf(project.base))
}
protected def createLibraryDependencies(dependencies: Seq[sbtStructure.ModuleDependencyData])
(moduleData: ModuleData, libraries: Seq[LibraryData]): Seq[LibraryDependencyNode] = {
val dependenciesWithResolvedConflicts = resolveLibraryDependencyConflicts(dependencies)
dependenciesWithResolvedConflicts.map { dependency =>
val name = nameFor(dependency.id)
val library = libraries.find(_.getExternalName == name).getOrElse(
throw new ExternalSystemException("Library not found: " + name))
val data = new LibraryDependencyNode(moduleData, library, LibraryLevel.PROJECT)
data.setScope(scopeFor(dependency.configurations))
data
}
}
private def createUnmanagedDependencies(dependencies: Seq[sbtStructure.JarDependencyData])
(moduleData: ModuleData): Seq[LibraryDependencyNode] = {
dependencies.groupBy(it => scopeFor(it.configurations)).toSeq.map { case (scope, dependency) =>
val name = scope match {
case DependencyScope.COMPILE => Sbt.UnmanagedLibraryName
case it => s"${Sbt.UnmanagedLibraryName}-${it.getDisplayName.toLowerCase}"
}
val files = dependency.map(_.file.path)
createModuleLevelDependency(name, files, Seq.empty, Seq.empty, scope)(moduleData)
}
}
private def createModuleLevelDependency(name: String, classes: Seq[String], docs: Seq[String], sources: Seq[String], scope: DependencyScope)
(moduleData: ModuleData): LibraryDependencyNode = {
val libraryNode = new LibraryNode(name, resolved = true)
libraryNode.addPaths(LibraryPathType.BINARY, classes)
libraryNode.addPaths(LibraryPathType.DOC, docs)
libraryNode.addPaths(LibraryPathType.SOURCE, sources)
val result = new LibraryDependencyNode(moduleData, libraryNode, LibraryLevel.MODULE)
result.setScope(scope)
result
}
private def addApklibDirs(contentRootNode: ContentRootNode, apklib: sbtStructure.ApkLib): Unit = {
contentRootNode.storePath(ExternalSystemSourceType.SOURCE, apklib.sources.canonicalPath)
contentRootNode.storePath(ExternalSystemSourceType.SOURCE_GENERATED, apklib.gen.canonicalPath)
contentRootNode.storePath(ExternalSystemSourceType.RESOURCE, apklib.resources.canonicalPath)
}
protected def scopeFor(configurations: Seq[sbtStructure.Configuration]): DependencyScope = {
val ids = configurations.toSet
//note: these configuration values are calculated in
// org.jetbrains.sbt.extractors.DependenciesExtractor.mapConfigurations (it's a separate project)
if (ids.contains(sbtStructure.Configuration.Compile))
DependencyScope.COMPILE
else if (ids.contains(sbtStructure.Configuration.Runtime))
DependencyScope.RUNTIME //note: in sbt Runtime and Provided dependencies are also automatically included into Test scope
else if (ids.contains(sbtStructure.Configuration.Test))
DependencyScope.TEST
else if (ids.contains(sbtStructure.Configuration.Provided))
DependencyScope.PROVIDED
else
DependencyScope.COMPILE
}
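  // Worked example, derived from the mapping above: a dependency that appears in both the Runtime
  // and Test configurations resolves to DependencyScope.RUNTIME, since Runtime is checked before
  // Test; a dependency with no recognized configuration falls back to DependencyScope.COMPILE.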
override def cancelTask(taskId: ExternalSystemTaskId, listener: ExternalSystemTaskNotificationListener): Boolean =
//noinspection UnitInMap
activeProcessDumper
.map(_.cancel())
.isDefined
}
object SbtProjectResolver {
val IJ_SDK_CLASSIFIERS = Set("IJ-SDK", "IJ-PLUGIN")
case class ImportCancelledException(cause: Throwable) extends Exception(cause)
val SBT_PROCESS_CHECK_TIMEOUT_MSEC = 100
def shellImportSupported(sbtVersion: Version): Boolean =
sbtVersion >= sinceSbtVersionShell
def importSupported(sbtVersion: Version): Boolean =
sbtVersion >= sinceSbtVersion
// TODO shared code, move to a more suitable object
val sinceSbtVersion: Version = Version("0.13.0")
// TODO shared code, move to a more suitable object
val sinceSbtVersionShell: Version = Version("0.13.5")
private case class LibraryIdentifierWithoutRevision(
organization: String,
name: String,
artifactType: String,
classifier: String
)
private object LibraryIdentifierWithoutRevision {
def from(id: ModuleIdentifier): LibraryIdentifierWithoutRevision =
LibraryIdentifierWithoutRevision(id.organization, id.name, id.artifactType, id.classifier)
}
/**
   * In case there are several dependencies (usually transitive) on the same library but with different versions, we keep one "best" dependency.<br>
* Otherwise, it can lead to various classpath-related issues at runtime (e.g. SCL-19878, SCL-18952)
*
   * Note that this basic conflict-managing process is far from what is implemented in sbt.
   * For example, SCL-18952 is not fixed "fairly".
   * But it's at least better than nothing; it helps avoid multiple jars of the same library in the classpath.
*
   * Note that sbt has a separate classpath for each scope, which can be obtained using {{{
   * show Compile / dependencyClasspathAsJars
   * show Runtime / dependencyClasspathAsJars
   * show Test / dependencyClasspathAsJars
* }}}
   * And right now we can't fully emulate this with the IntelliJ model, which implies a single dependency on the same library.
   *
   * Though in the future we could move this conflict resolution to runtime, when the program is being executed, and hold multiple dependencies on the same library in the model.
   * It would require patching the UI for `Project settings | Modules | Dependencies`
*
   * @param dependencies library dependencies with potentially conflicting versions
   * @return library dependencies where all conflicting library versions are replaced with a single "best" library dependency.
   * @note it emulates the default sbt behaviour where the "latest revision is selected".
   *       If a non-default conflictManager is set in the sbt build definition, this may not behave as expected<br>
* (see https://www.scala-sbt.org/1.x/docs/Library-Management.html#Conflict+Management)
*/
@TestOnly
def resolveLibraryDependencyConflicts(dependencies: Seq[sbtStructure.ModuleDependencyData]): Seq[sbtStructure.ModuleDependencyData] = {
val libToConflictingDeps: Map[LibraryIdentifierWithoutRevision, Seq[ModuleDependencyData]] =
dependencies.groupBy(d => LibraryIdentifierWithoutRevision.from(d.id)).filter(_._2.size > 1)
val libToBestDependencyData: MapView[LibraryIdentifierWithoutRevision, ModuleDependencyData] =
libToConflictingDeps.view.mapValues(calculateBestDependency)
val alreadyResolvedConflicts = mutable.Set.empty[LibraryIdentifierWithoutRevision]
dependencies.flatMap { dep =>
val ortArtName = LibraryIdentifierWithoutRevision.from(dep.id)
libToBestDependencyData.get(ortArtName) match {
case None => Some(dep)
case Some(value) =>
if (alreadyResolvedConflicts.contains(ortArtName))
None
else {
alreadyResolvedConflicts += ortArtName
Some(value)
}
}
}
}
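  // Worked example with hypothetical module ids, derived from the logic above: given two dependencies
  // on "org.foo:lib" with revisions 1.0 (Test) and 1.2 (Compile), both entries collapse into a single
  // dependency on revision 1.2 with the Compile scope, because 1.2 is the highest version and Compile
  // is the highest-priority configuration among the conflicting entries.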
/**
   * Return the dependency with the max library version and the "max" scope. Note that scopes do not have a strict order.
* The most problematic part is that we can't directly compare "Provided" and "Runtime" scopes.
* They have completely opposite semantics. But here we assume that "Provided" > "Runtime".
*
   * @note in any case, we can't fully emulate the sbt dependency & classpath model with the current IntelliJ model
* @note in sbt, Provided & Runtime scopes are automatically added to the "Test" scope, so "Test" has the lowest priority.
*/
private def calculateBestDependency(conflictingDependencies: Seq[ModuleDependencyData]): ModuleDependencyData = {
val dependencyWithMaxVersion = conflictingDependencies.maxBy(d => Version(d.id.revision))
val maxConfigurationOpt = conflictingDependencies.iterator.flatMap(_.configurations).maxByOption {
case Configuration.Compile => 4
case Configuration.Provided => 3
case Configuration.Runtime => 2
case Configuration.Test => 1
case _ => 0
}
ModuleDependencyData(
dependencyWithMaxVersion.id,
maxConfigurationOpt.map(Seq(_)).getOrElse(dependencyWithMaxVersion.configurations)
)
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/sbt/project/SbtProjectResolver.scala
|
Scala
|
apache-2.0
| 39,698 |
package jp.co.cyberagent.aeromock.core.bootstrap
import jp.co.cyberagent.aeromock.test.SpecSupport
import org.specs2.mutable.{Tables, Specification}
/**
*
* @author stormcat24
*/
class BootstrapManagerJade4jSpec extends Specification with Tables with SpecSupport {
"BootstrapManager" should {
"delegete" in {
BootstrapManager.delegate.collectFirst {
case (EnabledMode.JADE4j, either) => either
} must beSome(beRight())
}
}
}
|
CyberAgent/aeromock
|
aeromock-jade4j/src/test/scala/jp/co/cyberagent/aeromock/core/bootstrap/BootstrapManagerJade4jSpec.scala
|
Scala
|
mit
| 464 |
package org.scalameter.utils
final class SlidingWindow(_cap: Int) {
require(_cap >= 1)
val capacity = _cap + 1
private val store = new Array[Double](capacity)
private var first = 0
private var next = 0
def add(t: Double) {
store(next) = t
val inc = (next + 1) % capacity
next = inc
if (inc == first) first = (first + 1) % capacity
}
def size: Int = {
if (first == next) 0
else if (first < next) next - first
else next + capacity - first
}
class Iterator extends scala.Iterator[Double] {
var i = first
def hasNext = i != SlidingWindow.this.next
def next() = {
val r = store(i)
i = (i + 1) % capacity
r
}
override def toString = s"SlidingWindow.Iterator(i: $i, sw: ${SlidingWindow.this})"
}
def iterator = new Iterator
override def toString = s"SlidingWindow(first: $first, next: $next, raw: ${store.mkString(", ")})"
def sum: Double = {
var i = first
var s = 0.0
while (i != next) {
s += store(i)
i = (i + 1) % capacity
}
s
}
def mean: Double = sum.toDouble / size
def stdev: Double = {
val m = mean
var i = first
var s = 0.0
while (i != next) {
val diff = store(i) - m
s += diff * diff
i = (i + 1) % capacity
}
math.sqrt(s / (size - 1))
}
def cov: Double = stdev / mean
}
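// A small, hedged usage sketch (not part of the original file) exercising the window arithmetic above;
// the object name is an assumption for illustration only.
private[utils] object SlidingWindowDemo {
  def main(args: Array[String]): Unit = {
    val w = new SlidingWindow(3)
    // Adding four values into a window of capacity 3 evicts the oldest one.
    Seq(1.0, 2.0, 3.0, 4.0).foreach(w.add)
    assert(w.iterator.toList == List(2.0, 3.0, 4.0))
    assert(w.size == 3)
    assert(w.mean == 3.0)   // (2 + 3 + 4) / 3
    assert(w.stdev == 1.0)  // sample standard deviation of 2, 3, 4
  }
}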
|
lossyrob/scalpel
|
src/main/scala/org/scalameter/utils/SlidingWindow.scala
|
Scala
|
bsd-3-clause
| 1,367 |
package devnull.rest
import java.util.UUID
import javax.servlet.http.HttpServletRequest
import com.typesafe.scalalogging.LazyLogging
import devnull.UuidFromString
import devnull.rest.helpers.ContentTypeResolver._
import devnull.rest.helpers.DirectiveHelper.trueOrElse
import devnull.rest.helpers.EitherDirective.{EitherDirective, fromEither, withJson}
import devnull.rest.helpers.ResponseWrites.ResponseJson
import devnull.rest.helpers._
import devnull.sessions.{EventId, SessionId, SessionService}
import devnull.storage._
import doobie.imports.toMoreConnectionIOOps
import doobie.util.transactor.Transactor
import unfiltered.directives.Directive
import unfiltered.directives.Directives._
import unfiltered.request.{GET, POST}
import unfiltered.response._
import scalaz.concurrent.Task
class SessionFeedbackResource(
sessionService: SessionService,
feedbackRepository: FeedbackRepository,
paperFeedbackRepository: PaperFeedbackRepository,
xa: Transactor[Task]
) extends LazyLogging {
type ResponseDirective =
Directive[HttpServletRequest, ResponseFunction[Any], ResponseFunction[Any]]
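  // Lifts an Option into a Directive: succeeds with the wrapped value when present,
  // otherwise short-circuits with the `orElse` response.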
def getOrRespond[R, A](opt: Option[A], orElse: => ResponseFunction[R]) =
opt.map(success).getOrElse(error(orElse))
def handleFeedbacks(eventIdStr: String, sessionIdStr: String): ResponseDirective = {
val postFeedback = for {
_ <- POST
voterInfo <- VoterIdentification.identify()
_ <- withContentTypes(List(MIMEType.Json))
eventId <- fromEither(UuidFromString(eventIdStr).right.map(EventId.apply))
sessionId <- fromEither(UuidFromString(sessionIdStr).right.map(SessionId.apply))
session <- getOrRespond(
sessionService.getSession(eventId, sessionId),
NotFound ~> ResponseString("Didn't find the session")
)
_ <- trueOrElse(
sessionService.canRegisterFeedback(eventId, sessionId),
Forbidden ~> ResponseString("Feedback not open yet!")
)
parsed <- parseFeedback(session.eventId, session.sessionId, voterInfo)
optFeedback <- fromEither(parsed)
feedback <- getOrRespond(
optFeedback,
BadRequest ~> ResponseString(
"Feedback did not contain all required fields."
)
)
} yield {
logger.info(s"POST => $feedback from $voterInfo")
val feedbackId: FeedbackId =
feedbackRepository.insertFeedback(feedback).transact(xa).unsafePerformSync
Accepted ~> ResponseJson(feedbackId)
}
val getFeedback = for {
_ <- GET
eventId <- fromEither(UuidFromString(eventIdStr).right.map(EventId.apply))
sessionId <- fromEither(UuidFromString(sessionIdStr).right.map(SessionId.apply))
_ <- getOrRespond(
sessionService.getSession(eventId, sessionId),
NotFound ~> ResponseString("Didn't find the session")
)
} yield {
val sId: UUID = sessionId.id
val eId: UUID = eventId.id
val response = for {
sessionOnlineFeedback <- feedbackRepository
.selectFeedbackForSession(sId)
.transact(xa)
sessionPaper <- paperFeedbackRepository.selectFeedbackForSession(sId).transact(xa)
avgConferenceOnlineFeedback <- feedbackRepository
.selectFeedbackForEvent(eId)
.transact(xa)
avgPaperEvent <- paperFeedbackRepository
.selectAvgFeedbackForEvent(eId)
.transact(xa)
comments <- feedbackRepository.selectComments(sId).transact(xa)
} yield {
val (paperDto: PaperDto, participants: Int) = avgPaperEvent.map {
case (f: PaperRatingResult, i: Option[Double]) =>
(
PaperDto(f.green.getOrElse(0), f.yellow.getOrElse(0), f.red.getOrElse(0)),
i.getOrElse(0d).toInt
)
}.getOrElse((PaperDto(0, 0, 0), 0))
GivenFeedbackDto(
session = FeedbackDto(
OnlineDto(sessionOnlineFeedback),
sessionPaper
.map(f => PaperDto(f.ratings.green, f.ratings.yellow, f.ratings.red))
.getOrElse(PaperDto(0, 0, 0)),
sessionPaper.map(_.participants).getOrElse(0)
),
conference = FeedbackDto(
OnlineDto(avgConferenceOnlineFeedback),
paperDto,
participants
),
comments
)
}
Ok ~> ResponseJson(response.unsafePerformSync)
}
postFeedback | getFeedback
}
def parseFeedback(
eventId: EventId,
sessionId: SessionId,
voterInfo: VoterInfo
): EitherDirective[Either[Throwable, Option[Feedback]]] = {
withJson { rating: Ratings =>
Feedback(null, null, voterInfo, sessionId.id, rating)
}
}
}
case class OnlineDto(
overall: Double,
relevance: Double,
content: Double,
quality: Double,
count: Double
)
case class PaperDto(green: Double, yellow: Double, red: Double)
case class FeedbackDto(online: OnlineDto, paper: PaperDto, participants: Int)
case class GivenFeedbackDto(
session: FeedbackDto,
conference: FeedbackDto,
comments: List[String]
)
object OnlineDto {
def apply(input: Option[FeedbackResult]): OnlineDto = {
input.map { i =>
OnlineDto(
i.overall.getOrElse(0),
i.relevance.getOrElse(0),
i.content.getOrElse(0),
i.quality.getOrElse(0),
i.count.getOrElse(0)
)
}.getOrElse(OnlineDto(0d, 0d, 0d, 0d, 0))
}
}
|
javaBin/devnull
|
src/main/scala/devnull/rest/SessionFeedbackResource.scala
|
Scala
|
apache-2.0
| 5,681 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.io.{File, NotSerializableException}
import java.lang.management.ManagementFactory
import java.net.URL
import java.nio.ByteBuffer
import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.util.control.NonFatal
import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.scheduler.{DirectTaskResult, IndirectTaskResult, Task}
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.storage.{StorageLevel, TaskResultBlockId}
import org.apache.spark.unsafe.memory.TaskMemoryManager
import org.apache.spark.util._
/**
* Spark executor, backed by a threadpool to run tasks.
*
* This can be used with Mesos, YARN, and the standalone scheduler.
* An internal RPC interface (at the moment Akka) is used for communication with the driver,
* except in the case of Mesos fine-grained mode.
*/
private[spark] class Executor(
executorId: String,
executorHostname: String,
env: SparkEnv,
userClassPath: Seq[URL] = Nil,
isLocal: Boolean = false)
extends Logging {
logInfo(s"Starting executor ID $executorId on host $executorHostname")
// Application dependencies (added through SparkContext) that we've fetched so far on this node.
// Each map holds the master's timestamp for the version of that file or JAR we got.
private val currentFiles: HashMap[String, Long] = new HashMap[String, Long]()
private val currentJars: HashMap[String, Long] = new HashMap[String, Long]()
private val EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new Array[Byte](0))
private val conf = env.conf
// No ip or host:port - just hostname
  Utils.checkHost(executorHostname, "Expected executor slave to be a hostname")
// must not have port specified.
assert (0 == Utils.parseHostPort(executorHostname)._2)
// Make sure the local hostname we report matches the cluster scheduler's name for this host
Utils.setCustomHostname(executorHostname)
if (!isLocal) {
// Setup an uncaught exception handler for non-local mode.
// Make any thread terminations due to uncaught exceptions kill the entire
// executor process to avoid surprising stalls.
Thread.setDefaultUncaughtExceptionHandler(SparkUncaughtExceptionHandler)
}
// Start worker thread pool
private val threadPool = ThreadUtils.newDaemonCachedThreadPool("Executor task launch worker")
private val executorSource = new ExecutorSource(threadPool, executorId)
if (!isLocal) {
env.metricsSystem.registerSource(executorSource)
env.blockManager.initialize(conf.getAppId)
}
// Create an RpcEndpoint for receiving RPCs from the driver
private val executorEndpoint = env.rpcEnv.setupEndpoint(
ExecutorEndpoint.EXECUTOR_ENDPOINT_NAME, new ExecutorEndpoint(env.rpcEnv, executorId))
// Whether to load classes in user jars before those in Spark jars
private val userClassPathFirst = conf.getBoolean("spark.executor.userClassPathFirst", false)
// Create our ClassLoader
// do this after SparkEnv creation so can access the SecurityManager
private val urlClassLoader = createClassLoader()
private val replClassLoader = addReplClassLoaderIfNeeded(urlClassLoader)
// Set the classloader for serializer
env.serializer.setDefaultClassLoader(replClassLoader)
// Akka's message frame size. If task result is bigger than this, we use the block manager
// to send the result back.
private val akkaFrameSize = AkkaUtils.maxFrameSizeBytes(conf)
// Limit of bytes for total size of results (default is 1GB)
private val maxResultSize = Utils.getMaxResultSize(conf)
// Maintains the list of running tasks.
private val runningTasks = new ConcurrentHashMap[Long, TaskRunner]
// Executor for the heartbeat task.
private val heartbeater = ThreadUtils.newDaemonSingleThreadScheduledExecutor("driver-heartbeater")
startDriverHeartbeater()
def launchTask(
context: ExecutorBackend,
taskId: Long,
attemptNumber: Int,
taskName: String,
serializedTask: ByteBuffer): Unit = {
val tr = new TaskRunner(context, taskId = taskId, attemptNumber = attemptNumber, taskName,
serializedTask)
runningTasks.put(taskId, tr)
threadPool.execute(tr)
}
def killTask(taskId: Long, interruptThread: Boolean): Unit = {
val tr = runningTasks.get(taskId)
if (tr != null) {
tr.kill(interruptThread)
}
}
def stop(): Unit = {
env.metricsSystem.report()
env.rpcEnv.stop(executorEndpoint)
heartbeater.shutdown()
heartbeater.awaitTermination(10, TimeUnit.SECONDS)
threadPool.shutdown()
if (!isLocal) {
env.stop()
}
}
/** Returns the total amount of time this JVM process has spent in garbage collection. */
private def computeTotalGcTime(): Long = {
ManagementFactory.getGarbageCollectorMXBeans.map(_.getCollectionTime).sum
}
class TaskRunner(
execBackend: ExecutorBackend,
val taskId: Long,
val attemptNumber: Int,
taskName: String,
serializedTask: ByteBuffer)
extends Runnable {
/** Whether this task has been killed. */
@volatile private var killed = false
/** How much the JVM process has spent in GC when the task starts to run. */
@volatile var startGCTime: Long = _
/**
* The task to run. This will be set in run() by deserializing the task binary coming
* from the driver. Once it is set, it will never be changed.
*/
@volatile var task: Task[Any] = _
def kill(interruptThread: Boolean): Unit = {
logInfo(s"Executor is trying to kill $taskName (TID $taskId)")
killed = true
if (task != null) {
task.kill(interruptThread)
}
}
override def run(): Unit = {
val taskMemoryManager = new TaskMemoryManager(env.executorMemoryManager)
val deserializeStartTime = System.currentTimeMillis()
Thread.currentThread.setContextClassLoader(replClassLoader)
val ser = env.closureSerializer.newInstance()
logInfo(s"Running $taskName (TID $taskId)")
execBackend.statusUpdate(taskId, TaskState.RUNNING, EMPTY_BYTE_BUFFER)
var taskStart: Long = 0
startGCTime = computeTotalGcTime()
try {
val (taskFiles, taskJars, taskBytes) = Task.deserializeWithDependencies(serializedTask)
updateDependencies(taskFiles, taskJars)
task = ser.deserialize[Task[Any]](taskBytes, Thread.currentThread.getContextClassLoader)
task.setTaskMemoryManager(taskMemoryManager)
// If this task has been killed before we deserialized it, let's quit now. Otherwise,
// continue executing the task.
if (killed) {
// Throw an exception rather than returning, because returning within a try{} block
// causes a NonLocalReturnControl exception to be thrown. The NonLocalReturnControl
// exception will be caught by the catch block, leading to an incorrect ExceptionFailure
// for the task.
throw new TaskKilledException
}
logDebug("Task " + taskId + "'s epoch is " + task.epoch)
env.mapOutputTracker.updateEpoch(task.epoch)
// Run the actual task and measure its runtime.
taskStart = System.currentTimeMillis()
var threwException = true
val (value, accumUpdates) = try {
val res = task.run(
taskAttemptId = taskId,
attemptNumber = attemptNumber,
metricsSystem = env.metricsSystem)
threwException = false
res
} finally {
val freedMemory = taskMemoryManager.cleanUpAllAllocatedMemory()
if (freedMemory > 0) {
val errMsg = s"Managed memory leak detected; size = $freedMemory bytes, TID = $taskId"
if (conf.getBoolean("spark.unsafe.exceptionOnMemoryLeak", false) && !threwException) {
throw new SparkException(errMsg)
} else {
logError(errMsg)
}
}
}
val taskFinish = System.currentTimeMillis()
// If the task has been killed, let's fail it.
if (task.killed) {
throw new TaskKilledException
}
val resultSer = env.serializer.newInstance()
val beforeSerialization = System.currentTimeMillis()
val valueBytes = resultSer.serialize(value)
val afterSerialization = System.currentTimeMillis()
for (m <- task.metrics) {
// Deserialization happens in two parts: first, we deserialize a Task object, which
// includes the Partition. Second, Task.run() deserializes the RDD and function to be run.
m.setExecutorDeserializeTime(
(taskStart - deserializeStartTime) + task.executorDeserializeTime)
// We need to subtract Task.run()'s deserialization time to avoid double-counting
m.setExecutorRunTime((taskFinish - taskStart) - task.executorDeserializeTime)
m.setJvmGCTime(computeTotalGcTime() - startGCTime)
m.setResultSerializationTime(afterSerialization - beforeSerialization)
m.updateAccumulators()
}
val directResult = new DirectTaskResult(valueBytes, accumUpdates, task.metrics.orNull)
val serializedDirectResult = ser.serialize(directResult)
val resultSize = serializedDirectResult.limit
// directSend = sending directly back to the driver
val serializedResult: ByteBuffer = {
if (maxResultSize > 0 && resultSize > maxResultSize) {
logWarning(s"Finished $taskName (TID $taskId). Result is larger than maxResultSize " +
s"(${Utils.bytesToString(resultSize)} > ${Utils.bytesToString(maxResultSize)}), " +
s"dropping it.")
ser.serialize(new IndirectTaskResult[Any](TaskResultBlockId(taskId), resultSize))
} else if (resultSize >= akkaFrameSize - AkkaUtils.reservedSizeBytes) {
val blockId = TaskResultBlockId(taskId)
env.blockManager.putBytes(
blockId, serializedDirectResult, StorageLevel.MEMORY_AND_DISK_SER)
logInfo(
s"Finished $taskName (TID $taskId). $resultSize bytes result sent via BlockManager)")
ser.serialize(new IndirectTaskResult[Any](blockId, resultSize))
} else {
logInfo(s"Finished $taskName (TID $taskId). $resultSize bytes result sent to driver")
serializedDirectResult
}
}
execBackend.statusUpdate(taskId, TaskState.FINISHED, serializedResult)
} catch {
case ffe: FetchFailedException =>
val reason = ffe.toTaskEndReason
execBackend.statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason))
case _: TaskKilledException | _: InterruptedException if task.killed =>
logInfo(s"Executor killed $taskName (TID $taskId)")
execBackend.statusUpdate(taskId, TaskState.KILLED, ser.serialize(TaskKilled))
case cDE: CommitDeniedException =>
val reason = cDE.toTaskEndReason
execBackend.statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason))
case t: Throwable =>
// Attempt to exit cleanly by informing the driver of our failure.
// If anything goes wrong (or this was a fatal exception), we will delegate to
// the default uncaught exception handler, which will terminate the Executor.
logError(s"Exception in $taskName (TID $taskId)", t)
val metrics: Option[TaskMetrics] = Option(task).flatMap { task =>
task.metrics.map { m =>
m.setExecutorRunTime(System.currentTimeMillis() - taskStart)
m.setJvmGCTime(computeTotalGcTime() - startGCTime)
m.updateAccumulators()
m
}
}
val serializedTaskEndReason = {
try {
ser.serialize(new ExceptionFailure(t, metrics))
} catch {
case _: NotSerializableException =>
// t is not serializable so just send the stacktrace
ser.serialize(new ExceptionFailure(t, metrics, false))
}
}
execBackend.statusUpdate(taskId, TaskState.FAILED, serializedTaskEndReason)
// Don't forcibly exit unless the exception was inherently fatal, to avoid
// stopping other tasks unnecessarily.
if (Utils.isFatalError(t)) {
SparkUncaughtExceptionHandler.uncaughtException(t)
}
} finally {
runningTasks.remove(taskId)
}
}
}
/**
* Create a ClassLoader for use in tasks, adding any JARs specified by the user or any classes
* created by the interpreter to the search path
*/
private def createClassLoader(): MutableURLClassLoader = {
// Bootstrap the list of jars with the user class path.
val now = System.currentTimeMillis()
userClassPath.foreach { url =>
currentJars(url.getPath().split("/").last) = now
}
val currentLoader = Utils.getContextOrSparkClassLoader
// For each of the jars in the jarSet, add them to the class loader.
// We assume each of the files has already been fetched.
val urls = userClassPath.toArray ++ currentJars.keySet.map { uri =>
new File(uri.split("/").last).toURI.toURL
}
if (userClassPathFirst) {
new ChildFirstURLClassLoader(urls, currentLoader)
} else {
new MutableURLClassLoader(urls, currentLoader)
}
}
/**
* If the REPL is in use, add another ClassLoader that will read
* new classes defined by the REPL as the user types code
*/
private def addReplClassLoaderIfNeeded(parent: ClassLoader): ClassLoader = {
val classUri = conf.get("spark.repl.class.uri", null)
if (classUri != null) {
logInfo("Using REPL class URI: " + classUri)
try {
val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
val klass = Utils.classForName("org.apache.spark.repl.ExecutorClassLoader")
.asInstanceOf[Class[_ <: ClassLoader]]
val constructor = klass.getConstructor(classOf[SparkConf], classOf[String],
classOf[ClassLoader], classOf[Boolean])
constructor.newInstance(conf, classUri, parent, _userClassPathFirst)
} catch {
case _: ClassNotFoundException =>
logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
System.exit(1)
null
}
} else {
parent
}
}
/**
* Download any missing dependencies if we receive a new set of files and JARs from the
* SparkContext. Also adds any new JARs we fetched to the class loader.
*/
private def updateDependencies(newFiles: HashMap[String, Long], newJars: HashMap[String, Long]) {
lazy val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
synchronized {
// Fetch missing dependencies
for ((name, timestamp) <- newFiles if currentFiles.getOrElse(name, -1L) < timestamp) {
logInfo("Fetching " + name + " with timestamp " + timestamp)
// Fetch file with useCache mode, close cache for local mode.
Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
currentFiles(name) = timestamp
}
for ((name, timestamp) <- newJars) {
val localName = name.split("/").last
val currentTimeStamp = currentJars.get(name)
.orElse(currentJars.get(localName))
.getOrElse(-1L)
if (currentTimeStamp < timestamp) {
logInfo("Fetching " + name + " with timestamp " + timestamp)
// Fetch file with useCache mode, close cache for local mode.
Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
currentJars(name) = timestamp
// Add it to our class loader
val url = new File(SparkFiles.getRootDirectory(), localName).toURI.toURL
if (!urlClassLoader.getURLs().contains(url)) {
logInfo("Adding " + url + " to class loader")
urlClassLoader.addURL(url)
}
}
}
}
}
private val heartbeatReceiverRef =
RpcUtils.makeDriverRef(HeartbeatReceiver.ENDPOINT_NAME, conf, env.rpcEnv)
/** Reports heartbeat and metrics for active tasks to the driver. */
private def reportHeartBeat(): Unit = {
// list of (task id, metrics) to send back to the driver
val tasksMetrics = new ArrayBuffer[(Long, TaskMetrics)]()
val curGCTime = computeTotalGcTime()
for (taskRunner <- runningTasks.values()) {
if (taskRunner.task != null) {
taskRunner.task.metrics.foreach { metrics =>
metrics.updateShuffleReadMetrics()
metrics.updateInputMetrics()
metrics.setJvmGCTime(curGCTime - taskRunner.startGCTime)
metrics.updateAccumulators()
if (isLocal) {
            // JobProgressListener will hold a reference to it during
            // onExecutorMetricsUpdate(); after that it cannot see further changes
            // to the metrics, so make a deep copy of it
val copiedMetrics = Utils.deserialize[TaskMetrics](Utils.serialize(metrics))
tasksMetrics += ((taskRunner.taskId, copiedMetrics))
} else {
// It will be copied by serialization
tasksMetrics += ((taskRunner.taskId, metrics))
}
}
}
}
val message = Heartbeat(executorId, tasksMetrics.toArray, env.blockManager.blockManagerId)
try {
val response = heartbeatReceiverRef.askWithRetry[HeartbeatResponse](message)
if (response.reregisterBlockManager) {
logInfo("Told to re-register on heartbeat")
env.blockManager.reregister()
}
} catch {
case NonFatal(e) => logWarning("Issue communicating with driver in heartbeater", e)
}
}
/**
* Schedules a task to report heartbeat and partial metrics for active tasks to driver.
*/
private def startDriverHeartbeater(): Unit = {
val intervalMs = conf.getTimeAsMs("spark.executor.heartbeatInterval", "10s")
// Wait a random interval so the heartbeats don't end up in sync
val initialDelay = intervalMs + (math.random * intervalMs).asInstanceOf[Int]
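    // e.g. with the default 10s interval, initialDelay lands somewhere in [10s, 20s)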
val heartbeatTask = new Runnable() {
override def run(): Unit = Utils.logUncaughtExceptions(reportHeartBeat())
}
heartbeater.scheduleAtFixedRate(heartbeatTask, initialDelay, intervalMs, TimeUnit.MILLISECONDS)
}
}
|
ArvinDevel/onlineAggregationOnSparkV2
|
core/src/main/scala/org/apache/spark/executor/Executor.scala
|
Scala
|
apache-2.0
| 19,463 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.index.lucene
import java.io.{File, PrintWriter}
import scala.collection.JavaConverters._
import scala.util.Random
import org.apache.spark.{SPARK_VERSION, SparkException}
import org.apache.spark.sql.{CarbonEnv, Row}
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.common.exceptions.sql.{MalformedCarbonCommandException, MalformedIndexCommandException}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.index.status.IndexStatus
import org.apache.carbondata.core.metadata.index.IndexType
import org.apache.carbondata.core.util.CarbonProperties
class LuceneFineGrainIndexSuite extends QueryTest with BeforeAndAfterAll {
val originDistributedIndexStatus = CarbonProperties.getInstance().getProperty(
CarbonCommonConstants.USE_DISTRIBUTED_INDEX,
CarbonCommonConstants.USE_DISTRIBUTED_INDEX_DEFAULT
)
val file2 = resourcesPath + "/index_input.csv"
override protected def beforeAll(): Unit = {
sql("drop database if exists lucene cascade")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
LuceneFineGrainIndexSuite.createFile(file2)
sql("create database if not exists lucene")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.USE_DISTRIBUTED_INDEX, "true")
sql("use lucene")
sql("DROP TABLE IF EXISTS normal_test")
sql(
"""
| CREATE TABLE normal_test(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE normal_test OPTIONS('header'='false')")
sql("DROP TABLE IF EXISTS index_test")
sql(
"""
| CREATE TABLE index_test(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test OPTIONS('header'='false')")
}
test("validate INDEX_COLUMNS property") {
// not exists
var exception = intercept[MalformedIndexCommandException](sql(
s"""
| CREATE INDEX dm1
| ON index_test (city, school)
| AS 'lucene'
""".stripMargin))
assertResult("column 'school' does not exist in table. Please check create index statement.")(
exception.getMessage)
// duplicate columns
exception = intercept[MalformedIndexCommandException](sql(
s"""
| CREATE INDEX dm1
| ON index_test (name, city, name)
| AS 'lucene'
""".stripMargin))
assertResult("index column list has duplicate column")(exception.getMessage)
// only support String DataType
exception = intercept[MalformedIndexCommandException](sql(
s"""
| CREATE INDEX dm1
| ON index_test (city,id)
| AS 'lucene'
""".stripMargin))
assertResult("Only String column is supported, column 'id' is INT type. ")(exception.getMessage)
}
test("test lucene fine grain index") {
sql("drop index if exists dm on table index_test")
sql(
s"""
| CREATE INDEX dm
| ON index_test (Name, cIty)
| AS 'lucene'
""".stripMargin)
checkAnswer(sql("SELECT * FROM index_test WHERE TEXT_MATCH('name:n10')"),
sql(s"select * from index_test where name='n10'"))
checkAnswer(sql("SELECT * FROM index_test WHERE TEXT_MATCH('city:c020')"),
sql(s"SELECT * FROM index_test WHERE city='c020'"))
sql("drop index dm on table index_test")
}
// for CARBONDATA-2820, we will first block deferred refresh for lucene
test("test block rebuild for lucene") {
val deferredRebuildException = intercept[MalformedIndexCommandException] {
sql(
s"""
| CREATE INDEX index1
| ON index_test (city)
| AS 'lucene'
| WITH DEFERRED REFRESH
""".stripMargin)
}
assert(deferredRebuildException.getMessage.contains(
s"DEFERRED REFRESH is not supported on this index index1 with provider lucene"))
sql(
s"""
| CREATE INDEX index1
| ON index_test (city)
| AS 'lucene'
""".stripMargin)
val exception = intercept[MalformedIndexCommandException] {
sql(s"REFRESH INDEX index1 ON TABLE index_test")
}
sql("drop index index1 on table index_test")
assert(exception.getMessage.contains("Non-lazy index index1 does not support manual refresh"))
}
ignore("test lucene rebuild index") {
sql("DROP TABLE IF EXISTS index_test4")
sql(
"""
| CREATE TABLE index_test4(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test4 OPTIONS('header'='false')")
sql(
s"""
| CREATE INDEX dm4
| ON index_test4 (name, city)
| AS 'lucene'
| WITH DEFERRED REFRESH
""".stripMargin)
sql("REFRESH INDEX dm4 ON TABLE index_test4")
checkAnswer(sql("SELECT * FROM index_test4 WHERE TEXT_MATCH('name:n10')"),
sql(s"select * from index_test where name='n10'"))
checkAnswer(sql("SELECT * FROM index_test4 WHERE TEXT_MATCH('city:c020')"),
sql(s"SELECT * FROM index_test4 WHERE city='c020'"))
sql("drop table index_test4")
}
test("test lucene fine grain index drop") {
sql("DROP TABLE IF EXISTS index_test1")
sql(
"""
| CREATE TABLE index_test1(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm12
| ON index_test1 (name, city)
| as 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test1 OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test1 WHERE TEXT_MATCH('name:n10')"),
sql(s"select * from index_test1 where name='n10'"))
var carbonTable = CarbonEnv.getCarbonTable(Some("lucene"), "index_test1")(sqlContext
.sparkSession)
val indexes = carbonTable.getIndexMetadata.getIndexesMap
.get(IndexType.LUCENE.getIndexProviderName).asScala
.filter(p => p._2.get(CarbonCommonConstants.INDEX_STATUS)
.equalsIgnoreCase(IndexStatus.ENABLED.name()))
assert(indexes.exists(p => p._1.equals("dm12") &&
p._2.get(CarbonCommonConstants.INDEX_STATUS) ==
IndexStatus.ENABLED.name()))
sql("drop index dm12 on table index_test1")
carbonTable = CarbonEnv.getCarbonTable(Some("lucene"), "index_test1")(sqlContext.sparkSession)
assert(null == carbonTable.getIndexMetadata)
sql("DROP TABLE IF EXISTS index_test1")
}
test("test lucene fine grain index show") {
sql("DROP TABLE IF EXISTS index_test2")
sql("DROP TABLE IF EXISTS index_test3")
sql(
"""
| CREATE TABLE index_test2(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm122
| ON index_test2 (name, city)
| AS 'lucene'
""".stripMargin)
sql(
"""
| CREATE TABLE index_test3(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm123
| ON index_test3 (name, city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test2 OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test2 WHERE TEXT_MATCH('name:n10')"),
sql(s"select * from index_test2 where name='n10'"))
assert(sql("show indexes on table index_test2").count() == 1)
sql("DROP TABLE IF EXISTS index_test2")
sql("DROP TABLE IF EXISTS index_test3")
}
test("test lucene fine grain index with wildcard matching ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name, city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n99*')"),
sql("select * from index_test_table where name like 'n99%'"))
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n*9')"),
sql(s"select * from index_test_table where name like 'n%9'"))
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with TEXT_MATCH 'AND' Filter ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql(
"SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n0* AND city:c0*')"),
sql("select * from index_test_table where name like 'n0%' and city like 'c0%'"))
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with TEXT_MATCH 'OR' Filter ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql(
"SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n1* OR city:c01*')"),
sql("select * from index_test_table where name like 'n1%' or city like 'c01%'"))
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with TEXT_MATCH 'AND' and 'OR' Filter ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql(
"SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n1* OR (city:c01* AND city:c02*)')"),
sql(
"select * from index_test_table where name like 'n1%' OR city like 'c01%' and city like" +
" 'c02%'"))
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with compaction-Major ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select * from index_test_table where name='n10'"))
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql("alter table index_test_table compact 'major'")
if (!sqlContext.sparkContext.version.startsWith("3.1")) {
checkAnswer(sql("SELECT COUNT(*) FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select COUNT(*) from index_test_table where name='n10'"))
}
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with compaction-Minor ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select * from index_test_table where name='n10'"))
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql("alter table index_test_table compact 'minor'")
if (!sqlContext.sparkContext.version.startsWith("3.1")) {
checkAnswer(sql("SELECT COUNT(*) FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select count(*) from index_test_table where name='n10'"))
}
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with GLOBAL_SORT_SCOPE ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES(
| 'SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT', 'CACHE_LEVEL'='BLOCKLET')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table " +
s"OPTIONS('header'='false','GLOBAL_SORT_PARTITIONS'='2')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select * from index_test_table where name='n10'"))
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table " +
s"OPTIONS('header'='false','GLOBAL_SORT_PARTITIONS'='2')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n10')"),
sql("select * from index_test_table where name='n10'"))
sql("DROP TABLE IF EXISTS index_test_table")
}
test("test Clean Files and check Lucene Index") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='GLOBAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm2
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
if (!sqlContext.sparkContext.version.startsWith("3.1")) {
checkAnswer(sql("SELECT count(*) FROM index_test_table WHERE TEXT_MATCH('name:n99*')"),
sql("select count(*) from index_test_table where name like 'n99%'"))
sql("delete from table index_test_table where SEGMENT.ID in (0) ")
checkAnswer(sql("SELECT count(*) FROM index_test_table WHERE TEXT_MATCH('name:n99*')"),
sql("select count(*) from index_test_table where name like 'n99%'"))
}
sql("clean files for table index_test_table")
sql("drop index if exists dm2 on table index_test_table")
}
test("test lucene fine grain index with TEXT_MATCH 'NOT' Filter ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
// check NOT filter with TEXTMATCH term-search
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n0 NOT n1')"),
sql("select *from index_test_table where name='n0' AND not name='n1'"))
// check NOT filter with TEXTMATCH wildcard-search
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n1* NOT n2*')"),
sql("select *from index_test_table where name like'n1%' AND not name like 'n2%'"))
// check NOT filter with TEXTMATCH wildcard-search using AND on different columns
checkAnswer(sql(
"select *from index_test_table where TEXT_MATCH('name:n1* AND city:c01* NOT " +
"c02*')"),
sql("select *from index_test_table where name like'n1%' AND not city='c02%'"))
sql("drop index if exists dm on table index_test_table")
}
test("test lucene fine grain index with CTAS") {
sql("DROP TABLE IF EXISTS source_table")
sql("DROP TABLE IF EXISTS target_table")
sql(
"""
| CREATE TABLE source_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON source_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE source_table OPTIONS('header'='false')")
sql(
"""
| CREATE TABLE target_table
| STORED AS carbondata
| AS
| Select * from source_table where TEXT_MATCH('name:n1*')
""".stripMargin)
if (!sqlContext.sparkContext.version.startsWith("3.1")) {
checkAnswer(sql("SELECT count(*) FROM target_table"),
sql("select count(*) from source_table where name like 'n1%'"))
}
sql("DROP TABLE IF EXISTS source_table")
sql("DROP TABLE IF EXISTS target_table")
}
test("test lucene fine grain index with text-match limit") {
sql("DROP TABLE IF EXISTS index_test_limit")
sql(
"""
| CREATE TABLE index_test_limit(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_limit (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_limit OPTIONS('header'='false')")
if (!sqlContext.sparkContext.version.startsWith("3.1")) {
checkAnswer(sql(
"select count(*) from index_test_limit where TEXT_MATCH_WITH_LIMIT('name:n10*',10)"),
Seq(Row(10)))
checkAnswer(sql(
"select count(*) from index_test_limit where TEXT_MATCH_WITH_LIMIT('name:n10*',50)"),
Seq(Row(50)))
}
sql("drop index dm on table index_test_limit")
}
test("test lucene fine grain index with InsertOverwrite") {
sql("DROP TABLE IF EXISTS index_test_overwrite")
sql(
"""
| CREATE TABLE index_test_overwrite(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_overwrite (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' " +
"INTO TABLE index_test_overwrite OPTIONS('header'='false')")
sql(
"""
| CREATE TABLE table1(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql("INSERT OVERWRITE TABLE table1 " +
"select *from index_test_overwrite where TEXT_MATCH('name:n*')")
checkAnswer(sql("select count(*) from table1"), Seq(Row(10000)))
sql("drop index dm on table index_test_overwrite")
}
  // Excluded when running with the index server, as pruning info for the explain command
  // is not set with the index server.
test("explain query with lucene index", true) {
sql("drop table if exists main")
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.BLOCKLET_SIZE, "8")
sql(
"""
| CREATE TABLE main(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'CACHE_LEVEL'='BLOCKLET')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON main (name,city)
| AS 'lucene'
""".stripMargin)
val file1 = resourcesPath + "/main.csv"
LuceneFineGrainIndexSuite.createFile(file1, 1000000)
sql(s"LOAD DATA LOCAL INPATH '$file1' INTO TABLE main OPTIONS('header'='false')")
sql("EXPLAIN SELECT * FROM main WHERE TEXT_MATCH('name:bob')").collect()
val rows = sql("EXPLAIN SELECT * FROM main WHERE TEXT_MATCH('name:bob')").collect()
    // Sometimes the plan comparison fails even when both plans are the same.
    // Once that failure happens, the index drop is not executed,
    // which causes other test cases to fail as well.
try {
assertResult(
"""== CarbonData Profiler ==
|Table Scan on main
| - total: 1 blocks, 1 blocklets
| - filter: TEXT_MATCH('name:bob')
| - pruned by Main Index
| - skipped: 0 blocks, 0 blocklets
| - pruned by FG Index
| - name: dm
| - provider: lucene
| - skipped: 1 blocks, 1 blocklets
|""".stripMargin)(rows(0).getString(0))
} finally {
LuceneFineGrainIndexSuite.deleteFile(file1)
sql("drop index dm on table main")
CarbonProperties.getInstance().addProperty(
CarbonCommonConstants.BLOCKLET_SIZE, CarbonCommonConstants.BLOCKLET_SIZE_DEFAULT_VAL)
}
}
test("test lucene index creation for blocked features") {
sql("DROP TABLE IF EXISTS index_test7")
sql(
"""
| CREATE TABLE index_test7(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm124
| ON index_test7 (name,city)
| AS 'lucene'
""".stripMargin)
val ex1 = intercept[MalformedCarbonCommandException] {
sql("alter table index_test7 rename to index_test5")
}
assert(ex1.getMessage.contains("alter rename is not supported"))
val ex2 = intercept[MalformedCarbonCommandException] {
sql("alter table index_test7 add columns(address string)")
}
assert(ex2.getMessage.contains("alter table add column is not supported"))
val ex3 = intercept[MalformedCarbonCommandException] {
sql("alter table index_test7 change id id BIGINT")
}
assert(ex3.getMessage.contains("alter table change datatype is not supported"))
val ex4 = intercept[MalformedCarbonCommandException] {
sql("alter table index_test7 drop columns(name)")
}
assert(ex4.getMessage.contains("alter table drop column is not supported"))
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test7 OPTIONS('header'='false')")
val ex5 = intercept[MalformedCarbonCommandException] {
sql("UPDATE index_test7 d set(d.city)=('luc') where d.name='n10'").collect()
}
assert(ex5.getMessage.contains("update operation is not supported for index"))
val ex6 = intercept[MalformedCarbonCommandException] {
sql("delete from index_test7 where name = 'n10'").collect()
}
assert(ex6.getMessage.contains("delete operation is not supported for index"))
val ex7 = intercept[MalformedCarbonCommandException] {
sql("alter table index_test7 change id test int")
}
assert(ex7.getMessage.contains("alter table column rename is not supported"))
}
ignore("test lucene fine grain multiple index on table") {
sql("DROP TABLE IF EXISTS index_test5")
sql(
"""
| CREATE TABLE index_test5(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES(
| 'SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT', 'CACHE_LEVEL'='BLOCKLET')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm_city
| ON index_test5 (city)
| AS 'lucene'
""".stripMargin)
sql(
s"""
| CREATE INDEX dm_name
| ON index_test5 (name)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test5 OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test5 WHERE TEXT_MATCH('name:n10')"),
sql(s"select * from index_test5 where name='n10'"))
checkAnswer(sql("SELECT * FROM index_test5 WHERE TEXT_MATCH('city:c020')"),
sql(s"SELECT * FROM index_test5 WHERE city='c020'"))
val explainString = sql("explain select * from index_test5 where TEXT_MATCH('name:n10')")
.collect()
assert(explainString(0).getString(0).contains(
"pruned by FG Index\\n - name: dm_name\\n - provider: lucene"))
sql("DROP TABLE IF EXISTS index_test5")
}
ignore("test lucene fine grain index rebuild") {
sql("DROP TABLE IF EXISTS index_test5")
sql(
"""
| CREATE TABLE index_test5(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test5 (city)
| AS 'lucene'
| WITH DEFERRED REFRESH
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test5 OPTIONS('header'='false')")
sql("REFRESH INDEX dm ON TABLE index_test5")
checkAnswer(sql("SELECT * FROM index_test5 WHERE TEXT_MATCH('city:c020')"),
sql(s"SELECT * FROM index_test5 WHERE city='c020'"))
sql("DROP TABLE IF EXISTS index_test5")
}
ignore("test text_match on normal table") {
sql("DROP TABLE IF EXISTS table1")
sql(
"""
| CREATE TABLE table1(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE table1 OPTIONS('header'='false')")
val msg = intercept[SparkException] {
sql("select * from table1 where TEXT_MATCH('name:n*')").collect()
}
assert(msg.getCause.getMessage.contains("TEXT_MATCH is not supported on table"))
sql("DROP TABLE table1")
}
test("test lucene with flush_cache as true") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm_flush
| ON index_test_table (name,city)
| AS 'lucene'
| properties('flush_cache'='true')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n99*')"),
sql("select * from index_test_table where name like 'n99%'"))
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n*9')"),
sql(s"select * from index_test_table where name like 'n%9'"))
sql("drop index if exists dm_flush on table index_test_table")
}
test("test lucene with split_blocklet as false ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm_split_false
| ON index_test_table (name,city)
| AS 'lucene'
| properties('split_blocklet'='false')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n99*')"),
sql("select * from index_test_table where name like 'n99%'"))
checkAnswer(sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n*9')"),
sql(s"select * from index_test_table where name like 'n%9'"))
sql("drop index if exists dm_split_false on table index_test_table")
}
test("test text_match filters with more than one text_match udf ") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm_text
| ON index_test_table (name,city)
| AS 'lucene'
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE index_test_table OPTIONS('header'='false')")
val msg = intercept[MalformedCarbonCommandException] {
sql("SELECT * FROM index_test_table WHERE TEXT_MATCH('name:n0*') AND TEXT_MATCH" +
"('city:c0*')").collect()
}
assert(msg.getMessage
.contains("Specify all search filters for Lucene within a single text_match UDF"))
sql("drop index if exists dm_text on table index_test_table")
}
test("test lucene indexing english stop words") {
sql("drop table if exists table_stop")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS, "false")
sql("create table table_stop(suggestion string,goal string) " +
"STORED AS carbondata TBLPROPERTIES('CACHE_LEVEL'='BLOCKLET')")
sql(
"create index stop_dm on table table_stop (suggestion) as 'lucene'")
sql("insert into table_stop select 'The is the stop word','abcde'")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS, "true")
sql("insert into table_stop select 'The is one more stop word','defg'")
assert(
sql("select * from table_stop where text_match('suggestion:*is*')").collect().length == 1)
}
test("test lucene index on null values") {
sql("DROP TABLE IF EXISTS index_test4")
sql("DROP TABLE IF EXISTS index_copy")
sql(
"""
| CREATE TABLE index_test4(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT',
| 'CACHE_LEVEL'='BLOCKLET')
""".stripMargin)
sql(
"""
| CREATE TABLE index_copy(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT',
| 'CACHE_LEVEL'='BLOCKLET')
""".stripMargin)
sql("insert into index_test4 select 1,'name','city',20")
sql("insert into index_test4 select 2,'name1','city1',20")
sql("insert into index_test4 select 25,cast(null as string),'city2',NULL")
sql("insert into index_copy select * from index_test4")
sql(
s"""
| CREATE INDEX dm4
| ON index_test4 (name,city)
| AS 'lucene'
""".stripMargin)
checkAnswer(sql("SELECT * FROM index_test4 WHERE TEXT_MATCH('name:n*')"),
sql(s"select * from index_copy where name like '%n%'"))
sql("drop table index_test4")
sql("drop table index_copy")
}
test("test create index: unable to create same index for one column") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
val exception_duplicate_column: Exception = intercept[MalformedIndexCommandException] {
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name)
| AS 'lucene'
""".stripMargin)
sql(
s"""
| CREATE INDEX dm1
| ON index_test_table (name)
| AS 'lucene'
""".stripMargin)
}
assertResult("column 'name' already has lucene index created")(
exception_duplicate_column.getMessage)
sql("drop table if exists index_test_table")
}
test("test create index: able to create different index for one column") {
sql("DROP TABLE IF EXISTS index_test_table")
sql(
"""
| CREATE TABLE index_test_table(id INT, name STRING, city STRING, age INT)
| STORED AS carbondata
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
sql(
s"""
| CREATE INDEX dm
| ON index_test_table (name)
| AS 'lucene'
""".stripMargin)
sql(
s"""
| CREATE INDEX dm1
| ON index_test_table (name)
| AS 'bloomfilter'
""".stripMargin)
sql("show indexes on table index_test_table").collect()
checkExistence(sql("show indexes on table index_test_table"),
true, "dm", "dm1", "lucene", "bloomfilter")
sql("drop table if exists index_test_table")
}
override protected def afterAll(): Unit = {
LuceneFineGrainIndexSuite.deleteFile(file2)
sql("DROP TABLE IF EXISTS normal_test")
sql("DROP TABLE IF EXISTS index_test")
sql("DROP TABLE IF EXISTS source_table")
sql("DROP TABLE IF EXISTS target_table")
sql("DROP TABLE IF EXISTS index_test_table")
sql("DROP TABLE IF EXISTS index_test1")
sql("DROP TABLE IF EXISTS index_test2")
sql("DROP TABLE IF EXISTS index_test3")
sql("DROP TABLE IF EXISTS index_test4")
sql("DROP TABLE IF EXISTS index_test5")
sql("DROP TABLE IF EXISTS index_test7")
sql("DROP TABLE IF EXISTS index_main")
sql("DROP TABLE IF EXISTS table_stop")
sql("use default")
sql("drop database if exists lucene cascade")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS,
CarbonCommonConstants.CARBON_LUCENE_INDEX_STOP_WORDS_DEFAULT)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.USE_DISTRIBUTED_INDEX,
originDistributedIndexStatus)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}
}
object LuceneFineGrainIndexSuite {
def createFile(fileName: String, line: Int = 10000, start: Int = 0): Unit = {
val write = new PrintWriter(new File(fileName))
for (i <- start until (start + line)) {
// scalastyle:off println
write.println(i + "," + "n" + i + "," + "c0" + i + "," + Random.nextInt(80))
// scalastyle:on println
}
write.close()
}
def deleteFile(fileName: String): Unit = {
val file = new File(fileName)
if (file.exists()) {
file.delete()
}
}
}
|
zzcclp/carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/index/lucene/LuceneFineGrainIndexSuite.scala
|
Scala
|
apache-2.0
| 37,637 |
/* Example based on discussion with Pierre Quinton */
import stainless.lang._
import stainless.annotation._
object LawsExample {
abstract class A[T] {
def a: T
def f(x: T, y: T): T
@law
def uniqueRight(x: T, y: T, z: T): Boolean = {
f(x,y) != f(x,z) || y == z
}
}
object Aux {
def something[T](ceci: A[T], x: T, y: T): Boolean = {
require(ceci.f(x,y) == ceci.f(x,ceci.a))
ceci.uniqueRight(x,y,ceci.a)
y == ceci.a
}.holds
def somethingelse[T](ceci: A[T], y: T): Unit = {
require(ceci.f(ceci.a,ceci.a) == ceci.f(ceci.a,y))
assert(something(ceci, ceci.a,y))
}.ensuring(y == ceci.a)
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/LawsExample.scala
|
Scala
|
apache-2.0
| 668 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.util.{Arrays, UUID}
import java.util.concurrent.locks.ReentrantLock
import java.nio.ByteBuffer
import java.util.regex.Pattern
import org.apache.log4j.Logger
import org.scalatest.junit.JUnitSuite
import org.junit.Assert._
import kafka.common.KafkaException
import kafka.utils.CoreUtils.inLock
import org.junit.Test
import org.apache.kafka.common.utils.{Base64, Utils}
class UtilsTest extends JUnitSuite {
private val logger = Logger.getLogger(classOf[UtilsTest])
val clusterIdPattern = Pattern.compile("[a-zA-Z0-9_\\\\-]+")
@Test
def testSwallow() {
CoreUtils.swallow(logger.info, throw new KafkaException("test"))
}
@Test
def testCircularIterator() {
val l = List(1, 2)
val itl = CoreUtils.circularIterator(l)
assertEquals(1, itl.next())
assertEquals(2, itl.next())
assertEquals(1, itl.next())
assertEquals(2, itl.next())
assertFalse(itl.hasDefiniteSize)
val s = Set(1, 2)
val its = CoreUtils.circularIterator(s)
assertEquals(1, its.next())
assertEquals(2, its.next())
assertEquals(1, its.next())
assertEquals(2, its.next())
assertEquals(1, its.next())
}
@Test
def testReadBytes() {
for(testCase <- List("", "a", "abcd")) {
val bytes = testCase.getBytes
assertTrue(Arrays.equals(bytes, Utils.readBytes(ByteBuffer.wrap(bytes))))
}
}
@Test
def testAbs() {
assertEquals(0, Utils.abs(Integer.MIN_VALUE))
assertEquals(1, Utils.abs(-1))
assertEquals(0, Utils.abs(0))
assertEquals(1, Utils.abs(1))
assertEquals(Integer.MAX_VALUE, Utils.abs(Integer.MAX_VALUE))
}
@Test
def testReplaceSuffix() {
assertEquals("blah.foo.text", CoreUtils.replaceSuffix("blah.foo.txt", ".txt", ".text"))
assertEquals("blah.foo", CoreUtils.replaceSuffix("blah.foo.txt", ".txt", ""))
assertEquals("txt.txt", CoreUtils.replaceSuffix("txt.txt.txt", ".txt", ""))
assertEquals("foo.txt", CoreUtils.replaceSuffix("foo", "", ".txt"))
}
@Test
def testReadInt() {
val values = Array(0, 1, -1, Byte.MaxValue, Short.MaxValue, 2 * Short.MaxValue, Int.MaxValue/2, Int.MinValue/2, Int.MaxValue, Int.MinValue, Int.MaxValue)
val buffer = ByteBuffer.allocate(4 * values.size)
for(i <- 0 until values.length) {
buffer.putInt(i*4, values(i))
assertEquals("Written value should match read value.", values(i), CoreUtils.readInt(buffer.array, i*4))
}
}
@Test
def testCsvList() {
val emptyString:String = ""
val nullString:String = null
val emptyList = CoreUtils.parseCsvList(emptyString)
val emptyListFromNullString = CoreUtils.parseCsvList(nullString)
val emptyStringList = Seq.empty[String]
assertTrue(emptyList!=null)
assertTrue(emptyListFromNullString!=null)
assertTrue(emptyStringList.equals(emptyListFromNullString))
assertTrue(emptyStringList.equals(emptyList))
}
@Test
def testCsvMap() {
val emptyString: String = ""
val emptyMap = CoreUtils.parseCsvMap(emptyString)
val emptyStringMap = Map.empty[String, String]
assertTrue(emptyMap != null)
    assertTrue(emptyStringMap.equals(emptyMap))
val kvPairsIpV6: String = "a:b:c:v,a:b:c:v"
val ipv6Map = CoreUtils.parseCsvMap(kvPairsIpV6)
for (m <- ipv6Map) {
assertTrue(m._1.equals("a:b:c"))
assertTrue(m._2.equals("v"))
}
val singleEntry:String = "key:value"
val singleMap = CoreUtils.parseCsvMap(singleEntry)
val value = singleMap.getOrElse("key", 0)
assertTrue(value.equals("value"))
val kvPairsIpV4: String = "192.168.2.1/30:allow, 192.168.2.1/30:allow"
val ipv4Map = CoreUtils.parseCsvMap(kvPairsIpV4)
for (m <- ipv4Map) {
assertTrue(m._1.equals("192.168.2.1/30"))
assertTrue(m._2.equals("allow"))
}
val kvPairsSpaces: String = "key:value , key: value"
val spaceMap = CoreUtils.parseCsvMap(kvPairsSpaces)
for (m <- spaceMap) {
assertTrue(m._1.equals("key"))
assertTrue(m._2.equals("value"))
}
}
@Test
def testInLock() {
val lock = new ReentrantLock()
val result = inLock(lock) {
assertTrue("Should be in lock", lock.isHeldByCurrentThread)
1 + 1
}
assertEquals(2, result)
assertFalse("Should be unlocked", lock.isLocked)
}
@Test
def testUrlSafeBase64EncodeUUID() {
// Test a UUID that has no + or / characters in base64 encoding [a149b4a3-06e1-4b49-a8cb-8a9c4a59fa46 ->(base64)-> oUm0owbhS0moy4qcSln6Rg==]
val clusterId1 = Base64.urlEncoderNoPadding.encodeToString(CoreUtils.getBytesFromUuid(UUID.fromString(
"a149b4a3-06e1-4b49-a8cb-8a9c4a59fa46")))
assertEquals(clusterId1, "oUm0owbhS0moy4qcSln6Rg")
assertEquals(clusterId1.length, 22)
assertTrue(clusterIdPattern.matcher(clusterId1).matches())
// Test a UUID that has + or / characters in base64 encoding [d418ec02-277e-4853-81e6-afe30259daec ->(base64)-> 1BjsAid+SFOB5q/jAlna7A==]
val clusterId2 = Base64.urlEncoderNoPadding.encodeToString(CoreUtils.getBytesFromUuid(UUID.fromString(
"d418ec02-277e-4853-81e6-afe30259daec")))
assertEquals(clusterId2, "1BjsAid-SFOB5q_jAlna7A")
assertEquals(clusterId2.length, 22)
assertTrue(clusterIdPattern.matcher(clusterId2).matches())
}
@Test
def testGenerateUuidAsBase64() {
val clusterId = CoreUtils.generateUuidAsBase64()
assertEquals(clusterId.length, 22)
assertTrue(clusterIdPattern.matcher(clusterId).matches())
}
}
|
zzwlstarby/mykafka
|
core/src/test/scala/unit/kafka/utils/UtilsTest.scala
|
Scala
|
apache-2.0
| 6,249 |
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.hyperbus.model.annotations
import scala.annotation.StaticAnnotation
class location(v: String) extends StaticAnnotation
|
hypertino/hyperbus
|
hyperbus/src/main/scala/com/hypertino/hyperbus/model/annotations/location.scala
|
Scala
|
mpl-2.0
| 411 |
package com.rabbitonweb.nnspc
/**
* P07 (**) Flatten a nested list structure.
Example:
scala> flatten(List(List(1, 1), 2, List(3, List(5, 8))))
res0: List[Any] = List(1, 1, 2, 3, 5, 8)
*/
object P07 {
def flatten(list: List[Any]): List[Any] = list match {
case head :: tail if head.isInstanceOf[List[_]] => flatten(head.asInstanceOf[List[_]]) ++ flatten(tail)
case head :: tail => head :: flatten(tail)
case Nil => Nil
}
}
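// Hypothetical usage sketch (not part of the original file): exercises P07.flatten on the
// example from the scaladoc above; the expected output is List(1, 1, 2, 3, 5, 8).
object P07Demo {
  def main(args: Array[String]): Unit = {
    val nested: List[Any] = List(List(1, 1), 2, List(3, List(5, 8)))
    println(P07.flatten(nested)) // List(1, 1, 2, 3, 5, 8)
  }
}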
|
rabbitonweb/99-scala-problems
|
src/main/scala/com/rabbitonweb/nnspc/P07.scala
|
Scala
|
gpl-2.0
| 442 |
/*
import ie.nuig.entitylinking.core.AnnotatedMention
import ie.nuig.entitylinking.core.ELDocument
import ie.nuig.entitylinking.main.nel.EntityLinkingMain
import java.util.List
import scala.collection.JavaConversions._
object NUIGEntityLinkingTest {
def main(args: Array[String]) {
//config file path that contains paths for classifier and candidate index.
val configPath = "/home/cnavarro/workspace/mixedemotions/entitylinking/ie.nuig.me.nel.properties"
//initiate entity linking module, load classifier and open index
val entityLinkingDemo : EntityLinkingMain = new EntityLinkingMain(configPath)
//input text example
val docText = "Apple CEO Steve Jobs and Baez dated in the late 1970s, and she performed at his Stanford memorial."
//create an document that contains document text
val elDocument : ELDocument = new ELDocument(docText, null)
//process document to recognize the entities and link them to DBpedia
entityLinkingDemo.processDocument(elDocument)
//print results
//EntityText, entityType, DBpediaLink, confidenceScore
val annotatedMentions : List[AnnotatedMention] = elDocument.getAnnotatedMention().toList
println(annotatedMentions.getClass)
for(annotatedMention : AnnotatedMention <- annotatedMentions){
println("Entity text: "+annotatedMention.getMention() + "\\tclass: "+annotatedMention.getClassType() +"\\tURI: "+
annotatedMention.getUriScorePair().getKey() +"\\tscore: "+annotatedMention.getUriScorePair().getValue())
}
}
}
*/
|
canademar/me_extractors
|
BRMDemo_MarchMeeting/src/main/scala/NUIGEntityLinkingTest.scala
|
Scala
|
gpl-2.0
| 1,543 |
package k2b6s9j.singingKIA.Songs
object HarderBetterFasterStronger {
}
|
k2b6s9j/singingKIA
|
src/main/scala/k2b6s9j/singingKIA/Songs/HarderBetterFasterStronger.scala
|
Scala
|
mit
| 73 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate
import akka.actor._
import akka.remote.testkit._
import akka.testkit.TestProbe
import com.typesafe.config._
import scala.util._
class FilteredReplicationConfig(providerConfig: Config) extends MultiNodeReplicationConfig {
val nodeA = role("nodeA")
val nodeB = role("nodeB")
setConfig(providerConfig)
}
object FilteredReplicationSpec {
class PayloadEqualityFilter(payload: String) extends ReplicationFilter {
override def apply(event: DurableEvent): Boolean = {
event.payload == payload
}
}
class ReplicatedActor(val id: String, val eventLog: ActorRef, probe: ActorRef) extends EventsourcedActor {
def onCommand = {
case s: String => persist(s) {
case Success(e) =>
case Failure(e) => throw e
}
}
def onEvent = {
case s: String => probe ! s
}
}
}
abstract class FilteredReplicationSpec(config: FilteredReplicationConfig) extends MultiNodeSpec(config) with MultiNodeWordSpec with MultiNodeReplicationEndpoint {
import FilteredReplicationSpec._
import config._
def initialParticipants: Int =
roles.size
muteDeadLetters(classOf[AnyRef])(system)
"Event log replication" must {
"replicate events based on filter criteria" in {
val probe = TestProbe()
runOn(nodeA) {
val connection = node(nodeB).address.toReplicationConnection.copy(filters = Map(logName -> new PayloadEqualityFilter("B2")))
val endpoint = createEndpoint(nodeA.name, Set(connection))
val actor = system.actorOf(Props(new ReplicatedActor("pa", endpoint.log, probe.ref)))
actor ! ("A1")
actor ! ("A2")
actor ! ("A3")
probe.expectMsgAllOf("A1", "A2", "A3", "B2")
}
runOn(nodeB) {
val connection = node(nodeA).address.toReplicationConnection.copy(filters = Map(logName -> new PayloadEqualityFilter("A2")))
val endpoint = createEndpoint(nodeB.name, Set(connection))
val actor = system.actorOf(Props(new ReplicatedActor("pb", endpoint.log, probe.ref)))
actor ! ("B1")
actor ! ("B2")
actor ! ("B3")
probe.expectMsgAllOf("B1", "B2", "B3", "A2")
}
enterBarrier("finish")
}
}
}
|
ianclegg/eventuate
|
eventuate-core/src/multi-jvm/scala/com/rbmhtechnology/eventuate/FilteredReplicationSpec.scala
|
Scala
|
apache-2.0
| 2,908 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization.macros
import org.scalatest.{ FunSuite, ShouldMatchers }
import org.scalatest.prop.Checkers
import org.scalatest.prop.PropertyChecks
import impl.ordered_serialization.runtime_helpers.TraversableHelpers._
class TraversableHelperLaws extends FunSuite with PropertyChecks with ShouldMatchers {
test("Iterator ordering should be Iterable ordering") {
forAll { (l1: List[Int], l2: List[Int]) =>
assert(iteratorCompare[Int](l1.iterator, l2.iterator) ===
Ordering[Iterable[Int]].compare(l1, l2), "Matches scala's Iterable compare")
}
}
test("Iterator equiv should be Iterable ordering") {
forAll { (l1: List[Int], l2: List[Int]) =>
assert(iteratorEquiv[Int](l1.iterator, l2.iterator) ===
        Ordering[Iterable[Int]].equiv(l1, l2), "Matches scala's Iterable equiv")
}
}
test("sortedCompare matches sort followed by compare List[Int]") {
forAll(minSuccessful(1000)) { (l1: List[Int], l2: List[Int]) =>
assert(sortedCompare[Int](l1, l2) ===
Ordering[Iterable[Int]].compare(l1.sorted, l2.sorted), "Matches scala's Iterable compare")
}
}
test("sortedCompare matches sort followed by compare Set[Int]") {
forAll(minSuccessful(1000)) { (l1: Set[Int], l2: Set[Int]) =>
assert(sortedCompare[Int](l1, l2) ===
Ordering[Iterable[Int]].compare(l1.toList.sorted, l2.toList.sorted), "Matches scala's Iterable compare")
}
}
}
|
sriramkrishnan/scalding
|
scalding-serialization/src/test/scala/com/twitter/scalding/serialization/macros/TraversableHelperLaws.scala
|
Scala
|
apache-2.0
| 2,008 |
package com.phasmid.hedge_fund.rules
import scala.util.Try
import scala.util._
import scala.util.Failure
/**
* @author robinhillyard
*/
case class NumberPredicate(variable: String, operator: Operator[Double], value: Double) extends Predicate {
def apply(candidate: Candidate): Either[Throwable, Boolean] = candidate(variable) match {
case Some(x) => Try { operate(x, operator, value) } match {
case Success(v) => Right(v)
case Failure(f) => Left(f)
}
case _ => Left(new Exception(s"variable $variable not found in $candidate"))
}
// CONSIDER Moving this into Operator class
def operate(x: Any, operator: Operator[Double], value: Double): Boolean = {
x match {
case y: Double => operator(y, value)
case y: Int => operator(y, value)
case y: String => operator(y.toDouble, value)
case _ => throw new Exception(s"variable $variable cannot be for operator $operator")
}
}
}
object NumberPredicate {
def apply(variable: String, operator: String, value: Double): NumberPredicate =
new NumberPredicate(variable, Operator.createNumeric(operator), value)
def apply(variable: String, operator: Operator[Double], value: String): NumberPredicate =
new NumberPredicate(variable, operator, value.toDouble)
def apply(variable: String, operator: String, value: String): NumberPredicate =
apply(variable, Operator.createNumeric(operator), value)
def apply(predicate: String): NumberPredicate = {
val rPredicate = """^\s*(\w+)\s*([=<>]{1,2})\s*(-?[0-9]+\.?[0-9]*)\s*$""".r
predicate match {
case rPredicate(v, o, n) => apply(v, o, n)
case _ => throw new Exception(s"predicate: $predicate is malformed")
}
}
}
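// Hypothetical usage sketch (not part of the original file): parses a textual predicate with
// the regex-based factory above and evaluates it against a Candidate (defined elsewhere in
// this package). The variable name "price" and the ">" operator symbol are illustrative
// assumptions only.
object NumberPredicateDemo {
  def evaluate(candidate: Candidate): Either[Throwable, Boolean] = {
    val predicate = NumberPredicate("price > 100.0") // variable, operator, numeric literal
    predicate(candidate)
  }
}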
|
rchillyard/Scalaprof
|
hedge-fund/src/main/scala/com/phasmid/hedge_fund/rules/NumberPredicate.scala
|
Scala
|
gpl-2.0
| 1,709 |
package org.jetbrains.sbt.project.modifier.ui
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import com.intellij.openapi.vcs.changes.Change
import com.intellij.openapi.vcs.changes.actions.{ShowDiffAction, ShowDiffUIContext}
import com.intellij.openapi.vcs.changes.ui.ChangesBrowser
import com.intellij.openapi.vfs.VirtualFile
import scala.collection.JavaConversions._
import scala.collection.mutable
/**
* @author Roman.Shein
* @since 20.03.2015.
*/
class BuildFileChangeBrowser(val project: Project, val changes: java.util.List[Change], val canExcludeChanges: Boolean,
val fileChangesMap: mutable.Map[VirtualFile, (BuildFileModifiedStatus, Long)]) extends
ChangesBrowser(project, null, changes, null, canExcludeChanges, true, null, ChangesBrowser.MyUseCase.LOCAL_CHANGES,
null) {
override def afterDiffRefresh() {
val updatedChanges = new java.util.ArrayList[Change]
updatedChanges.addAll(
getSelectedChanges map {
myChange => {
val changeSwapped = BuildFileChange.swap(myChange.asInstanceOf[BuildFileChange])
fileChangesMap.get(changeSwapped.getVirtualFile).map {
case (modifiedStatus, modificationStamp) =>
val newModificationStamp = FileDocumentManager.getInstance().getDocument(changeSwapped.getVirtualFile)
.getModificationStamp
if (newModificationStamp != modificationStamp) {
val newStatus = modifiedStatus.changeAfterManualModification()
fileChangesMap.put(changeSwapped.getVirtualFile, (newStatus, newModificationStamp))
BuildFileChange.swap(new BuildFileChange(changeSwapped.getBeforeRevision, changeSwapped.getAfterRevision, newStatus))
} else myChange
case _ => myChange
}.getOrElse(myChange)
}
}
)
setChangesToDisplay(updatedChanges)
}
override protected def showDiffForChanges(changesArray: Array[Change], indexInSelection: Int) {
val context: ShowDiffUIContext = new ShowDiffUIContext(false)
val changesArraySwapped: Array[Change] = for (change <- changesArray)
yield BuildFileChange.swap(change.asInstanceOf[BuildFileChange])
ShowDiffAction.showDiffForChange(changesArraySwapped, indexInSelection, myProject, context)
}
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/sbt/project/modifier/ui/BuildFileChangeBrowser.scala
|
Scala
|
apache-2.0
| 2,365 |
package scala.meta.internal.scalacp
import scala.meta.internal.classpath._
import scala.meta.internal.metacp._
import scala.meta.internal.{semanticdb => s}
import scala.meta.cli._
import scala.meta.metacp._
import scala.tools.scalap.scalax.rules.scalasig._
final class Scalacp private (
val symbolIndex: SymbolIndex,
val settings: Settings,
val reporter: Reporter
) extends AnnotationOps
with SymbolInformationOps
with SymbolOps
with TypeOps {
def parse(node: ScalaSigNode): ClassfileInfos = {
val sinfos = node.scalaSig.symbols.toList.flatMap {
case sym: SymbolInfoSymbol => this.sinfos(sym)
case _ => Nil
}
val snonlocalInfos = sinfos.filter(sinfo => !hardlinks.contains(sinfo.symbol))
ClassfileInfos(node.relativeUri, s.Language.SCALA, snonlocalInfos)
}
private def sinfos(sym: SymbolInfoSymbol): List[s.SymbolInformation] = {
if (sym.isSemanticdbLocal) return Nil
if (sym.isUseless) return Nil
val ssym = sym.ssym
if (ssym.contains("$extension")) return Nil
val sinfo = sym.toSymbolInformation(SymlinkChildren)
if (sym.isUsefulField && sym.isMutable) {
List(sinfo) ++ Synthetics.setterInfos(sinfo, SymlinkChildren)
} else {
List(sinfo)
}
}
}
object Scalacp {
def parse(
node: ScalaSigNode,
classpathIndex: ClasspathIndex,
settings: Settings,
reporter: Reporter
): ClassfileInfos = {
val symbolIndex = SymbolIndex(classpathIndex)
val scalacp = new Scalacp(symbolIndex, settings, reporter)
scalacp.parse(node)
}
}
|
olafurpg/scalameta
|
semanticdb/metacp/src/main/scala/scala/meta/internal/scalacp/Scalacp.scala
|
Scala
|
bsd-3-clause
| 1,566 |
package parser
import cats.instances.either._
import cats.instances.list._
import cats.syntax.either._
import cats.syntax.functor._
import cats.syntax.traverse._
import cats.{Monad, SemigroupK}
import org.apache.poi.ss.usermodel.Workbook
import org.apache.poi.ss.util.{AreaReference, CellReference}
import parser.ParserError._
import scala.annotation.tailrec
import scala.reflect.ClassTag
/** Typeclass to read data from a workbook */
trait Parser[A]{ self =>
def parse(workbook: Workbook): Either[ParserError, A]
def flatMap[B](f: A => Parser[B]): Parser[B] = new Parser[B] {
def parse(workbook: Workbook): Either[ParserError, B] =
self.parse(workbook).flatMap(f(_).parse(workbook))
}
}
object Parser {
def apply[A](implicit ev: Parser[A]): Parser[A] = ev
def success[A](value: A): Parser[A] = lift(Right(value))
def fail[A](error: ParserError): Parser[A] = lift(Left(error))
def lift[A](res: Either[ParserError, A]): Parser[A] = new Parser[A] {
def parse(workbook: Workbook): Either[ParserError, A] = res
}
def boolean(name: String): Parser[Boolean] =
single(name).map(_.asBoolean).flatMap(lift)
def booleanRange(name: String): Parser[List[Boolean]] =
range(name).map(_.traverse(_.asBoolean)).flatMap(lift)
def booleanInt(name: String): Parser[Boolean] =
int(name).flatMap{
case 0 => success(false)
case 1 => success(true)
case x => fail(invalidFormat(name, "Boolean (0/1)", x.toString))
}
def numeric(name: String): Parser[Double] =
single(name).map(_.asDouble).flatMap(lift)
def numericRange(name: String): Parser[List[Double]] =
range(name).map(_.traverse(_.asDouble)).flatMap(lift)
def int(name: String): Parser[Int] =
single(name).map(_.asInt).flatMap(lift)
def intRange(name: String): Parser[List[Int]] =
range(name).map(_.traverse(_.asInt)).flatMap(lift)
def string(name: String): Parser[String] =
single(name).map(_.asString).flatMap(lift)
def stringRange(name: String): Parser[List[String]] =
range(name).map(_.traverse(_.asString)).flatMap(lift)
def single(name: String): Parser[SafeCell] =
range(name).flatMap{
case x :: Nil => success(x)
case Nil => fail(invalidFormat(name, "single cell", "empty"))
case _ => fail(invalidFormat(name, "single cell", "several cells"))
}
def range(name: String): Parser[List[SafeCell]] = new Parser[List[SafeCell]] {
def parse(workbook: Workbook): Either[ParserError, List[SafeCell]] =
for {
area <- getArea(workbook, name)
cells <- area.getAllReferencedCells.toList.traverse(getSafeCell(workbook, _))
} yield cells
}
def getArea(workbook: Workbook, name: String): Either[ParserError, AreaReference] =
Either.catchNonFatal(
new AreaReference(workbook.getName(name).getRefersToFormula, workbook.getSpreadsheetVersion)
).leftMap(_ => ParserError.missingName(name))
def getSafeCell(workbook: Workbook, cellRef: CellReference): Either[ParserError, SafeCell] =
Either.catchNonFatal(SafeCell(
workbook
.getSheet(cellRef.getSheetName)
.getRow(cellRef.getRow)
.getCell(cellRef.getCol)
)).leftMap(_ => ParserError.missingCell(cellRef.toString))
implicit val instance: Monad[Parser] with SemigroupK[Parser] = new Monad[Parser] with SemigroupK[Parser] {
def combineK[A](x: Parser[A], y: Parser[A]): Parser[A] = new Parser[A] {
def parse(workbook: Workbook): Either[ParserError, A] =
x.parse(workbook) orElse y.parse(workbook)
}
def flatMap[A, B](fa: Parser[A])(f: A => Parser[B]): Parser[B] =
fa.flatMap(f)
def tailRecM[A, B](a: A)(f: A => Parser[Either[A, B]]): Parser[B] = new Parser[B] {
def parse(workbook: Workbook): Either[ParserError, B] = {
@tailrec
def loop(thisA: A): Either[ParserError, B] = f(thisA).parse(workbook) match {
case Left(a1) => Left(a1)
case Right(Left(a1)) => loop(a1)
case Right(Right(b)) => Right(b)
}
loop(a)
}
}
def pure[A](x: A): Parser[A] = success(x)
}
}
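// Hypothetical usage sketch (not part of the original file): composes two named-range parsers
// into a small config object using flatMap from the Parser trait. The workbook path and the
// named ranges "Rate" and "Labels" are illustrative assumptions; WorkbookFactory comes from
// Apache POI, which this module already depends on.
object ParserDemo {
  import java.io.File
  import org.apache.poi.ss.usermodel.WorkbookFactory

  final case class Config(rate: Double, labels: List[String])

  val configParser: Parser[Config] =
    Parser.numeric("Rate").flatMap { rate =>
      Parser.stringRange("Labels").flatMap(labels => Parser.success(Config(rate, labels)))
    }

  def load(path: String): Either[ParserError, Config] =
    configParser.parse(WorkbookFactory.create(new File(path)))
}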
|
julien-truffaut/fp-api
|
core/src/main/scala/parser/Parser.scala
|
Scala
|
apache-2.0
| 4,116 |
package org.denigma.kappa.notebook.graph.layouts
import org.denigma.threejs.PerspectiveCamera
/**
* Created by antonkulaga on 21/06/16.
*/
trait GraphLayout
{
/*
type Node
type Edge
def nodes: Vector[Node]
def edges: Vector[Edge]
*/
def active: Boolean
def tick(width: Double, height: Double, camera: PerspectiveCamera): Unit //ticks
def stop(): Unit
def pause(): Unit
def start(width: Double, height: Double, camera: PerspectiveCamera): Unit
}
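// Hypothetical minimal implementation sketch (not part of the original file): a no-op layout
// that only tracks the active flag, illustrating the lifecycle contract (start/tick/pause/stop)
// that concrete force-directed layouts implementing this trait are expected to follow.
class NoOpLayout extends GraphLayout {
  private var running = false
  def active: Boolean = running
  def start(width: Double, height: Double, camera: PerspectiveCamera): Unit = { running = true }
  def tick(width: Double, height: Double, camera: PerspectiveCamera): Unit = () // nothing to move
  def pause(): Unit = { running = false }
  def stop(): Unit = { running = false }
}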
|
antonkulaga/kappa-notebook
|
app/js/src/main/scala/org/denigma/kappa/notebook/graph/layouts/GraphLayout.scala
|
Scala
|
mpl-2.0
| 476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.net.URL
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.mutable
import scala.util.{Failure, Success}
import scala.util.control.NonFatal
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.deploy.worker.WorkerWatcher
import org.apache.spark.internal.Logging
import org.apache.spark.rpc._
import org.apache.spark.scheduler.{ExecutorLossReason, TaskDescription}
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.util.{ThreadUtils, Utils}
private[spark] class CoarseGrainedExecutorBackend(
override val rpcEnv: RpcEnv,
driverUrl: String,
executorId: String,
hostname: String,
cores: Int,
userClassPath: Seq[URL],
env: SparkEnv)
extends ThreadSafeRpcEndpoint with ExecutorBackend with Logging {
private[this] val stopping = new AtomicBoolean(false)
var executor: Executor = null
@volatile var driver: Option[RpcEndpointRef] = None
override def onStart() {
logInfo("Connecting to driver: " + driverUrl)
rpcEnv.asyncSetupEndpointRefByURI(driverUrl).flatMap { ref =>
// This is a very fast action so we can use "ThreadUtils.sameThread"
driver = Some(ref)
ref.ask[Boolean](RegisterExecutor(executorId, self, hostname, cores, extractLogUrls))
}(ThreadUtils.sameThread).onComplete {
// This is a very fast action so we can use "ThreadUtils.sameThread"
case Success(msg) =>
// Always receive `true`. Just ignore it
case Failure(e) =>
exitExecutor(1, s"Cannot register with driver: $driverUrl", e, notifyDriver = false)
}(ThreadUtils.sameThread)
}
protected def registerExecutor: Executor =
new Executor(executorId, hostname, env, userClassPath, isLocal = false)
def extractLogUrls: Map[String, String] = {
val prefix = "SPARK_LOG_URL_"
sys.env.filterKeys(_.startsWith(prefix))
.map(e => (e._1.substring(prefix.length).toLowerCase, e._2))
}
override def receive: PartialFunction[Any, Unit] = {
case RegisteredExecutor =>
logInfo("Successfully registered with driver")
try {
executor = registerExecutor
} catch {
case NonFatal(e) =>
exitExecutor(1, "Unable to create executor due to " + e.getMessage, e)
}
case RegisterExecutorFailed(message) =>
exitExecutor(1, "Slave registration failed: " + message)
case LaunchTask(taskDesc) =>
if (executor == null) {
exitExecutor(1, "Received LaunchTask command but executor was null")
} else {
env.taskLogger.logInfo("Got assigned task " + taskDesc.taskId)
executor.launchTask(this, taskId = taskDesc.taskId, attemptNumber = taskDesc.attemptNumber,
taskDesc.name, taskDesc.serializedTask, taskDesc.taskData.decompress(env))
}
case LaunchTasks(tasks, taskDataList) =>
if (executor ne null) {
logDebug("Got assigned tasks " + tasks.map(_.taskId).mkString(","))
for (task <- tasks) {
env.taskLogger.logInfo("Got assigned task " + task.taskId)
val ref = task.taskData.reference
val taskData = if (ref >= 0) taskDataList(ref) else task.taskData
executor.launchTask(this, taskId = task.taskId,
attemptNumber = task.attemptNumber, task.name, task.serializedTask,
taskData.decompress(env))
}
} else {
exitExecutor(1, "Received LaunchTasks command but executor was null")
}
case KillTask(taskId, _, interruptThread) =>
if (executor == null) {
exitExecutor(1, "Received KillTask command but executor was null")
} else {
executor.killTask(taskId, interruptThread)
}
case StopExecutor =>
stopping.set(true)
logInfo("Driver commanded a shutdown")
// Cannot shutdown here because an ack may need to be sent back to the caller. So send
// a message to self to actually do the shutdown.
self.send(Shutdown)
case Shutdown =>
stopping.set(true)
new Thread("CoarseGrainedExecutorBackend-stop-executor") {
override def run(): Unit = {
// executor.stop() will call `SparkEnv.stop()` which waits until RpcEnv stops totally.
// However, if `executor.stop()` runs in some thread of RpcEnv, RpcEnv won't be able to
// stop until `executor.stop()` returns, which becomes a dead-lock (See SPARK-14180).
// Therefore, we put this line in a new thread.
executor.stop()
}
}.start()
}
override def onDisconnected(remoteAddress: RpcAddress): Unit = {
if (stopping.get()) {
logInfo(s"Driver from $remoteAddress disconnected during shutdown")
} else if (driver.exists(_.address == remoteAddress)) {
exitExecutor(1, s"Driver $remoteAddress disassociated! Shutting down.", null,
notifyDriver = false)
} else {
logWarning(s"An unknown ($remoteAddress) driver disconnected.")
}
}
override def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer) {
val msg = StatusUpdate(executorId, taskId, state, data)
driver match {
case Some(driverRef) => driverRef.send(msg)
case None => logWarning(s"Drop $msg because has not yet connected to driver")
}
}
/**
   * This function can be overridden by child classes to handle
   * executor exits differently. For example, when an executor goes down,
   * the back-end may not want to take the parent process down.
*/
protected def exitExecutor(code: Int,
reason: String,
throwable: Throwable = null,
notifyDriver: Boolean = true) = {
val message = "Executor self-exiting due to : " + reason
if (throwable != null) {
logError(message, throwable)
} else {
logError(message)
}
if (notifyDriver && driver.nonEmpty) {
driver.get.ask[Boolean](
RemoveExecutor(executorId, new ExecutorLossReason(reason))
).onFailure { case e =>
logWarning(s"Unable to notify the driver due to " + e.getMessage, e)
}(ThreadUtils.sameThread)
}
System.exit(code)
}
}
private[spark] object CoarseGrainedExecutorBackend extends Logging {
private def run(
driverUrl: String,
executorId: String,
hostname: String,
cores: Int,
appId: String,
workerUrl: Option[String],
userClassPath: Seq[URL]) {
Utils.initDaemon(log)
SparkHadoopUtil.get.runAsSparkUser { () =>
// Debug code
Utils.checkHost(hostname)
// Bootstrap to fetch the driver's Spark properties.
val executorConf = new SparkConf
val port = executorConf.getInt("spark.executor.port", 0)
val fetcher = RpcEnv.create(
"driverPropsFetcher",
hostname,
port,
executorConf,
new SecurityManager(executorConf),
clientMode = true)
val driver = fetcher.setupEndpointRefByURI(driverUrl)
val cfg = driver.askWithRetry[SparkAppConfig](RetrieveSparkAppConfig)
val props = cfg.sparkProperties ++ Seq[(String, String)](("spark.app.id", appId))
fetcher.shutdown()
// Create SparkEnv using properties we fetched from the driver.
val driverConf = new SparkConf()
for ((key, value) <- props) {
// this is required for SSL in standalone mode
if (SparkConf.isExecutorStartupConf(key)) {
driverConf.setIfMissing(key, value)
} else {
driverConf.set(key, value)
}
}
if (driverConf.contains("spark.yarn.credentials.file")) {
logInfo("Will periodically update credentials from: " +
driverConf.get("spark.yarn.credentials.file"))
SparkHadoopUtil.get.startCredentialUpdater(driverConf)
}
val env = SparkEnv.createExecutorEnv(
driverConf, executorId, hostname, port, cores, cfg.ioEncryptionKey, isLocal = false)
env.rpcEnv.setupEndpoint("Executor", new CoarseGrainedExecutorBackend(
env.rpcEnv, driverUrl, executorId, hostname, cores, userClassPath, env))
workerUrl.foreach { url =>
env.rpcEnv.setupEndpoint("WorkerWatcher", new WorkerWatcher(env.rpcEnv, url))
}
env.rpcEnv.awaitTermination()
SparkHadoopUtil.get.stopCredentialUpdater()
}
}
def main(args: Array[String]) {
var driverUrl: String = null
var executorId: String = null
var hostname: String = null
var cores: Int = 0
var appId: String = null
var workerUrl: Option[String] = None
val userClassPath = new mutable.ListBuffer[URL]()
var argv = args.toList
while (!argv.isEmpty) {
argv match {
case ("--driver-url") :: value :: tail =>
driverUrl = value
argv = tail
case ("--executor-id") :: value :: tail =>
executorId = value
argv = tail
case ("--hostname") :: value :: tail =>
hostname = value
argv = tail
case ("--cores") :: value :: tail =>
cores = value.toInt
argv = tail
case ("--app-id") :: value :: tail =>
appId = value
argv = tail
case ("--worker-url") :: value :: tail =>
// Worker url is used in spark standalone mode to enforce fate-sharing with worker
workerUrl = Some(value)
argv = tail
case ("--user-class-path") :: value :: tail =>
userClassPath += new URL(value)
argv = tail
case Nil =>
case tail =>
// scalastyle:off println
System.err.println(s"Unrecognized options: ${tail.mkString(" ")}")
// scalastyle:on println
printUsageAndExit()
}
}
if (driverUrl == null || executorId == null || hostname == null || cores <= 0 ||
appId == null) {
printUsageAndExit()
}
run(driverUrl, executorId, hostname, cores, appId, workerUrl, userClassPath)
System.exit(0)
}
private def printUsageAndExit() = {
// scalastyle:off println
System.err.println(
"""
|Usage: CoarseGrainedExecutorBackend [options]
|
| Options are:
| --driver-url <driverUrl>
| --executor-id <executorId>
| --hostname <hostname>
| --cores <cores>
| --app-id <appid>
| --worker-url <workerUrl>
| --user-class-path <url>
|""".stripMargin)
// scalastyle:on println
System.exit(1)
}
}
|
SnappyDataInc/spark
|
core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
|
Scala
|
apache-2.0
| 11,382 |
package controllers
import java.util.concurrent.TimeUnit
import org.specs2.mutable.Specification
import play.api.libs.json.Json
import play.api.test.{FakeApplication, FakeRequest}
//import play.api.test.Helpers.{BAD_REQUEST, CREATED, POST, contentAsString, route, running}
import play.api.test.Helpers._
import scala.concurrent.Await
import scala.concurrent.duration.FiniteDuration
/**
* You can mock out a whole application including requests, plugins etc.
* For more information, consult the wiki.
*/
class UsersIT extends Specification {
val timeout: FiniteDuration = FiniteDuration(5, TimeUnit.SECONDS)
"Users" should {
"insert a valid json" in {
running(FakeApplication()) {
val request = FakeRequest.apply(POST, "/user").withJsonBody(Json.obj(
"firstName" -> "Jack",
"lastName" -> "London",
"age" -> 27,
"active" -> true))
val response = route(request)
response.isDefined mustEqual true
val result = Await.result(response.get, timeout)
result.header.status must equalTo(CREATED)
}
}
"fail inserting a non valid json" in {
running(FakeApplication()) {
val request = FakeRequest.apply(POST, "/user").withJsonBody(Json.obj(
"firstName" -> 98,
"lastName" -> "London",
"age" -> 27))
val response = route(request)
response.isDefined mustEqual true
val result = Await.result(response.get, timeout)
contentAsString(response.get) mustEqual "invalid json"
result.header.status mustEqual BAD_REQUEST
}
}
}
}
|
amsterdam-scala/play-reactive-mongo-coffee-angular
|
test/controllers/UsersIT.scala
|
Scala
|
apache-2.0
| 1,622 |
package spire.macros
import language.experimental.macros
import scala.reflect.macros.Context
import spire.algebra._
object Auto {
object scala {
def semiring[A]: Semiring[A] = macro ScalaAutoMacros.semiringImpl[A]
def rig[A](z: A, o: A): Rig[A] = macro ScalaAutoMacros.rigImpl[A]
def rng[A](z: A): Rng[A] = macro ScalaAutoMacros.rngImpl[A]
def ring[A](z: A, o: A): Ring[A] = macro ScalaAutoMacros.ringImpl[A]
def euclideanRing[A](z: A, o: A)(implicit ev: Eq[A]): EuclideanRing[A] = macro ScalaAutoMacros.euclideanRingImpl[A]
def field[A](z: A, o: A)(implicit ev: Eq[A]): Field[A] = macro ScalaAutoMacros.fieldImpl[A]
def eq[A]: Eq[A] = macro ScalaAutoMacros.eqImpl[A]
// TODO: partialOrder ?
def order[A]: Order[A] = macro ScalaAutoMacros.orderImpl[A]
object collection {
def semigroup[A]: Semigroup[A] = macro ScalaAutoMacros.collectionSemigroupImpl[A]
def monoid[A](z: A): Monoid[A] = macro ScalaAutoMacros.collectionMonoidImpl[A]
}
}
object java {
def semiring[A]: Semiring[A] = macro JavaAutoMacros.semiringImpl[A]
def rig[A](z: A, o: A): Rig[A] = macro JavaAutoMacros.rigImpl[A]
def rng[A](z: A): Rng[A] = macro JavaAutoMacros.rngImpl[A]
def ring[A](z: A, o: A): Ring[A] = macro JavaAutoMacros.ringImpl[A]
def euclideanRing[A](z: A, o: A)(implicit ev: Eq[A]): EuclideanRing[A] = macro JavaAutoMacros.euclideanRingImpl[A]
def field[A](z: A, o: A)(implicit ev: Eq[A]): Field[A] = macro JavaAutoMacros.fieldImpl[A]
def eq[A]: Eq[A] = macro JavaAutoMacros.eqImpl[A]
// TODO: partialOrder ?
def order[A]: Order[A] = macro JavaAutoMacros.orderImpl[A]
object collection {
def monoid[A](empty: A): Monoid[A] = macro JavaAutoMacros.collectionMonoidImpl[A]
}
}
}
abstract class AutoOps {
val c: Context
import c.universe._
def unop[A](name: String, x: String = "x"): c.Expr[A] =
c.Expr[A](Select(Ident(newTermName(x)), newTermName(name)))
def binop[A](name: String, x: String = "x", y: String = "y"): c.Expr[A] =
c.Expr[A](Apply(
Select(Ident(newTermName(x)), newTermName(name)),
List(Ident(newTermName(y)))))
def binopSearch[A: c.WeakTypeTag](names: List[String], x: String = "x", y: String = "y"): Option[c.Expr[A]] =
names find { name => hasMethod1[A, A, A](name) } map (binop[A](_, x, y))
def unopSearch[A: c.WeakTypeTag](names: List[String], x: String = "x"): Option[c.Expr[A]] =
names find { name => hasMethod0[A, A](name) } map (unop[A](_, x))
def hasMethod0[A: c.WeakTypeTag, B: c.WeakTypeTag](name: String): Boolean = {
val tpeA = c.weakTypeTag[A].tpe
val tpeB = c.weakTypeTag[B].tpe
tpeA.members exists { m =>
m.isMethod && m.isPublic && m.name.encoded == name && (m.typeSignature match {
case MethodType(Nil, ret) => ret =:= tpeB
case _ => false
})
}
}
def hasMethod1[A: c.WeakTypeTag, B: c.WeakTypeTag, C: c.WeakTypeTag](name: String): Boolean = {
val tpeA = c.weakTypeTag[A].tpe
val tpeB = c.weakTypeTag[B].tpe
val tpeC = c.weakTypeTag[C].tpe
tpeA.members exists { m =>
m.isMethod && m.isPublic && m.name.encoded == name && (m.typeSignature match {
case MethodType(List(param), ret) =>
param.typeSignature =:= tpeB && ret =:= tpeC
case _ =>
false
})
}
}
def failedSearch(name: String, op: String): c.Expr[Nothing] =
c.abort(c.enclosingPosition,
"Couldn't find matching method for op %s (%s)." format (name, op))
}
abstract class AutoAlgebra extends AutoOps { ops =>
def plus[A: c.WeakTypeTag]: c.Expr[A]
def minus[A: c.WeakTypeTag]: c.Expr[A]
def times[A: c.WeakTypeTag]: c.Expr[A]
def negate[A: c.WeakTypeTag]: c.Expr[A]
def div[A: c.WeakTypeTag]: c.Expr[A]
def quot[A: c.WeakTypeTag]: c.Expr[A]
def mod[A: c.WeakTypeTag](stub: => c.Expr[A] = failedSearch("mod", "%")): c.Expr[A]
def equals: c.Expr[Boolean]
def compare: c.Expr[Int]
def Semiring[A: c.WeakTypeTag](): c.Expr[Semiring[A]] = {
c.universe.reify {
new Semiring[A] {
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
}
}
}
def Rig[A: c.WeakTypeTag](z: c.Expr[A], o: c.Expr[A]): c.Expr[Rig[A]] = {
c.universe.reify {
new Rig[A] {
def zero: A = z.splice
def one: A = o.splice
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
}
}
}
def Rng[A: c.WeakTypeTag](z: c.Expr[A]): c.Expr[Rng[A]] = {
c.universe.reify {
new Rng[A] {
def zero: A = z.splice
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
override def minus(x: A, y: A): A = ops.minus[A].splice
def negate(x: A): A = ops.negate[A].splice
}
}
}
def Ring[A: c.WeakTypeTag](z: c.Expr[A], o: c.Expr[A]): c.Expr[Ring[A]] = {
c.universe.reify {
new Ring[A] {
def zero: A = z.splice
def one: A = o.splice
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
override def minus(x: A, y: A): A = ops.minus[A].splice
def negate(x: A): A = ops.negate[A].splice
}
}
}
def EuclideanRing[A: c.WeakTypeTag](z: c.Expr[A], o: c.Expr[A])
(ev: c.Expr[Eq[A]]): c.Expr[EuclideanRing[A]] = {
c.universe.reify {
new EuclideanRing[A] {
def zero: A = z.splice
def one: A = o.splice
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
override def minus(x: A, y: A): A = ops.minus[A].splice
def negate(x: A): A = ops.negate[A].splice
def quot(x: A, y: A): A = ops.quot[A].splice
def mod(x: A, y: A): A = ops.mod[A]().splice
def gcd(x: A, y: A): A = euclid(x, y)(ev.splice)
}
}
}
def Field[A: c.WeakTypeTag]
(z: c.Expr[A], o: c.Expr[A])(ev: c.Expr[Eq[A]]): c.Expr[Field[A]] = {
c.universe.reify {
new Field[A] {
def zero: A = z.splice
def one: A = o.splice
def plus(x: A, y: A): A = ops.plus[A].splice
def times(x: A, y: A): A = ops.times[A].splice
override def minus(x: A, y: A): A = ops.minus[A].splice
def negate(x: A): A = ops.negate[A].splice
def quot(x: A, y: A): A = ops.div[A].splice
def mod(x: A, y: A): A = ops.mod[A](z).splice
def gcd(x: A, y: A): A = euclid(x, y)(ev.splice)
def div(x: A, y: A): A = ops.div[A].splice
}
}
}
def Eq[A: c.WeakTypeTag](): c.Expr[Eq[A]] = {
c.universe.reify {
new Eq[A] {
def eqv(x: A, y: A): Boolean = ops.equals.splice
}
}
}
def Order[A: c.WeakTypeTag](): c.Expr[Order[A]] = {
c.universe.reify {
new Order[A] {
override def eqv(x: A, y: A): Boolean = ops.equals.splice
def compare(x: A, y: A): Int = ops.compare.splice
}
}
}
}
case class ScalaAlgebra[C <: Context](c: C) extends AutoAlgebra {
def plusplus[A] = binop[A]("$plus$plus")
def plus[A: c.WeakTypeTag] = binop[A]("$plus")
def minus[A: c.WeakTypeTag] = binop[A]("$minus")
def times[A: c.WeakTypeTag] = binop[A]("$times")
def negate[A: c.WeakTypeTag] = unop[A]("unary_$minus")
def quot[A: c.WeakTypeTag] = binopSearch[A]("quot" :: "$div" :: Nil) getOrElse failedSearch("quot", "/~")
def div[A: c.WeakTypeTag] = binop[A]("$div")
def mod[A: c.WeakTypeTag](stub: => c.Expr[A]) = binop[A]("$percent")
def equals = binop[Boolean]("$eq$eq")
def compare = binop[Int]("compare")
}
case class JavaAlgebra[C <: Context](c: C) extends AutoAlgebra {
def plus[A: c.WeakTypeTag] =
binopSearch[A]("add" :: "plus" :: Nil) getOrElse failedSearch("plus", "+")
def minus[A: c.WeakTypeTag] =
binopSearch[A]("subtract" :: "minus" :: Nil) getOrElse failedSearch("minus", "-")
def times[A: c.WeakTypeTag] =
binopSearch[A]("multiply" :: "times" :: Nil) getOrElse failedSearch("times", "*")
def div[A: c.WeakTypeTag] =
binopSearch[A]("divide" :: "div" :: Nil) getOrElse failedSearch("div", "/")
def negate[A: c.WeakTypeTag] =
unopSearch[A]("negate" :: "negative" :: Nil) getOrElse {
      // We can implement negate in terms of minus. This is actually required
// for JScience's Rational :(
import c.universe._
c.Expr[A](Apply(
Select(Ident(newTermName("zero")), newTermName("minus")),
List(Ident(newTermName("x")))))
}
def quot[A: c.WeakTypeTag] =
binopSearch[A]("quot" :: "divide" :: "div" :: Nil) getOrElse failedSearch("quot", "/~")
def mod[A: c.WeakTypeTag](stub: => c.Expr[A]) =
binopSearch("mod" :: "remainder" :: Nil) getOrElse stub
def equals = binop[Boolean]("equals")
def compare = binop[Int]("compareTo")
}
object ScalaAutoMacros {
def semiringImpl[A: c.WeakTypeTag](c: Context): c.Expr[Semiring[A]] =
ScalaAlgebra[c.type](c).Semiring[A]()
def rigImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A], o: c.Expr[A]): c.Expr[Rig[A]] =
ScalaAlgebra[c.type](c).Rig[A](z, o)
def rngImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A]): c.Expr[Rng[A]] =
ScalaAlgebra[c.type](c).Rng[A](z)
def ringImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A], o: c.Expr[A]): c.Expr[Ring[A]] =
ScalaAlgebra[c.type](c).Ring[A](z, o)
def euclideanRingImpl[A: c.WeakTypeTag](c: Context)
(z: c.Expr[A], o: c.Expr[A])(ev: c.Expr[Eq[A]]): c.Expr[EuclideanRing[A]] =
ScalaAlgebra[c.type](c).EuclideanRing[A](z, o)(ev)
def fieldImpl[A: c.WeakTypeTag](c: Context)
(z: c.Expr[A], o: c.Expr[A])(ev: c.Expr[Eq[A]]): c.Expr[Field[A]] =
ScalaAlgebra[c.type](c).Field[A](z, o)(ev)
def eqImpl[A: c.WeakTypeTag](c: Context): c.Expr[Eq[A]] =
ScalaAlgebra[c.type](c).Eq[A]()
def orderImpl[A: c.WeakTypeTag](c: Context): c.Expr[Order[A]] =
ScalaAlgebra[c.type](c).Order[A]()
def collectionSemigroupImpl[A: c.WeakTypeTag](c: Context): c.Expr[Semigroup[A]] = {
val ops = ScalaAlgebra[c.type](c)
c.universe.reify {
new Semigroup[A] {
def op(x: A, y: A): A = ops.plusplus[A].splice
}
}
}
def collectionMonoidImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A]): c.Expr[Monoid[A]] = {
val ops = ScalaAlgebra[c.type](c)
c.universe.reify {
new Monoid[A] {
def id: A = z.splice
def op(x: A, y: A): A = ops.plusplus[A].splice
}
}
}
}
object JavaAutoMacros {
def semiringImpl[A: c.WeakTypeTag](c: Context): c.Expr[Semiring[A]] =
JavaAlgebra[c.type](c).Semiring[A]()
def rigImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A], o: c.Expr[A]): c.Expr[Rig[A]] =
JavaAlgebra[c.type](c).Rig[A](z, o)
def rngImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A]): c.Expr[Rng[A]] =
JavaAlgebra[c.type](c).Rng[A](z)
def ringImpl[A: c.WeakTypeTag](c: Context)(z: c.Expr[A], o: c.Expr[A]): c.Expr[Ring[A]] =
JavaAlgebra[c.type](c).Ring[A](z, o)
def euclideanRingImpl[A: c.WeakTypeTag](c: Context)
(z: c.Expr[A], o: c.Expr[A])(ev: c.Expr[Eq[A]]): c.Expr[EuclideanRing[A]] =
JavaAlgebra[c.type](c).EuclideanRing[A](z, o)(ev)
def fieldImpl[A: c.WeakTypeTag](c: Context)
(z: c.Expr[A], o: c.Expr[A])(ev: c.Expr[Eq[A]]): c.Expr[Field[A]] =
JavaAlgebra[c.type](c).Field[A](z, o)(ev)
def eqImpl[A: c.WeakTypeTag](c: Context): c.Expr[Eq[A]] =
JavaAlgebra[c.type](c).Eq[A]()
def orderImpl[A: c.WeakTypeTag](c: Context): c.Expr[Order[A]] =
JavaAlgebra[c.type](c).Order[A]()
def collectionMonoidImpl[A: c.WeakTypeTag](c: Context)(empty: c.Expr[A]): c.Expr[Monoid[A]] = {
val ops = JavaAlgebra[c.type](c)
val addx = ops.binop[Unit]("addAll", "z", "x")
val addy = ops.binop[Unit]("addAll", "z", "y")
c.universe.reify {
new Monoid[A] {
def id: A = empty.splice
def op(x: A, y: A): A = {
val z = id
addx.splice
addy.splice
z
}
}
}
}
}
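// Hypothetical usage sketch (not part of the original file). Because these are def macros,
// callers must live in a separate compilation unit that depends on this one, so the snippet
// below is shown as a comment only. Assuming java.math.BigInteger, its add/multiply/subtract/
// negate/compareTo methods are enough for the java variants to derive a Ring and an Order:
//
//   import spire.algebra.{Order, Ring}
//   import spire.macros.Auto
//   import java.math.BigInteger
//
//   implicit val bigIntegerRing: Ring[BigInteger] =
//     Auto.java.ring[BigInteger](BigInteger.ZERO, BigInteger.ONE)
//   implicit val bigIntegerOrder: Order[BigInteger] =
//     Auto.java.order[BigInteger]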
|
lrytz/spire
|
core/src/main/scala/spire/macros/Auto.scala
|
Scala
|
mit
| 11,999 |
package se.hardchee.docker_scala.converters
import spray.httpx.unmarshalling.FromStringDeserializer
import spray.httpx.unmarshalling.MalformedContent
trait GoStringConverters {
implicit val goString2BooleanConverter = new FromStringDeserializer[Boolean] {
def apply(value: String) = value match {
case "1" | "true" | "True" ⇒ Right(true)
case "0" | "false" | "False" ⇒ Right(false)
case x ⇒ Left(MalformedContent("'" + x + "' is not a valid Boolean value"))
}
}
}
trait ToStringSerializers {
abstract class ToStringSerializer[T] {
def apply(a: T): String
}
implicit val Boolean2StringConverter = new ToStringSerializer[Boolean] {
def apply(v: Boolean) = v.toString
}
implicit val Int2StringConverter = new ToStringSerializer[Int] {
def apply(v: Int) = v.toString
}
implicit val Long2StringConverter = new ToStringSerializer[Long] {
def apply(v: Long) = v.toString
}
implicit def transform[T, U](v: Option[T])(implicit ev: ToStringSerializer[T]): Option[String] = {
v.map(ev.apply)
}
}
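// Hypothetical usage sketch (not part of the original file): shows the inputs the Go-style
// boolean deserializer accepts and what a malformed input produces. The expected results are
// Right(true), Right(false) and a Left(MalformedContent(...)) respectively.
object GoStringConvertersDemo extends GoStringConverters {
  val truthy = goString2BooleanConverter("1")     // Right(true)
  val falsy = goString2BooleanConverter("False")  // Right(false)
  val broken = goString2BooleanConverter("maybe") // Left(MalformedContent("'maybe' is not a valid Boolean value"))
}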
|
blast-hardcheese/docker-proxy
|
src/main/scala/GoStringConverters.scala
|
Scala
|
isc
| 1,092 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io._
import java.net.URI
import java.util.jar.{JarEntry, JarOutputStream}
import javax.net.ssl.SSLException
import com.google.common.io.{ByteStreams, Files}
import org.apache.commons.lang3.RandomUtils
import org.apache.spark.util.Utils
import SSLSampleConfigs._
class FileServerSuite extends SparkFunSuite with LocalSparkContext {
@transient var tmpDir: File = _
@transient var tmpFile: File = _
@transient var tmpJarUrl: String = _
  // Whether internal authentication is enabled
def newConf: SparkConf = new SparkConf(loadDefaults = false).set("spark.authenticate", "false")
override def beforeEach() {
super.beforeEach()
resetSparkContext()
}
override def beforeAll() {
super.beforeAll()
tmpDir = Utils.createTempDir()
val testTempDir = new File(tmpDir, "test")
testTempDir.mkdir()
val textFile = new File(testTempDir, "FileServerSuite.txt")
    // PrintWriter prints formatted representations of objects to a text output stream
val pw = new PrintWriter(textFile)
// scalastyle:off println
pw.println("100")
// scalastyle:on println
    pw.close() // write out the FileServerSuite.txt file
println("textFile:"+textFile.getCanonicalFile)
val jarFile = new File(testTempDir, "test.jar")
val jarStream = new FileOutputStream(jarFile)
    // output stream for the jar file
val jar = new JarOutputStream(jarStream, new java.util.jar.Manifest())
val jarEntry = new JarEntry(textFile.getName)
jar.putNextEntry(jarEntry)
println("textFile:"+jarFile.getCanonicalFile)
val in = new FileInputStream(textFile)
//com.google.common.io
ByteStreams.copy(in, jar)
in.close()
jar.close()
jarStream.close()
tmpFile = textFile
tmpJarUrl = jarFile.toURI.toURL.toString
}
override def afterAll() {
super.afterAll()
Utils.deleteRecursively(tmpDir)
}
  // Distributing files locally
test("Distributing files locally") {
sc = new SparkContext("local[4]", "test", newConf)
sc.addFile(tmpFile.toString)
println("locally:"+tmpFile.toString)
val testData = Array((1, 1), (1, 1), (2, 1), (3, 5), (2, 2), (3, 0))
val result = sc.parallelize(testData).reduceByKey {
val path = SparkFiles.get("FileServerSuite.txt") //读取文件路径
val in = new BufferedReader(new FileReader(path))
      val fileVal = in.readLine().toInt // read the value 100
in.close()
      // each _ stands for a grouped value
_ * fileVal + _ * fileVal
}.collect()
assert(result.toSet === Set((1, 200), (2, 300), (3, 500)))
}
  // Distributing files locally with security on
test("Distributing files locally security On") {
val sparkConf = new SparkConf(false)
    // Whether internal authentication is enabled
    sparkConf.set("spark.authenticate", "true")
    // Secret key used for authentication between components
    sparkConf.set("spark.authenticate.secret", "good")
sc = new SparkContext("local[4]", "test", sparkConf)
sc.addFile(tmpFile.toString)
assert(sc.env.securityManager.isAuthenticationEnabled() === true)
val testData = Array((1, 1), (1, 1), (2, 1), (3, 5), (2, 2), (3, 0))
val result = sc.parallelize(testData).reduceByKey {
val path = SparkFiles.get("FileServerSuite.txt")
val in = new BufferedReader(new FileReader(path))
val fileVal = in.readLine().toInt
in.close()
_ * fileVal + _ * fileVal
}.collect()
assert(result.toSet === Set((1, 200), (2, 300), (3, 500)))
}
  // Distributing local files using a URL as input
test("Distributing files locally using URL as input") {
// addFile("file:///....")
sc = new SparkContext("local[4]", "test", newConf)
sc.addFile(new File(tmpFile.toString).toURI.toString)
val testData = Array((1, 1), (1, 1), (2, 1), (3, 5), (2, 2), (3, 0))
val result = sc.parallelize(testData).reduceByKey {
val path = SparkFiles.get("FileServerSuite.txt")
val in = new BufferedReader(new FileReader(path))
val fileVal = in.readLine().toInt
in.close()
_ * fileVal + _ * fileVal
}.collect()
assert(result.toSet === Set((1, 200), (2, 300), (3, 500)))
}
  // Dynamically adding JARs locally
test ("Dynamically adding JARS locally") {
sc = new SparkContext("local[4]", "test", newConf)
sc.addJar(tmpJarUrl)
val testData = Array((1, 1))
sc.parallelize(testData).foreach { x =>
      // Thread.currentThread().getContextClassLoader returns the context class loader of the current thread
if (Thread.currentThread.getContextClassLoader.getResource("FileServerSuite.txt") == null) {
throw new SparkException("jar not added")
}
}
}
  // Distributing files on a standalone cluster
test("Distributing files on a standalone cluster") {
//sc = new SparkContext("local-cluster[1,1,1024]", "test", newConf)
sc = new SparkContext("local[*]", "test", newConf)
sc.addFile(tmpFile.toString)
val testData = Array((1, 1), (1, 1), (2, 1), (3, 5), (2, 2), (3, 0))
val result = sc.parallelize(testData).reduceByKey {
val path = SparkFiles.get("FileServerSuite.txt")
val in = new BufferedReader(new FileReader(path))
val fileVal = in.readLine().toInt
in.close()
_ * fileVal + _ * fileVal
}.collect()
assert(result.toSet === Set((1, 200), (2, 300), (3, 500)))
}
  // Dynamically adding JARs on a standalone cluster
test ("Dynamically adding JARS on a standalone cluster") {
//sc = new SparkContext("local-cluster[1,1,1024]", "test", newConf)
sc = new SparkContext("local[*]", "test", newConf)
sc.addJar(tmpJarUrl)
val testData = Array((1, 1))
sc.parallelize(testData).foreach { x =>
      // Thread.currentThread().getContextClassLoader returns the context class loader of the current thread
if (Thread.currentThread.getContextClassLoader.getResource("FileServerSuite.txt") == null) {
throw new SparkException("jar not added")
}
}
}
  // Dynamically adding JARS on a standalone cluster using a local: URL
test ("Dynamically adding JARS on a standalone cluster using local: URL") {
//sc = new SparkContext("local-cluster[1,1,1024]", "test", newConf)
sc = new SparkContext("local[*]", "test", newConf)
sc.addJar(tmpJarUrl.replace("file", "local"))
val testData = Array((1, 1))
sc.parallelize(testData).foreach { x =>
      // Thread.currentThread().getContextClassLoader returns the context class loader of the current thread
if (Thread.currentThread.getContextClassLoader.getResource("FileServerSuite.txt") == null) {
throw new SparkException("jar not added")
}
}
}
  // HttpFileServer should work with SSL
test ("HttpFileServer should work with SSL") {
val sparkConf = sparkSSLConfig()
val sm = new SecurityManager(sparkConf)
val server = new HttpFileServer(sparkConf, sm, 0)
try {
server.initialize()
fileTransferTest(server, sm)
} finally {
server.stop()
}
}
  // HttpFileServer should work with SSL and good credentials
test ("HttpFileServer should work with SSL and good credentials") {
val sparkConf = sparkSSLConfig()
    // Whether internal authentication is enabled
    sparkConf.set("spark.authenticate", "true")
    // Secret key used for authentication between components
    sparkConf.set("spark.authenticate.secret", "good")
val sm = new SecurityManager(sparkConf)
val server = new HttpFileServer(sparkConf, sm, 0)
try {
server.initialize()
fileTransferTest(server, sm)
} finally {
server.stop()
}
}
  // HttpFileServer should not work with valid SSL and bad credentials
test ("HttpFileServer should not work with valid SSL and bad credentials") {
val sparkConf = sparkSSLConfig()
    // Whether internal authentication is enabled
    sparkConf.set("spark.authenticate", "true")
    // Secret key used for authentication between components
    sparkConf.set("spark.authenticate.secret", "bad")
val sm = new SecurityManager(sparkConf)
val server = new HttpFileServer(sparkConf, sm, 0)
try {
server.initialize()
intercept[IOException] {
fileTransferTest(server)
}
} finally {
server.stop()
}
}
test ("HttpFileServer should not work with SSL when the server is untrusted") {
val sparkConf = sparkSSLConfigUntrusted()
val sm = new SecurityManager(sparkConf)
val server = new HttpFileServer(sparkConf, sm, 0)
try {
server.initialize()
intercept[SSLException] {
fileTransferTest(server)
}
} finally {
server.stop()
}
}
  // File transfer test
def fileTransferTest(server: HttpFileServer, sm: SecurityManager = null): Unit = {
val randomContent = RandomUtils.nextBytes(100)
val file = File.createTempFile("FileServerSuite", "sslTests", tmpDir)
Files.write(randomContent, file)
server.addFile(file)
val uri = new URI(server.serverUri + "/files/" + file.getName)
val connection = if (sm != null && sm.isAuthenticationEnabled()) {
Utils.constructURIForAuthentication(uri, sm).toURL.openConnection()
} else {
uri.toURL.openConnection()
}
if (sm != null) {
Utils.setupSecureURLConnection(connection, sm)
}
    // Download the file
val buf = ByteStreams.toByteArray(connection.getInputStream)
assert(buf === randomContent)
}
}
|
tophua/spark1.52
|
core/src/test/scala/org/apache/spark/FileServerSuite.scala
|
Scala
|
apache-2.0
| 10,119 |
package com.ruimo.crop
import javafx.scene.paint.Color
import javafx.application.Application
import javafx.scene.Scene
import javafx.scene.layout.StackPane
import javafx.stage.Stage
import javafx.scene.image.Image
import javafx.scene.image.ImageView
import javafx.scene.control.ScrollPane
class Crop extends Application {
lazy val url = getParameters.getRaw.get(0)
lazy val image = new Image(url)
lazy val width = image.getWidth
lazy val height = image.getHeight
lazy val pixelReader = image.getPixelReader
override def start(primaryStage: Stage) {
inspectImage(image)
val root = new StackPane()
root.getChildren().add(new ScrollPane(new ImageView(image)))
val scene = new Scene(root, 600, 400)
primaryStage.setTitle("Image Read Test")
primaryStage.setScene(scene)
primaryStage.show()
}
def inspectImage(img: Image) {
println("width: " + width)
println("height: " + height)
println("top bound: " + findTopBound)
println("left bound: " + findLeftBound)
println("bottom bound: " + findBottomBound)
println("right bound: " + findRightBound)
}
def blackCountX(x: Int): Int = (0 until height.toInt).foldLeft(0) { (sum, y) =>
sum + (if (pixelReader.getColor(x, y) == Color.BLACK) 1 else 0)
}
def blackCountY(y: Int): Int = (0 until width.toInt).foldLeft(0) { (sum, x) =>
sum + (if (pixelReader.getColor(x, y) == Color.BLACK) 1 else 0)
}
def findBlackBoundFromLeft: Option[Int] = {
val threshold = (height * 0.8).toInt
(0 until width.toInt) find { x => blackCountX(x) > threshold }
}
def findBlackBoundFromRight: Option[Int] = {
val threshold = (height * 0.8).toInt
(width.toInt - 1 to 0 by -1) find { x => blackCountX(x) > threshold }
}
def findBlackBoundFromTop: Option[Int] = {
val threshold = (width * 0.8).toInt
(0 until height.toInt) find { y => blackCountY(y) > threshold }
}
def findBlackBoundFromBottom: Option[Int] = {
val threshold = (width * 0.8).toInt
(height.toInt - 1 to 0 by -1) find { y => blackCountY(y) > threshold }
}
def findNonBlackBoundFromXToRight(startX: Int): Option[Int] = {
val threshold = (height * 0.3).toInt
(startX until width.toInt) find { x => blackCountX(x) < threshold }
}
def findNonBlackBoundFromXToLeft(startX: Int): Option[Int] = {
val threshold = (height * 0.3).toInt
(startX to 0 by -1) find { x => blackCountX(x) < threshold }
}
def findNonBlackBoundFromYToDown(startY: Int): Option[Int] = {
val threshold = (width * 0.3).toInt
(startY until height.toInt) find { y => blackCountY(y) < threshold }
}
def findNonBlackBoundFromYToUp(startY: Int): Option[Int] = {
val threshold = (width * 0.3).toInt
    (startY to 0 by -1) find { y => blackCountY(y) < threshold } // search upward from startY, mirroring findNonBlackBoundFromXToLeft
}
def findLeftBound: Option[Int] = findBlackBoundFromLeft.flatMap(findNonBlackBoundFromXToRight)
def findRightBound: Option[Int] = findBlackBoundFromRight.flatMap(findNonBlackBoundFromXToLeft)
def findTopBound: Option[Int] = findBlackBoundFromTop.flatMap(findNonBlackBoundFromYToDown)
def findBottomBound: Option[Int] = findBlackBoundFromBottom.flatMap(findNonBlackBoundFromYToUp)
}
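// Hypothetical launcher sketch (not part of the original file): a JavaFX Application is
// normally started through Application.launch, passing the image URL expected by
// Crop.getParameters as the first program argument.
object CropLauncher {
  def main(args: Array[String]): Unit =
    Application.launch(classOf[Crop], args: _*)
}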
|
ruimo/crop
|
src/main/scala/com/ruimo/crop/Crop.scala
|
Scala
|
apache-2.0
| 3,199 |
package org.drooms.gui.swing
import java.io.File
import java.io.FileOutputStream
import java.io.Writer
import java.util.Properties
import org.drooms.gui.swing.event.EventBusFactory
import org.drooms.gui.swing.event.GameStateChanged
import org.drooms.gui.swing.event.NewTurnAvailable
import org.drooms.gui.swing.util.IOUtils
import org.drooms.impl.DefaultGame
import org.drooms.impl.DroomsGame
import org.drooms.impl.util.PlayerAssembly
import com.typesafe.scalalogging.slf4j.Logging
import javax.swing.SwingUtilities
object RealTimeGameController extends Logging {
/**
   * Creates a new real-time game controller based on the specified configuration.
*/
def createNew(gameConfig: GameConfig): RealTimeGameController = {
val reportDir = new File("reports")
val playersFile = File.createTempFile("drooms-swing-gui", "players")
playersFile.deleteOnExit()
val output = new FileOutputStream(playersFile)
val props = new Properties()
for (player <- gameConfig.players) {
props.setProperty(player.name, player.strategyClass + "@file://" + player.jar.get)
}
props.store(output, "")
output.flush()
output.close()
new RealTimeGameController(classOf[org.drooms.impl.DefaultGame], reportDir, gameConfig.playgroundFile,
playersFile, gameConfig.players, gameConfig.gameProperties)
}
}
/**
 * Class used for controlling (starting, pausing, stopping, etc.) a real-time Drooms game.
 *
 * The game is started in a new thread and turns are published as they finish over time.
 *
 * @see GameController
 */
class RealTimeGameController(
/** Drooms game class used to drive the game. */
val gameClass: Class[org.drooms.impl.DefaultGame],
/** Directory used to store game reports. */
val reportDir: File,
/** Playground definition */
val playgroundFile: File,
/** List of players that will be playing the game. */
val playersFile: File,
val players: List[PlayerInfo],
/** Game configuration file */
val gamePropertiesFile: File)
extends org.drooms.api.GameProgressListener with Logging {
val eventBus = EventBusFactory.get()
recreateStrategyJars(players)
val playground = new DroomsGame(gameClass, playgroundFile, new PlayerAssembly(playersFile).assemblePlayers(),
gamePropertiesFile, reportDir).getPlayground()
  /**
   * Turn steps for the current, not yet finished {@link Turn}. These steps are gathered from the background
   * {@link Game} thread based on the incoming events.
   */
private var currentTurnSteps = List[TurnStep]()
private var currentTurnState: TurnState = _
def createInitialState(): TurnState = {
val playgroundWidth = playground.getWidth()
val playgroundHeight = playground.getHeight()
val playgroundEmptyNodes =
(for (
x <- 0 until playgroundWidth;
y <- 0 until playgroundHeight;
if (playground.isAvailable(x, y))
) yield Node(x, y)).toSet
val initialPlayground = new PlaygroundModel(playgroundWidth, playgroundHeight, EventBusFactory.getNoOp())
initialPlayground.emptyNodes(playgroundEmptyNodes)
//initialPlayground.initWorms(wormInitPositions)
val initPlayers = players.map(_.name -> 0).toMap
new TurnState(initialPlayground, initPlayers)
}
  /** Flag indicating whether the game has finished (no more turns will be produced). */
@volatile
private var finished = false
// TODO use actors??
private var gameThread: Thread = _
private var currentTurnNumber = 0
  /**
   * Starts a new {@link DroomsGame} in a background thread.
   */
def startOrContinueGame(): Unit = {
currentTurnNumber = 0
currentTurnSteps = List()
currentTurnState = createInitialState()
logger.info("Starting new Drooms game.")
val listener = this
// TODO determine if want to start or continue the game
// recreate the jars with strategies
logger.debug("Players in the game: " + players)
recreateStrategyJars(players)
gameThread = new Thread() {
override def run() {
val game = new DroomsGame(gameClass, playgroundFile, new PlayerAssembly(playersFile).assemblePlayers(), gamePropertiesFile, reportDir)
game.addListener(listener)
game.play("Drooms game")
}
}
// TODO log info like, game props, players info, etc
gameThread.start()
finished = false
eventBus.publish(GameStateChanged(GameRunning))
logger.info("Game successfully started.")
}
private def recreateStrategyJars(playersInfo: List[PlayerInfo]): Unit = {
logger.debug("Re-creating strategy jars...")
for (player <- playersInfo) {
player.strategyDir match {
case Some(dir) =>
logger.info(s"Re-creating strategy jar for player '${player.name}'")
// re-create the jar with fresh contents
IOUtils.createJarFromDir(player.jar.get, dir)
case None => // nothing to do
}
}
}
def pauseGame(): Unit = ???
def restartGame(): Unit = {
logger.info("Restarting game...")
stopGame()
startOrContinueGame()
}
def stopGame(): Unit = {
logger.info("Stopping game...")
gameThread.stop()
currentTurnNumber = 0
}
  /**
   * Indicates whether the next turn is available at the moment.
   *
   * The game does not have to have a next turn available even though it is not yet finished.
   * A finished game has no more turns to perform. The next turn may be unavailable at the time
   * hasNextTurn() is called and only become available later, once the background {@link Game} finishes a turn.
   */
def hasNextTurn(): Boolean = !finished
def isGameFinished(): Boolean = finished
/////////////////////////////////////////////////////////////////////////////
// GameProgressListener methods
/////////////////////////////////////////////////////////////////////////////
/**
* Called from the background running {@link org.drooms.api.Game} before the start of the next turn.
*
* New {@link Turn} is created from current {@link TurnStep} and the list of current turn steps is cleared.
*/
def nextTurn(): Unit = {
// publish current turn
val newTurn = new GameTurn(currentTurnNumber, currentTurnSteps.reverse)
logger.debug(s"New turn number ${currentTurnNumber} available")
currentTurnNumber += 1
currentTurnSteps = List()
val newTurnState = TurnState.updateState(currentTurnState, newTurn)
currentTurnState = newTurnState
SwingUtilities.invokeAndWait(new Runnable() {
def run(): Unit = {
eventBus.publish(NewTurnAvailable(newTurn, newTurnState))
}
})
}
def collectibleAdded(c: org.drooms.api.Collectible): Unit = {
currentTurnSteps ::= new CollectibleAdded(createCollectible(c))
}
def collectibleCollected(c: org.drooms.api.Collectible, p: org.drooms.api.Player, points: Int): Unit = {
currentTurnSteps ::= new CollectibleCollected(p.getName(), createCollectible(c))
}
def collectibleRemoved(c: org.drooms.api.Collectible): Unit = {
currentTurnSteps ::= new CollectibleRemoved(createCollectible(c))
}
private def createCollectible(c: org.drooms.api.Collectible): Collectible = {
new Collectible(Node(c.getAt().getX(), c.getAt().getY()), c.expiresInTurn(), c.getPoints())
}
def playerCrashed(p: org.drooms.api.Player): Unit = {
currentTurnSteps ::= new WormCrashed(p.getName())
}
def playerDeactivated(p: org.drooms.api.Player): Unit = {
currentTurnSteps ::= new WormDeactivated(p.getName())
}
def playerPerformedAction(p: org.drooms.api.Player, m: org.drooms.api.Action, nodes: org.drooms.api.Node*): Unit = {
currentTurnSteps ::= new WormMoved(p.getName(), transformNodes(nodes))
}
def playerSurvived(p: org.drooms.api.Player, points: Int): Unit = {
currentTurnSteps ::= new WormSurvived(p.getName(), points)
}
private def transformNodes(nodes: Seq[org.drooms.api.Node]): List[Node] = {
(for (node <- nodes) yield Node(node.getX(), node.getY())).toList
}
/**
* Write a report of the current state.
*
* @param w
* Where to write.
* @throws IOException
* When it cannot be written.
*/
def write(w: Writer): Unit = {
// called at the end of the game
logger.debug("Game finished!")
finished = true
eventBus.publish(GameStateChanged(GameFinished))
}
}
trait GameState
case object GameNotStarted extends GameState
case object GameRunning extends GameState
case object GamePaused extends GameState
case object GameStopped extends GameState
case object GameFinished extends GameState
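// Hedged usage sketch (added): how the Swing layer is expected to drive this
// controller. `gameConfig` is assumed to be a fully populated GameConfig.
//
//   val controller = RealTimeGameController.createNew(gameConfig)
//   controller.startOrContinueGame()  // publishes GameStateChanged(GameRunning)
//                                     // and NewTurnAvailable events on the event bus
//   // ... later ...
//   controller.stopGame()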
|
tomason/drooms
|
drooms-swing-gui/src/main/scala/org/drooms/gui/swing/RealTimeGameController.scala
|
Scala
|
apache-2.0
| 8,472 |
package org.webjars.play
import play.api.http.MimeTypes
import play.api.test.{FakeRequest, PlaySpecification, WithApplication}
class RequireJSSpec extends PlaySpecification {
"RequireJS" should {
// todo: validate this is valid JS
"produce a setup" in new WithApplication {
val requireJs = app.injector.instanceOf[RequireJS]
val result = requireJs.setup()(FakeRequest())
status(result) must equalTo(OK)
contentType(result) must beSome(MimeTypes.JAVASCRIPT)
contentAsString(result) must contain("return [\\n '/' + webJarId")
contentAsString(result) must contain("\\"requirejs\\":\\"2.3.6\\"")
}
"produce a setup using a cdn" in new WithApplication(_.configure("webjars.use-cdn" -> "true")) {
val requireJs = app.injector.instanceOf[RequireJS]
val result = requireJs.setup()(FakeRequest())
status(result) must equalTo(OK)
contentType(result) must beSome(MimeTypes.JAVASCRIPT)
contentAsString(result) must contain("""["https://cdn.jsdelivr.net/webjars/react/0.12.2/react","/react/0.12.2/react","react"]""")
}
}
}
|
webjars/webjars-play
|
src/test/scala/org/webjars/play/RequireJSSpec.scala
|
Scala
|
mit
| 1,118 |
package com.bne.testutil
class MysqlTestServer extends ExternalServer {
override val serverName = "Mysql Server"
override def cmd = Seq("mysql.server","start","--port="+address.get.getPort)
override val serverPresentCmd = ("mysql.server","--help")
override def cmd_shutdown = Seq("mysql.server","stop","--port="+address.get.getPort)
}
|
zhanggl/testutil
|
src/main/scala/com/bne/testutil/MysqlTestServer.scala
|
Scala
|
apache-2.0
| 339 |
package org.cristal.repository
import akka.actor.{Actor, ActorRef, Props}
import org.cristal.api.ApiMessage
import org.cristal.model.{NewUser, User}
import org.cristal.repository.UserRepository.{CreateUser, RetrieveUser, UserCreated}
import org.cristal.repository.dao.UserDAO
import scala.util.{Failure, Success}
import com.github.t3hnar.bcrypt._
class UserRepository(userDAO: UserDAO) extends Actor {
override def receive: Receive = {
case CreateUser(newUser, requester) =>
      // the dispatcher provides the ExecutionContext required by onComplete below
      implicit val ec: scala.concurrent.ExecutionContext = context.dispatcher
val encryptedPassword = newUser.password.bcrypt
val user = User(newUser.username, encryptedPassword, newUser.email, newUser.firstName, newUser.lastName)
userDAO.insert(user) onComplete {
case Success(_) => requester ! UserCreated(user)
        case Failure(_) => // failure is currently ignored; the requester is not notified
}
case RetrieveUser() =>
}
}
object UserRepository {
def props(userDAO: UserDAO) = Props(new UserRepository(userDAO))
case class CreateUser(newUser: NewUser, requester: ActorRef)
case class UserCreated(user: User)
case class RetrieveUser()
}
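// Hedged wiring sketch (added): creating the repository actor and asking it to
// create a user. `system`, `userDAO`, `newUser` and `requester` are assumed to
// exist in the caller's scope.
//
//   val repository = system.actorOf(UserRepository.props(userDAO), "user-repository")
//   repository ! UserRepository.CreateUser(newUser, requester)
//   // the requester eventually receives UserRepository.UserCreated(user)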
|
frecano/cristal
|
src/main/scala/org/cristal/repository/UserRepository.scala
|
Scala
|
gpl-3.0
| 1,079 |
package org.vitrivr.adampro.query
import org.vitrivr.adampro.config.AttributeNames
import org.vitrivr.adampro.data.datatypes.TupleID
import org.vitrivr.adampro.data.entity.AttributeDefinition
import org.vitrivr.adampro.query.distance.Distance
import org.vitrivr.adampro.query.distance.Distance.Distance
import org.apache.spark.sql.types.{FloatType, StructField, StructType}
/**
* adamtwo
*
* Ivan Giangreco
* August 2015
*/
object Result {
def resultSchema = StructType(Seq(
StructField(AttributeNames.internalIdColumnName, TupleID.SparkTupleID, nullable = true),
StructField(AttributeNames.distanceColumnName, Distance.SparkDistance, nullable = true)
))
}
|
dbisUnibas/ADAMpro
|
src/main/scala/org/vitrivr/adampro/query/Result.scala
|
Scala
|
mit
| 742 |
import sbt._
import Keys._
object Dependencies {
val dispatchVersion = "0.11.3"
val slf4jVersion = "1.7.12"
val log4j2Version = "2.3"
val specs2Version = "3.6.4"
val typesafeLogging = "com.typesafe.scala-logging" %% "scala-logging" % "3.1.0"
val betaMax = "co.freeside" % "betamax" % "1.1.2"
val bMaxTestSupport = "co.freeside" % "betamax-test-support" % "1.1.2"
val groovy = "org.codehaus.groovy" % "groovy-all" % "2.4.4"
def dispatchDep(artifactName: String) = {
"net.databinder.dispatch" %% s"dispatch-${artifactName}" % dispatchVersion
}
def slf4jDep(artifactName: String) = {
"org.slf4j" % s"slf4j-${artifactName}" % slf4jVersion
}
def log4j2Dep(artifactName: String) = {
"org.apache.logging.log4j" % s"log4j-${artifactName}" % log4j2Version
}
def specs2Dep(artifactName: String) = {
"org.specs2" %% s"specs2-${artifactName}" % specs2Version
}
val dispatchDependencies = Seq(dispatchDep("core"), dispatchDep("lift-json"))
val loggingDependencies = Seq(slf4jDep("api"), log4j2Dep("api"), log4j2Dep("core"), log4j2Dep("slf4j-impl"))
val typesafeDeps = Seq("com.github.kxbmap" %% "configs" % "0.2.4")
val testDependencies = Seq(specs2Dep("core") % "test", specs2Dep("matcher-extra") % "test", betaMax % "test", bMaxTestSupport % "test", groovy % "test")
val apiDependencies = dispatchDependencies ++ loggingDependencies ++ testDependencies ++ typesafeDeps
}
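// Hedged usage note (added): in the build definition these dependency groups
// are expected to be wired into a project roughly as follows.
//
//   libraryDependencies ++= Dependencies.apiDependencies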
|
chriswk/stmdb
|
project/Dependencies.scala
|
Scala
|
mit
| 1,423 |
package com.mesosphere.cosmos.model
case class SearchResponse(packages: Seq[SearchResult])
|
movicha/cosmos
|
cosmos-model/src/main/scala/com/mesosphere/cosmos/model/SearchResponse.scala
|
Scala
|
apache-2.0
| 92 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import akka.event.slf4j.SLF4JLogging
import akka.util.ByteString
import org.ensime.core.Protocol
/**
* FramedStringProtocol is used to support stream based messaging (e.g. SWANK or JERKY over TCP).
 * Each message consists of a 6-byte hexadecimal length header followed by the UTF-8 encoded message payload.
*/
trait FramedStringProtocol extends Protocol with SLF4JLogging {
protected def writeString(value: String): ByteString = {
if (value.isEmpty)
throw new IllegalStateException("Message to send is empty")
val data = ByteString(value, "UTF-8")
val header = ByteString("%06x".format(data.length), "UTF-8")
val message = header ++ data
if (log.isTraceEnabled) {
log.trace(message.utf8String)
}
message
}
val headerLen = 6
protected def tryReadString(bytes: ByteString): (Option[String], ByteString) = {
if (bytes.length < headerLen)
(None, bytes) // header is incomplete
else {
val header = bytes.take(headerLen)
val msgLen = Integer.valueOf(header.utf8String, 16).intValue()
if (msgLen == 0)
throw new IllegalStateException("Empty message read from socket!")
val totalMessageBytes = headerLen + msgLen
if (bytes.length < totalMessageBytes)
(None, bytes) // header is complete, but not all of message is ready
else {
// take the header and the message and drop the header
val (messageBytes, remainingBytes) = bytes.splitAt(totalMessageBytes)
val msgUTF8 = messageBytes.drop(headerLen).utf8String
(Some(msgUTF8), remainingBytes)
}
}
}
}
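// Illustration (added, not part of the original file): the framing produced by
// writeString for a short payload. It uses only ByteString, so it does not
// depend on the abstract members of Protocol.
object FramingExample extends App {
  val data = ByteString("hello", "UTF-8")
  val header = ByteString("%06x".format(data.length), "UTF-8") // "000005"
  println((header ++ data).utf8String)                         // prints "000005hello"
}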
|
d1egoaz/ensime-sbt
|
src/sbt-test/sbt-ensime/ensime-server/server/src/main/scala/org/ensime/server/FramedStringProtocol.scala
|
Scala
|
apache-2.0
| 1,692 |
package org.jetbrains.bsp.project
import java.io.{File, FileReader}
import java.util.{Collections, List => JList, Map => JMap}
import com.google.gson.Gson
import com.intellij.execution.configurations.SimpleJavaParameters
import com.intellij.openapi
import com.intellij.openapi.externalSystem.model.ProjectSystemId
import com.intellij.openapi.externalSystem.{ExternalSystemAutoImportAware, ExternalSystemConfigurableAware, ExternalSystemManager}
import com.intellij.openapi.fileChooser.FileChooserDescriptor
import com.intellij.openapi.options.Configurable
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.Key
import com.intellij.openapi.util.UserDataHolder
import com.intellij.openapi.vfs.LocalFileSystem
import com.intellij.util.Function
import org.jetbrains.bsp._
import org.jetbrains.bsp.project.importing.BspProjectResolver
import org.jetbrains.bsp.settings._
import org.jetbrains.bsp.project.BspExternalSystemManager.DetectExternalProjectFiles
import org.jetbrains.bsp.protocol.BspConnectionConfig
import scala.jdk.CollectionConverters._
import scala.util.Try
class BspExternalSystemManager extends ExternalSystemManager[BspProjectSettings, BspProjectSettingsListener, BspSettings, BspLocalSettings, BspExecutionSettings]
with ExternalSystemConfigurableAware
with ExternalSystemAutoImportAware
{
override def getSystemId: ProjectSystemId = BSP.ProjectSystemId
override def getSettingsProvider: Function[Project, BspSettings] = BspSettings.getInstance(_)
override def getLocalSettingsProvider: Function[Project, BspLocalSettings] = BspLocalSettings.getInstance(_)
override def getExecutionSettingsProvider: Function[openapi.util.Pair[Project, String], BspExecutionSettings] =
pair => BspExecutionSettings.executionSettingsFor(pair.first, new File(pair.second))
override def getProjectResolverClass: Class[BspProjectResolver] = classOf[BspProjectResolver]
override def getTaskManagerClass: Class[BspTaskManager] = classOf[BspTaskManager]
override def getExternalProjectDescriptor: FileChooserDescriptor = new BspOpenProjectDescriptor
override def getConfigurable(project: Project): Configurable = new BspExternalSystemConfigurable(project)
override def enhanceRemoteProcessing(parameters: SimpleJavaParameters): Unit = ()
override def getAffectedExternalProjectPath(changedFileOrDirPath: String, project: Project): String = {
if (detectExternalProjectFiles(project)) {
val file = new File(changedFileOrDirPath)
val isConfigFile = (BspConnectionConfig.isBspConfigFile(file) || BspUtil.isBloopConfigFile(file)) &&
BspUtil.workspaces(project).contains(file.getParentFile.toPath)
if (isConfigFile) file.getParentFile.getAbsolutePath
else null
} else null
}
override def getAffectedExternalProjectFiles(projectPath: String, project: Project): JList[File] = {
if (detectExternalProjectFiles(project)) {
val workspace = new File(projectPath)
val bspConfigs = BspConnectionConfig.workspaceConfigurationFiles(workspace)
val bloopConfigs = BspUtil.bloopConfigDir(workspace).toList
.flatMap(_.listFiles(file => file.getName.endsWith(".json")).toList)
(bspConfigs ++ bloopConfigs).asJava
} else {
Collections.emptyList()
}
}
private def detectExternalProjectFiles(project: Project): Boolean = {
cached(DetectExternalProjectFiles, project) {
if (BspUtil.isBspProject(project) && project.getBasePath != null) {
val workspace = new File(project.getBasePath)
val files = BspConnectionConfig.workspaceConfigurationFiles(workspace)
files
.flatMap(parseAsMap(_).toOption)
.forall { details =>
! details.get("X-detectExternalProjectFiles")
.contains(false)
}
} else true
}
}
private def parseAsMap(file: File): Try[Map[String, Any]] = Try {
new Gson()
.fromJson(new FileReader(file), classOf[JMap[String, _]])
.asScala
.toMap
}
private def cached[A](key: Key[A], holder: UserDataHolder)(compute: => A): A = {
Option(holder.getUserData(key)).getOrElse {
val computed = compute
holder.putUserData(key, computed)
computed
}
}
}
object BspExternalSystemManager {
val DetectExternalProjectFiles: Key[Boolean] = Key.create[Boolean]("BSP.detectExternalProjectFiles")
def parseAsMap(file: File): Map[String, Any] = {
val virtualFile = LocalFileSystem.getInstance.findFileByIoFile(file)
val content = new String(virtualFile.contentsToByteArray())
new Gson().fromJson(content, classOf[JMap[String, _]]).asScala.toMap
}
}
|
JetBrains/intellij-scala
|
bsp/src/org/jetbrains/bsp/project/BspExternalSystemManager.scala
|
Scala
|
apache-2.0
| 4,647 |
/*
* Copyright (C) 2017 Vincibean <Andrea Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.scala.impatient.chapter12
import scala.collection.immutable.Seq
/**
* Modify the previous function to return the input at which the output is largest.
* For example, largestAt(x => 10 * x - x * x, 1 to 10) should return 5. Don't use
* a loop or recursion.
*/
package object exercise6 {
def largestAt(fun: (Int) => Int, inputs: Seq[Int]): Int = if (inputs.isEmpty) {
-1
} else {
inputs.map(x => x -> fun(x)).maxBy(_._2)._1
}
}
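// Worked example (added): the case from the exercise text. Because the package
// object lives in org.vincibean.scala.impatient.chapter12, largestAt is
// reachable here as exercise6.largestAt.
object Exercise6Demo extends App {
  println(exercise6.largestAt(x => 10 * x - x * x, 1 to 10)) // prints 5
}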
|
Vincibean/ScalaForTheImpatient-Solutions
|
src/main/scala/org/vincibean/scala/impatient/chapter12/exercise6/package.scala
|
Scala
|
gpl-3.0
| 1,189 |
package com.argcv.valhalla.string.json
import com.argcv.valhalla.exception.ExceptionHelper.SafeExecWithTrace
import com.argcv.valhalla.reflect.ReflectHelper
import com.google.common.base.CaseFormat
/**
* @author yu
*/
trait JsonHelper {
def toJson(a: Any, compact: Boolean = true) = {
implicit val formats = JsonHelper.jsonFormatsWithDateTime
if (compact) net.liftweb.json.Printer.compact(net.liftweb.json.JsonAST.render(
net.liftweb.json.Extraction.decompose(a)))
else net.liftweb.json.Printer.pretty(net.liftweb.json.JsonAST.render(
net.liftweb.json.Extraction.decompose(a)))
}
def fromJson(s: String) = {
implicit val formats = JsonHelper.jsonFormatsWithDateTime
net.liftweb.json.JsonParser.parse(s).values
}
/**
* convert one string to some other type
*
* @param s string to convert
*/
implicit class JsonConverter(val s: String) {
def toSnakeCase = CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, s)
    /**
     * @param or fallback value returned if the string cannot be parsed as an Int
     * @return the parsed value, or the fallback
     */
def safeToIntOrElse(or: Int) = safeToInt match {
case Some(v: Int) => v
case None => or
}
/**
* @return
*/
def safeToInt = scala.util.control.Exception.catching(classOf[java.lang.NumberFormatException]) opt s.toInt
/**
* @param or or else
* @return
*/
def safeToLongOrElse(or: Long) = safeToLong match {
case Some(v: Long) => v
case None => or
}
/**
* @return
*/
def safeToLong = scala.util.control.Exception.catching(classOf[java.lang.NumberFormatException]) opt s.toLong
/**
* @param or or else
* @return
*/
def safeToDoubleOrElse(or: Double) = safeToDouble match {
case Some(v: Double) => v
case None => or
}
/**
* @return
*/
def safeToDouble = scala.util.control.Exception.catching(classOf[java.lang.NumberFormatException]) opt s.toDouble
/**
* @param or or else
* @return
*/
def safeToBooleanOrElse(or: Boolean) = safeToBoolean match {
case Some(v: Boolean) => v
case None => or
}
/**
* @return
*/
def safeToBoolean = scala.util.control.Exception.catching(classOf[java.lang.IllegalArgumentException]) opt s.toBoolean
/**
* @param or or else
* @return
*/
def parseJsonToMapOrElse(or: Map[String, Any]) = parseJsonToMap match {
case Some(v: Map[String, Any]) => v
case None => or
}
/**
* @return
*/
def parseJsonToMap = scala.util.control.Exception.catching(classOf[java.lang.ClassCastException]) opt parseJson.asInstanceOf[Map[String, Any]]
/**
* @param or or else
* @return
*/
def parseJsonToListOrElse(or: List[Any]) = parseJsonToList match {
case Some(v: List[Any]) => v
case None => or
}
/**
* @return
*/
def parseJsonToList = scala.util.control.Exception.catching(classOf[java.lang.ClassCastException]) opt parseJson.asInstanceOf[List[Any]]
/**
* @return
*/
def parseJson = {
//implicit val formats = net.liftweb.json.DefaultFormats
implicit val formats = JsonHelper.jsonFormatsWithDateTime
net.liftweb.json.JsonParser.parse(s).values
}
/**
* parse string type json to class T
*
* @tparam T type to apply
* @return opt class
*/
def parseJsonToClass[T: scala.reflect.ClassTag]: Option[T] = {
//implicit val formats = net.liftweb.json.DefaultFormats
implicit val formats = JsonHelper.jsonFormatsWithDateTime
implicit val mf = ReflectHelper.classTag2Manifest[T]
SafeExecWithTrace(net.liftweb.json.JsonParser.parse(s).extract[T])
}
}
implicit class ToJson(a: Any) {
def toJson: String = toJson()
def toJson(compact: Boolean = true): String = JsonHelper.toJson(a, compact)
}
}
object JsonHelper extends JsonHelper {
lazy val jsonFormatsWithDateTime = net.liftweb.json.DefaultFormats + DateTimeSerializer()
}
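// Hedged usage sketch (added): round-tripping a simple case class through the
// helpers above. Assumes lift-json can extract Point using the formats defined
// in JsonHelper.jsonFormatsWithDateTime.
case class Point(x: Int, y: Int)
object JsonHelperDemo extends App {
  import JsonHelper._
  val json = Point(1, 2).toJson             // {"x":1,"y":2}
  val back = json.parseJsonToClass[Point]   // Some(Point(1,2))
  println(s"$json -> $back")
}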
|
yuikns/valhalla
|
src/main/scala/com/argcv/valhalla/string/json/JsonHelper.scala
|
Scala
|
mit
| 4,007 |
package sampleclean.clean.extraction
import sampleclean.api.SampleCleanContext
import org.apache.spark.SparkContext._
import org.apache.spark.sql.SQLContext
import sampleclean.clean.algorithm.SampleCleanAlgorithm
import sampleclean.clean.algorithm.AlgorithmParameters
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SchemaRDD, Row}
/**
 * LearningSplitExtraction learns a set of delimiter characters from
 * user-provided examples (via addExample) and uses them to split the
 * configured attribute into the requested output columns.
 */
class LearningSplitExtraction(params:AlgorithmParameters,
scc: SampleCleanContext, sampleTableName: String) extends
AbstractExtraction(params, scc, sampleTableName) {
var delimSet:Set[Char] = Set()
if(!params.exists("attr"))
throw new RuntimeException("You need to specify an attribute to split")
val attrCol = scc.getColAsIndex(sampleTableName, params.get("attr").asInstanceOf[String])
val hashCol = scc.getColAsIndex(sampleTableName,"hash")
def extractFunction(data:SchemaRDD): Map[String,RDD[(String,String)]] = {
val extract = data.rdd.map(row => (row(hashCol).asInstanceOf[String], row(attrCol).toString().split(delimSet.toArray)))
var result:Map[String,RDD[(String,String)]] = Map()
for (col <- newCols)
{
result += (col -> extract.map(row => (row._1, getIndexIfExists(row._2,newCols.indexOf(col)) )))
}
return result
}
def addExample(attr:String, output:List[String]) = {
val joined = output.mkString("").toCharArray().toSet
val attrArray = attr.toCharArray().toSet
delimSet = delimSet ++ (attrArray &~ joined)
}
def getIndexIfExists(seq:Array[String], i:Int):String = {
if(i < seq.length)
return seq(i)
else
return ""
}
}
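// Hedged illustration (added): how delimiter learning behaves. Given the raw
// attribute value "2014-05-01" and the desired split List("2014", "05", "01"),
// addExample adds the characters present in the input but absent from the
// concatenated outputs to delimSet, so extractFunction will split on them.
//
//   extractor.addExample("2014-05-01", List("2014", "05", "01"))
//   // delimSet is now Set('-')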
|
sjyk/sampleclean-async
|
src/main/scala/sampleclean/clean/extract/LearningSplitExtraction.scala
|
Scala
|
apache-2.0
| 1,786 |
/*
* Copyright 2014 DataGenerator Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.datagenerator
import java.io.InputStream
import java.util
import org.finra.datagenerator.distributor.SearchDistributor
import org.finra.datagenerator.engine.{Engine, Frontier}
import scala.io
/**
* Engine Implementation for generating data
*
* Created by Brijesh on 5/26/2015.
*/
class RandomNumberEngine extends Engine {
var totalCount: Int = 0
var frontierList = new util.LinkedList[Frontier]()
  /**
   * Creates one Frontier instance per split and adds it to the frontier list,
   * then calls distribute, which distributes the work to Spark using map and reduce.
   *
   * @param distributor SearchDistributor
   */
def process(distributor: SearchDistributor): Unit = {
    // one frontier per split; each frontier produces numberInEachFrontier values
    for (i <- 0 until RandomNumberEngine.numSplit) {
val frontierImplementation = new RandomNumberFrontier
frontierList.add(frontierImplementation)
}
distributor.distribute(frontierList)
}
  /**
   * Reads two lines from the model input stream: the total number of values to
   * generate and the number of splits, then derives the count per frontier.
   *
   * @param inputFileStream the model input stream
   */
def setModelByInputFileStream(inputFileStream : InputStream) : Unit = {
val fileLines = io.Source.fromInputStream(inputFileStream).getLines()
    try {
      totalCount = fileLines.next().toInt
      RandomNumberEngine.numSplit = fileLines.next().toInt
    } catch {
      case e: NumberFormatException => throw new RuntimeException("File should have two lines, one int in each.")
    }
/*
try { (totalCount, RandomNumberEngine.numSplit) ;
(fileLines.next().toInt, fileLines.next().toInt)
} catch {
case e: NumberFormatException => throw new RuntimeException("File should have two lines, one int in each.")
}
*/
RandomNumberEngine.numberInEachFrontier = totalCount / RandomNumberEngine.numSplit
}
/**
* Set the model with a string
*
* @param model the model text
*/
def setModelByText(model: String) : Unit = {
// TODO set model with a string
???
}
/**
* bootstrapMin setter
*
* @param min set the desired bootstrap min
* @return this
*/
def setBootstrapMin(min: Int) : Engine = {
???
this
}
}
object RandomNumberEngine {
//Declare static variable in Object RNEngine
var numberInEachFrontier: Int = 0
var numSplit: Int = 0
}
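// Worked example (added): with a model file containing the two lines
//   100
//   4
// setModelByInputFileStream sets totalCount = 100 and numSplit = 4, so
// numberInEachFrontier = 100 / 4 = 25 random numbers per frontier.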
|
Brijeshrpatel9/SingleThreaderProcessingDG
|
dg-spark/src/main/code/org/finra/datagenerator/RandomNumberEngine.scala
|
Scala
|
apache-2.0
| 2,995 |
package s_mach.aeondb.internal
import scala.concurrent.Future
trait FutureMoment[A,B,PB] {
def find(key: A) : Future[Option[B]]
def deactivate(key: A) : FutureMoment[A,B,PB]
def reactivate(key: A, value: B) : FutureMoment[A,B,PB]
def put(key: A, value: B) : FutureMoment[A,B,PB]
def replace(key: A, value: B) : FutureMoment[A,B,PB]
}
|
S-Mach/aeondb
|
src/main/scala/s_mach/aeondb/internal/FutureMoment.scala
|
Scala
|
apache-2.0
| 349 |