| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.toree.dependencies
import java.io.{File, FileInputStream, PrintStream}
import java.net.{URI, URL}
import java.util.Properties
import java.util.concurrent.ConcurrentHashMap
import coursier.core.Authentication
import coursier.Cache.Logger
import coursier.Dependency
import coursier.core.Repository
import coursier.core.Resolution.ModuleVersion
import coursier.ivy.{IvyRepository, IvyXml}
import coursier.maven.MavenRepository
import org.springframework.core.io.support.PathMatchingResourcePatternResolver
import scala.util.Try
import scalaz.\/
import scalaz.concurrent.Task
/**
* Represents a dependency downloader for jars that uses Coursier underneath.
*/
class CoursierDependencyDownloader extends DependencyDownloader {
@volatile private var repositories: Seq[Repository] = Nil
@volatile private var printStream: PrintStream = System.out
@volatile private var localDirectory: URI = null
// Initialization
setDownloadDirectory(DependencyDownloader.DefaultDownloadDirectory)
addMavenRepository(DependencyDownloader.DefaultMavenRepository, None)
/**
* Retrieves the dependency and all of its dependencies as jars.
*
* @param groupId The group id associated with the main dependency
* @param artifactId The id of the dependency artifact
* @param version The version of the main dependency
* @param transitive If true, downloads all dependencies of the specified
* dependency
* @param excludeBaseDependencies If true, will exclude any dependencies
* included in the build of the kernel
* @param ignoreResolutionErrors If true, ignores any errors on resolving
* dependencies and attempts to download all
* successfully-resolved dependencies
* @param extraRepositories Additional repositories to use only for this
* dependency
* @param verbose If true, prints out additional information
* @param trace If true, prints trace of download process
*
* @return The sequence of strings pointing to the retrieved dependency jars
*/
override def retrieve(
groupId: String,
artifactId: String,
version: String,
transitive: Boolean,
excludeBaseDependencies: Boolean,
ignoreResolutionErrors: Boolean,
extraRepositories: Seq[(URL, Option[Credentials])] = Nil,
verbose: Boolean,
trace: Boolean,
configuration: Option[String] = None,
artifactType: Option[String] = None,
artifactClassifier: Option[String] = None,
excludes: Set[(String,String)] = Set.empty
): Seq[URI] = {
assert(localDirectory != null)
import coursier._
// Grab exclusions using base dependencies (always exclude scala lang)
val exclusions: Set[(String, String)] = (if (excludeBaseDependencies) {
getBaseDependencies.map(_.module).map(m => (m.organization, m.name))
} else Nil).toSet ++ Set(("org.scala-lang", "*"), ("org.scala-lang.modules", "*")) ++ excludes
// Mark dependency that we want to download
val start = Resolution(Set(
Dependency(
module = Module(organization = groupId, name = artifactId),
version = version,
transitive = transitive,
exclusions = exclusions, // NOTE: Source/Javadoc not downloaded by default
configuration = configuration.getOrElse("default"),
attributes = Attributes(
artifactType.getOrElse(""),
artifactClassifier.getOrElse("")
)
)
))
printStream.println(s"Marking $groupId:$artifactId:$version for download")
lazy val defaultBase = new File(localDirectory).getAbsoluteFile
lazy val downloadLocations = defaultBase
val allRepositories = extraRepositories.map(x => urlToMavenRepository(x._1, x._2.map(_.authentication))) ++ repositories
// Build list of locations to fetch dependencies
val fetchLocations = Seq(ivy2Cache(localDirectory)) ++ allRepositories
val fetch = Fetch.from(
fetchLocations,
Cache.fetch(downloadLocations, logger = Some(new DownloadLogger(verbose, trace)))
)
val fetchUris = localDirectory +: repositoriesToURIs(allRepositories)
if (verbose) {
printStream.println("Preparing to fetch from:")
printStream.println(s"-> ${fetchUris.mkString("\n-> ")}")
}
// Verify locations where we will download dependencies
val resolution = start.process.run(fetch).unsafePerformSync
// Report any resolution errors
val errors: Seq[(ModuleVersion, Seq[String])] = resolution.metadataErrors
errors.foreach { case (dep, e) =>
printStream.println(s"-> Failed to resolve ${dep._1.toString()}:${dep._2}")
e.foreach(s => printStream.println(s" -> $s"))
}
// If resolution errors, do not download
if (errors.nonEmpty && !ignoreResolutionErrors) return Nil
// Perform task of downloading dependencies
val localArtifacts: Seq[FileError \/ File] = Task.gatherUnordered(
resolution.artifacts.map(a => Cache.file(
artifact = a,
cache = downloadLocations,
logger = Some(new DownloadLogger(verbose, trace))
).run)).unsafePerformSync
// Print any errors in retrieving dependencies
localArtifacts.flatMap(_.swap.toOption).map(_.message)
.foreach(printStream.println)
// Print success
val uris = localArtifacts.flatMap(_.toOption).map(_.toURI)
if (verbose) uris.map(_.getPath).foreach(p => printStream.println(s"-> New file at $p"))
printStream.println("Obtained " + uris.size + " files")
uris
}
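// Illustrative usage sketch (the coordinates below are made-up example values, not taken
// from the original project):
//
//   val downloader = new CoursierDependencyDownloader
//   val jars: Seq[URI] = downloader.retrieve(
//     groupId = "org.example",
//     artifactId = "example-artifact",
//     version = "1.0.0",
//     transitive = true,
//     excludeBaseDependencies = true,
//     ignoreResolutionErrors = false,
//     verbose = true,
//     trace = false
//   )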
/**
* Adds the specified resolver url as an additional search option.
*
* @param url The url of the Maven repository to add
* @param credentials Optional credentials used to access the repository
*/
override def addMavenRepository(url: URL, credentials: Option[Credentials]): Unit =
repositories :+= urlToMavenRepository(url, credentials.map(_.authentication))
private def urlToMavenRepository(url: URL, credentials: Option[Authentication]) = MavenRepository(url.toString, authentication = credentials)
/**
* Remove the specified resolver url from the search options.
*
* @param url The url of the repository
*/
override def removeMavenRepository(url: URL): Unit = {
repositories = repositories.filterNot {
case maven: MavenRepository => url.toString == maven.root
case _ => false
}
}
/**
* Sets the printstream to log to.
*
* @param printStream The new print stream to use for output logging
*/
override def setPrintStream(printStream: PrintStream): Unit =
this.printStream = printStream
/**
* Returns a list of all repositories used by the downloader.
*
* @return The list of repositories as URIs
*/
def getRepositories: Seq[URI] = repositoriesToURIs(repositories)
/**
* Returns the current directory where dependencies will be downloaded.
*
* @return The directory as a string
*/
override def getDownloadDirectory: String =
new File(localDirectory).getAbsolutePath
/**
* Sets the directory where all downloaded jars will be stored.
*
* @param directory The directory to use
* @return True if successfully set directory, otherwise false
*/
override def setDownloadDirectory(directory: File): Boolean = {
val path = directory.getAbsolutePath
val cleanPath = if (path.endsWith("/")) path else path + "/"
val dir = new File(cleanPath)
if (!dir.exists() && !dir.mkdirs()) return false
if (!dir.isDirectory) return false
localDirectory = dir.toURI
true
}
private class DownloadLogger(
private val verbose: Boolean,
private val trace: Boolean
) extends Logger {
import scala.collection.JavaConverters._
private val downloadId = new ConcurrentHashMap[String, String]().asScala
private val downloadFile = new ConcurrentHashMap[String, File]().asScala
private val downloadAmount = new ConcurrentHashMap[String, Long]().asScala
private val downloadTotal = new ConcurrentHashMap[String, Long]().asScala
override def foundLocally(url: String, file: File): Unit = {
val id = downloadId.getOrElse(url, url)
val f = s"(${downloadFile.get(url).map(_.getName).getOrElse("")})"
if (verbose) printStream.println(s"=> $id: Found at ${file.getAbsolutePath}")
}
override def downloadingArtifact(url: String, file: File): Unit = {
downloadId.put(url, nextId())
val id = downloadId.getOrElse(url, url)
val f = s"(${downloadFile.get(url).map(_.getName).getOrElse("")})"
if (verbose) printStream.println(s"=> $id $f: Downloading $url")
downloadFile.put(url, file)
}
override def downloadLength(url: String, length: Long): Unit = {
val id = downloadId.getOrElse(url, url)
val f = s"(${downloadFile.get(url).map(_.getName).getOrElse("")})"
if (trace) printStream.println(s"===> $id $f: Is $length total bytes")
downloadTotal.put(url, length)
}
override def downloadProgress(url: String, downloaded: Long): Unit = {
downloadAmount.put(url, downloaded)
val ratio = downloadAmount(url).toDouble / downloadTotal.getOrElse[Long](url, 1).toDouble
val percent = ratio * 100.0
if (trace) printStream.printf(
"===> %s %s: Downloaded %d bytes (%.2f%%)\n",
downloadId.getOrElse(url, url),
s"(${downloadFile.get(url).map(_.getName).getOrElse("")})",
new java.lang.Long(downloaded),
new java.lang.Double(percent)
)
}
override def downloadedArtifact(url: String, success: Boolean): Unit = {
if (verbose) {
val id = downloadId.getOrElse(url, url)
val f = s"(${downloadFile.get(url).map(_.getName).getOrElse("")})"
if (success) printStream.println(s"=> $id $f: Finished downloading")
else printStream.println(s"=> $id: An error occurred while downloading")
}
}
private val nextId: () => String = (() => {
var counter: Long = 0
() => {
counter += 1
counter.toString
}
})()
}
/**
* Retrieves base dependencies used when building Toree modules.
*
* @return The collection of dependencies
*/
private def getBaseDependencies: Seq[Dependency] = {
import coursier.core.compatibility.xmlParse
// Find all of the *ivy.xml files on the classpath.
val ivyFiles = new PathMatchingResourcePatternResolver().getResources(
"classpath*:**/*ivy.xml"
)
val streams = ivyFiles.map(_.getInputStream)
val contents = streams.map(scala.io.Source.fromInputStream).map(_.getLines())
val nodes = contents.map(c => xmlParse(c.mkString("\n")))
// Report any errors reading XML
nodes.flatMap(_.left.toOption).foreach(s => printStream.println(s"Error: $s"))
// Grab Ivy XML projects
val projects = nodes.flatMap(_.right.toOption).map(IvyXml.project)
// Report any errors parsing Ivy XML
projects.flatMap(_.swap.toOption).foreach(s => printStream.println(s"Error: $s"))
// Grab dependencies from projects
val dependencies = projects.flatMap(_.toOption).flatMap(_.dependencies.map(_._2))
// Return unique dependencies
dependencies.distinct
}
/**
* Converts the provided repositories to their URI representations.
*
* @param repositories The repositories to convert
* @return The resulting URIs
*/
private def repositoriesToURIs(repositories: Seq[Repository]) =
repositories.map {
case ivy: IvyRepository => ivy.pattern.string
case maven: MavenRepository => maven.root
}.map(s => Try(new URI(s))).filter(_.isSuccess).map(_.get)
/** Creates new Ivy2 local repository using base home URI. */
private def ivy2Local(ivy2HomeUri: URI) = IvyRepository.parse(
ivy2HomeUri.toString + "local/" +
"[organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)" +
"[revision]/[type]s/[artifact](-[classifier]).[ext]"
).toOption.get
/** Creates new Ivy2 cache repository using base home URI. */
private def ivy2Cache(ivy2HomeUri: URI) = IvyRepository.parse(
ivy2HomeUri.toString + "cache/" +
"(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[organisation]/[module]/" +
"[type]s/[artifact]-[revision](-[classifier]).[ext]",
metadataPatternOpt = Some(
ivy2HomeUri + "cache/" +
"(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[organisation]/[module]/" +
"[type]-[revision](-[classifier]).[ext]"
),
withChecksums = false,
withSignatures = false,
dropInfoAttributes = true
).toOption.get
}
sealed abstract class Credentials extends Product with Serializable {
def user: String
def password: String
def host: String
def authentication: Authentication =
Authentication(user, password)
}
object Credentials {
case class FromFile(file: File) extends Credentials {
private lazy val props = {
val p = new Properties()
p.load(new FileInputStream(file))
p
}
private def findKey(keys: Seq[String]) = keys
.iterator
.map(props.getProperty)
.filter(_ != null)
.toStream
.headOption
.getOrElse {
throw new NoSuchElementException(s"${keys.head} key in $file")
}
lazy val user: String = findKey(FromFile.fileUserKeys)
lazy val password: String = findKey(FromFile.filePasswordKeys)
lazy val host: String = findKey(FromFile.fileHostKeys)
}
object FromFile {
// from sbt.Credentials
private val fileUserKeys = Seq("user", "user.name", "username")
private val filePasswordKeys = Seq("password", "pwd", "pass", "passwd")
private val fileHostKeys = Seq("host", "host.name", "hostname", "domain")
}
def apply(file: File): Credentials =
FromFile(file)
}
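// Illustrative credentials file for Credentials.FromFile (property names come from the key
// lists above; the values and file name are hypothetical):
//
//   # repo.credentials
//   user=deployer
//   password=s3cret
//   host=repo.example.com
//
//   val creds = Credentials(new File("repo.credentials"))
//   val auth = creds.authentication // coursier Authentication built from user/password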
| lresende/incubator-toree | kernel-api/src/main/scala/org/apache/toree/dependencies/CoursierDependencyDownloader.scala | Scala | apache-2.0 | 14,556 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.FieldGroupTrl
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 07/11/17.
*/
/**
* Field Group Trl Repository
* @param session JDBC session used to run the queries
* @param executionContext Execution context for the asynchronous operations
*/
class FieldGroupTrlRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
extends api.repository.FieldGroupTrlRepository[FieldGroupTrl , Int]
with FieldGroupTrlMapping {
def getById(id: Int): Future[FieldGroupTrl] = {
getByLanguage(id , "en_US")
}
def getByLanguage(id: Int , lang : String): Future[FieldGroupTrl] = {
Future(run(queryFieldGroupTrl.filter(fieldGroup => fieldGroup.fieldGroupId == lift(id)
&& fieldGroup.language == lift(lang))).headOption.get)
}
def getByUUID(uuid: UUID): Future[FieldGroupTrl] = {
Future(run(queryFieldGroupTrl.filter(_.uuid == lift(uuid.toString))).headOption.get)
}
def getByFieldGroupTrlId(id : Int) : Future[List[FieldGroupTrl]] = {
Future(run(queryFieldGroupTrl))
}
def getAll() : Future[List[FieldGroupTrl]] = {
Future(run(queryFieldGroupTrl))
}
def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[FieldGroupTrl]] = {
val offset = page * pageSize
val limit = (page + 1) * pageSize
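// e.g. (illustrative) page = 2 and pageSize = 10 give offset = 20 and limit = 30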
for {
count <- countFieldGroupTrl()
elements <- if (offset > count) Future.successful(Nil)
else selectFieldGroupTrl(offset, limit)
} yield {
PaginatedSequence(elements, page, pageSize, count)
}
}
private def countFieldGroupTrl() = {
Future(run(queryFieldGroupTrl.size).toInt)
}
private def selectFieldGroupTrl(offset: Int, limit: Int): Future[Seq[FieldGroupTrl]] = {
Future(run(queryFieldGroupTrl).drop(offset).take(limit).toSeq)
}
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/FieldGroupTrlRepository.scala | Scala | gpl-3.0 | 2,987 |
/*
Copyright (c) 2014 by Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ml.dmlc.xgboost4j.scala.spark
import java.io.{File, FileNotFoundException}
import org.apache.spark.SparkConf
import org.apache.spark.ml.feature._
import org.apache.spark.ml.{Pipeline, PipelineModel}
import org.apache.spark.sql.SparkSession
import scala.concurrent.duration._
case class Foobar(TARGET: Int, bar: Double, baz: Double)
class XGBoostSparkPipelinePersistence extends SharedSparkContext with Utils {
override def afterAll(): Unit = {
super.afterAll()
delete(new File("./testxgbPipe"))
delete(new File("./test2xgbPipe"))
}
private def delete(f: File) {
if (f.isDirectory()) {
for (c <- f.listFiles()) {
delete(c)
}
}
if (!f.delete()) {
throw new FileNotFoundException("Failed to delete file: " + f)
}
}
test("test sparks pipeline persistence of dataframe-based model") {
// maybe move to shared context, but requires session to import implicits.
// what about introducing https://github.com/holdenk/spark-testing-base ?
val conf: SparkConf = new SparkConf()
.setAppName("foo")
.setMaster("local[*]")
val spark: SparkSession = SparkSession
.builder()
.config(conf)
.getOrCreate()
import spark.implicits._
// maybe move to shared context, but requires session to import implicits
val df = Seq(Foobar(0, 0.5, 1), Foobar(1, 0.01, 0.8),
Foobar(0, 0.8, 0.5), Foobar(1, 8.4, 0.04))
.toDS
val vectorAssembler = new VectorAssembler()
.setInputCols(df.columns
.filter(!_.contains("TARGET")))
.setOutputCol("features")
val xgbEstimator = new XGBoostEstimator(Map("num_rounds" -> 10,
"tracker_conf" -> TrackerConf(1 minute, "scala")
))
.setFeaturesCol("features")
.setLabelCol("TARGET")
// separate
val predModel = xgbEstimator.fit(vectorAssembler.transform(df))
predModel.write.overwrite.save("test2xgbPipe")
val same2Model = XGBoostModel.load("test2xgbPipe")
assert(java.util.Arrays.equals(predModel.booster.toByteArray, same2Model.booster.toByteArray))
val predParamMap = predModel.extractParamMap()
val same2ParamMap = same2Model.extractParamMap()
assert(predParamMap.get(predModel.useExternalMemory)
=== same2ParamMap.get(same2Model.useExternalMemory))
assert(predParamMap.get(predModel.featuresCol) === same2ParamMap.get(same2Model.featuresCol))
assert(predParamMap.get(predModel.predictionCol)
=== same2ParamMap.get(same2Model.predictionCol))
assert(predParamMap.get(predModel.labelCol) === same2ParamMap.get(same2Model.labelCol))
assert(predParamMap.get(predModel.labelCol) === same2ParamMap.get(same2Model.labelCol))
// chained
val predictionModel = new Pipeline().setStages(Array(vectorAssembler, xgbEstimator)).fit(df)
predictionModel.write.overwrite.save("testxgbPipe")
val sameModel = PipelineModel.load("testxgbPipe")
val predictionModelXGB = predictionModel.stages.collect { case xgb: XGBoostModel => xgb } head
val sameModelXGB = sameModel.stages.collect { case xgb: XGBoostModel => xgb } head
assert(java.util.Arrays.equals(
predictionModelXGB.booster.toByteArray,
sameModelXGB.booster.toByteArray
))
val predictionModelXGBParamMap = predictionModel.extractParamMap()
val sameModelXGBParamMap = sameModel.extractParamMap()
assert(predictionModelXGBParamMap.get(predictionModelXGB.useExternalMemory)
=== sameModelXGBParamMap.get(sameModelXGB.useExternalMemory))
assert(predictionModelXGBParamMap.get(predictionModelXGB.featuresCol)
=== sameModelXGBParamMap.get(sameModelXGB.featuresCol))
assert(predictionModelXGBParamMap.get(predictionModelXGB.predictionCol)
=== sameModelXGBParamMap.get(sameModelXGB.predictionCol))
assert(predictionModelXGBParamMap.get(predictionModelXGB.labelCol)
=== sameModelXGBParamMap.get(sameModelXGB.labelCol))
assert(predictionModelXGBParamMap.get(predictionModelXGB.labelCol)
=== sameModelXGBParamMap.get(sameModelXGB.labelCol))
}
}
| RPGOne/Skynet | xgboost-master/jvm-packages/xgboost4j-spark/src/test/scala/ml/dmlc/xgboost4j/scala/spark/XGBoostSparkPipelinePersistence.scala | Scala | bsd-3-clause | 4,624 |
package com.github.sstone.amqp
import akka.actor.{Props, Actor, ActorSystem}
import com.github.sstone.amqp.Amqp._
import com.rabbitmq.client.ConnectionFactory
import scala.concurrent.duration._
object Test1 extends App {
implicit val system = ActorSystem("mySystem")
// create an AMQP connection
val conn = system.actorOf(ConnectionOwner.props(new ConnectionFactory(), reconnectionDelay = 5.seconds), "connection")
// create an actor that will receive AMQP deliveries
val listener = system.actorOf(Props(new Actor {
def receive = {
case Delivery(_, envelope, _, body) => {
println("got a message: " + new String(body))
sender ! Ack(envelope.getDeliveryTag)
}
}
}))
// create a consumer that will route incoming AMQP messages to our listener
val queueParams = QueueParameters("my_queue", passive = false, durable = false, exclusive = false, autodelete = true)
// we initialize our consumer with an AddBinding request: the queue and the binding will be recreated if the connection
// to the broker is lost and restored
val consumer = ConnectionOwner.createChildActor(conn, Consumer.props(listener, StandardExchanges.amqDirect, queueParams, "my_key", channelParams = None, autoack = true))
// wait till everyone is actually connected to the broker
Amqp.waitForConnection(system, consumer).await()
// run the Producer sample now and see what happens
println("press enter...")
System.in.read()
system.terminate()
}
| gawkermedia/amqp-client | src/test/scala/com/github.sstone/amqp/Test1.scala | Scala | mit | 1,488 |
/*
* IP4S/src/main/scala/core//IPLcore.scala
*
* Copyright (C) 2017 K.Takeuchi
*
* A class that implements the core function of this wrapper.
* This class has main functions of reading, writing and displaying images.
*/
package ip4s
package core
import javax.imageio.ImageIO
import java.awt.image.BufferedImage
import java.io.File
import ip4s.function.{CoreFunction,FileExtensionParser}
import ip4s.core.{Conversion,Pixel}
class IPLcore extends CoreFunction{
// Reads an image file into a two-dimensional pixel array
override def read(name:String):Array[Array[Pixel]]
= Conversion.convertToArray( ImageIO.read( new File(name) ) )
override def read(file:File):Array[Array[Pixel]]
= Conversion.convertToArray( ImageIO.read( file ) )
// Writes image data held as a two-dimensional pixel array out as an image file
override def write(name:String, image:Array[Array[Pixel]]):Unit = {
val extension:String = FileExtensionParser.parse(name)
try{
ImageIO.write( Conversion.convertToImage(image), extension, (new File(name)) )
}catch{
case e:Exception => e.printStackTrace()
}
}
override def write(file:File, image:Array[Array[Pixel]]):Unit={
val extension:String = FileExtensionParser.parse(file.getName())
try{
ImageIO.write( Conversion.convertToImage(image), extension, file )
}catch{
case e:Exception => e.printStackTrace()
}
}
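// Illustrative round trip (file names are hypothetical):
//   val ipl = new IPLcore
//   val pixels: Array[Array[Pixel]] = ipl.read("input.png")
//   ipl.write("output.png", pixels)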
}
| lrf141/IP4S | src/main/scala/core/IPLcore.scala | Scala | mit | 1,501 |
object A {
sealed trait Expr[+T]
final case class FooExpr() extends Expr[1 | 2]
object Test {
def foo[T](x: Expr[T]): T = x match {
case x: FooExpr =>
3 // error
}
val x: 1 | 2 = foo(FooExpr())
}
}
object B {
trait C
trait D extends C
trait E extends C
trait F extends C
trait G extends C
sealed trait Expr[+T]
final case class FooExpr[+S >: (D & E) | F]() extends Expr[S]
object Test {
def foo[T](x: Expr[T]): T = x match {
case x: FooExpr[(D & E) | F] =>
new D with E
}
val x: (D & E) | F = foo(FooExpr[(D & E) | F]())
}
}
| dotty-staging/dotty | tests/neg/gadt-lhs-union.scala | Scala | apache-2.0 | 609 |
package org.tribbloid.spookystuff
/**
* Created by peng on 2/21/15.
*/
package object entity {
type SortKey = Key with SortKeyHelper
type HiddenKey = Key with HiddenKeyHelper
}
| chenUT/spookystuff | core/src/main/scala/org/tribbloid/spookystuff/entity/package.scala | Scala | apache-2.0 | 186 |
package org.scalaideaextension.compilation
import com.intellij.openapi.compiler.CompileContext
/**
* extension scripts extending this trait will automatically
* be informed about compilation events of projects
*
* @author kostas.kougios
* Date: 22/07/14
*/
trait ProjectCompilationListener
{
def success(context: CompileContext)
}
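// Illustrative listener (the object name and body are hypothetical):
//   object LogCompilationSuccess extends ProjectCompilationListener
//   {
//     def success(context: CompileContext) = println("project compiled successfully")
//   }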
| kostaskougios/scala-idea-extensions | src/org/scalaideaextension/compilation/ProjectCompilationListener.scala | Scala | apache-2.0 | 351 |
package com.danielasfregola.twitter4s.entities.streaming.common
import com.danielasfregola.twitter4s.entities.streaming.CommonStreamingMessage
/** These events contain an id field indicating the user ID and a collection of
* withheld_in_countries uppercase two-letter country codes.
* For more information see
* <a href="https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/streaming-message-types" target="_blank">
* https://developer.twitter.com/en/docs/tweets/filter-realtime/guides/streaming-message-types</a>.
*/
final case class UserWithheldNotice(user_withheld: UserWithheldId) extends CommonStreamingMessage
final case class UserWithheldId(id: Long, withheld_in_countries: List[String])
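// Illustrative construction (the id and country codes are made up):
//   UserWithheldNotice(UserWithheldId(id = 123456L, withheld_in_countries = List("DE", "AR")))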
| DanielaSfregola/twitter4s | src/main/scala/com/danielasfregola/twitter4s/entities/streaming/common/UserWithheldNotice.scala | Scala | apache-2.0 | 725 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.io._
import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address}
import java.util.{Locale, Random, UUID}
import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadPoolExecutor}
import scala.collection.JavaConversions._
import scala.collection.Map
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
import scala.reflect.ClassTag
import com.google.common.io.Files
import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.apache.hadoop.fs.{Path, FileSystem, FileUtil}
import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance}
import org.apache.spark.deploy.SparkHadoopUtil
import java.nio.ByteBuffer
import org.apache.spark.{SparkException, Logging}
/**
* Various utility methods used by Spark.
*/
private[spark] object Utils extends Logging {
/** Serialize an object using Java serialization */
def serialize[T](o: T): Array[Byte] = {
val bos = new ByteArrayOutputStream()
val oos = new ObjectOutputStream(bos)
oos.writeObject(o)
oos.close()
return bos.toByteArray
}
/** Deserialize an object using Java serialization */
def deserialize[T](bytes: Array[Byte]): T = {
val bis = new ByteArrayInputStream(bytes)
val ois = new ObjectInputStream(bis)
return ois.readObject.asInstanceOf[T]
}
/** Deserialize an object using Java serialization and the given ClassLoader */
def deserialize[T](bytes: Array[Byte], loader: ClassLoader): T = {
val bis = new ByteArrayInputStream(bytes)
val ois = new ObjectInputStream(bis) {
override def resolveClass(desc: ObjectStreamClass) =
Class.forName(desc.getName, false, loader)
}
return ois.readObject.asInstanceOf[T]
}
/** Deserialize a Long value (used for {@link org.apache.spark.api.python.PythonPartitioner}) */
def deserializeLongValue(bytes: Array[Byte]) : Long = {
// Note: we assume that we are given a Long value encoded in network (big-endian) byte order
var result = bytes(7) & 0xFFL
result = result + ((bytes(6) & 0xFFL) << 8)
result = result + ((bytes(5) & 0xFFL) << 16)
result = result + ((bytes(4) & 0xFFL) << 24)
result = result + ((bytes(3) & 0xFFL) << 32)
result = result + ((bytes(2) & 0xFFL) << 40)
result = result + ((bytes(1) & 0xFFL) << 48)
result + ((bytes(0) & 0xFFL) << 56)
}
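// e.g. (illustrative) deserializeLongValue(Array[Byte](0, 0, 0, 0, 0, 0, 1, 44)) == 300L
// since (1 << 8) + 44 == 300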
/** Serialize via nested stream using specific serializer */
def serializeViaNestedStream(os: OutputStream, ser: SerializerInstance)(f: SerializationStream => Unit) = {
val osWrapper = ser.serializeStream(new OutputStream {
def write(b: Int) = os.write(b)
override def write(b: Array[Byte], off: Int, len: Int) = os.write(b, off, len)
})
try {
f(osWrapper)
} finally {
osWrapper.close()
}
}
/** Deserialize via nested stream using specific serializer */
def deserializeViaNestedStream(is: InputStream, ser: SerializerInstance)(f: DeserializationStream => Unit) = {
val isWrapper = ser.deserializeStream(new InputStream {
def read(): Int = is.read()
override def read(b: Array[Byte], off: Int, len: Int): Int = is.read(b, off, len)
})
try {
f(isWrapper)
} finally {
isWrapper.close()
}
}
/**
* Primitive often used when writing {@link java.nio.ByteBuffer} to {@link java.io.DataOutput}.
*/
def writeByteBuffer(bb: ByteBuffer, out: ObjectOutput) = {
if (bb.hasArray) {
out.write(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())
} else {
val bbval = new Array[Byte](bb.remaining())
bb.get(bbval)
out.write(bbval)
}
}
def isAlpha(c: Char): Boolean = {
(c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z')
}
/** Split a string into words at non-alphabetic characters */
def splitWords(s: String): Seq[String] = {
val buf = new ArrayBuffer[String]
var i = 0
while (i < s.length) {
var j = i
while (j < s.length && isAlpha(s.charAt(j))) {
j += 1
}
if (j > i) {
buf += s.substring(i, j)
}
i = j
while (i < s.length && !isAlpha(s.charAt(i))) {
i += 1
}
}
return buf
}
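// e.g. (illustrative) splitWords("hello, world!") returns Seq("hello", "world")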
private val shutdownDeletePaths = new scala.collection.mutable.HashSet[String]()
// Register the path to be deleted via shutdown hook
def registerShutdownDeleteDir(file: File) {
val absolutePath = file.getAbsolutePath()
shutdownDeletePaths.synchronized {
shutdownDeletePaths += absolutePath
}
}
// Is the path already registered to be deleted via a shutdown hook ?
def hasShutdownDeleteDir(file: File): Boolean = {
val absolutePath = file.getAbsolutePath()
shutdownDeletePaths.synchronized {
shutdownDeletePaths.contains(absolutePath)
}
}
// Note: if file is child of some registered path, while not equal to it, then return true;
// else false. This is to ensure that two shutdown hooks do not try to delete each others
// paths - resulting in IOException and incomplete cleanup.
def hasRootAsShutdownDeleteDir(file: File): Boolean = {
val absolutePath = file.getAbsolutePath()
val retval = shutdownDeletePaths.synchronized {
shutdownDeletePaths.find { path =>
!absolutePath.equals(path) && absolutePath.startsWith(path)
}.isDefined
}
if (retval) {
logInfo("path = " + file + ", already present as root for deletion.")
}
retval
}
/** Create a temporary directory inside the given parent directory */
def createTempDir(root: String = System.getProperty("java.io.tmpdir")): File = {
var attempts = 0
val maxAttempts = 10
var dir: File = null
while (dir == null) {
attempts += 1
if (attempts > maxAttempts) {
throw new IOException("Failed to create a temp directory (under " + root + ") after " +
maxAttempts + " attempts!")
}
try {
dir = new File(root, "spark-" + UUID.randomUUID.toString)
if (dir.exists() || !dir.mkdirs()) {
dir = null
}
} catch { case e: IOException => ; }
}
registerShutdownDeleteDir(dir)
// Add a shutdown hook to delete the temp dir when the JVM exits
Runtime.getRuntime.addShutdownHook(new Thread("delete Spark temp dir " + dir) {
override def run() {
// Attempt to delete if some path which is a parent of this is not already registered.
if (! hasRootAsShutdownDeleteDir(dir)) Utils.deleteRecursively(dir)
}
})
dir
}
/** Copy all data from an InputStream to an OutputStream */
def copyStream(in: InputStream,
out: OutputStream,
closeStreams: Boolean = false)
{
val buf = new Array[Byte](8192)
var n = 0
while (n != -1) {
n = in.read(buf)
if (n != -1) {
out.write(buf, 0, n)
}
}
if (closeStreams) {
in.close()
out.close()
}
}
/**
* Download a file requested by the executor. Supports fetching the file in a variety of ways,
* including HTTP, HDFS and files on a standard filesystem, based on the URL parameter.
*
* Throws SparkException if the target file already exists and has different contents than
* the requested file.
*/
def fetchFile(url: String, targetDir: File) {
val filename = url.split("/").last
val tempDir = getLocalDir
val tempFile = File.createTempFile("fetchFileTemp", null, new File(tempDir))
val targetFile = new File(targetDir, filename)
val uri = new URI(url)
uri.getScheme match {
case "http" | "https" | "ftp" =>
logInfo("Fetching " + url + " to " + tempFile)
val in = new URL(url).openStream()
val out = new FileOutputStream(tempFile)
Utils.copyStream(in, out, true)
if (targetFile.exists && !Files.equal(tempFile, targetFile)) {
tempFile.delete()
throw new SparkException(
"File " + targetFile + " exists and does not match contents of" + " " + url)
} else {
Files.move(tempFile, targetFile)
}
case "file" | null =>
// In the case of a local file, copy the local file to the target directory.
// Note the difference between uri vs url.
val sourceFile = if (uri.isAbsolute) new File(uri) else new File(url)
if (targetFile.exists) {
// If the target file already exists, throw an exception if its contents differ from the source.
if (!Files.equal(sourceFile, targetFile)) {
throw new SparkException(
"File " + targetFile + " exists and does not match contents of" + " " + url)
} else {
// Do nothing if the file contents are the same, i.e. this file has been copied
// previously.
logInfo(sourceFile.getAbsolutePath + " has been previously copied to "
+ targetFile.getAbsolutePath)
}
} else {
// The file does not exist in the target directory. Copy it there.
logInfo("Copying " + sourceFile.getAbsolutePath + " to " + targetFile.getAbsolutePath)
Files.copy(sourceFile, targetFile)
}
case _ =>
// Use the Hadoop filesystem library, which supports file://, hdfs://, s3://, and others
val uri = new URI(url)
val conf = SparkHadoopUtil.get.newConfiguration()
val fs = FileSystem.get(uri, conf)
val in = fs.open(new Path(uri))
val out = new FileOutputStream(tempFile)
Utils.copyStream(in, out, true)
if (targetFile.exists && !Files.equal(tempFile, targetFile)) {
tempFile.delete()
throw new SparkException("File " + targetFile + " exists and does not match contents of" +
" " + url)
} else {
Files.move(tempFile, targetFile)
}
}
// Decompress the file if it's a .tar or .tar.gz
if (filename.endsWith(".tar.gz") || filename.endsWith(".tgz")) {
logInfo("Untarring " + filename)
Utils.execute(Seq("tar", "-xzf", filename), targetDir)
} else if (filename.endsWith(".tar")) {
logInfo("Untarring " + filename)
Utils.execute(Seq("tar", "-xf", filename), targetDir)
}
// Make the file executable - That's necessary for scripts
FileUtil.chmod(targetFile.getAbsolutePath, "a+x")
}
/**
* Get a temporary directory using Spark's spark.local.dir property, if set. This will always
* return a single directory, even though the spark.local.dir property might be a list of
* multiple paths.
*/
def getLocalDir: String = {
System.getProperty("spark.local.dir", System.getProperty("java.io.tmpdir")).split(',')(0)
}
/**
* Shuffle the elements of a collection into a random order, returning the
* result in a new collection. Unlike scala.util.Random.shuffle, this method
* uses a local random number generator, avoiding inter-thread contention.
*/
def randomize[T: ClassTag](seq: TraversableOnce[T]): Seq[T] = {
randomizeInPlace(seq.toArray)
}
/**
* Shuffle the elements of an array into a random order, modifying the
* original array. Returns the original array.
*/
def randomizeInPlace[T](arr: Array[T], rand: Random = new Random): Array[T] = {
for (i <- (arr.length - 1) to 1 by -1) {
val j = rand.nextInt(i)
val tmp = arr(j)
arr(j) = arr(i)
arr(i) = tmp
}
arr
}
/**
* Get the local host's IP address in dotted-quad format (e.g. 1.2.3.4).
* Note, this is typically not used from within core spark.
*/
lazy val localIpAddress: String = findLocalIpAddress()
lazy val localIpAddressHostname: String = getAddressHostName(localIpAddress)
private def findLocalIpAddress(): String = {
val defaultIpOverride = System.getenv("SPARK_LOCAL_IP")
if (defaultIpOverride != null) {
defaultIpOverride
} else {
val address = InetAddress.getLocalHost
if (address.isLoopbackAddress) {
// Address resolves to something like 127.0.1.1, which happens on Debian; try to find
// a better address using the local network interfaces
for (ni <- NetworkInterface.getNetworkInterfaces) {
for (addr <- ni.getInetAddresses if !addr.isLinkLocalAddress &&
!addr.isLoopbackAddress && addr.isInstanceOf[Inet4Address]) {
// We've found an address that looks reasonable!
logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" +
" a loopback address: " + address.getHostAddress + "; using " + addr.getHostAddress +
" instead (on interface " + ni.getName + ")")
logWarning("Set SPARK_LOCAL_IP if you need to bind to another address")
return addr.getHostAddress
}
}
logWarning("Your hostname, " + InetAddress.getLocalHost.getHostName + " resolves to" +
" a loopback address: " + address.getHostAddress + ", but we couldn't find any" +
" external IP address!")
logWarning("Set SPARK_LOCAL_IP if you need to bind to another address")
}
address.getHostAddress
}
}
private var customHostname: Option[String] = None
/**
* Allow setting a custom host name because when we run on Mesos we need to use the same
* hostname it reports to the master.
*/
def setCustomHostname(hostname: String) {
// DEBUG code
Utils.checkHost(hostname)
customHostname = Some(hostname)
}
/**
* Get the local machine's hostname.
*/
def localHostName(): String = {
customHostname.getOrElse(localIpAddressHostname)
}
def getAddressHostName(address: String): String = {
InetAddress.getByName(address).getHostName
}
def localHostPort(): String = {
val retval = System.getProperty("spark.hostPort", null)
if (retval == null) {
logErrorWithStack("spark.hostPort not set but invoking localHostPort")
return localHostName()
}
retval
}
def checkHost(host: String, message: String = "") {
assert(host.indexOf(':') == -1, message)
}
def checkHostPort(hostPort: String, message: String = "") {
assert(hostPort.indexOf(':') != -1, message)
}
// Used by DEBUG code : remove when all testing done
def logErrorWithStack(msg: String) {
try { throw new Exception } catch { case ex: Exception => { logError(msg, ex) } }
}
// Typically, this will be of order of number of nodes in cluster
// If not, we should change it to LRUCache or something.
private val hostPortParseResults = new ConcurrentHashMap[String, (String, Int)]()
def parseHostPort(hostPort: String): (String, Int) = {
{
// Check cache first.
var cached = hostPortParseResults.get(hostPort)
if (cached != null) return cached
}
val indx: Int = hostPort.lastIndexOf(':')
// This is potentially broken - when dealing with ipv6 addresses for example, sigh ...
// but then hadoop does not support ipv6 right now.
// For now, we assume that if port exists, then it is valid - not check if it is an int > 0
if (-1 == indx) {
val retval = (hostPort, 0)
hostPortParseResults.put(hostPort, retval)
return retval
}
val retval = (hostPort.substring(0, indx).trim(), hostPort.substring(indx + 1).trim().toInt)
hostPortParseResults.putIfAbsent(hostPort, retval)
hostPortParseResults.get(hostPort)
}
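// e.g. (illustrative) parseHostPort("example.com:8080") == ("example.com", 8080)
// and parseHostPort("example.com") == ("example.com", 0)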
private val daemonThreadFactoryBuilder: ThreadFactoryBuilder =
new ThreadFactoryBuilder().setDaemon(true)
/**
* Wrapper over newCachedThreadPool. Thread names are formatted as prefix-ID, where ID is a
* unique, sequentially assigned integer.
*/
def newDaemonCachedThreadPool(prefix: String): ThreadPoolExecutor = {
val threadFactory = daemonThreadFactoryBuilder.setNameFormat(prefix + "-%d").build()
Executors.newCachedThreadPool(threadFactory).asInstanceOf[ThreadPoolExecutor]
}
/**
* Return a string reporting how much time has passed since the given start time.
* The parameter should be in milliseconds.
*/
def getUsedTimeMs(startTimeMs: Long): String = {
return " " + (System.currentTimeMillis - startTimeMs) + " ms"
}
/**
* Wrapper over newFixedThreadPool. Thread names are formatted as prefix-ID, where ID is a
* unique, sequentially assigned integer.
*/
def newDaemonFixedThreadPool(nThreads: Int, prefix: String): ThreadPoolExecutor = {
val threadFactory = daemonThreadFactoryBuilder.setNameFormat(prefix + "-%d").build()
Executors.newFixedThreadPool(nThreads, threadFactory).asInstanceOf[ThreadPoolExecutor]
}
private def listFilesSafely(file: File): Seq[File] = {
val files = file.listFiles()
if (files == null) {
throw new IOException("Failed to list files for dir: " + file)
}
files
}
/**
* Delete a file or directory and its contents recursively.
*/
def deleteRecursively(file: File) {
if (file.isDirectory) {
for (child <- listFilesSafely(file)) {
deleteRecursively(child)
}
}
if (!file.delete()) {
throw new IOException("Failed to delete: " + file)
}
}
/**
* Convert a Java memory parameter passed to -Xmx (such as 300m or 1g) to a number of megabytes.
* This is used to figure out how much memory to claim from Mesos based on the SPARK_MEM
* environment variable.
*/
def memoryStringToMb(str: String): Int = {
val lower = str.toLowerCase
if (lower.endsWith("k")) {
(lower.substring(0, lower.length-1).toLong / 1024).toInt
} else if (lower.endsWith("m")) {
lower.substring(0, lower.length-1).toInt
} else if (lower.endsWith("g")) {
lower.substring(0, lower.length-1).toInt * 1024
} else if (lower.endsWith("t")) {
lower.substring(0, lower.length-1).toInt * 1024 * 1024
} else {// no suffix, so it's just a number in bytes
(lower.toLong / 1024 / 1024).toInt
}
}
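// e.g. (illustrative) memoryStringToMb("300m") == 300, memoryStringToMb("2g") == 2048,
// memoryStringToMb("512k") == 0 (rounds down) and memoryStringToMb("1t") == 1048576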
/**
* Convert a quantity in bytes to a human-readable string such as "4.0 MB".
*/
def bytesToString(size: Long): String = {
val TB = 1L << 40
val GB = 1L << 30
val MB = 1L << 20
val KB = 1L << 10
val (value, unit) = {
if (size >= 2*TB) {
(size.asInstanceOf[Double] / TB, "TB")
} else if (size >= 2*GB) {
(size.asInstanceOf[Double] / GB, "GB")
} else if (size >= 2*MB) {
(size.asInstanceOf[Double] / MB, "MB")
} else if (size >= 2*KB) {
(size.asInstanceOf[Double] / KB, "KB")
} else {
(size.asInstanceOf[Double], "B")
}
}
"%.1f %s".formatLocal(Locale.US, value, unit)
}
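// e.g. (illustrative) bytesToString(5000) == "4.9 KB" and bytesToString(3L << 20) == "3.0 MB"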
/**
* Returns a human-readable string representing a duration such as "35ms"
*/
def msDurationToString(ms: Long): String = {
val second = 1000
val minute = 60 * second
val hour = 60 * minute
ms match {
case t if t < second =>
"%d ms".format(t)
case t if t < minute =>
"%.1f s".format(t.toFloat / second)
case t if t < hour =>
"%.1f m".format(t.toFloat / minute)
case t =>
"%.2f h".format(t.toFloat / hour)
}
}
/**
* Convert a quantity in megabytes to a human-readable string such as "4.0 MB".
*/
def megabytesToString(megabytes: Long): String = {
bytesToString(megabytes * 1024L * 1024L)
}
/**
* Execute a command in the given working directory, throwing an exception if it completes
* with an exit code other than 0.
*/
def execute(command: Seq[String], workingDir: File) {
val process = new ProcessBuilder(command: _*)
.directory(workingDir)
.redirectErrorStream(true)
.start()
new Thread("read stdout for " + command(0)) {
override def run() {
for (line <- Source.fromInputStream(process.getInputStream).getLines) {
System.err.println(line)
}
}
}.start()
val exitCode = process.waitFor()
if (exitCode != 0) {
throw new SparkException("Process " + command + " exited with code " + exitCode)
}
}
/**
* Execute a command in the current working directory, throwing an exception if it completes
* with an exit code other than 0.
*/
def execute(command: Seq[String]) {
execute(command, new File("."))
}
/**
* Execute a command and get its output, throwing an exception if it yields a code other than 0.
*/
def executeAndGetOutput(command: Seq[String], workingDir: File = new File("."),
extraEnvironment: Map[String, String] = Map.empty): String = {
val builder = new ProcessBuilder(command: _*)
.directory(workingDir)
val environment = builder.environment()
for ((key, value) <- extraEnvironment) {
environment.put(key, value)
}
val process = builder.start()
new Thread("read stderr for " + command(0)) {
override def run() {
for (line <- Source.fromInputStream(process.getErrorStream).getLines) {
System.err.println(line)
}
}
}.start()
val output = new StringBuffer
val stdoutThread = new Thread("read stdout for " + command(0)) {
override def run() {
for (line <- Source.fromInputStream(process.getInputStream).getLines) {
output.append(line)
}
}
}
stdoutThread.start()
val exitCode = process.waitFor()
stdoutThread.join() // Wait for it to finish reading output
if (exitCode != 0) {
throw new SparkException("Process " + command + " exited with code " + exitCode)
}
output.toString
}
/**
* A regular expression to match classes of the "core" Spark API that we want to skip when
* finding the call site of a method.
*/
private val SPARK_CLASS_REGEX = """^org\.apache\.spark(\.api\.java)?(\.util)?(\.rdd)?\.[A-Z]""".r
private[spark] class CallSiteInfo(val lastSparkMethod: String, val firstUserFile: String,
val firstUserLine: Int, val firstUserClass: String)
/**
* When called inside a class in the spark package, returns the name of the user code class
* (outside the spark package) that called into Spark, as well as which Spark method they called.
* This is used, for example, to tell users where in their code each RDD got created.
*/
def getCallSiteInfo: CallSiteInfo = {
val trace = Thread.currentThread.getStackTrace().filter( el =>
(!el.getMethodName.contains("getStackTrace")))
// Keep crawling up the stack trace until we find the first function not inside of the spark
// package. We track the last (shallowest) contiguous Spark method. This might be an RDD
// transformation, a SparkContext function (such as parallelize), or anything else that leads
// to instantiation of an RDD. We also track the first (deepest) user method, file, and line.
var lastSparkMethod = "<unknown>"
var firstUserFile = "<unknown>"
var firstUserLine = 0
var finished = false
var firstUserClass = "<unknown>"
for (el <- trace) {
if (!finished) {
if (SPARK_CLASS_REGEX.findFirstIn(el.getClassName) != None) {
lastSparkMethod = if (el.getMethodName == "<init>") {
// Spark method is a constructor; get its class name
el.getClassName.substring(el.getClassName.lastIndexOf('.') + 1)
} else {
el.getMethodName
}
}
else {
firstUserLine = el.getLineNumber
firstUserFile = el.getFileName
firstUserClass = el.getClassName
finished = true
}
}
}
new CallSiteInfo(lastSparkMethod, firstUserFile, firstUserLine, firstUserClass)
}
def formatSparkCallSite = {
val callSiteInfo = getCallSiteInfo
"%s at %s:%s".format(callSiteInfo.lastSparkMethod, callSiteInfo.firstUserFile,
callSiteInfo.firstUserLine)
}
/** Return a string containing part of a file from byte 'start' to 'end'. */
def offsetBytes(path: String, start: Long, end: Long): String = {
val file = new File(path)
val length = file.length()
val effectiveEnd = math.min(length, end)
val effectiveStart = math.max(0, start)
val buff = new Array[Byte]((effectiveEnd-effectiveStart).toInt)
val stream = new FileInputStream(file)
stream.skip(effectiveStart)
stream.read(buff)
stream.close()
Source.fromBytes(buff).mkString
}
/**
* Clone an object using a Spark serializer.
*/
def clone[T](value: T, serializer: SerializerInstance): T = {
serializer.deserialize[T](serializer.serialize(value))
}
/**
* Detect whether this thread might be executing a shutdown hook. Will always return true if
* the current thread is running a shutdown hook but may spuriously return true otherwise (e.g.
* if System.exit was just called by a concurrent thread).
*
* Currently, this detects whether the JVM is shutting down by Runtime#addShutdownHook throwing
* an IllegalStateException.
*/
def inShutdown(): Boolean = {
try {
val hook = new Thread {
override def run() {}
}
Runtime.getRuntime.addShutdownHook(hook)
Runtime.getRuntime.removeShutdownHook(hook)
} catch {
case ise: IllegalStateException => return true
}
return false
}
def isSpace(c: Char): Boolean = {
" \t\r\n".indexOf(c) != -1
}
/**
* Split a string of potentially quoted arguments from the command line the way that a shell
* would do it to determine arguments to a command. For example, if the string is 'a "b c" d',
* then it would be parsed as three arguments: 'a', 'b c' and 'd'.
*/
def splitCommandString(s: String): Seq[String] = {
val buf = new ArrayBuffer[String]
var inWord = false
var inSingleQuote = false
var inDoubleQuote = false
var curWord = new StringBuilder
def endWord() {
buf += curWord.toString
curWord.clear()
}
var i = 0
while (i < s.length) {
var nextChar = s.charAt(i)
if (inDoubleQuote) {
if (nextChar == '"') {
inDoubleQuote = false
} else if (nextChar == '\\') {
if (i < s.length - 1) {
// Append the next character directly, because only " and \ may be escaped in
// double quotes after the shell's own expansion
curWord.append(s.charAt(i + 1))
i += 1
}
} else {
curWord.append(nextChar)
}
} else if (inSingleQuote) {
if (nextChar == '\'') {
inSingleQuote = false
} else {
curWord.append(nextChar)
}
// Backslashes are not treated specially in single quotes
} else if (nextChar == '"') {
inWord = true
inDoubleQuote = true
} else if (nextChar == '\'') {
inWord = true
inSingleQuote = true
} else if (!isSpace(nextChar)) {
curWord.append(nextChar)
inWord = true
} else if (inWord && isSpace(nextChar)) {
endWord()
inWord = false
}
i += 1
}
if (inWord || inDoubleQuote || inSingleQuote) {
endWord()
}
return buf
}
/* Calculates 'x' modulo 'mod', taking into consideration the sign of x,
* i.e. if 'x' is negative, then 'x' % 'mod' is negative too,
* so the function returns (x % mod) + mod in that case.
*/
def nonNegativeMod(x: Int, mod: Int): Int = {
val rawMod = x % mod
rawMod + (if (rawMod < 0) mod else 0)
}
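// e.g. (illustrative) nonNegativeMod(7, 5) == 2 and nonNegativeMod(-7, 5) == 3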
// Handles idiosyncrasies with hash (add more as required)
def nonNegativeHash(obj: AnyRef): Int = {
// Required ?
if (obj eq null) return 0
val hash = obj.hashCode
// math.abs fails for Int.MinValue
val hashAbs = if (Int.MinValue != hash) math.abs(hash) else 0
// Nothing else to guard against ?
hashAbs
}
/** Returns a copy of the system properties that is thread-safe to iterate over. */
def getSystemProperties(): Map[String, String] = {
return System.getProperties().clone()
.asInstanceOf[java.util.Properties].toMap[String, String]
}
/**
* Method executed for repeating a task for side effects.
* Unlike a for comprehension, it permits JVM JIT optimization
*/
def times(numIters: Int)(f: => Unit): Unit = {
var i = 0
while (i < numIters) {
f
i += 1
}
}
/**
* Timing method based on iterations that permit JVM JIT optimization.
* @param numIters number of iterations
* @param f function to be executed
*/
def timeIt(numIters: Int)(f: => Unit): Long = {
val start = System.currentTimeMillis
times(numIters)(f)
System.currentTimeMillis - start
}
}
| mkolod/incubator-spark | core/src/main/scala/org/apache/spark/util/Utils.scala | Scala | apache-2.0 | 29,360 |
package service
import model.Profile._
import profile.simple._
import model.Activity
trait ActivityService {
def getActivitiesByUser(activityUserName: String, isPublic: Boolean)(implicit s: Session): List[Activity] =
Activities
.innerJoin(Repositories).on((t1, t2) => t1.byRepository(t2.userName, t2.repositoryName))
.filter { case (t1, t2) =>
if(isPublic){
(t1.activityUserName === activityUserName.bind) && (t2.isPrivate === false.bind)
} else {
(t1.activityUserName === activityUserName.bind)
}
}
.sortBy { case (t1, t2) => t1.activityId desc }
.map { case (t1, t2) => t1 }
.take(30)
.list
def getRecentActivities()(implicit s: Session): List[Activity] =
Activities
.innerJoin(Repositories).on((t1, t2) => t1.byRepository(t2.userName, t2.repositoryName))
.filter { case (t1, t2) => t2.isPrivate === false.bind }
.sortBy { case (t1, t2) => t1.activityId desc }
.map { case (t1, t2) => t1 }
.take(30)
.list
def getRecentActivitiesByOwners(owners : Set[String])(implicit s: Session): List[Activity] =
Activities
.innerJoin(Repositories).on((t1, t2) => t1.byRepository(t2.userName, t2.repositoryName))
.filter { case (t1, t2) => (t2.isPrivate === false.bind) || (t2.userName inSetBind owners) }
.sortBy { case (t1, t2) => t1.activityId desc }
.map { case (t1, t2) => t1 }
.take(30)
.list
def recordCreateRepositoryActivity(userName: String, repositoryName: String, activityUserName: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"create_repository",
s"[user:${activityUserName}] created [repo:${userName}/${repositoryName}]",
None,
currentDate)
def recordCreateIssueActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, title: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"open_issue",
s"[user:${activityUserName}] opened issue [issue:${userName}/${repositoryName}#${issueId}]",
Some(title),
currentDate)
def recordCloseIssueActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, title: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"close_issue",
s"[user:${activityUserName}] closed issue [issue:${userName}/${repositoryName}#${issueId}]",
Some(title),
currentDate)
def recordClosePullRequestActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, title: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"close_issue",
s"[user:${activityUserName}] closed pull request [pullreq:${userName}/${repositoryName}#${issueId}]",
Some(title),
currentDate)
def recordReopenIssueActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, title: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"reopen_issue",
s"[user:${activityUserName}] reopened issue [issue:${userName}/${repositoryName}#${issueId}]",
Some(title),
currentDate)
def recordCommentIssueActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, comment: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"comment_issue",
s"[user:${activityUserName}] commented on issue [issue:${userName}/${repositoryName}#${issueId}]",
Some(cut(comment, 200)),
currentDate)
def recordCommentPullRequestActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, comment: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"comment_issue",
s"[user:${activityUserName}] commented on pull request [pullreq:${userName}/${repositoryName}#${issueId}]",
Some(cut(comment, 200)),
currentDate)
def recordCommentCommitActivity(userName: String, repositoryName: String, activityUserName: String, commitId: String, comment: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"comment_commit",
s"[user:${activityUserName}] commented on commit [commit:${userName}/${repositoryName}@${commitId}]",
Some(cut(comment, 200)),
currentDate
)
def recordCreateWikiPageActivity(userName: String, repositoryName: String, activityUserName: String, pageName: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"create_wiki",
s"[user:${activityUserName}] created the [repo:${userName}/${repositoryName}] wiki",
Some(pageName),
currentDate)
def recordEditWikiPageActivity(userName: String, repositoryName: String, activityUserName: String, pageName: String, commitId: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"edit_wiki",
s"[user:${activityUserName}] edited the [repo:${userName}/${repositoryName}] wiki",
Some(pageName + ":" + commitId),
currentDate)
def recordPushActivity(userName: String, repositoryName: String, activityUserName: String,
branchName: String, commits: List[util.JGitUtil.CommitInfo])(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"push",
s"[user:${activityUserName}] pushed to [branch:${userName}/${repositoryName}#${branchName}] at [repo:${userName}/${repositoryName}]",
Some(commits.map { commit => commit.id + ":" + commit.shortMessage }.mkString("\\n")),
currentDate)
def recordCreateTagActivity(userName: String, repositoryName: String, activityUserName: String,
tagName: String, commits: List[util.JGitUtil.CommitInfo])(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"create_tag",
s"[user:${activityUserName}] created tag [tag:${userName}/${repositoryName}#${tagName}] at [repo:${userName}/${repositoryName}]",
None,
currentDate)
def recordDeleteTagActivity(userName: String, repositoryName: String, activityUserName: String,
tagName: String, commits: List[util.JGitUtil.CommitInfo])(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"delete_tag",
s"[user:${activityUserName}] deleted tag ${tagName} at [repo:${userName}/${repositoryName}]",
None,
currentDate)
def recordCreateBranchActivity(userName: String, repositoryName: String, activityUserName: String, branchName: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"create_branch",
s"[user:${activityUserName}] created branch [branch:${userName}/${repositoryName}#${branchName}] at [repo:${userName}/${repositoryName}]",
None,
currentDate)
def recordDeleteBranchActivity(userName: String, repositoryName: String, activityUserName: String, branchName: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"delete_branch",
s"[user:${activityUserName}] deleted branch ${branchName} at [repo:${userName}/${repositoryName}]",
None,
currentDate)
def recordForkActivity(userName: String, repositoryName: String, activityUserName: String, forkedUserName: String)(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"fork",
s"[user:${activityUserName}] forked [repo:${userName}/${repositoryName}] to [repo:${forkedUserName}/${repositoryName}]",
None,
currentDate)
def recordPullRequestActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, title: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"open_pullreq",
s"[user:${activityUserName}] opened pull request [pullreq:${userName}/${repositoryName}#${issueId}]",
Some(title),
currentDate)
def recordMergeActivity(userName: String, repositoryName: String, activityUserName: String, issueId: Int, message: String)
(implicit s: Session): Unit =
Activities insert Activity(userName, repositoryName, activityUserName,
"merge_pullreq",
s"[user:${activityUserName}] merged pull request [pullreq:${userName}/${repositoryName}#${issueId}]",
Some(message),
currentDate)
private def cut(value: String, length: Int): String =
if(value.length > length) value.substring(0, length) + "..." else value
}
| mqshen/gitbucketTest | src/main/scala/service/ActivityService.scala | Scala | apache-2.0 | 9,512 |
package org.powlab.jeye.decode.processor.wide
import scala.collection.mutable.ArrayBuffer
import org.powlab.jeye.core.Opcodes.OPCODES
import org.powlab.jeye.core.Opcodes.OPCODE_IINC
import org.powlab.jeye.core.Opcodes.OPCODE_WIDE
import org.powlab.jeye.decode.RuntimeOpcode
import org.powlab.jeye.utils.DecodeUtils
import org.powlab.jeye.decode.graph.OpcodeNodes._
import org.powlab.jeye.decode.processor.AbstractInstructionProcessor
import org.powlab.jeye.decode.MethodContext
object WideOpcodeInformator {
private def unwrapWideOpcode(runtimeOpcode: RuntimeOpcode): RuntimeOpcode = {
val number = runtimeOpcode.number
val values = runtimeOpcode.values
val newOpcode = OPCODES(values(0));
val newValues = new ArrayBuffer[Int];
newValues += (values(1) << 8) | values(2);
if (newOpcode == OPCODE_IINC) {
newValues += DecodeUtils.getShort(values(3), values(4))
}
new RuntimeOpcode(number, newOpcode, newValues.toArray)
}
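  // Illustrative sketch (not part of the original source): a "wide iload" whose operand
  // bytes are [0x15, 0x01, 0x02] - assuming OPCODES(0x15) is the iload opcode - unwraps
  // to a RuntimeOpcode for iload carrying the single 16-bit index (0x01 << 8) | 0x02 = 258,
  // while a wide iinc additionally carries the signed increment decoded from the two
  // trailing bytes via DecodeUtils.getShort.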
def unwrap(runtimeOpcode: RuntimeOpcode): RuntimeOpcode = {
runtimeOpcode.opcode match {
case OPCODE_WIDE => unwrapWideOpcode(runtimeOpcode)
case _ => runtimeOpcode
}
}
} | powlab/jeye | src/main/scala/org/powlab/jeye/decode/processor/wide/WideOpcodeInformator.scala | Scala | apache-2.0 | 1,158 |
/**
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2011- SINTEF ICT
* Contact: SINTEF ICT <[email protected]>
*
* Module: net.modelbased.sensapp
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
*/
package net.modelbased.sensapp.system.pfe2012
import akka.actor.ActorSystem
import net.modelbased.sensapp.service.database.raw.RawDatabaseService
import net.modelbased.sensapp.service.registry.{ RegistryService, CompositeRegistryService }
import net.modelbased.sensapp.service.dispatch.{ Service => DispatchService }
import net.modelbased.sensapp.service.notifier.{ Service => NotifierService }
import net.modelbased.sensapp.service.converter.{ Service => ConverterService }
import net.modelbased.sensapp.library.system._
abstract class DistributedService(override val system: ActorSystem) extends System {
trait topology {
lazy val partners = new TopologyFileBasedDistribution { implicit val actorSystem = system }
implicit def actorSystem = system
}
}
class DatabaseSystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new RawDatabaseService with topology {})
}
class RegistrySystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new RegistryService with topology {})
}
class CompositeRegistrySystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new CompositeRegistryService with topology {})
}
class DispatchSystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new DispatchService with topology {})
}
class NotifierSystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new NotifierService with topology {})
}
class ConverterSystem(system: ActorSystem) extends DistributedService(system) {
def services = List(new ConverterService with topology {})
}
| SINTEF-9012/sensapp | net.modelbased.sensapp.system.pfe2012/src/main/scala/net/modelbased/sensapp/system/pfe2012/Distribution.scala | Scala | lgpl-3.0 | 2,503 |
/**
* Created by nperez on 7/15/16.
*/
package com.nico.runner
import com.nico.ClusterPublisher.ClusterPublisherApp
import com.nico.DistributedSubscriber.DistributedSubscriberApp
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
object Runner extends App {
//val jsonConfig = args(0)
// parse config and start nodes
  Future {
    DistributedSubscriberApp.main(List(10, 9090).map(_.toString).toArray)
  }.map { _ =>
    ClusterPublisherApp.main(List(10, 9091, 1000000).map(_.toString).toArray)
  }
readLine()
}
| anicolaspp/distributd-transaction-processor | ClusterRunner/src/main/scala/Runner.scala | Scala | mit | 578 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.testkit
import akka.Done
import akka.persistence.query.Offset
import akka.stream.Materializer
import akka.stream.scaladsl.{ Flow, Sink, Source }
import com.lightbend.internal.broker.TaggedOffsetTopicProducer
import com.lightbend.lagom.internal.scaladsl.api.broker.{ TopicFactory, TopicFactoryProvider }
import com.lightbend.lagom.scaladsl.api.Descriptor.TopicCall
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceSupport.ScalaMethodTopic
import com.lightbend.lagom.scaladsl.api.broker.Topic.TopicId
import com.lightbend.lagom.scaladsl.api.broker.{ Message, Subscriber, Topic }
import com.lightbend.lagom.scaladsl.persistence.AggregateEvent
import com.lightbend.lagom.scaladsl.server.LagomServer
import scala.concurrent.Future
trait TestTopicComponents extends TopicFactoryProvider {
def lagomServer: LagomServer
def materializer: Materializer
override def optionalTopicFactory: Option[TopicFactory] = Some(topicFactory)
override def topicPublisherName: Option[String] = super.topicPublisherName match {
case Some(other) =>
sys.error(s"Cannot provide the test topic factory as the default topic publisher since a default topic publisher has already been mixed into this cake: $other")
case None => Some("test")
}
lazy val topicFactory: TopicFactory = new TestTopicFactory(lagomServer)(materializer)
}
private[lagom] class TestTopicFactory(lagomServer: LagomServer)(implicit materializer: Materializer) extends TopicFactory {
private val topics: Map[TopicId, Service] =
lagomServer.serviceBindings.flatMap { binding =>
binding.descriptor.topics.map { topic =>
topic.topicId -> binding.service.asInstanceOf[Service]
}
}.toMap
override def create[Message](topicCall: TopicCall[Message]): Topic[Message] =
topics.get(topicCall.topicId) match {
case Some(service) =>
topicCall.topicHolder match {
case method: ScalaMethodTopic[Message] =>
method.method.invoke(service) match {
case topicProducer: TaggedOffsetTopicProducer[Message, _] => new TestTopic(topicCall, topicProducer)(materializer)
case _ =>
throw new IllegalArgumentException(s"Testkit does not know how to handle the topic type for ${topicCall.topicId}")
}
case _ =>
throw new IllegalArgumentException(s"Testkit does not know how to handle topic ${topicCall.topicId}")
}
case None =>
throw new IllegalArgumentException(s"${topicCall.topicId} hasn't been resolved")
}
}
private[lagom] class TestTopic[Payload, Event <: AggregateEvent[Event]](
topicCall: TopicCall[Payload],
topicProducer: TaggedOffsetTopicProducer[Payload, Event]
)(implicit materializer: Materializer) extends Topic[Payload] {
override def topicId: TopicId = topicCall.topicId
override def subscribe: Subscriber[Payload] = new TestSubscriber[Payload](identity)
private class TestSubscriber[WrappedPayload](transform: Payload => WrappedPayload) extends Subscriber[WrappedPayload] {
override def withGroupId(groupId: String): Subscriber[WrappedPayload] = this
override def withMetadata = new TestSubscriber[Message[WrappedPayload]](transform.andThen(Message.apply))
override def atMostOnceSource: Source[WrappedPayload, _] = {
val serializer = topicCall.messageSerializer
Source(topicProducer.tags).flatMapMerge(topicProducer.tags.size, { tag =>
topicProducer.readSideStream.apply(tag, Offset.noOffset).map(_._1)
}).map { evt =>
serializer.serializerForRequest.serialize(evt)
}.map { bytes =>
serializer.deserializer(serializer.acceptResponseProtocols.head).deserialize(bytes)
}.map(transform)
}
override def atLeastOnce(flow: Flow[WrappedPayload, Done, _]): Future[Done] =
atMostOnceSource.via(flow).runWith(Sink.ignore)
}
}
| rstento/lagom | testkit/scaladsl/src/main/scala/com/lightbend/lagom/scaladsl/testkit/TestTopicComponents.scala | Scala | apache-2.0 | 4,027 |
package com.bwsw.tstreamstransactionserver.options
import java.util.Properties
class OptionHelper(properties: Properties) {
def checkPropertyOnExistence(property: String)(classType: Class[_]): String = {
Option(properties.getProperty(property))
.getOrElse(throw new NoSuchElementException(
s"No property by key: '$property' has been found for '${classType.getSimpleName}'." +
s"You should define it and restart the program.")
)
}
def castCheck[T](property: String,
constructor: String => T
)(implicit classType: Class[_]): T = {
val value = checkPropertyOnExistence(property)(classType)
try {
constructor(value)
} catch {
case _: IllegalArgumentException =>
throw new IllegalArgumentException(
s"Property '$property' has got an invalid format, but expected another type."
)
}
}
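  // Hypothetical usage sketch (the names below are illustrative, not from this code base):
  //
  //   implicit val owner: Class[_] = classOf[ServerOptions]
  //   val port: Int = new OptionHelper(properties).castCheck("bind.port", _.toInt)
  //
  // The caller supplies the property key plus a String => T conversion; the result is
  // either the converted value or an exception that names the offending key.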
}
| bwsw/tstreams-transaction-server | src/main/scala/com/bwsw/tstreamstransactionserver/options/OptionHelper.scala | Scala | apache-2.0 | 916 |
/*
* Copyright 2001-2017 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.source
import scala.quoted._
class TypeInfo[T](val name: String)
/**
 * Companion object for <code>TypeInfo</code> that defines an implicit
 * method that uses a macro to capture the type's name.
*/
object TypeInfo {
def apply[T](name: String): TypeInfo[T] = new TypeInfo[T](name)
implicit inline def gen[T]: TypeInfo[T] = ${ TypeInfoMacro.genTypeInfo[T] }
}
| scalatest/scalatest | dotty/scalactic/src/main/scala/org/scalactic/source/TypeInfo.scala | Scala | apache-2.0 | 998 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.util
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.`type`.InternalType
import org.apache.flink.table.dataview.DataViewSpec
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.calcite.rel.core.AggregateCall
import scala.collection.mutable.ArrayBuffer
/**
* The information about aggregate function call
*
* @param agg calcite agg call
* @param function AggregateFunction or DeclarativeAggregateFunction
* @param aggIndex the index of the aggregate call in the aggregation list
* @param argIndexes the aggregate arguments indexes in the input
* @param externalAccTypes accumulator types
* @param viewSpecs data view specs
* @param externalResultType the result type of aggregate
* @param consumeRetraction whether the aggregate consumes retractions
*/
case class AggregateInfo(
agg: AggregateCall,
function: UserDefinedFunction,
aggIndex: Int,
argIndexes: Array[Int],
externalAccTypes: Array[TypeInformation[_]],
viewSpecs: Array[DataViewSpec],
externalResultType: TypeInformation[_],
consumeRetraction: Boolean)
/**
* The information about shared distinct of the aggregates. It indicates which aggregates are
* distinct aggregates.
*
* @param argIndexes the distinct aggregate arguments indexes in the input
* @param keyType the distinct key type
* @param accType the accumulator type of the shared distinct
* @param excludeAcc whether the distinct acc should excluded from the aggregate accumulator.
* e.g. when this works in incremental mode, returns true, otherwise false.
* @param dataViewSpec data view spec about this distinct agg used to generate state access,
* None when dataview is not worked in state mode
* @param consumeRetraction whether the distinct agg consumes retractions
* @param filterArgs the ordinal of filter argument for each aggregate, -1 means without filter
* @param aggIndexes the distinct aggregate index in the aggregation list
*/
case class DistinctInfo(
argIndexes: Array[Int],
keyType: TypeInformation[_],
accType: TypeInformation[_],
excludeAcc: Boolean,
dataViewSpec: Option[DataViewSpec],
consumeRetraction: Boolean,
filterArgs: ArrayBuffer[Int],
aggIndexes: ArrayBuffer[Int])
/**
 * The information contains all aggregate infos, including the input count information.
*
* @param aggInfos the information about every aggregates
* @param count1AggIndex None if input count is not needed, otherwise is needed and the index
* represents the count1 index
* @param count1AggInserted true when the count1 is inserted into agg list,
* false when the count1 is already existent in agg list.
* @param distinctInfos the distinct information, empty if all the aggregates are not distinct
*/
case class AggregateInfoList(
aggInfos: Array[AggregateInfo],
count1AggIndex: Option[Int],
count1AggInserted: Boolean,
distinctInfos: Array[DistinctInfo]) {
def getAggNames: Array[String] = aggInfos.map(_.agg.getName)
def getAccTypes: Array[TypeInformation[_]] = {
aggInfos.flatMap(_.externalAccTypes) ++ distinctInfos.filter(!_.excludeAcc).map(_.accType)
}
def getActualAggregateCalls: Array[AggregateCall] = {
getActualAggregateInfos.map(_.agg)
}
def getActualFunctions: Array[UserDefinedFunction] = {
getActualAggregateInfos.map(_.function)
}
def getActualValueTypes: Array[TypeInformation[_]] = {
getActualAggregateInfos.map(_.externalResultType)
}
def getCount1AccIndex: Option[Int] = {
if (count1AggIndex.nonEmpty) {
var accOffset = 0
aggInfos.indices.foreach { i =>
if (i < count1AggIndex.get) {
accOffset += aggInfos(i).externalAccTypes.length
}
}
Some(accOffset)
} else {
None
}
}
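  // Worked example (illustrative, not from the original source): with count1AggIndex =
  // Some(1) and aggInfos(0) declaring two external accumulator fields, the count1
  // accumulator starts at flat offset 2, because the accumulator widths of all
  // aggregates placed before the count1 aggregate are summed up.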
def getActualAggregateInfos: Array[AggregateInfo] = {
if (count1AggIndex.nonEmpty && count1AggInserted) {
// need input count agg and the count1 is inserted,
// which means the count1 shouldn't be calculated in value
aggInfos.zipWithIndex
.filter { case (_, index) => index != count1AggIndex.get }
.map { case (aggInfo, _) => aggInfo }
} else {
aggInfos
}
}
}
| ueshin/apache-flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/util/aggregation.scala | Scala | apache-2.0 | 5,174 |
package eu.timepit.refined.shapeless.typeable
import eu.timepit.refined.W
import eu.timepit.refined.api.{Refined, RefinedTypeOps}
import eu.timepit.refined.string.MatchesRegex
import eu.timepit.refined.types.numeric.PosInt
import org.scalacheck.Prop._
import org.scalacheck.Properties
import shapeless.Typeable
class TypeableSpec extends Properties("shapeless") {
property("Typeable cast success") = secure {
val value: PosInt = PosInt.unsafeFrom(5)
typeableCast[PosInt](5) ?= Some(value)
}
property("Typeable cast fail") = secure {
typeableCast[PosInt](0) ?= None
}
property("Typeable describe") = secure {
typeableDescribe[PosInt] ?= "Refined[Int, Greater[_0]]"
}
property("Typeable cast success string regex") = secure {
type Word = String Refined MatchesRegex[W.`"[a-zA-Z]*"`.T]
object Word extends RefinedTypeOps[Word, String]
val value: Word = Word.unsafeFrom("AlloweD")
typeableCast[Word]("AlloweD") ?= Some(value)
}
property("Typeable cast fail string regex") = secure {
type Word = String Refined MatchesRegex[W.`"[a-zA-Z]*"`.T]
typeableCast[Word]("Not Allowed") ?= None
}
property("Typeable string regex describe") = secure {
type Word = String Refined MatchesRegex[W.`"[a-zA-Z]*"`.T]
typeableDescribe[Word] ?= """Refined[String, MatchesRegex[String([a-zA-Z]*)]]"""
}
private def typeableDescribe[T](implicit T: Typeable[T]): String = T.describe
private def typeableCast[T](value: Any)(implicit T: Typeable[T]): Option[T] = T.cast(value)
}
| fthomas/refined | modules/shapeless/shared/src/test/scala/eu/timepit/refined/shapeless/typeable/TypeableSpec.scala | Scala | mit | 1,537 |
package scalaz.validations
import org.joda.time.DateTime
import org.scalatest.{FlatSpec, Matchers}
import scalaz._
import com.scalaz.model.{Destination, Trip}
import com.scalaz.validation._
class ValidationSpec extends FlatSpec with Matchers {
private val fromDate = new DateTime("2015-02-03T14:15:00.000+08:00")
private val toDate = new DateTime("2015-02-10T14:15:00.000+08:00")
it should "return an error if wrong dateformat is entered" in {
val Failure(result) = Rules.validateDate("12-10-2015")
result.message shouldBe "Incorrect date format"
}
it should "return the validated date if the validation is success" in {
val Success(result) = Rules.validateDate("12/10/2015")
result.toString("dd/mm/yyyy") shouldBe "12/10/2015"
}
it should "return errors if validation fails" in {
val trip = Trip(1, 1, fromDate, toDate, Seq.empty[Destination], "", DateTime.now)
val Failure(result) = Validators.validateTrip(trip)
result.size shouldBe 2
result.head.message shouldBe "Empty description"
result.last.message shouldBe "There should be atleast one destination"
}
it should "return a valid trip object is the validation is successful" in {
val destinations = Seq(
Destination(1, "Amsterdam", "AMS", "Netherlands"),
Destination(2, "Barcelona", "BCN", "Spain")
)
val trip = Trip(1, 1, fromDate, toDate, destinations, "Summer holidays", DateTime.now)
val Success(result) = Validators.validateTrip(trip)
result shouldBe trip
}
}
| rvijayan/scalaz-validations-example | src/test/scala/scalaz/validations/ValidationSpec.scala | Scala | mit | 1,523 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.random
/**
* A class with pseudorandom behavior.
*/
trait Pseudorandom {
/** Set random seed. */
def setSeed(seed: Long)
}
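// A minimal illustrative implementation (hypothetical, not part of Spark's sources):
//
//   class ReseedableRng extends java.util.Random with Pseudorandom {
//     override def setSeed(seed: Long): Unit = super.setSeed(seed)
//   }
//
// The intent is that random sources used by sampling code can be re-seeded
// deterministically between runs.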
| sryza/spark | core/src/main/scala/org/apache/spark/util/random/Pseudorandom.scala | Scala | apache-2.0 | 962 |
package com.seanshubin.todo.application.domain
import org.scalatest.FunSuite
/*
test-driven-003
Once we got the JettyRunnerTest working, we tried to wire it into the entry point
This made us notice that JettyRunner depends on a port, so now we have to add code that configures the port
Although you can see more settings now, this started with only validating the port
*/
class CommandLineArgumentsValidatorTest extends FunSuite {
test("valid configuration") {
//given
val commandLineArguments = Seq("12345", "host", "23456", "file-exists")
val filesThatExist = Set("file-exists")
val validator = createValidator(commandLineArguments, filesThatExist)
//when
val configuration = validator.validate()
//then
assert(configuration.port === 12345)
assert(configuration.databaseApiHost === "host")
assert(configuration.databaseApiPort === 23456)
}
test("server port is required") {
//given
val commandLineArguments = Seq()
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 0, expected 'server port', was missing")
}
test("server port mut be an integer") {
//given
val commandLineArguments = Seq("blah")
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 0, unable to convert value for 'server port' to an integer, got 'blah'")
}
test("database api host is required") {
//given
val commandLineArguments = Seq("12345")
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 1, expected 'database api host', was missing")
}
test("database api port is required") {
//given
val commandLineArguments = Seq("12345", "host")
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 2, expected 'database api port', was missing")
}
test("database api port is must be an integer") {
//given
val commandLineArguments = Seq("12345", "host", "blah")
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 2, unable to convert value for 'database api port' to an integer, got 'blah'")
}
test("serve from directory must exist") {
//given
val commandLineArguments = Seq("12345", "host", "23456", "file-does-not-exist")
val validator = createValidator(commandLineArguments)
//when
val exception = intercept[RuntimeException] {
validator.validate()
}
//then
assert(exception.getMessage === "In command line arguments at position 3, value for 'serve from directory' must be a path that exists, got 'file-does-not-exist'")
}
def createValidator(commandLineArguments: Seq[String], filesThatExist: Set[String] = Set()): CommandLineArgumentsConfigurationValidator = {
val filesStub = new FilesStub(filesThatExist)
new CommandLineArgumentsConfigurationValidator(commandLineArguments, filesStub)
}
}
| SeanShubin/todo-application | domain/src/test/scala/com/seanshubin/todo/application/domain/CommandLineArgumentsValidatorTest.scala | Scala | unlicense | 3,659 |
package scuff.concurrent
import org.junit._, Assert._
import scala.concurrent.duration._
import scala.util.Try
import scuff.Numbers
class TestFailureBackoff {
  def fibonacciBackoff(maxBackoff: FiniteDuration) =
Numbers.fibonacci.view.dropWhile(_ < 2).map(_.seconds).takeWhile(_ <= maxBackoff)
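  // For example (illustrative, assuming scuff.Numbers.fibonacci yields the usual
  // 1, 1, 2, 3, 5, 8, ... sequence): fibonacciBackoff(2.minutes) produces the schedule
  // 2, 3, 5, 8, 13, 21, 34, 55, 89 seconds - every Fibonacci value from 2 up to the
  // last one that still fits under the two-minute cap.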
private var errors: List[Throwable] = null
@Before
def setup() = errors = Nil
def reportError(th: Throwable): Unit = errors = th :: errors
@Test(expected = classOf[IllegalArgumentException])
def `no backoff schedule`(): Unit = {
val ft = new FailureTracker(3, reportError, Nil)
fail(s"Should have failed: $ft")
}
@Test(expected = classOf[IllegalArgumentException])
def `zero failure count threshold`(): Unit = {
val ft = new FailureTracker(0, reportError, List(2.minutes))
fail(s"Should have failed: $ft")
}
@Test
def `finite backoff schedule`(): Unit = {
val error = Try(sys.error("oh no")).failed.get
val threshold = 3
    val backoffSchedule = fibonacciBackoff(2.minutes)
val scheduleLength = backoffSchedule.size
val ft = new FailureTracker(threshold, reportError, backoffSchedule)
assertEquals(0, ft.failureCount)
assertFalse(ft.isTripped)
assertEquals(Duration.Zero, ft.timeout)
val backoffIterator = backoffSchedule.iterator
var currTimeout = backoffIterator.next()
(1 to scheduleLength + 5) foreach { n =>
ft reportFailure error
assertEquals(n, ft.failureCount)
if (ft.failureCount < threshold) {
assertFalse(ft.isTripped)
assertEquals(Duration.Zero, ft.timeout)
} else {
assertTrue(ft.isTripped)
assertEquals(currTimeout, ft.timeout)
if (backoffIterator.hasNext) currTimeout = backoffIterator.next()
}
}
assertEquals(threshold, errors.size)
errors.foreach { err =>
assertSame(error, err)
}
ft.reset()
assertEquals(0, ft.failureCount)
assertFalse(ft.isTripped)
assertEquals(Duration.Zero, ft.timeout)
}
}
| nilskp/scuff | src/test/scala/scuff/concurrent/TestFailureTracker.scala | Scala | mit | 2,015 |
package me.invkrh.cmdchat.util
/**
* Created with IntelliJ IDEA.
* User: invkrh
* Date: 4/7/15
* Time: 9:29 PM
*/
trait Display {
def getPrompt(name: String) = {
s"me ($name) > "
}
// cursor in the next line
def response(txt: String, prompt: String = "") = {
print(s"\\n[ $txt ]\\n\\n" + prompt)
}
// cursor in the current line
def notification(sdr: String, txt: String, prompt: String = "") = {
print(s"\\n\\n[ $sdr > $txt ]\\n\\n" + prompt)
}
// cursor in the current line
def incomingMsg(sdr: String, txt: String, prompt: String = "") = {
print(s"\\n|\\n|\\t\\t\\t\\t\\t\\t\\t\\t\\t\\t[ $sdr > $txt ]\\n|\\n" + prompt)
}
}
| invkrh/akka-spray-playground | cmdchat/src/main/scala/me/invkrh/cmdchat/util/Display.scala | Scala | mit | 655 |
package api.dto
import com.wordnik.swagger.annotations.{ApiModel, ApiModelProperty}
import spray.json.DefaultJsonProtocol
import scala.annotation.meta.field
@ApiModel(description = "Prediction enqueue for potential tweets")
case class PredictionDto(
@(ApiModelProperty @field)(required = true, value = "Tweet to predict (140 chars)")
tweet: String,
@(ApiModelProperty @field)(required = true, value = "Screen name of the tweets author")
author: String)
object PredictionDto extends DefaultJsonProtocol{
implicit val predictionDtoFormat = jsonFormat2(PredictionDto.apply)
}
| twitterist/backend | src/main/scala/api/dto/PredictionDto.scala | Scala | mit | 660 |
/*
,i::,
:;;;;;;;
;:,,::;.
1ft1;::;1tL
t1;::;1,
:;::; _____ __ ___ __
fCLff ;:: tfLLC / ___/ / |/ /____ _ _____ / /_
CLft11 :,, i1tffLi \\__ \\ ____ / /|_/ // __ `// ___// __ \\
1t1i .;; .1tf ___/ //___// / / // /_/ // /__ / / / /
CLt1i :,: .1tfL. /____/ /_/ /_/ \\__,_/ \\___//_/ /_/
Lft1,:;: , 1tfL:
;it1i ,,,:::;;;::1tti AeonDB
.t1i .,::;;; ;1tt Copyright (c) 2014 S-Mach, Inc.
Lft11ii;::;ii1tfL: Author: [email protected]
.L1 1tt1ttt,,Li
...1LLLL...
*/
package s_mach.aeondb
import s_mach.aeondb.impl.CommitBuilder
import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.{FlatSpec, Matchers}
import s_mach.concurrent._
class AeonMapFutureTest extends FlatSpec with Matchers {
implicit val metadata = Metadata(
who = "test",
why = Some("test")
)
"AeonMapTest.future" must "put" in {
val m = Map(1 -> "a", 2 -> "b")
val p = AeonMap(m.toSeq:_*)
p.future { f =>
f.put(3,"c").future
}.get should equal(true)
p.now.toMap.get should equal(m + (3 -> "c"))
p.zomCommit.get should equal(
(CommitBuilder().put(3,"c").result()._2,metadata) ::
Nil
)
}
"AeonMap.now" must "replace" in {
val m = Map(1 -> "a", 2 -> "b")
val p = AeonMap(m.toSeq:_*)
p.now.replace(1,"aa").get should equal(true)
p.now.replace(3,"cc").get should equal(false)
p.now.toMap.get should equal(m - 1 + (1 -> "aa"))
p.zomCommit.get should equal(
(CommitBuilder().replace(1,Some("aa"),1).result()._2,metadata) ::
Nil
)
}
"AeonMap.future" must "deactivate" in {
val m = Map(1 -> "a", 2 -> "b")
val p = AeonMap(m.toSeq:_*)
p.future { f =>
f.deactivate(1).future
}.get should equal(true)
p.now.toMap.get should equal(m - 1)
p.now.deactivate(1).get should equal(false)
p.now.deactivate(3).get should equal(false)
p.zomCommit.get should equal(
(CommitBuilder().deactivate(1,1).result()._2,metadata) ::
Nil
)
}
"AeonMap.future" must "reactivate" in {
val m = Map(1 -> "a", 2 -> "b")
val p = AeonMap(m.toSeq:_*)
p.now.deactivate(1).get should equal(true)
p.now.toMap.get should equal(m - 1)
p.future { f =>
f.reactivate(1,"aa").future
}.get should equal(true)
p.now.toMap.get should equal(m - 1 +(1 -> "aa"))
p.now.reactivate(1,"aaa").get should equal(false)
p.now.reactivate(3,"c").get should equal(false)
p.zomCommit.get should equal(
(CommitBuilder().reactivate(1,"aa",2).result()._2,metadata) ::
(CommitBuilder().deactivate(1,1).result()._2,metadata) ::
Nil
)
}
"AeonMap.future" must "allow combining all operations into one commit" in {
val m = Map(1 -> "a", 2 -> "b", 3 -> "c")
val p = AeonMap(m.toSeq:_*)
p.now.deactivate(1).get should equal(true)
p.now.toMap.get should equal(m - 1)
p.future { _
.put(4,"d")
.replace(3,"cc")
.deactivate(2)
.reactivate(1,"aa").future
}.get should equal(true)
p.now.toMap.get should equal(Map(1 -> "aa", 3 -> "cc", 4 -> "d"))
p.zomCommit.get should equal(
(
CommitBuilder()
.put(4,"d")
.replace(3,Some("cc"),1)
.deactivate(2,1)
.reactivate(1,"aa",2)
.result()._2,
metadata
) ::
(CommitBuilder().deactivate(1,1).result()._2,metadata) ::
Nil
)
}
}
| S-Mach/aeondb | src/test/scala/s_mach/aeondb/AeonMapFutureTest.scala | Scala | apache-2.0 | 3,653 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import cats.Order
import monix.execution.Ack
import monix.execution.Ack.{Continue, Stop}
import scala.util.control.NonFatal
import monix.reactive.Observable.Operator
import monix.reactive.observers.Subscriber
/**
* Common implementation for `minF`, `minByF`, `maxF`, `maxByF`.
*/
private[reactive] abstract class SearchByOrderOperator[A, K]
(key: A => K)(implicit B: Order[K]) extends Operator[A,A] {
def shouldCollect(key: K, current: K): Boolean
final def apply(out: Subscriber[A]): Subscriber.Sync[A] =
new Subscriber.Sync[A] {
implicit val scheduler = out.scheduler
private[this] var isDone = false
private[this] var minValue: A = _
private[this] var minValueU: K = _
private[this] var hasValue = false
def onNext(elem: A): Ack = {
try {
if (!hasValue) {
hasValue = true
minValue = elem
minValueU = key(elem)
} else {
val m = key(elem)
if (shouldCollect(m, minValueU)) {
minValue = elem
minValueU = m
}
}
Continue
} catch {
case ex if NonFatal(ex) =>
onError(ex)
Stop
}
}
def onError(ex: Throwable): Unit =
if (!isDone) {
isDone = true
out.onError(ex)
}
def onComplete(): Unit =
if (!isDone) {
isDone = true
if (!hasValue)
out.onComplete()
else {
out.onNext(minValue)
out.onComplete()
}
}
}
}
private[reactive] final class MinOperator[A](implicit A: Order[A])
extends SearchByOrderOperator[A,A](identity)(A) {
def shouldCollect(key: A, current: A): Boolean =
A.compare(key, current) < 0
}
private[reactive] final class MinByOperator[A, K](f: A => K)
(implicit K: Order[K])
extends SearchByOrderOperator[A, K](f)(K) {
def shouldCollect(key: K, current: K): Boolean =
K.compare(key, current) < 0
}
private[reactive] final class MaxOperator[A](implicit A: Order[A])
extends SearchByOrderOperator[A,A](identity)(A) {
def shouldCollect(key: A, current: A): Boolean =
A.compare(key, current) > 0
}
private[reactive] final class MaxByOperator[A, K](f: A => K)
(implicit K: Order[K])
extends SearchByOrderOperator[A, K](f)(K) {
def shouldCollect(key: K, current: K): Boolean =
K.compare(key, current) > 0
} | Wogan/monix | monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/SearchByOrderOperator.scala | Scala | apache-2.0 | 3,171 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import akka.actor._
import com.google.common.io.ByteStreams
import java.io.{ File, IOException }
import java.util.jar.JarFile
import org.ensime.api._
class DocResolver(
prefix: String,
forceJavaVersion: Option[String] // for testing
)(
implicit
config: EnsimeConfig
) extends Actor with ActorLogging with DocUsecaseHandling {
var htmlToJar = Map.empty[String, File]
var jarNameToJar = Map.empty[String, File]
var docTypes = Map.empty[String, DocType]
sealed trait DocType
case object Javadoc extends DocType
case object Javadoc8 extends DocType
case object Scaladoc extends DocType
// In javadoc docs, index.html has a comment that reads 'Generated by javadoc'
private val JavadocComment = """Generated by javadoc (?:\(([0-9\.]+))?""".r.unanchored
override def preStart(): Unit = {
// On initialisation, do a fast scan (< 1s for 50 jars) to determine
// the package contents of each jar, and whether it's a javadoc or
// scaladoc.
for (
jarFile <- config.allDocJars if jarFile.exists()
) {
try {
val jar = new JarFile(jarFile)
val jarFileName = jarFile.getName
jarNameToJar += jarFileName -> jarFile
docTypes += (jarFileName -> Scaladoc)
val enumEntries = jar.entries()
while (enumEntries.hasMoreElements) {
val entry = enumEntries.nextElement()
if (!entry.isDirectory) {
val f = new File(entry.getName)
val dir = f.getParent
if (dir != null) {
htmlToJar += entry.getName -> jarFile
}
// Check for javadocs
if (entry.getName == "index.html") {
val bytes = ByteStreams.toByteArray(jar.getInputStream(entry))
new String(bytes) match {
case JavadocComment(version: String) if version.startsWith("1.8") =>
docTypes += jarFileName -> Javadoc8
case JavadocComment(_*) =>
docTypes += jarFileName -> Javadoc
case _ =>
}
}
}
}
} catch {
case e: IOException =>
// continue regardless
log.error("Failed to process doc jar: " + jarFile.getName, e)
}
}
}
private def javaFqnToPath(fqn: DocFqn): String = {
if (fqn.typeName == "package") {
fqn.pack.replace(".", "/") + "/package-summary.html"
} else {
fqn.pack.replace(".", "/") + "/" + fqn.typeName + ".html"
}
}
def scalaFqnToPath(fqn: DocFqn): String = {
if (fqn.typeName == "package") {
fqn.pack.replace(".", "/") + "/package.html"
} else fqn.pack.replace(".", "/") + "/" + fqn.typeName + ".html"
}
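  // For example (illustrative): a DocFqn with pack "scala.collection" and typeName "Seq"
  // maps to "scala/collection/Seq.html", while the package-level fqn (typeName "package")
  // maps to "scala/collection/package-summary.html" for javadoc and to
  // "scala/collection/package.html" for scaladoc.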
private def makeLocalUri(jar: File, sig: DocSigPair): String = {
val jarName = jar.getName
val docType = docTypes(jarName)
val java = docType == Javadoc || docType == Javadoc8
if (java) {
val path = javaFqnToPath(sig.java.fqn)
val anchor = sig.java.member.map { s =>
"#" + { if (docType == Javadoc8) toJava8Anchor(s) else s }
}.getOrElse("")
s"$prefix/$jarName/$path$anchor"
} else {
val scalaSig = maybeReplaceWithUsecase(jar, sig.scala)
val anchor = scalaSig.fqn.mkString +
scalaSig.member.map("@" + _).getOrElse("")
s"$prefix/$jarName/index.html#$anchor"
}
}
private val PackRegexp = """^((?:[a-z0-9]+\.)+)""".r
private def guessJar(sig: DocSigPair): Option[(File, DocSigPair)] = {
val scalafqn = scalaFqnToPath(sig.scala.fqn)
val javafqn = javaFqnToPath(sig.java.fqn)
val scala = htmlToJar.get(scalafqn).map((_, sig))
val scala2 = scala.orElse(
htmlToJar.get(scalafqn.replace("$.html", ".html")).map({ file =>
        // Documentation for Object doesn't exist but documentation for Class does
val typeName = sig.scala.fqn.typeName.replaceFirst("\\$$", "")
val sigOfClass = sig.copy(scala = sig.scala.copy(fqn = sig.scala.fqn.copy(typeName = typeName)))
(file, sigOfClass)
})
)
scala2.orElse(htmlToJar.get(javafqn).map((_, sig)))
}
private def resolveLocalUri(sig: DocSigPair): Option[String] = {
guessJar(sig) match {
case Some((jar, sig)) =>
Some(makeLocalUri(jar, sig))
case _ =>
log.debug(s"Failed to resolve doc jar for: $sig")
None
}
}
// Javadoc 8 changed the anchor format to remove illegal
// url characters: parens, commas, brackets.
// See https://bugs.eclipse.org/bugs/show_bug.cgi?id=432056
// and https://bugs.openjdk.java.net/browse/JDK-8025633
private val Java8Chars = """(?:,|\(|\)|\[\])""".r
private def toJava8Anchor(anchor: String): String = {
Java8Chars.replaceAllIn(anchor, { m =>
anchor(m.start) match {
case ',' => "-"
case '(' => "-"
case ')' => "-"
case '[' => ":A"
}
})
}
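  // For example (illustrative): the pre-Java-8 anchor "indexOf(java.lang.String,int)"
  // becomes "indexOf-java.lang.String-int-", and "copyOf(int[],int)" becomes
  // "copyOf-int:A-int-" under this rewrite.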
private def toAndroidAnchor(anchor: String): String = anchor.replace(",", ", ")
private def resolveWellKnownUri(sig: DocSigPair): Option[String] = {
if (sig.java.fqn.javaStdLib) {
val path = javaFqnToPath(sig.java.fqn)
val rawVersion = forceJavaVersion.getOrElse(scala.util.Properties.javaVersion)
val version =
if (rawVersion.startsWith("1.8")) "8" else if (rawVersion.startsWith("1.7")) "7" else "6"
val anchor = sig.java.member.map {
m => "#" + { if (version == "8") toJava8Anchor(m) else m }
}.getOrElse("")
Some(s"http://docs.oracle.com/javase/$version/docs/api/$path$anchor")
} else if (sig.java.fqn.androidStdLib) {
val path = javaFqnToPath(sig.java.fqn)
val anchor = sig.java.member.map { m => "#" + toAndroidAnchor(m) }.getOrElse("")
Some(s"http://developer.android.com/reference/$path$anchor")
} else None
}
def resolve(sig: DocSigPair): Option[String] = resolveLocalUri(sig) orElse resolveWellKnownUri(sig)
// for java stuff, really
def resolve(sig: DocSig): Option[String] = resolve(DocSigPair(sig, sig))
def receive: Receive = {
case p: DocSigPair =>
val response = resolve(p) match {
case Some(path) => StringResponse(path)
case None => FalseResponse
}
sender() ! response
}
}
object DocResolver {
def apply(
prefix: String = "docs",
java: Option[String] = None
)(
implicit
config: EnsimeConfig
): Props = Props(classOf[DocResolver], prefix, java, config)
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/core/src/main/scala/org/ensime/core/DocResolver.scala | Scala | apache-2.0 | 6,584 |
package liang.don.dzviewer.log.java
import liang.don.dzviewer.log.{LogLevel, LoggerInterface}
/**
* File Logger using Java I/O libraries.
*
* @author Don Liang
* @Version 0.1.2, 16/09/2011
*/
trait FileLogger extends LoggerInterface {
override def log(message: String) {
// TODO
}
override def log(message: String, logLevel: LogLevel.Value) {
// TODO
}
override def log(message: String, logLevel: LogLevel.Value, exception: Exception) {
// TODO
}
}
| dl2k84/DeepZoomViewer | src/liang/don/dzviewer/log/java/FileLogger.scala | Scala | mit | 484 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.check.body
import io.gatling.core.check.CheckMaterializer
import io.gatling.core.check.substring.SubstringCheckType
import io.gatling.http.check.{ HttpCheck, HttpCheckMaterializer }
import io.gatling.http.check.HttpCheckBuilders._
import io.gatling.http.check.HttpCheckScope.Body
import io.gatling.http.response.Response
object HttpBodySubstringCheckMaterializer {
val Instance: CheckMaterializer[SubstringCheckType, HttpCheck, Response, String] =
new HttpCheckMaterializer[SubstringCheckType, String](Body, ResponseBodyStringPreparer)
}
| gatling/gatling | gatling-http/src/main/scala/io/gatling/http/check/body/HttpBodySubstringCheckMaterializer.scala | Scala | apache-2.0 | 1,190 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.Properties
import org.scalatest.FunSuite
import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext}
/**
* Tests that pools and the associated scheduling algorithms for FIFO and fair scheduling work
* correctly.
*/
class PoolSuite extends FunSuite with LocalSparkContext {
def createTaskSetManager(stageId: Int, numTasks: Int, taskScheduler: TaskSchedulerImpl)
: TaskSetManager = {
val tasks = Array.tabulate[Task[_]](numTasks) { i =>
new FakeTask(i, Nil)
}
new TaskSetManager(taskScheduler, new TaskSet(tasks, stageId, 0, 0, null), 0)
}
def scheduleTaskAndVerifyId(taskId: Int, rootPool: Pool, expectedStageId: Int) {
val taskSetQueue = rootPool.getSortedTaskSetQueue
val nextTaskSetToSchedule =
taskSetQueue.find(t => (t.runningTasks + t.tasksSuccessful) < t.numTasks)
assert(nextTaskSetToSchedule.isDefined)
nextTaskSetToSchedule.get.addRunningTask(taskId)
assert(nextTaskSetToSchedule.get.stageId === expectedStageId)
}
test("FIFO Scheduler Test") {
sc = new SparkContext("local", "TaskSchedulerImplSuite")
val taskScheduler = new TaskSchedulerImpl(sc)
val rootPool = new Pool("", SchedulingMode.FIFO, 0, 0)
val schedulableBuilder = new FIFOSchedulableBuilder(rootPool)
schedulableBuilder.buildPools()
val taskSetManager0 = createTaskSetManager(0, 2, taskScheduler)
val taskSetManager1 = createTaskSetManager(1, 2, taskScheduler)
val taskSetManager2 = createTaskSetManager(2, 2, taskScheduler)
schedulableBuilder.addTaskSetManager(taskSetManager0, null)
schedulableBuilder.addTaskSetManager(taskSetManager1, null)
schedulableBuilder.addTaskSetManager(taskSetManager2, null)
scheduleTaskAndVerifyId(0, rootPool, 0)
scheduleTaskAndVerifyId(1, rootPool, 0)
scheduleTaskAndVerifyId(2, rootPool, 1)
scheduleTaskAndVerifyId(3, rootPool, 1)
scheduleTaskAndVerifyId(4, rootPool, 2)
scheduleTaskAndVerifyId(5, rootPool, 2)
}
/**
* This test creates three scheduling pools, and creates task set managers in the first
* two scheduling pools. The test verifies that as tasks are scheduled, the fair scheduling
* algorithm properly orders the two scheduling pools.
*/
test("Fair Scheduler Test") {
val xmlPath = getClass.getClassLoader.getResource("fairscheduler.xml").getFile()
val conf = new SparkConf().set("spark.scheduler.allocation.file", xmlPath)
sc = new SparkContext("local", "TaskSchedulerImplSuite", conf)
val taskScheduler = new TaskSchedulerImpl(sc)
val rootPool = new Pool("", SchedulingMode.FAIR, 0, 0)
val schedulableBuilder = new FairSchedulableBuilder(rootPool, sc.conf)
schedulableBuilder.buildPools()
// Ensure that the XML file was read in correctly.
assert(rootPool.getSchedulableByName("default") != null)
assert(rootPool.getSchedulableByName("1") != null)
assert(rootPool.getSchedulableByName("2") != null)
assert(rootPool.getSchedulableByName("3") != null)
assert(rootPool.getSchedulableByName("1").minShare === 2)
assert(rootPool.getSchedulableByName("1").weight === 1)
assert(rootPool.getSchedulableByName("2").minShare === 3)
assert(rootPool.getSchedulableByName("2").weight === 1)
assert(rootPool.getSchedulableByName("3").minShare === 0)
assert(rootPool.getSchedulableByName("3").weight === 1)
val properties1 = new Properties()
properties1.setProperty("spark.scheduler.pool","1")
val properties2 = new Properties()
properties2.setProperty("spark.scheduler.pool","2")
val taskSetManager10 = createTaskSetManager(0, 1, taskScheduler)
val taskSetManager11 = createTaskSetManager(1, 1, taskScheduler)
val taskSetManager12 = createTaskSetManager(2, 2, taskScheduler)
schedulableBuilder.addTaskSetManager(taskSetManager10, properties1)
schedulableBuilder.addTaskSetManager(taskSetManager11, properties1)
schedulableBuilder.addTaskSetManager(taskSetManager12, properties1)
val taskSetManager23 = createTaskSetManager(3, 2, taskScheduler)
val taskSetManager24 = createTaskSetManager(4, 2, taskScheduler)
schedulableBuilder.addTaskSetManager(taskSetManager23, properties2)
schedulableBuilder.addTaskSetManager(taskSetManager24, properties2)
// Pool 1 share ratio: 0. Pool 2 share ratio: 0. 1 gets scheduled based on ordering of names.
scheduleTaskAndVerifyId(0, rootPool, 0)
// Pool 1 share ratio: 1/2. Pool 2 share ratio: 0. 2 gets scheduled because ratio is lower.
scheduleTaskAndVerifyId(1, rootPool, 3)
// Pool 1 share ratio: 1/2. Pool 2 share ratio: 1/3. 2 gets scheduled because ratio is lower.
scheduleTaskAndVerifyId(2, rootPool, 3)
// Pool 1 share ratio: 1/2. Pool 2 share ratio: 2/3. 1 gets scheduled because ratio is lower.
scheduleTaskAndVerifyId(3, rootPool, 1)
// Pool 1 share ratio: 1. Pool 2 share ratio: 2/3. 2 gets scheduled because ratio is lower.
scheduleTaskAndVerifyId(4, rootPool, 4)
// Neither pool is needy so ordering is based on number of running tasks.
// Pool 1 running tasks: 2, Pool 2 running tasks: 3. 1 gets scheduled because fewer running
// tasks.
scheduleTaskAndVerifyId(5, rootPool, 2)
// Pool 1 running tasks: 3, Pool 2 running tasks: 3. 1 gets scheduled because of naming
// ordering.
scheduleTaskAndVerifyId(6, rootPool, 2)
// Pool 1 running tasks: 4, Pool 2 running tasks: 3. 2 gets scheduled because fewer running
// tasks.
scheduleTaskAndVerifyId(7, rootPool, 4)
}
test("Nested Pool Test") {
sc = new SparkContext("local", "TaskSchedulerImplSuite")
val taskScheduler = new TaskSchedulerImpl(sc)
val rootPool = new Pool("", SchedulingMode.FAIR, 0, 0)
val pool0 = new Pool("0", SchedulingMode.FAIR, 3, 1)
val pool1 = new Pool("1", SchedulingMode.FAIR, 4, 1)
rootPool.addSchedulable(pool0)
rootPool.addSchedulable(pool1)
val pool00 = new Pool("00", SchedulingMode.FAIR, 2, 2)
val pool01 = new Pool("01", SchedulingMode.FAIR, 1, 1)
pool0.addSchedulable(pool00)
pool0.addSchedulable(pool01)
val pool10 = new Pool("10", SchedulingMode.FAIR, 2, 2)
val pool11 = new Pool("11", SchedulingMode.FAIR, 2, 1)
pool1.addSchedulable(pool10)
pool1.addSchedulable(pool11)
val taskSetManager000 = createTaskSetManager(0, 5, taskScheduler)
val taskSetManager001 = createTaskSetManager(1, 5, taskScheduler)
pool00.addSchedulable(taskSetManager000)
pool00.addSchedulable(taskSetManager001)
val taskSetManager010 = createTaskSetManager(2, 5, taskScheduler)
val taskSetManager011 = createTaskSetManager(3, 5, taskScheduler)
pool01.addSchedulable(taskSetManager010)
pool01.addSchedulable(taskSetManager011)
val taskSetManager100 = createTaskSetManager(4, 5, taskScheduler)
val taskSetManager101 = createTaskSetManager(5, 5, taskScheduler)
pool10.addSchedulable(taskSetManager100)
pool10.addSchedulable(taskSetManager101)
val taskSetManager110 = createTaskSetManager(6, 5, taskScheduler)
val taskSetManager111 = createTaskSetManager(7, 5, taskScheduler)
pool11.addSchedulable(taskSetManager110)
pool11.addSchedulable(taskSetManager111)
scheduleTaskAndVerifyId(0, rootPool, 0)
scheduleTaskAndVerifyId(1, rootPool, 4)
scheduleTaskAndVerifyId(2, rootPool, 6)
scheduleTaskAndVerifyId(3, rootPool, 2)
}
}
| Dax1n/spark-core | core/src/test/scala/org/apache/spark/scheduler/PoolSuite.scala | Scala | apache-2.0 | 8,220 |
/*
* NodeImpl.scala
* (Cord)
*
* Copyright (c) 2015-2020 Hanns Holger Rutz.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.cord
package impl
import scala.collection.mutable
trait NodeImpl {
node: Node =>
object state extends State with ModelImpl[State.Update] {
private val map = mutable.Map.empty[String, Any]
def put(key: String, value: Any): Unit =
map.put(key, value).fold {
dispatch(State.Added(node, key = key, value = value))
} { oldValue =>
if (value != oldValue) dispatch(State.Changed(node, key = key, before = oldValue, now = value))
}
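    // Behaviour sketch (illustrative): the first put("freq", 440.0) dispatches
    // State.Added; a later put("freq", 220.0) dispatches State.Changed with both the
    // old and new values; re-putting an identical value dispatches nothing.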
def get(key: String): Option[Any] = map.get(key)
def remove(key: String): Unit =
map.remove(key).foreach { oldValue =>
dispatch(State.Removed(node, key = key, value = oldValue))
}
}
def dispose(): Unit = ()
}
| Sciss/Cord | src/main/scala/de/sciss/cord/impl/NodeImpl.scala | Scala | lgpl-2.1 | 997 |
package scalydomain
import java.io.File
import java.util.concurrent.{BlockingQueue, LinkedBlockingQueue}
import scala.concurrent._
import scala.concurrent.duration._
import scala.collection.JavaConversions._
import scala.collection.mutable.SortedSet
import scala.io.Source
import scala.math.pow
import scala.util.matching.Regex
import ExecutionContext.Implicits.global
import scalydomain.core.DomainDb
import scalydomain.core.ModelDbReader
import scalydomain.core.MarkovChainGenerator
case class CliOptions(domainDbFile: File = new File("."),
modelDbFile: Option[File] = None,
wordListFile: Option[File] = None,
prefix: String = "",
maxLength: Int = -1,
domainsToGenerate: Int = 20,
pattern: Option[Regex] = None,
includeWords: String = "",
sort: Boolean = false)
object Generate {
def compilePattern(pattern: String) = {
val re = pattern.map { char =>
char match {
case 'L' => "[a-z]"
case 'N' => "\\\\d"
case 'V' => "[aeiouy]" //TODO: Is there a way to use Unicode char class to make this work for other writing systems?
case 'C' => "[^aeiou]"
case x => x
}
}.mkString
println(s"Limiting domains to those matching regex $re")
Some(new Regex("^" + re + "$"))
}
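  // Illustrative example (not in the original source): compilePattern("CVCV") expands to
  // the regex ^[^aeiou][aeiouy][^aeiou][aeiouy]$, so only four-letter
  // consonant/vowel/consonant/vowel names such as "kodo" or "zila" survive the filter.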
def main(args: Array[String]): Unit = {
val optParser = new scopt.OptionParser[CliOptions]("generate") {
head("generate", "SNAPSHOT")
opt[File]('d', "domaindb") required() action { (x, c) =>
c.copy(domainDbFile = x) } text("Path to domain database file which contains list of taken domain names")
opt[File]('m', "modeldb") optional() action { (x, c) =>
c.copy(modelDbFile = Some(x)) } text("Use the Markov model at this location to generate domains")
opt[File]('w', "wordlist") optional() action { (x, c) =>
c.copy(wordListFile = Some(x)) } text("Use the Markov model at this location to generate domains")
opt[String]('p', "prefix") optional() action { (x, c) =>
c.copy(prefix = x) } text("Generate only domain names that start with this prefix")
opt[Int]('l', "maxlength") optional() action { (x, c) =>
c.copy(maxLength = x) } text("Generate only domain names that are no longer than this")
opt[Int]('c', "count") optional() action { (x, c) =>
c.copy(domainsToGenerate = x) } text("Generate this many domains")
opt[String]('f', "pattern") optional() action { (x, c) =>
c.copy(pattern = compilePattern(x)) } text("Generate domains that match this pattern (LNCV plus regex)")
opt[String]('i', "include") optional() action { (x, c) =>
c.copy(includeWords = x) } text("Include these words in the generated output")
opt[Unit]('s', "sort") optional() action { (x, c) =>
c.copy(sort = true) } text("Sort output with highest score first")
checkConfig { c =>
(c.modelDbFile, c.wordListFile) match {
case (Some(_), None) | (None, Some(_)) => success
case (Some(_), Some(_)) => failure("either a modeldb or wordlist must be specified, but not both")
case (None, None) => failure("must specify either a modeldb or wordlist")
}
}
}
val config = optParser.parse(args, CliOptions()).get
val domainDb = new DomainDb(config.domainDbFile.getPath())
val markov = config.modelDbFile match {
case Some(modelDbFile) => {
val modelDb = new ModelDbReader(modelDbFile.getPath())
Some(new MarkovChainGenerator(modelDb))
}
case None => None
}
val generatedNames = SortedSet[String]()
try {
println("Generating domain names")
val domainHose = config.includeWords.split(",").filter(_.length > 0).toStream #::: wordlistGenerator(config) #::: markovGenerator(config, markov)
val acceptableDomains = domainHose.filter(acceptableDomain(config, domainDb, generatedNames, _))
val domainsWithScores = acceptableDomains.map { domain =>
val p: Array[Double] = markov match {
case Some(markovGenerator) => markovGenerator.computeCharacterProbabilities(domain).toArray
case None => Array()
}
val scores = p.sorted
val charProbabilities = (domain+"$").zip(p).map { case (c, prob) => f"P($c)=$prob%4f" }.mkString(",")
(domain, scores, charProbabilities)
}
val ordering = new Ordering[(String, Array[Double], String)] {
def compare(x: (String, Array[Double], String), y: (String, Array[Double], String)) : Int = {
x._2.zip(y._2).dropWhile(tuple => tuple._1 == tuple._2).headOption match {
case Some((lhs, rhs)) => Ordering[Double].compare(rhs, lhs)
case None => Ordering[Int].compare(y._2.length, x._2.length)
}
}
}
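      // Intuition (illustrative): the per-character probabilities are sorted ascending,
      // so compare walks both score arrays from each domain's weakest character upward
      // and ranks the domain whose weakest characters are more probable first; when one
      // array is a prefix of the other, the tie is broken in favour of the longer one.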
val domainOutput = config.sort match {
case true => domainsWithScores.take(config.domainsToGenerate).toArray.sorted(ordering).toStream
case false => domainsWithScores.take(config.domainsToGenerate).toStream
}
domainOutput.foreach { case (domain, _, charProbabilities) =>
generatedNames += domain
println(f"\\t$domain\\t$charProbabilities")
}
} finally {
domainDb.close
}
}
def markovGenerator(config: CliOptions, markov: Option[MarkovChainGenerator]): Stream[String] = {
markov match {
case Some(markovGenerator) => Stream.continually { markovGenerator.generate(config.maxLength, config.prefix) }
case None => Stream.empty
}
}
def wordlistGenerator(config: CliOptions): Stream[String] = {
config.wordListFile match {
case Some(wordListFile) => {
//Only include lines that are valid domain names, meaning no whitespace or punctuation
var re = """^\\w([\\w\\-]*\\w)?$""".r
val lines = for (line <- Source.fromFile(wordListFile).getLines) yield line.toLowerCase
lines.collect { line =>
line match {
case re(_*) if (config.maxLength == -1 || config.maxLength >= config.prefix.length + line.length) && line.length > 1 => config.prefix + line
}
}.toStream
}
case None => Stream.empty
}
}
def acceptableDomain(config: CliOptions, domainDb: DomainDb, generatedNames: SortedSet[String], domain: String) = {
(
!domainDb.domainExists(domain) &&
!generatedNames.contains(domain) &&
(config.pattern match {
case Some(re) => !re.findPrefixOf(domain).isEmpty
case None => true
})
)
}
}
| anelson/scalydomain | generate/main.scala | Scala | apache-2.0 | 6,171 |
/*
* Copyright (c) 2012-2017 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.core
package typeclasses
/**
 * Type class to render data into its base type `D`
 * and the lowest-level (`String`) form common for all apps
*
* @tparam D generic type in which instance can be represented
*/
trait NormalizeData[D] {
/**
* Render data instance to its base type `D`
*/
def normalize(container: SelfDescribingData[D]): D
}
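// A minimal sketch of an instance for a plain-String payload, for illustration only.
// The envelope format and the `toSchemaUri` rendering below are assumptions, not the
// library's actual JSON normalization:
//
//   object StringNormalizeData extends NormalizeData[String] {
//     def normalize(container: SelfDescribingData[String]): String =
//       s"""{"schema":"${container.schema.toSchemaUri}","data":${container.data}}"""
//   }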
| snowplow/iglu | 0-common/scala-core/src/main/scala/com.snowplowanalytics.iglu/core/typeclasses/NormalizeData.scala | Scala | apache-2.0 | 1,089 |
package pamflet
import com.tristanhunt.knockoff._
trait IdentifiedHeaders extends Discounter { self: TextWriter =>
def headerText( spans : Seq[Span] ) : String = {
val stringWriter = new java.io.StringWriter
spans.map( self.spanToText(_)(stringWriter) )
stringWriter.toString
}
override def headerToXHTML = (level, spans) => {
val name = BlockNames.encode(BlockNames.textOf(spans))
val spanned = spans.map(spanToXHTML)
val anchored = spanned ++
<a href={ "#" + name } class="header-link"><span class="header-link-content"> </span></a>
level match {
case 1 => <h1 id={name}>{ anchored }</h1>
case 2 => <h2 id={name}>{ anchored }</h2>
case 3 => <h3 id={name}>{ anchored }</h3>
case 4 => <h4 id={name}>{ anchored }</h4>
case 5 => <h5 id={name}>{ anchored }</h5>
case 6 => <h6>{ spanned }</h6>
case _ =>
<div class={ "header" + level }>{ spanned }</div>
}
}
}
object BlockNames {
/** Do not generate ids for higher levels than this */
val maxLevel = 5
def encode(str: String) =
java.net.URLEncoder.encode(str.trim(), "utf-8")
def fragment(str: String) = "#" + encode(str)
def textOf(spans: Seq[Span]) =
spans.flatMap {
case t: Text => Seq(t.content)
case h: HTMLSpan => Seq(h.html)
case _ => Seq()
}.mkString("")
def name(blocks: Seq[Block]) =
blocks.view.collect {
case h: Header => textOf(h.spans)
}.headOption.getOrElse { "Untitled" }
}
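// Worked example of the helpers above (java.net.URLEncoder encodes a space as '+'):
//   BlockNames.encode("Getting Started") == "Getting+Started"
//   BlockNames.fragment("Getting Started") == "#Getting+Started"
// so a level-2 header "Getting Started" is rendered as <h2 id="Getting+Started">...</h2>
// with a matching "#Getting+Started" self-link.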
| n8han/pamflet | knockoff/src/main/scala/headers.scala | Scala | lgpl-3.0 | 1,502 |
package com.ee.assets.transformers
import org.specs2.mutable.Specification
class CommonRootNamerTest extends Specification {
"common root namer" should {
"name" in {
pending("coming..")
}
}
}
| edeustace/assets-loader | plugin/test/com/ee/assets/transformers/CommonRootNamerTest.scala | Scala | mit | 215 |
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.index
import java.util.Date
import com.vividsolutions.jts.geom.{Geometry, MultiPolygon, Polygon}
import org.joda.time.{DateTime, Interval}
import org.locationtech.geomesa.core.filter._
import org.locationtech.geomesa.utils.geohash.GeohashUtils
import org.locationtech.geomesa.utils.geohash.GeohashUtils._
import org.locationtech.geomesa.utils.geotools.GeometryUtils
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter._
import org.opengis.filter.expression.{Expression, Literal, PropertyName}
import org.opengis.filter.spatial._
import org.opengis.filter.temporal.{After, Before, During}
import org.opengis.temporal.Period
import scala.collection.JavaConversions._
object FilterHelper {
// Let's handle special cases with topological filters.
def updateTopologicalFilters(filter: Filter, featureType: SimpleFeatureType) = {
filter match {
case dw: DWithin => rewriteDwithin(dw)
case op: BBOX => visitBBOX(op, featureType)
case op: Within => visitBinarySpatialOp(op, featureType)
case op: Intersects => visitBinarySpatialOp(op, featureType)
case op: Overlaps => visitBinarySpatialOp(op, featureType)
case _ => filter
}
}
def visitBinarySpatialOp(op: BinarySpatialOperator, featureType: SimpleFeatureType): Filter = {
val e1 = op.getExpression1.asInstanceOf[PropertyName]
val e2 = op.getExpression2.asInstanceOf[Literal]
val geom = e2.evaluate(null, classOf[Geometry])
val safeGeometry = getInternationalDateLineSafeGeometry(geom)
updateToIDLSafeFilter(op, safeGeometry, featureType)
}
def visitBBOX(op: BBOX, featureType: SimpleFeatureType): Filter = {
val e1 = op.getExpression1.asInstanceOf[PropertyName]
val e2 = op.getExpression2.asInstanceOf[Literal]
val geom = addWayPointsToBBOX( e2.evaluate(null, classOf[Geometry]) )
val safeGeometry = getInternationalDateLineSafeGeometry(geom)
updateToIDLSafeFilter(op, safeGeometry, featureType)
}
def updateToIDLSafeFilter(op: BinarySpatialOperator, geom: Geometry, featureType: SimpleFeatureType): Filter = geom match {
case p: Polygon =>
dispatchOnSpatialType(op, featureType.getGeometryDescriptor.getLocalName, p)
case mp: MultiPolygon =>
val polygonList = getGeometryListOf(geom)
val filterList = polygonList.map {
p => dispatchOnSpatialType(op, featureType.getGeometryDescriptor.getLocalName, p)
}
ff.or(filterList)
}
def getGeometryListOf(inMP: Geometry): Seq[Geometry] =
for( i <- 0 until inMP.getNumGeometries ) yield inMP.getGeometryN(i)
def dispatchOnSpatialType(op: BinarySpatialOperator, property: String, geom: Geometry): Filter = op match {
case op: Within => ff.within( ff.property(property), ff.literal(geom) )
case op: Intersects => ff.intersects( ff.property(property), ff.literal(geom) )
case op: Overlaps => ff.overlaps( ff.property(property), ff.literal(geom) )
case op: BBOX => val envelope = geom.getEnvelopeInternal
ff.bbox( ff.property(property), envelope.getMinX, envelope.getMinY,
envelope.getMaxX, envelope.getMaxY, op.getSRS )
}
def addWayPointsToBBOX(g: Geometry): Geometry = {
val gf = g.getFactory
val geomArray = g.getCoordinates
val correctedGeom = GeometryUtils.addWayPoints(geomArray).toArray
gf.createPolygon(correctedGeom)
}
// Rewrites a Dwithin (assumed to express distance in meters) in degrees.
def rewriteDwithin(op: DWithin): Filter = {
val e2 = op.getExpression2.asInstanceOf[Literal]
val geom = e2.getValue.asInstanceOf[Geometry]
val distanceDegrees = GeometryUtils.distanceDegrees(geom, op.getDistance)
// NB: The ECQL spec doesn't allow for us to put the measurement in "degrees",
// but that's how this filter will be used.
ff.dwithin(
op.getExpression1,
op.getExpression2,
distanceDegrees,
"meters")
}
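  // Rough worked example of the rewrite above: near the equator one degree spans roughly
  // 111,320 m, so a 1,000 m DWithin becomes a distance of about 0.009 degrees. The exact
  // figure comes from GeometryUtils.distanceDegrees and varies with the geometry's latitude.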
def extractGeometry(bso: BinarySpatialOperator): Seq[Geometry] = {
bso match {
      // The DWithin has already been rewritten.
case dwithin: DWithin =>
val e2 = dwithin.getExpression2.asInstanceOf[Literal]
val geom = e2.getValue.asInstanceOf[Geometry]
val buffer = dwithin.getDistance
val bufferedGeom = geom.buffer(buffer)
Seq(GeohashUtils.getInternationalDateLineSafeGeometry(bufferedGeom))
case bs =>
bs.getExpression1.evaluate(null, classOf[Geometry]) match {
case g: Geometry => Seq(GeohashUtils.getInternationalDateLineSafeGeometry(g))
case _ =>
bso.getExpression2.evaluate(null, classOf[Geometry]) match {
case g: Geometry => Seq(GeohashUtils.getInternationalDateLineSafeGeometry(g))
}
}
}
}
// NB: This method assumes that the filters represent a collection of 'and'ed temporal filters.
def extractTemporal(dtFieldName: Option[String]): Seq[Filter] => Interval = {
import org.locationtech.geomesa.utils.filters.Typeclasses.BinaryFilter
import org.locationtech.geomesa.utils.filters.Typeclasses.BinaryFilter.ops
def endpointFromBinaryFilter[B: BinaryFilter](b: B, dtfn: String) = {
val exprToDT: Expression => DateTime = ex => new DateTime(ex.evaluate(null, classOf[Date]))
if (b.left.toString == dtfn) {
Right(exprToDT(b.right)) // the left side is the field name; the right is the endpoint
} else {
Left(exprToDT(b.left)) // the right side is the field name; the left is the endpoint
}
}
def intervalFromAfterLike[B: BinaryFilter](b: B, dtfn: String) =
endpointFromBinaryFilter(b, dtfn) match {
case Right(dt) => new Interval(dt, IndexSchema.maxDateTime)
case Left(dt) => new Interval(IndexSchema.minDateTime, dt)
}
def intervalFromBeforeLike[B: BinaryFilter](b: B, dtfn: String) =
endpointFromBinaryFilter(b, dtfn) match {
case Right(dt) => new Interval(IndexSchema.minDateTime, dt)
case Left(dt) => new Interval(dt, IndexSchema.maxDateTime)
}
def extractInterval(dtfn: String): Filter => Interval = {
case during: During =>
val p = during.getExpression2.evaluate(null, classOf[Period])
val start = p.getBeginning.getPosition.getDate
val end = p.getEnding.getPosition.getDate
new Interval(start.getTime, end.getTime)
case between: PropertyIsBetween =>
val start = between.getLowerBoundary.evaluate(null, classOf[Date])
val end = between.getUpperBoundary.evaluate(null, classOf[Date])
new Interval(start.getTime, end.getTime)
// NB: Interval semantics correspond to "at or after"
case after: After => intervalFromAfterLike(after, dtfn)
case before: Before => intervalFromBeforeLike(before, dtfn)
case lt: PropertyIsLessThan => intervalFromBeforeLike(lt, dtfn)
// NB: Interval semantics correspond to <
case le: PropertyIsLessThanOrEqualTo => intervalFromBeforeLike(le, dtfn)
// NB: Interval semantics correspond to >=
case gt: PropertyIsGreaterThan => intervalFromAfterLike(gt, dtfn)
case ge: PropertyIsGreaterThanOrEqualTo => intervalFromAfterLike(ge, dtfn)
case a: Any =>
throw new Exception(s"Expected temporal filters. Received an $a.")
}
dtFieldName match {
case None =>
_ => IndexSchema.everywhen
case Some(dtfn) =>
filters => filters.map(extractInterval(dtfn)).fold(IndexSchema.everywhen)( _.overlap(_))
}
}
def filterListAsAnd(filters: Seq[Filter]): Option[Filter] = filters match {
case Nil => None
case _ => Some(recomposeAnd(filters))
}
def recomposeAnd(s: Seq[Filter]): Filter = if (s.tail.isEmpty) s.head else ff.and(s)
/**
* Finds the first filter satisfying the condition and returns the rest in the same order they were in
*/
def findFirst(pred: Filter => Boolean)(s: Seq[Filter]): (Option[Filter], Seq[Filter]) =
if (s.isEmpty) (None, s) else {
val h = s.head
val t = s.tail
if (pred(h)) (Some(h), t) else {
val (x, xs) = findFirst(pred)(t)
(x, h +: xs)
}
}
def decomposeAnd(f: Filter): Seq[Filter] = {
f match {
case b: And => b.getChildren.toSeq.flatMap(decomposeAnd)
case f: Filter => Seq(f)
}
}
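  // For example, decomposeAnd flattens nested conjunctions: an input equivalent to
  // (f1 AND (f2 AND f3)) yields Seq(f1, f2, f3), and recomposeAnd re-joins such a
  // sequence into a single AND filter.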
}
trait IndexFilterHelpers {
def buildFilter(geom: Geometry, interval: Interval): KeyPlanningFilter =
(IndexSchema.somewhere(geom), IndexSchema.somewhen(interval)) match {
case (None, None) => AcceptEverythingFilter
case (None, Some(i)) =>
if (i.getStart == i.getEnd) DateFilter(i.getStart)
else DateRangeFilter(i.getStart, i.getEnd)
case (Some(p), None) => SpatialFilter(p)
case (Some(p), Some(i)) =>
if (i.getStart == i.getEnd) SpatialDateFilter(p, i.getStart)
else SpatialDateRangeFilter(p, i.getStart, i.getEnd)
}
def netGeom(geom: Geometry): Geometry =
Option(geom).map(_.intersection(IndexSchema.everywhere)).orNull
def netInterval(interval: Interval): Interval = interval match {
case null => null
case _ => IndexSchema.everywhen.overlap(interval)
}
def netPolygon(poly: Polygon): Polygon = poly match {
case null => null
case p if p.covers(IndexSchema.everywhere) =>
IndexSchema.everywhere
case p if IndexSchema.everywhere.covers(p) => p
case _ => poly.intersection(IndexSchema.everywhere).
asInstanceOf[Polygon]
}
} | kevinwheeler/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/index/FilterHelper.scala | Scala | apache-2.0 | 10,263 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import java.lang.{Long => JLong}
import java.lang.reflect.InvocationTargetException
import scala.collection.mutable
import scala.util.Try
import org.apache.hadoop.yarn.api.records.Resource
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.deploy.yarn.config._
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.resource.ResourceID
import org.apache.spark.resource.ResourceUtils.{AMOUNT, FPGA, GPU}
import org.apache.spark.util.{CausedBy, Utils}
/**
* This helper class uses some of Hadoop 3 methods from the YARN API,
* so we need to use reflection to avoid compile error when building against Hadoop 2.x
*/
private object ResourceRequestHelper extends Logging {
private val AMOUNT_AND_UNIT_REGEX = "([0-9]+)([A-Za-z]*)".r
private val RESOURCE_INFO_CLASS = "org.apache.hadoop.yarn.api.records.ResourceInformation"
val YARN_GPU_RESOURCE_CONFIG = "yarn.io/gpu"
val YARN_FPGA_RESOURCE_CONFIG = "yarn.io/fpga"
private[yarn] def getYarnResourcesAndAmounts(
sparkConf: SparkConf,
componentName: String): Map[String, String] = {
sparkConf.getAllWithPrefix(s"$componentName").map { case (key, value) =>
val splitIndex = key.lastIndexOf('.')
if (splitIndex == -1) {
val errorMessage = s"Missing suffix for ${componentName}${key}, you must specify" +
s" a suffix - $AMOUNT is currently the only supported suffix."
throw new IllegalArgumentException(errorMessage.toString())
}
val resourceName = key.substring(0, splitIndex)
val resourceSuffix = key.substring(splitIndex + 1)
if (!AMOUNT.equals(resourceSuffix)) {
val errorMessage = s"Unsupported suffix: $resourceSuffix in: ${componentName}${key}, " +
s"only .$AMOUNT is supported."
throw new IllegalArgumentException(errorMessage.toString())
}
(resourceName, value)
}.toMap
}
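  // For example (illustrative values): with componentName = "spark.yarn.executor.resource."
  // and an entry "spark.yarn.executor.resource.yarn.io/gpu.amount" -> "2", the split at the
  // last '.' yields resourceName = "yarn.io/gpu", suffix = "amount", and the returned map is
  // Map("yarn.io/gpu" -> "2").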
/**
* Convert Spark resources into YARN resources.
* The only resources we know how to map from spark configs to yarn configs are
* gpus and fpgas, everything else the user has to specify them in both the
* spark.yarn.*.resource and the spark.*.resource configs.
*/
private[yarn] def getYarnResourcesFromSparkResources(
confPrefix: String,
sparkConf: SparkConf
): Map[String, String] = {
Map(GPU -> YARN_GPU_RESOURCE_CONFIG, FPGA -> YARN_FPGA_RESOURCE_CONFIG).map {
case (rName, yarnName) =>
(yarnName -> sparkConf.get(ResourceID(confPrefix, rName).amountConf, "0"))
}.filter { case (_, count) => count.toLong > 0 }
}
/**
* Validates sparkConf and throws a SparkException if any of standard resources (memory or cores)
* is defined with the property spark.yarn.x.resource.y
* Need to reject all combinations of AM / Driver / Executor and memory / CPU cores resources, as
* Spark has its own names for them (memory, cores),
 * but YARN has its own names too: (memory, memory-mb, mb) and (cores, vcores, cpu-vcores).
* We need to disable every possible way YARN could receive the resource definitions above.
*/
def validateResources(sparkConf: SparkConf): Unit = {
val resourceDefinitions = Seq[(String, String)](
(AM_MEMORY.key, YARN_AM_RESOURCE_TYPES_PREFIX + "memory"),
(DRIVER_MEMORY.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "memory"),
(EXECUTOR_MEMORY.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "memory"),
(AM_MEMORY.key, YARN_AM_RESOURCE_TYPES_PREFIX + "mb"),
(DRIVER_MEMORY.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "mb"),
(EXECUTOR_MEMORY.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "mb"),
(AM_MEMORY.key, YARN_AM_RESOURCE_TYPES_PREFIX + "memory-mb"),
(DRIVER_MEMORY.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "memory-mb"),
(EXECUTOR_MEMORY.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "memory-mb"),
(AM_CORES.key, YARN_AM_RESOURCE_TYPES_PREFIX + "cores"),
(DRIVER_CORES.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "cores"),
(EXECUTOR_CORES.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "cores"),
(AM_CORES.key, YARN_AM_RESOURCE_TYPES_PREFIX + "vcores"),
(DRIVER_CORES.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "vcores"),
(EXECUTOR_CORES.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "vcores"),
(AM_CORES.key, YARN_AM_RESOURCE_TYPES_PREFIX + "cpu-vcores"),
(DRIVER_CORES.key, YARN_DRIVER_RESOURCE_TYPES_PREFIX + "cpu-vcores"),
(EXECUTOR_CORES.key, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "cpu-vcores"),
(ResourceID(SPARK_EXECUTOR_PREFIX, "fpga").amountConf,
s"${YARN_EXECUTOR_RESOURCE_TYPES_PREFIX}${YARN_FPGA_RESOURCE_CONFIG}"),
(ResourceID(SPARK_DRIVER_PREFIX, "fpga").amountConf,
s"${YARN_DRIVER_RESOURCE_TYPES_PREFIX}${YARN_FPGA_RESOURCE_CONFIG}"),
(ResourceID(SPARK_EXECUTOR_PREFIX, "gpu").amountConf,
s"${YARN_EXECUTOR_RESOURCE_TYPES_PREFIX}${YARN_GPU_RESOURCE_CONFIG}"),
(ResourceID(SPARK_DRIVER_PREFIX, "gpu").amountConf,
s"${YARN_DRIVER_RESOURCE_TYPES_PREFIX}${YARN_GPU_RESOURCE_CONFIG}"))
val errorMessage = new mutable.StringBuilder()
resourceDefinitions.foreach { case (sparkName, resourceRequest) =>
val resourceRequestAmount = s"${resourceRequest}.${AMOUNT}"
if (sparkConf.contains(resourceRequestAmount)) {
errorMessage.append(s"Error: Do not use $resourceRequestAmount, " +
s"please use $sparkName instead!\\n")
}
}
if (errorMessage.nonEmpty) {
throw new SparkException(errorMessage.toString())
}
}
/**
* Sets resource amount with the corresponding unit to the passed resource object.
* @param resources resource values to set
* @param resource resource object to update
*/
def setResourceRequests(
resources: Map[String, String],
resource: Resource): Unit = {
require(resource != null, "Resource parameter should not be null!")
logDebug(s"Custom resources requested: $resources")
if (resources.isEmpty) {
// no point in going forward, as we don't have anything to set
return
}
if (!isYarnResourceTypesAvailable()) {
logWarning("Ignoring custom resource requests because " +
"the version of YARN does not support it!")
return
}
val resInfoClass = Utils.classForName(RESOURCE_INFO_CLASS)
val setResourceInformationMethod =
try {
resource.getClass.getMethod("setResourceInformation", classOf[String], resInfoClass)
} catch {
case e: NoSuchMethodException =>
throw new SparkException(
s"Cannot find setResourceInformation in ${resource.getClass}. " +
"This is likely due to a JAR conflict between different YARN versions.", e)
}
resources.foreach { case (name, rawAmount) =>
try {
val AMOUNT_AND_UNIT_REGEX(amountPart, unitPart) = rawAmount
val amount = amountPart.toLong
val unit = unitPart match {
case "g" => "G"
case "t" => "T"
case "p" => "P"
case _ => unitPart
}
logDebug(s"Registering resource with name: $name, amount: $amount, unit: $unit")
val resourceInformation = createResourceInformation(name, amount, unit, resInfoClass)
setResourceInformationMethod.invoke(
resource, name, resourceInformation.asInstanceOf[AnyRef])
} catch {
case _: MatchError =>
throw new IllegalArgumentException(s"Resource request for '$name' ('$rawAmount') " +
s"does not match pattern $AMOUNT_AND_UNIT_REGEX.")
case CausedBy(e: IllegalArgumentException) =>
throw new IllegalArgumentException(s"Invalid request for $name: ${e.getMessage}")
case e: InvocationTargetException if e.getCause != null => throw e.getCause
}
}
}
private def createResourceInformation(
resourceName: String,
amount: Long,
unit: String,
resInfoClass: Class[_]): Any = {
val resourceInformation =
if (unit.nonEmpty) {
val resInfoNewInstanceMethod = resInfoClass.getMethod("newInstance",
classOf[String], classOf[String], JLong.TYPE)
resInfoNewInstanceMethod.invoke(null, resourceName, unit, amount.asInstanceOf[JLong])
} else {
val resInfoNewInstanceMethod = resInfoClass.getMethod("newInstance",
classOf[String], JLong.TYPE)
resInfoNewInstanceMethod.invoke(null, resourceName, amount.asInstanceOf[JLong])
}
resourceInformation
}
/**
* Checks whether Hadoop 2.x or 3 is used as a dependency.
* In case of Hadoop 3 and later, the ResourceInformation class
* should be available on the classpath.
*/
def isYarnResourceTypesAvailable(): Boolean = {
Try(Utils.classForName(RESOURCE_INFO_CLASS)).isSuccess
}
}
| pgandhi999/spark | resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ResourceRequestHelper.scala | Scala | apache-2.0 | 9,652 |
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2.column
import java.nio.ByteBuffer
import java.nio.ByteOrder
import it.unimi.dsi.fastutil.bytes.ByteArrayList
import it.unimi.dsi.fastutil.ints.IntArrayList
import org.apache.hadoop.hive.serde2.ByteStream
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector
import shark.execution.serialization.KryoSerializer
class ComplexColumnBuilder(oi: ObjectInspector) extends ColumnBuilder[ByteStream.Output] {
private var _arr: ByteArrayList = null
private var _lengthArr: IntArrayList = null
override def initialize(initialSize: Int) {
_arr = new ByteArrayList(initialSize * ColumnIterator.COMPLEX_TYPE_SIZE)
_lengthArr = new IntArrayList(initialSize)
super.initialize(initialSize)
}
override def append(o: Object, oi: ObjectInspector) {
append(o.asInstanceOf[ByteStream.Output])
}
override def appendNull() {
    // A complex data type is always serialized before being passed in,
    // so it can never be null.
throw new UnsupportedOperationException
}
override def append(v: ByteStream.Output) {
_lengthArr.add(v.getCount)
_arr.addElements(_arr.size(), v.getData, 0, v.getCount)
}
// Don't collect stats for complex data types.
override def stats = null
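  // Byte layout produced by build (native byte order):
  //   [COMPLEX marker: long][serialized ObjectInspector length: long][ObjectInspector bytes]
  // followed by one [length: int][payload bytes] pair per appended value.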
override def build: ByteBuffer = {
val objectInspectorSerialized = KryoSerializer.serialize(oi)
val buf = ByteBuffer.allocate(
_lengthArr.size * 4 + _arr.size + ColumnIterator.COLUMN_TYPE_LENGTH +
objectInspectorSerialized.size + 8)
buf.order(ByteOrder.nativeOrder())
buf.putLong(ColumnIterator.COMPLEX)
buf.putLong(objectInspectorSerialized.size)
buf.put(objectInspectorSerialized)
var i = 0
var runningOffset = 0
while (i < _lengthArr.size) {
val len = _lengthArr.get(i)
buf.putInt(len)
buf.put(_arr.elements(), runningOffset, len)
runningOffset += len
i += 1
}
buf.rewind()
buf
}
}
| sameeragarwal/blinkdb_dev | src/main/scala/shark/memstore2/column/ComplexColumnBuilder.scala | Scala | apache-2.0 | 2,598 |
package mesosphere.marathon.upgrade
import mesosphere.marathon.state.AppDefinition.VersionInfo
import mesosphere.marathon.state.{ AppDefinition, Group, Timestamp }
import org.slf4j.LoggerFactory
/**
* Tools related to app/group versioning.
*/
object GroupVersioningUtil {
private[this] val log = LoggerFactory.getLogger(getClass)
/**
* Calculate a new group from the given `to` parameter that sets the version of all changed apps
* to the given `version`.
*
* @param version the version of all changed apps
* @param from the original group
* @param to the updated group
* @return the updated group with updated app versions
*/
def updateVersionInfoForChangedApps(version: Timestamp, from: Group, to: Group): Group = {
def updateAppVersionInfo(maybeOldApp: Option[AppDefinition], newApp: AppDefinition): AppDefinition = {
val newVersionInfo = maybeOldApp match {
case None =>
log.info(s"[${newApp.id}]: new app detected")
AppDefinition.VersionInfo.forNewConfig(newVersion = version)
case Some(oldApp) =>
if (oldApp.isUpgrade(newApp)) {
log.info(s"[${newApp.id}]: upgrade detected for app (oldVersion ${oldApp.versionInfo})")
oldApp.versionInfo.withConfigChange(newVersion = version)
} else if (oldApp.isOnlyScaleChange(newApp)) {
log.info(s"[${newApp.id}]: scaling op detected for app (oldVersion ${oldApp.versionInfo})")
oldApp.versionInfo.withScaleOrRestartChange(newVersion = version)
} else if (oldApp.versionInfo != newApp.versionInfo && newApp.versionInfo == VersionInfo.NoVersion) {
log.info(s"[${newApp.id}]: restart detected for app (oldVersion ${oldApp.versionInfo})")
oldApp.versionInfo.withScaleOrRestartChange(newVersion = version)
} else {
oldApp.versionInfo
}
}
newApp.copy(versionInfo = newVersionInfo)
}
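    // For example (illustrative): changing only the instance count is a scaling change and
    // keeps the old config version; changing the container image is an upgrade and stamps a
    // new config version; a plain restart reuses the old config version with a new
    // scale-or-restart version.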
val originalApps = from.transitiveAppsById
val updatedTargetApps = to.transitiveApps.flatMap { newApp =>
val updated = updateAppVersionInfo(originalApps.get(newApp.id), newApp)
if (updated.versionInfo != newApp.versionInfo) Some(updated) else None
}
updatedTargetApps.foldLeft(to) { (resultGroup, updatedApp) =>
resultGroup.updateApp(updatedApp.id, _ => updatedApp, version)
}
}
}
| timcharper/marathon | src/main/scala/mesosphere/marathon/upgrade/GroupVersioningUtil.scala | Scala | apache-2.0 | 2,386 |
/*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart
sealed trait Notice {
def message: String
}
case class Info(message: String) extends Notice
case class Warning(message: String) extends Notice
case class Error(message: String) extends Notice
| gorcz/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/Notice.scala | Scala | apache-2.0 | 825 |
package com.github.rnowling.bps.datagenerator.spark
import com.github.rnowling.bps.datagenerator.datamodels.{Store,Customer,PurchasingProfile,Transaction}
import com.github.rnowling.bps.datagenerator.{DataLoader,StoreGenerator,CustomerGenerator,PurchasingProfileGenerator,TransactionGenerator}
import com.github.rnowling.bps.datagenerator.framework.SeedFactory
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.SparkContext._
import java.util.ArrayList
import scala.util.Random
import java.io.File
import java.util.Date
object SparkDriver {
private var nStores: Int = -1
private var nCustomers: Int = -1
private var simulationLength: Double = -1.0
private var seed: Long = -1
private var outputDir: File = new File(".")
private val NPARAMS = 5
private def printUsage() {
val usage: String = "BigPetStore Data Generator\\n" +
"\\n" +
"Usage: spark-submit ... outputDir nStores nCustomers simulationLength [seed]\\n" +
"\\n" +
"outputDir - (string) directory to write files\\n" +
"nStores - (int) number of stores to generate\\n" +
"nCustomers - (int) number of customers to generate\\n" +
"simulationLength - (float) number of days to simulate\\n" +
"seed - (long) seed for RNG. If not given, one is reandomly generated.\\n"
println(usage)
}
private def parseArgs(args: Array[String]) {
if(args.length != NPARAMS && args.length != (NPARAMS - 1)) {
printUsage()
System.exit(1)
}
var i = 0
outputDir = new File(args(i))
if(! outputDir.exists()) {
System.err.println("Given path (" + args(i) + ") does not exist.\\n")
printUsage()
System.exit(1)
}
if(! outputDir.isDirectory()) {
System.err.println("Given path (" + args(i) + ") is not a directory.\\n")
printUsage()
System.exit(1)
}
i += 1
try {
nStores = args(i).toInt
}
catch {
case _ : NumberFormatException =>
System.err.println("Unable to parse '" + args(i) + "' as an integer for nStores.\\n")
printUsage()
System.exit(1)
}
i += 1
try {
nCustomers = args(i).toInt
}
catch {
case _ : NumberFormatException =>
System.err.println("Unable to parse '" + args(i) + "' as an integer for nCustomers.\\n")
printUsage()
System.exit(1)
}
i += 1
try {
simulationLength = args(i).toDouble
}
catch {
case _ : NumberFormatException =>
System.err.println("Unable to parse '" + args(i) + "' as a float for simulationLength.\\n")
printUsage()
System.exit(1)
}
if(args.length == NPARAMS) {
i += 1
try {
seed = args(i).toLong
}
catch {
case _ : NumberFormatException =>
System.err.println("Unable to parse '" + args(i) + "' as a long for seed.\\n")
printUsage()
System.exit(1)
}
}
else {
seed = (new Random()).nextLong
}
}
def main(args: Array[String]) {
parseArgs(args)
val inputData = new DataLoader().loadData()
val seedFactory = new SeedFactory(seed);
println("Generating stores...")
val stores : ArrayList[Store] = new ArrayList()
val storeGenerator = new StoreGenerator(inputData, seedFactory);
for(i <- 1 to nStores) {
val store = storeGenerator.generate()
stores.add(store)
}
println("Done.")
println("Generating customers...")
var customers: List[Customer] = List()
val custGen = new CustomerGenerator(inputData, stores, seedFactory)
for(i <- 1 to nCustomers) {
val customer = custGen.generate()
customers = customer :: customers
}
println("Done.")
println("Creating SparkConf")
val conf = new SparkConf().setAppName("BPS Data Generator")
println("Creating SparkContext")
val sc = new SparkContext(conf)
println("Broadcasting stores and products")
val storesBC = sc.broadcast(stores)
val productBC = sc.broadcast(inputData.getProductCategories())
val customerRDD = sc.parallelize(customers)
val nextSeed = seedFactory.getNextSeed()
println("Defining transaction DAG")
val transactionRDD = customerRDD.mapPartitionsWithIndex { (index, custIter) =>
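      // XOR-ing the shared seed with the partition index gives every partition its own
      // deterministic RNG stream, so a run with the same seed and partitioning reproduces
      // the same transactions.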
val seedFactory = new SeedFactory(nextSeed ^ index)
val transactionIter = custIter.map{ customer =>
val products = productBC.value
val profileGen = new PurchasingProfileGenerator(products, seedFactory)
val profile = profileGen.generate()
val transGen = new TransactionGenerator(customer, profile, storesBC.value, products,
seedFactory)
var transactions : List[Transaction] = List()
var transaction = transGen.generate()
while(transaction.getDateTime() < simulationLength) {
transactions = transaction :: transactions
transaction = transGen.generate()
}
transactions
}
transactionIter
}.flatMap( s => s)
println("Generating transactions...")
val nTrans = transactionRDD.count()
println(s"Generated $nTrans transactions.")
val initialDate : Long = new Date().getTime()
val transactionStringsRDD = transactionRDD.map { t =>
var records : List[String] = List()
val products = t.getProducts()
for(i <- 0 until products.size()) {
val p = products.get(i)
val name = t.getCustomer().getName()
val custLocation = t.getCustomer().getLocation()
val storeLocation = t.getStore().getLocation()
// days -> milliseconds = days * 24 h / day * 60 min / hr * 60 sec / min * 1000 ms / sec
val dateMS = (t.getDateTime * 24.0 * 60.0 * 60.0 * 1000.0).toLong
val date = new Date(initialDate + dateMS)
var record = ""
record += t.getStore().getId() + ","
record += storeLocation.getZipcode() + ","
record += storeLocation.getCity() + ","
record += storeLocation.getState() + ","
record += t.getCustomer().getId() + ","
record += name.getFirst() + "," + name.getSecond() + ","
record += custLocation.getZipcode() + ","
record += custLocation.getCity() + ","
record += custLocation.getState() + ","
record += t.getId() + ","
record += date + ","
record += p
records = record :: records
}
records
}.flatMap { s => s }
transactionStringsRDD.saveAsTextFile(outputDir + "/transactions.txt")
sc.stop()
}
}
| rnowling/bigpetstore-data-generator | spark_driver/src/main/scala/com/github/rnowling/bps/datagenerator/spark/Driver.scala | Scala | apache-2.0 | 6,464 |
package mimir.util
import org.apache.spark.sql.Row
import mimir.algebra._
import mimir.provenance.Provenance
import java.sql.SQLException
import java.util.Calendar
import java.sql.Date
import java.sql.Timestamp
import org.apache.spark.sql.DataFrame
import mimir.exec.spark.RAToSpark
import scala.reflect.runtime.universe.{ runtimeMirror}
import org.spark_project.guava.reflect.ClassPath
import org.clapper.classutil.ClassFinder
import java.io.File
import org.apache.spark.sql.types.DateType
import org.apache.spark.sql.types.TimestampType
import org.apache.spark.sql.types.LongType
import org.apache.spark.sql.functions.unix_timestamp
object SparkUtils {
  // TODO: there are a bunch of hacks in this conversion function because type conversion in the
  // operator translator needs to be done correctly
def convertFunction(t: Type, field: Integer): (Row => PrimitiveValue) =
{
val checkNull: ((Row, => PrimitiveValue) => PrimitiveValue) = {
(r, call) => {
if(r.isNullAt(field)){ NullPrimitive() }
else { call }
}
}
t match {
case TAny() => if(!ExperimentalOptions.isEnabled("NXNULL")) { (r) => NullPrimitive() } else throw new SQLException(s"Can't extract TAny: $field")
case TFloat() => (r) => checkNull(r, FloatPrimitive(r.getDouble(field)))
case TInt() => (r) => checkNull(r, {
try {
IntPrimitive(r.getLong(field))
} catch {
case t: Throwable => {
try {
IntPrimitive(r.getInt(field))
} catch {
case t: Throwable => {
val sval = r.getString(field)
//TODO: somehow mimir_rowid is sometimes an int and has '-'
// from makeRowIDProjectArgs
try {
if(sval.equalsIgnoreCase("-")) IntPrimitive(-1L)
else IntPrimitive(r.getString(field).toLong)
}
catch {
case t: Throwable => {
NullPrimitive()
}
}
}
}
}
} })
      // TODO: This is a workaround: when loading data from the JDBC Spark datasource, the schema
      // is not interpreted by Mimir correctly and everything becomes a varchar in Mimir, while the
      // underlying types differ, so r.getString throws. The JDBC loads need to be fixed to use the
      // correct schema in Mimir: Mike 10/2019
case TString() => (r) => checkNull(r, { r.get(field) match {
case s:String => StringPrimitive(s)
case x => StringPrimitive(x.toString())
} })
case TRowId() => (r) => checkNull(r, { RowIdPrimitive(r.getString(field)) })
case TBool() => (r) => checkNull(r, {
try {
BoolPrimitive(r.getInt(field) != 0)
} catch {
case t: Throwable => {
try {
BoolPrimitive(r.getBoolean(field))
} catch {
case t: Throwable => {
BoolPrimitive(r.getString(field).equalsIgnoreCase("true"))
}
}
}
} })
case TType() => (r) => checkNull(r, { TypePrimitive(Type.fromString(r.getString(field))) })
case TDate() => (r) => { val d = r.getDate(field); if(d == null){ NullPrimitive() } else { convertDate(d) } }
case TTimestamp() => (r) => {
val t = r.getTimestamp(field);
if(t == null){ NullPrimitive() }
else { convertTimestamp(t) }
}
case TInterval() => (r) => { TextUtils.parseInterval(r.getString(field)) }
case TUser(t) => convertFunction(TypeRegistry.baseType(t), field)
}
}
def convertField(t: Type, results: Row, field: Integer): PrimitiveValue =
{
convertFunction(
t match {
case TAny() => RAToSpark.getMimirType(results.schema.fields(field).dataType)
case _ => t
},
field
)(results)
}
def convertDate(time:Long): DatePrimitive =
{
val cal = Calendar.getInstance();
cal.setTime(new Date(time))
convertDate(cal)
}
def convertDate(c: Calendar): DatePrimitive =
DatePrimitive(c.get(Calendar.YEAR), c.get(Calendar.MONTH)+1, c.get(Calendar.DATE))
def convertDate(d: Date): DatePrimitive =
{
val cal = Calendar.getInstance();
cal.setTime(d)
convertDate(cal)
}
def convertDate(d: DatePrimitive): Date =
{
val cal = Calendar.getInstance()
    cal.set(d.y, d.m - 1, d.d); // DatePrimitive months are 1-based (see convertDate(Calendar)); Calendar months are 0-based
new Date(cal.getTime().getTime());
}
def convertTimestamp(time:Long): TimestampPrimitive =
{
val cal = Calendar.getInstance();
cal.setTime(new Timestamp(time))
convertTimestamp(cal)
}
def convertTimestamp(c: Calendar): TimestampPrimitive =
TimestampPrimitive(c.get(Calendar.YEAR), c.get(Calendar.MONTH)+1, c.get(Calendar.DATE),
c.get(Calendar.HOUR_OF_DAY), c.get(Calendar.MINUTE), c.get(Calendar.SECOND),
c.get(Calendar.MILLISECOND))
def convertTimestamp(ts: Timestamp): TimestampPrimitive =
{
val cal = Calendar.getInstance();
cal.setTime(ts)
convertTimestamp(cal)
}
def convertTimestamp(ts: TimestampPrimitive): Timestamp =
{
val cal = Calendar.getInstance()
    cal.set(ts.y, ts.m - 1, ts.d, ts.hh, ts.mm, ts.ss); // TimestampPrimitive months are 1-based; Calendar months are 0-based
new Timestamp(cal.getTime().getTime());
}
def extractAllRows(results: DataFrame): SparkDataFrameIterable =
extractAllRows(results, RAToSpark.structTypeToMimirSchema(results.schema).map(_._2))
def extractAllRows(results: DataFrame, schema: Seq[Type]): SparkDataFrameIterable =
{
new SparkDataFrameIterable(results.collect().iterator, schema)
}
def getSparkKryoClasses() = {
/*val finder = ClassFinder(List(new File(".")))
val classes = finder.getClasses // classes is an Iterator[ClassInfo]
val classMap = ClassFinder.classInfoMap(classes) // runs iterator out, once
val models = ClassFinder.concreteSubclasses("mimir.models.Model", classMap).map(clazz => Class.forName(clazz.name)).toSeq
val operators = ClassFinder.concreteSubclasses("mimir.algebra.Operator", classMap).map(clazz => Class.forName(clazz.name)).toSeq
val expressions = ClassFinder.concreteSubclasses("mimir.algebra.Expression", classMap).map(clazz => Class.forName(clazz.name)).toSeq
println((models ++ operators ++ expressions).map(_.getName).mkString("\\", \\""))
(models ++ operators ++ expressions).toArray*/
// INFO:
// Mike @ 7/20/2019
// We use the Kryo serializer because it performs better, but it requires registration of a list of classes that will be serialized.
    // The above code uses classfinder to generate that list of classes, but there is an ASM conflict (3.1 and 6) in the dependencies
    // that breaks when using assembly or coursier, so, for now, just hardcode the class names here and add a test case that will alert us
// if this list gets out of sync with reality.
Seq( "mimir.models.SimplePickerModel",
"mimir.models.UniformDistribution$",
"mimir.models.CommentModel",
"mimir.models.SimpleSparkClassifierModel",
"mimir.models.WarningModel",
"mimir.models.SimpleFuncDepModel",
"mimir.models.EditDistanceMatchModel",
"mimir.models.RepairKeyModel",
"mimir.models.TypeInferenceModel",
"mimir.models.NoOpModel",
"mimir.models.FacetModel",
"mimir.models.DefaultMetaModel",
"mimir.models.DetectHeaderModel",
"mimir.models.SimpleSeriesModel",
"mimir.models.GeocodingModel",
"mimir.models.MissingKeyModel",
"mimir.algebra.Limit",
"mimir.algebra.Union",
"mimir.algebra.HardTable",
"mimir.algebra.Join",
"mimir.algebra.Table",
"mimir.algebra.AdaptiveView",
"mimir.algebra.LeftOuterJoin",
"mimir.algebra.Sort",
"mimir.algebra.Select",
"mimir.algebra.Aggregate",
"mimir.exec.mode.StatsQuery",
"mimir.algebra.View",
"mimir.algebra.Project",
"mimir.algebra.TypePrimitive",
"mimir.algebra.RowIdVar",
"mimir.algebra.NullPrimitive",
"mimir.algebra.FloatPrimitive",
"mimir.algebra.BoolPrimitive",
"mimir.algebra.Conditional",
"mimir.ctables.vgterm.BestGuess",
"mimir.algebra.Not",
"mimir.algebra.JDBCVar",
"mimir.ctables.vgterm.IsAcknowledged",
"mimir.algebra.DatePrimitive",
"mimir.algebra.IsNullExpression",
"mimir.algebra.VGTerm",
"mimir.algebra.Var",
"mimir.algebra.TimestampPrimitive",
"mimir.algebra.IntPrimitive",
"mimir.algebra.IntervalPrimitive",
"mimir.algebra.Arithmetic",
"mimir.ctables.vgterm.DomainDumper",
"mimir.algebra.DataWarning",
"mimir.ctables.vgterm.Sampler",
"mimir.algebra.Comparison",
"mimir.algebra.RowIdPrimitive",
"mimir.algebra.StringPrimitive",
"mimir.algebra.Function",
"mimir.algebra.CastExpression",
"mimir.algebra.DrawSamples",
"mimir.algebra.sampling.SampleRowsUniformly",
"mimir.algebra.sampling.SampleStratifiedOn"
).map( className =>
Class.forName(className)).toArray
}
def getDataFrameWithProvFromQuery(db:mimir.Database, query:Operator) : (Seq[(ID, Type)], DataFrame) = {
val prov = Provenance.compile(query)
val oper = prov._1
val provenanceCols:Seq[ID] = prov._2
val operWProv = Project(query.columnNames.map { name => ProjectArg(name, Var(name)) } :+
ProjectArg(Provenance.rowidColnameBase,
Function(Provenance.mergeRowIdFunction, provenanceCols.map( Var(_) ) )), oper )
val dfPreOut = db.compiler.compileToSparkWithoutRewrites(operWProv)
val dfOutDt = dfPreOut.schema.fields.filter(col => Seq(DateType).contains(col.dataType)).foldLeft(dfPreOut)((init, cur) => init.withColumn(cur.name,unix_timestamp(init(cur.name)).cast(LongType)*1000))
val dfOut = dfOutDt.schema.fields.filter(col => Seq(TimestampType).contains(col.dataType)).foldLeft(dfOutDt)((init, cur) => init.withColumn(cur.name,init(cur.name).cast(LongType)*1000) )
(db.typechecker.schemaOf(operWProv).map(el => el._2 match {
//case TDate() => (el._1, TInt())
//case TTimestamp() => (el._1, TInt())
case _ => el
}), dfOut)
}
def getDataFrameFromQuery(db:mimir.Database, query:Operator) : (Seq[(ID, Type)], DataFrame) = {
val dfPreOut = db.compiler.compileToSparkWithRewrites(query)
val dfOutDt = dfPreOut.schema.fields.filter(col => Seq(DateType).contains(col.dataType)).foldLeft(dfPreOut)((init, cur) => init.withColumn(cur.name,unix_timestamp(init(cur.name)).cast(LongType)*1000))
val dfOut = dfOutDt.schema.fields.filter(col => Seq(TimestampType).contains(col.dataType)).foldLeft(dfOutDt)((init, cur) => init.withColumn(cur.name,init(cur.name).cast(LongType)*1000) )
(db.typechecker.schemaOf(query), dfOut)
}
}
class SparkDataFrameIterable(results: Iterator[Row], schema: Seq[Type])
extends Iterator[Seq[PrimitiveValue]]
{
def next(): List[PrimitiveValue] =
{
val ret = schema.
zipWithIndex.
map( t => SparkUtils.convertField(t._1, results.next(), t._2) ).
toList
return ret;
}
def hasNext(): Boolean = results.hasNext
def close(): Unit = { }
override def toList() = results.toList.map(row => schema.
zipWithIndex.
map(t => SparkUtils.convertField(t._1, row, t._2)))
def flush: Seq[Seq[PrimitiveValue]] =
{
val ret = toList
close()
return ret
}
}
| UBOdin/mimir | src/main/scala/mimir/util/SparkUtils.scala | Scala | apache-2.0 | 11,792 |
/*
Copyright 2012 Denis Bardadym
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package code.snippet
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.util.Helpers._
import net.liftweb.util.Props
import xml.{Text, NodeSeq}
import code.model.UserDoc
import code.lib.Sitemap._
import main.Constants._
/**
* Created by IntelliJ IDEA.
* User: den
* Date: 24.09.11
* Time: 15:40
* To change this template use File | Settings | File Templates.
*/
class MyMenu {
def your =
UserDoc.currentUser match {
case Full(u) => "li *" #> <a href={userRepos.calcHref(u)}>Your page</a>
case _ => "li" #> NodeSeq.Empty
}
def admin =
UserDoc.currentUser match {
case Full(u) => "li *" #> <a href={userAdmin.calcHref(u)}>Admin</a>
case _ => "li" #> NodeSeq.Empty
}
def signIn =
UserDoc.currentUser match {
case Full(u) => "li *" #> SHtml.a(()=> { UserDoc.logoutCurrentUser; S.redirectTo(S.referer openOr "/") }, Text("Log Out"))
case _ => "li" #> (<li><a href={login.loc.calcDefaultHref}>Log In</a></li> ++
(if(Props.getBool(USER_REGISTRATION_ENABLED, true))
<li><a href={newUser.loc.calcDefaultHref}>Register</a></li>
else NodeSeq.Empty))
}
def users = {
UserDoc.currentUser match {
case Full(u) if u.admin.get => "li *" #> <a href={adminUsers.loc.calcDefaultHref}>Users</a>
case _ => "li" #> NodeSeq.Empty
}
}
} | Khalith-Basha/demo | src/main/scala/code/snippet/MyMenu.scala | Scala | apache-2.0 | 2,018 |
package services
import java.util
import config.ApplicationConfig
import connectors.GameConnector
import exceptions.ServerNotRespondingException
import handlers.ServerRequestHandler
import helpers.TestSpec
import models.UserModel
import org.mockito.ArgumentMatchers
import org.mockito.Mockito._
import org.slf4j.Logger
import org.spongepowered.api.entity.living.player.Player
import org.spongepowered.api.{Game, Server}
import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
class UserServiceSpec extends TestSpec {
def setupMockService(serverResponse: Try[Server], config: ApplicationConfig): UserService = {
val mockGame = mock[Game]
val mockLogger = mock[Logger]
val mockHandler = new ServerRequestHandler(mockGame, mockLogger)
val mockConnector = mock[GameConnector]
when(mockGame.isServerAvailable)
.thenReturn(true)
when(mockConnector.fetchServer())
.thenReturn(serverResponse)
new UserService(mockConnector, mockHandler, config)
}
def setupMockServer(playerList: util.Collection[Player]): Server = {
val mockServer = mock[Server]
when(mockServer.getOnlinePlayers)
.thenReturn(playerList)
mockServer
}
def setupMockPlayer(name: String, isMod: Boolean = false): Player = {
val mockPlayer = mock[Player]
when(mockPlayer.getName)
.thenReturn(name)
when(mockPlayer.hasPermission(ArgumentMatchers.any()))
.thenReturn(isMod)
mockPlayer
}
def setupMockConfig(isFiltering: Boolean): ApplicationConfig = {
val mockConfig = mock[ApplicationConfig]
when(mockConfig.hideMods)
.thenReturn(isFiltering)
mockConfig
}
"Calling .fetchOnlinePlayers" should {
val config = setupMockConfig(false)
"return a list of players from the server" in {
val playerList = List(mock[Player], mock[Player])
val server = setupMockServer(playerList.asJava)
val service = setupMockService(Success(server), config)
val result = service.fetchOnlinePlayers()
result shouldBe Success(playerList)
}
"return a failure when an error is thrown" in {
val service = setupMockService(Failure(ServerNotRespondingException()), config)
val result = service.fetchOnlinePlayers()
result shouldBe Failure(ServerNotRespondingException())
}
}
"Calling .fetchOnlineUsers" should {
"return a list of users from the server" in {
val config = setupMockConfig(false)
val mockPlayers = List(setupMockPlayer("name1"), setupMockPlayer("name2", isMod = true))
val server = setupMockServer(mockPlayers.asJava)
val service = setupMockService(Success(server), config)
val result = service.fetchOnlineUsers()
result shouldBe Success(List(UserModel("name1"), UserModel("name2")))
}
"return a filtered list if filtering is enabled" in {
val config = setupMockConfig(true)
val mockPlayers = List(setupMockPlayer("name1"), setupMockPlayer("name2", isMod = true))
val server = setupMockServer(mockPlayers.asJava)
val service = setupMockService(Success(server), config)
val result = service.fetchOnlineUsers()
result shouldBe Success(List(UserModel("name1")))
}
"return a failure when an error is thrown" in {
val config = setupMockConfig(false)
val service = setupMockService(Failure(ServerNotRespondingException()), config)
val result = service.fetchOnlineUsers()
result shouldBe Failure(ServerNotRespondingException())
}
}
}
| jameshforster/ToL-Scout-Plugin | src/test/scala/services/UserServiceSpec.scala | Scala | apache-2.0 | 3,535 |
/*
* Copyright 2016 org.NLP4L
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nlp4l.lucene
import com.typesafe.config.{ConfigObject, Config, ConfigFactory}
import org.apache.lucene.analysis.custom.CustomAnalyzer
import org.apache.lucene.analysis.{Analyzer => LuceneAnalyzer}
import org.nlp4l.lucene.analysis.{Analyzer}
import scala.collection.JavaConversions
import scala.collection.immutable.HashMap
/**
* Object for loading schemas.
*/
object SchemaLoader {
/**
   * Load a schema configuration from the resource with the given name. [[InvalidSchemaException]] will be thrown if the resource is not found in the current classpath.
*
* @param resource the resource name to schema configuration
* @return a new [[Schema]] instance
*/
def load(resource: String): Schema = {
val conf = ConfigFactory.load(resource)
read(conf)
}
/**
   * Load a schema configuration from the given file path. [[InvalidSchemaException]] will be thrown if the file is not found.
*
* @param path the path to schema configuration
* @return a new [[Schema]] instance
*/
def loadFile(path: String): Schema = {
val conf = ConfigFactory.parseFile(new java.io.File(path))
read(conf)
}
def read(conf: Config): Schema = {
val schema = if (conf.hasPath("schema")) conf.getConfig("schema") else throw new InvalidSchemaException("No root object \"schema\".")
// custom analyzer definitions
val analyzers =
if (schema.hasPath("analyzers")) JavaConversions.asScalaIterator(schema.getConfigList("analyzers").iterator).map(buildAnalyzer(_)).toMap
else Map.empty[String, Analyzer]
val defAnalyzer = if (schema.hasPath("defAnalyzer")) schema.getAnyRef("defAnalyzer") else throw new InvalidSchemaException("The path \"schema.defAnalyzer\" is mandatory.")
val defANalyzerObj =
if (defAnalyzer.isInstanceOf[String]) analyzers.get(defAnalyzer.asInstanceOf[String]) match {
case Some(a) => a
case _ => throw new InvalidSchemaException("Unknown analyzer name: " + defAnalyzer.asInstanceOf[String])
} else {
// build Analyzer in place
buildAnalyzer(schema.getConfig("defAnalyzer"), false)._2
}
    val fields = if (schema.hasPath("fields")) schema.getConfigList("fields") else throw new InvalidSchemaException("The path \"schema.fields\" is mandatory")
if (fields.isEmpty) throw new InvalidSchemaException("\"schema.fields\" must have one or more field definitions")
val fieldTypes = JavaConversions.asScalaIterator(fields.listIterator()).map(f => {
val name = if (f.hasPath("name")) f.getString("name") else throw new InvalidSchemaException("The path \"schema.fields.[N].name\" is mandatory")
val analyzer = if (f.hasPath("analyzer")) f.getAnyRef("analyzer") else null
val indexed = if (f.hasPath("indexed")) f.getBoolean("indexed") else false
val stored = if (f.hasPath("stored")) f.getBoolean("stored") else false
val termVector = if (f.hasPath("termVector")) f.getBoolean("termVector") else false
val termPosition = if (termVector && f.hasPath("positions")) f.getBoolean("positions") else false
val termOffset = if (termVector && f.hasPath("offsets")) f.getBoolean("offsets") else false
val analyzerObj =
if (analyzer == null) {
null
} else if (analyzer.isInstanceOf[String]) analyzers.get(analyzer.asInstanceOf[String]) match {
case Some(a) => a
case _ => throw new InvalidSchemaException("Unknown analyzer name: " + defAnalyzer.asInstanceOf[String])
} else {
// build custom analyzer in place
buildAnalyzer(f.getConfig("analyzer"), false)._2
}
(name, FieldType(analyzerObj, indexed, stored, termVector, termPosition, termOffset))
}).toMap
Schema(defANalyzerObj, fieldTypes)
}
def readAnalyzer(analyzerConf: Config): Analyzer = {
buildAnalyzer(analyzerConf, false)._2
}
private def buildAnalyzer(analyzer: Config, nameRequired: Boolean = true): (String, Analyzer) = {
val name =
if (analyzer.hasPath("name")) analyzer.getString("name")
else if (!nameRequired) ""
else throw new InvalidSchemaException("The path \"schema.analyzers.[N].name\" is mandatory.")
if (analyzer.hasPath("class")) {
// create a new Analyzer instance with no arguments.
val analyzerObj = try {
val clazz = Class.forName(analyzer.getString("class"))
Analyzer(clazz.newInstance.asInstanceOf[LuceneAnalyzer])
} catch {
case e: Exception => throw e
}
(name, analyzerObj)
} else {
// create a new Analyzer instance with CustomAnalyzer.Builder
// parse the Tokenizer settings. This is mandatory.
val tokenizer = if (analyzer.hasPath("tokenizer")) analyzer.getConfig("tokenizer") else throw new InvalidSchemaException("The path \"schema.analyzers.[N].tokenizer\" is mandatory.")
val tokenizerClazz = if (tokenizer.hasPath("factory")) tokenizer.getString("factory") else throw new InvalidSchemaException("The path \"schema.analyzers.[N].tokenizer.factory\" is mandatory.")
val tokenizerParams: Map[String, String] =
if (tokenizer.hasPath("params")) JavaConversions.asScalaIterator(tokenizer.getConfigList("params").iterator).map { c => (c.getString("name"), c.getString("value")) }.toMap
else Map.empty[String, String]
// parse settings for CharFilters. This is not mandatory or can be empty.
val charFilters = if (analyzer.hasPath("char_filters")) JavaConversions.asScalaIterator(analyzer.getConfigList("char_filters").iterator) else List.empty[Config]
// parse setting for TokenFilters. This is not mandatory or can be empty.
val tokenFilters = if (analyzer.hasPath("filters")) JavaConversions.asScalaIterator(analyzer.getConfigList("filters").iterator) else List.empty[Config]
      // build the Analyzer instance with CustomAnalyzer.Builder
// TODO: more readable...
val builder = CustomAnalyzer.builder
val jParams = JavaConversions.mapAsJavaMap(collection.mutable.HashMap.empty ++ tokenizerParams)
builder.withTokenizer(tokenizerClazz, jParams)
charFilters.foreach(filter => {
val factory = if (filter.hasPath("factory")) filter.getString("factory") else throw new InvalidSchemaException("The path \"schema.analyzers.[N].char_filters.[N].factory\" is mandatory.")
val params = if (filter.hasPath("params")) JavaConversions.asScalaIterator(filter.getConfigList("params").iterator).map { c => (c.getString("name"), c.getString("value")) }.toMap
else Map.empty[String, String]
val jParams = JavaConversions.mapAsJavaMap(collection.mutable.HashMap.empty ++ params)
builder.addCharFilter(factory, jParams)
})
tokenFilters.foreach(filter => {
val factory = if (filter.hasPath("factory")) filter.getString("factory") else throw new InvalidSchemaException("The path \"schema.analyzers.[N].char_filters.[N].factory\" is mandatory.")
val params: Map[String, String] =
if (filter.hasPath("params")) JavaConversions.asScalaIterator(filter.getConfigList("params").iterator).map { c => (c.getString("name"), c.getString("value")) }.toMap
else Map.empty[String, String]
val mParams = collection.mutable.HashMap.empty ++ params
val jParams = JavaConversions.mapAsJavaMap(collection.mutable.HashMap.empty ++ params)
builder.addTokenFilter(factory, jParams)
})
(name, Analyzer(builder.build))
}
}
}
class InvalidSchemaException(msg: String) extends RuntimeException(msg)
| fubuki/nlp4l | app/org/nlp4l/lucene/SchemaLoader.scala | Scala | apache-2.0 | 8,112 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.util
import java.util.Comparator
import java.{lang => jl, util => ju}
import org.junit.Assert._
import org.junit.Test
import org.scalajs.testsuite.utils.CollectionsTestBase
import scala.reflect.ClassTag
trait CollectionsOnCollectionsTest extends CollectionsTestBase {
def factory: CollectionFactory
def testMinMax1[T <: AnyRef with Comparable[T]: ClassTag](
factory: CollectionFactory, toElem: Int => T, isMin: Boolean): Unit = {
val coll = factory.empty[T]
coll.addAll(rangeOfElems(toElem))
val minMax = if (isMin) range.head else range.last
def getMinMax(): T =
if (isMin) ju.Collections.min(coll)
else ju.Collections.max(coll)
assertEquals(0, getMinMax().compareTo(toElem(minMax)))
coll match {
case list: ju.List[_] =>
ju.Collections.shuffle(list, new ju.Random(42))
assertEquals(0, getMinMax().compareTo(toElem(minMax)))
ju.Collections.shuffle(list, new ju.Random(100000))
assertEquals(0, getMinMax().compareTo(toElem(minMax)))
case _ =>
}
}
def testMinMax2[T: ClassTag](factory: CollectionFactory, toElem: Int => T,
isMin: Boolean, cmp: ju.Comparator[T]): Unit = {
val coll = factory.empty[T]
coll.addAll(rangeOfElems(toElem))
val minMax = if (isMin) range.head else range.last
def getMinMax: T =
if (isMin) ju.Collections.min(coll, cmp)
else ju.Collections.max(coll, cmp)
assertEquals(0, cmp.compare(getMinMax, toElem(minMax)))
coll match {
case list: ju.List[_] =>
ju.Collections.shuffle(list, new ju.Random(42))
assertEquals(0, cmp.compare(getMinMax, toElem(minMax)))
ju.Collections.shuffle(list, new ju.Random(100000))
assertEquals(0, cmp.compare(getMinMax, toElem(minMax)))
case _ =>
}
}
@Test def minOnComparables(): Unit = {
def test[T <: AnyRef with Comparable[T]: ClassTag](toElem: Int => T): Unit =
testMinMax1(factory, toElem, true)
test[jl.Integer](jl.Integer.valueOf)
test[jl.Long](_.toLong)
test[jl.Double](_.toDouble)
}
@Test def minWithComparator(): Unit = {
def test[T: ClassTag](toElem: Int => T, cmpFun: (T, T) => Int): Unit = {
testMinMax2(factory, toElem, true, new Comparator[T] {
override def compare(o1: T, o2: T): Int = cmpFun(o1, o2)
})
}
test[jl.Integer](_.toInt, (x: jl.Integer, y: jl.Integer) => x.compareTo(y))
test[jl.Long](_.toLong, (x: jl.Long, y: jl.Long) => x.compareTo(y))
test[jl.Double](_.toDouble, (x: jl.Double, y: jl.Double) => x.compareTo(y))
}
@Test def maxOnComparables(): Unit = {
def test[T <: AnyRef with Comparable[T]: ClassTag](toElem: Int => T): Unit =
testMinMax1(factory, toElem, false)
test[jl.Integer](jl.Integer.valueOf)
test[jl.Long](_.toLong)
test[jl.Double](_.toDouble)
}
@Test def maxWithComparator(): Unit = {
def test[T: ClassTag](toElem: Int => T, cmpFun: (T, T) => Int): Unit = {
testMinMax2(factory, toElem, false, new Comparator[T] {
override def compare(o1: T, o2: T): Int = cmpFun(o1, o2)
})
}
test[jl.Integer](_.toInt, (x: jl.Integer, y: jl.Integer) => x.compareTo(y))
test[jl.Long](_.toLong, (x: jl.Long, y: jl.Long) => x.compareTo(y))
test[jl.Double](_.toDouble, (x: jl.Double, y: jl.Double) => x.compareTo(y))
}
@Test def frequency(): Unit = {
def test[E: ClassTag](toElem: Int => E): Unit = {
val coll = factory.empty[E]
def expectAllFrequenciesToBe(n: Int): Unit = {
for (i <- range)
assertEquals(n, ju.Collections.frequency(coll, toElem(i)))
}
expectAllFrequenciesToBe(0)
coll.addAll(rangeOfElems(toElem))
expectAllFrequenciesToBe(1)
coll.addAll(rangeOfElems(toElem))
coll match {
case _: ju.Set[_] => expectAllFrequenciesToBe(1)
case _: ju.List[_] => expectAllFrequenciesToBe(2)
case _ => // Undefined behaviour
}
}
test[jl.Integer](_.toInt)
test[jl.Long](_.toLong)
test[jl.Double](_.toDouble)
test[String](_.toString)
}
@Test def addAll(): Unit = {
def test[E: ClassTag](toElem: Int => E): Unit = {
val coll = factory.empty[E]
assertFalse(ju.Collections.addAll(coll))
assertTrue(coll.isEmpty)
assertTrue(ju.Collections.addAll(coll, toElem(0), toElem(1)))
assertTrue(coll.contains(toElem(0)))
assertTrue(coll.contains(toElem(1)))
}
test[jl.Integer](_.toInt)
test[jl.Long](_.toLong)
test[jl.Double](_.toDouble)
}
@Test def unmodifiableCollection(): Unit = {
def test[E: ClassTag](toElem: Int => E): Unit = {
val coll = factory.empty[E]
testCollectionUnmodifiability(ju.Collections.unmodifiableCollection(coll),
toElem(0))
coll.addAll(rangeOfElems(toElem))
testCollectionUnmodifiability(ju.Collections.unmodifiableCollection(coll),
toElem(0))
}
test[jl.Integer](_.toInt)
test[jl.Long](_.toLong)
test[jl.Double](_.toDouble)
test[String](_.toString)
}
}
| scala-js/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/CollectionsOnCollectionsTest.scala | Scala | apache-2.0 | 5,372 |
package com.softwaremill.bootzooka.email.application
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.util.{Date, Properties}
import javax.activation.{DataHandler, DataSource}
import javax.mail.internet.{InternetAddress, MimeBodyPart, MimeMessage, MimeMultipart}
import javax.mail.{Address, Message, Session, Transport}
import com.typesafe.scalalogging.StrictLogging
/**
* Copied from softwaremill-common:
* https://github.com/softwaremill/softwaremill-common/blob/master/softwaremill-sqs/src/main/java/com/softwaremill/common/sqs/email/EmailSender.java
*/
object SmtpEmailSender extends StrictLogging {
def send(
smtpHost: String,
smtpPort: String,
smtpUsername: String,
smtpPassword: String,
verifySSLCertificate: Boolean,
sslConnection: Boolean,
from: String,
encoding: String,
emailDescription: EmailDescription,
attachmentDescriptions: AttachmentDescription*
) {
val props = setupSmtpServerProperties(sslConnection, smtpHost, smtpPort, verifySSLCertificate)
// Get a mail session
val session = Session.getInstance(props)
val m = new MimeMessage(session)
m.setFrom(new InternetAddress(from))
val to = convertStringEmailsToAddresses(emailDescription.emails)
val replyTo = convertStringEmailsToAddresses(emailDescription.replyToEmails)
val cc = convertStringEmailsToAddresses(emailDescription.ccEmails)
val bcc = convertStringEmailsToAddresses(emailDescription.bccEmails)
m.setRecipients(javax.mail.Message.RecipientType.TO, to)
m.setRecipients(Message.RecipientType.CC, cc)
m.setRecipients(Message.RecipientType.BCC, bcc)
m.setReplyTo(replyTo)
m.setSubject(emailDescription.subject, encoding)
m.setSentDate(new Date())
if (attachmentDescriptions.nonEmpty) {
addAttachments(m, emailDescription.message, encoding, attachmentDescriptions: _*)
} else {
m.setText(emailDescription.message, encoding, "plain")
}
val transport = createSmtpTransportFrom(session, sslConnection)
try {
connectToSmtpServer(transport, smtpUsername, smtpPassword)
sendEmail(transport, m, emailDescription, to)
} finally {
transport.close()
}
}
private def setupSmtpServerProperties(
sslConnection: Boolean,
smtpHost: String,
smtpPort: String,
verifySSLCertificate: Boolean
): Properties = {
// Setup mail server
val props = new Properties()
if (sslConnection) {
props.put("mail.smtps.host", smtpHost)
props.put("mail.smtps.port", smtpPort)
props.put("mail.smtps.starttls.enable", "true")
if (!verifySSLCertificate) {
props.put("mail.smtps.ssl.checkserveridentity", "false")
props.put("mail.smtps.ssl.trust", "*")
}
} else {
props.put("mail.smtp.host", smtpHost)
props.put("mail.smtp.port", smtpPort)
}
props
}
private def createSmtpTransportFrom(session: Session, sslConnection: Boolean): Transport =
if (sslConnection) session.getTransport("smtps") else session.getTransport("smtp")
private def sendEmail(transport: Transport, m: MimeMessage, emailDescription: EmailDescription, to: Array[Address]) {
transport.sendMessage(m, m.getAllRecipients)
logger.debug("Mail '" + emailDescription.subject + "' sent to: " + to.mkString(","))
}
private def connectToSmtpServer(transport: Transport, smtpUsername: String, smtpPassword: String) {
if (smtpUsername != null && smtpUsername.nonEmpty) {
transport.connect(smtpUsername, smtpPassword)
} else {
transport.connect()
}
}
private def convertStringEmailsToAddresses(emails: Array[String]): Array[Address] =
emails.map(new InternetAddress(_))
private def addAttachments(
mimeMessage: MimeMessage,
msg: String,
encoding: String,
attachmentDescriptions: AttachmentDescription*
) {
val multiPart = new MimeMultipart()
val textPart = new MimeBodyPart()
multiPart.addBodyPart(textPart)
textPart.setText(msg, encoding, "plain")
for (attachmentDescription <- attachmentDescriptions) {
val binaryPart = new MimeBodyPart()
multiPart.addBodyPart(binaryPart)
val ds = new DataSource() {
def getInputStream =
new ByteArrayInputStream(attachmentDescription.content)
def getOutputStream = {
val byteStream = new ByteArrayOutputStream()
byteStream.write(attachmentDescription.content)
byteStream
}
def getContentType =
attachmentDescription.contentType
def getName =
attachmentDescription.filename
}
binaryPart.setDataHandler(new DataHandler(ds))
binaryPart.setFileName(attachmentDescription.filename)
binaryPart.setDescription("")
}
mimeMessage.setContent(multiPart)
}
case class EmailDescription(
emails: Array[String],
message: String,
subject: String,
replyToEmails: Array[String],
ccEmails: Array[String],
bccEmails: Array[String]
) {
def this(emails: List[String], message: String, subject: String) =
this(emails.toArray, message, subject, Array(), Array(), Array())
}
case class AttachmentDescription(content: Array[Byte], filename: String, contentType: String)
}
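// --- Hedged usage sketch, not part of the original file ---
// Shows one way SmtpEmailSender.send could be invoked; every host, port, credential and
// address below is a placeholder, not real configuration.
object SmtpEmailSenderUsageSketch {
  def example(): Unit =
    SmtpEmailSender.send(
      smtpHost = "smtp.example.com",
      smtpPort = "587",
      smtpUsername = "mailer",
      smtpPassword = "secret",
      verifySSLCertificate = true,
      sslConnection = false,
      from = "noreply@example.com",
      encoding = "UTF-8",
      emailDescription = new SmtpEmailSender.EmailDescription(
        List("someone@example.com"), "Hello from Bootzooka", "Test mail"
      )
    )
}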
| aywengo/bootzooka | backend/src/main/scala/com/softwaremill/bootzooka/email/application/SmtpEmailSender.scala | Scala | apache-2.0 | 5,345 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.carbondata.restructure.vectorreader
import java.math.BigDecimal
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.Spark2QueryTest
import org.scalatest.BeforeAndAfterAll
class ChangeDataTypeTestCases extends Spark2QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("DROP TABLE IF EXISTS changedatatypetest")
sql("DROP TABLE IF EXISTS hivetable")
}
test("test change datatype on existing column and load data, insert into hive table") {
def test_change_column_load_insert() = {
beforeAll
sql(
"CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
"timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
sql(
"CREATE TABLE hivetable(intField BIGINT,stringField STRING,charField STRING,timestampField "
+ "TIMESTAMP,decimalField DECIMAL(6,2)) STORED AS PARQUET")
sql("INSERT INTO TABLE hivetable SELECT * FROM changedatatypetest")
afterAll
}
sqlContext.setConf("carbon.enable.vector.reader", "true")
test_change_column_load_insert()
sqlContext.setConf("carbon.enable.vector.reader", "false")
test_change_column_load_insert()
}
test("test datatype change and filter") {
def test_change_datatype_and_filter() = {
beforeAll
sql(
"CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
"timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField > 99"),
Seq(Row("abc"), Row("abc")))
checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField < 99"), Seq())
checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField = 100"),
Seq(Row("abc"), Row("abc")))
afterAll
}
sqlContext.setConf("carbon.enable.vector.reader", "true")
test_change_datatype_and_filter
sqlContext.setConf("carbon.enable.vector.reader", "false")
test_change_datatype_and_filter
}
test("test change int datatype and load data") {
def test_change_int_and_load() = {
beforeAll
sql(
"CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
"timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
checkAnswer(sql("SELECT SUM(intField) FROM changedatatypetest"), Row(200))
afterAll
}
sqlContext.setConf("carbon.enable.vector.reader", "true")
test_change_int_and_load()
sqlContext.setConf("carbon.enable.vector.reader", "false")
test_change_int_and_load()
}
test("test change decimal datatype and compaction") {
def test_change_decimal_and_compaction() = {
beforeAll
sql(
"CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
"timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
sql("ALTER TABLE changedatatypetest CHANGE decimalField decimalField DECIMAL(9,5)")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
checkAnswer(sql("SELECT decimalField FROM changedatatypetest"),
Seq(Row(new BigDecimal("21.23").setScale(5)), Row(new BigDecimal("21.23").setScale(5))))
sql("ALTER TABLE changedatatypetest COMPACT 'major'")
checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "0Compacted")
checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "1Compacted")
checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "0.1Success")
afterAll
}
sqlContext.setConf("carbon.enable.vector.reader", "true")
test_change_decimal_and_compaction()
sqlContext.setConf("carbon.enable.vector.reader", "false")
test_change_decimal_and_compaction()
}
test("test to change int datatype to long") {
def test_change_int_to_long() = {
beforeAll
sql(
"CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
"timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ s"decimalField')")
sql("ALTER TABLE changedatatypetest CHANGE intField intField LONG")
checkAnswer(sql("SELECT intField FROM changedatatypetest LIMIT 1"), Row(100))
afterAll
}
sqlContext.setConf("carbon.enable.vector.reader", "true")
test_change_int_to_long()
sqlContext.setConf("carbon.enable.vector.reader", "false")
test_change_int_to_long()
}
override def afterAll {
sql("DROP TABLE IF EXISTS changedatatypetest")
sql("DROP TABLE IF EXISTS hivetable")
sqlContext.setConf("carbon.enable.vector.reader", "false")
}
}
| shivangi1015/incubator-carbondata | integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala | Scala | apache-2.0 | 7,486 |
package org.wartremover
package contrib.warts
import scala.concurrent.Future
object DiscardedFuture extends WartTraverser {
val message: String =
"""andThen discards the return value of callback.
|To chain the result of Future to other Future, use flatMap.
|""".stripMargin
def apply(u: WartUniverse): u.Traverser = {
import u.universe._
val andThenMethodName: TermName = TermName("andThen")
val futureSymbol = typeOf[Future[Any]]
val andThenMethod = futureSymbol.member(andThenMethodName)
val futureTypeSymbol = futureSymbol.typeSymbol
require(andThenMethod != NoSymbol)
require(futureTypeSymbol != NoSymbol)
new Traverser {
override def traverse(tree: Tree): Unit = {
tree match {
// Ignore trees marked by SuppressWarnings
case t if hasWartAnnotation(u)(t) =>
case Apply(Apply(method, List(callback)), _) if method.symbol == andThenMethod && callback.tpe
.typeArgs(1)
.typeSymbol == futureTypeSymbol =>
error(u)(tree.pos, message)
super.traverse(tree)
case _ =>
super.traverse(tree)
}
}
}
}
}
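// --- Hedged illustration, not part of the original file ---
// Sketch of what the wart flags: an andThen callback that itself returns a Future, because
// that inner Future is discarded. Chaining with flatMap keeps the result instead.
object DiscardedFutureExample {
  import scala.concurrent.ExecutionContext.Implicits.global
  def step1: Future[Int] = Future.successful(1)
  def step2(n: Int): Future[Int] = Future.successful(n + 1)
  def flagged: Future[Int] = step1.andThen { case _ => step2(0) } // step2's Future is lost
  def preferred: Future[Int] = step1.flatMap(step2) // results stay chained
}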
| wartremover/wartremover-contrib | core/src/main/scala/wartremover/contrib/warts/DiscardedFuture.scala | Scala | apache-2.0 | 1,188 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys.version
import com.typesafe.tools.mima.core._
import com.typesafe.tools.mima.core.MissingClassProblem
import com.typesafe.tools.mima.core.MissingTypesProblem
import com.typesafe.tools.mima.core.ProblemFilters._
import com.typesafe.tools.mima.plugin.MimaKeys.{mimaBinaryIssueFilters, mimaPreviousArtifacts}
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
object MimaBuild {
def excludeMember(fullName: String) = Seq(
ProblemFilters.exclude[MissingMethodProblem](fullName),
// Sometimes excluded methods have default arguments and
// they are translated into public methods/fields($default$) in generated
// bytecode. It is not possible to exhaustively list everything.
// But this should be okay.
ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$2"),
ProblemFilters.exclude[MissingMethodProblem](fullName+"$default$1"),
ProblemFilters.exclude[MissingFieldProblem](fullName),
ProblemFilters.exclude[IncompatibleResultTypeProblem](fullName),
ProblemFilters.exclude[IncompatibleMethTypeProblem](fullName),
ProblemFilters.exclude[IncompatibleFieldTypeProblem](fullName)
)
// Exclude a single class
def excludeClass(className: String) = Seq(
excludePackage(className),
ProblemFilters.exclude[MissingClassProblem](className),
ProblemFilters.exclude[MissingTypesProblem](className)
)
// Exclude a Spark class, that is in the package org.apache.spark
def excludeSparkClass(className: String) = {
excludeClass("org.apache.spark." + className)
}
// Exclude a Spark package, that is in the package org.apache.spark
def excludeSparkPackage(packageName: String) = {
excludePackage("org.apache.spark." + packageName)
}
def ignoredABIProblems(base: File, currentSparkVersion: String) = {
// Excludes placed here will be used for all Spark versions
val defaultExcludes = Seq()
// Read package-private excludes from file
val classExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-class-excludes")
val memberExcludeFilePath = file(base.getAbsolutePath + "/.generated-mima-member-excludes")
val ignoredClasses: Seq[String] =
if (!classExcludeFilePath.exists()) {
Seq()
} else {
        IO.read(classExcludeFilePath).split("\n")
}
val ignoredMembers: Seq[String] =
if (!memberExcludeFilePath.exists()) {
Seq()
} else {
        IO.read(memberExcludeFilePath).split("\n")
}
defaultExcludes ++ ignoredClasses.flatMap(excludeClass) ++
ignoredMembers.flatMap(excludeMember) ++ MimaExcludes.excludes(currentSparkVersion)
}
def mimaSettings(sparkHome: File, projectRef: ProjectRef) = {
val organization = "org.apache.spark"
val previousSparkVersion = "2.0.0"
val project = projectRef.project
val fullId = "spark-" + project + "_2.11"
mimaDefaultSettings ++
Seq(mimaPreviousArtifacts := Set(organization % fullId % previousSparkVersion),
mimaBinaryIssueFilters ++= ignoredABIProblems(sparkHome, version.value))
}
}
| wangyixiaohuihui/spark2-annotation | project/MimaBuild.scala | Scala | apache-2.0 | 4,017 |
package org.json4s
/**
* Do not use any type hints.
*/
case object NoTypeHints extends TypeHints {
val hints: List[Class[_]] = Nil
def hintFor(clazz: Class[_]) = None
def classFor(hint: String, parent: Class[_]) = None
override def shouldExtractHints(clazz: Class[_]) = false
}
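// Hedged usage note, not part of the original file: NoTypeHints is the value to pass when
// building Formats if no type-hint field should be written or read, e.g. (assuming the
// standard json4s serialization setup):
//
//   implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints)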
| xuwei-k/json4s | core/src/main/scala/org/json4s/NoTypeHints.scala | Scala | apache-2.0 | 289 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers.dsl
import org.scalatest._
import org.scalatest.matchers.should.Matchers._
import org.scalatest.funspec.AnyFunSpec
class ResultOfAllElementsOfApplicationSpec extends AnyFunSpec {
describe("ResultOfAllElementsOfApplication ") {
it("should have pretty toString when right is empty") {
val result = new ResultOfAllElementsOfApplication(Vector.empty)
result.toString should be ("allElementsOf (Vector())")
}
it("should have pretty toString when right contains 1 element") {
val result = new ResultOfAllElementsOfApplication(Vector("Bob"))
      result.toString should be ("allElementsOf (Vector(\"Bob\"))")
}
it("should have pretty toString when right contains > 1 elements") {
val result = new ResultOfAllElementsOfApplication(Vector("Bob", "Alice"))
      result.toString should be ("allElementsOf (Vector(\"Bob\", \"Alice\"))")
}
}
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/matchers/dsl/ResultOfAllElementsOfApplicationSpec.scala | Scala | apache-2.0 | 1,524 |
package com.sksamuel.scrimage.filter
import com.sksamuel.scrimage.BufferedOpFilter
/** @author Stephen Samuel */
class RGBFilter(r: Double = 0, g: Double = 0, b: Double = 0) extends BufferedOpFilter {
require(r <= 1)
require(g <= 1)
require(b <= 1)
val op = new thirdparty.jhlabs.image.RGBAdjustFilter()
op.setBFactor(b.toFloat)
op.setRFactor(r.toFloat)
op.setGFactor(g.toFloat)
}
object RGBFilter {
def apply(r: Double = 0, g: Double = 0, b: Double = 0): RGBFilter = new RGBFilter(r, g, b)
}
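// Hedged usage note, not part of the original file (assumes the usual scrimage pattern of
// applying a Filter to an Image):
//
//   val tinted = image.filter(RGBFilter(r = 0.1, g = 0.0, b = 0.2))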
| carlosFattor/scrimage | scrimage-filters/src/main/scala/com/sksamuel/scrimage/filter/RGBFilter.scala | Scala | apache-2.0 | 511 |
package domain.kanban
import domain.{ Entity, Enum, EnumEntry, ValueObject }
import domain.user.{ User, UserAuthority, UserId }
/**
 * Kanban domain.
 * @param kanbanId kanban ID
 * @param configuration kanban configuration
 * @param joinedUsers joined users
 */
case class Kanban(
kanbanId: Option[KanbanId],
configuration: KanbanConfiguration,
joinedUsers: Seq[JoinedUser]
) extends Entity[Kanban] {
/**
* @inheritdoc
*/
override def sameIdentityAs(other: Kanban): Boolean = (for {
thisId <- this.kanbanId
otherId <- other.kanbanId
} yield thisId.sameValueAs(otherId)) getOrElse false
  /** Map of joined users. */
private[this] lazy val joinedUserMap: Map[UserId, JoinedUser] = (joinedUsers map (v => (v.userId, v))).toMap
  /**
   * Is the given user joined to this kanban?
   * The user is considered joined when either of the following holds:
   * - the user is one of the kanban's joined users
   * - the user is an application administrator
   * @param user target user
   * @return true if the user is joined to this kanban
   */
def isJoined(user: User): Boolean = user.userId match {
case Some(userId) =>
joinedUserMap.contains(userId) || user.authority == UserAuthority.ApplicationAdministrator
case _ => false
}
  /**
   * Is the given user an administrator of this kanban?
   * The user is considered a kanban administrator when either of the following holds:
   * - the user is one of the kanban's joined users and has the Administrator authority
   * - the user is an application administrator
   * @param user target user
   * @return true if the user is a kanban administrator
   */
def isAdministrator(user: User): Boolean = user.userId match {
case Some(userId) =>
(joinedUserMap get userId).exists { v => v.authority == KanbanAuthority.Administrator } ||
user.authority == UserAuthority.ApplicationAdministrator
case _ => false
}
  /**
   * Update kanban information.
   * Updates the kanban title and description and returns a new instance.
   * @param configuration kanban configuration
   * @return updated kanban
   */
def updated(configuration: KanbanConfiguration): Kanban = {
this.copy(configuration = configuration)
}
  /**
   * Create a KanbanRow.
   * @param user target user
   * @return KanbanRow
   */
def toKanbanRow(user: User) = KanbanRow(
id = kanbanId.get.id,
title = configuration.title,
description = configuration.description,
archiveStatus = configuration.kanbanStatus.code,
lockVersion = configuration.lockVersion,
authority = if (isAdministrator(user)) "1" else "0"
)
}
/**
 * Kanban companion object.
 */
object Kanban {
  /**
   * Create the initial kanban data.
   * The registering user is added as the kanban administrator.
   * @param kanbanTitle kanban title
   * @param kanbanDescription kanban description
   * @param loginUserId registering user ID
   * @return kanban domain
   */
def createInitKanban(kanbanTitle: String, kanbanDescription: String, loginUserId: Long): Kanban = {
Kanban(
kanbanId = None,
configuration = KanbanConfiguration(
title = kanbanTitle,
description = kanbanDescription,
kanbanStatus = KanbanStatus.Open
),
joinedUsers = Seq(
JoinedUser(
userId = UserId(loginUserId),
authority = KanbanAuthority.Administrator,
name = ""
)
)
)
}
}
/**
 * Kanban ID.
 * @param id ID value
 */
case class KanbanId(
id: Long
) extends ValueObject[KanbanId] {
override def sameValueAs(other: KanbanId): Boolean = this.id == other.id
}
/**
 * Kanban configuration.
 * @param title kanban title
 * @param description description
 * @param kanbanStatus kanban status
 * @param lockVersion lockVersion
 */
case class KanbanConfiguration(
title: String,
description: String,
kanbanStatus: KanbanStatus,
lockVersion: Long = 1L
)
// Kanban status
sealed abstract class KanbanStatus(override val code: String) extends EnumEntry
object KanbanStatus extends Enum[KanbanStatus] {
  /** Open (shown by default). */
case object Open extends KanbanStatus("0")
  /** Archive (not shown unless explicitly requested via search conditions). */
case object Archive extends KanbanStatus("1")
protected val values = Seq(Open, Archive)
}
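// --- Hedged usage sketch, not part of the original file ---
// Creating the initial kanban and checking authority; `someUser` stands in for a
// domain.user.User value whose construction is not shown here.
//
//   val kanban = Kanban.createInitKanban("Sprint board", "Tasks for sprint 1", loginUserId = 1L)
//   kanban.isJoined(someUser)         // true for the creator or an application administrator
//   kanban.isAdministrator(someUser)  // true for the creator, who is registered as Administrator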
| nemuzuka/vss-kanban | src/main/scala/domain/kanban/Kanban.scala | Scala | mit | 4,446 |
package db
import com.bryzek.apidoc.common.v0.models.{Audit, ReferenceGuid}
import anorm._
import anorm.JodaParameterMetaData._
import org.joda.time.DateTime
import java.util.UUID
object AuditsDao {
def query(tableName: String) = {
Seq(
queryCreation(tableName),
s"${tableName}.updated_at",
s"${tableName}.updated_by_guid"
).mkString(", ")
}
def queryCreation(tableName: String) = {
Seq(
s"${tableName}.created_at",
s"${tableName}.created_by_guid"
).mkString(", ")
}
def queryWithAlias(tableName: String, prefix: String) = {
Seq(
s"${tableName}.created_at as ${prefix}_created_at",
s"${tableName}.created_by_guid as ${prefix}_created_by_guid",
s"${tableName}.updated_at as ${prefix}_updated_at",
s"${tableName}.updated_by_guid as ${prefix}_updated_by_guid"
).mkString(", ")
}
def queryCreationWithAlias(tableName: String, prefix: String) = {
Seq(
s"${tableName}.created_at as ${prefix}_created_at",
s"${tableName}.created_by_guid as ${prefix}_created_by_guid"
).mkString(", ")
}
private[db] def fromRow(
row: anorm.Row,
prefix: Option[String] = None
): Audit = {
val p = prefix.map( _ + "_").getOrElse("")
Audit(
createdAt = row[DateTime](s"${p}created_at"),
createdBy = ReferenceGuid(
guid = row[UUID](s"${p}created_by_guid")
),
updatedAt = row[DateTime](s"${p}updated_at"),
updatedBy = ReferenceGuid(
guid = row[UUID](s"${p}updated_by_guid")
)
)
}
private[db] def fromRowCreation(
row: anorm.Row,
prefix: Option[String] = None
): Audit = {
val p = prefix.map( _ + "_").getOrElse("")
val createdAt = row[DateTime](s"${p}created_at")
val createdBy = ReferenceGuid(
guid = row[UUID](s"${p}created_by_guid")
)
Audit(
createdAt = createdAt,
createdBy = createdBy,
updatedAt = createdAt,
updatedBy = createdBy
)
}
}
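// --- Hedged usage sketch, not part of the original file ---
// How the helpers above are typically interpolated into SQL; the table name, alias prefix
// and surrounding query are illustrative only.
object AuditsDaoUsageSketch {
  val exampleSql: String =
    s"""
       |select applications.guid,
       |       ${AuditsDao.queryWithAlias("applications", "audit")}
       |  from applications
       | where applications.deleted_at is null
    """.stripMargin
}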
| Seanstoppable/apidoc | api/app/db/AuditsDao.scala | Scala | mit | 1,983 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package internal
import scala.language.implicitConversions
import scala.reflect.api.Universe
import scala.reflect.macros.Attachments
trait Internals extends api.Internals {
self: SymbolTable =>
type Internal = MacroInternalApi
lazy val internal: Internal = new SymbolTableInternal {}
@deprecated("compatibility with Scala 2.10 EOL", "2.13.0")
type Compat = MacroCompatApi
@deprecated("compatibility with Scala 2.10 EOL", "2.13.0")
lazy val compat: Compat = new Compat {}
trait SymbolTableInternal extends MacroInternalApi {
lazy val reificationSupport: ReificationSupportApi = self.build
def createImporter(from0: Universe): Importer { val from: from0.type } = self.mkImporter(from0)
def newScopeWith(elems: Symbol*): Scope = self.newScopeWith(elems: _*)
def enter(scope: Scope, sym: Symbol): scope.type = { scope.enter(sym); scope }
def unlink(scope: Scope, sym: Symbol): scope.type = { scope.unlink(sym); scope }
def freeTerms(tree: Tree): List[FreeTermSymbol] = tree.freeTerms
def freeTypes(tree: Tree): List[FreeTypeSymbol] = tree.freeTypes
def substituteSymbols(tree: Tree, from: List[Symbol], to: List[Symbol]): Tree = tree.substituteSymbols(from, to)
def substituteTypes(tree: Tree, from: List[Symbol], to: List[Type]): Tree = tree.substituteTypes(from, to)
def substituteThis(tree: Tree, clazz: Symbol, to: => Tree): Tree = tree.substituteThis(clazz, to)
def attachments(tree: Tree): Attachments { type Pos = Position } = tree.attachments
def updateAttachment[T: ClassTag](tree: Tree, attachment: T): tree.type = tree.updateAttachment(attachment)
def removeAttachment[T: ClassTag](tree: Tree): tree.type = tree.removeAttachment[T]
def setPos(tree: Tree, newpos: Position): tree.type = tree.setPos(newpos)
def setType(tree: Tree, tp: Type): tree.type = tree.setType(tp)
def defineType(tree: Tree, tp: Type): tree.type = tree.defineType(tp)
def setSymbol(tree: Tree, sym: Symbol): tree.type = tree.setSymbol(sym)
def setOriginal(tt: TypeTree, tree: Tree): TypeTree = tt.setOriginal(tree)
def captureVariable(vble: Symbol): Unit = self.captureVariable(vble)
def referenceCapturedVariable(vble: Symbol): Tree = self.referenceCapturedVariable(vble)
def capturedVariableType(vble: Symbol): Type = self.capturedVariableType(vble)
def classDef(sym: Symbol, impl: Template): ClassDef = self.ClassDef(sym, impl)
def moduleDef(sym: Symbol, impl: Template): ModuleDef = self.ModuleDef(sym, impl)
def valDef(sym: Symbol, rhs: Tree): ValDef = self.ValDef(sym, rhs)
def valDef(sym: Symbol): ValDef = self.ValDef(sym)
def defDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef = self.DefDef(sym, mods, vparamss, rhs)
def defDef(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree): DefDef = self.DefDef(sym, vparamss, rhs)
def defDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef = self.DefDef(sym, mods, rhs)
def defDef(sym: Symbol, rhs: Tree): DefDef = self.DefDef(sym, rhs)
def defDef(sym: Symbol, rhs: List[List[Symbol]] => Tree): DefDef = self.DefDef(sym, rhs)
def typeDef(sym: Symbol, rhs: Tree): TypeDef = self.TypeDef(sym, rhs)
def typeDef(sym: Symbol): TypeDef = self.TypeDef(sym)
def labelDef(sym: Symbol, params: List[Symbol], rhs: Tree): LabelDef = self.LabelDef(sym, params, rhs)
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): tree.type = { new ChangeOwnerTraverser(prev, next).traverse(tree); tree }
lazy val gen = self.treeBuild
def isFreeTerm(symbol: Symbol): Boolean = symbol.isFreeTerm
def asFreeTerm(symbol: Symbol): FreeTermSymbol = symbol.asFreeTerm
def isFreeType(symbol: Symbol): Boolean = symbol.isFreeType
def asFreeType(symbol: Symbol): FreeTypeSymbol = symbol.asFreeType
def newTermSymbol(symbol: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = symbol.newTermSymbol(name, pos, flags)
def newModuleAndClassSymbol(symbol: Symbol, name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = symbol.newModuleAndClassSymbol(name, pos, flags)
def newMethodSymbol(symbol: Symbol, name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = symbol.newMethodSymbol(name, pos, flags)
def newTypeSymbol(symbol: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = symbol.newTypeSymbol(name, pos, flags)
def newClassSymbol(symbol: Symbol, name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = symbol.newClassSymbol(name, pos, flags)
def newFreeTerm(name: String, value: => Any, flags: FlagSet = NoFlags, origin: String = null): FreeTermSymbol = reificationSupport.newFreeTerm(name, value, flags, origin)
def newFreeType(name: String, flags: FlagSet = NoFlags, origin: String = null): FreeTypeSymbol = reificationSupport.newFreeType(name, flags, origin)
def isErroneous(symbol: Symbol): Boolean = symbol.isErroneous
def isSkolem(symbol: Symbol): Boolean = symbol.isSkolem
def deSkolemize(symbol: Symbol): Symbol = symbol.deSkolemize
def initialize(symbol: Symbol): symbol.type = symbol.initialize
def fullyInitialize(symbol: Symbol): symbol.type = definitions.fullyInitializeSymbol(symbol).asInstanceOf[symbol.type]
def fullyInitialize(tp: Type): tp.type = definitions.fullyInitializeType(tp).asInstanceOf[tp.type]
def fullyInitialize(scope: Scope): scope.type = definitions.fullyInitializeScope(scope).asInstanceOf[scope.type]
def flags(symbol: Symbol): FlagSet = symbol.flags
def attachments(symbol: Symbol): Attachments { type Pos = Position } = symbol.attachments
def updateAttachment[T: ClassTag](symbol: Symbol, attachment: T): symbol.type = symbol.updateAttachment(attachment)
def removeAttachment[T: ClassTag](symbol: Symbol): symbol.type = symbol.removeAttachment[T]
def setOwner(symbol: Symbol, newowner: Symbol): symbol.type = { symbol.owner = newowner; symbol }
def setInfo(symbol: Symbol, tpe: Type): symbol.type = symbol.setInfo(tpe)
def setAnnotations(symbol: Symbol, annots: Annotation*): symbol.type = symbol.setAnnotations(annots: _*)
def setName(symbol: Symbol, name: Name): symbol.type = symbol.setName(name)
def setPrivateWithin(symbol: Symbol, sym: Symbol): symbol.type = symbol.setPrivateWithin(sym)
def setFlag(symbol: Symbol, flags: FlagSet): symbol.type = symbol.setFlag(flags)
def resetFlag(symbol: Symbol, flags: FlagSet): symbol.type = symbol.resetFlag(flags)
def thisType(sym: Symbol): Type = self.ThisType(sym)
def singleType(pre: Type, sym: Symbol): Type = self.SingleType(pre, sym)
def superType(thistpe: Type, supertpe: Type): Type = self.SuperType(thistpe, supertpe)
def constantType(value: Constant): ConstantType = self.ConstantType(value)
def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = self.TypeRef(pre, sym, args)
def refinedType(parents: List[Type], decls: Scope): RefinedType = self.RefinedType(parents, decls)
def refinedType(parents: List[Type], decls: Scope, clazz: Symbol): RefinedType = self.RefinedType(parents, decls, clazz)
def refinedType(parents: List[Type], owner: Symbol): Type = self.refinedType(parents, owner)
def refinedType(parents: List[Type], owner: Symbol, decls: Scope): Type = self.RefinedType(parents, decls, owner)
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = self.refinedType(parents, owner, decls, pos)
def intersectionType(tps: List[Type]): Type = self.intersectionType(tps)
def intersectionType(tps: List[Type], owner: Symbol): Type = self.intersectionType(tps, owner)
def classInfoType(parents: List[Type], decls: Scope, typeSymbol: Symbol): ClassInfoType = self.ClassInfoType(parents, decls, typeSymbol)
def methodType(params: List[Symbol], resultType: Type): MethodType = self.MethodType(params, resultType)
def nullaryMethodType(resultType: Type): NullaryMethodType = self.NullaryMethodType(resultType)
def polyType(typeParams: List[Symbol], resultType: Type): PolyType = self.PolyType(typeParams, resultType)
def existentialType(quantified: List[Symbol], underlying: Type): ExistentialType = self.ExistentialType(quantified, underlying)
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = self.existentialAbstraction(tparams, tpe0)
def annotatedType(annotations: List[Annotation], underlying: Type): AnnotatedType = self.AnnotatedType(annotations, underlying)
def typeBounds(lo: Type, hi: Type): TypeBounds = self.TypeBounds(lo, hi)
def boundedWildcardType(bounds: TypeBounds): BoundedWildcardType = self.BoundedWildcardType(bounds)
def subpatterns(tree: Tree): Option[List[Tree]] = tree.attachments.get[SubpatternsAttachment].map(_.patterns.map(duplicateAndKeepPositions))
type Decorators = MacroDecoratorApi
lazy val decorators: Decorators = new MacroDecoratorApi {
override type ScopeDecorator[T <: Scope] = MacroScopeDecoratorApi[T]
override implicit def scopeDecorator[T <: Scope](scope: T): ScopeDecorator[T] = new MacroScopeDecoratorApi[T](scope)
override type TreeDecorator[T <: Tree] = MacroTreeDecoratorApi[T]
override implicit def treeDecorator[T <: Tree](tree: T): TreeDecorator[T] = new MacroTreeDecoratorApi[T](tree)
override type TypeTreeDecorator[T <: TypeTree] = MacroTypeTreeDecoratorApi[T]
override implicit def typeTreeDecorator[T <: TypeTree](tt: T): TypeTreeDecorator[T] = new MacroTypeTreeDecoratorApi[T](tt)
override type SymbolDecorator[T <: Symbol] = MacroSymbolDecoratorApi[T]
override implicit def symbolDecorator[T <: Symbol](symbol: T): SymbolDecorator[T] = new MacroSymbolDecoratorApi[T](symbol)
override type TypeDecorator[T <: Type] = TypeDecoratorApi[T]
override implicit def typeDecorator[T <: Type](tp: T): TypeDecorator[T] = new TypeDecoratorApi[T](tp)
}
}
lazy val treeBuild = new self.TreeGen {
def mkAttributedQualifier(tpe: Type): Tree = self.gen.mkAttributedQualifier(tpe)
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = self.gen.mkAttributedQualifier(tpe, termSym)
def mkAttributedRef(pre: Type, sym: Symbol): RefTree = self.gen.mkAttributedRef(pre, sym)
def mkAttributedRef(sym: Symbol): RefTree = self.gen.mkAttributedRef(sym)
def stabilize(tree: Tree): Tree = self.gen.stabilize(tree)
def mkAttributedStableRef(pre: Type, sym: Symbol): Tree = self.gen.mkAttributedStableRef(pre, sym)
def mkAttributedStableRef(sym: Symbol): Tree = self.gen.mkAttributedStableRef(sym)
def mkUnattributedRef(sym: Symbol): RefTree = self.gen.mkUnattributedRef(sym)
def mkUnattributedRef(fullName: Name): RefTree = self.gen.mkUnattributedRef(fullName)
def mkAttributedThis(sym: Symbol): This = self.gen.mkAttributedThis(sym)
def mkAttributedIdent(sym: Symbol): RefTree = self.gen.mkAttributedIdent(sym)
def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree = self.gen.mkAttributedSelect(qual, sym)
def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, methodName, targs, args)
def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(method, targs, args)
def mkMethodCall(method: Symbol, args: List[Tree]): Tree = self.gen.mkMethodCall(method, args)
def mkMethodCall(target: Tree, args: List[Tree]): Tree = self.gen.mkMethodCall(target, args)
def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, methodName, args)
def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(receiver, method, targs, args)
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree = self.gen.mkMethodCall(target, targs, args)
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree = self.gen.mkNullaryCall(method, targs)
def mkRuntimeUniverseRef: Tree = self.gen.mkRuntimeUniverseRef
def mkZero(tp: Type): Tree = self.gen.mkZero(tp)
def mkCast(tree: Tree, pt: Type): Tree = self.gen.mkCast(tree, pt)
}
}
| lrytz/scala | src/reflect/scala/reflect/internal/Internals.scala | Scala | apache-2.0 | 12,628 |
package controllers
import play.api.Play.current
import play.api.cache.Cache
import play.api.mvc._
import scala.xml.NodeSeq
import models._
import models.dao._
object Feeds extends Controller {
val feedGen = new FeedGenerator(DAOFactory.itemDAO)
def latest = Action { implicit request =>
FeedStatsHelper.incrementDownloadCount(request.remoteAddress)
val feedXml = Cache.getOrElse[NodeSeq]("allItems.feed", 3600) {
feedGen.allItemsFeed("http://" + request.host)
}
Ok(feedXml)
}
}
| aspectcg15/play-app | app/controllers/Feeds.scala | Scala | gpl-3.0 | 513 |
// Copyright (C) 2016 IBM Corp. All Rights Reserved.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.ibm.watson.developer_cloud.concept_insights.v2.model
import com.ibm.watson.developer_cloud.service.GenericModel
import com.ibm.watson.developer_cloud.utils.Validation
/**
* Created by Martin Harvan on 11/04/16.
*/
case class Concept(
_abstract: Option[String],
id: String,
label: String,
link: Option[String],
name: Option[String],
ontology: Option[List[String]],
thumbnail: Option[String]
) extends GenericModel
case class ConceptMetadata(
_abstract: Option[String],
id: String,
label: String,
link: Option[String],
ontology: Option[List[String]],
thumbnail: Option[String],
_type: Option[String]
) extends GenericModel
case class Concepts(
concepts: List[Concept]
) extends GenericModel
object Concept //TODO from graph
| kane77/watson-scala-wrapper | src/main/scala/com/ibm/watson/developer_cloud/concept_insights/v2/model/Concepts.scala | Scala | apache-2.0 | 1,495 |
package free
import mu.ListInstr._
object ListMuExample {
def main(args: Array[String]) {
val xs = cons(1, cons(2, cons(3, cons(4, cons(5, nil)))))
val vs = cons(6, cons(7, cons(8, cons(9, cons(10, nil)))))
print(show(xs))
print(" -- Start list")
println()
print(show(map(xs)(_ + 2)))
print(" -- Add 2 to every element")
println()
print(show(filter(xs)(_ % 2 == 0)))
print(" -- Filter even number")
println()
print(find(xs)(_ == 3))
print(" -- Find number 3")
println()
print(foldRight(xs, 0)(_ + _))
print(" -- Sum every element")
println()
foldRight(xs, ()){ case (a, _) => print("(" + a + ")")}
print(" -- Right traversal")
println()
foldLeft(xs, ()){ case (_, a) => print("(" + a + ")")}
print(" -- Left traversal")
println()
print(show(append(xs, vs)))
print(" -- Append")
println()
}
}
| YoEight/psug-free | src/main/scala/free/ListMuExample.scala | Scala | mit | 910 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.user.accountsettings.view.parameters
import com.esofthead.mycollab.vaadin.mvp.ScreenData
/**
* @author MyCollab Ltd.
* @since 5.1.0
*/
object BillingScreenData {
class CancelAccount(params: Object) extends ScreenData[Object](params) {
def this() = this(null)
}
class BillingSummary(params: Object) extends ScreenData[Object](params) {
def this() = this(null)
}
}
| maduhu/mycollab | mycollab-web/src/main/scala/com/esofthead/mycollab/module/user/accountsettings/view/parameters/BillingScreenData.scala | Scala | agpl-3.0 | 1,136 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.returns
import forms.ReturnFrequencyForm
import org.jsoup.Jsoup
import views.VatRegViewSpec
import views.html.returns.return_frequency_view
class ReturnFrequencyViewSpec extends VatRegViewSpec {
val view = app.injector.instanceOf[return_frequency_view]
implicit val doc = Jsoup.parse(view(form = ReturnFrequencyForm.form, showAAS = true, showMonthly = false).body)
object ExpectedContent {
val heading = "When will the business do its VAT Returns?"
val title = "When will the business do its VAT Returns?"
val para = "Usually, VAT-registered businesses submit their VAT returns and payments to HM Revenue and Customs 4 times a year."
val detailsSummary = "About the Annual Accounting Scheme"
val detailsPara1 = "businesses on the scheme:"
val detailsPara1Bullet1 = "submit one VAT Return a year, rather than quarterly or monthly returns"
val detailsPara1Bullet2 = "make monthly or quarterly payments, based on an HMRC estimate of their end-of-year VAT bill"
val detailsPara2 = "It may not suit businesses that:"
val detailsPara2Bullet1 = "want to keep up to date with the exact amount of VAT they owe or need to reclaim"
val detailsPara2Bullet2 = "regularly reclaim more VAT than they charge, because they will only get one VAT refund a year"
val findOutMoreLinkText = "Find out more about the Annual Accounting Scheme (opens in new tab)"
val label = "Select yes if you expect the business to regularly claim VAT refunds from HMRC"
val continue = "Save and continue"
val quarterly = "Quarterly"
val annually = "The business would like to join the Annual Accounting Scheme"
}
"The return frequency page" must {
"have a back link in new Setup" in new ViewSetup {
doc.hasBackLink mustBe true
}
"have the correct heading" in new ViewSetup {
doc.heading mustBe Some(ExpectedContent.heading)
}
"have a progressive disclosure" in new ViewSetup {
doc.details mustBe Some(Details(ExpectedContent.detailsSummary,
ExpectedContent.detailsPara1 + " " +
ExpectedContent.detailsPara1Bullet1 + " " +
ExpectedContent.detailsPara1Bullet2 + " " +
ExpectedContent.detailsPara2 + " " +
ExpectedContent.detailsPara2Bullet1 + " " +
ExpectedContent.detailsPara2Bullet2 + " " +
ExpectedContent.findOutMoreLinkText))
}
"have frequency radio options" in new ViewSetup {
doc.radio("quarterly") mustBe Some(ExpectedContent.quarterly)
doc.radio("annual") mustBe Some(ExpectedContent.annually)
}
"have a primary action" in new ViewSetup {
doc.submitButton mustBe Some(ExpectedContent.continue)
}
}
}
| hmrc/vat-registration-frontend | test/views/returns/ReturnFrequencyViewSpec.scala | Scala | apache-2.0 | 3,302 |
package com.programmaticallyspeaking.ncd.infra
import scala.language.implicitConversions
object BetterOption {
implicit def option2Better[A](opt: Option[A]): BetterOption[A] = new BetterOption[A](opt)
class BetterOption[A](opt: Option[A]) {
def toEither(msg: => String): Either[String, A] = opt match {
case Some(a) => Right(a)
case None => Left(msg)
}
}
}
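// --- Hedged usage sketch, not part of the original file ---
// The implicit conversion lets any Option be turned into an Either with a lazily built
// error message; the config map below is illustrative only.
object BetterOptionUsageSketch {
  import BetterOption._
  def requiredSetting(config: Map[String, String], key: String): Either[String, String] =
    config.get(key).toEither(s"Missing required setting: $key")
}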
| provegard/ncdbg | src/main/scala/com/programmaticallyspeaking/ncd/infra/BetterOption.scala | Scala | bsd-3-clause | 388 |
import xml.Node
object Test extends App {
val body: Node = <elem>hi</elem>
println ((body: AnyRef, "foo") match {
case (node: Node, "bar") => "bye"
case (ser: Serializable, "foo") => "hi"
})
println ((body, "foo") match {
case (node: Node, "bar") => "bye"
case (ser: Serializable, "foo") => "hi"
})
println ((body: AnyRef, "foo") match {
case (node: Node, "foo") => "bye"
case (ser: Serializable, "foo") => "hi"
})
println ((body: AnyRef, "foo") match {
case (node: Node, "foo") => "bye"
case (ser: Serializable, "foo") => "hi"
})
}
| som-snytt/dotty | tests/pending/run/t4124.scala | Scala | apache-2.0 | 616 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import SharedHelpers._
import Matchers._
class ShouldBeASymbolSpec extends Spec with FileMocks {
object `The be a ('symbol) syntax` {
def `should do nothing if the object has an appropriately named method, which returns true` {
fileMock should be a ('file)
isFileMock should be a ('file)
}
def `should throw TestFailedException with an appropriate error message if the object has an appropriately named method, but it returns false` {
val ex5 = intercept[TestFailedException] {
List(1, 2) should be a ('empty)
}
assert(ex5.message === Some("List(1, 2) was not a empty"))
assert(ex5.failedCodeFileName === Some("ShouldBeASymbolSpec.scala"))
assert(ex5.failedCodeLineNumber === Some(thisLineNumber - 4))
}
def `should throw TestFailedException if no <symbol> or is<Symbol> method exists` {
val ex1 = intercept[TestFailedException] {
noPredicateMock should be a ('apple)
}
ex1.getMessage should equal ("NoPredicateMock has neither an apple nor an isApple method")
// Check message for name that starts with a consonant (should use a instead of an)
val ex2 = intercept[TestFailedException] {
noPredicateMock should be a ('file)
}
ex2.getMessage should equal ("NoPredicateMock has neither a file nor an isFile method")
}
def `should do nothing if the object has an appropriately named method, which returns false when used with not` {
notFileMock should not { be a ('file) }
notFileMock should not be a ('file)
isNotFileMock should not { be a ('file) }
isNotFileMock should not be a ('file)
}
def `should throw TestFailedException if no <symbol> or is<Symbol> method exists, when used with not` {
val ex1 = intercept[TestFailedException] {
noPredicateMock should not { be a ('apple) }
}
ex1.getMessage should equal ("NoPredicateMock has neither an apple nor an isApple method")
val ex2 = intercept[TestFailedException] {
noPredicateMock should not (be a ('directory))
}
ex2.getMessage should equal ("NoPredicateMock has neither a directory nor an isDirectory method")
val ex3 = intercept[TestFailedException] {
noPredicateMock should not be a ('apple)
}
ex3.getMessage should equal ("NoPredicateMock has neither an apple nor an isApple method")
val ex4 = intercept[TestFailedException] {
noPredicateMock should not be a ('directory)
}
ex4.getMessage should equal ("NoPredicateMock has neither a directory nor an isDirectory method")
}
def `should do nothing if the object has an appropriately named method, which returns true, when used in a logical-and expression` {
fileMock should ((be a ('file)) and (be a ('file)))
fileMock should (be a ('file) and (be a ('file)))
fileMock should (be a ('file) and be a ('file))
isFileMock should ((be a ('file)) and (be a ('file)))
isFileMock should (be a ('file) and (be a ('file)))
isFileMock should (be a ('file) and be a ('file))
}
def `should do nothing if the object has an appropriately named method, which returns true, when used in a logical-or expression` {
fileMock should ((be a ('directory)) or (be a ('file)))
fileMock should (be a ('directory) or (be a ('file)))
fileMock should (be a ('directory) or be a ('file))
isFileMock should ((be a ('directory)) or (be a ('file)))
isFileMock should (be a ('directory) or (be a ('file)))
isFileMock should (be a ('directory) or be a ('file))
fileMock should ((be a ('file)) or (be a ('directory)))
fileMock should (be a ('file) or (be a ('directory)))
fileMock should (be a ('file) or be a ('directory))
isFileMock should ((be a ('file)) or (be a ('directory)))
isFileMock should (be a ('file) or (be a ('directory)))
isFileMock should (be a ('file) or be a ('directory))
}
def `should do nothing if the object has an appropriately named method, which returns false, when used in a logical-and expression with not` {
notFileMock should (not (be a ('file)) and not (be a ('file)))
notFileMock should ((not be a ('file)) and (not be a ('file)))
notFileMock should (not be a ('file) and not be a ('file))
isNotFileMock should (not (be a ('file)) and not (be a ('file)))
isNotFileMock should ((not be a ('file)) and (not be a ('file)))
isNotFileMock should (not be a ('file) and not be a ('file))
}
def `should do nothing if the object has an appropriately named method, which returns false, when used in a logical-or expression with not` {
notFileMock should (not (be a ('file)) or not (be a ('file)))
notFileMock should ((not be a ('file)) or (not be a ('file)))
notFileMock should (not be a ('file) or not be a ('file))
isNotFileMock should (not (be a ('file)) or not (be a ('file)))
isNotFileMock should ((not be a ('file)) or (not be a ('file)))
isNotFileMock should (not be a ('file) or not be a ('file))
notFileMock should (not (be a ('directory)) or not (be a ('file)))
notFileMock should ((not be a ('directory)) or (not be a ('file)))
notFileMock should (not be a ('directory) or not be a ('file))
isNotFileMock should (not (be a ('directory)) or not (be a ('file)))
isNotFileMock should ((not be a ('directory)) or (not be a ('file)))
isNotFileMock should (not be a ('directory) or not be a ('file))
}
def `should throw TestFailedException if the object has an appropriately named method, which returns false` {
val caught1 = intercept[TestFailedException] {
notFileMock should be a ('file)
}
assert(caught1.getMessage === "NotFileMock was not a file")
val caught2 = intercept[TestFailedException] {
isNotFileMock should be a ('file)
}
assert(caught2.getMessage === "IsNotFileMock was not a file")
}
def `should throw TestFailedException if the object has an appropriately named method, which returns true when used with not` {
val caught1 = intercept[TestFailedException] {
fileMock should not { be a ('file) }
}
assert(caught1.getMessage === "FileMock was a file")
val caught2 = intercept[TestFailedException] {
fileMock should not be a ('file)
}
assert(caught2.getMessage === "FileMock was a file")
val caught3 = intercept[TestFailedException] {
isFileMock should not { be a ('file) }
}
assert(caught3.getMessage === "IsFileMock was a file")
val caught4 = intercept[TestFailedException] {
isFileMock should not be a ('file)
}
assert(caught4.getMessage === "IsFileMock was a file")
}
def `should throw TestFailedException if the object has an appropriately named method, which returns false, when used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
fileMock should ((be a ('file)) and (be a ('directory)))
}
assert(caught1.getMessage === "FileMock was a file, but FileMock was not a directory")
val caught2 = intercept[TestFailedException] {
fileMock should (be a ('file) and (be a ('directory)))
}
assert(caught2.getMessage === "FileMock was a file, but FileMock was not a directory")
val caught3 = intercept[TestFailedException] {
fileMock should (be a ('file) and be a ('directory))
}
assert(caught3.getMessage === "FileMock was a file, but FileMock was not a directory")
val caught4 = intercept[TestFailedException] {
isFileMock should ((be a ('file)) and (be a ('directory)))
}
assert(caught4.getMessage === "IsFileMock was a file, but IsFileMock was not a directory")
val caught5 = intercept[TestFailedException] {
isFileMock should (be a ('file) and (be a ('directory)))
}
assert(caught5.getMessage === "IsFileMock was a file, but IsFileMock was not a directory")
val caught6 = intercept[TestFailedException] {
isFileMock should (be a ('file) and be a ('directory))
}
assert(caught6.getMessage === "IsFileMock was a file, but IsFileMock was not a directory")
}
def `should throw TestFailedException if the object has an appropriately named method, which returns false, when used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
notFileMock should ((be a ('file)) or (be a ('file)))
}
assert(caught1.getMessage === "NotFileMock was not a file, and NotFileMock was not a file")
val caught2 = intercept[TestFailedException] {
notFileMock should (be a ('file) or (be a ('file)))
}
assert(caught2.getMessage === "NotFileMock was not a file, and NotFileMock was not a file")
val caught3 = intercept[TestFailedException] {
notFileMock should (be a ('file) or be a ('file))
}
assert(caught3.getMessage === "NotFileMock was not a file, and NotFileMock was not a file")
val caught4 = intercept[TestFailedException] {
isNotFileMock should ((be a ('file)) or (be a ('file)))
}
assert(caught4.getMessage === "IsNotFileMock was not a file, and IsNotFileMock was not a file")
val caught5 = intercept[TestFailedException] {
isNotFileMock should (be a ('file) or (be a ('file)))
}
assert(caught5.getMessage === "IsNotFileMock was not a file, and IsNotFileMock was not a file")
val caught6 = intercept[TestFailedException] {
isNotFileMock should (be a ('file) or be a ('file))
}
assert(caught6.getMessage === "IsNotFileMock was not a file, and IsNotFileMock was not a file")
}
def `should throw TestFailedException if the object has an appropriately named method, which returns true, when used in a logical-and expression with not` {
val caught1 = intercept[TestFailedException] {
fileMock should (not (be a ('directory)) and not (be a ('file)))
}
assert(caught1.getMessage === "FileMock was not a directory, but FileMock was a file")
val caught2 = intercept[TestFailedException] {
fileMock should ((not be a ('directory)) and (not be a ('file)))
}
assert(caught2.getMessage === "FileMock was not a directory, but FileMock was a file")
val caught3 = intercept[TestFailedException] {
fileMock should (not be a ('directory) and not be a ('file))
}
assert(caught3.getMessage === "FileMock was not a directory, but FileMock was a file")
val caught4 = intercept[TestFailedException] {
isFileMock should (not (be a ('directory)) and not (be a ('file)))
}
assert(caught4.getMessage === "IsFileMock was not a directory, but IsFileMock was a file")
val caught5 = intercept[TestFailedException] {
isFileMock should ((not be a ('directory)) and (not be a ('file)))
}
assert(caught5.getMessage === "IsFileMock was not a directory, but IsFileMock was a file")
val caught6 = intercept[TestFailedException] {
isFileMock should (not be a ('directory) and not be a ('file))
}
assert(caught6.getMessage === "IsFileMock was not a directory, but IsFileMock was a file")
// Check that the error message "short circuits"
val caught7 = intercept[TestFailedException] {
fileMock should (not (be a ('file)) and not (be a ('directory)))
}
assert(caught7.getMessage === "FileMock was a file")
}
def `should throw TestFailedException if the object has an appropriately named method, which returns true, when used in a logical-or expression with not` {
val caught1 = intercept[TestFailedException] {
fileMock should (not (be a ('file)) or not (be a ('file)))
}
assert(caught1.getMessage === "FileMock was a file, and FileMock was a file")
val caught2 = intercept[TestFailedException] {
fileMock should ((not be a ('file)) or (not be a ('file)))
}
assert(caught2.getMessage === "FileMock was a file, and FileMock was a file")
val caught3 = intercept[TestFailedException] {
fileMock should (not be a ('file) or not be a ('file))
}
assert(caught3.getMessage === "FileMock was a file, and FileMock was a file")
val caught4 = intercept[TestFailedException] {
isFileMock should (not (be a ('file)) or not (be a ('file)))
}
assert(caught4.getMessage === "IsFileMock was a file, and IsFileMock was a file")
val caught5 = intercept[TestFailedException] {
isFileMock should ((not be a ('file)) or (not be a ('file)))
}
assert(caught5.getMessage === "IsFileMock was a file, and IsFileMock was a file")
val caught6 = intercept[TestFailedException] {
isFileMock should (not be a ('file) or not be a ('file))
}
assert(caught6.getMessage === "IsFileMock was a file, and IsFileMock was a file")
}
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/ShouldBeASymbolSpec.scala | Scala | apache-2.0 | 13,696 |
import sbt.Keys._
import sbt._
import uk.gov.hmrc.DefaultBuildSettings._
import uk.gov.hmrc.PublishingSettings._
import uk.gov.hmrc.{ShellPrompt, SbtBuildInfo}
object HmrcBuild extends Build {
import de.heikoseeberger.sbtheader.AutomateHeaderPlugin
import uk.gov.hmrc.DefaultBuildSettings._
import uk.gov.hmrc.PublishingSettings._
import uk.gov.hmrc.{SbtBuildInfo, ShellPrompt}
val nameApp = "accessibility-driver"
val versionApp = "1.1.0"
val appDependencies = Seq(
"org.seleniumhq.selenium" % "selenium-java" % "2.45.0",
"org.seleniumhq.selenium" % "selenium-firefox-driver" % "2.45.0",
"org.littleshoot" % "littleproxy" % "1.0.0-beta8",
"joda-time" % "joda-time" % "2.7",
"org.joda" % "joda-convert" % "1.7",
"commons-codec" % "commons-codec" % "1.10",
"commons-io" % "commons-io" % "2.4",
"org.jsoup" % "jsoup" % "1.8.2"
)
lazy val playBreadcrumb = Project(nameApp, file("."))
.enablePlugins(AutomateHeaderPlugin)
.settings(version := versionApp)
.settings(scalaSettings : _*)
.settings(defaultSettings(false) : _*)
.settings(
targetJvm := "jvm-1.7",
shellPrompt := ShellPrompt(versionApp),
libraryDependencies ++= appDependencies,
scalaVersion := "2.11.5",
organization := "uk.gov.hmrc",
crossScalaVersions := Seq("2.10.4", "2.11.5"),
resolvers := Seq(
"typesafe-releases" at "http://repo.typesafe.com/typesafe/releases/",
"typesafe-snapshots" at "http://repo.typesafe.com/typesafe/snapshots/"
)
)
.settings(publishAllArtefacts: _*)
.settings(SbtBuildInfo(): _*)
.settings(POMMetadata(): _*)
.settings(HeaderSettings())
}
object POMMetadata {
def apply() = {
pomExtra :=
<url>https://www.gov.uk/government/organisations/hm-revenue-customs</url>
<licenses>
<license>
<name>Apache 2</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
</license>
</licenses>
<scm>
<connection>scm:[email protected]:hmrc/accessibility-driver.git</connection>
<developerConnection>scm:[email protected]:hmrc/accessibility-driver.git</developerConnection>
<url>[email protected]:hmrc/accessibility-driver.git</url>
</scm>
<developers>
<developer>
<id>nicfellows</id>
<name>Nic Fellows</name>
<url>http://www.nicshouse.co.uk</url>
</developer>
</developers>
}
}
object HeaderSettings {
import de.heikoseeberger.sbtheader.HeaderPlugin.autoImport._
import de.heikoseeberger.sbtheader.license.Apache2_0
def apply() = headers := Map("scala" -> Apache2_0("2015", "HM Revenue & Customs"))
}
| nicf82/accessibility-driver | project/HmrcBuild.scala | Scala | apache-2.0 | 2,730 |
package org.elasticmq.rest.sqs
import org.elasticmq.actor.reply._
import org.elasticmq.msg.ClearQueue
import org.elasticmq.rest.sqs.Action.PurgeQueue
import org.elasticmq.rest.sqs.Constants._
import org.elasticmq.rest.sqs.directives.ElasticMQDirectives
trait PurgeQueueDirectives { this: ElasticMQDirectives with QueueURLModule =>
def purgeQueue(p: AnyParams) = {
p.action(PurgeQueue) {
queueActorFromRequest(p) { queueActor =>
for {
_ <- queueActor ? ClearQueue()
} yield {
respondWith {
<PurgeQueueResponse>
<ResponseMetadata>
<RequestId>{EmptyRequestId}</RequestId>
</ResponseMetadata>
</PurgeQueueResponse>
}
}
}
}
}
}
| adamw/elasticmq | rest/rest-sqs/src/main/scala/org/elasticmq/rest/sqs/PurgeQueueDirectives.scala | Scala | apache-2.0 | 771 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Classe entity.
*/
class ClasseGatlingTest extends Simulation {
val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
// Log all HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
// Log failed HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
val httpConf = http
.baseURL(baseURL)
.inferHtmlResources()
.acceptHeader("*/*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
.connectionHeader("keep-alive")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
val headers_http = Map(
"Accept" -> """application/json"""
)
val headers_http_authentication = Map(
"Content-Type" -> """application/json""",
"Accept" -> """application/json"""
)
val headers_http_authenticated = Map(
"Accept" -> """application/json""",
"Authorization" -> "${access_token}"
)
val scn = scenario("Test the Classe entity")
.exec(http("First unauthenticated request")
.get("/api/account")
.headers(headers_http)
.check(status.is(401))).exitHereIfFailed
.pause(10)
.exec(http("Authentication")
.post("/api/authenticate")
.headers(headers_http_authentication)
.body(StringBody("""{"username":"admin", "password":"admin"}""")).asJSON
.check(header.get("Authorization").saveAs("access_token"))).exitHereIfFailed
.pause(1)
.exec(http("Authenticated request")
.get("/api/account")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10)
.repeat(2) {
exec(http("Get all classes")
.get("/api/classes")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10 seconds, 20 seconds)
.exec(http("Create new classe")
.post("/api/classes")
.headers(headers_http_authenticated)
.body(StringBody("""{"id":null, "name":"SAMPLE_TEXT", "number":"0", "quantity":"0", "start":"2020-01-01T00:00:00.000Z", "end":"2020-01-01T00:00:00.000Z"}""")).asJSON
.check(status.is(201))
.check(headerRegex("Location", "(.*)").saveAs("new_classe_url"))).exitHereIfFailed
.pause(10)
.repeat(5) {
exec(http("Get created classe")
.get("${new_classe_url}")
.headers(headers_http_authenticated))
.pause(10)
}
.exec(http("Delete created classe")
.delete("${new_classe_url}")
.headers(headers_http_authenticated))
.pause(10)
}
val users = scenario("Users").exec(scn)
setUp(
users.inject(rampUsers(100) over (1 minutes))
).protocols(httpConf)
}
| israeleriston/scientific-week | backend-java/src/test/gatling/simulations/ClasseGatlingTest.scala | Scala | mit | 3,372 |
/*
* Copyright (c) 2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package hadoop
package shredder
// Specs2
import org.specs2.Specification
class TypeHierarchySpec extends Specification { def is =
"This is a specification to test the TypeHierarchy case class" ^
p^
"a TypeHierarchy should be convertible to JSON" ! e1^
"the complete method should finalize a partial TypeHierarchy" ! e2^
end
val EventId = "f81d4fae-7dec-11d0-a765-00a0c91e6bf6"
val CollectorTimestamp = "2014-04-29 09:00:54.000"
def e1 = {
val hierarchy =
TypeHierarchy(
rootId = EventId,
rootTstamp = CollectorTimestamp,
refRoot = "events",
refTree = List("events", "new_ticket"),
refParent = "events")
// TODO: add missing refTree
hierarchy.toJsonNode.toString must_== s"""{"rootId":"${EventId}","rootTstamp":"${CollectorTimestamp}","refRoot":"events","refTree":["events","new_ticket"],"refParent":"events"}"""
}
def e2 = {
val partial = Shredder.makePartialHierarchy(EventId, CollectorTimestamp)
partial.complete(List("link_click", "elementClasses")) must_==
TypeHierarchy(
rootId = EventId,
rootTstamp = CollectorTimestamp,
refRoot = "events",
refTree = List("events", "link_click", "elementClasses"),
refParent = "link_click")
}
}
| guardian/snowplow | 3-enrich/scala-hadoop-shred/src/test/scala/com.snowplowanalytics.snowplow.enrich.hadoop/shredder/TypeHierarchySpec.scala | Scala | apache-2.0 | 2,268 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.spark.serialization
import java.beans.Introspector
import java.lang.reflect.Method
import scala.collection.mutable.HashMap
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
import org.apache.commons.logging.LogFactory
private[spark] object ReflectionUtils {
val caseClassCache = new HashMap[Class[_], (Boolean, Iterable[String])]
val javaBeanCache = new HashMap[Class[_], Array[(String, Method)]]
//SI-6240
protected[spark] object ReflectionLock
private def checkCaseClass(clazz: Class[_]): Boolean = {
ReflectionLock.synchronized {
      // reliable case class identification only happens through class symbols...
runtimeMirror(clazz.getClassLoader()).classSymbol(clazz).isCaseClass
}
}
private def doGetCaseClassInfo(clazz: Class[_]): Iterable[String] = {
ReflectionLock.synchronized {
runtimeMirror(clazz.getClassLoader()).classSymbol(clazz).toType.declarations.collect {
case m: MethodSymbol if m.isCaseAccessor => m.name.toString()
}
}
}
private def isCaseClassInsideACompanionModule(clazz: Class[_], arity: Int): Boolean = {
if (!classOf[Serializable].isAssignableFrom(clazz)) {
false
}
// check 'copy' synthetic methods - they are public so go with getMethods
val copyMethods = clazz.getMethods.collect {
case m: Method if m.getName.startsWith("copy$default$") => m.getName
}
arity == copyMethods.length
}
  // TODO: this is a hack since we expect the field declaration order to match the source, but there's no guarantee
private def caseClassInfoInsideACompanionModule(clazz: Class[_], arity: Int): Iterable[String] = {
// fields are private so use the 'declared' variant
var counter: Int = 0
clazz.getDeclaredFields.collect {
case field if (counter < arity) => counter += 1; field.getName
}
}
private def doGetCaseClassValues(target: AnyRef, props: Iterable[String]) = {
val product = target.asInstanceOf[Product].productIterator
val tuples = for (y <- props) yield (y, product.next)
tuples.toMap
}
private def checkCaseClassCache(p: Product) = {
caseClassCache.getOrElseUpdate(p.getClass, {
var isCaseClazz = checkCaseClass(p.getClass)
var info = if (isCaseClazz) doGetCaseClassInfo(p.getClass) else null
if (!isCaseClazz) {
isCaseClazz = isCaseClassInsideACompanionModule(p.getClass, p.productArity)
if (isCaseClazz) {
// Todo: Fix this logger usage
LogFactory.getLog(classOf[ScalaValueWriter]).warn(
String.format("[%s] is detected as a case class in Java but not in Scala and thus " +
"its properties might be detected incorrectly - make sure the @ScalaSignature is available within the class bytecode " +
"and/or consider moving the case class from its companion object/module", p.getClass))
}
info = if (isCaseClazz) caseClassInfoInsideACompanionModule(p.getClass(), p.productArity) else null
}
(isCaseClazz, info)
})
}
def isCaseClass(p: Product) = {
checkCaseClassCache(p)._1
}
def caseClassValues(p: Product) = {
doGetCaseClassValues(p.asInstanceOf[AnyRef], checkCaseClassCache(p)._2)
}
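  // Illustrative sketch (not in the original source): for a definition such as
  //   case class Person(name: String, age: Int)
  // isCaseClass(Person("ann", 3)) yields true, and caseClassValues(Person("ann", 3)) yields
  // Map("name" -> "ann", "age" -> 3), pairing the case-accessor names with the values from
  // productIterator in declaration order.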
private def checkJavaBeansCache(o: Any) = {
javaBeanCache.getOrElseUpdate(o.getClass, {
javaBeansInfo(o.getClass)
})
}
def isJavaBean(value: Any) = {
!checkJavaBeansCache(value).isEmpty
}
def javaBeanAsMap(value: Any) = {
javaBeansValues(value, checkJavaBeansCache(value))
}
private def javaBeansInfo(clazz: Class[_]) = {
Introspector.getBeanInfo(clazz).getPropertyDescriptors().collect {
case pd if (pd.getName != "class" && pd.getReadMethod() != null) => (pd.getName, pd.getReadMethod)
}.sortBy(_._1)
}
private def javaBeansValues(target: Any, info: Array[(String, Method)]) = {
info.map(in => (in._1, in._2.invoke(target))).toMap
}
} | wangcy6/storm_app | Elasticsearch/elasticsearch-hadoop-master/spark/core/main/scala/org/elasticsearch/spark/serialization/ReflectionUtils.scala | Scala | apache-2.0 | 4,764 |
def a = {}
def foo = {
def b = {}
println(this./* offset: 4 */a)
println(this./* resolved: false */b)
} | ilinum/intellij-scala | testdata/resolve2/inheritance/this/element/Function.scala | Scala | apache-2.0 | 111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import org.apache.spark.SparkContext
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.ui.{SparkListenerSQLExecutionEnd, SparkListenerSQLExecutionStart}
import org.apache.spark.sql.internal.StaticSQLConf.SQL_EVENT_TRUNCATE_LENGTH
import org.apache.spark.util.Utils
object SQLExecution {
val EXECUTION_ID_KEY = "spark.sql.execution.id"
private val _nextExecutionId = new AtomicLong(0)
private def nextExecutionId: Long = _nextExecutionId.getAndIncrement
private val executionIdToQueryExecution = new ConcurrentHashMap[Long, QueryExecution]()
def getQueryExecution(executionId: Long): QueryExecution = {
executionIdToQueryExecution.get(executionId)
}
private val testing = sys.props.contains(IS_TESTING.key)
private[sql] def checkSQLExecutionId(sparkSession: SparkSession): Unit = {
val sc = sparkSession.sparkContext
// only throw an exception during tests. a missing execution ID should not fail a job.
if (testing && sc.getLocalProperty(EXECUTION_ID_KEY) == null) {
// Attention testers: when a test fails with this exception, it means that the action that
// started execution of a query didn't call withNewExecutionId. The execution ID should be
// set by calling withNewExecutionId in the action that begins execution, like
// Dataset.collect or DataFrameWriter.insertInto.
throw new IllegalStateException("Execution ID should be set")
}
}
/**
* Wrap an action that will execute "queryExecution" to track all Spark jobs in the body so that
* we can connect them with an execution.
*/
def withNewExecutionId[T](
sparkSession: SparkSession,
queryExecution: QueryExecution,
name: Option[String] = None)(body: => T): T = {
val sc = sparkSession.sparkContext
val oldExecutionId = sc.getLocalProperty(EXECUTION_ID_KEY)
val executionId = SQLExecution.nextExecutionId
sc.setLocalProperty(EXECUTION_ID_KEY, executionId.toString)
executionIdToQueryExecution.put(executionId, queryExecution)
try {
// sparkContext.getCallSite() would first try to pick up any call site that was previously
      // set, then fall back to Utils.getCallSite(); calling Utils.getCallSite() directly on
      // streaming queries would give us a call site like "run at <unknown>:0"
val callSite = sc.getCallSite()
val truncateLength = sc.conf.get(SQL_EVENT_TRUNCATE_LENGTH)
val desc = Option(sc.getLocalProperty(SparkContext.SPARK_JOB_DESCRIPTION))
.filter(_ => truncateLength > 0)
.map { sqlStr =>
val redactedStr = Utils
.redact(sparkSession.sessionState.conf.stringRedactionPattern, sqlStr)
redactedStr.substring(0, Math.min(truncateLength, redactedStr.length))
}.getOrElse(callSite.shortForm)
withSQLConfPropagated(sparkSession) {
var ex: Option[Throwable] = None
val startTime = System.nanoTime()
try {
sc.listenerBus.post(SparkListenerSQLExecutionStart(
executionId = executionId,
description = desc,
details = callSite.longForm,
physicalPlanDescription = queryExecution.toString,
// `queryExecution.executedPlan` triggers query planning. If it fails, the exception
// will be caught and reported in the `SparkListenerSQLExecutionEnd`
sparkPlanInfo = SparkPlanInfo.fromSparkPlan(queryExecution.executedPlan),
time = System.currentTimeMillis()))
body
} catch {
case e: Throwable =>
ex = Some(e)
throw e
} finally {
val endTime = System.nanoTime()
val event = SparkListenerSQLExecutionEnd(executionId, System.currentTimeMillis())
// Currently only `Dataset.withAction` and `DataFrameWriter.runCommand` specify the `name`
// parameter. The `ExecutionListenerManager` only watches SQL executions with name. We
// can specify the execution name in more places in the future, so that
// `QueryExecutionListener` can track more cases.
event.executionName = name
event.duration = endTime - startTime
event.qe = queryExecution
event.executionFailure = ex
sc.listenerBus.post(event)
}
}
} finally {
executionIdToQueryExecution.remove(executionId)
sc.setLocalProperty(EXECUTION_ID_KEY, oldExecutionId)
}
}
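  // Minimal usage sketch (an assumption, not taken from this file): an action such as
  // Dataset.collect would wrap its body so that every Spark job it triggers is attributed
  // to a single SQL execution, e.g.
  //
  //   SQLExecution.withNewExecutionId(sparkSession, queryExecution, Some("collect")) {
  //     queryExecution.executedPlan.executeCollect()
  //   }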
/**
* Wrap an action with a known executionId. When running a different action in a different
* thread from the original one, this method can be used to connect the Spark jobs in this action
* with the known executionId, e.g., `BroadcastExchangeExec.relationFuture`.
*/
def withExecutionId[T](sparkSession: SparkSession, executionId: String)(body: => T): T = {
val sc = sparkSession.sparkContext
val oldExecutionId = sc.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
withSQLConfPropagated(sparkSession) {
try {
sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, executionId)
body
} finally {
sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, oldExecutionId)
}
}
}
/**
* Wrap an action with specified SQL configs. These configs will be propagated to the executor
* side via job local properties.
*/
def withSQLConfPropagated[T](sparkSession: SparkSession)(body: => T): T = {
val sc = sparkSession.sparkContext
// Set all the specified SQL configs to local properties, so that they can be available at
// the executor side.
val allConfigs = sparkSession.sessionState.conf.getAllConfs
val originalLocalProps = allConfigs.collect {
case (key, value) if key.startsWith("spark") =>
val originalValue = sc.getLocalProperty(key)
sc.setLocalProperty(key, value)
(key, originalValue)
}
try {
body
} finally {
for ((key, value) <- originalLocalProps) {
sc.setLocalProperty(key, value)
}
}
}
}
| pgandhi999/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SQLExecution.scala | Scala | apache-2.0 | 7,016 |
package org.dohrm.toolkit.context
import com.github.tototoshi.slick.GenericJodaSupport
import org.dohrm.toolkit.actor.response.{ExceptionError, Error}
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend.DatabaseDef
/**
* @author michaeldohr
* @since 29/05/16
*/
trait JdbcConfig {
val jodaSupport: GenericJodaSupport
val driver: JdbcProfile
def db: DatabaseDef
def exceptionToErrorMapper: PartialFunction[Throwable, Error] = PartialFunction.empty
final def defaultExceptionToErrorMapper: PartialFunction[Throwable, Error] = {
case e => ExceptionError(e)
}
}
trait JdbcContext {
implicit val jdbcConfig: JdbcConfig
}
| dohr-michael/storyline | src/main/scala/org/dohrm/toolkit/context/JdbcContext.scala | Scala | mit | 658 |
package org.scalajs.testinterface
import language.experimental.macros
/** Dummy object to get the right shadowing for 2.10 / 2.11 cross compilation */
private object Compat210 {
object blackbox { // scalastyle:ignore
type Context = scala.reflect.macros.Context
}
}
import Compat210._
object TestUtils {
import scala.reflect.macros._ // shadows blackbox from above
import blackbox.Context
def newInstance(name: String, loader: ClassLoader)(args: Seq[AnyRef]): AnyRef =
macro newInstance_impl
def newInstance_impl(c: Context)(name: c.Expr[String],
loader: c.Expr[ClassLoader])(
args: c.Expr[Seq[AnyRef]]): c.Expr[AnyRef] = c.universe.reify {
val clazz = loader.splice.loadClass(name.splice)
val ctors = clazz.getConstructors()
if (ctors.size != 1) {
throw new IllegalArgumentException(
"You may only call newInstance with single-ctor classes")
}
val ctor = ctors.head
ctor.newInstance(args.splice: _*).asInstanceOf[AnyRef]
}
def loadModule(name: String, loader: ClassLoader): AnyRef =
macro loadModule_impl
def loadModule_impl(c: Context)(name: c.Expr[String],
loader: c.Expr[ClassLoader]): c.Expr[AnyRef] = c.universe.reify {
val clazz = loader.splice.loadClass(name.splice + "$")
clazz.getField("MODULE$").get(null)
}
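  // Hypothetical usage sketch (not in the original source): given `object Foo` on the classpath,
  // TestUtils.loadModule("Foo", loader) expands to code that loads class "Foo$" and returns its
  // MODULE$ singleton, while TestUtils.newInstance("Bar", loader)(Seq(arg)) reflectively
  // instantiates a single-constructor class "Bar" with the given arguments.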
}
| jmnarloch/scala-js | stubs/src/main/scala/org/scalajs/testinterface/TestUtils.scala | Scala | bsd-3-clause | 1,330 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs
import java.net.{URLDecoder, URLEncoder}
import java.nio.ByteBuffer
import java.nio.charset.Charset
import akka.actor.{Address, AddressFromURIString}
import akka.util.ByteString
import com.typesafe.scalalogging.Logger
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.KeeperException.NodeExistsException
import scala.language.implicitConversions
import scala.util.Try
import scala.util.control.NonFatal
import scala.jdk.CollectionConverters._
package object cluster {
trait SegmentationLogic {
val segmentsSize:Int
def segmentation(partitionKey:ByteString): String = s"segment-${Math.abs(partitionKey.hashCode()) % segmentsSize}"
def partitionZkPath(partitionKey:ByteString): String = s"/segments/${segmentation(partitionKey)}/${keyToPath(partitionKey)}"
def sizeOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/$$size"
def servantsOfParZkPath(partitionKey:ByteString): String = s"${partitionZkPath(partitionKey)}/servants"
}
case class DefaultSegmentationLogic(segmentsSize:Int) extends SegmentationLogic
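  // Illustrative sketch (assumed values, not part of the original file): with
  // DefaultSegmentationLogic(128), a partition key whose hashCode is 1000 maps to
  // "segment-104" (1000 % 128 = 104), so its znode lives under
  // "/segments/segment-104/<url-encoded key>", with "$size" and "servants" children holding
  // the partition size and the member addresses.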
def guarantee(path:String, data:Option[Array[Byte]], mode:CreateMode = CreateMode.EPHEMERAL)
(implicit zkClient:CuratorFramework, logger:Logger):String = {
try{
data match {
case None => zkClient.create.withMode(mode).forPath(path)
case Some(bytes) => zkClient.create.withMode(mode).forPath(path, bytes)
}
}
catch{
case e: NodeExistsException =>
if(data.nonEmpty && data.get.length > 0){
zkClient.setData().forPath(path, data.get)
}
path
case NonFatal(e) =>
logger.info("leader znode creation failed due to %s\\n", e)
path
}
}
def safelyDiscard(path:String, recursive: Boolean = true)(implicit zkClient: CuratorFramework): String = Try {
if(recursive) zkClient.getChildren.forPath(path).asScala.foreach(child => safelyDiscard(s"$path/$child", recursive))
zkClient.delete.forPath(path)
path
} getOrElse path
def keyToPath(name:String):String = URLEncoder.encode(name, "utf-8")
def pathToKey(name:String):String = URLDecoder.decode(name, "utf-8")
private[cluster] val BYTES_OF_INT = Integer.SIZE / java.lang.Byte.SIZE
implicit def intToBytes(integer:Int):Array[Byte] = {
val buf = ByteBuffer.allocate(BYTES_OF_INT)
buf.putInt(integer)
buf.rewind
buf.array()
}
val UTF_8 = Charset.forName("utf-8")
implicit class ByteConversions(val bytes: Array[Byte]) extends AnyVal {
def toAddress: Option[Address] =
Option(bytes) flatMap (b => if (b.length <= 0) None else Some(AddressFromURIString(new String(b, UTF_8))))
def toInt: Int = ByteBuffer.wrap(bytes).getInt
def toUtf8: String = new String(bytes, UTF_8)
def toByteString: ByteString = ByteString(bytes)
def toAddressSet: Set[Address] = Try {
new String(bytes, UTF_8).split("[,]").map(seg => AddressFromURIString(seg.trim)).toSet
} getOrElse Set.empty
}
implicit def byteStringToUtf8(bs:ByteString):String = new String(bs.toArray, UTF_8)
implicit def addressToBytes(address:Address):Array[Byte] = {
address.toString.getBytes(UTF_8)
}
implicit def addressSetToBytes(members: Set[Address]): Array[Byte] = {
members.mkString(",").getBytes(UTF_8)
}
}
| akara/squbs | squbs-zkcluster/src/main/scala/org/squbs/cluster/package.scala | Scala | apache-2.0 | 3,991 |
package io.github.quark.resolver
import java.util.concurrent.atomic.AtomicReference
import akka.http.scaladsl.server.PathMatcher.Matched
import io.github.quark.resolver.ServiceResolver._
import akka.http.scaladsl.server.PathMatchers.{Remaining, Segment, Slash}
import io.github.quark.stage.PipelineStage.Input
trait ServiceResolver {
protected def routes: AtomicReference[Map[ServiceID, ServiceLocation]]
def findServiceLocation(request: Input): Option[ResolverResponse] = {
val inputPath = request.uri.path
pathMatcher(inputPath) match {
case Matched(_, (serviceID, rest)) if rest.nonEmpty =>
routes.get().get(serviceID).map((_, rest))
case _ => None
}
}
private val pathMatcher = Slash ~ Segment ~ Remaining
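  // Illustrative sketch (hypothetical values, not from the original source): for a request to
  // "/users/profile/42", the matcher binds serviceID = "users" and rest to the remaining path
  // ("/profile/42"); if the routes map contains "users" -> "http://users.internal",
  // findServiceLocation returns Some(("http://users.internal", "/profile/42")).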
}
object ServiceResolver {
type ServiceID = String
type ServiceLocation = String
type RestPath = String
type ResolverResponse = (ServiceLocation, RestPath)
}
| burakkose/Quark | src/main/scala/io/github/quark/resolver/ServiceResolver.scala | Scala | apache-2.0 | 922 |
package com.circusoc.simplesite.members
import java.sql.{Connection, DriverManager}
import com.circusoc.simplesite._
import com.circusoc.simplesite.users.AuthenticatedUser
import com.circusoc.simplesite.users.permissions.{CanEditTagsPermission, CanUpdateMembers}
import org.codemonkey.simplejavamail.Email
import org.dbunit.DBTestCase
import org.dbunit.database.DatabaseConnection
import org.dbunit.dataset.IDataSet
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder
import org.dbunit.operation.DatabaseOperation
import org.joda.time.DateTime
import org.scalatest.Matchers._
import org.scalatest.prop.PropertyChecks
import org.scalatest.{BeforeAndAfter, FlatSpecLike}
import scalikejdbc._
class MemberSpec extends DBTestCase with FlatSpecLike with BeforeAndAfter with PropertyChecks {
implicit val config = new WithConfig {
override val port: Int = 8080
override val db: com.circusoc.simplesite.DB = new com.circusoc.simplesite.DB {
override val poolName = 'memberspec
override def setup() = {
Class.forName("org.h2.Driver")
val url = s"jdbc:h2:mem:memberspec;DB_CLOSE_DELAY=-1"
ConnectionPool.add(poolName, url, "sa", "")
}
}
override val hire: Hire = new Hire {}
override val mailer: MailerLike = new MailerLike {
override def sendMail(email: Email): Unit = throw new NotImplementedError()
}
override val paths: PathConfig = new PathConfig {}
}
def getJDBC: Connection = {
Class.forName("org.h2.Driver")
val c = DriverManager.getConnection(s"jdbc:h2:mem:memberspec;DB_CLOSE_DELAY=-1", "sa", "")
c.setAutoCommit(true)
c
}
config.db.setup()
DBSetup.setup()(config)
val conn = new DatabaseConnection(getJDBC)
DatabaseOperation.CLEAN_INSERT.execute(conn, getDataSet())
override def getDataSet: IDataSet = new FlatXmlDataSetBuilder().
build(classOf[MemberSpec].
getResourceAsStream("/com/circusoc/simplesite/members/MembersDBSpec.xml"))
it should "get members by email" in {
val member = config.db.getDB.readOnly(implicit session => Member.getMember("[email protected]"))
assert(member.isDefined)
member.get.name should be("steve")
member.get.lastPayment should be (new DateTime(2014, 1, 1, 0, 0, 0))
member.get.lastWaiver should be (new DateTime(2015, 1, 1, 0, 0, 0))
}
it should "get members by id" in {
val member = config.db.getDB.readOnly(implicit session => Member.getMember(1))
assert(member.isDefined)
member.get.name should be("steve")
member.get.lastPayment should be (new DateTime(2014, 1, 1, 0, 0, 0))
member.get.lastWaiver should be (new DateTime(2015, 1, 1, 0, 0, 0))
}
it should "get members by name search" in {
val members = config.db.getDB.readOnly(implicit session => Member.searchMembers("eve"))
assert(members.length == 1)
val member = members.head
member.name should be("steve")
member.lastPayment should be (new DateTime(2014, 1, 1, 0, 0, 0))
member.lastWaiver should be (new DateTime(2015, 1, 1, 0, 0, 0))
}
it should "get members by email search" in {
val members = config.db.getDB.readOnly(implicit session => Member.searchMembers("example.com"))
assert(members.length == 2)
val names = members.map(_.name).sorted
val emails = members.map(_.email).sorted
names should be(List("bob", "steve"))
emails should be(List(Some("[email protected]"), Some("[email protected]")))
}
it should "get members by student number search" in {
val members = config.db.getDB.readOnly(implicit session => Member.searchMembers("333222111"))
assert(members.length == 1)
members.head.id should be(3)
}
it should "update payment and waivers" in {
val member = config.db.getDB.readOnly(implicit session => Member.getMember(3)).head
val updatedMember = config.db.getDB.autoCommit { implicit session =>
Member.recordPaymentAndWaiver(member, TestUpdatePermission())
}
val newMember = config.db.getDB.readOnly(implicit session => Member.getMember(3)).head
newMember should be(updatedMember)
}
it should "get the list of subscribed users" in {
val members = config.db.getDB.readOnly(implicit session => Member.getSubscribedEmails())
members.sorted should be(List("[email protected]", "[email protected]"))
}
it should "get all the members" in {
val members = config.db.getDB.readOnly(implicit session => Member.getAllMembers)
members.map(_.id).sorted should be(List(1, 2, 3))
}
it should "add members" in {
val anne = config.db.getDB.localTx { implicit s =>
val anne1 = Member.newMember("Anne Alison", None, Some(StudentRecord("z325555", true)), false).left.get
val anne2 = Member.searchMembers("anne").head
anne1 should be(anne2)
anne1
}
val anne3 = config.db.getDB.readOnly(implicit s => Member.searchMembers("anne").head)
anne should be(anne3)
}
it should "be impossible to have two users with the same name" in {
val broken = config.db.getDB.localTx { implicit s =>
Member.newMember("steve", None, Some(StudentRecord("xxxxxx", true)), false)
}
broken should be(Right(DuplicateNameError))
}
it should "be impossible to have two users with the same email" in {
val broken = config.db.getDB.localTx { implicit s =>
Member.newMember("bobbert", Some("[email protected]"), Some(StudentRecord("xxxxxx", true)), false)
}
broken should be(Right(DuplicateEmailError))
}
it should "be impossible to have two users with the same student number" in {
val broken = config.db.getDB.localTx { implicit s =>
Member.newMember("bobbert", Some("[email protected]"), Some(StudentRecord("333222111", true)), false)
}
broken should be(Right(DuplicateSNumError))
}
it should "create proofs from authenticated users" in {
val autheduser = new AuthenticatedUser(1, "steve", Set(CanUpdateMembers))
HasUpdatePermission(autheduser)
}
  it should "reject proofs when the user doesn't have permission" in {
val autheduser = new AuthenticatedUser(1, "steve", Set(CanEditTagsPermission))
intercept[AssertionError]{
HasUpdatePermission(autheduser)
}
}
"mapMessage" should "throw unknown exceptions" in {
val e = new org.h2.jdbc.JdbcSQLException("", "", "", 0, null, "")
intercept[org.h2.jdbc.JdbcSQLException] {
Member.mapMessage(e)
}
}
}
| ririw/circusoc-backend | src/test/scala/com/circusoc/simplesite/members/MemberSpec.scala | Scala | agpl-3.0 | 6,385 |
package com.trafigura.chess
object PrintLayoutsForTestCase extends App {
val pieces = List(King, King, Queen, Bishop, Rook, Knight)
val chessVariants = new ChessLayouts(false, 9, 6, pieces :_*)
  println(s"Test case is to find all layouts for $pieces on a 9x6 board")
private val layouts = chessVariants.findLayouts.size
println(s"Result is $layouts")
}
object TestPieces extends App {
assert(King((2, 2), (1, 1)))
assert(King((2, 2), (2, 1)))
assert(King((2, 2), (3, 1)))
assert(King((2, 2), (1, 2)))
assert(King((2, 2), 1 -> 3))
assert(King(2 -> 2, 3 -> 2))
assert(King(2 -> 2, 3 -> 3))
assert(King(2 -> 2, 2 -> 3))
assert(!King(2 -> 2, 2 -> 5))
assert(!King(1 -> 1, 3 -> 1))
assert(!King(1 -> 1, 1 -> 3))
assert(Bishop(1 -> 1, 2 -> 2))
assert(Bishop(1 -> 1, 3 -> 3))
assert(!Bishop(1 -> 1, 2 -> 1))
assert(!Bishop(1 -> 1, 3 -> 1))
assert(!Bishop(1 -> 1, 2 -> 1))
assert(!Bishop(1 -> 1, 1 -> 2))
assert(!Bishop(1 -> 1, 1 -> 3))
assert(!Bishop(1 -> 1, 3 -> 2))
assert(Rook(1 -> 1, 2 -> 1))
assert(Rook(1 -> 1, 2 -> 1))
assert(Rook(1 -> 1, 1 -> 3))
assert(Rook(2 -> 2, 2 -> 1))
assert(Rook(2 -> 2, 2 -> 3))
assert(Rook(2 -> 2, 3 -> 2))
assert(!Rook(2 -> 2, 1 -> 1))
assert(!Rook(2 -> 2, 3 -> 3))
assert(Knight(1 -> 1, 2 -> 3))
assert(Knight(1 -> 1, 3 -> 2))
assert(Knight(1 -> 1, 2 -> 3))
assert(!Knight(1 -> 1, 2 -> 2))
assert(!Knight(1 -> 1, 1 -> 3))
assert(!Knight(1 -> 1, 3 -> 3))
assert(Queen(1 -> 1, 2 -> 1))
assert(Queen(1 -> 1, 2 -> 2))
assert(Queen(1 -> 1, 3 -> 3))
assert(Queen(1 -> 1, 1 -> 2))
assert(Queen(1 -> 1, 1 -> 3))
assert(!Queen(1 -> 1, 2 -> 3))
assert(!Queen(1 -> 1, 3 -> 2))
} | dobrynya/chepila | src/main/scala/com/trafigura/chess/Tests.scala | Scala | apache-2.0 | 1,689 |
package scorex.api.http
import javax.ws.rs.Path
import akka.actor.ActorRefFactory
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import io.swagger.annotations._
import play.api.libs.json.Json
import scorex.app.Application
import scorex.crypto.encode.Base58
@Path("/wallet")
@Api(value = "/wallet", description = "Wallet-related calls")
case class WalletApiRoute(application: Application)(implicit val context: ActorRefFactory)
extends ApiRoute with CommonTransactionApiFunctions {
val settings = application.settings
private val wallet = application.wallet
override lazy val route = root ~ seed
@Path("/seed")
@ApiOperation(value = "Seed", notes = "Export wallet seed", httpMethod = "GET")
def seed: Route = {
path("wallet" / "seed") {
withAuth {
getJsonRoute {
lazy val response = JsonResponse(Json.obj("seed" -> Base58.encode(wallet.seed)), StatusCodes.OK)
walletNotExists(wallet).getOrElse(response)
}
}
}
}
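  // Hypothetical example (not part of the original source): a successful authenticated call to
  // GET /wallet/seed returns the wallet seed encoded in Base58, e.g. {"seed": "3csAaH..."},
  // while GET /wallet returns {"exists": true} or {"exists": false}.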
@Path("/")
@ApiOperation(value = "Wallet", notes = "Display whether wallet exists or not", httpMethod = "GET")
def root: Route = {
path("wallet") {
getJsonRoute {
JsonResponse(Json.obj("exists" -> wallet.exists()), StatusCodes.OK)
}
}
}
}
| B83YPoj/Waves | src/main/scala/scorex/api/http/WalletApiRoute.scala | Scala | apache-2.0 | 1,301 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010.consumer
import java.{util => ju}
import java.util.concurrent.ConcurrentHashMap
import org.apache.commons.pool2.{BaseKeyedPooledObjectFactory, PooledObject, SwallowedExceptionListener}
import org.apache.commons.pool2.impl.{DefaultEvictionPolicy, DefaultPooledObject, GenericKeyedObjectPool, GenericKeyedObjectPoolConfig}
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.sql.kafka010._
import org.apache.spark.sql.kafka010.consumer.InternalKafkaConsumerPool._
import org.apache.spark.sql.kafka010.consumer.KafkaDataConsumer.CacheKey
/**
* Provides object pool for [[InternalKafkaConsumer]] which is grouped by [[CacheKey]].
*
* This class leverages [[GenericKeyedObjectPool]] internally, hence providing methods based on
* the class, and same contract applies: after using the borrowed object, you must either call
* returnObject() if the object is healthy to return to pool, or invalidateObject() if the object
* should be destroyed.
*
* The soft capacity of pool is determined by "spark.kafka.consumer.cache.capacity" config value,
* and the pool will have reasonable default value if the value is not provided.
* (The instance will do its best effort to respect soft capacity but it can exceed when there's
* a borrowing request and there's neither free space nor idle object to clear.)
*
* This class guarantees that no caller will get pooled object once the object is borrowed and
* not yet returned, hence provide thread-safety usage of non-thread-safe [[InternalKafkaConsumer]]
* unless caller shares the object to multiple threads.
*/
private[consumer] class InternalKafkaConsumerPool(
objectFactory: ObjectFactory,
poolConfig: PoolConfig) extends Logging {
def this(conf: SparkConf) = {
this(new ObjectFactory, new PoolConfig(conf))
}
// the class is intended to have only soft capacity
assert(poolConfig.getMaxTotal < 0)
private val pool = {
val internalPool = new GenericKeyedObjectPool[CacheKey, InternalKafkaConsumer](
objectFactory, poolConfig)
internalPool.setSwallowedExceptionListener(CustomSwallowedExceptionListener)
internalPool
}
/**
* Borrows [[InternalKafkaConsumer]] object from the pool. If there's no idle object for the key,
* the pool will create the [[InternalKafkaConsumer]] object.
*
* If the pool doesn't have idle object for the key and also exceeds the soft capacity,
* pool will try to clear some of idle objects.
*
* Borrowed object must be returned by either calling returnObject or invalidateObject, otherwise
* the object will be kept in pool as active object.
*/
def borrowObject(key: CacheKey, kafkaParams: ju.Map[String, Object]): InternalKafkaConsumer = {
updateKafkaParamForKey(key, kafkaParams)
if (size >= poolConfig.softMaxSize) {
logWarning("Pool exceeds its soft max size, cleaning up idle objects...")
pool.clearOldest()
}
pool.borrowObject(key)
}
/** Returns borrowed object to the pool. */
def returnObject(consumer: InternalKafkaConsumer): Unit = {
pool.returnObject(extractCacheKey(consumer), consumer)
}
/** Invalidates (destroy) borrowed object to the pool. */
def invalidateObject(consumer: InternalKafkaConsumer): Unit = {
pool.invalidateObject(extractCacheKey(consumer), consumer)
}
/** Invalidates all idle consumers for the key */
def invalidateKey(key: CacheKey): Unit = {
pool.clear(key)
}
/**
* Closes the keyed object pool. Once the pool is closed,
* borrowObject will fail with [[IllegalStateException]], but returnObject and invalidateObject
* will continue to work, with returned objects destroyed on return.
*
* Also destroys idle instances in the pool.
*/
def close(): Unit = {
pool.close()
}
def reset(): Unit = {
// this is the best-effort of clearing up. otherwise we should close the pool and create again
// but we don't want to make it "var" only because of tests.
pool.clear()
}
def numIdle: Int = pool.getNumIdle
def numIdle(key: CacheKey): Int = pool.getNumIdle(key)
def numActive: Int = pool.getNumActive
def numActive(key: CacheKey): Int = pool.getNumActive(key)
def size: Int = numIdle + numActive
def size(key: CacheKey): Int = numIdle(key) + numActive(key)
// TODO: revisit the relation between CacheKey and kafkaParams - for now it looks a bit weird
  // as we force all consumers with the same (groupId, topicPartition) to share the same
  // kafkaParams, which might be viable from a performance perspective (kafkaParams might be
  // too large to use as part of the key), but kafkaParams could differ - in that case the
  // cache key should be differentiated per kafkaParams.
private def updateKafkaParamForKey(key: CacheKey, kafkaParams: ju.Map[String, Object]): Unit = {
// We can assume that kafkaParam should not be different for same cache key,
// otherwise we can't reuse the cached object and cache key should contain kafkaParam.
// So it should be safe to put the key/value pair only when the key doesn't exist.
val oldKafkaParams = objectFactory.keyToKafkaParams.putIfAbsent(key, kafkaParams)
require(oldKafkaParams == null || kafkaParams == oldKafkaParams, "Kafka parameters for same " +
s"cache key should be equal. old parameters: $oldKafkaParams new parameters: $kafkaParams")
}
private def extractCacheKey(consumer: InternalKafkaConsumer): CacheKey = {
new CacheKey(consumer.topicPartition, consumer.kafkaParams)
}
}
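// Usage sketch (an assumption, not part of the original file): callers follow the commons-pool
// contract described in the class doc above -
//
//   val consumer = pool.borrowObject(key, kafkaParams)
//   try {
//     // ... use the consumer to fetch records ...
//     pool.returnObject(consumer)        // healthy: hand it back for reuse
//   } catch {
//     case e: Throwable =>
//       pool.invalidateObject(consumer)  // broken: destroy instead of returning
//       throw e
//   }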
private[consumer] object InternalKafkaConsumerPool {
object CustomSwallowedExceptionListener extends SwallowedExceptionListener with Logging {
override def onSwallowException(e: Exception): Unit = {
logError(s"Error closing Kafka consumer", e)
}
}
class PoolConfig(conf: SparkConf) extends GenericKeyedObjectPoolConfig[InternalKafkaConsumer] {
private var _softMaxSize = Int.MaxValue
def softMaxSize: Int = _softMaxSize
init()
def init(): Unit = {
_softMaxSize = conf.get(CONSUMER_CACHE_CAPACITY)
val jmxEnabled = conf.get(CONSUMER_CACHE_JMX_ENABLED)
val minEvictableIdleTimeMillis = conf.get(CONSUMER_CACHE_TIMEOUT)
val evictorThreadRunIntervalMillis = conf.get(
CONSUMER_CACHE_EVICTOR_THREAD_RUN_INTERVAL)
// NOTE: Below lines define the behavior, so do not modify unless you know what you are
// doing, and update the class doc accordingly if necessary when you modify.
// 1. Set min idle objects per key to 0 to avoid creating unnecessary object.
// 2. Set max idle objects per key to 3 but set total objects per key to infinite
// which ensures borrowing per key is not restricted.
// 3. Set max total objects to infinite which ensures all objects are managed in this pool.
setMinIdlePerKey(0)
setMaxIdlePerKey(3)
setMaxTotalPerKey(-1)
setMaxTotal(-1)
// Set minimum evictable idle time which will be referred from evictor thread
setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis)
setSoftMinEvictableIdleTimeMillis(-1)
      // the evictor thread will test up to ten idle objects per run
setTimeBetweenEvictionRunsMillis(evictorThreadRunIntervalMillis)
setNumTestsPerEvictionRun(10)
setEvictionPolicy(new DefaultEvictionPolicy[InternalKafkaConsumer]())
// Immediately fail on exhausted pool while borrowing
setBlockWhenExhausted(false)
setJmxEnabled(jmxEnabled)
setJmxNamePrefix("kafka010-cached-simple-kafka-consumer-pool")
}
}
class ObjectFactory extends BaseKeyedPooledObjectFactory[CacheKey, InternalKafkaConsumer] {
val keyToKafkaParams = new ConcurrentHashMap[CacheKey, ju.Map[String, Object]]()
override def create(key: CacheKey): InternalKafkaConsumer = {
Option(keyToKafkaParams.get(key)) match {
case Some(kafkaParams) => new InternalKafkaConsumer(key.topicPartition, kafkaParams)
case None => throw new IllegalStateException("Kafka params should be set before " +
"borrowing object.")
}
}
override def wrap(value: InternalKafkaConsumer): PooledObject[InternalKafkaConsumer] = {
new DefaultPooledObject[InternalKafkaConsumer](value)
}
override def destroyObject(key: CacheKey, p: PooledObject[InternalKafkaConsumer]): Unit = {
p.getObject.close()
}
}
}
| wangmiao1981/spark | external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/consumer/InternalKafkaConsumerPool.scala | Scala | apache-2.0 | 9,245 |
import javax.servlet.ServletContext
import _root_.akka.actor.{ActorSystem, Props}
import com.naughtyzombie.recipesearch._
import com.naughtyzombie.recipesearch.actor.{EsActor, EsIndexer}
import com.naughtyzombie.recipesearch.controller.{ElasticSearchController, FileController, GreetingController}
import org.scalatra._
class ScalatraBootstrap extends LifeCycle {
val system = ActorSystem()
val esActor = system.actorOf(Props[EsActor])
val esIndexer = system.actorOf(Props[EsIndexer])
/*val server = new ElasticsearchServer*/
override def init(context: ServletContext) {
context.mount(new RecipeSearchServlet, "/*")
context.mount(new GreetingController, "/sample/*")
context.mount(new FileController(system, esIndexer), "/file/*")
context.mount(new ElasticSearchController(system, esActor, esIndexer),"/actors/*")
/* server.start()
server.createAndWaitForIndex("recipes")*/
}
override def destroy(context: ServletContext): Unit = {
/*server.stop()
system.shutdown()*/
}
}
| pram/recipesearch | server/src/main/scala/ScalatraBootstrap.scala | Scala | mit | 1,026 |
package com.karasiq.bootstrap4.navbar
import scala.language.postfixOps
import com.karasiq.bootstrap.context.JSRenderingContext
import com.karasiq.bootstrap.jquery.BootstrapJQueryContext
trait JSNavigationBars { self: JSRenderingContext with NavigationBars with BootstrapJQueryContext ⇒
implicit class JSNavigation(nav: NavComponent) {
/**
* Selects tab by ID
* @param id Tab ID
*/
def selectTab(id: String): Unit = {
jQuery(s"a[data-target='#${nav.tabId(id)}']").tab("show")
}
/**
* Selects tab by index
* @param i Tab index, starting from `0`
*/
def selectTab(i: Int): Unit = {
val tabs = nav.navTabs.now
require(i >= 0 && tabs.length > i, s"Invalid tab index: $i")
this.selectTab(tabs(i).id)
}
}
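  // Hypothetical usage (not in the original source): given a NavComponent `nav` with tabs
  // "home" and "settings", nav.selectTab("settings") or nav.selectTab(1) triggers the
  // underlying Bootstrap tab("show") call on the matching nav entry.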
}
| Karasiq/scalajs-bootstrap | library-v4/js/src/main/scala/com/karasiq/bootstrap4/navbar/JSNavigationBars.scala | Scala | mit | 794 |
package mr.merc.unit
import scala.Option.option2Iterable
import scala.util.Random
import mr.merc.map.hex.TerrainHex
import mr.merc.map.objects.{House, MapObject, WoodenBridge}
import mr.merc.map.terrain._
import mr.merc.map.terrain.TerrainKind._
import DefenceType._
import SoldierState._
import mr.merc.unit.SoldierTypeAttribute._
import mr.merc.unit.AttackAttribute._
object Attack {
private val maxChance = 100
def resolveAttack(chance: ChanceOfSuccess): Boolean = chance.chanceNumber >= Random.nextInt(maxChance)
def battle(attackerHex: TerrainHex, defenderHex: TerrainHex, attackerSelection: Attack, defenderSelection: Option[Attack],
f: ChanceOfSuccess => Boolean = resolveAttack): List[AttackResult] = {
require(attackerHex.soldier.isDefined)
require(defenderHex.soldier.isDefined)
val attacker = attackerHex.soldier.get
val defender = defenderHex.soldier.get
val rounds = if (attackerSelection.attributes.contains(Berserk) ||
defenderSelection.exists(_.attributes.contains(Berserk))) {
30
} else {
1
}
val resultAttacks = for (i <- 0 until rounds) yield {
val attackerDefence = calculateSoldierDefence(attacker, attackerHex)
val defenderDefence = calculateSoldierDefence(defender, defenderHex)
val attackerStrikes = generateAttacks(true, attacker, defender, defenderDefence, attackerSelection, defenderSelection, f)
val defenderStrikes = defenderSelection match {
case Some(attack) => generateAttacks(false, defender, attacker, attackerDefence, attack, Some(attackerSelection), f)
case None => Nil
}
if (defenderSelection.exists(_.attributes.contains(Firststrike)) && !attackerSelection.attributes.contains(Firststrike)) {
mergeAttacks(defenderStrikes, attackerStrikes)
} else {
mergeAttacks(attackerStrikes, defenderStrikes)
}
}
val filteredAttacks = filterNotNeededAttacks(attacker, defender, resultAttacks.toList.flatten, attackerSelection, defenderSelection)
// and here we are changing state
filteredAttacks foreach (_.applyDamage())
filteredAttacks
}
private def generateAttacks(attackerIsAttacking: Boolean, attacker: Soldier, defender: Soldier, defence: SoldierDefence, attackersAttack: Attack, defendersAttack: Option[Attack], f: ChanceOfSuccess => Boolean): List[AttackResult] = {
val retVal = for (i <- 0 until attackersAttack.count) yield {
val damage = Attack.possibleAttackersDamage(attackerIsAttacking, attacker, defender, attackersAttack, defendersAttack)
val drained = if (attackersAttack.attributes.contains(Drain)) {
damage / 2
} else {
0
}
val success = f(attackersAttack.chanceOfSuccess(defence))
AttackResult(attackerIsAttacking, attacker, defender, attackersAttack, success, damage, drained)
}
retVal.toList
}
def calculateSoldierDefence(soldier: Soldier, hex: TerrainHex): SoldierDefence = {
if (hex.mapObj.exists(_.isInstanceOf[House]) || hex.terrain.is(WallsKind)) {
SoldierDefence(soldier.soldierType.defence(BuildingDefence))
} else if (hex.mapObj.contains(WoodenBridge) || hex.terrain.isOneOf(RoadKind, GrassKind)) {
SoldierDefence(soldier.soldierType.defence(GrassDefence))
} else {
val d = List[(TerrainKind, DefenceType)](
MountainKind -> MountainDefence,
WaterKind -> WaterDefence,
SwampKind -> SwampDefence,
ForestKind -> ForestDefence,
HillKind -> HillDefence,
SnowKind -> SnowDefence,
IceKind -> IceDefence,
SandKind -> SandDefence).find(x => hex.terrain.is(x._1)).map(_._2).
getOrElse(sys.error(s"Failed to find defence for terrain type ${hex.terrain} for soldier type ${soldier.soldierType.name}"))
SoldierDefence(soldier.soldierType.defence(d))
}
}
private def mergeAttacks(attacker: List[AttackResult], defender: List[AttackResult], acc: List[AttackResult] = Nil): List[AttackResult] = {
if (attacker.isEmpty) {
acc ::: defender
} else if (defender.isEmpty) {
acc ::: attacker
} else {
mergeAttacks(attacker.tail, defender.tail, acc ::: List(attacker.head, defender.head))
}
}
private def fixDrain(maxHp: Int, currentHp: Int, drain: Int): Int = {
if (currentHp + drain > maxHp) {
maxHp - currentHp
} else {
drain
}
}
private def filterNotNeededAttacks(attacker: Soldier, defender: Soldier, attacks: List[AttackResult], attackerAttack: Attack, defenderAttack: Option[Attack]): List[AttackResult] = {
var attackerState = attacker.hp
var defenderState = defender.hp
var attackerSlowed = attacker.state.contains(Slowed)
var defenderSlowed = defender.state.contains(Slowed)
attacks.flatMap(res => {
if (attackerState <= 0 || defenderState <= 0) {
None
} else if (res.success) {
if (res.attacker == attacker) {
val possibleDamage = possibleAttackersDamage(true, attacker, defender, attackerAttack, defenderAttack)
val damage = if (attackerSlowed) possibleDamage / 2 else possibleDamage
defenderState -= damage
if (attackerAttack.attributes.contains(Slow)) {
defenderSlowed = true
}
if (defenderState < 0) {
val actualDamage = damage + defenderState
val drain = if (res.attackersAttack.attributes.contains(Drain)) {
actualDamage / 2
} else {
0
}
val finalDrain = fixDrain(attacker.hp, attackerState, drain)
attackerState += finalDrain
Some(AttackResult(res.isAttackerAttackingThisRound, res.attacker, res.defender, res.attackersAttack, res.success, actualDamage, finalDrain))
} else {
val finalDrain = fixDrain(attacker.hp, attackerState, res.drained)
attackerState += finalDrain
Some(AttackResult(res.isAttackerAttackingThisRound, res.attacker, res.defender, res.attackersAttack, res.success, res.damage, finalDrain))
}
} else {
val possibleDamage = possibleAttackersDamage(false, defender, attacker, defenderAttack.get, Some(attackerAttack))
val damage = if (defenderSlowed) possibleDamage / 2 else possibleDamage
attackerState -= damage
if (defenderAttack.get.attributes.contains(Slow)) {
attackerSlowed = true
}
if (attackerState < 0) {
val actualDamage = damage + attackerState
val drain = if (res.attackersAttack.attributes.contains(Drain)) {
actualDamage / 2
} else {
0
}
val finalDrain = fixDrain(defender.hp, defenderState, drain)
defenderState += finalDrain
Some(AttackResult(res.isAttackerAttackingThisRound, res.attacker, res.defender, res.attackersAttack, res.success, actualDamage, finalDrain))
} else {
val finalDrain = fixDrain(defender.hp, defenderState, res.drained)
defenderState += finalDrain
Some(AttackResult(res.isAttackerAttackingThisRound, res.attacker, res.defender, res.attackersAttack, res.success, res.damage, finalDrain))
}
}
} else {
Some(res)
}
})
}
// when defender deals damage, first parameter is false, otherwise true
def possibleAttackersDamage(actualAttackerAttacks: Boolean, attacker: Soldier, defender: Soldier, attackersAttack: Attack, defendersAttack: Option[Attack]): Int = {
val resistance = if (actualAttackerAttacks && defender.soldierType.attributes.contains(Steadfast)) {
val res = defender.soldierType.resistance(attackersAttack.attackType)
if (res <= 0) {
res
} else if (res * 2 > 50) {
50
} else {
res * 2
}
} else {
defender.soldierType.resistance(attackersAttack.attackType)
}
val damageWithResistances = attackersAttack.damage * (100 - resistance) / 100
val damage = if (actualAttackerAttacks && attackersAttack.attributes.contains(Charge) ||
!actualAttackerAttacks && defendersAttack.exists(_.attributes.contains(Charge))) {
damageWithResistances * 2
} else {
damageWithResistances
}
if (attacker.state.contains(Slowed)) damage / 2 else damage
}
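  // Illustrative sketch, not part of the original source: a worked example of the damage
  // formula above with assumed numbers. With base damage 10 and defender resistance 20,
  // the resisted damage is 10 * (100 - 20) / 100 = 8; a Charge attack doubles it to 16,
  // and a Slowed attacker halves the result again to 8. A Steadfast defender would have a
  // positive resistance doubled, but never above 50.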
def selectBestAttackForDefender(attacker: Soldier, defender: Soldier, attackersAttack: Attack): Option[Attack] = {
val ranged = attackersAttack.ranged
val rangedAttacks = defender.soldierType.attacks.filter(_.ranged == ranged)
if (rangedAttacks.isEmpty) {
None
} else if (rangedAttacks.size == 1) {
Some(rangedAttacks(0))
} else {
val sorted = rangedAttacks.sortBy(ra => Attack.possibleAttackersDamage(false, defender, attacker, ra, Some(attackersAttack)) * ra.count)
Some(sorted.last)
}
}
}
case class Attack(index: Int, damage: Int, count: Int, attackType: AttackType,
ranged: Boolean, attributes: Set[AttackAttribute] = Set()) {
def chanceOfSuccess(enemysDefence: SoldierDefence): ChanceOfSuccess = if (attributes.contains(Magical)) {
ChanceOfSuccess(70)
} else if (attributes.contains(Marksman) && enemysDefence.defence > 40) {
ChanceOfSuccess(60)
} else {
ChanceOfSuccess(100 - enemysDefence.defence)
}
}
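// Illustrative sketch, not part of the original source: how chanceOfSuccess resolves.
// `Impact` stands in for any AttackType value and is an assumption, not a name from this file.
//   Attack(0, 8, 2, Impact, ranged = false).chanceOfSuccess(SoldierDefence(60))                // ChanceOfSuccess(40)
//   Attack(0, 8, 2, Impact, ranged = false, Set(Marksman)).chanceOfSuccess(SoldierDefence(60)) // ChanceOfSuccess(60)
//   Attack(0, 8, 2, Impact, ranged = false, Set(Magical)).chanceOfSuccess(SoldierDefence(60))  // ChanceOfSuccess(70)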
case class ChanceOfSuccess(chanceNumber: Int) extends AnyVal
case class SoldierDefence(defence: Int) extends AnyVal | RenualdMarch/merc | src/main/scala/mr/merc/unit/Attack.scala | Scala | gpl-3.0 | 9,497 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.service
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import com.waz.log.LogSE._
import com.waz.model
import com.waz.model.GenericContent._
import com.waz.model._
import com.waz.service.conversation.{ConversationOrderEventsService, ConversationsContentUpdaterImpl}
import com.waz.service.messages.{MessagesContentUpdater, ReactionsService, ReceiptService}
import scala.concurrent.Future.traverse
class GenericMessageService(selfUserId: UserId,
messages: MessagesContentUpdater,
convs: ConversationsContentUpdaterImpl,
convEvents: ConversationOrderEventsService,
reactions: ReactionsService,
receipts: ReceiptService,
users: UserService) extends DerivedLogTag {
import com.waz.threading.Threading.Implicits.Background
val eventProcessingStage = EventScheduler.Stage[GenericMessageEvent] { (_, events) =>
def lastForConv(items: Seq[(RConvId, RemoteInstant)]) = items.groupBy(_._1).map { case (conv, times) => times.maxBy(_._2.toEpochMilli) }
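    // Illustrative sketch, not part of the original source: lastForConv keeps only the most
    // recent (conversation, timestamp) pair per conversation, e.g. for an assumed input
    //   Seq(convA -> t1, convA -> t2, convB -> t3)   with t2 after t1
    // it yields the pairs (convA -> t2) and (convB -> t3).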
val incomingReactions = events collect {
case GenericMessageEvent(_, time, from, GenericMessage(_, Reaction(msg, action))) => Liking(msg, from, time, action)
}
val lastRead = lastForConv(events collect {
case GenericMessageEvent(_, _, _, GenericMessage(_, LastRead(conv, time))) => (conv, time)
})
val cleared = lastForConv(events collect {
case GenericMessageEvent(_, _, userId, GenericMessage(_, Cleared(conv, time))) if userId == selfUserId => (conv, time)
})
val deleted = events collect {
case GenericMessageEvent(_, _, _, GenericMessage(_, MsgDeleted(_, msg))) => msg
}
val confirmed = events.collect {
case GenericMessageEvent(_, _, _, GenericMessage(_, DeliveryReceipt(msgs))) => msgs
}.flatten
val availabilities = (events collect {
case GenericMessageEvent(_, _, userId, GenericMessage(_, AvailabilityStatus(available))) => userId -> available
}).toMap
val read = events.collect {
case GenericMessageEvent(_, time, from, GenericMessage(_, Proto.ReadReceipt(msgs))) => msgs.map { msg =>
model.ReadReceipt(msg, from, time)
}
}.flatten
for {
_ <- messages.deleteOnUserRequest(deleted)
_ <- traverse(lastRead) { case (remoteId, timestamp) =>
convs.processConvWithRemoteId(remoteId, retryAsync = true) { conv => convs.updateConversationLastRead(conv.id, timestamp) }
}
_ <- reactions.processReactions(incomingReactions)
_ <- traverse(cleared) { case (remoteId, timestamp) =>
convs.processConvWithRemoteId(remoteId, retryAsync = true) { conv => convs.updateConversationCleared(conv.id, timestamp) }
}
_ <- receipts.processDeliveryReceipts(confirmed)
_ <- receipts.processReadReceipts(read)
_ <- users.storeAvailabilities(availabilities)
} yield ()
}
}
| wireapp/wire-android-sync-engine | zmessaging/src/main/scala/com/waz/service/GenericMessageService.scala | Scala | gpl-3.0 | 3,716 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.test
import java.util.UUID
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.services.{ AuthenticatorService, IdentityService }
import com.mohiva.play.silhouette.api.util.Clock
import com.mohiva.play.silhouette.impl.authenticators._
import com.mohiva.play.silhouette.impl.daos.AuthenticatorDAO
import com.mohiva.play.silhouette.impl.util.{ DefaultFingerprintGenerator, SecureRandomIDGenerator }
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc.RequestHeader
import scala.collection.mutable
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.runtime.universe._
/**
* A fake identity.
*
* @param loginInfo The linked login info for an identity.
*/
case class FakeIdentity(loginInfo: LoginInfo) extends Identity
/**
* A fake identity service implementation which can handle a predefined list of identities.
*
* @param identities A list of (login info -> identity) pairs this service is responsible for.
* @tparam I The type of the identity to handle.
*/
class FakeIdentityService[I <: Identity](identities: (LoginInfo, I)*) extends IdentityService[I] {
/**
* Retrieves an identity that matches the specified login info.
*
* @param loginInfo The login info to retrieve an identity.
* @return The retrieved identity or None if no identity could be retrieved for the given login info.
*/
def retrieve(loginInfo: LoginInfo): Future[Option[I]] = {
Future.successful(identities.find(_._1 == loginInfo).map(_._2))
}
}
/**
* A fake authenticator DAO which stores authenticators in memory.
*
* @tparam T The type of the authenticator to handle.
*/
class FakeAuthenticatorDAO[T <: StorableAuthenticator] extends AuthenticatorDAO[T] {
/**
   * The data store for the authenticators kept in memory.
*/
var data: mutable.HashMap[String, T] = mutable.HashMap()
/**
* Finds the authenticator for the given ID.
*
* @param id The authenticator ID.
* @return The found authenticator or None if no authenticator could be found for the given ID.
*/
def find(id: String): Future[Option[T]] = {
Future.successful(data.get(id))
}
/**
* Adds a new authenticator.
*
* @param authenticator The authenticator to add.
* @return The added authenticator.
*/
def add(authenticator: T): Future[T] = {
data += (authenticator.id -> authenticator)
Future.successful(authenticator)
}
/**
* Updates an already existing authenticator.
*
* @param authenticator The authenticator to update.
* @return The updated authenticator.
*/
def update(authenticator: T): Future[T] = {
data += (authenticator.id -> authenticator)
Future.successful(authenticator)
}
/**
* Removes the authenticator for the given ID.
*
* @param id The authenticator ID.
* @return An empty future.
*/
def remove(id: String): Future[Unit] = {
data -= id
Future.successful(())
}
}
/**
* A fake session authenticator service.
*/
case class FakeSessionAuthenticatorService() extends SessionAuthenticatorService(
new SessionAuthenticatorSettings(),
new DefaultFingerprintGenerator(),
Clock())
/**
* A fake cookie authenticator service.
*/
case class FakeCookieAuthenticatorService() extends CookieAuthenticatorService(
new CookieAuthenticatorSettings(),
None,
new DefaultFingerprintGenerator(),
new SecureRandomIDGenerator(),
Clock())
/**
* A fake bearer token authenticator service.
*/
case class FakeBearerTokenAuthenticatorService() extends BearerTokenAuthenticatorService(
new BearerTokenAuthenticatorSettings(),
new FakeAuthenticatorDAO[BearerTokenAuthenticator],
new SecureRandomIDGenerator(),
Clock())
/**
* A fake JWT authenticator service.
*/
case class FakeJWTAuthenticatorService() extends JWTAuthenticatorService(
new JWTAuthenticatorSettings(sharedSecret = UUID.randomUUID().toString, encryptSubject = false),
None,
new SecureRandomIDGenerator(),
Clock())
/**
* A fake Dummy authenticator service.
*/
case class FakeDummyAuthenticatorService() extends DummyAuthenticatorService
/**
* A fake authenticator service factory.
*/
object FakeAuthenticatorService {
/**
   * Creates a new fake authenticator service for the given authenticator type.
   *
   * @tparam T The type of the authenticator.
   * @return A fully configured authenticator service instance.
*/
def apply[T <: Authenticator: TypeTag](): AuthenticatorService[T] = {
(typeOf[T] match {
case t if t <:< typeOf[SessionAuthenticator] => FakeSessionAuthenticatorService()
case t if t <:< typeOf[CookieAuthenticator] => FakeCookieAuthenticatorService()
case t if t <:< typeOf[BearerTokenAuthenticator] => FakeBearerTokenAuthenticatorService()
case t if t <:< typeOf[JWTAuthenticator] => FakeJWTAuthenticatorService()
case t if t <:< typeOf[DummyAuthenticator] => FakeDummyAuthenticatorService()
}).asInstanceOf[AuthenticatorService[T]]
}
}
/**
* A fake authenticator.
*
* @param loginInfo The linked login info for an identity.
* @param id The ID of the authenticator.
* @param isValid True if the authenticator is valid, false otherwise.
*/
case class FakeAuthenticator(loginInfo: LoginInfo, id: String = UUID.randomUUID().toString, isValid: Boolean = true)
extends StorableAuthenticator
/**
* A fake authenticator factory.
*/
object FakeAuthenticator {
/**
* Creates a new fake authenticator for the given authenticator type.
*
* @param loginInfo The login info for which the authenticator should be created.
* @param env The Silhouette environment.
* @param requestHeader The request header.
   * @tparam E The type of the environment.
   * @return An authenticator instance.
*/
def apply[E <: Env](loginInfo: LoginInfo)(implicit env: Environment[E], requestHeader: RequestHeader): E#A = {
env.authenticatorService.create(loginInfo)
}
}
/**
* A fake environment implementation.
*
* @param identities A list of (login info -> identity) pairs to return inside a Silhouette action.
* @param requestProviders The list of request providers.
* @param eventBus The event bus implementation.
* @param executionContext The execution context to handle the asynchronous operations.
* @param tt The type tag of the authenticator type.
* @tparam E The type of the environment.
*/
case class FakeEnvironment[E <: Env](
identities: Seq[(LoginInfo, E#I)],
requestProviders: Seq[RequestProvider] = Seq(),
eventBus: EventBus = EventBus())(implicit val executionContext: ExecutionContext, tt: TypeTag[E#A])
extends Environment[E] {
/**
* The identity service implementation.
*/
val identityService: IdentityService[E#I] = new FakeIdentityService[E#I](identities: _*)
/**
* The authenticator service implementation.
*/
val authenticatorService: AuthenticatorService[E#A] = FakeAuthenticatorService[E#A]()
}
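// Illustrative usage sketch, not part of the original source. `MyEnv`, `user`, `loginInfo`
// and the implicit RequestHeader are assumed test fixtures from the surrounding spec:
//   implicit val env = FakeEnvironment[MyEnv](Seq(loginInfo -> user))
//   val authenticator = FakeAuthenticator[MyEnv](loginInfo)
// The environment then resolves `user` for `loginInfo` inside the Silhouette action under test.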
| cemcatik/play-silhouette | silhouette-testkit/app/com/mohiva/play/silhouette/test/Fakes.scala | Scala | apache-2.0 | 7,539 |
package com.github.dronegator.nlp.vocabulary.ToolMiniLanguage
import akka.NotUsed
import akka.stream.scaladsl._
import com.github.dronegator.nlp.component.tokenizer.Tokenizer._
import com.github.dronegator.nlp.utils._
import com.github.dronegator.nlp.vocabulary.Vocabulary
import com.github.dronegator.nlp.vocabulary.VocabularyTools._
import com.typesafe.scalalogging.LazyLogging
import scala.collection.immutable.TreeMap
/**
* Created by cray on 10/2/16.
*/
object ExampleFlow extends LazyLogging {
type StatementId = Int
type StatementTokens = Set[Token]
case class ExampleFlowState(id2Statment: Map[StatementId, StatementTokens],
token2Statement: Map[Token, Set[StatementId]],
size2Statement: TreeMap[Int, Set[StatementId]],
token: Set[Token],
nextStatementId: StatementId,
countStatement: Int,
statement: Set[StatementId],
id2StatementOrig: Map[StatementId, Statement])
def state(vocabulary: Vocabulary) =
vocabulary.statements
.filter { x =>
x.length > 4 && x.length < 15
}
.distinct
.foldLeft(ExampleFlowState(Map(), Map(), TreeMap(), Set(), 1, 0, Set(), Map())) {
case (af, statement) =>
val statementId = af.nextStatementId
val statementTokens = statement.toSet
ExampleFlowState(
id2Statment = af.id2Statment + (statementId -> statementTokens),
statementTokens
.foldLeft(af.token2Statement) {
case (map, token) =>
map + {
token -> (map.getOrElse(token, Set()) + statementId)
}
},
af.size2Statement + (statementTokens.size -> (af.size2Statement.getOrElse(statementTokens.size, Set()) + statementId)),
af.token,
nextStatementId = af.nextStatementId + 1,
0,
Set(),
id2StatementOrig = af.id2StatementOrig + (statementId -> statement)
)
}
def apply(vocabulary: Vocabulary): Flow[Token, (Token, List[Statement]), NotUsed] = {
Flow[Token]
.scan((Option.empty[(Token, List[Statement])], state(vocabulary))) {
case ((_, af), token) =>
try {
val changedStatement = af.token2Statement.getOrElse(token, Set())
val id2Statement = af.id2Statment ++
changedStatement
.toIterator
.map { statementId =>
statementId -> (af.id2Statment.getOrElse(statementId, Set()) - token)
}
.toMap
val size2Statement = changedStatement.toIterator
.foldLeft(af.size2Statement) {
case (size2Statement, statementId) =>
val size = af.id2Statment.getOrElse(statementId, Set()).size
size2Statement +
(size -> (size2Statement.getOrElse(size, Set()) - statementId)) +
((size - 1) -> (size2Statement.getOrElse(size - 1, Set()) + statementId))
}
val (statement, statementAsExample) = size2Statement.headOption.map {
case (size, newStatement) =>
logger.info(s"headOption=$size statements=${newStatement.size} token=$token word=${vocabulary.wordMap.getOrElse(token, "***")}")
val statement = ((af.statement ++ newStatement) & af.token2Statement.getOrElse(token, Set()))
.iterator
.flatMap(x => af.id2StatementOrig.get(x).map(x -> _))
.collect {
case (statementId, statement) if statement.size > 7 && !(statement contains TokenPreDef.Comma.value) =>
// println(af.id2Statment(statementId).flatMap(x => vocabulary.wordMap.get(x)))
// println(vocabulary.untokenize(statement))
val probabilityStatement = vocabulary.probability(statement) / vocabulary.statementDenominator(statement)
val probability3Gram = statement
.sliding(3)
.collect {
case key@t1 :: `token` :: t3 :: _ =>
//vocabulary.map2ToMiddle.get(t1 :: t3 :: Nil)
vocabulary.pNGram3.getOrElse(key, {
println(s"Can not find word=${vocabulary.wordMap.getOrElse(token, "***")} in ${vocabulary.untokenize(statement)}")
(1.0)
})
}
.headOption
(statementId, statement, probability3Gram, probabilityStatement)
}
.collect {
case (statementId, statement, Some(probability3Gram), probabilityStatement) =>
(statementId, statement, probability3Gram, probabilityStatement)
}
.sortBy(x => (-x._3 * -x._4))
.take(8)
.map(x => (x._1, x._2))
.toList
(af.statement ++ newStatement -- statement.map(_._1), statement)
}.getOrElse((af.statement, List()))
val nextAf = af.copy(
id2Statment = id2Statement,
token2Statement = af.token2Statement - token,
size2Statement = size2Statement - 0,
token = af.token + token,
statement = statement,
countStatement = af.countStatement + size2Statement.getOrElse(0, Set()).size
)
//logger.info(s"tokenSize=${nextAf.token.size} statementSize=${nextAf.countStatement} token=$token word=${vocabulary.wordMap.getOrElse(token, "***")}")
(Some((token, statementAsExample.map(_._2).toList)), nextAf)
}
catch {
case th: Throwable =>
th.printStackTrace()
throw th
}
}
.collect {
case (Some(x), _) =>
//x.map(_._1)
x
}
}
private def calcRest(statements: List[Statement], tokens: Set[Token]) =
statements
.groupBy { statement =>
statement.filterNot(tokens).distinct.length
}
.filter(_._1 > 0)
}
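// Illustrative usage sketch, not part of the original source. `vocabulary` and `tokens` are
// assumed to be an existing Vocabulary and a Source[Token, _] respectively:
//   tokens.via(ExampleFlow(vocabulary)).runForeach { case (token, statements) =>
//     println(s"$token -> ${statements.size} example statements")
//   }
// Each emitted pair carries the incoming token and up to eight statements chosen as examples for it.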
| dronegator/nlp | akka-utils/src/main/scala/com/github/dronegator/nlp/vocabulary/ToolMiniLanguage/ExamplesComponent.scala | Scala | apache-2.0 | 6,455 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
import javax.swing.JTabbedPane
object TabbedPane {
object Layout extends Enumeration {
import JTabbedPane._
val Wrap : Layout.Value = Value(WRAP_TAB_LAYOUT)
val Scroll: Layout.Value = Value(SCROLL_TAB_LAYOUT)
}
class Page protected[TabbedPane](parent0: TabbedPane, title0: String, content0: Component, tip0: String) extends Proxy {
def self: Any = content0
def this(title0: String, content0: Component, tip0: String) =
this(null, title0, content0, tip0)
def this(title0: String, content0: Component) =
this(title0, content0, "")
content = content0 // first add component, *then* set other things
title = title0
tip = tip0
protected[TabbedPane] var parent: TabbedPane = parent0
protected var _title: String = title0
def title: String = _title
def title_=(t: String): Unit = {
      // beware: keep this order, since index depends on the _old_ title
if (parent != null) parent.peer.setTitleAt(index, t)
_title = t
}
protected var _content: Component = content0
def content: Component = _content//UIElement.cachedWrapper(peer.getComponentAt(index).asInstanceOf[JComponent])
def content_=(c: Component): Unit = { _content = c; if (parent != null) parent.peer.setComponentAt(index, c.peer) }
protected var _tip: String = tip0
def tip: String = _tip//peer.getToolTipTextAt(index)
def tip_=(t: String): Unit = { _tip = t; if (parent != null) parent.peer.setToolTipTextAt(index, if(t == "") null else t) }
protected var _enabled = true
def enabled: Boolean = _enabled//peer.isEnabledAt(index)
def enabled_=(b: Boolean): Unit = { _enabled = b; if (parent != null) parent.peer.setEnabledAt(index, b) }
protected var _mnemonic: Int = -1
def mnemonic: Int = _mnemonic//peer.getMnemonicAt(index)
def mnemonic_=(k: Int): Unit = { _mnemonic = k; if (parent != null) parent.peer.setMnemonicAt(index, k)}
protected var _foreground: Color = null
def foreground: Color = _foreground//peer.getForegroundAt(index)
def foreground_=(c: Color): Unit = { _foreground = c; if (parent != null) parent.peer.setForegroundAt(index, c)}
protected var _background: Color = null
def background: Color = _background //peer.getBackgroundAt(index)
def background_=(c: Color): Unit = { _background = c; if (parent != null) parent.peer.setBackgroundAt(index, c)}
def bounds: Rectangle = parent.peer.getBoundsAt(index)
// TODO: icon, disabledIcon
def index: Int = if(parent != null) parent.peer.indexOfTab(title) else 0//_index
//protected[TabbedPane] var _index: Int = index0
}
}
/**
* Displays the contents of one of several pages at a time. For each page a tab is
* visible at all times. The user can click on one of these tabs to move the
* corresponding page to the front.
*
* @see javax.swing.JTabbedPane
*/
class TabbedPane extends Component with Publisher {
override lazy val peer: JTabbedPane = new JTabbedPane with SuperMixin
import TabbedPane._
object pages extends BufferWrapper[Page] {
def runCount: Int = peer.getTabRunCount
def remove(n: Int): Page = {
val t = apply(n)
peer.removeTabAt(n)
t.parent = null
//for(i <- n to length) apply(i)._index -= 1
t
}
override def insert(n: Int, t: Page): Unit = {
//for(i <- n to length) apply(i)._index += 1
t.parent = TabbedPane.this
peer.insertTab(t.title, null, t.content.peer, if(t.tip == "") null else t.tip, n)
}
override def addOne(t: Page): this.type = {
t.parent = TabbedPane.this
peer.addTab(t.title, null, t.content.peer, if(t.tip == "") null else t.tip)
this
}
def length: Int = peer.getTabCount
def apply(n: Int): Page = new Page(TabbedPane.this, peer.getTitleAt(n),
UIElement.cachedWrapper[Component](peer.getComponentAt(n).asInstanceOf[javax.swing.JComponent]),
peer.getToolTipTextAt(n))
}
def tabLayoutPolicy: Layout.Value = Layout(peer.getTabLayoutPolicy)
def tabLayoutPolicy_=(p: Layout.Value): Unit = { peer.setTabLayoutPolicy(p.id) }
def tabPlacement: Alignment.Value = Alignment(peer.getTabPlacement)
/**
* Possible values are Left, Right, Top, Bottom.
*/
def tabPlacement_=(b: Alignment.Value): Unit = peer.setTabPlacement(b.id)
/**
* The current page selection
*/
object selection extends Publisher {
def page: Page = pages(index)
def page_=(p: Page): Unit = { index = p.index }
def index: Int = peer.getSelectedIndex
def index_=(n: Int): Unit = peer.setSelectedIndex(n)
peer.addChangeListener(new javax.swing.event.ChangeListener {
def stateChanged(e: javax.swing.event.ChangeEvent): Unit =
publish(event.SelectionChanged(TabbedPane.this))
})
}
}
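// Illustrative usage sketch, not part of the original source:
//   val tabs = new TabbedPane
//   tabs.pages += new TabbedPane.Page("First", new Label("first page"))
//   tabs.pages += new TabbedPane.Page("Second", new Label("second page"), "tooltip text")
//   tabs.selection.index = 1   // brings the second page to the front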
| scala/scala-swing | src/main/scala/scala/swing/TabbedPane.scala | Scala | apache-2.0 | 5,107 |
/*
* Copyright 2013 Steve Vickers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.extensions.dsl.criteria
import scala.language.{
dynamics,
implicitConversions
}
import reactivemongo.bson._
/**
* The '''Expression''' type defines a recursive propositional abstract
* syntax tree central to the MongoDB embedded domain-specific language (EDSL).
* It is the main abstraction used to provide the EDSL and results in being
* able to write:
*
* {{{
* import Untyped._
*
* val edslQuery = criteria.first < 10 && (
* criteria.second >= 20.0 || criteria.second.in (0.0, 1.0)
* );
* }}}
*
* And have that equivalent to this filter:
*
* {{{
* val bsonQuery = BSONDocument (
* "$and" ->
* BSONArray (
* BSONDocument (
* "first" -> BSONDocument ("$lt" -> BSONInteger (10))
* ),
* BSONDocument (
* "$or" ->
* BSONArray (
* BSONDocument (
* "second" -> BSONDocument ("$gte" -> BSONDouble (20.0))
* ),
* BSONDocument (
* "second" ->
* BSONDocument (
* "$in" -> BSONArray (BSONDouble (0.0), BSONDouble (1.0))
* )
* )
* )
* )
* )
* );
* }}}
*
* @author svickers
*
*/
case class Expression(name: Option[String], element: BSONElement) {
/// Class Imports
import Expression._
/**
* The logical negation operator attempts to invert this '''Expression'''
* by using complimentary operators if possible, falling back to the
* general-case wrapping in a `$not` operator.
*/
def unary_! : Expression =
this match {
case Expression(Some(term), ("$in", vals)) =>
Expression(term, ("$nin", vals));
case Expression(Some(term), ("$nin", vals)) =>
Expression(term, ("$in", vals));
case Expression(Some(term), ("$ne", vals)) =>
Expression(term, (term, vals));
case Expression(Some(term), (field, vals)) if (field == term) =>
Expression(term, ("$ne", vals));
case Expression(None, ("$nor", vals)) =>
Expression(None, ("$or" -> vals));
case Expression(None, ("$or", vals)) =>
Expression(None, ("$nor" -> vals));
case Expression(Some("$not"), el) =>
Expression(None, el);
case Expression(Some(n), _) =>
Expression(Some("$not"), (n -> BSONDocument(element)));
case Expression(None, el) =>
Expression(Some("$not"), el);
}
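  // Illustrative sketch, not part of the original source: negation prefers the complementary
  // operator over a $not wrapper, so (using the Untyped EDSL shown in the class documentation)
  //   !(criteria.second.in (0.0, 1.0))
  // produces BSONDocument("second" -> BSONDocument("$nin" -> BSONArray(0.0, 1.0))).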
/**
* Conjunction: ''AND''.
*/
def &&(rhs: Expression): Expression = combine("$and", rhs);
/**
* Negation of conjunction: ''NOR''.
*/
def !&&(rhs: Expression): Expression = combine("$nor", rhs);
/**
* Disjunction: ''OR''.
*/
def ||(rhs: Expression): Expression = combine("$or", rhs);
/**
   * The isEmpty method reports whether this '''Expression'''
   * has neither a `name` nor an assigned value.
*/
def isEmpty: Boolean = name.isEmpty && element._1.isEmpty;
private def combine(op: String, rhs: Expression): Expression =
if (rhs.isEmpty)
this;
else
element match {
case (`op`, arr: BSONArray) =>
Expression(
None,
(op, arr ++ BSONArray(toBSONDocument(rhs)))
);
case ("", _) =>
rhs;
case _ =>
Expression(
None,
(
op -> BSONArray(toBSONDocument(this),
toBSONDocument(rhs))
));
}
}
object Expression {
/**
   * The empty property is provided so that ''monoid'' definitions for
   * '''Expression''' can be written easily.
*/
val empty = new Expression(None, "" -> BSONDocument.empty);
/**
* The apply method provides functional-style creation syntax for
* [[reactivemongo.extensions.dsl.criteria.Expression]] instances.
*/
def apply(name: String, element: BSONElement): Expression =
new Expression(Some(name), element);
/// Implicit Conversions
implicit object ExpressionWriter extends BSONDocumentWriter[Expression] {
override def write(expr: Expression): BSONDocument =
toBSONDocument(expr);
}
implicit def toBSONDocument(expr: Expression): BSONDocument =
expr match {
case Expression(Some(name), (field, element)) if (name == field) =>
BSONDocument(field -> element);
case Expression(Some(name), element) =>
BSONDocument(name -> BSONDocument(element));
case Expression(None, ("", _)) =>
BSONDocument.empty;
case Expression(None, element) =>
BSONDocument(element);
}
implicit def toBSONElement(expr: Expression): BSONElement =
expr.element;
}
| fehmicansaglam/reactivemongo-extensions | bson/src/main/scala/dsl/criteria/Expression.scala | Scala | apache-2.0 | 5,119 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.Tables
import org.scalatest.events.Event
trait ParallelSuites extends EventHelpers {
def suite1: Suite
def suite2: Suite
def assertParallelSuites(events: List[Event]): Unit
}
object ParallelTestExecutionParallelSuiteExamples extends Tables {
def parallelExamples =
Table(
"pair",
// SKIP-SCALATESTJS,NATIVE-START
new ExampleParallelTestExecutionParallelSpecPair,
// SKIP-SCALATESTJS,NATIVE-END
new ExampleParallelTestExecutionParallelFunSuitePair,
new ExampleParallelTestExecutionParallelFunSpecPair,
new ExampleParallelTestExecutionParallelFeatureSpecPair,
new ExampleParallelTestExecutionParallelFlatSpecPair,
new ExampleParallelTestExecutionParallelFreeSpecPair,
new ExampleParallelTestExecutionParallelPropSpecPair,
new ExampleParallelTestExecutionParallelWordSpecPair
)
}
// SKIP-SCALATESTJS,NATIVE-START
class ExampleParallelTestExecutionParallelSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderSpec
def suite2 = new ExampleParallelTestExecutionOrderOtherSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 16)
checkSuiteStarting(events(0), suite1.suiteId)
checkTestStarting(events(1), "test 1")
checkTestSucceeded(events(2), "test 1")
checkTestStarting(events(3), "test 2")
checkTestSucceeded(events(4), "test 2")
checkTestStarting(events(5), "test 3")
checkTestSucceeded(events(6), "test 3")
checkSuiteCompleted(events(7), suite1.suiteId)
checkSuiteStarting(events(8), suite2.suiteId)
checkTestStarting(events(9), "test 1")
checkTestSucceeded(events(10), "test 1")
checkTestStarting(events(11), "test 2")
checkTestSucceeded(events(12), "test 2")
checkTestStarting(events(13), "test 3")
checkTestSucceeded(events(14), "test 3")
checkSuiteCompleted(events(15), suite2.suiteId)
}
}
// SKIP-SCALATESTJS,NATIVE-END
class ExampleParallelTestExecutionParallelFunSuitePair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderFunSuite
def suite2 = new ExampleParallelTestExecutionOrderFixtureFunSuite
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 16)
checkSuiteStarting(events(0), suite1.suiteId)
checkTestStarting(events(1), "Test 1")
checkTestSucceeded(events(2), "Test 1")
checkTestStarting(events(3), "Test 2")
checkTestSucceeded(events(4), "Test 2")
checkTestStarting(events(5), "Test 3")
checkTestSucceeded(events(6), "Test 3")
checkSuiteCompleted(events(7), suite1.suiteId)
checkSuiteStarting(events(8), suite2.suiteId)
checkTestStarting(events(9), "Fixture Test 1")
checkTestSucceeded(events(10), "Fixture Test 1")
checkTestStarting(events(11), "Fixture Test 2")
checkTestSucceeded(events(12), "Fixture Test 2")
checkTestStarting(events(13), "Fixture Test 3")
checkTestSucceeded(events(14), "Fixture Test 3")
checkSuiteCompleted(events(15), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelFunSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderFunSpec
def suite2 = new ExampleParallelTestExecutionOrderFixtureFunSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 28)
checkSuiteStarting(events(0), suite1.suiteId)
checkScopeOpened(events(1), "Scope 1")
checkTestStarting(events(2), "Scope 1 Test 1")
checkTestSucceeded(events(3), "Scope 1 Test 1")
checkTestStarting(events(4), "Scope 1 Test 2")
checkTestSucceeded(events(5), "Scope 1 Test 2")
checkScopeClosed(events(6), "Scope 1")
checkScopeOpened(events(7), "Scope 2")
checkTestStarting(events(8), "Scope 2 Test 3")
checkTestSucceeded(events(9), "Scope 2 Test 3")
checkTestStarting(events(10), "Scope 2 Test 4")
checkTestSucceeded(events(11), "Scope 2 Test 4")
checkScopeClosed(events(12), "Scope 2")
checkSuiteCompleted(events(13), suite1.suiteId)
checkSuiteStarting(events(14), suite2.suiteId)
checkScopeOpened(events(15), "Fixture Scope 1")
checkTestStarting(events(16), "Fixture Scope 1 Fixture Test 1")
checkTestSucceeded(events(17), "Fixture Scope 1 Fixture Test 1")
checkTestStarting(events(18), "Fixture Scope 1 Fixture Test 2")
checkTestSucceeded(events(19), "Fixture Scope 1 Fixture Test 2")
checkScopeClosed(events(20), "Fixture Scope 1")
checkScopeOpened(events(21), "Fixture Scope 2")
checkTestStarting(events(22), "Fixture Scope 2 Fixture Test 3")
checkTestSucceeded(events(23), "Fixture Scope 2 Fixture Test 3")
checkTestStarting(events(24), "Fixture Scope 2 Fixture Test 4")
checkTestSucceeded(events(25), "Fixture Scope 2 Fixture Test 4")
checkScopeClosed(events(26), "Fixture Scope 2")
checkSuiteCompleted(events(27), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelFeatureSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderFeatureSpec
def suite2 = new ExampleParallelTestExecutionOrderFixtureFeatureSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 28)
checkSuiteStarting(events(0), suite1.suiteId)
checkScopeOpened(events(1), "Feature: Scope 1")
checkTestStarting(events(2), "Feature: Scope 1 Scenario: Test 1")
checkTestSucceeded(events(3), "Feature: Scope 1 Scenario: Test 1")
checkTestStarting(events(4), "Feature: Scope 1 Scenario: Test 2")
checkTestSucceeded(events(5), "Feature: Scope 1 Scenario: Test 2")
checkScopeClosed(events(6), "Feature: Scope 1")
checkScopeOpened(events(7), "Feature: Scope 2")
checkTestStarting(events(8), "Feature: Scope 2 Scenario: Test 3")
checkTestSucceeded(events(9), "Feature: Scope 2 Scenario: Test 3")
checkTestStarting(events(10), "Feature: Scope 2 Scenario: Test 4")
checkTestSucceeded(events(11), "Feature: Scope 2 Scenario: Test 4")
checkScopeClosed(events(12), "Feature: Scope 2")
checkSuiteCompleted(events(13), suite1.suiteId)
checkSuiteStarting(events(14), suite2.suiteId)
checkScopeOpened(events(15), "Feature: Fixture Scope 1")
checkTestStarting(events(16), "Feature: Fixture Scope 1 Scenario: Fixture Test 1")
checkTestSucceeded(events(17), "Feature: Fixture Scope 1 Scenario: Fixture Test 1")
checkTestStarting(events(18), "Feature: Fixture Scope 1 Scenario: Fixture Test 2")
checkTestSucceeded(events(19), "Feature: Fixture Scope 1 Scenario: Fixture Test 2")
checkScopeClosed(events(20), "Feature: Fixture Scope 1")
checkScopeOpened(events(21), "Feature: Fixture Scope 2")
checkTestStarting(events(22), "Feature: Fixture Scope 2 Scenario: Fixture Test 3")
checkTestSucceeded(events(23), "Feature: Fixture Scope 2 Scenario: Fixture Test 3")
checkTestStarting(events(24), "Feature: Fixture Scope 2 Scenario: Fixture Test 4")
checkTestSucceeded(events(25), "Feature: Fixture Scope 2 Scenario: Fixture Test 4")
checkScopeClosed(events(26), "Feature: Fixture Scope 2")
checkSuiteCompleted(events(27), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelFlatSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderFlatSpec
def suite2 = new ExampleParallelTestExecutionOrderFixtureFlatSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 28)
checkSuiteStarting(events(0), suite1.suiteId)
checkScopeOpened(events(1), "Scope 1")
checkTestStarting(events(2), "Scope 1 should Test 1")
checkTestSucceeded(events(3), "Scope 1 should Test 1")
checkTestStarting(events(4), "Scope 1 should Test 2")
checkTestSucceeded(events(5), "Scope 1 should Test 2")
checkScopeClosed(events(6), "Scope 1")
checkScopeOpened(events(7), "Scope 2")
checkTestStarting(events(8), "Scope 2 should Test 3")
checkTestSucceeded(events(9), "Scope 2 should Test 3")
checkTestStarting(events(10), "Scope 2 should Test 4")
checkTestSucceeded(events(11), "Scope 2 should Test 4")
checkScopeClosed(events(12), "Scope 2")
checkSuiteCompleted(events(13), suite1.suiteId)
checkSuiteStarting(events(14), suite2.suiteId)
checkScopeOpened(events(15), "Fixture Scope 1")
checkTestStarting(events(16), "Fixture Scope 1 should Fixture Test 1")
checkTestSucceeded(events(17), "Fixture Scope 1 should Fixture Test 1")
checkTestStarting(events(18), "Fixture Scope 1 should Fixture Test 2")
checkTestSucceeded(events(19), "Fixture Scope 1 should Fixture Test 2")
checkScopeClosed(events(20), "Fixture Scope 1")
checkScopeOpened(events(21), "Fixture Scope 2")
checkTestStarting(events(22), "Fixture Scope 2 should Fixture Test 3")
checkTestSucceeded(events(23), "Fixture Scope 2 should Fixture Test 3")
checkTestStarting(events(24), "Fixture Scope 2 should Fixture Test 4")
checkTestSucceeded(events(25), "Fixture Scope 2 should Fixture Test 4")
checkScopeClosed(events(26), "Fixture Scope 2")
checkSuiteCompleted(events(27), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelFreeSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderFreeSpec
def suite2 = new ExampleParallelTestExecutionOrderFixtureFreeSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 28)
checkSuiteStarting(events(0), suite1.suiteId)
checkScopeOpened(events(1), "Scope 1")
checkTestStarting(events(2), "Scope 1 Test 1")
checkTestSucceeded(events(3), "Scope 1 Test 1")
checkTestStarting(events(4), "Scope 1 Test 2")
checkTestSucceeded(events(5), "Scope 1 Test 2")
checkScopeClosed(events(6), "Scope 1")
checkScopeOpened(events(7), "Scope 2")
checkTestStarting(events(8), "Scope 2 Test 3")
checkTestSucceeded(events(9), "Scope 2 Test 3")
checkTestStarting(events(10), "Scope 2 Test 4")
checkTestSucceeded(events(11), "Scope 2 Test 4")
checkScopeClosed(events(12), "Scope 2")
checkSuiteCompleted(events(13), suite1.suiteId)
checkSuiteStarting(events(14), suite2.suiteId)
checkScopeOpened(events(15), "Fixture Scope 1")
checkTestStarting(events(16), "Fixture Scope 1 Fixture Test 1")
checkTestSucceeded(events(17), "Fixture Scope 1 Fixture Test 1")
checkTestStarting(events(18), "Fixture Scope 1 Fixture Test 2")
checkTestSucceeded(events(19), "Fixture Scope 1 Fixture Test 2")
checkScopeClosed(events(20), "Fixture Scope 1")
checkScopeOpened(events(21), "Fixture Scope 2")
checkTestStarting(events(22), "Fixture Scope 2 Fixture Test 3")
checkTestSucceeded(events(23), "Fixture Scope 2 Fixture Test 3")
checkTestStarting(events(24), "Fixture Scope 2 Fixture Test 4")
checkTestSucceeded(events(25), "Fixture Scope 2 Fixture Test 4")
checkScopeClosed(events(26), "Fixture Scope 2")
checkSuiteCompleted(events(27), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelPropSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderPropSpec
def suite2 = new ExampleParallelTestExecutionOrderFixturePropSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 16)
checkSuiteStarting(events(0), suite1.suiteId)
checkTestStarting(events(1), "Test 1")
checkTestSucceeded(events(2), "Test 1")
checkTestStarting(events(3), "Test 2")
checkTestSucceeded(events(4), "Test 2")
checkTestStarting(events(5), "Test 3")
checkTestSucceeded(events(6), "Test 3")
checkSuiteCompleted(events(7), suite1.suiteId)
checkSuiteStarting(events(8), suite2.suiteId)
checkTestStarting(events(9), "Fixture Test 1")
checkTestSucceeded(events(10), "Fixture Test 1")
checkTestStarting(events(11), "Fixture Test 2")
checkTestSucceeded(events(12), "Fixture Test 2")
checkTestStarting(events(13), "Fixture Test 3")
checkTestSucceeded(events(14), "Fixture Test 3")
checkSuiteCompleted(events(15), suite2.suiteId)
}
}
class ExampleParallelTestExecutionParallelWordSpecPair extends ParallelSuites {
def suite1 = new ExampleParallelTestExecutionOrderWordSpec
def suite2 = new ExampleParallelTestExecutionOrderFixtureWordSpec
def assertParallelSuites(events: List[Event]): Unit = {
assert(events.size === 28)
checkSuiteStarting(events(0), suite1.suiteId)
checkScopeOpened(events(1), "Scope 1")
checkTestStarting(events(2), "Scope 1 should Test 1")
checkTestSucceeded(events(3), "Scope 1 should Test 1")
checkTestStarting(events(4), "Scope 1 should Test 2")
checkTestSucceeded(events(5), "Scope 1 should Test 2")
checkScopeClosed(events(6), "Scope 1")
checkScopeOpened(events(7), "Scope 2")
checkTestStarting(events(8), "Scope 2 should Test 3")
checkTestSucceeded(events(9), "Scope 2 should Test 3")
checkTestStarting(events(10), "Scope 2 should Test 4")
checkTestSucceeded(events(11), "Scope 2 should Test 4")
checkScopeClosed(events(12), "Scope 2")
checkSuiteCompleted(events(13), suite1.suiteId)
checkSuiteStarting(events(14), suite2.suiteId)
checkScopeOpened(events(15), "Fixture Scope 1")
checkTestStarting(events(16), "Fixture Scope 1 should Fixture Test 1")
checkTestSucceeded(events(17), "Fixture Scope 1 should Fixture Test 1")
checkTestStarting(events(18), "Fixture Scope 1 should Fixture Test 2")
checkTestSucceeded(events(19), "Fixture Scope 1 should Fixture Test 2")
checkScopeClosed(events(20), "Fixture Scope 1")
checkScopeOpened(events(21), "Fixture Scope 2")
checkTestStarting(events(22), "Fixture Scope 2 should Fixture Test 3")
checkTestSucceeded(events(23), "Fixture Scope 2 should Fixture Test 3")
checkTestStarting(events(24), "Fixture Scope 2 should Fixture Test 4")
checkTestSucceeded(events(25), "Fixture Scope 2 should Fixture Test 4")
checkScopeClosed(events(26), "Fixture Scope 2")
checkSuiteCompleted(events(27), suite2.suiteId)
}
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/ParallelTestExecutionParallelSuiteExamples.scala | Scala | apache-2.0 | 14,743 |
//
// $Id$
//
// Wiggle - a 2D game development library - http://code.google.com/p/wiggle/
// Copyright 2008-2010 Michael Bayne
// Distributed under the "Simplified BSD License" in LICENSE.txt
package wiggle.util
/**
* Represents an activity performed bit by bit every frame. Tasks can be composed in sequence or in
* parallel.
*/
abstract class Task
{
/** Initializes a task and prepares it for execution. A task should reset any internal state in
* this method as it may be reinitialized and reused after being used once. The call to
* {@link #init} will immediately be followed by a call to {@link #tick}. */
def init (time :Float) {
}
/** Ticks the task, causing it to perform its operation.
*
* @param time the current timestamp.
*
* @return true if the task is complete (and should be removed), false if the task is still
* processing.
*/
def tick (time :Float) :Boolean
/** Adds this task to the target taskable. */
def bind (target :Taskable) {
target.add(this)
}
}
/**
* Convenience methods for creating many standard kinds of tasks.
*/
object Task
{
/** Creates a task that executes the supplied tasks in parallel. */
def parallel (tasks :Task*) = new Parallel(tasks)
/** Creates a task that executes the supplied tasks one after another. */
def sequence (tasks :Task*) = new Sequence(tasks)
/** Creates a task that delays for the specified time. Useful in a sequence. */
def delay (delay :Float) = new Delay(delay)
/** Creates a task that executes the supplied tasks after the specified delay. */
def after (delay :Float, task :Task) = sequence(new Delay(delay), task)
/** Repeats the supplied task indefinitely. */
def repeat (task :Task) = new Repeat(task)
/** Performs many tasks in parallel and completes when all of its subtasks have completed. */
class Parallel (tasks :Seq[Task]) extends Task
{
assert(tasks.length > 0)
    override def init (time :Float) {
      var idx = 0; while (idx < _tasks.length) {
        _tasks(idx).init(time)
        idx = idx+1
      }
    }
override def tick (time :Float) = {
var complete = true
var idx = 0; while (idx < _tasks.length) {
if (_tasks(idx) != null) {
if (_tasks(idx).tick(time)) _tasks(idx) = null
else complete = false
}
idx = idx+1
}
complete
}
private[this] val _tasks = tasks.toArray
}
/** Performs a set of tasks in order, starting the next task after the previous has completed. */
class Sequence (tasks :Seq[Task]) extends Task
{
assert(tasks.length > 0)
override def init (time :Float) {
_remain = _tasks
}
override def tick (time :Float) = {
if (_active == null) {
_active = _remain.head
_remain = _remain.tail
_active.init(time)
}
if (_active.tick(time)) {
_active = null
}
_active == null && _remain.length == 0
}
private[this] val _tasks = tasks.toList
private[this] var _active :Task = null
private[this] var _remain :List[Task] = null
}
/** Delays for the specified period. Generally used with {@link Sequence} for fun and profit. */
class Delay (delay :Float) extends Task
{
override def init (time :Float) {
_end = time + delay
}
override def tick (time :Float) = {
time > _end
}
private[this] var _end :Float = 0
}
/** Repeats the supplied task over and over again. */
class Repeat (task :Task) extends Task
{
override def init (time :Float) {
task.init(time)
}
override def tick (time :Float) = {
if (task.tick(time)) {
task.init(time) // reinit every time we finish
}
false
}
}
}
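// Illustrative usage sketch, not part of the original source. `fadeIn` and `fadeOut` are
// assumed Task instances and `target` an assumed Taskable:
//   val blink = Task.repeat(Task.sequence(fadeIn, Task.delay(0.5f), fadeOut, Task.delay(0.5f)))
//   blink.bind(target)
// The repeat wrapper re-inits the inner sequence whenever it completes, so the fade cycle loops indefinitely.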
| zdevzee/wiggle | src/main/scala/wiggle/util/Task.scala | Scala | bsd-3-clause | 3,734 |
import org.scalatest._
import java.net._
import java.io._
class BootstrapSuite extends ParseSuite {
var dir = new File(new File(System.getProperty("sbt.paths.tests.source")).getAbsolutePath)
def isProjectRoot(dir: File) = dir != null && new File(dir.getAbsolutePath + File.separatorChar + "project" + File.separatorChar + "build.scala").exists
while (dir != null && !isProjectRoot(dir)) dir = dir.getParentFile
test("ProjectDir (" + dir.getAbsolutePath + ")")(assert(isProjectRoot(dir)))
if (isProjectRoot(dir)) {
def loop(dir: File): Unit = {
def bootstrapTest(src: File): Unit = {
test("tokenize " + src.getAbsolutePath) {
import scala.meta._
import scala.meta.dialects.Scala211
val toks = src.tokens
val codec = scala.io.Codec(java.nio.charset.Charset.forName("UTF-8"))
val content = scala.io.Source.fromFile(src)(codec).mkString
// check #1: everything's covered
var isFail = false
def fail(msg: String) = { isFail = true; println(msg) }
val bitmap = new Array[Boolean](content.length)
val tokenmap = scala.collection.mutable.Map[Int, List[Token]]()
toks.foreach(tok => {
var i = tok.start
while (i < tok.end) {
if (i < 0 || content.length <= i) fail("TOKEN OUT OF BOUNDS AT " + i + ": " + tok)
else {
tokenmap(i) = tok +: tokenmap.getOrElse(i, Nil)
if (bitmap(i)) fail("TOKENS OVERLAP AT " + i + ": " + tokenmap(i).mkString(", "))
bitmap(i) = true
}
i += 1
}
})
bitmap.zipWithIndex.filter(!_._1).foreach{ case (_, i) => fail("TOKENS DON'T COVER " + i) }
// check #2: tostring works
if (!isFail && content != toks.map(_.show[Syntax]).mkString) {
isFail = true
println("CORRELATION FAILED")
println("EXPECTED: \\n" + content)
println("ACTUAL: \\n" + toks.map(_.show[Syntax]).mkString)
}
assert(!isFail)
}
test("parse " + src.getAbsolutePath) {
try {
import scala.meta._
import scala.meta.dialects.Scala211
val tree = src.parse[Source]
// check #1: everything's positioned
def check(tree: Tree): Boolean = {
def loop(x: Any): Boolean = x match {
case x: Tree => check(x)
case x: ::[_] => x.forall(loop)
case x: Some[_] => loop(x.get)
case x => true
}
tree.tokens.isAuthentic && tree.productIterator.toList.forall(loop)
}
if (!check(tree)) {
import scala.meta.ui.Positions.Colorful
println(tree.show[Positions])
assert(false)
}
// check #2: everything's covered
val codec = scala.io.Codec(java.nio.charset.Charset.forName("UTF-8"))
val content = scala.io.Source.fromFile(src)(codec).mkString
assert(tree.start.offset == 0)
assert(tree.end.offset == content.length)
assert(tree.start.line == 0)
            assert(tree.end.line == content.count(_ == '\n'))
} catch {
case ex: scala.meta.ParseException if ex.message.contains("XML literals are not supported") => pending
}
}
}
dir.listFiles.filter(_.isFile).filter(_.getName.endsWith(".scala")).map(bootstrapTest)
dir.listFiles.filter(_.isDirectory).map(loop)
}
loop(dir)
}
} | smarter/scalameta | tests/src/test/scala/parser/BootstrapSuite.scala | Scala | bsd-3-clause | 3,620 |
package org.scalacoin.script.control
import org.scalatest.{MustMatchers, FlatSpec}
/**
* Created by chris on 1/8/16.
*/
class ControlOperationsFactoryTest extends FlatSpec with MustMatchers with ControlOperationsFactory {
"ControlOperationsFactory" must "match a string with a control operation" in {
fromString("OP_ELSE") must be (Some(OP_ELSE))
fromString("OP_ENDIF") must be (Some(OP_ENDIF))
fromString("OP_IF") must be (Some(OP_IF))
fromString("OP_NOTIF") must be (Some(OP_NOTIF))
fromString("OP_RETURN") must be (Some(OP_RETURN))
fromString("OP_VERIFY") must be (Some(OP_VERIFY))
fromString("RANDOM") must be (None)
}
}
| TomMcCabe/scalacoin | src/test/scala/org/scalacoin/script/control/ControlOperationsFactoryTest.scala | Scala | mit | 663 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack
import monix.reactive.Observable.Operator
import monix.reactive.observers.Subscriber
import scala.concurrent.Future
private[reactive] final class ZipWithIndexOperator[A] extends Operator[A, (A, Long)] {
def apply(out: Subscriber[(A, Long)]): Subscriber[A] =
new Subscriber[A] {
implicit val scheduler = out.scheduler
private[this] var index = 0L
def onNext(elem: A): Future[Ack] = {
val oldIndex = index
index += 1
out.onNext((elem, oldIndex))
}
def onError(ex: Throwable): Unit =
out.onError(ex)
def onComplete(): Unit =
out.onComplete()
}
}
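// Illustrative sketch, not part of the original source: this operator pairs every element
// with a 0-based Long index, e.g. an observable of "a", "b", "c" run through it emits
//   ("a", 0L), ("b", 1L), ("c", 2L)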
| alexandru/monifu | monix-reactive/shared/src/main/scala/monix/reactive/internal/operators/ZipWithIndexOperator.scala | Scala | apache-2.0 | 1,378 |
package ch.ninecode.cim
import java.io.File
import org.apache.spark.sql.SparkSession
class CIMSparkSuite extends ch.ninecode.SparkSuite
{
val FILE_DEPOT = "data/"
// test file names
val FILENAME: String = s"${FILE_DEPOT}RealGrid/CGMES_v2.4.15_RealGridTestConfiguration_EQ_v2.xml"
// number of elements in the file
// get number of lines at the top level with:
    // grep -P "^[\t]<cim" RealGrid/CGMES_v2.4.15_RealGridTestConfiguration_EQ_v2.xml | wc
val ELEMENTS1x = 127686
override def run (testName: Option[String], args: org.scalatest.Args): org.scalatest.Status =
{
// unpack the zip file
new Unzip().unzip(s"${FILE_DEPOT}CGMES_v2.4.15_TestConfigurations_v4.0.3.zip", FILE_DEPOT)
new Unzip().unzip(s"${FILE_DEPOT}RealGrid/CGMES_v2.4.15_RealGridTestConfiguration_v2.zip", s"${FILE_DEPOT}RealGrid/")
// run the tests
val ret = super.run(testName, args)
// erase the unpacked files
deleteRecursive(new File(s"${FILE_DEPOT}MicroGrid/"))
deleteRecursive(new File(s"${FILE_DEPOT}MicroGrid_Error/"))
deleteRecursive(new File(s"${FILE_DEPOT}MiniGrid/"))
deleteRecursive(new File(s"${FILE_DEPOT}SmallGrid/"))
deleteRecursive(new File(s"${FILE_DEPOT}RealGrid/"))
ret
}
test("Basic")
{
implicit session: SparkSession =>
val options = Map[String, String](
"ch.ninecode.cim.make_edges" -> "true",
"ch.ninecode.cim.do_topo_islands" -> "true")
val elements = readFile(FILENAME, options)
assert(elements.count() === ELEMENTS1x)
val edges = session.sqlContext.sql("select * from edges")
val count = edges.count
markup(s"edge count: $count")
assert(count === 8348)
}
test("Dedup")
{
implicit session: SparkSession =>
val elements1 = readFile(FILENAME)
val count1 = elements1.count()
assert(count1 === ELEMENTS1x)
val options = Map[String, String]("ch.ninecode.cim.do_deduplication" -> "true")
val elements2 = readFile(s"$FILENAME,$FILENAME", options)
val count2 = elements2.count()
assert(count1 === count2)
}
}
| derrickoswald/CIMScala | CIMReader/src/test/scala/ch/ninecode/cim/CIMSparkSuite.scala | Scala | mit | 2,279 |
package me.arcticlight.animations
import me.arcticlight.animations.ScalaTween._
import me.arcticlight.animations.ScalaTween.DefaultInterpolations._
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class TweenSpec extends FlatSpec with Matchers {
"A Tween" should "interpolate from 0 -> 1 in a full step" in {
val v = AnimationTarget(0f)
Tween(v, 0f, 1f).seekTo(1f)
v.target shouldBe 1f
}
it should "interpolate from 0 -> .5 -> 1 (seeked halfway)" in {
val v = AnimationTarget(0f)
Tween(v, 0f, 1f).seekTo(.5f)
v.target shouldBe .5f
}
it should "interpolate from 0 -> .5 -> 1 (seeked twice)" in {
val v = AnimationTarget(0f)
val t = Tween(v, 0f, 1f)
t.seekTo(.5f)
v.target shouldBe .5f
t.seekTo(1f)
v.target shouldBe 1f
}
"A SeqTimeline" should "interpolate (0,0,0) to (1,.5,0) when seeked halfway" in {
val v1 = AnimationTarget(0f)
val v2 = AnimationTarget(0f)
val v3 = AnimationTarget(0f)
val t = SeqTimeline(
Tween(v1, 0f, 1f),
Tween(v2, 0f, 1f),
Tween(v3, 0f, 1f)
)
t.seekTo(t.duration/2f)
v1.target shouldBe 1f
v2.target shouldBe .5f
v3.target shouldBe 0f
}
it should "interpolate (0,0,0) to (1,1,.5) when seeked to 5/6ths" in {
val v1 = AnimationTarget(0f)
val v2 = AnimationTarget(0f)
val v3 = AnimationTarget(0f)
val t = SeqTimeline(
Tween(v1, 0f, 1f),
Tween(v2, 0f, 1f),
Tween(v3, 0f, 1f)
)
t.seekTo(t.duration * (5f/6f))
v1.target shouldBe 1f
v2.target shouldBe 1f
v3.target shouldBe .5f
}
it should "interpolate (0,0,0) to (1,1,1) and back to (0,0,0) when seeked 0->1->0" in {
val v1 = AnimationTarget(0f)
val v2 = AnimationTarget(0f)
val v3 = AnimationTarget(0f)
val t = SeqTimeline(
Tween(v1, 0f, 1f),
Tween(v2, 0f, 1f),
Tween(v3, 0f, 1f)
)
t.seekTo(t.duration)
v1.target shouldBe 1f
v2.target shouldBe 1f
v3.target shouldBe 1f
t.seekTo(0f)
v1.target shouldBe 0f
v2.target shouldBe 0f
v3.target shouldBe 0f
}
it should "interpolate (0,0,0) to (1,1,0) on an edge" in {
val v1 = AnimationTarget(0f)
val v2 = AnimationTarget(0f)
val v3 = AnimationTarget(0f)
val t = SeqTimeline(
Tween(v1, 0f, 1f),
Tween(v2, 0f, 1f),
Tween(v3, 0f, 1f)
)
t.seekTo(t.duration/3f*2f)
v1.target shouldBe 1f
v2.target shouldBe 1f
v3.target shouldBe 0f
}
it should "interpolate (0,0,0)->(1,1,0)->(1,0,0) on animation edges" in {
val v1 = AnimationTarget(0f)
val v2 = AnimationTarget(0f)
val v3 = AnimationTarget(0f)
val t = SeqTimeline(
Tween(v1, 0f, 1f),
Tween(v2, 0f, 1f),
Tween(v3, 0f, 1f)
)
t.seekTo(t.duration/3f*2f)
t.seekTo(t.duration/3f)
v1.target shouldBe 1f
v2.target shouldBe 0f
v3.target shouldBe 0f
}
} | ArcticLight/ScalaTween | src/test/scala/me/arcticlight/animations/TweenSpec.scala | Scala | mit | 2,995 |
package models.domain.redis
import play.api.libs.json._
import scala.Some
/**
* @author kamekoopa
*/
class Informations(val informations: Seq[Information])
object Informations {
import scala.language.implicitConversions
implicit def informationsToSeq(informations: Informations) = informations.informations
def apply(lines: Array[String]) = {
val infos:Seq[Information] = lines
.map(line => Information(line))
.collect({ case Some(i) => i })
.toSeq
new Informations(infos)
}
}
import models.domain.redis.InformationWrites._
object InformationsWrites {
implicit object DefaultInformationsWrites extends Writes[Informations]{
def writes(o: Informations): JsValue = {
val jsons = o.map ({ i: Information =>
Json.toJson(i)(DefaultInformationWrites)
})
Json.toJson(jsons)
}
}
}
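// Illustrative usage sketch, not part of the original source. `lines` is an assumed
// Array[String] of Redis INFO output:
//   import models.domain.redis.InformationsWrites._
//   val json: JsValue = Json.toJson(Informations(lines))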
| kamekoopa/redis-miruo | app/models/domain/redis/Informations.scala | Scala | apache-2.0 | 856 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package network
package server
trait MessageHandlerRegistryComponent {
val messageHandlerRegistry: MessageHandlerRegistry
}
private case class MessageHandlerEntry[RequestMsg, ResponseMsg]
(is: InputSerializer[RequestMsg, ResponseMsg], os: OutputSerializer[RequestMsg, ResponseMsg], handler: RequestMsg => ResponseMsg)
class MessageHandlerRegistry {
@volatile private var handlerMap =
Map.empty[String, MessageHandlerEntry[_ <: Any, _ <: Any]]
def registerHandler[RequestMsg, ResponseMsg](handler: RequestMsg => ResponseMsg)
(implicit is: InputSerializer[RequestMsg, ResponseMsg], os: OutputSerializer[RequestMsg, ResponseMsg]) {
if(handler == null) throw new NullPointerException
handlerMap += (is.requestName -> MessageHandlerEntry(is, os, handler))
}
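  // Illustrative usage sketch, not part of the original source. `Ping` and `Pong` are assumed
  // message types with InputSerializer/OutputSerializer instances in implicit scope:
  //   messageHandlerRegistry.registerHandler { ping: Ping => Pong(ping.timestamp) }
  //   val handler = messageHandlerRegistry.handlerFor[Ping, Pong](ping)   // dispatch side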
@throws(classOf[InvalidMessageException])
def inputSerializerFor[RequestMsg, ResponseMsg](messageName: String): InputSerializer[RequestMsg, ResponseMsg] = {
handlerMap.get(messageName).map(_.is)
.getOrElse(throw buildException(messageName))
.asInstanceOf[InputSerializer[RequestMsg, ResponseMsg]]
}
@throws(classOf[InvalidMessageException])
def outputSerializerFor[RequestMsg, ResponseMsg](messageName: String): OutputSerializer[RequestMsg, ResponseMsg] = {
handlerMap.get(messageName).map(_.os)
.getOrElse(throw buildException(messageName))
.asInstanceOf[OutputSerializer[RequestMsg, ResponseMsg]]
}
@throws(classOf[InvalidMessageException])
def handlerFor[RequestMsg, ResponseMsg](request: RequestMsg)
(implicit is: InputSerializer[RequestMsg, ResponseMsg]): RequestMsg => ResponseMsg = {
handlerFor[RequestMsg, ResponseMsg](is.requestName)
}
@throws(classOf[InvalidMessageException])
def handlerFor[RequestMsg, ResponseMsg](messageName: String): RequestMsg => ResponseMsg = {
handlerMap.get(messageName).map(_.handler)
.getOrElse(throw buildException(messageName))
.asInstanceOf[RequestMsg => ResponseMsg]
}
def buildException(messageName: String) =
new InvalidMessageException("%s is not a registered method. Methods registered are %s".format(messageName, "(" + handlerMap.keys.mkString(",") + ")"))
} | linkedin-sna/norbert | network/src/main/scala/com/linkedin/norbert/network/server/MessageHandlerRegistryComponent.scala | Scala | apache-2.0 | 2,886 |
package com.arcusys.learn.liferay.update.version300
import com.arcusys.learn.liferay.LiferayClasses.LUpgradeProcess
import com.arcusys.learn.liferay.update.SlickDBContext
import com.arcusys.learn.liferay.update.version300.migrations.CertificateMemberMigration
import com.arcusys.valamis.web.configuration.ioc.Configuration
import com.escalatesoft.subcut.inject.BindingModule
class DBUpdater3002(val bindingModule: BindingModule)
extends LUpgradeProcess
with SlickDBContext{
override def getThreshold = 3002
def this() = this(Configuration)
override def doUpgrade(): Unit = {
new CertificateMemberMigration(db, driver).migrate()
}
}
| igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version300/DBUpdater3002.scala | Scala | gpl-3.0 | 655 |
/**
* (c) Copyright 2012 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.testing.fakehtable
import scala.collection.JavaConverters.asScalaBufferConverter
import org.apache.commons.codec.binary.Hex
import org.apache.hadoop.hbase.HTableDescriptor
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.HTable
import org.junit.Assert
import org.junit.Test
/** Tests for the FakeHBase class. */
class TestFakeHBase {
/** Test the basic API of FakeHBase. */
@Test
def testFakeHBase(): Unit = {
val hbase = new FakeHBase()
val desc = new HTableDescriptor("table-name")
hbase.Admin.createTable(desc)
val tables = hbase.Admin.listTables()
Assert.assertEquals(1, tables.length)
Assert.assertEquals("table-name", tables(0).getNameAsString())
}
/** Test the fake implementation of HBaseAdmin.getTableRegions(). */
@Test
def testSimpleRegionSplit(): Unit = {
val hbase = new FakeHBase()
val desc = new HTableDescriptor("table-name")
hbase.Admin.createTable(desc, null, null, numRegions = 2)
val regions = hbase.Admin.getTableRegions("table-name".getBytes).asScala
Assert.assertEquals(2, regions.size)
assert(regions.head.getStartKey.isEmpty)
assert(regions.last.getEndKey.isEmpty)
for (i <- 0 until regions.size - 1) {
Assert.assertEquals(
regions(i).getEndKey.toSeq,
regions(i + 1).getStartKey.toSeq)
}
Assert.assertEquals(
"7fffffffffffffffffffffffffffffff",
Hex.encodeHexString(regions(0).getEndKey))
}
/** Tests that FakeHTable instances appear as valid instances of HTable. */
@Test
def testFakeHTableAsInstanceOfHTable(): Unit = {
val hbase = new FakeHBase()
val desc = new HTableDescriptor("table")
hbase.Admin.createTable(desc)
val conf = HBaseConfiguration.create()
val htable: HTable = hbase.InterfaceFactory.create(conf, "table").asInstanceOf[HTable]
val locations = htable.getRegionLocations()
Assert.assertEquals(1, locations.size)
val location = htable.getRegionLocation("row key")
Assert.assertEquals(locations.keySet.iterator.next, location.getRegionInfo)
}
@Test
def testAdminFactory(): Unit = {
val hbase = new FakeHBase()
val conf = HBaseConfiguration.create()
val admin = hbase.AdminFactory.create(conf)
admin.close()
}
}
| kijiproject/fake-hbase | src/test/scala/org/kiji/testing/fakehtable/TestFakeHBase.scala | Scala | apache-2.0 | 3,022 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.server.middleware
import cats.effect.IO
import cats.effect.testkit.TestContext
import cats.implicits._
import org.http4s.Http4sSuite
import org.http4s.HttpApp
import org.http4s.Request
import org.http4s.Status
import org.http4s.dsl.io._
import org.http4s.server.middleware.Throttle._
import org.http4s.syntax.all._
import scala.concurrent.duration._
class ThrottleSuite extends Http4sSuite {
test("LocalTokenBucket should contain initial number of tokens equal to specified capacity") {
val someRefillTime = 1234.milliseconds
val capacity = 5
val createBucket =
TokenBucket.local[IO](capacity, someRefillTime)
createBucket.flatMap { testee =>
val takeFiveTokens: IO[List[TokenAvailability]] =
(1 to 5).toList.traverse(_ => testee.takeToken)
val checkTokensUpToCapacity =
takeFiveTokens.map(tokens => tokens.contains(TokenAvailable))
(checkTokensUpToCapacity, testee.takeToken.map(_.isInstanceOf[TokenUnavailable]))
.mapN(_ && _)
}.assert
}
test("LocalTokenBucket should add another token at specified interval when not at capacity") {
val ctx = TestContext()
val capacity = 1
val createBucket =
TokenBucket.local[IO](capacity, 100.milliseconds)
val takeTokenAfterRefill = createBucket.flatMap { testee =>
testee.takeToken *> IO.sleep(101.milliseconds) *>
testee.takeToken
}
takeTokenAfterRefill
.map { result =>
ctx.advanceAndTick(101.milliseconds)
result
}
.assertEquals(TokenAvailable)
}
test("LocalTokenBucket should not add another token at specified interval when at capacity") {
val ctx = TestContext()
val capacity = 5
val createBucket =
TokenBucket.local[IO](capacity, 100.milliseconds)
val takeExtraToken = createBucket.flatMap { testee =>
val takeFiveTokens: IO[List[TokenAvailability]] = (1 to 5).toList.traverse { _ =>
testee.takeToken
}
IO.sleep(300.milliseconds) >> takeFiveTokens >> testee.takeToken
}
takeExtraToken
.map { result =>
ctx.advanceAndTick(300.milliseconds)
result
}
.map(_.isInstanceOf[TokenUnavailable])
.assert
}
test(
"LocalTokenBucket should only return a single token when only one token available and there are multiple concurrent requests"
) {
val capacity = 1
val createBucket =
TokenBucket.local[IO](capacity, 100.milliseconds)
val takeTokensSimultaneously = createBucket.flatMap { testee =>
(1 to 5).toList.parTraverse(_ => testee.takeToken)
}
takeTokensSimultaneously
.map { result =>
result.count(_ == TokenAvailable)
}
.assertEquals(1)
}
test(
"LocalTokenBucket should return the time until the next token is available when no token is available".flaky
) {
val ctx = TestContext()
val capacity = 1
val createBucket =
TokenBucket.local[IO](capacity, 100.milliseconds)
val takeTwoTokens = createBucket.flatMap { testee =>
testee.takeToken *> IO.sleep(75.milliseconds) *> testee.takeToken
}
takeTwoTokens.map { result =>
ctx.advanceAndTick(75.milliseconds)
result match {
case TokenUnavailable(t) => t.exists(_ <= 25.milliseconds)
case _ => false
}
}.assert
}
private val alwaysOkApp = HttpApp[IO] { _ =>
Ok()
}
test("Throttle / should allow a request to proceed when the rate limit has not been reached") {
val limitNotReachedBucket = new TokenBucket[IO] {
override def takeToken: IO[TokenAvailability] = TokenAvailable.pure[IO]
}
val testee = Throttle(limitNotReachedBucket, defaultResponse[IO] _)(alwaysOkApp)
val req = Request[IO](uri = uri"/")
testee(req).map(_.status).assertEquals(Status.Ok)
}
test(" Throttle / should deny a request when the rate limit had been reached") {
val limitReachedBucket = new TokenBucket[IO] {
override def takeToken: IO[TokenAvailability] = TokenUnavailable(None).pure[IO]
}
val testee = Throttle(limitReachedBucket, defaultResponse[IO] _)(alwaysOkApp)
val req = Request[IO](uri = uri"/")
testee(req).map(_.status).assertEquals(Status.TooManyRequests)
}
}
| http4s/http4s | server/shared/src/test/scala/org/http4s/server/middleware/ThrottleSuite.scala | Scala | apache-2.0 | 4,843 |