code stringlengths 5-1M | repo_name stringlengths 5-109 | path stringlengths 6-208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5-1M |
---|---|---|---|---|---|
package sample.persistence
import scala.concurrent.duration._
import akka.actor._
import akka.persistence._
object ViewExample extends App {
class ExamplePersistentActor extends PersistentActor {
override def persistenceId = "sample-id-4"
var count = 1
def receiveCommand: Receive = {
case payload: String =>
println(s"persistentActor received ${payload} (nr = ${count})")
persist(payload + count) { evt =>
count += 1
}
}
def receiveRecover: Receive = {
case _: String => count += 1
}
}
class ExampleView extends PersistentView {
private var numReplicated = 0
override def persistenceId: String = "sample-id-4"
override def viewId = "sample-view-id-4"
def receive = {
case "snap" =>
println(s"view saving snapshot")
saveSnapshot(numReplicated)
case SnapshotOffer(metadata, snapshot: Int) =>
numReplicated = snapshot
println(s"view received snapshot offer ${snapshot} (metadata = ${metadata})")
case payload if isPersistent =>
numReplicated += 1
println(s"view replayed event ${payload} (num replicated = ${numReplicated})")
case SaveSnapshotSuccess(metadata) =>
println(s"view saved snapshot (metadata = ${metadata})")
case SaveSnapshotFailure(metadata, reason) =>
println(s"view snapshot failure (metadata = ${metadata}), caused by ${reason}")
case payload =>
println(s"view received other message ${payload}")
}
}
val system = ActorSystem("example")
val persistentActor = system.actorOf(Props(classOf[ExamplePersistentActor]))
val view = system.actorOf(Props(classOf[ExampleView]))
import system.dispatcher
system.scheduler.schedule(Duration.Zero, 2.seconds, persistentActor, "scheduled")
system.scheduler.schedule(Duration.Zero, 5.seconds, view, "snap")
}
| writeonly/scalaservices | akka-sample/src/main/scala/sample/persistence/ViewExample.scala | Scala | unlicense | 1,891 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.http.model
import scala.collection.JavaConverters._
import io.gatling.commons.util.StringHelper._
import io.netty.buffer.Unpooled
import io.netty.handler.codec.http._
import org.asynchttpclient.netty.util.ByteBufUtils
object SafeHttpResponse {
def fromNettyResponse(nettyResponse: FullHttpResponse): SafeHttpResponse =
SafeHttpResponse(
nettyResponse.protocolVersion,
nettyResponse.status,
nettyResponse.headers,
nettyResponse.trailingHeaders,
ByteBufUtils.byteBuf2Bytes(nettyResponse.content)
)
}
case class SafeHttpResponse(
httpVersion: HttpVersion,
status: HttpResponseStatus,
headers: HttpHeaders,
trailingHeaders: HttpHeaders,
body: Array[Byte]
) {
def toNettyResponse: FullHttpResponse = {
val response = new DefaultFullHttpResponse(httpVersion, status, Unpooled.wrappedBuffer(body))
response.headers.set(headers)
response.trailingHeaders.set(trailingHeaders)
response
}
def summary: String =
s"""$httpVersion $status
|${(headers.asScala ++ trailingHeaders.asScala).map { entry => s"${entry.getKey}: ${entry.getValue}" }.mkString(Eol)}""".stripMargin
}
| timve/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/http/model/SafeHttpResponse.scala | Scala | apache-2.0 | 1,839 |
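A minimal round-trip sketch for the conversions above, using only standard Netty types (DefaultHttpHeaders, HttpVersion.HTTP_1_1, HttpResponseStatus.OK) and assuming the same imports as SafeHttpResponse.scala:
// Build a SafeHttpResponse by hand, convert it to a Netty FullHttpResponse,
// then read it back with fromNettyResponse and print the summary.
val safe = SafeHttpResponse(
  HttpVersion.HTTP_1_1,
  HttpResponseStatus.OK,
  new DefaultHttpHeaders().add("Content-Type", "text/plain"),
  new DefaultHttpHeaders(),
  "ok".getBytes(java.nio.charset.StandardCharsets.UTF_8)
)
val netty: FullHttpResponse = safe.toNettyResponse
val roundTripped = SafeHttpResponse.fromNettyResponse(netty)
println(roundTripped.summary)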
class Bippo {
def length: Int = 123
class Tree
}
package object p1 {
class A
implicit class B(val s: String) { def bippy = s }
val c: Bippo = new Bippo
type D = String
}
package object p2 {
class A
implicit class B(val s: String) { def bippy = s }
val c: Bippo = new Bippo
type D = Int
}
trait NoWarn {
{
import p1._ // no warn
println("abc".bippy)
}
{
import p1._ // no warn
println(new A)
}
{
import p1.B // no warn
println("abc".bippy)
}
{
import p1._ // no warn
import c._ // no warn
println(length)
}
{
import p1._ // no warn
import c._ // no warn
val x: Tree = null
println(x)
}
{
import p1.D // no warn
val x: D = null
println(x)
}
}
trait Warn {
{
import p1.A // warn
println(123)
}
{
import p1.{ A, B } // warn on A
println("abc".bippy)
}
{
import p1.{ A, B } // warn on both
println(123)
}
{
import p1._ // no warn (technically this could warn, but not worth the effort to unroll unusedness transitively)
import c._ // warn
println(123)
}
{
import p1._ // warn
println(123)
}
{
class Tree
import p1._ // no warn
import c._ // warn
val x: Tree = null
println(x)
}
{
import p1.c._ // warn
println(123)
}
}
trait Nested {
{
import p1._ // warn
trait Warn { // warn about unused local trait for good measure
import p2._
println(new A)
println("abc".bippy)
}
println("")
}
{
import p1._ // no warn
trait NoWarn {
import p2.B // no warn
println("abc".bippy)
println(new A)
}
println(new NoWarn { })
}
{
import p1.A // warn
trait Warn {
import p2.A
println(new A)
}
println(new Warn { })
}
}
| som-snytt/dotty | tests/untried/neg/warn-unused-imports.scala | Scala | apache-2.0 | 1,841 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.console
import _root_.org.fusesource.scalate.test.FunSuiteSupport
class SourceLineTest extends FunSuiteSupport {
val line = SourceLine(1, "abcd")
test("split line") {
assertResult(("", "a", "bcd")) { line.splitOnCharacter(0) }
assertResult(("a", "b", "cd")) { line.splitOnCharacter(1) }
assertResult(("ab", "c", "d")) { line.splitOnCharacter(2) }
assertResult(("abc", "d", "")) { line.splitOnCharacter(3) }
assertResult(("abcd", "", "")) { line.splitOnCharacter(4) }
assertResult(("abcd", "", "")) { line.splitOnCharacter(5) }
}
}
| scalate/scalate | scalate-jersey/src/test/scala/org/fusesource/scalate/console/SourceLineTest.scala | Scala | apache-2.0 | 1,335 |
package org.http4s
package server
package middleware
import java.nio.charset.StandardCharsets
import org.http4s.server.middleware.EntityLimiter.EntityTooLarge
import org.specs2.mutable.Specification
import scodec.bits.ByteVector
import scalaz.stream.Process.emit
import Method._
import Status._
class EntityLimiterSpec extends Specification {
import Http4s._
val s = HttpService {
case r: Request if r.uri.path == "/echo" => r.decode[String](Response(Ok).withBody)
}
val b = emit(ByteVector.view("hello".getBytes(StandardCharsets.UTF_8)))
"EntityLimiter" should {
"Allow reasonable entities" in {
EntityLimiter(s, 100).apply(Request(POST, uri("/echo"), body = b))
.map(_ => -1)
.run must be_==(-1)
}
"Limit the maximum size of an EntityBody" in {
EntityLimiter(s, 3).apply(Request(POST, uri("/echo"), body = b))
.map(_ => -1)
.handle { case EntityTooLarge(i) => i }
.run must be_==(3)
}
"Chain correctly with other HttpServices" in {
val s2 = HttpService {
case r: Request if r.uri.path == "/echo2" => r.decode[String](Response(Ok).withBody)
}
val st = EntityLimiter(s, 3)
(st.apply(Request(POST, uri("/echo2"), body = b))
.map(_ => -1)
.run must be_==(-1)) &&
(st.apply(Request(POST, uri("/echo"), body = b))
.map(_ => -1)
.handle { case EntityTooLarge(i) => i }
.run must be_==(3))
}
}
}
| hvesalai/http4s | server/src/test/scala/org/http4s/server/middleware/EntityLimiterSpec.scala | Scala | apache-2.0 | 1,479 |
package com.overviewdocs.query
import scala.collection.immutable
/** A way of searching for documents using a search index.
*
* This is modeled after ElasticSearch's (JSON) Query DSL. See
* http://www.elastic.co/guide/en/elasticsearch/reference/1.x/query-dsl.html
*/
sealed trait Query {
/** Returns an equivalent Query, with AndQuery(AndQuery(a, b), c) rewritten
* to AndQuery(a, b, c).
*/
def flatten: Query
}
sealed trait BooleanQuery extends Query
case class AndQuery(nodes: immutable.Seq[Query]) extends Query with BooleanQuery {
override def flatten = AndQuery(nodes.flatMap(_ match {
case AndQuery(children) => {
// recurse
children.flatMap(_.flatten match {
case AndQuery(subChildren) => subChildren
case query => Vector(query)
})
}
case query => Vector(query)
}))
}
case class OrQuery(nodes: immutable.Seq[Query]) extends Query with BooleanQuery {
override def flatten = OrQuery(nodes.flatMap(_ match {
case OrQuery(children) => {
// recurse
children.flatMap(_.flatten match {
case OrQuery(subChildren) => subChildren
case query => Vector(query)
})
}
case query => Vector(query)
}))
}
case class NotQuery(node: Query) extends Query with BooleanQuery {
override def flatten = NotQuery(node.flatten)
}
case object AllQuery extends Query with BooleanQuery {
override def flatten = this
}
sealed trait FieldQuery extends Query {
val field: Field
override def flatten = this
}
case class PhraseQuery(field: Field, phrase: String) extends Query with FieldQuery
case class PrefixQuery(field: Field, prefix: String) extends Query with FieldQuery
case class FuzzyTermQuery(field: Field, term: String, fuzziness: Option[Int]) extends Query with FieldQuery
case class ProximityQuery(field: Field, phrase: String, slop: Int) extends Query with FieldQuery
/** A regular-expression query.
*
* The regex hasn't actually been parsed: that will happen during execution and
* cause a warning (and match no documents) if it's invalid. (Rationale: the
* query language doesn't have any invalid input.)
*/
case class RegexQuery(field: Field, regex: String) extends Query with FieldQuery
object Query {
def walkFields(query: Query)(f: Field => Unit): Unit = query match {
case AllQuery => {}
case AndQuery(nodes) => { nodes.foreach(q => walkFields(q)(f)) }
case OrQuery(nodes) => { nodes.foreach(q => walkFields(q)(f)) }
case NotQuery(p) => walkFields(p)(f)
case fq: FieldQuery => f(fq.field)
}
}
| overview/overview-server | common/src/main/scala/com/overviewdocs/query/Query.scala | Scala | agpl-3.0 | 2,556 |
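As a small illustration of the flatten rewrite documented on the Query trait above, the sketch below builds a nested AndQuery and checks that it collapses to a single level; the field parameter is a stand-in, since Field is defined elsewhere in com.overviewdocs.query:
// Sketch: AndQuery(AndQuery(a, b), c) flattens to AndQuery(a, b, c).
def flattenExample(field: Field): Unit = {
  val a = PhraseQuery(field, "cat")
  val b = PhraseQuery(field, "dog")
  val c = PhraseQuery(field, "bird")
  val nested: Query = AndQuery(Vector(AndQuery(Vector(a, b)), c))
  assert(nested.flatten == AndQuery(Vector(a, b, c)))
}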
package org.vertx.scala.router
import java.io.FileNotFoundException
import java.net.URLEncoder
import org.vertx.scala.FutureOps
import org.vertx.scala.core._
import org.vertx.scala.core.file.FileProps
import org.vertx.scala.core.http.{HttpServerRequest, HttpServerResponse}
import FutureOps._
import org.vertx.scala.router.routing._
import scala.concurrent.Future
import scala.util.{Failure, Success}
/**
* The Router trait can be extended to give access to an easy way to write nice routes for your
* HTTP server.
*
* @author <a href="http://www.campudus.com/">Joern Bernhardt</a>
*/
trait Router extends (HttpServerRequest => Unit) {
this: VertxAccess =>
type Routing = PartialFunction[RouteMatch, Reply]
/**
* Override this method to define your routes for the request handler.
*
* @param req The HttpServerRequest that came in.
* @return A partial function that matches all routes.
*/
def routes(implicit req: HttpServerRequest): Routing
/**
* Override this method for your custom authentication check.
*
* @param req The HttpServerRequest to check.
* @return A future telling whether the request is authorized (true) or not (false / Exception).
*/
protected def checkAuthentication(req: HttpServerRequest): Future[Boolean] = {
Future.successful(false)
}
/** The working directory */
protected def workingDirectory: String = "./"
/** File to send if the given file in SendFile was not found. */
protected def notFoundFile: String = "404.html"
/**
* Use this to check for authentication in your routes.
*
* @param replyIfAuthed The reply to send if the request is successfully authed.
* @param req The HttpServerRequest that was sent.
* @return The given replyIfAuthed or an Error reply that the request wasn't authed.
*/
final protected def authed(replyIfAuthed: => Reply)(implicit req: HttpServerRequest): Reply = {
def unauthorized(ex: Option[Throwable]): Reply = ex match {
case Some(cause: RouterException) => Error(cause)
case Some(cause) => Error(RouterException(message = "Unauthorized", cause = cause, id = "UNAUTHORIZED", statusCode = 401))
case None => Error(RouterException(message = "Unauthorized", id = "UNAUTHORIZED", statusCode = 401))
}
req.pause()
AsyncReply(checkAuthentication(req) map { authenticated =>
req.resume()
if (authenticated) replyIfAuthed
else unauthorized(None)
} recover {
case x =>
req.resume()
unauthorized(Some(x))
})
}
private val noRouteMatch: RouteMatch => Reply =
_ => Error(RouterException(message = "No route matched.", id = "NO_ROUTE", statusCode = 404))
private def matcherFor(routeMatch: RouteMatch, req: HttpServerRequest): Reply = {
val pf: PartialFunction[RouteMatch, Reply] = routes(req)
val tryAllThenNoRouteMatch: Function[RouteMatch, Reply] = _ => pf.applyOrElse(All(req.path()), noRouteMatch)
pf.applyOrElse(routeMatch, tryAllThenNoRouteMatch)
}
private def fileExists(file: String): Future[String] = asyncResultToFuture {
tryFn: ResultHandler[Boolean] => vertx.fileSystem.exists(file, tryFn)
} map {
case true => file
case false => throw new FileNotFoundException(file)
}
private def addIndexToDirName(path: String): String =
if (path.endsWith("/")) path + "index.html"
else path + "/index.html"
private def directoryToIndexFile(path: String): Future[String] = asyncResultToFuture {
tryFn: ResultHandler[FileProps] => vertx.fileSystem.lprops(path, tryFn)
} flatMap { fp =>
if (fp.isDirectory) fileExists(addIndexToDirName(path))
else Future.successful(path)
}
private def urlEncode(str: String) = URLEncoder.encode(str, "UTF-8")
private def endResponse(resp: HttpServerResponse, reply: SyncReply): Unit = {
reply match {
case NoBody =>
resp.end()
case Ok(js) =>
resp.setStatusCode(200)
resp.setStatusMessage("OK")
resp.putHeader("Content-type", "application/json")
resp.end(js.encode())
case SendFile(path, absolute) =>
(for {
exists <- fileExists(if (absolute) path else s"$workingDirectory/$path")
file <- directoryToIndexFile(exists)
} yield {
logger.info(s"Serving file $file after receiving request for: $path")
resp.sendFile(file, notFoundFile)
}) recover {
case ex: FileNotFoundException =>
endResponse(resp, Error(RouterException("File not found", ex, "errors.routing.fileNotFound", 404)))
case ex =>
endResponse(resp, Error(RouterException("send file exception", ex, "errors.routing.sendFile", 500)))
}
case Error(RouterException(_, cause, id, 404)) =>
logger.info(s"File not found", cause)
resp.setStatusCode(404)
resp.setStatusMessage("NOT FOUND")
resp.sendFile(notFoundFile)
case Error(RouterException(message, cause, id, statusCode)) =>
logger.warn(s"Error $statusCode: $message", cause)
resp.setStatusCode(statusCode)
resp.setStatusMessage(id)
message match {
case null => resp.end()
case msg => resp.end(msg)
}
}
}
private def sendReply(req: HttpServerRequest, reply: Reply): Unit = {
logger.debug(s"Sending back reply as response: $reply")
reply match {
case AsyncReply(future) =>
future.onComplete {
case Success(r) => sendReply(req, r)
case Failure(x: RouterException) => endResponse(req.response(), errorReplyFromException(x))
case Failure(x: Throwable) => endResponse(req.response(), Error(routerException(x)))
}
case SetCookie(key, value, nextReply) =>
req.response().headers().addBinding("Set-Cookie", s"${urlEncode(key)}=${urlEncode(value)}")
sendReply(req, nextReply)
case Header(key, value, nextReply) =>
req.response().putHeader(key, value)
sendReply(req, nextReply)
case x: SyncReply => endResponse(req.response(), x)
}
}
private def routerException(ex: Throwable): RouterException = ex match {
case x: RouterException => x
case x => RouterException(message = x.getMessage, cause = x)
}
private def errorReplyFromException(ex: RouterException) = Error(ex)
/**
* To be able to use this in `HttpServer.requestHandler()`, the Router needs to be a `HttpServerRequest => Unit`. This
* apply method does the wiring so that `override def routes(...) = ...` can be used to define the routes.
*
* @param req The HttpServerRequest that comes in.
*/
override final def apply(req: HttpServerRequest): Unit = {
logger.info(s"${req.method()}-Request: ${req.uri()}")
val reply = try {
val path = req.path()
val routeMatch: RouteMatch = req.method() match {
case "GET" => Get(path)
case "PUT" => Put(path)
case "POST" => Post(path)
case "DELETE" => Delete(path)
case "OPTIONS" => Options(path)
case "HEAD" => Head(path)
case "TRACE" => Trace(path)
case "PATCH" => Patch(path)
case "CONNECT" => Connect(path)
}
matcherFor(routeMatch, req)
} catch {
case ex: RouterException =>
errorReplyFromException(ex)
case ex: Throwable =>
logger.warn(s"Uncaught Exception for request ${req.absoluteURI()}", ex)
errorReplyFromException(routerException(ex))
}
sendReply(req, reply)
}
}
| galderz/mod-lang-scala | src/main/scala/org/vertx/scala/router/Router.scala | Scala | apache-2.0 | 7,488 |
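A minimal sketch of a concrete Router built on the trait above. It assumes the same imports as Router.scala, that Get, NoBody and SendFile from org.vertx.scala.router.routing pattern-match and construct the way they are used in apply/endResponse, and it leaves the abstract VertxAccess members to the enclosing verticle:
// Routes are just a partial function from RouteMatch to Reply.
abstract class PingRouter extends Router with VertxAccess {
  override def routes(implicit req: HttpServerRequest): Routing = {
    case Get("/ping")  => NoBody                        // 200 with an empty body
    case Get("/index") => SendFile("index.html", false) // resolved against workingDirectory
    case _             => authed(NoBody)                // everything else requires authentication
  }
}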
import akka.http.scaladsl.model.ContentTypes.`application/json`
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import com.delprks.productservicesprototype.domain.Offer
import com.delprks.productservicesprototype.domain.response.{ErrorResponse, Responses}
import util.AbstractOffersSpec
class MultipleOffersSpec extends AbstractOffersSpec {
"The service" should {
"return a response with offers" in new OffersScope {
insertOffer()
Get("/offers") ~> Route.seal(routes) ~> check {
status must be equalTo StatusCodes.OK
contentType must be equalTo `application/json`
responseAs[Responses[Offer]].total must be > 0
}
}
"return a response with the requested limit" in new OffersScope {
insertOffer()
Get("/offers?limit=1") ~> Route.seal(routes) ~> check {
status must be equalTo StatusCodes.OK
responseAs[Responses[Offer]].total must be equalTo 1
}
}
"return a 400 Bad Request when a limit greater than the maximum is requested" in new OffersScope {
Get("/offers?limit=101") ~> Route.seal(routes) ~> check {
status must be equalTo StatusCodes.BadRequest
responseAs[ErrorResponse].errors.size must be > 0
}
}
}
}
| delprks/product-services-prototype | src/it/scala/MultipleOffersSpec.scala | Scala | mit | 1,282 |
package edu.mit.csail.cap.query
package analysis
/** Query a trace for a feature from a demo trace */
sealed trait CallQuery extends Ordered[CallQuery] {
/** Identifying, descriptive methods */
def methods: Set[Method]
/**
* Returns a stream of seed events for matches.
* The first event in a match is the ancestor of all events in a match.
*/
def search(t: BoundaryAnalysis): Traversable[List[Event]]
/** Quick check before search (optimization) */
def has(t: BoundaryAnalysis): Boolean
/** Find number of matches */
def count(t: BoundaryAnalysis): Int
override def compare(that: CallQuery) =
this.toString.compare(that.toString)
/** Select events happening under this query (for extends and invokes) */
def under(t: BoundaryAnalysis) =
t.select(Or({
for (e :: _ <- search(t)) yield e.asInstanceOf[Enter].contains
}.toList))
/** Select stack traces for this query (not including the element itself) */
def over(t: BoundaryAnalysis) =
t.select(Or({
for (e :: _ <- search(t)) yield StackTrace(e)
}.toList))
}
object EmptyQuery extends CallQuery {
override def methods = Set()
override def search(t: BoundaryAnalysis) = Nil
override def has(t: BoundaryAnalysis) = false
override def count(t: BoundaryAnalysis) = 0
override def toString = ""
}
object CallQuery {
def fromString(meta: Metadata, s: String): CallQuery =
if (s.isEmpty)
EmptyQuery
else s(0) match {
case '+' => Extends(meta.method(s.substring(1).toLong))
case '=' => Invokes(meta.method(s.substring(1).toLong))
case '[' =>
val comma = s.indexOf(',')
NestedQuery(
fromString(meta, s.substring(1, comma)),
fromString(meta, s.substring(comma + 1, s.size - 1)))
case '{' =>
val comma = s.indexOf(',')
Extends(
meta.method(s.substring(1, comma).toLong),
Some(meta.clazz(s.substring(comma + 1, s.size - 1).toLong)))
case 'r' =>
Invokes(meta.method(s.substring(1).toLong), Not(ValueIs(Null)))
case _ => ???
}
}
/** Framework definition with user extensions in the sub-types of the class */
case class Extends(definition: Method, declaration: Option[ClassType] = None) extends CallQuery {
override def methods = Set(definition)
def supertype = declaration.getOrElse(definition.declarer)
def extensions(b: FrameworkBoundary = AllUser): Set[Method] =
definition.overriding(b.isUser, supertype)
override def search(t: BoundaryAnalysis) =
t.member(extensions(t.b)).view.map(List(_))
override def count(t: BoundaryAnalysis) =
t.member(extensions(t.b)).size
override def has(t: BoundaryAnalysis) =
this match {
case Extends(m, None) if m.toString == "java.lang.Object.toString()Ljava/lang/String;" => true
case _ => (t.methods intersect extensions(t.b)).size > 0
}
override def toString =
declaration match {
case None => "+" + definition.id
case Some(t) => "{" + definition.id + "," + t.id + "}"
}
}
/** Simple call to a framework (or its extension) */
case class Invokes(m: Method, filter: Query = True) extends CallQuery {
override def methods = Set(m)
val q = Set(m) ++ m.overriding()
override def search(t: BoundaryAnalysis) =
t.member(q).select(filter).view.collect {
// add return seed as well
case e: Enter => e :: (e.exit match {
case Some(d) => d :: Nil
case None => Nil
})
}
override def count(t: BoundaryAnalysis) =
t.member(q).size
/** TODO Very slow! Not clear why since it should be Lucene-based. Make an under approximation now */
override def has(t: BoundaryAnalysis) =
t.methods(m)
//(t.methods intersect q).size > 0
override def toString = "=" + m.id
}
/** Group call queries by the declaring classes */
case class ClassQuery(clazz: ClassType, queries: Set[CallQuery]) extends CallQuery {
override def methods =
queries.flatMap(_.methods)
override def search(t: BoundaryAnalysis) =
queries.flatMap(_.search(t))
override def count(t: BoundaryAnalysis) =
queries.map(_.count(t)).sum
/** TODO: performance optimization, over approximation */
override def has(t: BoundaryAnalysis) =
true
//queries.exists(_.has(t))
override def toString =
queries.map(_.toString).mkString("(", ",", ")")
}
case class NestedQuery(parent: CallQuery, child: CallQuery) extends CallQuery {
override def methods =
parent.methods ++ child.methods
override def search(t: BoundaryAnalysis) =
for (
r1 <- parent.search(t);
r2 <- child.search(t.select(r1(0).asInstanceOf[Enter].contains))
) yield r1 ::: r2
override def count(t: BoundaryAnalysis) =
search(t).size
/** XXX: over approximation ! */
override def has(t: BoundaryAnalysis) =
parent.has(t) && child.has(t)
//!search(t).isEmpty
override def toString =
"[" + parent + "," + child + "]"
}
case class Score(
// original query
q: CallQuery,
// maximum heuristics; higher -> more important
heuristics: Double = 0,
// minimum depth; lower -> more important
depth: Int = 0,
// # documents / # containing documents or 0; higher -> more specific
IDF: Double = 0,
// boolean for keyword match; higher -> more important
keyword: Int = 0)
/** Distinct score queries */
case class Scores(scores: Set[Score]) {
def idf(target: Group) =
if (target.isEmpty)
this
else {
debug("computing idf")
val n = target.traces.size
val out = Scores(scores.map {
case score =>
val count = target.traces.filter(score.q.has).size;
score.copy(IDF = if (count == 0) 0 else Math.log(n.toDouble / count.toDouble))
})
debug("done")
out
}
def keyword(meta: Metadata, q: String) =
if (q.trim() == "")
this
else {
val index = db.Lucene.index(meta, scores.flatMap(_.q.methods))
val matches = index.search(index.containsOne(q))
Scores(scores.map {
case score =>
score.copy(keyword = (matches intersect score.q.methods).size)
})
}
def declarationClass(q: CallQuery): ClassType = q match {
case q: Invokes => q.m.declarer.sourceClass
case q: Extends => q.supertype.sourceClass
case ClassQuery(clazz, _) => clazz
case EmptyQuery => ???
case NestedQuery(parent, child) => declarationClass(parent)
}
/** Group queries by the defining class */
def groupByClass = {
// extract defining class for each call query
val groups = scores.groupBy(score => declarationClass(score.q))
Scores(groups.map {
case (clazz, scores) => Score(
q = ClassQuery(clazz, scores.map(_.q)),
keyword = scores.map(_.keyword).max,
depth = scores.map(_.depth).min,
IDF = scores.map(_.IDF).max,
heuristics = scores.map(_.heuristics).max)
}.toSet)
}
/** Top-down sorted list */
def sorted =
scores.toList.sortBy(Ranking.lexicographic).reverse
def size = scores.size
}
object Ranking {
// - high is good
def lexicographic(a: Score) = (
// keyword matching first
a.keyword,
// extends first
a.q match { case _: Extends => 2 case _: Invokes => 1 case _ => 0 },
// specific first (absent or common get rank 0)
a.IDF,
// closer to the top get higher rank
-a.depth,
// name heuristic
a.heuristics)
}
object Heuristics {
/** [0,1] priority. Threshold .5 for getters/util, 1 for interesting stuff */
def apply(m: Method): Double = m match {
case m if m.isSynthetic => 0
case m if m.isConstructor || m.isStaticInitializer => .25
case m if m.name == "equals" && m.sig == "(Ljava/lang/Object;)Z" => .25
case m if m.name == "hashCode" && m.sig == "()I" => .25
case m if PrefixMask("java.lang.")(m.declarer) => .4
case m if PrefixMask("java.util.")(m.declarer) => .4
case m if m.name == "getAdapter" && m.sig == "(Ljava/lang/Class;)Ljava/lang/Object;" => .5
case m if m.name.startsWith("get") => .75
case m if m.name.startsWith("has") && m.returnType == BooleanType => .75
case m if m.name.startsWith("is") && m.returnType == BooleanType => .75
case m if m.name.startsWith("can") && m.returnType == BooleanType => .75
case m if m.declarer.name.endsWith("Listener") => .75
case _ => 1.0
}
}
| kyessenov/semeru | src/main/scala/analysis/CallQuery.scala | Scala | gpl-3.0 | 8,394 |
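The fromString parser above defines a compact serialization for call queries: '+id' is an Extends, '=id' an Invokes, '{methodId,classId}' an Extends with an explicit declaring class, 'r' followed by an id an Invokes restricted to non-null return values, and '[parent,child]' a NestedQuery. A tiny hedged sketch (the ids are hypothetical and meta is assumed to resolve them):
// Parse a nested query: Extends of method 12 with an Invokes of method 34 underneath it.
def parseExample(meta: Metadata): CallQuery =
  CallQuery.fromString(meta, "[+12,=34]")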
/** Copyright 2014 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller
import org.apache.spark.SparkContext
/** Mix in and implement this trait if your model cannot be persisted by
* PredictionIO automatically. A companion object extending
* IPersistentModelLoader is required for PredictionIO to load the persisted
* model automatically during deployment.
*
* {{{
* class MyModel extends IPersistentModel[MyParams] {
* def save(id: String, params: MyParams, sc: SparkContext): Boolean = {
* ...
* }
* }
*
* object MyModel extends IPersistentModelLoader[MyParams, MyModel] {
* def apply(id: String, params: MyParams, sc: Option[SparkContext]): MyModel = {
* ...
* }
* }
* }}}
*
* In Java, all you need to do is to implement this interface, and add a static
* method with 3 arguments of type String, [[Params]], and SparkContext.
*
* {{{
* public class MyModel implements IPersistentModel<MyParams>, Serializable {
* ...
* public boolean save(String id, MyParams params, SparkContext sc) {
* ...
* }
*
* public static MyModel load(String id, Params params, SparkContext sc) {
* ...
* }
* ...
* }
* }}}
*
* @tparam AP Algorithm parameters class.
* @see [[IPersistentModelLoader]]
* @group Algorithm
*/
trait IPersistentModel[AP <: Params] {
/** Save the model to some persistent storage.
*
* This method should return true if the model has been saved successfully so
* that PredictionIO knows that it can be restored later during deployment.
* This method should return false if the model cannot be saved (or should
* not be saved due to configuration) so that PredictionIO will re-train the
* model during deployment. All arguments of this method are provided
* automatically by PredictionIO.
*
* @param id ID of the run that trained this model.
* @param params Algorithm parameters that were used to train this model.
* @param sc An Apache Spark context.
*/
def save(id: String, params: AP, sc: SparkContext): Boolean
}
/** Implement an object that extends this trait for PredictionIO to support
* loading a persisted model during serving deployment.
*
* @tparam AP Algorithm parameters class.
* @tparam M Model class.
* @see [[IPersistentModel]]
* @group Algorithm
*/
trait IPersistentModelLoader[AP <: Params, M] {
/** Implement this method to restore a persisted model that extends the
* [[IPersistentModel]] trait. All arguments of this method are provided
* automatically by PredictionIO.
*
* @param id ID of the run that trained this model.
* @param params Algorithm parameters that were used to train this model.
* @param sc An optional Apache Spark context. This will be injected if the
* model was generated by a [[PAlgorithm]].
*/
def apply(id: String, params: AP, sc: Option[SparkContext]): M
}
/** Mix in this trait if your model cannot be persisted to PredictionIO's
* metadata store for any reason and you want to have it persisted to the local
* filesystem instead. These traits contain concrete implementations and need
* not be implemented.
*
* {{{
* class MyModel extends IFSPersistentModel[MyParams] {
* ...
* }
*
* object MyModel extends IFSPersistentModelLoader[MyParams, MyModel] {
* ...
* }
* }}}
*
* @tparam AP Algorithm parameters class.
* @see [[IFSPersistentModelLoader]]
* @group Algorithm
*/
trait IFSPersistentModel[AP <: Params] extends IPersistentModel[AP] {
def save(id: String, params: AP, sc: SparkContext) = {
Utils.save(id, this)
true
}
}
/** Implement an object that extends this trait for PredictionIO to support
* loading a persisted model from local filesystem during serving deployment.
*
* @tparam AP Algorithm parameters class.
* @tparam M Model class.
* @see [[IFSPersistentModel]]
* @group Algorithm
*/
trait IFSPersistentModelLoader[AP <: Params, M]
extends IPersistentModelLoader[AP, M] {
def apply(id: String, params: AP, sc: Option[SparkContext]): M = {
Utils.load(id).asInstanceOf[M]
}
}
| TheDataShed/PredictionIO | core/src/main/scala/controller/IPersistentModel.scala | Scala | apache-2.0 | 4,716 |
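A minimal sketch of the filesystem-backed convenience traits documented above; MyParams and MyModel are hypothetical names, and no methods need to be implemented because IFSPersistentModel and IFSPersistentModelLoader already provide concrete save/apply implementations:
// Hypothetical model persisted to the local filesystem via Utils.save / Utils.load.
class MyParams extends Params
class MyModel(val weights: Map[String, Double])
  extends IFSPersistentModel[MyParams] with Serializable
object MyModel extends IFSPersistentModelLoader[MyParams, MyModel]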
package org.typeclassopedia
package std
import scala.{Conversion, None, Option, Some, StringContext}
import org.typeclassopedia.extras.Show
import scala.Predef.implicitly
import java.lang.{IllegalArgumentException, String}
object Options {
extension[T](t: T) {
def some: Option[T] = Some(t)
}
def none[T]: Option[T] = None
trait OptionFunctor extends Functor[Option] {
extension [A, B](x: Option[A])
override def map(f: A => B): Option[B] = x map f
}
trait OptionPointed extends Pointed[Option] {
extension [A](a: A)
override def point: Option[A] = Some(a)
}
trait OptionCopointed extends Copointed[Option] {
extension[A](f: Option[A])
override def extract: A =
f.getOrElse(throw new IllegalArgumentException("Option cannot be None"))
}
trait OptionApplicative extends Applicative[Option] {
extension [A, B](ma: Option[A])
def <*>(f: Option[A => B]): Option[B] =
for {
m <- ma
g <- f
} yield g(m)
}
trait OptionAlternative extends Alternative[Option] {
def empty[A]: Option[A] = None
extension [A, B](a: Option[A])
override def <|>(b: Option[A]): Option[A] = if (a.isDefined) a else b
}
trait OptionFoldable extends Foldable[Option] {
extension[A, B](value: Option[A])
override def foldMap(f: A => B)(using monoid: Monoid[B]): B =
value.fold(monoid.zero)(f)
}
trait OptionMonad extends Monad[Option] with OptionApplicative {
extension[A, B](ma: Option[A])
override def flatMap(f: A => Option[B]): Option[B] =
ma flatMap f
}
trait OptionComonad extends Comonad[Option] {
extension[A, B](a: Option[A])
override def duplicate: Option[Option[A]] = Option(a)
}
trait OptionMonadPlus extends MonadPlus[Option] {
def mzero[A]: Option[A] = None
extension[A](a: Option[A])
override def mplus(b: Option[A]): Option[A] = a orElse b
}
given [A: Semigroup] as Semigroup[Option[A]] =
new Semigroup[Option[A]] {
extension(a: Option[A])
override def append(b: Option[A]): Option[A] =
(a, b) match {
case (Some(a1), Some(a2)) => Some(a1.append(a2))
case (Some(_), None) => a
case (None, Some(_)) => b
case (None, None) => None
}
}
trait OptionTraverse extends Traversable[Option] with OptionFunctor with OptionFoldable {
extension[G[_]: Applicative, A, B](fa: Option[A])
override def traverse(f: A => G[B]): G[Option[B]] = {
val none: Option[B] = None
fa.fold(none.pure)(v => f(v).map((b: B) => Option(b)))
}
}
trait OptionShow {
given [T: Show] as Show[Option[T]] =
new Show[Option[T]] {
def show(option: Option[T]): String =
option.fold("None")(v => s"Option(${v.show})")
}
}
trait OptionAll
extends OptionPointed
with OptionApplicative
with OptionCopointed
with OptionTraverse
with OptionMonad
with OptionComonad
with OptionMonadPlus
with OptionAlternative
with OptionShow
given optionAll as OptionAll
}
| channingwalton/typeclassopedia | src/main/scala/org/typeclassopedia/std/Options.scala | Scala | mit | 3,183 |
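A tiny usage sketch of the object-level helpers above, written in the same pre-3.0 Dotty syntax as the file; only the some extension and none are exercised:
// Lift plain values into Option via the helpers defined directly on Options.
import org.typeclassopedia.std.Options._
val present: Option[Int] = 1.some   // Some(1)
val absent: Option[Int] = none[Int] // None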
package domain.logic
import domain.models.{LunchProvider, LunchOffer}
import org.joda.money.Money
import java.time.LocalDate
import org.scalamock.scalatest.MockFactory
import org.scalatest._
class LunchResolverSaltNPepperSpec extends FlatSpec with Matchers with MockFactory {
it should "resolve offers for week of 2015-06-19" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2015-06-19.html")
val offers = resolver.resolve(url)
offers should have size 24
offers should contain (LunchOffer(0, "Grüne Bohneneintopf mit Kasslerfleisch", date("2015-06-15"), euro("3.90"), Id))
offers should contain (LunchOffer(0, "Hähnchenkeule in Curry-Mango-Rahm, frische Buttermöhren, Kartoffeln", date("2015-06-15"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "Putenstreifen mit Pilzen in Käsesahnesauce auf Spaghetti", date("2015-06-15"), euro("5.70"), Id))
offers should contain (LunchOffer(0, "Sahnemilchreis mit Kirschen, 1 Tasse Kaffee", date("2015-06-15"), euro("5.30"), Id))
offers should contain (LunchOffer(0, "Wochenangebot: Rumpsteak (180gr.), Grillbutter, Pommes frites, Salatbeilage", date("2015-06-15"), euro("6.90"), Id))
offers should contain (LunchOffer(0, "Frische Paprikacremesuppe mit geröstetem Bacon", date("2015-06-16"), euro("3.90"), Id))
offers should contain (LunchOffer(0, "Putenleber, hausgemachter Apfelrotkohl, hausgemachtes Kartoffelpüree", date("2015-06-16"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "Gefüllte Schnitzel „Cordon bleu“, mediterranem Gemüsegratin", date("2015-06-16"), euro("5.70"), Id))
offers should contain (LunchOffer(0, "Blumenkohlgratin, 1 Dessert", date("2015-06-16"), euro("4.90"), Id))
offers should contain (LunchOffer(0, "Wochenangebot: Rumpsteak (180gr.), Grillbutter, Pommes frites, Salatbeilage", date("2015-06-16"), euro("6.90"), Id))
offers should contain (LunchOffer(0, "Erbseneintopf mit Wiener Würstchenscheiben", date("2015-06-17"), euro("3.90"), Id))
offers should contain (LunchOffer(0, "Frische Lachsfiletwürfel auf Bandnudeln, Spinat-Käse-Sauce", date("2015-06-17"), euro("5.70"), Id))
offers should contain (LunchOffer(0, "Hühnerfrikassee mit Butterreis", date("2015-06-17"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "4 Kartoffelpuffer mit Apfelmus", date("2015-06-17"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "Wochenangebot: Rumpsteak (180gr.), Grillbutter, Pommes frites, Salatbeilage", date("2015-06-17"), euro("6.90"), Id))
offers should contain (LunchOffer(0, "Thai Suppe mit Hühnerfleisch und Frischem Koriander", date("2015-06-18"), euro("3.90"), Id))
offers should contain (LunchOffer(0, "Frische Hähnchenbrust auf gebratenen Asianudeln, Kokos-Sauce", date("2015-06-18"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "Paprika-Gulasch oder Champignon-Gulasch mit Nudeln Sauerrahm", date("2015-06-18"), euro("5.70"), Id))
offers should contain (LunchOffer(0, "2 Eier, Senfsauce, Buttergemüse, Kartoffeln, 1 Dessert", date("2015-06-18"), euro("4.90"), Id))
offers should contain (LunchOffer(0, "Wochenangebot: Rumpsteak (180gr.), Grillbutter, Pommes frites, Salatbeilage", date("2015-06-18"), euro("6.90"), Id))
offers should contain (LunchOffer(0, "Spaghetti-Bolognese", date("2015-06-19"), euro("5.20"), Id))
offers should contain (LunchOffer(0, "Kasslersteak mit Pommes frites und Weißkrautsalat", date("2015-06-19"), euro("5.70"), Id))
offers should contain (LunchOffer(0, "Sommerpommes, 1 Dessert", date("2015-06-19"), euro("4.90"), Id))
offers should contain (LunchOffer(0, "Wochenangebot: Rumpsteak (180gr.), Grillbutter, Pommes frites, Salatbeilage", date("2015-06-19"), euro("6.90"), Id))
}
it should "resolve offers for Easter week of 2015-03-30" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2015-03-30.html")
val offers = resolver.resolve(url)
offers should have size 19
offers.filter(_.day == date("2015-03-30")) should have size 5
offers.filter(_.day == date("2015-03-31")) should have size 5
offers.filter(_.day == date("2015-04-01")) should have size 5
offers.filter(_.day == date("2015-04-02")) should have size 4
offers.filter(_.day == date("2015-04-03")) should have size 0
}
it should "resolve offers for week of 2015-05-29" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2015-05-29.html")
val offers = resolver.resolve(url)
offers should have size 19
offers.filter(_.day == date("2015-05-25")) should have size 0
offers.filter(_.day == date("2015-05-26")) should have size 5
offers.filter(_.day == date("2015-05-27")) should have size 5
offers.filter(_.day == date("2015-05-28")) should have size 5
offers.filter(_.day == date("2015-05-29")) should have size 4
}
it should "resolve offers for week of 2015-07-11" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2015-07-11.html")
val offers = resolver.resolve(url)
offers should have size 24
}
it should "resolve offers for week of 2015-08-10" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2015-08-10.html")
val offers = resolver.resolve(url)
offers should have size 24
offers should contain(LunchOffer(0, "Gefülltes Schnitzel „Toskaner Art“ mit Tomatensalat und Pommes frites", date("2015-08-11"), euro("5.70"), Id))
offers should contain(LunchOffer(0, "Gefülltes Schnitzel „Cordon bleu“ mit buntem Krautsalat und Pommes frites", date("2015-08-12"), euro("5.70"), Id))
offers should contain(LunchOffer(0, "Hacksteak „Toskana“ mit mediterraner Gemüsepfanne dazu Pommes frites", date("2015-08-14"), euro("5.20"), Id))
}
it should "resolve offers for week of 2016-02-16" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2016-02-16.html")
val offers = resolver.resolve(url)
offers should have size 24
}
it should "resolve offers for week of 2016-04-12" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2016-04-12.html")
val offers = resolver.resolve(url)
offers should have size 24
offers should contain(LunchOffer(0, "Wochenangebot: Bunter Salat mit frischen Erdbeeren, gebratenem Hähnchenfleisch und hausgemachtem Erdbeer-Minze-Joghurt-Dressing", date("2016-04-11"), euro("5.90"), Id))
}
it should "resolve offers for week of 2018-09-14" in {
val url = getClass.getResource("/mittagsplaene/salt_n_pepper/2018-09-14.html")
val offers = resolver.resolve(url)
offers should have size 19
offers should contain(LunchOffer(0, "Serbischer Bohneneintopf", date("2018-09-10"), euro("3.50"), Id))
offers should contain(LunchOffer(0, "Spaghetti Bolognese mit Reibekäse", date("2018-09-10"), euro("5.90"), Id))
}
private def resolver = {
val validatorStub = stub[DateValidator]
(validatorStub.isValid _).when(*).returning(true)
new LunchResolverSaltNPepper(validatorStub)
}
private val Id = LunchProvider.SALT_N_PEPPER.id
private def date(dateString: String): LocalDate = LocalDate.parse(dateString)
private def euro(moneyString: String): Money = Money.parse(s"EUR $moneyString")
}
| rori-dev/lunchbox | backend-play-akka-scala/test/domain/logic/LunchResolverSaltNPepperSpec.scala | Scala | mit | 7,278 |
package db.migration.postgres
import db.migration.{V1_030__SpillStateToDisk => V1_030__SpillStateToDiskDefinition}
import slick.jdbc.{JdbcProfile, PostgresProfile}
class V1_030__SpillStateToDisk extends V1_030__SpillStateToDiskDefinition {
override protected lazy val profile: JdbcProfile = PostgresProfile
}
| TouK/nussknacker | ui/server/src/main/scala/db/migration/postgres/V1_030__SpillStateToDisk.scala | Scala | apache-2.0 | 312 |
package com.twitter.finagle.filter
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.Filter.TypeAgnostic
import com.twitter.finagle._
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.filter.ServerAdmissionControl.ServerParams
import com.twitter.finagle.server.StackServer
import com.twitter.finagle.stack.Endpoint
import com.twitter.util.{Await, Future}
import java.util.concurrent.atomic.AtomicInteger
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.funsuite.AnyFunSuite
class ServerAdmissionControlTest extends AnyFunSuite with MockitoSugar {
class Ctx {
val a = new AtomicInteger(1)
class AdditionFilter(delta: Int) extends TypeAgnostic {
val name = s"multiple $delta"
override def toFilter[Req, Rep]: Filter[Req, Rep, Req, Rep] = new SimpleFilter[Req, Rep] {
def apply(req: Req, service: Service[Req, Rep]): Future[Rep] = {
a.addAndGet(delta)
service(req)
}
}
}
def injectFilter(filter: AdditionFilter): Stackable[ServiceFactory[Int, Int]] =
new Stack.TransformParams[ServiceFactory[Int, Int]] {
private val head = Stack.Head(Stack.Role("addAc"))
def transform(params: Stack.Params): Stack.Params = {
val j: ServerParams => TypeAgnostic = _ => filter
val nextFilters =
params[ServerAdmissionControl.Filters].filters + (filter.name -> j)
params + ServerAdmissionControl.Filters(nextFilters)
}
def role: Stack.Role = head.role
def description: String = head.description
def parameters: Seq[Stack.Param[_]] = head.parameters
}
val echo = ServiceFactory.const(Service.mk[Int, Int](v => Future.value(v)))
val stack = StackServer
.newStack[Int, Int].insertBefore(
ServerAdmissionControl.role,
injectFilter(new AdditionFilter(2))) ++
Stack.leaf(Endpoint, echo)
}
test("register a controller") {
val ctx = new Ctx
import ctx._
val factory = stack.make(StackServer.defaultParams)
val svc = Await.result(factory(), 5.seconds)
assert(Await.result(svc(1), 5.seconds) == 1)
assert(a.get == 3)
}
test("disabled by param") {
val ctx = new Ctx
import ctx._
val factory = stack.make(
StackServer.defaultParams +
ServerAdmissionControl.Param(false)
)
val svc = Await.result(factory(), 5.seconds)
assert(Await.result(svc(1), 5.seconds) == 1)
assert(a.get == 1)
}
test("register multiple controller") {
val ctx = new Ctx
import ctx._
val factory = stack
.insertBefore(ServerAdmissionControl.role, injectFilter(new AdditionFilter(3)))
.make(StackServer.defaultParams)
val svc = Await.result(factory(), 5.seconds)
assert(Await.result(svc(1), 5.seconds) == 1)
assert(a.get == 6)
}
test("duplicated registration is ignored") {
val ctx = new Ctx
import ctx._
val factory = stack
.insertBefore(ServerAdmissionControl.role, injectFilter(new AdditionFilter(2))).make(
StackServer.defaultParams)
val svc = Await.result(factory(), 5.seconds)
assert(Await.result(svc(1), 5.seconds) == 1)
assert(a.get == 3)
}
test("Respects the NonRetryable context entry") {
val ctx = new Ctx
import ctx._
val factory = stack.make(StackServer.defaultParams)
val svc = Await.result(factory(), 5.seconds)
Contexts.local.let(ServerAdmissionControl.NonRetryable, ()) {
val aInitial = a.get
assert(Await.result(svc(1), 5.seconds) == 1)
assert(a.get == aInitial)
}
}
}
| twitter/finagle | finagle-core/src/test/scala/com/twitter/finagle/filter/ServerAdmissionControlTest.scala | Scala | apache-2.0 | 3,623 |
package scorex.lagonaki.unit
import org.scalatest.{Matchers, FunSuite}
import scorex.crypto.encode.Base58
import scorex.wallet.Wallet
import scala.util.Random
class WalletSpecification extends FunSuite with Matchers {
private val walletSize = 10
val w = new Wallet(None, "cookies", Base58.decode("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption)
test("wallet - acc creation") {
w.generateNewAccounts(walletSize)
w.privateKeyAccounts().size shouldBe walletSize
w.privateKeyAccounts().map(_.address) shouldBe Seq("3MqMwwHW4v2nSEDHVWoh8RCQL8QrsWLkkeB", "3MuwVgJA8EXHukxo6rcakT5tD6FpvACtitG", "3MuAvUG4EAsG9RP9jaWjewCVmggaQD2t39B", "3MqoX4A3UGBYU7cX2JPs6BCzntNC8K8FBR4", "3N1Q9VVVQtY3GqhwHtJDEyHb3oWBcerZL8X", "3NARifVFHthMDnCwBacXijPB2szAgNTeBCz", "3N6dsnfD88j5yKgpnEavaaJDzAVSRBRVbMY", "3MufvXKZxLuNn5SHcEgGc2Vo7nLWnKVskfJ", "3Myt4tocZmj7o3d1gnuWRrnQWcoxvx5G7Ac", "3N3keodUiS8WLEw9W4BKDNxgNdUpwSnpb3K")
}
test("wallet - acc deletion") {
val head = w.privateKeyAccounts().head
w.deleteAccount(head)
assert(w.privateKeyAccounts().size == walletSize - 1)
w.deleteAccount(w.privateKeyAccounts().head)
assert(w.privateKeyAccounts().size == walletSize - 2)
w.privateKeyAccounts().foreach(w.deleteAccount)
assert(w.privateKeyAccounts().isEmpty)
}
test("reopening") {
//todo read folder from settings
val walletFile = new java.io.File(s"/tmp/wallet${Random.nextLong()}.dat")
val w = new Wallet(Some(walletFile), "cookies", Base58.decode("FQgbSAm6swGbtqA3NE8PttijPhT4N3Ufh4bHFAkyVnQz").toOption)
w.generateNewAccounts(10)
val nonce = w.nonce()
w.close()
assert(w.exists())
val w2 = new Wallet(Some(walletFile), "cookies", None)
w2.privateKeyAccounts().head.address should not be null
w2.nonce() shouldBe nonce
}
}
| B83YPoj/Waves | src/test/scala/scorex/lagonaki/unit/WalletSpecification.scala | Scala | apache-2.0 | 1,822 |
package org.jetbrains.plugins.scala
package annotator.createFromUsage
import com.intellij.codeInsight.navigation.NavigationUtil
import com.intellij.codeInsight.template.{TemplateBuilder, TemplateBuilderImpl, TemplateManager}
import com.intellij.codeInsight.{CodeInsightUtilCore, FileModificationService}
import com.intellij.ide.util.PsiElementListCellRenderer
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.search.PsiElementProcessor
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.annotator.createFromUsage.CreateFromUsageUtil._
import org.jetbrains.plugins.scala.console.ScalaLanguageConsoleView
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScParameterizedTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScTemplateDefinition, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaDirectoryService
/**
* Nikolay.Tropin
* 2014-07-28
*/
abstract class CreateTypeDefinitionQuickFix(ref: ScReferenceElement, description: String, kind: ClassKind)
extends CreateFromUsageQuickFixBase(ref, description) {
private final val LOG: Logger = Logger.getInstance("#org.jetbrains.plugins.scala.annotator.createFromUsage.CreateTemplateDefinitionQuickFix")
private val name = ref.refName
override def isAvailable(project: Project, editor: Editor, file: PsiFile) = {
def goodQualifier = ref.qualifier match {
case Some(InstanceOfClass(typeDef: ScTypeDefinition)) => true
case Some(ResolvesTo(pack: PsiPackage)) => true
case None => true
case _ => false
}
super.isAvailable(project, editor, file) && goodQualifier
}
override protected def invokeInner(project: Project, editor: Editor, file: PsiFile) = {
inWriteAction {
ref.qualifier match {
case Some(InstanceOfClass(typeDef: ScTypeDefinition)) => createInnerClassIn(typeDef)
case Some(ResolvesTo(pack: PsiPackage)) => createClassInPackage(pack)
case None =>
val inThisFile = (Iterator(ref) ++ ref.parentsInFile).collect {
case inner childOf (_: ScTemplateBody) => inner
case td: ScTypeDefinition if td.isTopLevel => td
}
val possibleSiblings = file +: inThisFile.toSeq.reverse
createClassWithLevelChoosing(editor, possibleSiblings)
case _ =>
}
}
}
private def createClassInPackage(psiPackage: PsiPackage): Unit = {
val directory = psiPackage.getDirectories match {
case Array(dir) => dir
case Array() => throw new IllegalStateException(s"Cannot find directory for the package `${psiPackage.getName}`")
case dirs =>
val currentDir = dirs.find(PsiTreeUtil.isAncestor(_, ref, true))
.orElse(dirs.find(ScalaPsiUtil.getModule(_) == ScalaPsiUtil.getModule(ref)))
currentDir.getOrElse(dirs(0))
}
createClassInDirectory(directory)
}
private def createInnerClassIn(target: ScTemplateDefinition): Unit = {
val extBlock = target.extendsBlock
val targetBody = extBlock.templateBody.getOrElse(
extBlock.add(ScalaPsiElementFactory.createTemplateBody(target.getManager)))
createClassIn(targetBody, Some(targetBody.getLastChild))
}
private def createClassIn(parent: PsiElement, anchorAfter: Option[PsiElement]): Unit = {
try {
if (!FileModificationService.getInstance.preparePsiElementForWrite(parent)) return
val text = s"${kind.keyword} $name"
val newTd = ScalaPsiElementFactory.createTemplateDefinitionFromText(text, parent, parent.getFirstChild)
val anchor = anchorAfter.orNull
parent.addBefore(ScalaPsiElementFactory.createNewLine(parent.getManager), anchor)
val result = parent.addBefore(newTd, anchor)
afterCreationWork(result.asInstanceOf[ScTypeDefinition])
}
catch {
case e: IncorrectOperationException =>
LOG.error(e)
}
}
private def createClassWithLevelChoosing(editor: Editor, siblings: Seq[PsiElement]): Unit = {
val renderer = new PsiElementListCellRenderer[PsiElement] {
override def getElementText(element: PsiElement) = element match {
case f: PsiFile => "New file"
case td: ScTypeDefinition if td.isTopLevel => "Top level in this file"
case _ childOf (tb: ScTemplateBody) =>
val containingClass = PsiTreeUtil.getParentOfType(tb, classOf[ScTemplateDefinition])
s"Inner in ${containingClass.name}"
case _ => "Local scope"
}
override def getContainerText(element: PsiElement, name: String) = null
override def getIconFlags = 0
override def getIcon(element: PsiElement) = null
}
siblings match {
case Seq() =>
case Seq(elem) => createClassAtLevel(elem)
case _ =>
val selection = siblings.head
val processor = new PsiElementProcessor[PsiElement] {
def execute(elem: PsiElement): Boolean = {
inWriteCommandAction(elem.getProject){
createClassAtLevel(elem)
}
false
}
}
NavigationUtil.getPsiElementPopup(siblings.toArray, renderer, "Choose level", processor, selection)
.showInBestPositionFor(editor)
}
}
private def createClassAtLevel(sibling: PsiElement): Unit = {
sibling match {
case file: PsiFile => createClassInDirectory(file.getContainingDirectory)
case td: ScTypeDefinition if td.isTopLevel => createClassIn(td.getParent, None)
case _ childOf (tb: ScTemplateBody) =>
createInnerClassIn(PsiTreeUtil.getParentOfType(tb, classOf[ScTemplateDefinition]))
case _ =>
}
}
private def createClassInDirectory(directory: PsiDirectory) = {
val clazz = ScalaDirectoryService.createClassFromTemplate(directory, name, kind.templateName, askToDefineVariables = false)
afterCreationWork(clazz.asInstanceOf[ScTypeDefinition])
}
protected def afterCreationWork(clazz: ScTypeDefinition) {
addGenericParams(clazz)
addClassParams(clazz)
ScalaPsiUtil.adjustTypes(clazz)
runTemplate(clazz)
}
protected def addMoreElementsToTemplate(builder: TemplateBuilder, clazz: ScTypeDefinition): Unit = {}
private def runTemplate(clazz: ScTypeDefinition): Unit = {
val builder = new TemplateBuilderImpl(clazz)
addTypeParametersToTemplate(clazz, builder)
clazz match {
case cl: ScClass if cl.constructor.exists(_.parameters.nonEmpty) =>
addParametersToTemplate(cl.constructor.get, builder)
case _ =>
}
addMoreElementsToTemplate(builder, clazz)
CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(clazz)
val template = builder.buildTemplate()
val targetFile = clazz.getContainingFile
val isScalaConsole = targetFile.getName == ScalaLanguageConsoleView.SCALA_CONSOLE
if (!isScalaConsole) {
val newEditor = positionCursor(clazz.nameId)
if (template.getSegmentsCount != 0) {
val range = clazz.getTextRange
newEditor.getDocument.deleteString(range.getStartOffset, range.getEndOffset)
TemplateManager.getInstance(clazz.getProject).startTemplate(newEditor, template)
}
}
}
private def addGenericParams(clazz: ScTypeDefinition): Unit = ref.getParent.getParent match {
case pt: ScParameterizedTypeElement =>
val paramsText = pt.typeArgList.typeArgs match {
case args if args.size == 1 => "[T]"
case args => args.indices.map(i => s"T${i + 1}").mkString("[",", ", "]")
}
val nameId = clazz.nameId
val clause = ScalaPsiElementFactory.createTypeParameterClauseFromTextWithContext(paramsText, clazz, nameId)
clazz.addAfter(clause, nameId)
case _ =>
}
private def addClassParams(clazz: ScTypeDefinition): Unit = {
clazz match {
case cl: ScClass =>
val constr = cl.constructor.get
val text = parametersText(ref)
val parameters = ScalaPsiElementFactory.createParamClausesWithContext(text, constr, constr.getFirstChild)
constr.parameterList.replace(parameters)
case _ =>
}
}
}
class CreateObjectQuickFix(ref: ScReferenceElement)
extends CreateTypeDefinitionQuickFix(ref, "object", Object)
class CreateTraitQuickFix(ref: ScReferenceElement)
extends CreateTypeDefinitionQuickFix(ref, "trait", Trait) {
override def isAvailable(project: Project, editor: Editor, file: PsiFile) = {
super.isAvailable(project, editor, file) && parametersText(ref).isEmpty
}
}
class CreateClassQuickFix(ref: ScReferenceElement)
extends CreateTypeDefinitionQuickFix(ref, "class", Class)
class CreateCaseClassQuickFix(ref: ScReferenceElement)
extends CreateTypeDefinitionQuickFix(ref, "case class", Class) {
override protected def afterCreationWork(clazz: ScTypeDefinition) = {
clazz.setModifierProperty("case", value = true)
super.afterCreationWork(clazz)
}
}
| advancedxy/intellij-scala | src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateTypeDefinitionQuickFix.scala | Scala | apache-2.0 | 9,418 |
// Copyright (c) 2014 David Miguel Antunes <davidmiguel {at} antunes.net>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.github.david04.liftutils.elem
import scala.xml.NodeSeq
import scala.xml.Text
import net.liftweb.util.Helpers._
import scala.util.Try
// get***Value: in the server
// getCurrent***Value: in the client
trait GenDoubleValueElem extends Elem {def getDoubleValue(): Double}
trait GenEditableDoubleValueElem extends GenDoubleValueElem with ValidatableElem {def getCurrentDoubleValue(): Double}
trait GenIntValueElem extends Elem {def getIntValue(): Int}
trait GenEditableIntValueElem extends GenIntValueElem with ValidatableElem {def getCurrentIntValue(): Int}
trait GenBooleanValueElem extends Elem {def getBooleanValue(): Boolean}
trait GenEditableBooleanValueElem extends GenBooleanValueElem with ValidatableElem {def getCurrentBooleanValue(): Boolean}
trait GenStringValueElem extends Elem {def getStringValue(): String}
trait GenEditableStringValueElem extends GenStringValueElem with ValidatableElem {def getCurrentStringValue(): String}
trait GenDateTimeValueElem extends Elem {def getDateTimeValue(): (Long, Long)}
trait GenEditableDateTimeValueElem extends GenDateTimeValueElem with ValidatableElem {def getCurrentDateTimeValue(): (Long, Long)}
trait GenFileOptValueElem extends Elem {def getFile(): Option[(Array[Byte], String)]}
trait GenOneOfEnumValueElem extends Elem {
protected type EnumType <: Enumeration
protected type EnumValueType = EnumType#Value
protected def enum: EnumType
def getOneOfEnumValue(): EnumValueType
}
trait GenEditableOneOfEnumValueElem extends GenOneOfEnumValueElem with ValidatableElem {def getCurrentOneOfEnumValue(): EnumValueType}
trait GenManyOfEnumValueElem extends Elem {
protected type EnumType <: Enumeration
protected type EnumValueType = EnumType#Value
protected def enum: EnumType
def getManyOfEnumValue(): Seq[EnumValueType]
}
trait GenEditableManyOfEnumValueElem extends GenManyOfEnumValueElem with ValidatableElem {def getCurrentManyOfEnumValue(): Seq[EnumValueType]}
trait GenOneOfSeqValueElem extends Elem {
protected type SeqValueType
protected def seq: Seq[SeqValueType]
def getOneOfSeqValue(): SeqValueType
}
trait GenEditableOneOfSeqValueElem extends GenOneOfSeqValueElem with ValidatableElem {def getCurrentOneOfSeqValue(): SeqValueType}
trait GenManyOfSeqValueElem extends Elem {
protected type SeqValueType
protected def seq: Seq[SeqValueType]
def getManyOfSeqValue(): Seq[SeqValueType]
}
trait GenEditableManyOfSeqValueElem extends GenManyOfSeqValueElem with ValidatableElem {def getCurrentManyOfSeqValue(): Seq[SeqValueType]}
trait GenOneOfManyValueElem extends Elem {
protected type OneOfManyValue <: Object {def name: NodeSeq; def id: String}
def getOneOfManyValue(): OneOfManyValue
def getAllOneOfManyValues(): Seq[OneOfManyValue]
}
trait GenEditableOneOfManyValueElem extends GenOneOfManyValueElem with ValidatableElem {def getCurrentOneOfManyValue(): OneOfManyValue}
trait GenManyOfManyValueElem extends Elem {
protected type ManyOfManyValue <: Object {def name: NodeSeq; def id: String}
def getManyOfManyValue(): Seq[ManyOfManyValue]
def getAllManyOfManyValues(): Seq[ManyOfManyValue]
}
trait GenEditableManyOfManyValueElem extends GenManyOfManyValueElem with ValidatableElem {def getCurrentManyOfManyValue(): Seq[ManyOfManyValue]}
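// The abstract classes below bridge one value representation onto another, e.g. exposing a
// Double- or Int-valued element through its String-based form field, or an Enumeration/Seq
// backed element through the generic one-of-many / many-of-many selector interface.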
abstract class GenDouble2GenString extends GenEditableDoubleValueElem with GenEditableStringValueElem {
val suffix: Option[String]
val precision: Int
protected def double2StringFormat = s"%.${precision}f"
private def currentStringValueWithoutSuffix() =
suffix.map(suffix => getCurrentStringValue().replaceAllLiterally(suffix, "")).getOrElse(getCurrentStringValue())
override def error: Option[NodeSeq] =
Try(currentStringValueWithoutSuffix().toDouble).map(_ => super.error).getOrElse(Some(Text("Invalid value.")))
def getCurrentDoubleValue(): Double = currentStringValueWithoutSuffix().toDouble
def getStringValue() = double2StringFormat.format(getDoubleValue()) + suffix.getOrElse("")
}
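// Illustrative sketch (not part of the original source): a hypothetical percentage field built
// on GenDouble2GenString, assuming the remaining Elem members are provided elsewhere:
//
//   abstract class PercentElem extends GenDouble2GenString {
//     val suffix = Some("%") // rendered and parsed with a trailing "%"
//     val precision = 2      // getStringValue() formats with "%.2f", e.g. "12.50%"
//   }
//
// getCurrentDoubleValue() strips the suffix from the client string before calling toDouble,
// and error reports "Invalid value." when that parse fails.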
abstract class GenInt2GenString extends GenEditableIntValueElem with GenEditableStringValueElem {
val suffix: Option[String]
private def currentStringValueWithoutSuffix() =
suffix.map(suffix => getCurrentStringValue().replaceAllLiterally(suffix, "")).getOrElse(getCurrentStringValue())
override def error: Option[NodeSeq] =
Try(currentStringValueWithoutSuffix().toInt).map(_ => super.error).getOrElse(Some(Text("Invalid value.")))
def getCurrentIntValue(): Int = currentStringValueWithoutSuffix().toInt
def getStringValue() = getIntValue().toString() + suffix.getOrElse("")
}
abstract class GenOneOfEnum2GenOneOfMany extends GenEditableOneOfEnumValueElem with GenEditableOneOfManyValueElem {
protected case class EnumValue(v: EnumValueType) {
def name = enumValue2NodeSeq(v)
def id = Option(v).map(_.id + "").getOrElse("")
}
protected def enumValue2NodeSeq(v: EnumValueType): NodeSeq
protected type OneOfManyValue = EnumValue
def getOneOfManyValue() = EnumValue(getOneOfEnumValue())
def getCurrentOneOfEnumValue() = getCurrentOneOfManyValue().v
def getAllOneOfManyValues() = enum.values.map(EnumValue(_)).toSeq.sortBy(_.v.id)
}
abstract class GenManyOfEnum2GenOneOfMany extends GenEditableManyOfEnumValueElem with GenEditableManyOfManyValueElem {
protected case class EnumValue(v: EnumValueType) {
def name = enumValue2NodeSeq(v)
def id = Option(v).map(_.id + "").getOrElse("")
}
protected def enumValue2NodeSeq(v: EnumValueType): NodeSeq
protected type ManyOfManyValue = EnumValue
def getManyOfManyValue() = getManyOfEnumValue().map(EnumValue(_))
def getCurrentManyOfEnumValue() = getCurrentManyOfManyValue().map(_.v)
def getAllManyOfManyValues() = enum.values.map(EnumValue(_)).toSeq.sortBy(_.v.id)
}
abstract class GenOneOfSeq2GenOneOfMany extends GenEditableOneOfSeqValueElem with GenEditableOneOfManyValueElem {
protected case class SeqValue(v: SeqValueType, idx: Int) {
def name = seqValue2NodeSeq(v)
def id = idx + ""
}
protected def seqValue2NodeSeq(v: SeqValueType): NodeSeq
protected type OneOfManyValue = SeqValue
def getOneOfManyValue() = SeqValue(getOneOfSeqValue(), seq.indexOf(getOneOfSeqValue()))
def getCurrentOneOfSeqValue() = getCurrentOneOfManyValue().v
def getAllOneOfManyValues() = seq.zipWithIndex.map(e => SeqValue(e._1, e._2))
}
abstract class GenManyOfSeq2GenManyOfMany extends GenEditableManyOfSeqValueElem with GenEditableManyOfManyValueElem {
protected case class SeqValue(v: SeqValueType, idx: Int) {
def name = seqValue2NodeSeq(v)
def id = idx + ""
}
protected def seqValue2NodeSeq(v: SeqValueType): NodeSeq
protected type ManyOfManyValue = SeqValue
def getManyOfManyValue() = getManyOfSeqValue().map(v => SeqValue(v, seq.indexOf(v)))
def getCurrentManyOfSeqValue() = getCurrentManyOfManyValue().map(_.v)
def getAllManyOfManyValues() = seq.zipWithIndex.map(e => SeqValue(e._1, e._2))
}
| david04/liftutils | src/main/scala/com/github/david04/liftutils/elem/GenericElem.scala | Scala | mit | 8,130 |
package models
import java.sql.Timestamp
import java.util.UUID
import javax.inject.Inject
import com.fang.GamePlayJson
import com.fang.game.Step
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.driver.JdbcProfile
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
class GamePlayDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider)
(implicit executionContext: ExecutionContext)
extends HasDatabaseConfigProvider[JdbcProfile] {
import util.MyPostgresDriver.api._
class GamePlayTable(tag: Tag) extends Table[GamePlayModel](tag, "gameplay") {
def id = column[UUID]("id", O.PrimaryKey)
def first_user = column[String]("first_user")
def second_user = column[String]("second_user")
def status = column[String]("status")
def rule = column[String]("rule")
def first_win = column[Option[Boolean]]("first_win")
def start_time = column[Timestamp]("start_time")
def steps = column[List[Step]]("steps")
def * = (id, first_user, second_user, status, rule, first_win, start_time, steps) <>
((GamePlayModel.apply _).tupled, GamePlayModel.unapply)
}
val gamePlays: TableQuery[GamePlayTable] = TableQuery[GamePlayTable]
def containUser(id: String): (GamePlayTable) => Rep[Boolean] =
(g: GamePlayTable) => g.first_user === id || g.second_user === id
def queryRunningGame(userId: String): Future[Option[GamePlayModel]] = db.run(
gamePlays.filter(g => containUser(userId)(g) && g.status === GamePlayJson.PLAYING).result.headOption
)
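  // Returns the ten most recently started games, regardless of their status.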
def queryPlayingGame(): Future[Seq[GamePlayModel]] = db.run(
gamePlays.sortBy(_.start_time.desc).take(10).result
)
def intSeq2Str(seq: List[Int]): String = {
val builder = new StringBuilder
var isFirst = true
builder.append("{")
for (item <- seq) {
if (isFirst) {
isFirst = false
} else {
builder.append(", ")
}
builder.append(item)
}
builder.append("}")
builder.toString()
}
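  // intSeq2Str renders a list as a PostgreSQL array literal, e.g. intSeq2Str(List(1, 2, 3))
  // produces "{1, 2, 3}", which the raw SQL statements below cast with ::integer[].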
def createGame(gamePlayModel: GamePlayModel): Future[Boolean] = {
val m = gamePlayModel
val stepStr = intSeq2Str(m.steps.map(_.toInt))
val stat = sqlu"""INSERT INTO gameplay(id, first_user, second_user, status, rule, first_win, start_time, steps)
VALUES (${m.id.toString}::uuid, ${m.first_user}, ${m.second_user}, ${m.status}, ${m.rule}, ${m.first_win},
${m.start_time}, ${stepStr}::integer[])"""
db.run(stat).map(_ > 0).recover{
case exception: Exception =>
exception.printStackTrace()
false
}
}
def queryGame(id: String): Future[Option[GamePlayModel]] = {
val uid = Try(UUID.fromString(id))
if (uid.isFailure) Future.successful(None)
else db.run(gamePlays.filter(_.id === uid.get).result.headOption)
}
def updateGame(id: String, gamePlayModel: GamePlayModel): Future[Int] = {
val uid = Try(UUID.fromString(id))
if (uid.isFailure) return Future.successful(0)
val m = gamePlayModel
val stepStr = intSeq2Str(m.steps.map(_.toInt))
val statement =
sqlu"""UPDATE gameplay SET status = ${m.status}, first_win = ${m.first_win},
steps = ${stepStr}::integer[] WHERE id = ${id}::uuid
"""
db.run(statement)
}
}
| TianhaoFang/online-go | app/models/GamePlayDAO.scala | Scala | mit | 3,298 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.math.{BigDecimal => JavaBigDecimal}
import java.time.ZoneId
import java.util.Locale
import java.util.concurrent.TimeUnit._
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeConstants._
import org.apache.spark.sql.catalyst.util.DateTimeUtils._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.UTF8StringBuilder
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.unsafe.types.UTF8String.{IntWrapper, LongWrapper}
object Cast {
/**
* Returns true iff we can cast `from` type to `to` type.
*/
def canCast(from: DataType, to: DataType): Boolean = (from, to) match {
case (fromType, toType) if fromType == toType => true
case (NullType, _) => true
case (_, StringType) => true
case (StringType, BinaryType) => true
case (_: IntegralType, BinaryType) => true
case (StringType, BooleanType) => true
case (DateType, BooleanType) => true
case (TimestampType, BooleanType) => true
case (_: NumericType, BooleanType) => true
case (StringType, TimestampType) => true
case (BooleanType, TimestampType) => true
case (DateType, TimestampType) => true
case (_: NumericType, TimestampType) => true
case (StringType, DateType) => true
case (TimestampType, DateType) => true
case (StringType, CalendarIntervalType) => true
case (StringType, _: NumericType) => true
case (BooleanType, _: NumericType) => true
case (DateType, _: NumericType) => true
case (TimestampType, _: NumericType) => true
case (_: NumericType, _: NumericType) => true
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
canCast(fromType, toType) &&
resolvableNullability(fn || forceNullable(fromType, toType), tn)
case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
canCast(fromKey, toKey) &&
(!forceNullable(fromKey, toKey)) &&
canCast(fromValue, toValue) &&
resolvableNullability(fn || forceNullable(fromValue, toValue), tn)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).forall {
case (fromField, toField) =>
canCast(fromField.dataType, toField.dataType) &&
resolvableNullability(
fromField.nullable || forceNullable(fromField.dataType, toField.dataType),
toField.nullable)
}
case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt1.userClass == udt2.userClass =>
true
case _ => false
}
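  // Illustrative examples (not part of the original source): canCast(IntegerType, StringType) is
  // true because any type can be cast to string, and canCast(ArrayType(IntegerType, false),
  // ArrayType(StringType, false)) is true because the element cast is allowed and does not force
  // nullability.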
/**
   * Return true if we need to use the `timeZone` information when casting `from` type to `to` type.
* The patterns matched reflect the current implementation in the Cast node.
* c.f. usage of `timeZone` in:
* * Cast.castToString
* * Cast.castToDate
* * Cast.castToTimestamp
*/
def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match {
case (StringType, TimestampType | DateType) => true
case (DateType, TimestampType) => true
case (TimestampType, StringType) => true
case (TimestampType, DateType) => true
case (ArrayType(fromType, _), ArrayType(toType, _)) => needsTimeZone(fromType, toType)
case (MapType(fromKey, fromValue, _), MapType(toKey, toValue, _)) =>
needsTimeZone(fromKey, toKey) || needsTimeZone(fromValue, toValue)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).exists {
case (fromField, toField) =>
needsTimeZone(fromField.dataType, toField.dataType)
}
case _ => false
}
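  // Illustrative examples (not part of the original source): needsTimeZone(StringType,
  // TimestampType) is true because parsing a timestamp string depends on the session time zone,
  // while needsTimeZone(IntegerType, StringType) is false.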
/**
   * Returns true iff we can safely up-cast the `from` type to `to` type without any truncation,
   * precision loss or possible runtime failures. For example, long -> int and string -> int are
   * not up-casts.
*/
def canUpCast(from: DataType, to: DataType): Boolean = (from, to) match {
case _ if from == to => true
case (from: NumericType, to: DecimalType) if to.isWiderThan(from) => true
case (from: DecimalType, to: NumericType) if from.isTighterThan(to) => true
case (f, t) if legalNumericPrecedence(f, t) => true
case (DateType, TimestampType) => true
case (_: AtomicType, StringType) => true
case (_: CalendarIntervalType, StringType) => true
case (NullType, _) => true
// Spark supports casting between long and timestamp, please see `longToTimestamp` and
// `timestampToLong` for details.
case (TimestampType, LongType) => true
case (LongType, TimestampType) => true
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
resolvableNullability(fn, tn) && canUpCast(fromType, toType)
case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
resolvableNullability(fn, tn) && canUpCast(fromKey, toKey) && canUpCast(fromValue, toValue)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).forall {
case (f1, f2) =>
resolvableNullability(f1.nullable, f2.nullable) && canUpCast(f1.dataType, f2.dataType)
}
case _ => false
}
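  // Illustrative examples (not part of the original source): canUpCast(IntegerType, LongType) is
  // true via numeric precedence, while canUpCast(LongType, IntegerType) and
  // canUpCast(StringType, IntegerType) are false because they may truncate or fail at runtime.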
/**
* Returns true iff we can cast the `from` type to `to` type as per the ANSI SQL.
* In practice, the behavior is mostly the same as PostgreSQL. It disallows certain unreasonable
* type conversions such as converting `string` to `int` or `double` to `boolean`.
*/
def canANSIStoreAssign(from: DataType, to: DataType): Boolean = (from, to) match {
case _ if from == to => true
case (NullType, _) => true
case (_: NumericType, _: NumericType) => true
case (_: AtomicType, StringType) => true
case (_: CalendarIntervalType, StringType) => true
case (DateType, TimestampType) => true
case (TimestampType, DateType) => true
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
resolvableNullability(fn, tn) && canANSIStoreAssign(fromType, toType)
case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
resolvableNullability(fn, tn) && canANSIStoreAssign(fromKey, toKey) &&
canANSIStoreAssign(fromValue, toValue)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).forall {
case (f1, f2) =>
resolvableNullability(f1.nullable, f2.nullable) &&
canANSIStoreAssign(f1.dataType, f2.dataType)
}
case _ => false
}
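  // Illustrative examples (not part of the original source): canANSIStoreAssign(IntegerType,
  // DoubleType) is true (numeric to numeric), while canANSIStoreAssign(StringType, IntegerType)
  // is false, since ANSI store assignment rejects string-to-int conversions.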
private def legalNumericPrecedence(from: DataType, to: DataType): Boolean = {
val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from)
val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to)
fromPrecedence >= 0 && fromPrecedence < toPrecedence
}
def canNullSafeCastToDecimal(from: DataType, to: DecimalType): Boolean = from match {
case from: BooleanType if to.isWiderThan(DecimalType.BooleanDecimal) => true
case from: NumericType if to.isWiderThan(from) => true
case from: DecimalType =>
      // truncation or precision loss
(to.precision - to.scale) > (from.precision - from.scale)
case _ => false // overflow
}
/**
* Returns `true` if casting non-nullable values from `from` type to `to` type
* may return null. Note that the caller side should take care of input nullability
* first and only call this method if the input is not nullable.
*/
def forceNullable(from: DataType, to: DataType): Boolean = (from, to) match {
case (NullType, _) => false // empty array or map case
case (_, _) if from == to => false
case (StringType, BinaryType) => false
case (StringType, _) => true
case (_, StringType) => false
case (FloatType | DoubleType, TimestampType) => true
case (TimestampType, DateType) => false
case (_, DateType) => true
case (DateType, TimestampType) => false
case (DateType, _) => true
case (_, CalendarIntervalType) => true
case (_, to: DecimalType) if !canNullSafeCastToDecimal(from, to) => true
case (_: FractionalType, _: IntegralType) => true // NaN, infinity
case _ => false
}
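  // Illustrative examples (not part of the original source): forceNullable(StringType,
  // IntegerType) is true because the parse can fail and yield null, while
  // forceNullable(IntegerType, StringType) is false since every int has a string representation.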
def resolvableNullability(from: Boolean, to: Boolean): Boolean = !from || to
/**
   * We process literals such as 'Infinity', 'Inf', '-Infinity' and 'NaN' in a case-insensitive
   * manner to be compatible with other database systems such as PostgreSQL and DB2.
*/
def processFloatingPointSpecialLiterals(v: String, isFloat: Boolean): Any = {
v.trim.toLowerCase(Locale.ROOT) match {
case "inf" | "+inf" | "infinity" | "+infinity" =>
if (isFloat) Float.PositiveInfinity else Double.PositiveInfinity
case "-inf" | "-infinity" =>
if (isFloat) Float.NegativeInfinity else Double.NegativeInfinity
case "nan" =>
if (isFloat) Float.NaN else Double.NaN
case _ => null
}
}
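  // Illustrative results (not part of the original source):
  //   processFloatingPointSpecialLiterals(" Infinity ", isFloat = false) // Double.PositiveInfinity
  //   processFloatingPointSpecialLiterals("nan", isFloat = true)         // Float.NaN
  //   processFloatingPointSpecialLiterals("foo", isFloat = true)         // null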
}
abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression with NullIntolerant {
def child: Expression
def dataType: DataType
override def toString: String = {
val ansi = if (ansiEnabled) "ansi_" else ""
s"${ansi}cast($child as ${dataType.simpleString})"
}
override def checkInputDataTypes(): TypeCheckResult = {
if (Cast.canCast(child.dataType, dataType)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(
s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
}
}
override def nullable: Boolean = child.nullable || Cast.forceNullable(child.dataType, dataType)
protected def ansiEnabled: Boolean
// When this cast involves TimeZone, it's only resolved if the timeZoneId is set;
// Otherwise behave like Expression.resolved.
override lazy val resolved: Boolean =
childrenResolved && checkInputDataTypes().isSuccess && (!needsTimeZone || timeZoneId.isDefined)
private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)
// [[func]] assumes the input is no longer null because eval already does the null check.
@inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
private lazy val dateFormatter = DateFormatter(zoneId)
private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId)
// UDFToString
private[this] def castToString(from: DataType): Any => Any = from match {
case CalendarIntervalType =>
buildCast[CalendarInterval](_, i => UTF8String.fromString(i.toString))
case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes)
case DateType => buildCast[Int](_, d => UTF8String.fromString(dateFormatter.format(d)))
case TimestampType => buildCast[Long](_,
t => UTF8String.fromString(DateTimeUtils.timestampToString(timestampFormatter, t)))
case ArrayType(et, _) =>
buildCast[ArrayData](_, array => {
val builder = new UTF8StringBuilder
builder.append("[")
if (array.numElements > 0) {
val toUTF8String = castToString(et)
if (!array.isNullAt(0)) {
builder.append(toUTF8String(array.get(0, et)).asInstanceOf[UTF8String])
}
var i = 1
while (i < array.numElements) {
builder.append(",")
if (!array.isNullAt(i)) {
builder.append(" ")
builder.append(toUTF8String(array.get(i, et)).asInstanceOf[UTF8String])
}
i += 1
}
}
builder.append("]")
builder.build()
})
case MapType(kt, vt, _) =>
buildCast[MapData](_, map => {
val builder = new UTF8StringBuilder
builder.append("[")
if (map.numElements > 0) {
val keyArray = map.keyArray()
val valueArray = map.valueArray()
val keyToUTF8String = castToString(kt)
val valueToUTF8String = castToString(vt)
builder.append(keyToUTF8String(keyArray.get(0, kt)).asInstanceOf[UTF8String])
builder.append(" ->")
if (!valueArray.isNullAt(0)) {
builder.append(" ")
builder.append(valueToUTF8String(valueArray.get(0, vt)).asInstanceOf[UTF8String])
}
var i = 1
while (i < map.numElements) {
builder.append(", ")
builder.append(keyToUTF8String(keyArray.get(i, kt)).asInstanceOf[UTF8String])
builder.append(" ->")
if (!valueArray.isNullAt(i)) {
builder.append(" ")
builder.append(valueToUTF8String(valueArray.get(i, vt))
.asInstanceOf[UTF8String])
}
i += 1
}
}
builder.append("]")
builder.build()
})
case StructType(fields) =>
buildCast[InternalRow](_, row => {
val builder = new UTF8StringBuilder
builder.append("[")
if (row.numFields > 0) {
val st = fields.map(_.dataType)
val toUTF8StringFuncs = st.map(castToString)
if (!row.isNullAt(0)) {
builder.append(toUTF8StringFuncs(0)(row.get(0, st(0))).asInstanceOf[UTF8String])
}
var i = 1
while (i < row.numFields) {
builder.append(",")
if (!row.isNullAt(i)) {
builder.append(" ")
builder.append(toUTF8StringFuncs(i)(row.get(i, st(i))).asInstanceOf[UTF8String])
}
i += 1
}
}
builder.append("]")
builder.build()
})
case pudt: PythonUserDefinedType => castToString(pudt.sqlType)
case udt: UserDefinedType[_] =>
buildCast[Any](_, o => UTF8String.fromString(udt.deserialize(o).toString))
case _ => buildCast[Any](_, o => UTF8String.fromString(o.toString))
}
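  // Illustrative output (not part of the original source): casting an array of 1, 2, 3 to string
  // yields "[1, 2, 3]" (a null element leaves an empty slot, e.g. "[1,]"), and a map of
  // 1 -> "a", 2 -> "b" yields "[1 -> a, 2 -> b]".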
// BinaryConverter
private[this] def castToBinary(from: DataType): Any => Any = from match {
case StringType => buildCast[UTF8String](_, _.getBytes)
case ByteType => buildCast[Byte](_, NumberConverter.toBinary)
case ShortType => buildCast[Short](_, NumberConverter.toBinary)
case IntegerType => buildCast[Int](_, NumberConverter.toBinary)
case LongType => buildCast[Long](_, NumberConverter.toBinary)
}
// UDFToBoolean
private[this] def castToBoolean(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
if (StringUtils.isTrueString(s)) {
true
} else if (StringUtils.isFalseString(s)) {
false
} else {
null
}
})
case TimestampType =>
buildCast[Long](_, t => t != 0)
case DateType =>
// Hive would return null when cast from date to boolean
buildCast[Int](_, d => null)
case LongType =>
buildCast[Long](_, _ != 0)
case IntegerType =>
buildCast[Int](_, _ != 0)
case ShortType =>
buildCast[Short](_, _ != 0)
case ByteType =>
buildCast[Byte](_, _ != 0)
case DecimalType() =>
buildCast[Decimal](_, !_.isZero)
case DoubleType =>
buildCast[Double](_, _ != 0)
case FloatType =>
buildCast[Float](_, _ != 0)
}
// TimestampConverter
private[this] def castToTimestamp(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, utfs => DateTimeUtils.stringToTimestamp(utfs, zoneId).orNull)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0)
case LongType =>
buildCast[Long](_, l => longToTimestamp(l))
case IntegerType =>
buildCast[Int](_, i => longToTimestamp(i.toLong))
case ShortType =>
buildCast[Short](_, s => longToTimestamp(s.toLong))
case ByteType =>
buildCast[Byte](_, b => longToTimestamp(b.toLong))
case DateType =>
buildCast[Int](_, d => epochDaysToMicros(d, zoneId))
// TimestampWritable.decimalToTimestamp
case DecimalType() =>
buildCast[Decimal](_, d => decimalToTimestamp(d))
// TimestampWritable.doubleToTimestamp
case DoubleType =>
buildCast[Double](_, d => doubleToTimestamp(d))
// TimestampWritable.floatToTimestamp
case FloatType =>
buildCast[Float](_, f => doubleToTimestamp(f.toDouble))
}
private[this] def decimalToTimestamp(d: Decimal): Long = {
(d.toBigDecimal * MICROS_PER_SECOND).longValue
}
private[this] def doubleToTimestamp(d: Double): Any = {
if (d.isNaN || d.isInfinite) null else (d * MICROS_PER_SECOND).toLong
}
// converting seconds to us
private[this] def longToTimestamp(t: Long): Long = SECONDS.toMicros(t)
// converting us to seconds
private[this] def timestampToLong(ts: Long): Long = {
Math.floorDiv(ts, MICROS_PER_SECOND)
}
// converting us to seconds in double
private[this] def timestampToDouble(ts: Long): Double = {
ts / MICROS_PER_SECOND.toDouble
}
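  // Illustrative values (not part of the original source): timestampToLong(1500000L) == 1 and
  // timestampToLong(-1500000L) == -2 (floorDiv rounds toward negative infinity), while
  // timestampToDouble(1500000L) == 1.5.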
// DateConverter
private[this] def castToDate(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s, zoneId).orNull)
case TimestampType =>
      // Throw away precision finer than seconds, following Hive's behaviour.
      // Timestamp.nanos is in 0 to 999,999,999, no more than a second.
buildCast[Long](_, t => microsToEpochDays(t, zoneId))
}
// IntervalConverter
private[this] def castToInterval(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => IntervalUtils.safeStringToInterval(s))
}
// LongConverter
private[this] def castToLong(from: DataType): Any => Any = from match {
case StringType if ansiEnabled =>
buildCast[UTF8String](_, _.toLongExact())
case StringType =>
val result = new LongWrapper()
buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0L)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t))
case x: NumericType if ansiEnabled =>
b => x.exactNumeric.asInstanceOf[Numeric[Any]].toLong(b)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toLong(b)
}
// IntConverter
private[this] def castToInt(from: DataType): Any => Any = from match {
case StringType if ansiEnabled =>
buildCast[UTF8String](_, _.toIntExact())
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1 else 0)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType if ansiEnabled =>
buildCast[Long](_, t => LongExactNumeric.toInt(timestampToLong(t)))
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toInt)
case x: NumericType if ansiEnabled =>
b => x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b)
}
// ShortConverter
private[this] def castToShort(from: DataType): Any => Any = from match {
case StringType if ansiEnabled =>
buildCast[UTF8String](_, _.toShortExact())
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toShort(result)) {
result.value.toShort
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toShort else 0.toShort)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType if ansiEnabled =>
buildCast[Long](_, t => {
val longValue = timestampToLong(t)
if (longValue == longValue.toShort) {
longValue.toShort
} else {
throw new ArithmeticException(s"Casting $t to short causes overflow")
}
})
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toShort)
case x: NumericType if ansiEnabled =>
b =>
val intValue = try {
x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
} catch {
case _: ArithmeticException =>
throw new ArithmeticException(s"Casting $b to short causes overflow")
}
if (intValue == intValue.toShort) {
intValue.toShort
} else {
throw new ArithmeticException(s"Casting $b to short causes overflow")
}
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort
}
// ByteConverter
private[this] def castToByte(from: DataType): Any => Any = from match {
case StringType if ansiEnabled =>
buildCast[UTF8String](_, _.toByteExact())
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toByte(result)) {
result.value.toByte
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toByte else 0.toByte)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType if ansiEnabled =>
buildCast[Long](_, t => {
val longValue = timestampToLong(t)
if (longValue == longValue.toByte) {
longValue.toByte
} else {
throw new ArithmeticException(s"Casting $t to byte causes overflow")
}
})
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toByte)
case x: NumericType if ansiEnabled =>
b =>
val intValue = try {
x.exactNumeric.asInstanceOf[Numeric[Any]].toInt(b)
} catch {
case _: ArithmeticException =>
throw new ArithmeticException(s"Casting $b to byte causes overflow")
}
if (intValue == intValue.toByte) {
intValue.toByte
} else {
throw new ArithmeticException(s"Casting $b to byte causes overflow")
}
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte
}
/**
* Change the precision / scale in a given decimal to those set in `decimalType` (if any),
* modifying `value` in-place and returning it if successful. If an overflow occurs, it
* either returns null or throws an exception according to the value set for
* `spark.sql.ansi.enabled`.
*
* NOTE: this modifies `value` in-place, so don't call it on external data.
*/
private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = {
if (value.changePrecision(decimalType.precision, decimalType.scale)) {
value
} else {
if (!ansiEnabled) {
null
} else {
throw new ArithmeticException(s"${value.toDebugString} cannot be represented as " +
s"Decimal(${decimalType.precision}, ${decimalType.scale}).")
}
}
}
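  // Illustrative behaviour (not part of the original source): changing Decimal(123.456) to
  // DecimalType(5, 2) succeeds and rounds to 123.46, while DecimalType(4, 2) cannot hold the
  // value, so the result is null (or an ArithmeticException when spark.sql.ansi.enabled is set).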
/**
* Create new `Decimal` with precision and scale given in `decimalType` (if any).
* If overflow occurs, if `spark.sql.ansi.enabled` is false, null is returned;
* otherwise, an `ArithmeticException` is thrown.
*/
private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =
value.toPrecision(
decimalType.precision, decimalType.scale, Decimal.ROUND_HALF_UP, !ansiEnabled)
private[this] def castToDecimal(from: DataType, target: DecimalType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try {
        // According to the benchmark test, `s.toString.trim` is much faster than `s.trim.toString`.
// Please refer to https://github.com/apache/spark/pull/26640
changePrecision(Decimal(new JavaBigDecimal(s.toString.trim)), target)
} catch {
case _: NumberFormatException =>
if (ansiEnabled) {
throw new NumberFormatException(s"invalid input syntax for type numeric: $s")
} else {
null
}
})
case BooleanType =>
buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
case DateType =>
buildCast[Int](_, d => null) // date can't cast to decimal in Hive
case TimestampType =>
// Note that we lose precision here.
buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target))
case dt: DecimalType =>
b => toPrecision(b.asInstanceOf[Decimal], target)
case t: IntegralType =>
b => changePrecision(Decimal(t.integral.asInstanceOf[Integral[Any]].toLong(b)), target)
case x: FractionalType =>
b => try {
changePrecision(Decimal(x.fractional.asInstanceOf[Fractional[Any]].toDouble(b)), target)
} catch {
case _: NumberFormatException => null
}
}
// DoubleConverter
private[this] def castToDouble(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
val doubleStr = s.toString
try doubleStr.toDouble catch {
case _: NumberFormatException =>
val d = Cast.processFloatingPointSpecialLiterals(doubleStr, false)
if(ansiEnabled && d == null) {
throw new NumberFormatException(s"invalid input syntax for type numeric: $s")
} else {
d
}
}
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1d else 0d)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t))
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toDouble(b)
}
// FloatConverter
private[this] def castToFloat(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
val floatStr = s.toString
try floatStr.toFloat catch {
case _: NumberFormatException =>
val f = Cast.processFloatingPointSpecialLiterals(floatStr, true)
if (ansiEnabled && f == null) {
throw new NumberFormatException(s"invalid input syntax for type numeric: $s")
} else {
f
}
}
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1f else 0f)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t).toFloat)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toFloat(b)
}
private[this] def castArray(fromType: DataType, toType: DataType): Any => Any = {
val elementCast = cast(fromType, toType)
// TODO: Could be faster?
buildCast[ArrayData](_, array => {
val values = new Array[Any](array.numElements())
array.foreach(fromType, (i, e) => {
if (e == null) {
values(i) = null
} else {
values(i) = elementCast(e)
}
})
new GenericArrayData(values)
})
}
private[this] def castMap(from: MapType, to: MapType): Any => Any = {
val keyCast = castArray(from.keyType, to.keyType)
val valueCast = castArray(from.valueType, to.valueType)
buildCast[MapData](_, map => {
val keys = keyCast(map.keyArray()).asInstanceOf[ArrayData]
val values = valueCast(map.valueArray()).asInstanceOf[ArrayData]
new ArrayBasedMapData(keys, values)
})
}
private[this] def castStruct(from: StructType, to: StructType): Any => Any = {
val castFuncs: Array[(Any) => Any] = from.fields.zip(to.fields).map {
case (fromField, toField) => cast(fromField.dataType, toField.dataType)
}
// TODO: Could be faster?
buildCast[InternalRow](_, row => {
val newRow = new GenericInternalRow(from.fields.length)
var i = 0
while (i < row.numFields) {
newRow.update(i,
if (row.isNullAt(i)) null else castFuncs(i)(row.get(i, from.apply(i).dataType)))
i += 1
}
newRow
})
}
private[this] def cast(from: DataType, to: DataType): Any => Any = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the codegen path.
if (DataType.equalsStructurally(from, to)) {
identity
} else if (from == NullType) {
// According to `canCast`, NullType can be casted to any type.
// For primitive types, we don't reach here because the guard of `nullSafeEval`.
// But for nested types like struct, we might reach here for nested null type field.
// We won't call the returned function actually, but returns a placeholder.
_ => throw new SparkException(s"should not directly cast from NullType to $to.")
} else {
to match {
case dt if dt == from => identity[Any]
case StringType => castToString(from)
case BinaryType => castToBinary(from)
case DateType => castToDate(from)
case decimal: DecimalType => castToDecimal(from, decimal)
case TimestampType => castToTimestamp(from)
case CalendarIntervalType => castToInterval(from)
case BooleanType => castToBoolean(from)
case ByteType => castToByte(from)
case ShortType => castToShort(from)
case IntegerType => castToInt(from)
case FloatType => castToFloat(from)
case LongType => castToLong(from)
case DoubleType => castToDouble(from)
case array: ArrayType =>
castArray(from.asInstanceOf[ArrayType].elementType, array.elementType)
case map: MapType => castMap(from.asInstanceOf[MapType], map)
case struct: StructType => castStruct(from.asInstanceOf[StructType], struct)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
identity[Any]
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
}
}
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
protected override def nullSafeEval(input: Any): Any = cast(input)
override def genCode(ctx: CodegenContext): ExprCode = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the interpreted path.
if (DataType.equalsStructurally(child.dataType, dataType)) {
child.genCode(ctx)
} else {
super.genCode(ctx)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = child.genCode(ctx)
val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
ev.copy(code = eval.code +
castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast))
}
  // The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull`
  // in the parameter list, because the returned code will be put in the null-safe evaluation region.
private[this] type CastFunction = (ExprValue, ExprValue, ExprValue) => Block
private[this] def nullSafeCastFunction(
from: DataType,
to: DataType,
ctx: CodegenContext): CastFunction = to match {
case _ if from == NullType => (c, evPrim, evNull) => code"$evNull = true;"
case _ if to == from => (c, evPrim, evNull) => code"$evPrim = $c;"
case StringType => castToStringCode(from, ctx)
case BinaryType => castToBinaryCode(from)
case DateType => castToDateCode(from, ctx)
case decimal: DecimalType => castToDecimalCode(from, decimal, ctx)
case TimestampType => castToTimestampCode(from, ctx)
case CalendarIntervalType => castToIntervalCode(from)
case BooleanType => castToBooleanCode(from)
case ByteType => castToByteCode(from, ctx)
case ShortType => castToShortCode(from, ctx)
case IntegerType => castToIntCode(from, ctx)
case FloatType => castToFloatCode(from, ctx)
case LongType => castToLongCode(from, ctx)
case DoubleType => castToDoubleCode(from, ctx)
case array: ArrayType =>
castArrayCode(from.asInstanceOf[ArrayType].elementType, array.elementType, ctx)
case map: MapType => castMapCode(from.asInstanceOf[MapType], map, ctx)
case struct: StructType => castStructCode(from.asInstanceOf[StructType], struct, ctx)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
(c, evPrim, evNull) => code"$evPrim = $c;"
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
  // Since we need to cast input expressions recursively inside complex types, such as a map's
  // key and value or a struct's fields, we need explicit names for all the variables involved in a cast.
private[this] def castCode(ctx: CodegenContext, input: ExprValue, inputIsNull: ExprValue,
result: ExprValue, resultIsNull: ExprValue, resultType: DataType, cast: CastFunction): Block = {
val javaType = JavaCode.javaType(resultType)
code"""
boolean $resultIsNull = $inputIsNull;
$javaType $result = ${CodeGenerator.defaultValue(resultType)};
if (!$inputIsNull) {
${cast(input, result, resultIsNull)}
}
"""
}
private def writeArrayToStringBuilder(
et: DataType,
array: ExprValue,
buffer: ExprValue,
ctx: CodegenContext): Block = {
val elementToStringCode = castToStringCode(et, ctx)
val funcName = ctx.freshName("elementToString")
val element = JavaCode.variable("element", et)
val elementStr = JavaCode.variable("elementStr", StringType)
val elementToStringFunc = inline"${ctx.addNewFunction(funcName,
s"""
|private UTF8String $funcName(${CodeGenerator.javaType(et)} $element) {
| UTF8String $elementStr = null;
| ${elementToStringCode(element, elementStr, null /* resultIsNull won't be used */)}
| return elementStr;
|}
""".stripMargin)}"
val loopIndex = ctx.freshVariable("loopIndex", IntegerType)
code"""
|$buffer.append("[");
|if ($array.numElements() > 0) {
| if (!$array.isNullAt(0)) {
| $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, "0")}));
| }
| for (int $loopIndex = 1; $loopIndex < $array.numElements(); $loopIndex++) {
| $buffer.append(",");
| if (!$array.isNullAt($loopIndex)) {
| $buffer.append(" ");
| $buffer.append($elementToStringFunc(${CodeGenerator.getValue(array, et, loopIndex)}));
| }
| }
|}
|$buffer.append("]");
""".stripMargin
}
private def writeMapToStringBuilder(
kt: DataType,
vt: DataType,
map: ExprValue,
buffer: ExprValue,
ctx: CodegenContext): Block = {
def dataToStringFunc(func: String, dataType: DataType) = {
val funcName = ctx.freshName(func)
val dataToStringCode = castToStringCode(dataType, ctx)
val data = JavaCode.variable("data", dataType)
val dataStr = JavaCode.variable("dataStr", StringType)
val functionCall = ctx.addNewFunction(funcName,
s"""
|private UTF8String $funcName(${CodeGenerator.javaType(dataType)} $data) {
| UTF8String $dataStr = null;
| ${dataToStringCode(data, dataStr, null /* resultIsNull won't be used */)}
| return dataStr;
|}
""".stripMargin)
inline"$functionCall"
}
val keyToStringFunc = dataToStringFunc("keyToString", kt)
val valueToStringFunc = dataToStringFunc("valueToString", vt)
val loopIndex = ctx.freshVariable("loopIndex", IntegerType)
val mapKeyArray = JavaCode.expression(s"$map.keyArray()", classOf[ArrayData])
val mapValueArray = JavaCode.expression(s"$map.valueArray()", classOf[ArrayData])
val getMapFirstKey = CodeGenerator.getValue(mapKeyArray, kt, JavaCode.literal("0", IntegerType))
val getMapFirstValue = CodeGenerator.getValue(mapValueArray, vt,
JavaCode.literal("0", IntegerType))
val getMapKeyArray = CodeGenerator.getValue(mapKeyArray, kt, loopIndex)
val getMapValueArray = CodeGenerator.getValue(mapValueArray, vt, loopIndex)
code"""
|$buffer.append("[");
|if ($map.numElements() > 0) {
| $buffer.append($keyToStringFunc($getMapFirstKey));
| $buffer.append(" ->");
| if (!$map.valueArray().isNullAt(0)) {
| $buffer.append(" ");
| $buffer.append($valueToStringFunc($getMapFirstValue));
| }
| for (int $loopIndex = 1; $loopIndex < $map.numElements(); $loopIndex++) {
| $buffer.append(", ");
| $buffer.append($keyToStringFunc($getMapKeyArray));
| $buffer.append(" ->");
| if (!$map.valueArray().isNullAt($loopIndex)) {
| $buffer.append(" ");
| $buffer.append($valueToStringFunc($getMapValueArray));
| }
| }
|}
|$buffer.append("]");
""".stripMargin
}
private def writeStructToStringBuilder(
st: Seq[DataType],
row: ExprValue,
buffer: ExprValue,
ctx: CodegenContext): Block = {
val structToStringCode = st.zipWithIndex.map { case (ft, i) =>
val fieldToStringCode = castToStringCode(ft, ctx)
val field = ctx.freshVariable("field", ft)
val fieldStr = ctx.freshVariable("fieldStr", StringType)
val javaType = JavaCode.javaType(ft)
code"""
|${if (i != 0) code"""$buffer.append(",");""" else EmptyBlock}
|if (!$row.isNullAt($i)) {
| ${if (i != 0) code"""$buffer.append(" ");""" else EmptyBlock}
|
| // Append $i field into the string buffer
| $javaType $field = ${CodeGenerator.getValue(row, ft, s"$i")};
| UTF8String $fieldStr = null;
| ${fieldToStringCode(field, fieldStr, null /* resultIsNull won't be used */)}
| $buffer.append($fieldStr);
|}
""".stripMargin
}
val writeStructCode = ctx.splitExpressions(
expressions = structToStringCode.map(_.code),
funcName = "fieldToString",
arguments = ("InternalRow", row.code) ::
(classOf[UTF8StringBuilder].getName, buffer.code) :: Nil)
code"""
|$buffer.append("[");
|$writeStructCode
|$buffer.append("]");
""".stripMargin
}
private[this] def castToStringCode(from: DataType, ctx: CodegenContext): CastFunction = {
from match {
case BinaryType =>
(c, evPrim, evNull) => code"$evPrim = UTF8String.fromBytes($c);"
case DateType =>
val df = JavaCode.global(
ctx.addReferenceObj("dateFormatter", dateFormatter),
dateFormatter.getClass)
(c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString(${df}.format($c));"""
case TimestampType =>
val tf = JavaCode.global(
ctx.addReferenceObj("timestampFormatter", timestampFormatter),
timestampFormatter.getClass)
(c, evPrim, evNull) => code"""$evPrim = UTF8String.fromString(
org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($tf, $c));"""
case CalendarIntervalType =>
(c, evPrim, _) => code"""$evPrim = UTF8String.fromString($c.toString());"""
case ArrayType(et, _) =>
(c, evPrim, evNull) => {
val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
val writeArrayElemCode = writeArrayToStringBuilder(et, c, buffer, ctx)
code"""
|$bufferClass $buffer = new $bufferClass();
|$writeArrayElemCode;
|$evPrim = $buffer.build();
""".stripMargin
}
case MapType(kt, vt, _) =>
(c, evPrim, evNull) => {
val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
val writeMapElemCode = writeMapToStringBuilder(kt, vt, c, buffer, ctx)
code"""
|$bufferClass $buffer = new $bufferClass();
|$writeMapElemCode;
|$evPrim = $buffer.build();
""".stripMargin
}
case StructType(fields) =>
(c, evPrim, evNull) => {
val row = ctx.freshVariable("row", classOf[InternalRow])
val buffer = ctx.freshVariable("buffer", classOf[UTF8StringBuilder])
val bufferClass = JavaCode.javaType(classOf[UTF8StringBuilder])
val writeStructCode = writeStructToStringBuilder(fields.map(_.dataType), row, buffer, ctx)
code"""
|InternalRow $row = $c;
|$bufferClass $buffer = new $bufferClass();
|$writeStructCode
|$evPrim = $buffer.build();
""".stripMargin
}
case pudt: PythonUserDefinedType => castToStringCode(pudt.sqlType, ctx)
case udt: UserDefinedType[_] =>
val udtRef = JavaCode.global(ctx.addReferenceObj("udt", udt), udt.sqlType)
(c, evPrim, evNull) => {
code"$evPrim = UTF8String.fromString($udtRef.deserialize($c).toString());"
}
case _ =>
(c, evPrim, evNull) => code"$evPrim = UTF8String.fromString(String.valueOf($c));"
}
}
private[this] def castToBinaryCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
code"$evPrim = $c.getBytes();"
case _: IntegralType =>
(c, evPrim, evNull) =>
code"$evPrim = ${NumberConverter.getClass.getName.stripSuffix("$")}.toBinary($c);"
}
private[this] def castToDateCode(
from: DataType,
ctx: CodegenContext): CastFunction = {
def getZoneId() = {
val zoneIdClass = classOf[ZoneId]
JavaCode.global(
ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
zoneIdClass)
}
from match {
case StringType =>
val intOpt = ctx.freshVariable("intOpt", classOf[Option[Integer]])
val zid = getZoneId()
(c, evPrim, evNull) =>
code"""
scala.Option<Integer> $intOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDate($c, $zid);
if ($intOpt.isDefined()) {
$evPrim = ((Integer) $intOpt.get()).intValue();
} else {
$evNull = true;
}
"""
case TimestampType =>
val zid = getZoneId()
(c, evPrim, evNull) =>
code"""$evPrim =
org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToEpochDays($c, $zid);"""
case _ =>
(c, evPrim, evNull) => code"$evNull = true;"
}
}
private[this] def changePrecision(d: ExprValue, decimalType: DecimalType,
evPrim: ExprValue, evNull: ExprValue, canNullSafeCast: Boolean): Block = {
if (canNullSafeCast) {
code"""
|$d.changePrecision(${decimalType.precision}, ${decimalType.scale});
|$evPrim = $d;
""".stripMargin
} else {
val overflowCode = if (!ansiEnabled) {
s"$evNull = true;"
} else {
s"""
|throw new ArithmeticException($d.toDebugString() + " cannot be represented as " +
| "Decimal(${decimalType.precision}, ${decimalType.scale}).");
""".stripMargin
}
code"""
|if ($d.changePrecision(${decimalType.precision}, ${decimalType.scale})) {
| $evPrim = $d;
|} else {
| $overflowCode
|}
""".stripMargin
}
}
private[this] def castToDecimalCode(
from: DataType,
target: DecimalType,
ctx: CodegenContext): CastFunction = {
val tmp = ctx.freshVariable("tmpDecimal", classOf[Decimal])
val canNullSafeCast = Cast.canNullSafeCastToDecimal(from, target)
from match {
case StringType =>
(c, evPrim, evNull) =>
val handleException = if (ansiEnabled) {
s"""throw new NumberFormatException("invalid input syntax for type numeric: $c");"""
} else {
s"$evNull =true;"
}
code"""
try {
Decimal $tmp = Decimal.apply(new java.math.BigDecimal($c.toString().trim()));
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
} catch (java.lang.NumberFormatException e) {
$handleException
}
"""
case BooleanType =>
(c, evPrim, evNull) =>
code"""
Decimal $tmp = $c ? Decimal.apply(1) : Decimal.apply(0);
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
"""
case DateType =>
// date can't cast to decimal in Hive
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType =>
// Note that we lose precision here.
(c, evPrim, evNull) =>
code"""
Decimal $tmp = Decimal.apply(
scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)}));
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
"""
case DecimalType() =>
(c, evPrim, evNull) =>
code"""
Decimal $tmp = $c.clone();
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
"""
case x: IntegralType =>
(c, evPrim, evNull) =>
code"""
Decimal $tmp = Decimal.apply((long) $c);
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
"""
case x: FractionalType =>
// All other numeric types can be represented precisely as Doubles
(c, evPrim, evNull) =>
code"""
try {
Decimal $tmp = Decimal.apply(scala.math.BigDecimal.valueOf((double) $c));
${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast)}
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
}
}
private[this] def castToTimestampCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val zoneIdClass = classOf[ZoneId]
val zid = JavaCode.global(
ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
zoneIdClass)
val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]])
(c, evPrim, evNull) =>
code"""
scala.Option<Long> $longOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $zid);
if ($longOpt.isDefined()) {
$evPrim = ((Long) $longOpt.get()).longValue();
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? 1L : 0L;"
case _: IntegralType =>
(c, evPrim, evNull) => code"$evPrim = ${longToTimeStampCode(c)};"
case DateType =>
val zoneIdClass = classOf[ZoneId]
val zid = JavaCode.global(
ctx.addReferenceObj("zoneId", zoneId, zoneIdClass.getName),
zoneIdClass)
(c, evPrim, evNull) =>
code"""$evPrim =
org.apache.spark.sql.catalyst.util.DateTimeUtils.epochDaysToMicros($c, $zid);"""
case DecimalType() =>
(c, evPrim, evNull) => code"$evPrim = ${decimalToTimestampCode(c)};"
case DoubleType =>
(c, evPrim, evNull) =>
code"""
if (Double.isNaN($c) || Double.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * $MICROS_PER_SECOND);
}
"""
case FloatType =>
(c, evPrim, evNull) =>
code"""
if (Float.isNaN($c) || Float.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * $MICROS_PER_SECOND);
}
"""
}
private[this] def castToIntervalCode(from: DataType): CastFunction = from match {
case StringType =>
val util = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
(c, evPrim, evNull) =>
code"""$evPrim = $util.safeStringToInterval($c);
if(${evPrim} == null) {
${evNull} = true;
}
""".stripMargin
}
private[this] def decimalToTimestampCode(d: ExprValue): Block = {
val block = inline"new java.math.BigDecimal($MICROS_PER_SECOND)"
code"($d.toBigDecimal().bigDecimal().multiply($block)).longValue()"
}
private[this] def longToTimeStampCode(l: ExprValue): Block = code"$l * (long)$MICROS_PER_SECOND"
private[this] def timestampToLongCode(ts: ExprValue): Block =
code"java.lang.Math.floorDiv($ts, $MICROS_PER_SECOND)"
private[this] def timestampToDoubleCode(ts: ExprValue): Block =
code"$ts / (double)$MICROS_PER_SECOND"
private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
case StringType =>
val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
(c, evPrim, evNull) =>
code"""
if ($stringUtils.isTrueString($c)) {
$evPrim = true;
} else if ($stringUtils.isFalseString($c)) {
$evPrim = false;
} else {
$evNull = true;
}
"""
case TimestampType =>
(c, evPrim, evNull) => code"$evPrim = $c != 0;"
case DateType =>
// Hive would return null when cast from date to boolean
(c, evPrim, evNull) => code"$evNull = true;"
case DecimalType() =>
(c, evPrim, evNull) => code"$evPrim = !$c.isZero();"
case n: NumericType =>
(c, evPrim, evNull) => code"$evPrim = $c != 0;"
}
private[this] def castTimestampToIntegralTypeCode(
ctx: CodegenContext,
integralType: String): CastFunction = {
if (ansiEnabled) {
val longValue = ctx.freshName("longValue")
(c, evPrim, evNull) =>
code"""
long $longValue = ${timestampToLongCode(c)};
if ($longValue == ($integralType) $longValue) {
$evPrim = ($integralType) $longValue;
} else {
throw new ArithmeticException("Casting " + $c + " to $integralType causes overflow");
}
"""
} else {
(c, evPrim, evNull) => code"$evPrim = ($integralType) ${timestampToLongCode(c)};"
}
}
private[this] def castDecimalToIntegralTypeCode(
ctx: CodegenContext,
integralType: String): CastFunction = {
if (ansiEnabled) {
(c, evPrim, evNull) => code"$evPrim = $c.roundTo${integralType.capitalize}();"
} else {
(c, evPrim, evNull) => code"$evPrim = $c.to${integralType.capitalize}();"
}
}
private[this] def castIntegralTypeToIntegralTypeExactCode(integralType: String): CastFunction = {
assert(ansiEnabled)
(c, evPrim, evNull) =>
code"""
if ($c == ($integralType) $c) {
$evPrim = ($integralType) $c;
} else {
throw new ArithmeticException("Casting " + $c + " to $integralType causes overflow");
}
"""
}
private[this] def lowerAndUpperBound(
fractionType: String,
integralType: String): (String, String) = {
assert(fractionType == "float" || fractionType == "double")
val typeIndicator = fractionType.charAt(0)
val (min, max) = integralType.toLowerCase(Locale.ROOT) match {
case "long" => (Long.MinValue, Long.MaxValue)
case "int" => (Int.MinValue, Int.MaxValue)
case "short" => (Short.MinValue, Short.MaxValue)
case "byte" => (Byte.MinValue, Byte.MaxValue)
}
(min.toString + typeIndicator, max.toString + typeIndicator)
}
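  // For example (not part of the original source), lowerAndUpperBound("float", "int") returns
  // ("-2147483648f", "2147483647f"), i.e. the int range spelled as float literals.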
private[this] def castFractionToIntegralTypeCode(
fractionType: String,
integralType: String): CastFunction = {
assert(ansiEnabled)
val (min, max) = lowerAndUpperBound(fractionType, integralType)
val mathClass = classOf[Math].getName
// When casting floating values to integral types, Spark uses the method `Numeric.toInt`
    // or `Numeric.toLong` directly. For positive floating values, it is equivalent to `Math.floor`;
// for negative floating values, it is equivalent to `Math.ceil`.
// So, we can use the condition `Math.floor(x) <= upperBound && Math.ceil(x) >= lowerBound`
// to check if the floating value x is in the range of an integral type after rounding.
(c, evPrim, evNull) =>
code"""
if ($mathClass.floor($c) <= $max && $mathClass.ceil($c) >= $min) {
$evPrim = ($integralType) $c;
} else {
throw new ArithmeticException("Casting " + $c + " to $integralType causes overflow");
}
"""
}
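  // Illustrative behaviour (not part of the original source), for fractionType "double" and
  // integralType "int": 3.7 passes the bound check (floor(3.7) <= Int.MaxValue and
  // ceil(3.7) >= Int.MinValue) and is truncated to 3, whereas 3.0e10 fails the upper bound and
  // the generated code throws an ArithmeticException.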
private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType if ansiEnabled =>
(c, evPrim, evNull) => code"$evPrim = $c.toByteExact();"
case StringType =>
val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
(c, evPrim, evNull) =>
code"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toByte($wrapper)) {
$evPrim = (byte) $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? (byte) 1 : (byte) 0;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType => castTimestampToIntegralTypeCode(ctx, "byte")
case DecimalType() => castDecimalToIntegralTypeCode(ctx, "byte")
case _: ShortType | _: IntegerType | _: LongType if ansiEnabled =>
castIntegralTypeToIntegralTypeExactCode("byte")
case _: FloatType if ansiEnabled =>
castFractionToIntegralTypeCode("float", "byte")
case _: DoubleType if ansiEnabled =>
castFractionToIntegralTypeCode("double", "byte")
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (byte) $c;"
}
private[this] def castToShortCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType if ansiEnabled =>
(c, evPrim, evNull) => code"$evPrim = $c.toShortExact();"
case StringType =>
val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
(c, evPrim, evNull) =>
code"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toShort($wrapper)) {
$evPrim = (short) $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? (short) 1 : (short) 0;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType => castTimestampToIntegralTypeCode(ctx, "short")
case DecimalType() => castDecimalToIntegralTypeCode(ctx, "short")
case _: IntegerType | _: LongType if ansiEnabled =>
castIntegralTypeToIntegralTypeExactCode("short")
case _: FloatType if ansiEnabled =>
castFractionToIntegralTypeCode("float", "short")
case _: DoubleType if ansiEnabled =>
castFractionToIntegralTypeCode("double", "short")
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (short) $c;"
}
private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType if ansiEnabled =>
(c, evPrim, evNull) => code"$evPrim = $c.toIntExact();"
case StringType =>
val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
(c, evPrim, evNull) =>
code"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toInt($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? 1 : 0;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType => castTimestampToIntegralTypeCode(ctx, "int")
case DecimalType() => castDecimalToIntegralTypeCode(ctx, "int")
case _: LongType if ansiEnabled => castIntegralTypeToIntegralTypeExactCode("int")
case _: FloatType if ansiEnabled =>
castFractionToIntegralTypeCode("float", "int")
case _: DoubleType if ansiEnabled =>
castFractionToIntegralTypeCode("double", "int")
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (int) $c;"
}
private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType if ansiEnabled =>
(c, evPrim, evNull) => code"$evPrim = $c.toLongExact();"
case StringType =>
val wrapper = ctx.freshVariable("longWrapper", classOf[UTF8String.LongWrapper])
(c, evPrim, evNull) =>
code"""
UTF8String.LongWrapper $wrapper = new UTF8String.LongWrapper();
if ($c.toLong($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? 1L : 0L;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => code"$evPrim = (long) ${timestampToLongCode(c)};"
case DecimalType() => castDecimalToIntegralTypeCode(ctx, "long")
case _: FloatType if ansiEnabled =>
castFractionToIntegralTypeCode("float", "long")
case _: DoubleType if ansiEnabled =>
castFractionToIntegralTypeCode("double", "long")
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (long) $c;"
}
private[this] def castToFloatCode(from: DataType, ctx: CodegenContext): CastFunction = {
from match {
case StringType =>
val floatStr = ctx.freshVariable("floatStr", StringType)
(c, evPrim, evNull) =>
val handleNull = if (ansiEnabled) {
s"""throw new NumberFormatException("invalid input syntax for type numeric: $c");"""
} else {
s"$evNull = true;"
}
code"""
final String $floatStr = $c.toString();
try {
$evPrim = Float.valueOf($floatStr);
} catch (java.lang.NumberFormatException e) {
final Float f = (Float) Cast.processFloatingPointSpecialLiterals($floatStr, true);
if (f == null) {
$handleNull
} else {
$evPrim = f.floatValue();
}
}
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? 1.0f : 0.0f;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => code"$evPrim = (float) (${timestampToDoubleCode(c)});"
case DecimalType() =>
(c, evPrim, evNull) => code"$evPrim = $c.toFloat();"
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (float) $c;"
}
}
private[this] def castToDoubleCode(from: DataType, ctx: CodegenContext): CastFunction = {
from match {
case StringType =>
val doubleStr = ctx.freshVariable("doubleStr", StringType)
(c, evPrim, evNull) =>
val handleNull = if (ansiEnabled) {
s"""throw new NumberFormatException("invalid input syntax for type numeric: $c");"""
} else {
s"$evNull = true;"
}
code"""
final String $doubleStr = $c.toString();
try {
$evPrim = Double.valueOf($doubleStr);
} catch (java.lang.NumberFormatException e) {
final Double d = (Double) Cast.processFloatingPointSpecialLiterals($doubleStr, false);
if (d == null) {
$handleNull
} else {
$evPrim = d.doubleValue();
}
}
"""
case BooleanType =>
(c, evPrim, evNull) => code"$evPrim = $c ? 1.0d : 0.0d;"
case DateType =>
(c, evPrim, evNull) => code"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => code"$evPrim = ${timestampToDoubleCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => code"$evPrim = $c.toDouble();"
case x: NumericType =>
(c, evPrim, evNull) => code"$evPrim = (double) $c;"
}
}
private[this] def castArrayCode(
fromType: DataType, toType: DataType, ctx: CodegenContext): CastFunction = {
val elementCast = nullSafeCastFunction(fromType, toType, ctx)
val arrayClass = JavaCode.javaType(classOf[GenericArrayData])
val fromElementNull = ctx.freshVariable("feNull", BooleanType)
val fromElementPrim = ctx.freshVariable("fePrim", fromType)
val toElementNull = ctx.freshVariable("teNull", BooleanType)
val toElementPrim = ctx.freshVariable("tePrim", toType)
val size = ctx.freshVariable("n", IntegerType)
val j = ctx.freshVariable("j", IntegerType)
val values = ctx.freshVariable("values", classOf[Array[Object]])
val javaType = JavaCode.javaType(fromType)
(c, evPrim, evNull) =>
code"""
final int $size = $c.numElements();
final Object[] $values = new Object[$size];
for (int $j = 0; $j < $size; $j ++) {
if ($c.isNullAt($j)) {
$values[$j] = null;
} else {
boolean $fromElementNull = false;
$javaType $fromElementPrim =
${CodeGenerator.getValue(c, fromType, j)};
${castCode(ctx, fromElementPrim,
fromElementNull, toElementPrim, toElementNull, toType, elementCast)}
if ($toElementNull) {
$values[$j] = null;
} else {
$values[$j] = $toElementPrim;
}
}
}
$evPrim = new $arrayClass($values);
"""
}
private[this] def castMapCode(from: MapType, to: MapType, ctx: CodegenContext): CastFunction = {
val keysCast = castArrayCode(from.keyType, to.keyType, ctx)
val valuesCast = castArrayCode(from.valueType, to.valueType, ctx)
val mapClass = JavaCode.javaType(classOf[ArrayBasedMapData])
val keys = ctx.freshVariable("keys", ArrayType(from.keyType))
val convertedKeys = ctx.freshVariable("convertedKeys", ArrayType(to.keyType))
val convertedKeysNull = ctx.freshVariable("convertedKeysNull", BooleanType)
val values = ctx.freshVariable("values", ArrayType(from.valueType))
val convertedValues = ctx.freshVariable("convertedValues", ArrayType(to.valueType))
val convertedValuesNull = ctx.freshVariable("convertedValuesNull", BooleanType)
(c, evPrim, evNull) =>
code"""
final ArrayData $keys = $c.keyArray();
final ArrayData $values = $c.valueArray();
${castCode(ctx, keys, FalseLiteral,
convertedKeys, convertedKeysNull, ArrayType(to.keyType), keysCast)}
${castCode(ctx, values, FalseLiteral,
convertedValues, convertedValuesNull, ArrayType(to.valueType), valuesCast)}
$evPrim = new $mapClass($convertedKeys, $convertedValues);
"""
}
private[this] def castStructCode(
from: StructType, to: StructType, ctx: CodegenContext): CastFunction = {
val fieldsCasts = from.fields.zip(to.fields).map {
case (fromField, toField) => nullSafeCastFunction(fromField.dataType, toField.dataType, ctx)
}
val tmpResult = ctx.freshVariable("tmpResult", classOf[GenericInternalRow])
val rowClass = JavaCode.javaType(classOf[GenericInternalRow])
val tmpInput = ctx.freshVariable("tmpInput", classOf[InternalRow])
val fieldsEvalCode = fieldsCasts.zipWithIndex.map { case (cast, i) =>
val fromFieldPrim = ctx.freshVariable("ffp", from.fields(i).dataType)
val fromFieldNull = ctx.freshVariable("ffn", BooleanType)
val toFieldPrim = ctx.freshVariable("tfp", to.fields(i).dataType)
val toFieldNull = ctx.freshVariable("tfn", BooleanType)
val fromType = JavaCode.javaType(from.fields(i).dataType)
val setColumn = CodeGenerator.setColumn(tmpResult, to.fields(i).dataType, i, toFieldPrim)
code"""
boolean $fromFieldNull = $tmpInput.isNullAt($i);
if ($fromFieldNull) {
$tmpResult.setNullAt($i);
} else {
$fromType $fromFieldPrim =
${CodeGenerator.getValue(tmpInput, from.fields(i).dataType, i.toString)};
${castCode(ctx, fromFieldPrim,
fromFieldNull, toFieldPrim, toFieldNull, to.fields(i).dataType, cast)}
if ($toFieldNull) {
$tmpResult.setNullAt($i);
} else {
$setColumn;
}
}
"""
}
val fieldsEvalCodes = ctx.splitExpressions(
expressions = fieldsEvalCode.map(_.code),
funcName = "castStruct",
arguments = ("InternalRow", tmpInput.code) :: (rowClass.code, tmpResult.code) :: Nil)
(input, result, resultIsNull) =>
code"""
final $rowClass $tmpResult = new $rowClass(${fieldsCasts.length});
final InternalRow $tmpInput = $input;
$fieldsEvalCodes
$result = $tmpResult;
"""
}
override def sql: String = dataType match {
// HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this
// type of casting can only be introduced by the analyzer, and can be omitted when converting
// back to SQL query string.
case _: ArrayType | _: MapType | _: StructType => child.sql
case _ => s"CAST(${child.sql} AS ${dataType.sql})"
}
}
/**
* Cast the child expression to the target data type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
@ExpressionDescription(
usage = "_FUNC_(expr AS type) - Casts the value `expr` to the target data type `type`.",
examples = """
Examples:
> SELECT _FUNC_('10' as int);
10
""")
case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
extends CastBase {
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override protected val ansiEnabled: Boolean = SQLConf.get.ansiEnabled
}
/**
* Cast the child expression to the target data type as per ANSI SQL standard.
* A runtime exception will be thrown on casting failure such as converting an out-of-range value
* to an integral type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
case class AnsiCast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
extends CastBase {
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override protected val ansiEnabled: Boolean = true
}
/**
* Cast the child expression to the target data type, but will throw error if the cast might
 * truncate, e.g. long -> int, timestamp -> date.
*/
case class UpCast(child: Expression, dataType: DataType, walkedTypePath: Seq[String] = Nil)
extends UnaryExpression with Unevaluable {
override lazy val resolved = false
}
|
matthewfranglen/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
|
Scala
|
mit
| 68,320 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.catnap
import cats.effect.{ContextShift, IO}
import cats.implicits._
import minitest.TestSuite
import monix.execution.internal.Platform
import monix.execution.schedulers.TestScheduler
import scala.concurrent.{ExecutionContext, Promise}
import scala.util.{Random, Success}
object SemaphoreSuite extends TestSuite[TestScheduler] {
def setup() = TestScheduler()
def tearDown(env: TestScheduler): Unit =
assert(env.state.tasks.isEmpty, "should not have tasks left to execute")
implicit def contextShift(implicit ec: ExecutionContext): ContextShift[IO] =
IO.contextShift(ec)
test("simple greenLight") { implicit s =>
val semaphore = Semaphore.unsafe[IO](provisioned = 4)
val future = semaphore.withPermit(IO.shift *> IO(100)).unsafeToFuture()
assertEquals(semaphore.available.unsafeRunSync(), 3)
assert(!future.isCompleted, "!future.isCompleted")
s.tick()
assertEquals(future.value, Some(Success(100)))
assertEquals(semaphore.available.unsafeRunSync(), 4)
}
test("should back-pressure when full") { implicit s =>
val semaphore = Semaphore.unsafe[IO](provisioned = 2)
val p1 = Promise[Int]()
val f1 = semaphore.withPermit(IO.fromFuture(IO.pure(p1.future))).unsafeToFuture()
val p2 = Promise[Int]()
val f2 = semaphore.withPermit(IO.fromFuture(IO.pure(p2.future))).unsafeToFuture()
s.tick()
assertEquals(semaphore.available.unsafeRunSync(), 0)
val f3 = semaphore.withPermit(IO(3)).unsafeToFuture()
s.tick()
assertEquals(f3.value, None)
assertEquals(semaphore.available.unsafeRunSync(), 0)
p1.success(1); s.tick()
assertEquals(semaphore.available.unsafeRunSync(), 1)
assertEquals(f1.value, Some(Success(1)))
assertEquals(f3.value, Some(Success(3)))
p2.success(2); s.tick()
assertEquals(f2.value, Some(Success(2)))
assertEquals(semaphore.available.unsafeRunSync(), 2)
}
testAsync("real async test of many futures") { _ =>
// Executing Futures on the global scheduler!
import scala.concurrent.ExecutionContext.Implicits.global
val semaphore = Semaphore.unsafe[IO](provisioned = 20)
val count = if (Platform.isJVM) 10000 else 1000
val futures = for (i <- 0 until count) yield
semaphore.withPermit(IO.shift *> IO(i))
val sum =
futures.toList.parSequence.map(_.sum).unsafeToFuture()
// Asynchronous result, to be handled by Minitest
for (result <- sum) yield {
assertEquals(result, count * (count - 1) / 2)
}
}
test("await for release of all active and pending permits") { implicit s =>
val semaphore = Semaphore.unsafe[IO](provisioned = 2)
val p1 = semaphore.acquire.unsafeToFuture()
assertEquals(p1.value, Some(Success(())))
val p2 = semaphore.acquire.unsafeToFuture()
assertEquals(p2.value, Some(Success(())))
val p3 = semaphore.acquire.unsafeToFuture()
assert(!p3.isCompleted, "!p3.isCompleted")
val p4 = semaphore.acquire.unsafeToFuture()
assert(!p4.isCompleted, "!p4.isCompleted")
val all1 = semaphore.awaitAvailable(2).unsafeToFuture()
assert(!all1.isCompleted, "!all1.isCompleted")
semaphore.release.unsafeToFuture(); s.tick()
assert(!all1.isCompleted, "!all1.isCompleted")
semaphore.release.unsafeToFuture(); s.tick()
assert(!all1.isCompleted, "!all1.isCompleted")
semaphore.release.unsafeToFuture(); s.tick()
assert(!all1.isCompleted, "!all1.isCompleted")
semaphore.release.unsafeToFuture(); s.tick()
assert(all1.isCompleted, "all1.isCompleted")
// REDO
val p5 = semaphore.acquire.unsafeToFuture()
assert(p5.isCompleted, "p5.isCompleted")
val all2 = semaphore.awaitAvailable(2).unsafeToFuture()
s.tick(); assert(!all2.isCompleted, "!all2.isCompleted")
semaphore.release.unsafeToFuture(); s.tick()
assert(all2.isCompleted, "all2.isCompleted")
// Already completed
val all3 = semaphore.awaitAvailable(2).unsafeToFuture(); s.tick()
assert(all3.isCompleted, "all3.isCompleted")
}
test("acquire is cancelable") { implicit s =>
val semaphore = Semaphore.unsafe[IO](provisioned = 2)
val p1 = semaphore.acquire.unsafeToFuture()
assert(p1.isCompleted, "p1.isCompleted")
val p2 = semaphore.acquire.unsafeToFuture()
assert(p2.isCompleted, "p2.isCompleted")
val p3 = Promise[Unit]
val cancel = semaphore.acquire.unsafeRunCancelable(_ => p3.success(()))
assert(!p3.isCompleted, "!p3.isCompleted")
assertEquals(semaphore.available.unsafeRunSync(), 0)
cancel.unsafeRunSync()
semaphore.release.unsafeToFuture(); s.tick()
assertEquals(semaphore.available.unsafeRunSync(), 1)
semaphore.release.unsafeRunSync(); s.tick()
assertEquals(semaphore.available.unsafeRunSync(), 2)
s.tick()
assertEquals(semaphore.available.unsafeRunSync(), 2)
assert(!p3.isCompleted, "!p3.isCompleted")
}
testAsync("withPermitN / awaitAvailable concurrent test") { _ =>
// Executing Futures on the global scheduler!
import scala.concurrent.ExecutionContext.Implicits.global
val task = repeatTest(10) {
val available = 6
val semaphore = Semaphore.unsafe[IO](provisioned = available)
val count = if (Platform.isJVM) 10000 else 50
val allReleased = Promise[Unit]()
val task = semaphore.withPermit(IO.suspend {
allReleased.completeWith(semaphore.awaitAvailable(available).unsafeToFuture())
val futures = for (i <- 0 until count) yield {
semaphore.withPermitN(Math.floorMod(Random.nextInt(), 3) + 1) {
IO(1).map { x =>
assert(!allReleased.isCompleted, s"!allReleased.isCompleted (index $i)")
x
}
}
}
futures.toList.parSequence.map { x => x.sum }
})
for (r <- task; _ <- IO.fromFuture(IO.pure(allReleased.future))) yield {
assertEquals(r, count)
assertEquals(semaphore.available.unsafeRunSync(), available)
}
}
task.unsafeToFuture()
}
test("withPermitN has FIFO priority") { implicit s =>
val sem = Semaphore.unsafe[IO](provisioned = 0)
val f1 = sem.withPermitN(3)(IO(1 + 1)).unsafeToFuture()
assertEquals(f1.value, None)
val f2 = sem.withPermitN(4)(IO(1 + 1)).unsafeToFuture()
assertEquals(f2.value, None)
sem.releaseN(2).unsafeRunAsyncAndForget(); s.tick()
assertEquals(f1.value, None)
assertEquals(f2.value, None)
sem.releaseN(1).unsafeRunAsyncAndForget(); s.tick()
assertEquals(f1.value, Some(Success(2)))
assertEquals(f2.value, None)
sem.releaseN(1).unsafeRunAsyncAndForget(); s.tick()
assertEquals(f2.value, Some(Success(2)))
}
test("withPermitN is cancelable (1)") { implicit s =>
val sem = Semaphore.unsafe[IO](provisioned = 0)
assertEquals(sem.count.unsafeRunSync(), 0)
val p1 = Promise[Int]()
val cancel = sem.withPermitN(3)(IO(1 + 1)).unsafeRunCancelable(r => p1.complete(r.toTry))
val f2 = sem.withPermitN(3)(IO(1 + 1)).unsafeToFuture()
assertEquals(p1.future.value, None)
assertEquals(f2.value, None)
assertEquals(sem.count.unsafeRunSync(), -6)
cancel.unsafeRunAsyncAndForget(); s.tick()
assertEquals(sem.count.unsafeRunSync(), -3)
sem.releaseN(3).unsafeRunAsyncAndForget()
s.tick()
assertEquals(p1.future.value, None)
assertEquals(f2.value, Some(Success(2)))
}
test("withPermitN is cancelable (2)") { implicit s =>
val sem = Semaphore.unsafe[IO](provisioned = 1)
val p1 = Promise[Int]()
val cancel = sem.withPermitN(3)(IO(1 + 1)).unsafeRunCancelable(r => p1.complete(r.toTry))
val f2 = sem.withPermitN(3)(IO(1 + 1)).unsafeToFuture()
assertEquals(sem.count.unsafeRunSync(), -5)
sem.releaseN(1).unsafeRunAsyncAndForget()
assertEquals(sem.count.unsafeRunSync(), -4)
assertEquals(p1.future.value, None)
assertEquals(f2.value, None)
cancel.unsafeRunAsyncAndForget(); s.tick()
assertEquals(sem.count.unsafeRunSync(), -1)
sem.releaseN(1).unsafeRunAsyncAndForget()
s.tick()
assertEquals(p1.future.value, None)
assertEquals(f2.value, Some(Success(2)))
}
def repeatTest(n: Int)(f: => IO[Unit]): IO[Unit] =
if (n > 0) f.flatMap(_ => repeatTest(n - 1)(f))
else IO.unit
}
|
Wogan/monix
|
monix-catnap/shared/src/test/scala/monix/catnap/SemaphoreSuite.scala
|
Scala
|
apache-2.0
| 8,915 |
object ch8_10 {
import ch8.Gen
  case class SGen[A](forSize: Int => Gen[A]) // should be `case class SGen[+A](forSize: Int => Gen[A])`, but the REPL rejects that with "error: covariant type A occurs in invariant position in type => Int => ch8.Gen[A] of value forSize"
def unsized[A](g: Gen[A]): SGen[A] = SGen(_ => g)
}
import ch8_10._
/*
from repl you can test typing:
:load src/main/scala/fpinscala/ch6/RNG.scala
:load src/main/scala/fpinscala/ch6/State.scala
:load src/main/scala/fpinscala/ch8/Gen.scala
:load src/main/scala/fpinscala/ch8/Exercise10.scala
*/
|
rucka/fpinscala
|
src/main/scala/fpinscala/ch8/Exercise10.scala
|
Scala
|
gpl-2.0
| 565 |
// Copyright (C) 2014 Fehmi Can Saglam (@fehmicans) and contributors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package reactivemongo.extensions.json.dao
import scala.concurrent.ExecutionContext.Implicits.global
import reactivemongo.extensions.json.model.Person
import reactivemongo.api.DB
import scala.concurrent.Future
import reactivemongo.extensions.json.dsl.JsonDsl
class PersonJsonDao(_db: DB)
extends JsonDao[Person, String](() => _db, "persons")
with JsonDsl {
def findByName(name: String): Future[Option[Person]] = {
findOne("name" $eq name)
}
}
|
fehmicansaglam/reactivemongo-extensions
|
json/src/test/scala/dao/PersonJsonDao.scala
|
Scala
|
apache-2.0
| 1,202 |
package com.monsanto.arch.kamon.prometheus.metric
import com.monsanto.arch.kamon.prometheus.util.strictlyIncreasing
/** Generic trait for all possible metric values. */
sealed trait MetricValue
object MetricValue {
/** Counters are represented as doubles, despite being integral. They must be non-negative. */
case class Counter(count: Double) extends MetricValue {
require(count >= 0)
}
  /** A histogram consists of a series of buckets, a sample count, and a sample sum. The buckets must be in strictly
    * increasing order by level. Also, the count in each bucket is cumulative and should therefore also be strictly
    * increasing (i.e. no bucket is empty), with the possible exception of the last bucket, which is special.
* The last bucket must have the level +∞ and should have a cumulative count that is the same as the total count
* for the whole histogram. The sample count and sum should match the values from the histogram.
*/
case class Histogram(buckets: Seq[Bucket], sampleCount: Long, sampleSum: Double) extends MetricValue {
require(strictlyIncreasing(buckets.map(_.upperBound)), "Buckets must be in strictly increasing order by level")
require(strictlyIncreasing(buckets.dropRight(1).map(_.cumulativeCount)), "Every finite bucket must add to the count")
require(buckets.nonEmpty, "Buckets must not be empty, a (∞, 0) bucket indicates no values")
require(buckets.last.upperBound == Double.PositiveInfinity, "The last bucket must have the value +∞")
require(buckets.takeRight(2) match {
case Bucket(Double.PositiveInfinity, x) :: Nil ⇒ x == 0
case Bucket(_, x) :: Bucket(Double.PositiveInfinity, y) :: Nil ⇒ x == y
}, "∞ bucket must not modify cumulative count")
require(sampleCount == buckets.last.cumulativeCount,
s"The sampleCount ($sampleCount) must match the count from the buckets (${buckets.last.cumulativeCount})")
val bucketSum = {
buckets.dropRight(1).foldLeft(0.0 → 0L) { (sumAndLastCount, bucket) ⇒
val (lastSum, lastCount) = sumAndLastCount
val thisCount = bucket.cumulativeCount - lastCount
val sum = lastSum + bucket.upperBound * thisCount
(sum, bucket.cumulativeCount)
}._1
}
require(sampleSum == bucketSum || (math.abs(sampleSum - bucketSum) < 5 * math.ulp(bucketSum)) ,
s"The sampleSum ($sampleSum) must match the sum derived from the buckets ($bucketSum)")
}
/** Provides a factory function for creating new instances. */
object Histogram {
/** Creates a new value directly from a Kamon snapshot. */
def apply(snapshot: kamon.metric.instrument.Histogram.Snapshot): Histogram = {
val rawValues = {
val builder = Seq.newBuilder[(Double, Long)]
snapshot.recordsIterator.foreach(r ⇒ builder += r.level.toDouble → r.count)
builder.result()
}
val sumValues = rawValues match {
case Nil ⇒ Nil
case h :: t ⇒ t.scanLeft(h)((lhs, rhs) ⇒ (rhs._1, lhs._2 + rhs._2))
}
val finiteBuckets = sumValues.map(b ⇒ MetricValue.Bucket(b._1.toDouble, b._2))
val sum = rawValues.foldLeft(0.0) { (sum, pair) ⇒ sum + pair._1 * pair._2 }
val count = finiteBuckets.lastOption.map(_.cumulativeCount).getOrElse(0L)
val allBuckets = finiteBuckets :+ MetricValue.Bucket(Double.PositiveInfinity, count)
Histogram(allBuckets, count, sum)
}
}
/** A bucket in a histogram. */
case class Bucket(upperBound: Double, cumulativeCount: Long)
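  // Illustrative example (not part of the original file): observations at level 1.0 (twice) and
  // level 3.0 (once) would be encoded, with cumulative bucket counts, as
  //   Histogram(Seq(Bucket(1.0, 2), Bucket(3.0, 3), Bucket(Double.PositiveInfinity, 3)),
  //             sampleCount = 3, sampleSum = 5.0)
  // where sampleSum = 2 * 1.0 + 1 * 3.0 = 5.0, satisfying every `require` above.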
}
|
MonsantoCo/kamon-prometheus
|
library/src/main/scala/com/monsanto/arch/kamon/prometheus/metric/MetricValue.scala
|
Scala
|
bsd-3-clause
| 3,535 |
package org.sireum.pilarform.lexer
import org.sireum.pilarform.lexer.CharConstants.SU
import org.sireum.pilarform.util.Utils._
object UnicodeEscapeDecoder {
/**
   * Decode unicode escapes of the form "\u0061" in the given text.
* If forgiveErrors is true, then no exception will be thrown on malformed escapes.
*/
@throws(classOf[PilarLexerException])
def decode(text: String, forgiveErrors: Boolean = false): String =
new UnicodeEscapeReader(text, forgiveErrors).mkString
}
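// Usage sketch (illustrative, not part of the original file):
//   UnicodeEscapeDecoder.decode("\\u0061bc") == "abc"
//   UnicodeEscapeDecoder.decode("\\uZZZZ", forgiveErrors = true) == " " // malformed escape replaced by a space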
trait IUnicodeEscapeReader extends Iterator[Char] {
val text: String
/**
* @return true if all the available characters have been read.
*/
def isEof: Boolean
/**
* @return the next character from the post-decoded text
*/
@throws(classOf[PilarLexerException])
def read(): Char
/**
* @return the corresponding unicode escape sequence if the last character read was decoded, otherwise None.
*/
def unicodeEscapeOpt: Option[String]
def next() = read()
def hasNext = !isEof
/**
* Return a clone of this reader initialised to the current state
*/
def copy: IUnicodeEscapeReader
}
class UnicodeEscapeReader(val text: String, forgiveErrors: Boolean = false) extends IUnicodeEscapeReader {
private var pos: Int = 0
private var unicodeEscapeSequence: String = null
/**
   * To distinguish cases like "\\u" from unicode escape sequences.
*/
private var consecutiveBackslashCount = 0
def copy: UnicodeEscapeReader = {
val reader = new UnicodeEscapeReader(text, forgiveErrors)
reader.pos = pos
reader.unicodeEscapeSequence = unicodeEscapeSequence
reader.consecutiveBackslashCount = consecutiveBackslashCount
reader
}
def isEof = pos >= text.length
@throws(classOf[PilarLexerException])
def read(): Char = {
val ch = consumeNextCharacter()
unicodeEscapeSequence = null
    if (ch == '\\')
if (nextChar == 'u' && consecutiveBackslashCount % 2 == 0) {
consecutiveBackslashCount = 0
readUnicodeChar(pos - 1)
} else {
consecutiveBackslashCount += 1
ch
}
else {
consecutiveBackslashCount = 0
ch
}
}
def unicodeEscapeOpt: Option[String] = Option(unicodeEscapeSequence)
private def consumeNextCharacter(): Char = {
val result = safeGet(pos)
pos += 1
result
}
private def nextChar = safeGet(pos)
private def safeGet(pos: Int): Char = if (pos >= text.length) SU else text(pos)
private def readUnicodeChar(startPos: Int): Char = {
this.unicodeEscapeSequence = consumeUnicodeEscape()
val decodedChar = decodeUnicodeChar(unicodeEscapeSequence takeRight 4 toList, unicodeEscapeSequence, startPos)
decodedChar
}
private def consumeUnicodeEscape(): String = {
val sb = new StringBuilder
    sb.append('\\')
// Repeating u's are allowed in Unicode escapes (bizarrely enough):
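    // e.g. (illustrative) "\\uu0061" decodes to the same character as "\\u0061".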
do sb.append(consumeNextCharacter())
while (nextChar == 'u')
for (n ← 1 to 4)
sb.append(consumeNextCharacter())
sb.toString
}
private def decodeUnicodeChar(digits: List[Char], unicodeEscapeSequence: String, startPos: Int): Char = {
val List(digit1, digit2, digit3, digit4) = digits.map(digit2int(_, base = 16))
if (digit1 < 0 || digit2 < 0 || digit3 < 0 || digit4 < 0)
if (forgiveErrors)
' '
else {
val (line, column) = lineAndColumn(startPos)
throw new PilarLexerException("[" + line + ":" + column + "] error in unicode escape: '" + unicodeEscapeSequence + "'")
}
else
(digit1 << 12 | digit2 << 8 | digit3 << 4 | digit4).toChar
}
private def lineAndColumn(offset: Int): (Int, Int) = {
var line = 1
var column = 1
for (i ← 0 until offset) {
      if (text(i) == '\n') {
line += 1
column = 1
} else
column += 1
}
(line, column)
}
}
class NoUnicodeEscapeReader(val text: String) extends IUnicodeEscapeReader {
private var pos = 0
def copy = {
val reader = new NoUnicodeEscapeReader(text)
reader.pos = pos
reader
}
def isEof: Boolean = pos >= text.length
def read(): Char = {
val result = if (isEof) SU else text(pos)
pos += 1
result
}
def unicodeEscapeOpt: Option[String] = None
}
|
fgwei/pilarform
|
pilarform/src/main/scala/org/sireum/pilarform/lexer/UnicodeEscapeReader.scala
|
Scala
|
epl-1.0
| 4,258 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import scala.collection.JavaConverters._
import org.apache.spark.internal.Logging
/**
* Thread audit for test suites.
*/
trait ThreadAudit extends Logging {
val threadWhiteList = Set(
/**
* Netty related internal threads.
     * These are excluded because their lifecycle is handled by Netty itself
     * and Spark has no explicit effect on them.
*/
"netty.*",
/**
* Netty related internal threads.
* A Single-thread singleton EventExecutor inside netty which creates such threads.
     * These are excluded because their lifecycle is handled by Netty itself
     * and Spark has no explicit effect on them.
*/
"globalEventExecutor.*",
/**
* Netty related internal threads.
* Checks if a thread is alive periodically and runs a task when a thread dies.
     * These are excluded because their lifecycle is handled by Netty itself
     * and Spark has no explicit effect on them.
*/
"threadDeathWatcher.*",
/**
* During [[SparkContext]] creation [[org.apache.spark.rpc.netty.NettyRpcEnv]]
* creates event loops. One is wrapped inside
* [[org.apache.spark.network.server.TransportServer]]
* the other one is inside [[org.apache.spark.network.client.TransportClient]].
* Calling [[SparkContext#stop]] will shut down the thread pool of this event group
* asynchronously. In each case proper stopping is checked manually.
*/
"rpc-client.*",
"rpc-server.*",
/**
* During [[org.apache.spark.network.TransportContext]] construction a separate event loop could
* be created for handling ChunkFetchRequest.
* Calling [[org.apache.spark.network.TransportContext#close]] will shut down the thread pool
* of this event group asynchronously. In each case proper stopping is checked manually.
*/
"shuffle-chunk-fetch-handler.*",
/**
* During [[SparkContext]] creation BlockManager creates event loops. One is wrapped inside
* [[org.apache.spark.network.server.TransportServer]]
* the other one is inside [[org.apache.spark.network.client.TransportClient]].
* Calling [[SparkContext#stop]] will shut down the thread pool of this event group
* asynchronously. In each case proper stopping is checked manually.
*/
"shuffle-client.*",
"shuffle-server.*",
/**
* Global cleaner thread that manage statistics data references of Hadoop filesystems.
     * This is excluded because its lifecycle is handled by Hadoop and Spark has no explicit
     * effect on it.
*/
"org.apache.hadoop.fs.FileSystem\\\\$Statistics\\\\$StatisticsDataReferenceCleaner",
/**
* A global thread pool for broadcast exchange executions.
*/
"broadcast-exchange.*",
/**
* A thread started by JRE to support safe parallel execution of waitFor() and exitStatus()
* methods to forked subprocesses.
*/
"process reaper"
)
private var threadNamesSnapshot: Set[String] = Set.empty
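  // Usage sketch (illustrative, not part of the original file): a suite typically calls
  // doThreadPreAudit() from beforeAll() and doThreadPostAudit() from afterAll(), so that any
  // non-whitelisted thread still running after the suite is reported as a possible leak.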
protected def doThreadPreAudit(): Unit = {
threadNamesSnapshot = runningThreadNames()
}
protected def doThreadPostAudit(): Unit = {
val shortSuiteName = this.getClass.getName.replaceAll("org.apache.spark", "o.a.s")
if (threadNamesSnapshot.nonEmpty) {
val remainingThreadNames = runningThreadNames().diff(threadNamesSnapshot)
.filterNot { s => threadWhiteList.exists(s.matches(_)) }
if (remainingThreadNames.nonEmpty) {
logWarning(s"\\n\\n===== POSSIBLE THREAD LEAK IN SUITE $shortSuiteName, " +
s"thread names: ${remainingThreadNames.mkString(", ")} =====\\n")
}
} else {
logWarning("\\n\\n===== THREAD AUDIT POST ACTION CALLED " +
s"WITHOUT PRE ACTION IN SUITE $shortSuiteName =====\\n")
}
}
private def runningThreadNames(): Set[String] = {
Thread.getAllStackTraces.keySet().asScala.map(_.getName).toSet
}
}
|
ConeyLiu/spark
|
core/src/test/scala/org/apache/spark/ThreadAudit.scala
|
Scala
|
apache-2.0
| 4,730 |
package mountainrangepvp.core
import com.badlogic.gdx.math.{MathUtils, Vector2}
/**
* Holds the point the camera is centred on.
*/
case class Camera(centre: Vector2) {
val RunningLookAhead = 170
val AimingLookAhead = 70
val VerticalShift = +50
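  // Illustrative note (not in the original file): the look-ahead distance is interpolated between
  // AimingLookAhead and RunningLookAhead from the horizontal speed (|velocity.x| / 400, clamped to
  // [0, 1]), and the camera centre then eases 15% of the way towards the target each update.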
def centreOn(player: PlayerEntity) = {
val target = player.position.cpy()
target.add(0, VerticalShift)
val lookAhead = MathUtils.lerp(AimingLookAhead, RunningLookAhead,
(player.velocity.x / 400).abs.max(0).min(1))
target.add(player.aim.cpy().scl(lookAhead))
val nextCentre = centre.cpy()
nextCentre.lerp(target, 0.15f)
Camera(nextCentre)
}
}
|
thorinii/MountainRangePvP
|
src/main/scala/mountainrangepvp/core/Camera.scala
|
Scala
|
mit
| 667 |
package se.lu.nateko.cp.meta.onto.reasoner
import scala.jdk.CollectionConverters.IteratorHasAsScala
import org.semanticweb.owlapi.model.OWLClass
import org.semanticweb.owlapi.model.OWLProperty
import org.semanticweb.owlapi.model.OWLClassExpression
import org.semanticweb.owlapi.model.OWLOntology
import org.semanticweb.owlapi.model.OWLDataProperty
import org.semanticweb.owlapi.model.OWLObjectProperty
class HermitBasedReasoner(ontology: OWLOntology) extends BaseReasoner(ontology){
val reasoner = new org.semanticweb.HermiT.ReasonerFactory().createReasoner(ontology)
override def close(): Unit = {
reasoner.dispose()
}
override def getSubClasses(owlClass: OWLClass, direct: Boolean): Seq[OWLClass] =
reasoner.getSubClasses(owlClass, direct).entities.iterator.asScala.toSeq
override def getTopLevelClasses: Seq[OWLClass] = reasoner
.getSubClasses(factory.getOWLThing, true)
.entities.iterator.asScala
.toSeq
override def isSubClass(subClass: OWLClassExpression, superClass: OWLClassExpression): Boolean = {
val axiom = factory.getOWLSubClassOfAxiom(subClass, superClass)
reasoner.isEntailed(axiom)
}
override protected def getParentProps(owlProp: OWLProperty): Seq[OWLProperty] = owlProp match {
case dp: OWLDataProperty => reasoner.getSuperDataProperties(dp, false).entities.iterator.asScala.toIndexedSeq
case op: OWLObjectProperty =>
reasoner.getSuperObjectProperties(op, false).entities.iterator.asScala.collect{
case op: OWLObjectProperty => op
}.toIndexedSeq
case _ => Nil //ignoring annotation properties
}
}
|
ICOS-Carbon-Portal/meta
|
src/main/scala/se/lu/nateko/cp/meta/onto/reasoner/HermitBasedReasoner.scala
|
Scala
|
gpl-3.0
| 1,563 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.rdbms
import slamdata.Predef._
import quasar.{RenderTree, RenderTreeT, fp}
import quasar.Planner.PlannerError
import quasar.common._
import quasar.common.PhaseResult._
import quasar.connector.{BackendModule, DefaultAnalyzeModule}
import quasar.contrib.pathy.APath
import quasar.contrib.scalaz.{MonadReader_, MonadTell_}
import quasar.effect.MonoSeq
import quasar.fp.free._
import quasar.fs.FileSystemError._
import quasar.fs.MonadFsErr
import quasar.fs.mount.BackendDef.{DefErrT, DefinitionError}
import quasar.fs.mount.ConnectionUri
import quasar.physical.rdbms.fs._
import quasar.physical.rdbms.common.Config
import quasar.physical.rdbms.planner.Planner
import quasar.physical.rdbms.planner.sql.SqlExpr
import quasar.physical.rdbms.common._
import quasar.physical.rdbms.jdbc.JdbcConnectionInfo
import quasar.qscript.analysis._
import quasar.qscript.{ExtractPath, Injectable, QScriptCore, QScriptTotal}
import quasar.qscript.rewrites.{Optimize, Unicoalesce, Unirewrite}
import scala.Predef.implicitly
import doobie.imports.Transactor
import doobie.hikari.hikaritransactor.HikariTransactor
import matryoshka.{BirecursiveT, Delay, EqualT, RecursiveT, ShowT}
import matryoshka.implicits._
import matryoshka.data._
import scalaz._
import Scalaz._
import scalaz.concurrent.Task
trait Rdbms extends BackendModule with RdbmsReadFile with RdbmsWriteFile with RdbmsManageFile with RdbmsQueryFile with Interpreter with DefaultAnalyzeModule {
type Repr = Fix[SqlExpr]
type QS[T[_[_]]] = model.QS[T]
type Eff[A] = model.Eff[A]
type M[A] = model.M[A]
type Config = common.Config
val chunkSize = 512
implicit class LiftEffBackend[F[_], A](m: F[A])(implicit I: F :<: Eff) {
val liftB: Backend[A] = lift(m).into[Eff].liftB
}
import Cost._
import Cardinality._
def MonoSeqM: MonoSeq[M] = MonoSeq[M]
def CardinalityQSM: Cardinality[QSM[Fix, ?]] = Cardinality[QSM[Fix, ?]]
def CostQSM: Cost[QSM[Fix, ?]] = Cost[QSM[Fix, ?]]
def FunctorQSM[T[_[_]]] = Functor[QSM[T, ?]]
def TraverseQSM[T[_[_]]] = Traverse[QSM[T, ?]]
def DelayRenderTreeQSM[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT] =
implicitly[Delay[RenderTree, QSM[T, ?]]]
def ExtractPathQSM[T[_[_]]: RecursiveT] = ExtractPath[QSM[T, ?], APath]
def QSCoreInject[T[_[_]]] = implicitly[QScriptCore[T, ?] :<: QSM[T, ?]]
def MonadM = Monad[M]
def UnirewriteT[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT] = implicitly[Unirewrite[T, QS[T]]]
def UnicoalesceCap[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT] = Unicoalesce.Capture[T, QS[T]]
implicit def qScriptToQScriptTotal[T[_[_]]]: Injectable.Aux[
QSM[T, ?],
QScriptTotal[T, ?]] = quasar.physical.rdbms.qScriptToQScriptTotal[T]
override def optimize[T[_[_]]: BirecursiveT: EqualT: ShowT]: QSM[T, T[QSM[T, ?]]] => QSM[T, T[QSM[T, ?]]] = {
val O = new Optimize[T]
O.optimize(fp.reflNT[QSM[T, ?]])
}
def parseConfig(uri: ConnectionUri): DefErrT[Task, Config] =
EitherT(Task.delay(parseConnectionUri(uri).map(Config.apply)))
def compile(cfg: Config): DefErrT[Task, (M ~> Task, Task[Unit])] = {
val xa = HikariTransactor[Task](
cfg.connInfo.driverClassName,
cfg.connInfo.url,
cfg.connInfo.userName,
cfg.connInfo.password.getOrElse("")
)
(interp(xa) ∘ {
case (i, close) => (foldMapNT[Eff, Task](i), close)
}).liftM[DefErrT]
}
lazy val MR = MonadReader_[Backend, Config]
lazy val MRT = quasar.effect.Read.monadReader_[Transactor[Task], Eff]
lazy val ME = MonadFsErr[Backend]
lazy val MT = MonadTell_[Backend, PhaseResults]
// TODO[scalaz]: Shadow the scalaz.Monad.monadMTMAB SI-2712 workaround
import EitherT.eitherTMonad
def plan[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT](
cp: T[QSM[T, ?]]): Backend[Repr] = {
val planner = Planner[T, EitherT[Free[Eff, ?], PlannerError, ?], QSM[T, ?]]
for {
plan <- ME.unattempt(
cp.cataM(planner.plan)
.bimap(qscriptPlanningFailed(_), _.convertTo[Repr])
.run
.liftB)
_ <- MT.tell(
Vector(detail("SQL AST", RenderTreeT[Fix].render(plan).shows)))
} yield plan
}
  def parseConnectionUri(uri: ConnectionUri): DefinitionError \/ JdbcConnectionInfo
}
|
jedesah/Quasar
|
rdbms/src/main/scala/quasar/physical/rdbms/Rdbms.scala
|
Scala
|
apache-2.0
| 5,122 |
package org.dele.text.maen.matchers
import org.dele.text.maen.AtomPropMatcherLib._
import org.dele.text.maen.TestHelper._
import SubMatchCheckerLib._
import TMatcher._
import org.dele.text.maen.TInput
import org.dele.text.maen.test.TestAtom._
import org.dele.text.maen.test.TestInput._
import org.scalatest._
import org.scalatest.testng.TestNGSuite
import org.testng.annotations.Test
/**
* Created by jiaji on 2016-10-04.
*/
class ANOBMatcherTest extends TestNGSuite with ShouldMatchers {
val input:TInput = fromAtomArrayEng(IndexedSeq(
textAtom("Now"),
FBI,
textAtom("and"),
Microsoft,
textAtom("announce")
))
implicit val checkerLib = StaticSubMatchCheckerLib
val orgCompanyMatcherId = "OrgCompany"
val entityMatcher = fromAtomMatcher(E(EmptyRegexDict, Array("Company", "Organization")), EmptyCheckerIds, Option(orgCompanyMatcherId))
val andMatcher = fromAtomMatcher(FExact("and"))
val announceMatcher = fromAtomMatcher(FExact("announce"))
@Test
def t1 = {
val mm = MatcherManager.create
val anobMatcherId = "anob"
val anobMatcherId2 = "anob2"
val andentMatcherId = "andEnt"
val entAnnounceMatcherId = "entAnnounce"
val andEntMatcher = matchersOrderedAllPositive(Seq(andMatcher, entityMatcher), List(ListNGramId), Option(andentMatcherId))
val entAnnounceMatcher = matchersOrderedAllPositive(Seq(entityMatcher, announceMatcher), List(ListNGramId), Option(entAnnounceMatcherId))
val anobMatcher = matchersNonOverlap(andEntMatcher, andMatcher, Option(anobMatcherId))
val anobMatcher2 = matchersNonOverlap(andEntMatcher, announceMatcher, Option(anobMatcherId2))
mm.add(entityMatcher)
mm.add(andMatcher)
mm.add(andEntMatcher)
mm.add(entAnnounceMatcher)
mm.add(anobMatcher)
mm.add(anobMatcher2)
val resultPool = mm.m(input, StaticSubMatchCheckerLib, MatcherManager.EmptyMIdFilters)
val orgCompanies = resultPool.query(orgCompanyMatcherId)
orgCompanies.size shouldBe(2)
val andentMatches = resultPool.query(andentMatcherId)
andentMatches.size shouldBe(1)
val anobMatches = resultPool.query(anobMatcherId)
anobMatches.size shouldBe(0)
val anobMatches2 = resultPool.query(anobMatcherId2)
anobMatches2.size shouldBe(1)
}
}
|
new2scala/text-util
|
maen/src/test/scala/org/dele/text/maen/matchers/ANOBMatcherTest.scala
|
Scala
|
apache-2.0
| 2,265 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.responsiblepeople
import play.api.libs.json.{Json, Reads, Writes}
final case class ApprovalFlags(
hasAlreadyPassedFitAndProper: Option[Boolean] = None,
hasAlreadyPaidApprovalCheck: Option[Boolean] = None
) {
def isComplete() = {
hasAlreadyPassedFitAndProper.isDefined &
hasAlreadyPaidApprovalCheck.isDefined
}
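  // e.g. (illustrative) ApprovalFlags(Some(true), None).isComplete() == false, while
  // ApprovalFlags(Some(true), Some(false)).isComplete() == true.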
}
object ApprovalFlags {
implicit lazy val reads: Reads[ApprovalFlags] = {
import play.api.libs.functional.syntax._
import play.api.libs.json._
(
(__ \\ "hasAlreadyPassedFitAndProper").readNullable[Boolean] and
(__ \\ "hasAlreadyPaidApprovalCheck").readNullable[Boolean]
) (ApprovalFlags.apply _)
}
implicit lazy val writes: Writes[ApprovalFlags] = {
import play.api.libs.functional.syntax._
import play.api.libs.json._
(
(__ \\ "hasAlreadyPassedFitAndProper").writeNullable[Boolean] and
(__ \\ "hasAlreadyPaidApprovalCheck").writeNullable[Boolean]
) (unlift(ApprovalFlags.unapply))
}
implicit val format = Json.format[ApprovalFlags]
}
|
hmrc/amls-frontend
|
app/models/responsiblepeople/ApprovalFlags.scala
|
Scala
|
apache-2.0
| 1,747 |
/*
* Copyright (c) <2015-2016>, see CONTRIBUTORS
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ch.usi.inf.l3.sana.oberon0.symbols
import ch.usi.inf.l3.sana
import sana.tiny.symbols.Symbol
import sana.tiny.modifiers.Ops.noflags
import sana.tiny.types.Type
import sana.tiny.names.Name
import sana.ooj.names.StdNames
import sana.ooj.modifiers.Ops._
import sana.ooj.types.TypeUtils
import sana.calcj.types.{IntType, BooleanType}
import sana.primj.types.VoidType
import sana.tiny.symbols.{TypeSymbol, TermSymbol}
import sana.primj.symbols.{ProgramSymbol, MethodSymbol,
VariableSymbol, ScopeSymbol,
VoidSymbol}
trait SymbolUtils extends sana.arrooj.symbols.SymbolUtils {
/**
* Returns the enclosing module of a given symbol
*
* @param symbol the symbol to return its enclosing module
*/
def enclosingModule(symbol: Option[Symbol]): Option[Symbol] =
symbol.flatMap {
case sym: ModuleSymbol => Some(sym)
case sym => enclosingModule(sym.owner)
}
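  // Illustrative example (not in the original file): for a symbol declared inside a nested scope of
  // MODULE M, enclosingModule walks the owner chain until it reaches M's ModuleSymbol, and returns
  // None if no ModuleSymbol is found on the chain.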
  /** @see [[sana.arrooj.symbols.SymbolUtils]] */
override def getSymbol(t: Type): Option[Symbol] = t match {
case BooleanType => Some(BooleanSymbol)
case IntType => Some(IntSymbol)
case VoidType => Some(VoidSymbol)
case _ => None
}
  /** @see [[sana.arrooj.symbols.standardDefinitions]] */
override def standardDefinitions: Set[Symbol] = Set(
VoidSymbol,
BooleanSymbol,
IntSymbol
)
}
object SymbolUtils extends SymbolUtils
|
amanjpro/languages-a-la-carte
|
oberon0/src/main/scala/symbols/SymbolUtils.scala
|
Scala
|
bsd-3-clause
| 3,066 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
object Product20 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](x: Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]): Option[Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] =
Some(x)
}
/** Product20 is a cartesian product of 20 components.
*
* @since 2.3
*/
trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20] extends Product {
/**
* The arity of this product.
* @return 20
*/
override def productArity = 20
/**
   * Returns the n-th projection of this product if 0 <= n < arity,
   * otherwise throws an `IndexOutOfBoundsException`.
*
* @param n number of the projection to be returned
* @return same as _(n+1)
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
case 3 => _4
case 4 => _5
case 5 => _6
case 6 => _7
case 7 => _8
case 8 => _9
case 9 => _10
case 10 => _11
case 11 => _12
case 12 => _13
case 13 => _14
case 14 => _15
case 15 => _16
case 16 => _17
case 17 => _18
case 18 => _19
case 19 => _20
case _ => throw new IndexOutOfBoundsException(n.toString())
}
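  // Illustrative example (not in the original file): for any Tuple20 value t,
  // t.productElement(0) == t._1 and t.productElement(19) == t._20.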
/** projection of this product */
def _1: T1
/** projection of this product */
def _2: T2
/** projection of this product */
def _3: T3
/** projection of this product */
def _4: T4
/** projection of this product */
def _5: T5
/** projection of this product */
def _6: T6
/** projection of this product */
def _7: T7
/** projection of this product */
def _8: T8
/** projection of this product */
def _9: T9
/** projection of this product */
def _10: T10
/** projection of this product */
def _11: T11
/** projection of this product */
def _12: T12
/** projection of this product */
def _13: T13
/** projection of this product */
def _14: T14
/** projection of this product */
def _15: T15
/** projection of this product */
def _16: T16
/** projection of this product */
def _17: T17
/** projection of this product */
def _18: T18
/** projection of this product */
def _19: T19
/** projection of this product */
def _20: T20
}
|
cran/rkafkajars
|
java/scala/Product20.scala
|
Scala
|
apache-2.0
| 3,072 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.optim.aggregator
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg.{BLAS, Vector, Vectors}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.mllib.util.MLlibTestSparkContext
class HingeAggregatorSuite extends SparkFunSuite with MLlibTestSparkContext {
import DifferentiableLossAggregatorSuite.getClassificationSummarizers
@transient var instances: Array[Instance] = _
@transient var instancesConstantFeature: Array[Instance] = _
@transient var instancesConstantFeatureFiltered: Array[Instance] = _
override def beforeAll(): Unit = {
super.beforeAll()
instances = Array(
Instance(0.0, 0.1, Vectors.dense(1.0, 2.0)),
Instance(1.0, 0.5, Vectors.dense(1.5, 1.0)),
Instance(0.0, 0.3, Vectors.dense(4.0, 0.5))
)
instancesConstantFeature = Array(
Instance(0.0, 0.1, Vectors.dense(1.0, 2.0)),
Instance(1.0, 0.5, Vectors.dense(1.0, 1.0)),
Instance(1.0, 0.3, Vectors.dense(1.0, 0.5))
)
instancesConstantFeatureFiltered = Array(
Instance(0.0, 0.1, Vectors.dense(2.0)),
Instance(1.0, 0.5, Vectors.dense(1.0)),
Instance(2.0, 0.3, Vectors.dense(0.5))
)
}
/** Get summary statistics for some data and create a new HingeAggregator. */
private def getNewAggregator(
instances: Array[Instance],
coefficients: Vector,
fitIntercept: Boolean): HingeAggregator = {
val (featuresSummarizer, ySummarizer) =
DifferentiableLossAggregatorSuite.getClassificationSummarizers(instances)
val featuresStd = featuresSummarizer.variance.toArray.map(math.sqrt)
val bcFeaturesStd = spark.sparkContext.broadcast(featuresStd)
val bcCoefficients = spark.sparkContext.broadcast(coefficients)
new HingeAggregator(bcFeaturesStd, fitIntercept)(bcCoefficients)
}
test("aggregator add method input size") {
val coefArray = Array(1.0, 2.0)
val interceptArray = Array(2.0)
val agg = getNewAggregator(instances, Vectors.dense(coefArray ++ interceptArray),
fitIntercept = true)
withClue("HingeAggregator features dimension must match coefficients dimension") {
intercept[IllegalArgumentException] {
agg.add(Instance(1.0, 1.0, Vectors.dense(2.0)))
}
}
}
test("negative weight") {
val coefArray = Array(1.0, 2.0)
val interceptArray = Array(2.0)
val agg = getNewAggregator(instances, Vectors.dense(coefArray ++ interceptArray),
fitIntercept = true)
withClue("HingeAggregator does not support negative instance weights") {
intercept[IllegalArgumentException] {
agg.add(Instance(1.0, -1.0, Vectors.dense(2.0, 1.0)))
}
}
}
test("check sizes") {
val rng = new scala.util.Random
val numFeatures = instances.head.features.size
val coefWithIntercept = Vectors.dense(Array.fill(numFeatures + 1)(rng.nextDouble))
val coefWithoutIntercept = Vectors.dense(Array.fill(numFeatures)(rng.nextDouble))
val aggIntercept = getNewAggregator(instances, coefWithIntercept, fitIntercept = true)
val aggNoIntercept = getNewAggregator(instances, coefWithoutIntercept,
fitIntercept = false)
instances.foreach(aggIntercept.add)
instances.foreach(aggNoIntercept.add)
assert(aggIntercept.gradient.size === numFeatures + 1)
assert(aggNoIntercept.gradient.size === numFeatures)
}
test("check correctness") {
val coefArray = Array(1.0, 2.0)
val intercept = 1.0
val numFeatures = instances.head.features.size
val (featuresSummarizer, _) = getClassificationSummarizers(instances)
val featuresStd = featuresSummarizer.variance.toArray.map(math.sqrt)
val weightSum = instances.map(_.weight).sum
val agg = getNewAggregator(instances, Vectors.dense(coefArray ++ Array(intercept)),
fitIntercept = true)
instances.foreach(agg.add)
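    // Reference formula used below (illustrative comment, not in the original file): per-instance
    // hinge loss is max(0, 1 - (2 * label - 1) * margin) * weight, where
    // margin = dot(coef / featuresStd, features) + intercept; the aggregator reports the
    // weighted mean loss and gradient.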
// compute the loss
val stdCoef = coefArray.indices.map(i => coefArray(i) / featuresStd(i)).toArray
val lossSum = instances.map { case Instance(l, w, f) =>
val margin = BLAS.dot(Vectors.dense(stdCoef), f) + intercept
val labelScaled = 2 * l - 1.0
if (1.0 > labelScaled * margin) {
(1.0 - labelScaled * margin) * w
} else {
0.0
}
}.sum
val loss = lossSum / weightSum
// compute the gradients
val gradientCoef = new Array[Double](numFeatures)
var gradientIntercept = 0.0
instances.foreach { case Instance(l, w, f) =>
val margin = BLAS.dot(f, Vectors.dense(coefArray)) + intercept
if (1.0 > (2 * l - 1.0) * margin) {
gradientCoef.indices.foreach { i =>
gradientCoef(i) += f(i) * -(2 * l - 1.0) * w / featuresStd(i)
}
gradientIntercept += -(2 * l - 1.0) * w
}
}
val gradient = Vectors.dense((gradientCoef ++ Array(gradientIntercept)).map(_ / weightSum))
assert(loss ~== agg.loss relTol 0.01)
assert(gradient ~== agg.gradient relTol 0.01)
}
test("check with zero standard deviation") {
val binaryCoefArray = Array(1.0, 2.0)
val intercept = 1.0
val aggConstantFeatureBinary = getNewAggregator(instancesConstantFeature,
Vectors.dense(binaryCoefArray ++ Array(intercept)), fitIntercept = true)
instancesConstantFeature.foreach(aggConstantFeatureBinary.add)
val aggConstantFeatureBinaryFiltered = getNewAggregator(instancesConstantFeatureFiltered,
Vectors.dense(binaryCoefArray ++ Array(intercept)), fitIntercept = true)
instancesConstantFeatureFiltered.foreach(aggConstantFeatureBinaryFiltered.add)
// constant features should not affect gradient
assert(aggConstantFeatureBinary.gradient(0) === 0.0)
assert(aggConstantFeatureBinary.gradient(1) == aggConstantFeatureBinaryFiltered.gradient(0))
}
}
|
sahilTakiar/spark
|
mllib/src/test/scala/org/apache/spark/ml/optim/aggregator/HingeAggregatorSuite.scala
|
Scala
|
apache-2.0
| 6,603 |
package fringe
import chisel3._
import chisel3.util._
/**
* Counter: 1-dimensional counter. Counts upto 'max', each time incrementing
* by 'stride', beginning at zero.
* @param w: Word width
*/
class Counter(val w: Int) extends Module {
val io = IO(new Bundle {
val max = Input(UInt(w.W))
val stride = Input(UInt(w.W))
val out = Output(UInt(w.W))
val next = Output(UInt(w.W))
val last = Output(Bool())
val reset = Input(Bool())
val enable = Input(Bool())
val saturate = Input(Bool())
val done = Output(Bool())
})
val reg = Module(new FF(UInt(w.W)))
val init = 0.U(w.W)
reg.io.init := init
reg.io.enable := io.reset | io.enable
val count = Cat(0.U(1.W), reg.io.out)
val newval = count + io.stride
val isMax = newval >= io.max
val next = Mux(isMax, Mux(io.saturate, count, init), newval)
when (io.reset) {
reg.io.in := init
} .otherwise {
reg.io.in := next
}
io.last := isMax
io.out := count
io.next := next
io.done := io.enable & isMax
}
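/*
 * Behaviour sketch (illustrative, not part of the original file): with max = 10 and stride = 3 the
 * count steps through 0, 3, 6, 9; on the cycle where the count reads 9 (and enable is high) `done`
 * is asserted, and the next value wraps to 0, or holds at 9 when `saturate` is high.
 */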
class CounterReg(val w: Int) extends Module {
val io = IO(new Bundle {
val max = Input(UInt(w.W))
val stride = Input(UInt(w.W))
val out = Output(UInt(w.W))
val reset = Input(Bool())
val enable = Input(Bool())
val saturate = Input(Bool())
val done = Output(Bool())
})
// Register the inputs
val maxReg = Module(new FF(UInt(w.W)))
maxReg.io.enable := true.B
maxReg.io.in := io.max
val max = maxReg.io.out
val strideReg = Module(new FF(UInt(w.W)))
strideReg.io.enable := true.B
strideReg.io.in := io.stride
val stride = strideReg.io.out
val rstReg = Module(new FF(Bool()))
rstReg.io.enable := true.B
rstReg.io.in := io.reset
val rst = rstReg.io.out
val enableReg = Module(new FF(Bool()))
enableReg.io.enable := true.B
enableReg.io.in := io.enable
val enable = enableReg.io.out
val saturateReg = Module(new FF(Bool()))
saturateReg.io.enable := true.B
saturateReg.io.in := io.saturate
val saturate = saturateReg.io.out
// Instantiate counter
val counter = Module(new Counter(w))
counter.io.max := max
counter.io.stride := stride
counter.io.enable := enable
counter.io.reset := rst
counter.io.enable := enable
counter.io.saturate := saturate
// Register outputs
val outReg = Module(new FF(UInt(w.W)))
outReg.io.enable := true.B
outReg.io.in := counter.io.out
io.out := outReg.io.out
val doneReg = Module(new FF(Bool()))
doneReg.io.enable := true.B
doneReg.io.in := counter.io.done
io.done := doneReg.io.out
}
|
stanford-ppl/spatial-lang
|
spatial/core/resources/chiselgen/template-level/fringeHW/Counter.scala
|
Scala
|
mit
| 2,583 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import com.clearspring.analytics.stream.cardinality.HyperLogLog
import org.apache.spark.sql.types._
import org.apache.spark.sql.catalyst.trees
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.util.collection.OpenHashSet
abstract class AggregateExpression extends Expression {
self: Product =>
/**
* Creates a new instance that can be used to compute this aggregate expression for a group
   * of input rows.
*/
def newInstance(): AggregateFunction
/**
   * [[AggregateExpression.eval]] should never be invoked because [[AggregateExpression]]s are
* replaced with a physical aggregate operator at runtime.
*/
override def eval(input: Row = null): EvaluatedType =
throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")
}
/**
* Represents an aggregation that has been rewritten to be performed in two steps.
*
* @param finalEvaluation an aggregate expression that evaluates to same final result as the
* original aggregation.
* @param partialEvaluations A sequence of [[NamedExpression]]s that can be computed on partial
* data sets and are required to compute the `finalEvaluation`.
*/
case class SplitEvaluation(
finalEvaluation: Expression,
partialEvaluations: Seq[NamedExpression])
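// Example (mirroring Count.asPartial below, for illustration): COUNT(x) is rewritten so
// each partition computes Alias(Count(x), "PartialCount")(), and the final evaluation is
// Coalesce(Seq(Sum(partialCount.toAttribute), Literal(0L))) over those partial counts.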
/**
* An [[AggregateExpression]] that can be partially computed without seeing all relevant tuples.
* These partial evaluations can then be combined to compute the actual answer.
*/
abstract class PartialAggregate extends AggregateExpression {
self: Product =>
/**
* Returns a [[SplitEvaluation]] that computes this aggregation using partial aggregation.
*/
def asPartial: SplitEvaluation
}
/**
* A specific implementation of an aggregate function. Used to wrap a generic
* [[AggregateExpression]] with an algorithm that will be used to compute one specific result.
*/
abstract class AggregateFunction
extends AggregateExpression with Serializable with trees.LeafNode[Expression] {
self: Product =>
override type EvaluatedType = Any
/** Base should return the generic aggregate expression that this function is computing */
val base: AggregateExpression
override def nullable: Boolean = base.nullable
override def dataType: DataType = base.dataType
def update(input: Row): Unit
// Do we really need this?
override def newInstance(): AggregateFunction = {
makeCopy(productIterator.map { case a: AnyRef => a }.toArray)
}
}
case class Min(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = true
override def dataType: DataType = child.dataType
override def toString: String = s"MIN($child)"
override def asPartial: SplitEvaluation = {
val partialMin = Alias(Min(child), "PartialMin")()
SplitEvaluation(Min(partialMin.toAttribute), partialMin :: Nil)
}
override def newInstance(): MinFunction = new MinFunction(child, this)
}
case class MinFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val currentMin: MutableLiteral = MutableLiteral(null, expr.dataType)
val cmp = GreaterThan(currentMin, expr)
override def update(input: Row): Unit = {
if (currentMin.value == null) {
currentMin.value = expr.eval(input)
} else if (cmp.eval(input) == true) {
currentMin.value = expr.eval(input)
}
}
override def eval(input: Row): Any = currentMin.value
}
case class Max(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = true
override def dataType: DataType = child.dataType
override def toString: String = s"MAX($child)"
override def asPartial: SplitEvaluation = {
val partialMax = Alias(Max(child), "PartialMax")()
SplitEvaluation(Max(partialMax.toAttribute), partialMax :: Nil)
}
override def newInstance(): MaxFunction = new MaxFunction(child, this)
}
case class MaxFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val currentMax: MutableLiteral = MutableLiteral(null, expr.dataType)
val cmp = LessThan(currentMax, expr)
override def update(input: Row): Unit = {
if (currentMax.value == null) {
currentMax.value = expr.eval(input)
} else if (cmp.eval(input) == true) {
currentMax.value = expr.eval(input)
}
}
override def eval(input: Row): Any = currentMax.value
}
case class Count(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = false
override def dataType: LongType.type = LongType
override def toString: String = s"COUNT($child)"
override def asPartial: SplitEvaluation = {
val partialCount = Alias(Count(child), "PartialCount")()
SplitEvaluation(Coalesce(Seq(Sum(partialCount.toAttribute), Literal(0L))), partialCount :: Nil)
}
override def newInstance(): CountFunction = new CountFunction(child, this)
}
case class CountDistinct(expressions: Seq[Expression]) extends PartialAggregate {
def this() = this(null)
override def children: Seq[Expression] = expressions
override def nullable: Boolean = false
override def dataType: DataType = LongType
override def toString: String = s"COUNT(DISTINCT ${expressions.mkString(",")})"
override def newInstance(): CountDistinctFunction = new CountDistinctFunction(expressions, this)
override def asPartial: SplitEvaluation = {
val partialSet = Alias(CollectHashSet(expressions), "partialSets")()
SplitEvaluation(
CombineSetsAndCount(partialSet.toAttribute),
partialSet :: Nil)
}
}
case class CollectHashSet(expressions: Seq[Expression]) extends AggregateExpression {
def this() = this(null)
override def children: Seq[Expression] = expressions
override def nullable: Boolean = false
override def dataType: OpenHashSetUDT = new OpenHashSetUDT(expressions.head.dataType)
override def toString: String = s"AddToHashSet(${expressions.mkString(",")})"
override def newInstance(): CollectHashSetFunction =
new CollectHashSetFunction(expressions, this)
}
case class CollectHashSetFunction(
@transient expr: Seq[Expression],
@transient base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val seen = new OpenHashSet[Any]()
@transient
val distinctValue = new InterpretedProjection(expr)
override def update(input: Row): Unit = {
val evaluatedExpr = distinctValue(input)
if (!evaluatedExpr.anyNull) {
seen.add(evaluatedExpr)
}
}
override def eval(input: Row): Any = {
seen
}
}
case class CombineSetsAndCount(inputSet: Expression) extends AggregateExpression {
def this() = this(null)
override def children: Seq[Expression] = inputSet :: Nil
override def nullable: Boolean = false
override def dataType: DataType = LongType
override def toString: String = s"CombineAndCount($inputSet)"
override def newInstance(): CombineSetsAndCountFunction = {
new CombineSetsAndCountFunction(inputSet, this)
}
}
case class CombineSetsAndCountFunction(
@transient inputSet: Expression,
@transient base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val seen = new OpenHashSet[Any]()
override def update(input: Row): Unit = {
val inputSetEval = inputSet.eval(input).asInstanceOf[OpenHashSet[Any]]
val inputIterator = inputSetEval.iterator
while (inputIterator.hasNext) {
seen.add(inputIterator.next)
}
}
override def eval(input: Row): Any = seen.size.toLong
}
/** The data type of ApproxCountDistinctPartition since its output is a HyperLogLog object. */
private[sql] case object HyperLogLogUDT extends UserDefinedType[HyperLogLog] {
override def sqlType: DataType = BinaryType
/** Since we are using HyperLogLog internally, usually it will not be called. */
override def serialize(obj: Any): Array[Byte] =
obj.asInstanceOf[HyperLogLog].getBytes
/** Since we are using HyperLogLog internally, usually it will not be called. */
override def deserialize(datum: Any): HyperLogLog =
HyperLogLog.Builder.build(datum.asInstanceOf[Array[Byte]])
override def userClass: Class[HyperLogLog] = classOf[HyperLogLog]
}
case class ApproxCountDistinctPartition(child: Expression, relativeSD: Double)
extends AggregateExpression with trees.UnaryNode[Expression] {
override def nullable: Boolean = false
override def dataType: DataType = HyperLogLogUDT
override def toString: String = s"APPROXIMATE COUNT(DISTINCT $child)"
override def newInstance(): ApproxCountDistinctPartitionFunction = {
new ApproxCountDistinctPartitionFunction(child, this, relativeSD)
}
}
case class ApproxCountDistinctMerge(child: Expression, relativeSD: Double)
extends AggregateExpression with trees.UnaryNode[Expression] {
override def nullable: Boolean = false
override def dataType: LongType.type = LongType
override def toString: String = s"APPROXIMATE COUNT(DISTINCT $child)"
override def newInstance(): ApproxCountDistinctMergeFunction = {
new ApproxCountDistinctMergeFunction(child, this, relativeSD)
}
}
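// Approximate distinct count backed by a HyperLogLog sketch; relativeSD is the target
// relative standard deviation of the estimate (0.05 by default).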
case class ApproxCountDistinct(child: Expression, relativeSD: Double = 0.05)
extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = false
override def dataType: LongType.type = LongType
override def toString: String = s"APPROXIMATE COUNT(DISTINCT $child)"
override def asPartial: SplitEvaluation = {
val partialCount =
Alias(ApproxCountDistinctPartition(child, relativeSD), "PartialApproxCountDistinct")()
SplitEvaluation(
ApproxCountDistinctMerge(partialCount.toAttribute, relativeSD),
partialCount :: Nil)
}
override def newInstance(): CountDistinctFunction = new CountDistinctFunction(child :: Nil, this)
}
case class Average(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = true
override def dataType: DataType = child.dataType match {
case DecimalType.Fixed(precision, scale) =>
DecimalType(precision + 4, scale + 4) // Add 4 digits after decimal point, like Hive
case DecimalType.Unlimited =>
DecimalType.Unlimited
case _ =>
DoubleType
}
override def toString: String = s"AVG($child)"
override def asPartial: SplitEvaluation = {
child.dataType match {
case DecimalType.Fixed(_, _) | DecimalType.Unlimited =>
// Turn the child to unlimited decimals for calculation, before going back to fixed
val partialSum = Alias(Sum(Cast(child, DecimalType.Unlimited)), "PartialSum")()
val partialCount = Alias(Count(child), "PartialCount")()
val castedSum = Cast(Sum(partialSum.toAttribute), DecimalType.Unlimited)
val castedCount = Cast(Sum(partialCount.toAttribute), DecimalType.Unlimited)
SplitEvaluation(
Cast(Divide(castedSum, castedCount), dataType),
partialCount :: partialSum :: Nil)
case _ =>
val partialSum = Alias(Sum(child), "PartialSum")()
val partialCount = Alias(Count(child), "PartialCount")()
val castedSum = Cast(Sum(partialSum.toAttribute), dataType)
val castedCount = Cast(Sum(partialCount.toAttribute), dataType)
SplitEvaluation(
Divide(castedSum, castedCount),
partialCount :: partialSum :: Nil)
}
}
override def newInstance(): AverageFunction = new AverageFunction(child, this)
}
case class Sum(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = true
override def dataType: DataType = child.dataType match {
case DecimalType.Fixed(precision, scale) =>
DecimalType(precision + 10, scale) // Add 10 digits left of decimal point, like Hive
case DecimalType.Unlimited =>
DecimalType.Unlimited
case _ =>
child.dataType
}
override def toString: String = s"SUM($child)"
override def asPartial: SplitEvaluation = {
child.dataType match {
case DecimalType.Fixed(_, _) =>
val partialSum = Alias(Sum(Cast(child, DecimalType.Unlimited)), "PartialSum")()
SplitEvaluation(
Cast(CombineSum(partialSum.toAttribute), dataType),
partialSum :: Nil)
case _ =>
val partialSum = Alias(Sum(child), "PartialSum")()
SplitEvaluation(
CombineSum(partialSum.toAttribute),
partialSum :: Nil)
}
}
override def newInstance(): SumFunction = new SumFunction(child, this)
}
/**
* Sum should satisfy 3 cases:
* 1) sum of all null values = zero
* 2) sum for table column with no data = null
* 3) sum of column with null and not null values = sum of not null values
* Require separate CombineSum Expression and function as it has to distinguish "No data" case
 * versus "data equals null" case, while aggregating results and at each partial expression, i.e.,
* Combining PartitionLevel InputData
* <-- null
* Zero <-- Zero <-- null
*
* <-- null <-- no data
* null <-- null <-- no data
*/
case class CombineSum(child: Expression) extends AggregateExpression {
def this() = this(null)
override def children: Seq[Expression] = child :: Nil
override def nullable: Boolean = true
override def dataType: DataType = child.dataType
override def toString: String = s"CombineSum($child)"
override def newInstance(): CombineSumFunction = new CombineSumFunction(child, this)
}
case class SumDistinct(child: Expression)
extends PartialAggregate with trees.UnaryNode[Expression] {
def this() = this(null)
override def nullable: Boolean = true
override def dataType: DataType = child.dataType match {
case DecimalType.Fixed(precision, scale) =>
DecimalType(precision + 10, scale) // Add 10 digits left of decimal point, like Hive
case DecimalType.Unlimited =>
DecimalType.Unlimited
case _ =>
child.dataType
}
override def toString: String = s"SUM(DISTINCT $child)"
override def newInstance(): SumDistinctFunction = new SumDistinctFunction(child, this)
override def asPartial: SplitEvaluation = {
val partialSet = Alias(CollectHashSet(child :: Nil), "partialSets")()
SplitEvaluation(
CombineSetsAndSum(partialSet.toAttribute, this),
partialSet :: Nil)
}
}
case class CombineSetsAndSum(inputSet: Expression, base: Expression) extends AggregateExpression {
def this() = this(null, null)
override def children: Seq[Expression] = inputSet :: Nil
override def nullable: Boolean = true
override def dataType: DataType = base.dataType
override def toString: String = s"CombineAndSum($inputSet)"
override def newInstance(): CombineSetsAndSumFunction = {
new CombineSetsAndSumFunction(inputSet, this)
}
}
case class CombineSetsAndSumFunction(
@transient inputSet: Expression,
@transient base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val seen = new OpenHashSet[Any]()
override def update(input: Row): Unit = {
val inputSetEval = inputSet.eval(input).asInstanceOf[OpenHashSet[Any]]
val inputIterator = inputSetEval.iterator
while (inputIterator.hasNext) {
seen.add(inputIterator.next)
}
}
override def eval(input: Row): Any = {
val casted = seen.asInstanceOf[OpenHashSet[Row]]
if (casted.size == 0) {
null
} else {
Cast(Literal(
casted.iterator.map(f => f.apply(0)).reduceLeft(
base.dataType.asInstanceOf[NumericType].numeric.asInstanceOf[Numeric[Any]].plus)),
base.dataType).eval(null)
}
}
}
case class First(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def nullable: Boolean = true
override def dataType: DataType = child.dataType
override def toString: String = s"FIRST($child)"
override def asPartial: SplitEvaluation = {
val partialFirst = Alias(First(child), "PartialFirst")()
SplitEvaluation(
First(partialFirst.toAttribute),
partialFirst :: Nil)
}
override def newInstance(): FirstFunction = new FirstFunction(child, this)
}
case class Last(child: Expression) extends PartialAggregate with trees.UnaryNode[Expression] {
override def references: AttributeSet = child.references
override def nullable: Boolean = true
override def dataType: DataType = child.dataType
override def toString: String = s"LAST($child)"
override def asPartial: SplitEvaluation = {
val partialLast = Alias(Last(child), "PartialLast")()
SplitEvaluation(
Last(partialLast.toAttribute),
partialLast :: Nil)
}
override def newInstance(): LastFunction = new LastFunction(child, this)
}
case class AverageFunction(expr: Expression, base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
private val calcType =
expr.dataType match {
case DecimalType.Fixed(_, _) =>
DecimalType.Unlimited
case _ =>
expr.dataType
}
private val zero = Cast(Literal(0), calcType)
private var count: Long = _
private val sum = MutableLiteral(zero.eval(null), calcType)
private def addFunction(value: Any) = Add(sum,
Cast(Literal.create(value, expr.dataType), calcType))
override def eval(input: Row): Any = {
if (count == 0L) {
null
} else {
expr.dataType match {
case DecimalType.Fixed(_, _) =>
Cast(Divide(
Cast(sum, DecimalType.Unlimited),
Cast(Literal(count), DecimalType.Unlimited)), dataType).eval(null)
case _ =>
Divide(
Cast(sum, dataType),
Cast(Literal(count), dataType)).eval(null)
}
}
}
override def update(input: Row): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
count += 1
sum.update(addFunction(evaluatedExpr), input)
}
}
}
case class CountFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
var count: Long = _
override def update(input: Row): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
count += 1L
}
}
override def eval(input: Row): Any = count
}
case class ApproxCountDistinctPartitionFunction(
expr: Expression,
base: AggregateExpression,
relativeSD: Double)
extends AggregateFunction {
def this() = this(null, null, 0) // Required for serialization.
private val hyperLogLog = new HyperLogLog(relativeSD)
override def update(input: Row): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
hyperLogLog.offer(evaluatedExpr)
}
}
override def eval(input: Row): Any = hyperLogLog
}
case class ApproxCountDistinctMergeFunction(
expr: Expression,
base: AggregateExpression,
relativeSD: Double)
extends AggregateFunction {
def this() = this(null, null, 0) // Required for serialization.
private val hyperLogLog = new HyperLogLog(relativeSD)
override def update(input: Row): Unit = {
val evaluatedExpr = expr.eval(input)
hyperLogLog.addAll(evaluatedExpr.asInstanceOf[HyperLogLog])
}
override def eval(input: Row): Any = hyperLogLog.cardinality()
}
case class SumFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
private val calcType =
expr.dataType match {
case DecimalType.Fixed(_, _) =>
DecimalType.Unlimited
case _ =>
expr.dataType
}
private val zero = Cast(Literal(0), calcType)
private val sum = MutableLiteral(null, calcType)
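  // Add(...) evaluates to null when the incoming value is null; the outer Coalesce then
  // falls back to the current sum, or to zero if nothing has been accumulated yet.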
private val addFunction =
Coalesce(Seq(Add(Coalesce(Seq(sum, zero)), Cast(expr, calcType)), sum, zero))
override def update(input: Row): Unit = {
sum.update(addFunction, input)
}
override def eval(input: Row): Any = {
expr.dataType match {
case DecimalType.Fixed(_, _) =>
Cast(sum, dataType).eval(null)
case _ => sum.eval(null)
}
}
}
case class CombineSumFunction(expr: Expression, base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
private val calcType =
expr.dataType match {
case DecimalType.Fixed(_, _) =>
DecimalType.Unlimited
case _ =>
expr.dataType
}
private val zero = Cast(Literal(0), calcType)
private val sum = MutableLiteral(null, calcType)
private val addFunction =
Coalesce(Seq(Add(Coalesce(Seq(sum, zero)), Cast(expr, calcType)), sum, zero))
override def update(input: Row): Unit = {
val result = expr.eval(input)
    // partial sum result can be null only when no input rows are present
    if (result != null) {
sum.update(addFunction, input)
}
}
override def eval(input: Row): Any = {
expr.dataType match {
case DecimalType.Fixed(_, _) =>
Cast(sum, dataType).eval(null)
case _ => sum.eval(null)
}
}
}
case class SumDistinctFunction(expr: Expression, base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
private val seen = new scala.collection.mutable.HashSet[Any]()
override def update(input: Row): Unit = {
val evaluatedExpr = expr.eval(input)
if (evaluatedExpr != null) {
seen += evaluatedExpr
}
}
override def eval(input: Row): Any = {
if (seen.size == 0) {
null
} else {
Cast(Literal(
seen.reduceLeft(
dataType.asInstanceOf[NumericType].numeric.asInstanceOf[Numeric[Any]].plus)),
dataType).eval(null)
}
}
}
case class CountDistinctFunction(
@transient expr: Seq[Expression],
@transient base: AggregateExpression)
extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
val seen = new OpenHashSet[Any]()
@transient
val distinctValue = new InterpretedProjection(expr)
override def update(input: Row): Unit = {
val evaluatedExpr = distinctValue(input)
if (!evaluatedExpr.anyNull) {
seen.add(evaluatedExpr)
}
}
override def eval(input: Row): Any = seen.size.toLong
}
case class FirstFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
var result: Any = null
override def update(input: Row): Unit = {
if (result == null) {
result = expr.eval(input)
}
}
override def eval(input: Row): Any = result
}
case class LastFunction(expr: Expression, base: AggregateExpression) extends AggregateFunction {
def this() = this(null, null) // Required for serialization.
var result: Any = null
override def update(input: Row): Unit = {
result = input
}
override def eval(input: Row): Any = {
if (result != null) expr.eval(result.asInstanceOf[Row]) else null
}
}
|
andrewor14/iolap
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
|
Scala
|
apache-2.0
| 24,166 |
/**
* Copyright 2015 NICTA
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.n1analytics.paillier.examples
import com.n1analytics.paillier._
object PrivateSum {
def main(args: Array[String]): Unit = {
val rawNumbers = Array(0.0, 0.8, 1.0, 3.2, -5, 50)
val keypair = PaillierPrivateKey.create(1024)
    val publicKey = keypair.getPublicKey
val paillierContext = publicKey.createSignedContext
println("Encrypting doubles with public key (e.g., on multiple devices)")
val encryptedNumbers = rawNumbers.map(n => paillierContext.encrypt(n))
println("Adding encrypted doubles")
val encryptedSum = encryptedNumbers.reduce((n1, n2) => n1.add(n2))
println("Decrypting result:")
println(keypair.decrypt(encryptedSum).decodeDouble)
}
}
|
NICTA/javallier
|
examples/privateSum/src/main/scala/PrivateSum.scala
|
Scala
|
apache-2.0
| 1,290 |
/*
* Copyright (c) 2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.kinesistee.routing
import com.snowplowanalytics.kinesistee.StreamWriter
import scalaz._
import scalaz.syntax.validation._
import com.snowplowanalytics.kinesistee.models.Stream
/**
* This routing strategy passes all traffic through to the destination
* @param destination the endpoint to route all data to
*/
class PointToPointRoute(destination: StreamWriter) extends RoutingStrategy {
/**
* Routing strategy that sends all traffic to the given destination
* @return all traffic sent to the given destination
*/
override def route: ValidationNel[String, StreamWriter] = {
destination.success
}
  override def toString: String = {
s"Stream to stream route: stream `source` -> stream ${destination.toString}"
}
}
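// Usage sketch (illustrative; `writer` stands for any configured StreamWriter):
//   val route = new PointToPointRoute(writer)
//   route.route // Success(writer) wrapped in a ValidationNel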
|
snowplow/kinesis-tee
|
src/main/scala/com/snowplowanalytics/kinesistee/routing/PointToPointRoute.scala
|
Scala
|
apache-2.0
| 1,482 |
package com.theiterators.scalaspray
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
import com.typesafe.config.ConfigFactory
object Boot extends App {
val config = ConfigFactory.load()
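  // "host.interface" and "host.port" are read from the loaded Typesafe config
  // (typically application.conf).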
implicit val system = ActorSystem("on-spray-can")
val service = system.actorOf(Props[DemoServiceActor], "demo-service")
IO(Http) ! Http.Bind(service, interface = config.getString("host.interface"), port = config.getInt("host.port"))
}
|
pjagielski/microservices-jvm
|
scala-spray-rest/src/main/scala/com/theiterators/scalaspray/Boot.scala
|
Scala
|
mit
| 463 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import scala.util.Random
import org.scalatest.Matchers.the
import org.apache.spark.sql.execution.WholeStageCodegenExec
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
import org.apache.spark.sql.execution.aggregate.{HashAggregateExec, ObjectHashAggregateExec, SortAggregateExec}
import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData.DecimalData
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
case class Fact(date: Int, hour: Int, minute: Int, room_name: String, temp: Double)
class DataFrameAggregateSuite extends QueryTest
with SharedSparkSession
with AdaptiveSparkPlanHelper {
import testImplicits._
val absTol = 1e-8
test("groupBy") {
checkAnswer(
testData2.groupBy("a").agg(sum($"b")),
Seq(Row(1, 3), Row(2, 3), Row(3, 3))
)
checkAnswer(
testData2.groupBy("a").agg(sum($"b").as("totB")).agg(sum($"totB")),
Row(9)
)
checkAnswer(
testData2.groupBy("a").agg(count("*")),
Row(1, 2) :: Row(2, 2) :: Row(3, 2) :: Nil
)
checkAnswer(
testData2.groupBy("a").agg(Map("*" -> "count")),
Row(1, 2) :: Row(2, 2) :: Row(3, 2) :: Nil
)
checkAnswer(
testData2.groupBy("a").agg(Map("b" -> "sum")),
Row(1, 3) :: Row(2, 3) :: Row(3, 3) :: Nil
)
val df1 = Seq(("a", 1, 0, "b"), ("b", 2, 4, "c"), ("a", 2, 3, "d"))
.toDF("key", "value1", "value2", "rest")
checkAnswer(
df1.groupBy("key").min(),
df1.groupBy("key").min("value1", "value2").collect()
)
checkAnswer(
df1.groupBy("key").min("value2"),
Seq(Row("a", 0), Row("b", 4))
)
checkAnswer(
decimalData.groupBy("a").agg(sum("b")),
Seq(Row(new java.math.BigDecimal(1), new java.math.BigDecimal(3)),
Row(new java.math.BigDecimal(2), new java.math.BigDecimal(3)),
Row(new java.math.BigDecimal(3), new java.math.BigDecimal(3)))
)
val decimalDataWithNulls = spark.sparkContext.parallelize(
DecimalData(1, 1) ::
DecimalData(1, null) ::
DecimalData(2, 1) ::
DecimalData(2, null) ::
DecimalData(3, 1) ::
DecimalData(3, 2) ::
DecimalData(null, 2) :: Nil).toDF()
checkAnswer(
decimalDataWithNulls.groupBy("a").agg(sum("b")),
Seq(Row(new java.math.BigDecimal(1), new java.math.BigDecimal(1)),
Row(new java.math.BigDecimal(2), new java.math.BigDecimal(1)),
Row(new java.math.BigDecimal(3), new java.math.BigDecimal(3)),
Row(null, new java.math.BigDecimal(2)))
)
}
test("SPARK-17124 agg should be ordering preserving") {
val df = spark.range(2)
val ret = df.groupBy("id").agg("id" -> "sum", "id" -> "count", "id" -> "min")
assert(ret.schema.map(_.name) == Seq("id", "sum(id)", "count(id)", "min(id)"))
checkAnswer(
ret,
Row(0, 0, 1, 0) :: Row(1, 1, 1, 1) :: Nil
)
}
test("SPARK-18952: regexes fail codegen when used as keys due to bad forward-slash escapes") {
val df = Seq(("some[thing]", "random-string")).toDF("key", "val")
checkAnswer(
      df.groupBy(regexp_extract($"key", "([a-z]+)\\[", 1)).count(),
Row("some", 1) :: Nil
)
}
test("rollup") {
checkAnswer(
courseSales.rollup("course", "year").sum("earnings"),
Row("Java", 2012, 20000.0) ::
Row("Java", 2013, 30000.0) ::
Row("Java", null, 50000.0) ::
Row("dotNET", 2012, 15000.0) ::
Row("dotNET", 2013, 48000.0) ::
Row("dotNET", null, 63000.0) ::
Row(null, null, 113000.0) :: Nil
)
}
test("cube") {
checkAnswer(
courseSales.cube("course", "year").sum("earnings"),
Row("Java", 2012, 20000.0) ::
Row("Java", 2013, 30000.0) ::
Row("Java", null, 50000.0) ::
Row("dotNET", 2012, 15000.0) ::
Row("dotNET", 2013, 48000.0) ::
Row("dotNET", null, 63000.0) ::
Row(null, 2012, 35000.0) ::
Row(null, 2013, 78000.0) ::
Row(null, null, 113000.0) :: Nil
)
val df0 = spark.sparkContext.parallelize(Seq(
Fact(20151123, 18, 35, "room1", 18.6),
Fact(20151123, 18, 35, "room2", 22.4),
Fact(20151123, 18, 36, "room1", 17.4),
Fact(20151123, 18, 36, "room2", 25.6))).toDF()
val cube0 = df0.cube("date", "hour", "minute", "room_name").agg(Map("temp" -> "avg"))
assert(cube0.where("date IS NULL").count > 0)
}
test("grouping and grouping_id") {
checkAnswer(
courseSales.cube("course", "year")
.agg(grouping("course"), grouping("year"), grouping_id("course", "year")),
Row("Java", 2012, 0, 0, 0) ::
Row("Java", 2013, 0, 0, 0) ::
Row("Java", null, 0, 1, 1) ::
Row("dotNET", 2012, 0, 0, 0) ::
Row("dotNET", 2013, 0, 0, 0) ::
Row("dotNET", null, 0, 1, 1) ::
Row(null, 2012, 1, 0, 2) ::
Row(null, 2013, 1, 0, 2) ::
Row(null, null, 1, 1, 3) :: Nil
)
// use column reference in `grouping_id` instead of column name
checkAnswer(
courseSales.cube("course", "year")
.agg(grouping_id(courseSales("course"), courseSales("year"))),
Row("Java", 2012, 0) ::
Row("Java", 2013, 0) ::
Row("Java", null, 1) ::
Row("dotNET", 2012, 0) ::
Row("dotNET", 2013, 0) ::
Row("dotNET", null, 1) ::
Row(null, 2012, 2) ::
Row(null, 2013, 2) ::
Row(null, null, 3) :: Nil
)
intercept[AnalysisException] {
courseSales.groupBy().agg(grouping("course")).explain()
}
intercept[AnalysisException] {
courseSales.groupBy().agg(grouping_id("course")).explain()
}
}
test("grouping/grouping_id inside window function") {
val w = Window.orderBy(sum("earnings"))
checkAnswer(
courseSales.cube("course", "year")
.agg(sum("earnings"),
grouping_id("course", "year"),
rank().over(Window.partitionBy(grouping_id("course", "year")).orderBy(sum("earnings")))),
Row("Java", 2012, 20000.0, 0, 2) ::
Row("Java", 2013, 30000.0, 0, 3) ::
Row("Java", null, 50000.0, 1, 1) ::
Row("dotNET", 2012, 15000.0, 0, 1) ::
Row("dotNET", 2013, 48000.0, 0, 4) ::
Row("dotNET", null, 63000.0, 1, 2) ::
Row(null, 2012, 35000.0, 2, 1) ::
Row(null, 2013, 78000.0, 2, 2) ::
Row(null, null, 113000.0, 3, 1) :: Nil
)
}
test("SPARK-21980: References in grouping functions should be indexed with semanticEquals") {
checkAnswer(
courseSales.cube("course", "year")
.agg(grouping("CouRse"), grouping("year")),
Row("Java", 2012, 0, 0) ::
Row("Java", 2013, 0, 0) ::
Row("Java", null, 0, 1) ::
Row("dotNET", 2012, 0, 0) ::
Row("dotNET", 2013, 0, 0) ::
Row("dotNET", null, 0, 1) ::
Row(null, 2012, 1, 0) ::
Row(null, 2013, 1, 0) ::
Row(null, null, 1, 1) :: Nil
)
}
test("rollup overlapping columns") {
checkAnswer(
testData2.rollup($"a" + $"b" as "foo", $"b" as "bar").agg(sum($"a" - $"b") as "foo"),
Row(2, 1, 0) :: Row(3, 2, -1) :: Row(3, 1, 1) :: Row(4, 2, 0) :: Row(4, 1, 2) :: Row(5, 2, 1)
:: Row(2, null, 0) :: Row(3, null, 0) :: Row(4, null, 2) :: Row(5, null, 1)
:: Row(null, null, 3) :: Nil
)
checkAnswer(
testData2.rollup("a", "b").agg(sum("b")),
Row(1, 1, 1) :: Row(1, 2, 2) :: Row(2, 1, 1) :: Row(2, 2, 2) :: Row(3, 1, 1) :: Row(3, 2, 2)
:: Row(1, null, 3) :: Row(2, null, 3) :: Row(3, null, 3)
:: Row(null, null, 9) :: Nil
)
}
test("cube overlapping columns") {
checkAnswer(
testData2.cube($"a" + $"b", $"b").agg(sum($"a" - $"b")),
Row(2, 1, 0) :: Row(3, 2, -1) :: Row(3, 1, 1) :: Row(4, 2, 0) :: Row(4, 1, 2) :: Row(5, 2, 1)
:: Row(2, null, 0) :: Row(3, null, 0) :: Row(4, null, 2) :: Row(5, null, 1)
:: Row(null, 1, 3) :: Row(null, 2, 0)
:: Row(null, null, 3) :: Nil
)
checkAnswer(
testData2.cube("a", "b").agg(sum("b")),
Row(1, 1, 1) :: Row(1, 2, 2) :: Row(2, 1, 1) :: Row(2, 2, 2) :: Row(3, 1, 1) :: Row(3, 2, 2)
:: Row(1, null, 3) :: Row(2, null, 3) :: Row(3, null, 3)
:: Row(null, 1, 3) :: Row(null, 2, 6)
:: Row(null, null, 9) :: Nil
)
}
test("spark.sql.retainGroupColumns config") {
checkAnswer(
testData2.groupBy("a").agg(sum($"b")),
Seq(Row(1, 3), Row(2, 3), Row(3, 3))
)
spark.conf.set(SQLConf.DATAFRAME_RETAIN_GROUP_COLUMNS.key, false)
checkAnswer(
testData2.groupBy("a").agg(sum($"b")),
Seq(Row(3), Row(3), Row(3))
)
spark.conf.set(SQLConf.DATAFRAME_RETAIN_GROUP_COLUMNS.key, true)
}
test("agg without groups") {
checkAnswer(
testData2.agg(sum($"b")),
Row(9)
)
}
test("agg without groups and functions") {
checkAnswer(
testData2.agg(lit(1)),
Row(1)
)
}
test("average") {
checkAnswer(
testData2.agg(avg($"a"), mean($"a")),
Row(2.0, 2.0))
checkAnswer(
testData2.agg(avg($"a"), sumDistinct($"a")), // non-partial
Row(2.0, 6.0) :: Nil)
checkAnswer(
decimalData.agg(avg($"a")),
Row(new java.math.BigDecimal(2)))
checkAnswer(
decimalData.agg(avg($"a"), sumDistinct($"a")), // non-partial
Row(new java.math.BigDecimal(2), new java.math.BigDecimal(6)) :: Nil)
checkAnswer(
decimalData.agg(avg($"a" cast DecimalType(10, 2))),
Row(new java.math.BigDecimal(2)))
// non-partial
checkAnswer(
decimalData.agg(
avg($"a" cast DecimalType(10, 2)), sumDistinct($"a" cast DecimalType(10, 2))),
Row(new java.math.BigDecimal(2), new java.math.BigDecimal(6)) :: Nil)
}
test("null average") {
checkAnswer(
testData3.agg(avg($"b")),
Row(2.0))
checkAnswer(
testData3.agg(avg($"b"), countDistinct($"b")),
Row(2.0, 1))
checkAnswer(
testData3.agg(avg($"b"), sumDistinct($"b")), // non-partial
Row(2.0, 2.0))
}
test("zero average") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(
emptyTableData.agg(avg($"a")),
Row(null))
checkAnswer(
emptyTableData.agg(avg($"a"), sumDistinct($"b")), // non-partial
Row(null, null))
}
test("count") {
assert(testData2.count() === testData2.rdd.map(_ => 1).count())
checkAnswer(
testData2.agg(count($"a"), sumDistinct($"a")), // non-partial
Row(6, 6.0))
}
test("null count") {
checkAnswer(
testData3.groupBy($"a").agg(count($"b")),
Seq(Row(1, 0), Row(2, 1))
)
checkAnswer(
testData3.groupBy($"a").agg(count($"a" + $"b")),
Seq(Row(1, 0), Row(2, 1))
)
checkAnswer(
testData3.agg(
count($"a"), count($"b"), count(lit(1)), countDistinct($"a"), countDistinct($"b")),
Row(2, 1, 2, 2, 1)
)
checkAnswer(
testData3.agg(count($"b"), countDistinct($"b"), sumDistinct($"b")), // non-partial
Row(1, 1, 2)
)
}
test("multiple column distinct count") {
val df1 = Seq(
("a", "b", "c"),
("a", "b", "c"),
("a", "b", "d"),
("x", "y", "z"),
("x", "q", null.asInstanceOf[String]))
.toDF("key1", "key2", "key3")
checkAnswer(
df1.agg(countDistinct($"key1", $"key2")),
Row(3)
)
checkAnswer(
df1.agg(countDistinct($"key1", $"key2", $"key3")),
Row(3)
)
checkAnswer(
df1.groupBy($"key1").agg(countDistinct($"key2", $"key3")),
Seq(Row("a", 2), Row("x", 1))
)
}
test("zero count") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(
emptyTableData.agg(count($"a"), sumDistinct($"a")), // non-partial
Row(0, null))
}
test("stddev") {
val testData2ADev = math.sqrt(4.0 / 5.0)
checkAnswer(
testData2.agg(stddev($"a"), stddev_pop($"a"), stddev_samp($"a")),
Row(testData2ADev, math.sqrt(4 / 6.0), testData2ADev))
checkAnswer(
testData2.agg(stddev("a"), stddev_pop("a"), stddev_samp("a")),
Row(testData2ADev, math.sqrt(4 / 6.0), testData2ADev))
}
test("zero stddev") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(
emptyTableData.agg(stddev($"a"), stddev_pop($"a"), stddev_samp($"a")),
Row(null, null, null))
}
test("zero sum") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(
emptyTableData.agg(sum($"a")),
Row(null))
}
test("zero sum distinct") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(
emptyTableData.agg(sumDistinct($"a")),
Row(null))
}
test("moments") {
val sparkVariance = testData2.agg(variance($"a"))
checkAggregatesWithTol(sparkVariance, Row(4.0 / 5.0), absTol)
val sparkVariancePop = testData2.agg(var_pop($"a"))
checkAggregatesWithTol(sparkVariancePop, Row(4.0 / 6.0), absTol)
val sparkVarianceSamp = testData2.agg(var_samp($"a"))
checkAggregatesWithTol(sparkVarianceSamp, Row(4.0 / 5.0), absTol)
val sparkSkewness = testData2.agg(skewness($"a"))
checkAggregatesWithTol(sparkSkewness, Row(0.0), absTol)
val sparkKurtosis = testData2.agg(kurtosis($"a"))
checkAggregatesWithTol(sparkKurtosis, Row(-1.5), absTol)
}
test("zero moments") {
val input = Seq((1, 2)).toDF("a", "b")
checkAnswer(
input.agg(stddev($"a"), stddev_samp($"a"), stddev_pop($"a"), variance($"a"),
var_samp($"a"), var_pop($"a"), skewness($"a"), kurtosis($"a")),
Row(Double.NaN, Double.NaN, 0.0, Double.NaN, Double.NaN, 0.0,
Double.NaN, Double.NaN))
checkAnswer(
input.agg(
expr("stddev(a)"),
expr("stddev_samp(a)"),
expr("stddev_pop(a)"),
expr("variance(a)"),
expr("var_samp(a)"),
expr("var_pop(a)"),
expr("skewness(a)"),
expr("kurtosis(a)")),
Row(Double.NaN, Double.NaN, 0.0, Double.NaN, Double.NaN, 0.0,
Double.NaN, Double.NaN))
}
test("null moments") {
val emptyTableData = Seq.empty[(Int, Int)].toDF("a", "b")
checkAnswer(emptyTableData.agg(
variance($"a"), var_samp($"a"), var_pop($"a"), skewness($"a"), kurtosis($"a")),
Row(null, null, null, null, null))
checkAnswer(
emptyTableData.agg(
expr("variance(a)"),
expr("var_samp(a)"),
expr("var_pop(a)"),
expr("skewness(a)"),
expr("kurtosis(a)")),
Row(null, null, null, null, null))
}
test("collect functions") {
val df = Seq((1, 2), (2, 2), (3, 4)).toDF("a", "b")
checkAnswer(
df.select(collect_list($"a"), collect_list($"b")),
Seq(Row(Seq(1, 2, 3), Seq(2, 2, 4)))
)
checkAnswer(
df.select(collect_set($"a"), collect_set($"b")),
Seq(Row(Seq(1, 2, 3), Seq(2, 4)))
)
checkDataset(
df.select(collect_set($"a").as("aSet")).as[Set[Int]],
Set(1, 2, 3))
checkDataset(
df.select(collect_set($"b").as("bSet")).as[Set[Int]],
Set(2, 4))
checkDataset(
df.select(collect_set($"a"), collect_set($"b")).as[(Set[Int], Set[Int])],
Seq(Set(1, 2, 3) -> Set(2, 4)): _*)
}
test("collect functions structs") {
val df = Seq((1, 2, 2), (2, 2, 2), (3, 4, 1))
.toDF("a", "x", "y")
.select($"a", struct($"x", $"y").as("b"))
checkAnswer(
df.select(collect_list($"a"), sort_array(collect_list($"b"))),
Seq(Row(Seq(1, 2, 3), Seq(Row(2, 2), Row(2, 2), Row(4, 1))))
)
checkAnswer(
df.select(collect_set($"a"), sort_array(collect_set($"b"))),
Seq(Row(Seq(1, 2, 3), Seq(Row(2, 2), Row(4, 1))))
)
}
test("collect_set functions cannot have maps") {
val df = Seq((1, 3, 0), (2, 3, 0), (3, 4, 1))
.toDF("a", "x", "y")
.select($"a", map($"x", $"y").as("b"))
val error = intercept[AnalysisException] {
df.select(collect_set($"a"), collect_set($"b"))
}
assert(error.message.contains("collect_set() cannot have map type data"))
}
test("SPARK-17641: collect functions should not collect null values") {
val df = Seq(("1", 2), (null, 2), ("1", 4)).toDF("a", "b")
checkAnswer(
df.select(collect_list($"a"), collect_list($"b")),
Seq(Row(Seq("1", "1"), Seq(2, 2, 4)))
)
checkAnswer(
df.select(collect_set($"a"), collect_set($"b")),
Seq(Row(Seq("1"), Seq(2, 4)))
)
}
test("collect functions should be able to cast to array type with no null values") {
val df = Seq(1, 2).toDF("a")
checkAnswer(df.select(collect_list("a") cast ArrayType(IntegerType, false)),
Seq(Row(Seq(1, 2))))
checkAnswer(df.select(collect_set("a") cast ArrayType(FloatType, false)),
Seq(Row(Seq(1.0, 2.0))))
}
test("SPARK-14664: Decimal sum/avg over window should work.") {
checkAnswer(
spark.sql("select sum(a) over () from values 1.0, 2.0, 3.0 T(a)"),
Row(6.0) :: Row(6.0) :: Row(6.0) :: Nil)
checkAnswer(
spark.sql("select avg(a) over () from values 1.0, 2.0, 3.0 T(a)"),
Row(2.0) :: Row(2.0) :: Row(2.0) :: Nil)
}
test("SQL decimal test (used for catching certain decimal handling bugs in aggregates)") {
checkAnswer(
decimalData.groupBy($"a" cast DecimalType(10, 2)).agg(avg($"b" cast DecimalType(10, 2))),
Seq(Row(new java.math.BigDecimal(1), new java.math.BigDecimal("1.5")),
Row(new java.math.BigDecimal(2), new java.math.BigDecimal("1.5")),
Row(new java.math.BigDecimal(3), new java.math.BigDecimal("1.5"))))
}
test("SPARK-17616: distinct aggregate combined with a non-partial aggregate") {
val df = Seq((1, 3, "a"), (1, 2, "b"), (3, 4, "c"), (3, 4, "c"), (3, 5, "d"))
.toDF("x", "y", "z")
checkAnswer(
df.groupBy($"x").agg(countDistinct($"y"), sort_array(collect_list($"z"))),
Seq(Row(1, 2, Seq("a", "b")), Row(3, 2, Seq("c", "c", "d"))))
}
test("SPARK-18004 limit + aggregates") {
val df = Seq(("a", 1), ("b", 2), ("c", 1), ("d", 5)).toDF("id", "value")
val limit2Df = df.limit(2)
checkAnswer(
limit2Df.groupBy("id").count().select($"id"),
limit2Df.select($"id"))
}
test("SPARK-17237 remove backticks in a pivot result schema") {
val df = Seq((2, 3, 4), (3, 4, 5)).toDF("a", "x", "y")
withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
checkAnswer(
df.groupBy("a").pivot("x").agg(count("y"), avg("y")).na.fill(0),
Seq(Row(3, 0, 0.0, 1, 5.0), Row(2, 1, 4.0, 0, 0.0))
)
}
}
test("aggregate function in GROUP BY") {
val e = intercept[AnalysisException] {
testData.groupBy(sum($"key")).count()
}
assert(e.message.contains("aggregate functions are not allowed in GROUP BY"))
}
private def assertNoExceptions(c: Column): Unit = {
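    // Runs the given aggregate column under all four combinations of whole-stage codegen
    // and ObjectHashAggregate settings, and checks which physical operator is selected.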
for ((wholeStage, useObjectHashAgg) <-
Seq((true, true), (true, false), (false, true), (false, false))) {
withSQLConf(
(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key, wholeStage.toString),
(SQLConf.USE_OBJECT_HASH_AGG.key, useObjectHashAgg.toString)) {
val df = Seq(("1", 1), ("1", 2), ("2", 3), ("2", 4)).toDF("x", "y")
// test case for HashAggregate
val hashAggDF = df.groupBy("x").agg(c, sum("y"))
hashAggDF.collect()
val hashAggPlan = hashAggDF.queryExecution.executedPlan
if (wholeStage) {
assert(find(hashAggPlan) {
case WholeStageCodegenExec(_: HashAggregateExec) => true
case _ => false
}.isDefined)
} else {
assert(stripAQEPlan(hashAggPlan).isInstanceOf[HashAggregateExec])
}
// test case for ObjectHashAggregate and SortAggregate
val objHashAggOrSortAggDF = df.groupBy("x").agg(c, collect_list("y"))
objHashAggOrSortAggDF.collect()
val objHashAggOrSortAggPlan =
stripAQEPlan(objHashAggOrSortAggDF.queryExecution.executedPlan)
if (useObjectHashAgg) {
assert(objHashAggOrSortAggPlan.isInstanceOf[ObjectHashAggregateExec])
} else {
assert(objHashAggOrSortAggPlan.isInstanceOf[SortAggregateExec])
}
}
}
}
test("SPARK-19471: AggregationIterator does not initialize the generated result projection" +
" before using it") {
Seq(
monotonically_increasing_id(), spark_partition_id(),
rand(Random.nextLong()), randn(Random.nextLong())
).foreach(assertNoExceptions)
}
test("SPARK-21580 ints in aggregation expressions are taken as group-by ordinal.") {
checkAnswer(
testData2.groupBy(lit(3), lit(4)).agg(lit(6), lit(7), sum("b")),
Seq(Row(3, 4, 6, 7, 9)))
checkAnswer(
testData2.groupBy(lit(3), lit(4)).agg(lit(6), $"b", sum("b")),
Seq(Row(3, 4, 6, 1, 3), Row(3, 4, 6, 2, 6)))
checkAnswer(
spark.sql("SELECT 3, 4, SUM(b) FROM testData2 GROUP BY 1, 2"),
Seq(Row(3, 4, 9)))
checkAnswer(
spark.sql("SELECT 3 AS c, 4 AS d, SUM(b) FROM testData2 GROUP BY c, d"),
Seq(Row(3, 4, 9)))
}
test("SPARK-22223: ObjectHashAggregate should not introduce unnecessary shuffle") {
withSQLConf(SQLConf.USE_OBJECT_HASH_AGG.key -> "true") {
val df = Seq(("1", "2", 1), ("1", "2", 2), ("2", "3", 3), ("2", "3", 4)).toDF("a", "b", "c")
.repartition(col("a"))
val objHashAggDF = df
.withColumn("d", expr("(a, b, c)"))
.groupBy("a", "b").agg(collect_list("d").as("e"))
.withColumn("f", expr("(b, e)"))
.groupBy("a").agg(collect_list("f").as("g"))
val aggPlan = objHashAggDF.queryExecution.executedPlan
val sortAggPlans = collect(aggPlan) {
case sortAgg: SortAggregateExec => sortAgg
}
assert(sortAggPlans.isEmpty)
val objHashAggPlans = collect(aggPlan) {
case objHashAgg: ObjectHashAggregateExec => objHashAgg
}
assert(objHashAggPlans.nonEmpty)
val exchangePlans = collect(aggPlan) {
case shuffle: ShuffleExchangeExec => shuffle
}
assert(exchangePlans.length == 1)
}
}
testWithWholeStageCodegenOnAndOff("SPARK-22951: dropDuplicates on empty dataFrames " +
"should produce correct aggregate") { _ =>
// explicit global aggregations
val emptyAgg = Map.empty[String, String]
checkAnswer(spark.emptyDataFrame.agg(emptyAgg), Seq(Row()))
checkAnswer(spark.emptyDataFrame.groupBy().agg(emptyAgg), Seq(Row()))
checkAnswer(spark.emptyDataFrame.groupBy().agg(count("*")), Seq(Row(0)))
checkAnswer(spark.emptyDataFrame.dropDuplicates().agg(emptyAgg), Seq(Row()))
checkAnswer(spark.emptyDataFrame.dropDuplicates().groupBy().agg(emptyAgg), Seq(Row()))
checkAnswer(spark.emptyDataFrame.dropDuplicates().groupBy().agg(count("*")), Seq(Row(0)))
// global aggregation is converted to grouping aggregation:
assert(spark.emptyDataFrame.dropDuplicates().count() == 0)
}
test("SPARK-21896: Window functions inside aggregate functions") {
def checkWindowError(df: => DataFrame): Unit = {
val thrownException = the [AnalysisException] thrownBy {
df.queryExecution.analyzed
}
assert(thrownException.message.contains("not allowed to use a window function"))
}
checkWindowError(testData2.select(min(avg($"b").over(Window.partitionBy($"a")))))
checkWindowError(testData2.agg(sum($"b"), max(rank().over(Window.orderBy($"a")))))
checkWindowError(testData2.groupBy($"a").agg(sum($"b"), max(rank().over(Window.orderBy($"b")))))
checkWindowError(testData2.groupBy($"a").agg(max(sum(sum($"b")).over(Window.orderBy($"a")))))
checkWindowError(testData2.groupBy($"a").agg(
sum($"b").as("s"), max(count("*").over())).where($"s" === 3))
checkAnswer(testData2.groupBy($"a").agg(
max($"b"), sum($"b").as("s"), count("*").over()).where($"s" === 3),
Row(1, 2, 3, 3) :: Row(2, 2, 3, 3) :: Row(3, 2, 3, 3) :: Nil)
checkWindowError(sql("SELECT MIN(AVG(b) OVER(PARTITION BY a)) FROM testData2"))
checkWindowError(sql("SELECT SUM(b), MAX(RANK() OVER(ORDER BY a)) FROM testData2"))
checkWindowError(sql("SELECT SUM(b), MAX(RANK() OVER(ORDER BY b)) FROM testData2 GROUP BY a"))
checkWindowError(sql("SELECT MAX(SUM(SUM(b)) OVER(ORDER BY a)) FROM testData2 GROUP BY a"))
checkWindowError(
sql("SELECT MAX(RANK() OVER(ORDER BY b)) FROM testData2 GROUP BY a HAVING SUM(b) = 3"))
checkAnswer(
sql("SELECT a, MAX(b), RANK() OVER(ORDER BY a) FROM testData2 GROUP BY a HAVING SUM(b) = 3"),
Row(1, 2, 1) :: Row(2, 2, 2) :: Row(3, 2, 3) :: Nil)
}
test("SPARK-24788: RelationalGroupedDataset.toString with unresolved exprs should not fail") {
// Checks if these raise no exception
assert(testData.groupBy($"key").toString.contains(
"[grouping expressions: [key], value: [key: int, value: string], type: GroupBy]"))
assert(testData.groupBy(col("key")).toString.contains(
"[grouping expressions: [key], value: [key: int, value: string], type: GroupBy]"))
assert(testData.groupBy(current_date()).toString.contains(
"grouping expressions: [current_date(None)], value: [key: int, value: string], " +
"type: GroupBy]"))
}
test("SPARK-26021: NaN and -0.0 in grouping expressions") {
checkAnswer(
Seq(0.0f, -0.0f, 0.0f/0.0f, Float.NaN).toDF("f").groupBy("f").count(),
Row(0.0f, 2) :: Row(Float.NaN, 2) :: Nil)
checkAnswer(
Seq(0.0d, -0.0d, 0.0d/0.0d, Double.NaN).toDF("d").groupBy("d").count(),
Row(0.0d, 2) :: Row(Double.NaN, 2) :: Nil)
// test with complicated type grouping expressions
checkAnswer(
Seq(0.0f, -0.0f, 0.0f/0.0f, Float.NaN).toDF("f")
.groupBy(array("f"), struct("f")).count(),
Row(Seq(0.0f), Row(0.0f), 2) ::
Row(Seq(Float.NaN), Row(Float.NaN), 2) :: Nil)
checkAnswer(
Seq(0.0d, -0.0d, 0.0d/0.0d, Double.NaN).toDF("d")
.groupBy(array("d"), struct("d")).count(),
Row(Seq(0.0d), Row(0.0d), 2) ::
Row(Seq(Double.NaN), Row(Double.NaN), 2) :: Nil)
checkAnswer(
Seq(0.0f, -0.0f, 0.0f/0.0f, Float.NaN).toDF("f")
.groupBy(array(struct("f")), struct(array("f"))).count(),
Row(Seq(Row(0.0f)), Row(Seq(0.0f)), 2) ::
Row(Seq(Row(Float.NaN)), Row(Seq(Float.NaN)), 2) :: Nil)
checkAnswer(
Seq(0.0d, -0.0d, 0.0d/0.0d, Double.NaN).toDF("d")
.groupBy(array(struct("d")), struct(array("d"))).count(),
Row(Seq(Row(0.0d)), Row(Seq(0.0d)), 2) ::
Row(Seq(Row(Double.NaN)), Row(Seq(Double.NaN)), 2) :: Nil)
// test with complicated type grouping columns
val df = Seq(
(Array(-0.0f, 0.0f), Tuple2(-0.0d, Double.NaN), Seq(Tuple2(-0.0d, Double.NaN))),
(Array(0.0f, -0.0f), Tuple2(0.0d, Double.NaN), Seq(Tuple2(0.0d, 0.0/0.0)))
).toDF("arr", "stru", "arrOfStru")
checkAnswer(
df.groupBy("arr", "stru", "arrOfStru").count(),
Row(Seq(0.0f, 0.0f), Row(0.0d, Double.NaN), Seq(Row(0.0d, Double.NaN)), 2)
)
}
test("SPARK-27581: DataFrame countDistinct(\\"*\\") shouldn't fail with AnalysisException") {
val df = sql("select id % 100 from range(100000)")
val distinctCount1 = df.select(expr("count(distinct(*))"))
val distinctCount2 = df.select(countDistinct("*"))
checkAnswer(distinctCount1, distinctCount2)
val countAndDistinct = df.select(count("*"), countDistinct("*"))
checkAnswer(countAndDistinct, Row(100000, 100))
}
test("max_by") {
val yearOfMaxEarnings =
sql("SELECT course, max_by(year, earnings) FROM courseSales GROUP BY course")
checkAnswer(yearOfMaxEarnings, Row("dotNET", 2013) :: Row("Java", 2013) :: Nil)
checkAnswer(
sql("SELECT max_by(x, y) FROM VALUES (('a', 10)), (('b', 50)), (('c', 20)) AS tab(x, y)"),
Row("b") :: Nil
)
checkAnswer(
sql("SELECT max_by(x, y) FROM VALUES (('a', 10)), (('b', null)), (('c', 20)) AS tab(x, y)"),
Row("c") :: Nil
)
checkAnswer(
sql("SELECT max_by(x, y) FROM VALUES (('a', null)), (('b', null)), (('c', 20)) AS tab(x, y)"),
Row("c") :: Nil
)
checkAnswer(
sql("SELECT max_by(x, y) FROM VALUES (('a', 10)), (('b', 50)), (('c', null)) AS tab(x, y)"),
Row("b") :: Nil
)
checkAnswer(
sql("SELECT max_by(x, y) FROM VALUES (('a', null)), (('b', null)) AS tab(x, y)"),
Row(null) :: Nil
)
// structs as ordering value.
checkAnswer(
sql("select max_by(x, y) FROM VALUES (('a', (10, 20))), (('b', (10, 50))), " +
"(('c', (10, 60))) AS tab(x, y)"),
Row("c") :: Nil
)
checkAnswer(
sql("select max_by(x, y) FROM VALUES (('a', (10, 20))), (('b', (10, 50))), " +
"(('c', null)) AS tab(x, y)"),
Row("b") :: Nil
)
withTempView("tempView") {
val dfWithMap = Seq((0, "a"), (1, "b"), (2, "c"))
.toDF("x", "y")
.select($"x", map($"x", $"y").as("y"))
.createOrReplaceTempView("tempView")
val error = intercept[AnalysisException] {
sql("SELECT max_by(x, y) FROM tempView").show
}
assert(
error.message.contains("function max_by does not support ordering on type map<int,string>"))
}
}
test("min_by") {
val yearOfMinEarnings =
sql("SELECT course, min_by(year, earnings) FROM courseSales GROUP BY course")
checkAnswer(yearOfMinEarnings, Row("dotNET", 2012) :: Row("Java", 2012) :: Nil)
checkAnswer(
sql("SELECT min_by(x, y) FROM VALUES (('a', 10)), (('b', 50)), (('c', 20)) AS tab(x, y)"),
Row("a") :: Nil
)
checkAnswer(
sql("SELECT min_by(x, y) FROM VALUES (('a', 10)), (('b', null)), (('c', 20)) AS tab(x, y)"),
Row("a") :: Nil
)
checkAnswer(
sql("SELECT min_by(x, y) FROM VALUES (('a', null)), (('b', null)), (('c', 20)) AS tab(x, y)"),
Row("c") :: Nil
)
checkAnswer(
sql("SELECT min_by(x, y) FROM VALUES (('a', 10)), (('b', 50)), (('c', null)) AS tab(x, y)"),
Row("a") :: Nil
)
checkAnswer(
sql("SELECT min_by(x, y) FROM VALUES (('a', null)), (('b', null)) AS tab(x, y)"),
Row(null) :: Nil
)
// structs as ordering value.
checkAnswer(
sql("select min_by(x, y) FROM VALUES (('a', (10, 20))), (('b', (10, 50))), " +
"(('c', (10, 60))) AS tab(x, y)"),
Row("a") :: Nil
)
checkAnswer(
sql("select min_by(x, y) FROM VALUES (('a', null)), (('b', (10, 50))), " +
"(('c', (10, 60))) AS tab(x, y)"),
Row("b") :: Nil
)
withTempView("tempView") {
val dfWithMap = Seq((0, "a"), (1, "b"), (2, "c"))
.toDF("x", "y")
.select($"x", map($"x", $"y").as("y"))
.createOrReplaceTempView("tempView")
val error = intercept[AnalysisException] {
sql("SELECT min_by(x, y) FROM tempView").show
}
assert(
error.message.contains("function min_by does not support ordering on type map<int,string>"))
}
}
test("count_if") {
withTempView("tempView") {
Seq(("a", None), ("a", Some(1)), ("a", Some(2)), ("a", Some(3)),
("b", None), ("b", Some(4)), ("b", Some(5)), ("b", Some(6)))
.toDF("x", "y")
.createOrReplaceTempView("tempView")
checkAnswer(
sql("SELECT COUNT_IF(NULL), COUNT_IF(y % 2 = 0), COUNT_IF(y % 2 <> 0), " +
"COUNT_IF(y IS NULL) FROM tempView"),
Row(0L, 3L, 3L, 2L))
checkAnswer(
sql("SELECT x, COUNT_IF(NULL), COUNT_IF(y % 2 = 0), COUNT_IF(y % 2 <> 0), " +
"COUNT_IF(y IS NULL) FROM tempView GROUP BY x"),
Row("a", 0L, 1L, 2L, 1L) :: Row("b", 0L, 2L, 1L, 1L) :: Nil)
checkAnswer(
sql("SELECT x FROM tempView GROUP BY x HAVING COUNT_IF(y % 2 = 0) = 1"),
Row("a"))
checkAnswer(
sql("SELECT x FROM tempView GROUP BY x HAVING COUNT_IF(y % 2 = 0) = 2"),
Row("b"))
checkAnswer(
sql("SELECT x FROM tempView GROUP BY x HAVING COUNT_IF(y IS NULL) > 0"),
Row("a") :: Row("b") :: Nil)
checkAnswer(
sql("SELECT x FROM tempView GROUP BY x HAVING COUNT_IF(NULL) > 0"),
Nil)
val error = intercept[AnalysisException] {
sql("SELECT COUNT_IF(x) FROM tempView")
}
assert(error.message.contains("function count_if requires boolean type"))
}
}
}
|
kevinyu98/spark
|
sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
|
Scala
|
apache-2.0
| 33,269 |
package org.nexbook.fix
import org.nexbook.app.OrderHandlersModule
import org.slf4j.LoggerFactory
import quickfix._
import quickfix.fix44.{NewOrderSingle, OrderCancelRequest}
class FixMessageHandler(orderHandlersModule: OrderHandlersModule, fixOrderConverter: FixOrderConverter) extends Application {
val logger = LoggerFactory.getLogger(classOf[FixMessageHandler])
val newOrderHandler = orderHandlersModule.newOrderHandler
override def onCreate(sessionId: SessionID) {
logger.info(s"FixOrderHandler Session Created with SessionID: $sessionId")
}
override def onLogon(sessionId: SessionID) {
logger.info(s"Logon: $sessionId")
}
override def onLogout(sessionId: SessionID) {
logger.info(s"Logout: $sessionId")
}
override def toAdmin(message: Message, sessionId: SessionID) {
logger.trace(s"ToAdmin: $message")
}
@throws(classOf[RejectLogon])
@throws(classOf[IncorrectTagValue])
@throws(classOf[IncorrectDataFormat])
@throws(classOf[FieldNotFound])
override def fromAdmin(message: Message, sessionId: SessionID) {
logger.debug(s"FromAdmin: $message")
}
@throws(classOf[DoNotSend])
override def toApp(message: Message, sessionId: SessionID) {
logger.trace(s"ToApp: $message")
}
@throws(classOf[UnsupportedMessageType])
@throws(classOf[IncorrectTagValue])
@throws(classOf[IncorrectDataFormat])
@throws(classOf[FieldNotFound])
override def fromApp(message: Message, sessionId: SessionID) {
logger.trace(s"FromApp: ${System.currentTimeMillis}: $message")
try {
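      // Dispatch on the concrete FIX 4.4 message type; any other message type throws a
      // MatchError, which is caught below and logged as an unexpected exception.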
message match {
case o: NewOrderSingle => onMessage(o, sessionId)
case o: OrderCancelRequest => onMessage(o, sessionId)
}
} catch {
case e: Exception => logger.error("Unexpected exception", e)
}
}
def onMessage(order: NewOrderSingle, sessionID: SessionID) {
logger.debug(s"${order.getClOrdID.getValue} - onMessage: handled message - $order from: ${sessionID.getSenderCompID}")
newOrderHandler.handleNewOrder(fixOrderConverter convert order)
}
def onMessage(orderCancel: OrderCancelRequest, sessionID: SessionID) = {
logger.debug(s"${orderCancel.getClOrdID.getValue} - onMessage: handled message with clOrdID: $orderCancel from: ${sessionID.getSenderCompID}")
newOrderHandler.handleNewOrderCancel(fixOrderConverter convert orderCancel)
}
}
|
milczarekIT/nexbook
|
src/main/scala/org/nexbook/fix/FixMessageHandler.scala
|
Scala
|
apache-2.0
| 2,292 |
package de.kalass.batchmonads.base
/**
* A very simply operation that simply returns the given value when executed in a batch.
*/
case class Return[A](private[base] result: A) extends Operation[A]
|
kkalass/BatchMonads
|
src/de/kalass/batchmonads/base/Return.scala
|
Scala
|
lgpl-3.0
| 201 |
object SearchComments
/**
* SearchComments
*/
object Main {
/*caret*/SearchComments
}
/*
object NameAfterRename
/**
* NameAfterRename
*/
object Main {
/*caret*/NameAfterRename
}
*/
|
triggerNZ/intellij-scala
|
testdata/rename/class/SearchComments.scala
|
Scala
|
apache-2.0
| 190 |
package org.emailscript.api
import java.net.{ConnectException, URL}
import java.time.Instant
import java.time.format.DateTimeFormatter
import java.util.Date
import com.google.gson.Gson
import org.emailscript.helpers.{Exporter, Importer, LoggerFactory}
import uk.co.bigbeeconsultants.http.HttpClient
import uk.co.bigbeeconsultants.http.header.MediaType
import uk.co.bigbeeconsultants.http.request.RequestBody
import scala.beans.BeanProperty
class NoteBean {
@BeanProperty var uid_l: Long = 0
@BeanProperty var from_s: String = ""
@BeanProperty var subject_s: String = ""
@BeanProperty var note_t: String = ""
@BeanProperty var date_dt: String = ""
}
object NoteBean {
def apply(email: Email, note: String) = {
val bean = new NoteBean
bean.uid_l = email.getUid()
bean.from_s = email.getFrom().toString
bean.subject_s = email.getSubject()
bean.note_t = note
bean.date_dt = Indexer.formatDate(email.getReceived())
bean
}
}
class IndexEmailBean {
@BeanProperty var id: String = ""
@BeanProperty var from_s: String = ""
@BeanProperty var received_dt: String = ""
@BeanProperty var body_t: String = ""
@BeanProperty var subject_s: String = ""
@BeanProperty var folder_s: String = ""
}
object IndexEmailBean {
def apply(email: Email) = {
val bean = new IndexEmailBean
bean.id = email.getUid().toString
bean.from_s = email.getFrom().toString
bean.received_dt = Indexer.formatDate(email.getReceived())
bean.body_t = email.getBody()
bean.subject_s = email.getSubject()
bean.folder_s = email.getFolder()
bean
}
}
class LogBean {
@BeanProperty var date_dt: String = ""
@BeanProperty var text_t: String = ""
@BeanProperty var level_s: String = ""
@BeanProperty var thread_s: String = ""
}
object LogBean {
def apply (timeStamp: Long, text: String, level: String, thread: String) = {
val bean = new LogBean
bean.date_dt = Indexer.formatDate(timeStamp)
bean.text_t = text
bean.level_s = level
bean.thread_s = thread
bean
}
}
class IndexerBean() extends NamedBean with Importer {
@BeanProperty var url: String = ""
override def doImport(): AnyRef = Indexer(this)
}
/**
 * Provides search and indexing support by posting documents to a Solr (Lucene-based) search server
*/
class Indexer(val url: String) extends Exporter {
import org.emailscript.api.Indexer._
val httpClient = new HttpClient
override def doExport(): AnyRef = {
val bean = new IndexerBean()
bean.url = url
bean
}
//
// Public API
//
def indexNote(email: Email, note:String) = {
val noteBean = NoteBean(email, note)
index(noteBean)
}
def indexEmail(email: Email) = {
val emailBean = IndexEmailBean(email)
index(emailBean)
}
/**
* Add this email to the search index, so that we can find it later
*/
def index(data: AnyRef): Unit = {
val gson = new Gson();
val body = gson.toJson(Array(data))
val requestBody = RequestBody(body, MediaType.APPLICATION_JSON)
try {
val response = httpClient.post(new URL(url), Some(requestBody))
logger.debug(s"indexing to url: $url body: $body ")
if (!response.status.isSuccess)
logger.warn(s"status: ${response.status.message} response = ${response.body.asString}")
} catch {
case e: ConnectException => logger.debug("Connection not working")
case e: Throwable => logger.info(s"Could not post to $url", e)
}
}
}
object Indexer {
val logger = LoggerFactory.getLogger(getClass)
val dateFormatter = DateTimeFormatter.ISO_INSTANT
  val CommitParam = "?commitWithin=5000" // tells Solr to commit transactions within 5 seconds
def makeUpdateCommand(url: String) = url + "/update" + CommitParam
def apply(bean: IndexerBean) = new Indexer(makeUpdateCommand(bean.url))
def apply(url: String) = new Indexer(makeUpdateCommand(url))
def formatDate(date: Date) = dateFormatter.format(date.toInstant)
def formatDate(timeStamp: Long) = dateFormatter.format(Instant.ofEpochMilli(timeStamp))
}
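
// A hypothetical usage sketch (not part of the original file); the Solr core URL below is a
// placeholder and the object name is invented for illustration only.
object IndexerUsageSketch {
  def demo(): Unit = {
    val indexer = Indexer("http://localhost:8983/solr/emails")
    val entry = LogBean(System.currentTimeMillis, "indexing started", "INFO", "main")
    indexer.index(entry) // posts [entry] as JSON to <url>/update?commitWithin=5000
  }
}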
|
OdysseusLevy/emailscript
|
src/main/scala/org/emailscript/api/Indexer.scala
|
Scala
|
lgpl-3.0
| 4,029 |
package mypkg
object Container {
class StringExtras(val s: String) extends AnyVal {
def op(item: Int): Int = ???
}
}
trait Container {
import Container.*
implicit def mkStringExtras(s: String): StringExtras = new StringExtras(s)
}
|
lampepfl/dotty
|
tests/pos/i6989/Container_1.scala
|
Scala
|
apache-2.0
| 244 |
package com.fh
import javax.servlet.annotation.WebServlet
import com.vaadin.annotations.{Theme, Title, VaadinServletConfiguration}
import com.vaadin.server.{VaadinRequest, VaadinServlet}
import com.vaadin.ui._
import org.slf4j.LoggerFactory
/**
* Servlet definition with annotations, so that we can avoid using a web.xml file.
* Points to our sub-class of the Vaadin UI.
* Created by fh on 2/12/2017.
*/
@WebServlet(value=Array("/*"), asyncSupported=true)
@VaadinServletConfiguration(productionMode=false, ui=classOf[App])
class Servlet extends VaadinServlet
/**
* Vaadin application.
* Created by fh on 2/12/2017.
*/
@Theme( "fh1" )
@Title( "Hello World Application" )
class App extends UI {
private val log = LoggerFactory.getLogger( classOf[App] )
override def init(request: VaadinRequest): Unit = {
try {
log.info( "init" )
val view = new VerticalLayout()
view.setStyleName( "bgYellow" ) // highlight the background of this view, so that we can see it on the page
view.addComponent(new Label( getMsg ))
setContent(view)
} catch {
case t: Throwable => log.error( "Failed miserably", t )
}
}
def getMsg = "Hello Vaadin!"
}
|
fh137/ScalaVaadin
|
src/main/scala/com/fh/App.scala
|
Scala
|
mit
| 1,248 |
/*
* Copyright (C) 2014 Ivan Cukic <ivan at mi.sanu.ac.rs>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package dataapi
import slick.driver.PostgresDriver.simple._
import akka.actor.Actor
import akka.io.IO
import akka.pattern.ask
import spray.can.Http
import spray.http._
import spray.httpx.RequestBuilding._
import spray.httpx.SprayJsonSupport._
import spray.json._
import DefaultJsonProtocol._
import MediaTypes._
import HttpCharsets._
import HttpMethods._
import HttpHeaders._
import StateFilter._
import common.Config.{ Ckan => CkanConfig }
import scala.concurrent.ExecutionContext.Implicits.global
import spray.http.HttpResponse
import java.sql.Timestamp
import ckan.{CkanGodInterface, DataspaceTable}
import spray.httpx.SprayJsonSupport._
import ckan.DataspaceJsonProtocol._
import ckan.ResourceJsonProtocol._
import CkanJsonProtocol._
import scala.slick.lifted.{Column, Query}
import spray.http.HttpHeaders.Location
import ckan.CkanGodInterface.IteratorData
import java.util.UUID
object DataspaceActor {
  /// Gets the list of dataspaces modified in the specified time range
case class ListDataspaces(
val authorizationKey: String,
val since: Option[Timestamp],
val until: Option[Timestamp],
val state: StateFilter,
val start: Int = 0,
val count: Int = CkanGodInterface.queryResultDefaultLimit
)
/// Gets the next results for the iterator
case class ListDataspacesFromIterator(
val authorizationKey: String,
val iterator: String
)
  /// Gets the metadata for the specified dataspace
case class GetDataspaceMetadata(
val authorizationKey: String,
val id: String)
/// Creates a dataspace
case class CreateDataspace(
val authorizationKey: String,
val dataspace: DataspaceCreateWithId
)
/// Updates a dataspace
case class UpdateDataspace(
val authorizationKey: String,
val dataspace: DataspaceUpdateWithId)
/// Creates a new package in the dataspace
case class CreatePackageInDataspace(
val authorizationKey: String,
val p: PackageCreateWithId)
}
class DataspaceActor
extends Actor
with dataapi.DefaultValues
{
import DataspaceActor._
import context.system
def postRequest[T](action: String, data: T, authorizationKey: String)(implicit evidence: spray.httpx.marshalling.Marshaller[T]) =
(IO(Http) ? (
Post(CkanConfig.namespace + "action/" + action, data)~>addHeader("Authorization", authorizationKey)
))
def receive: Receive = {
    /// Gets the list of dataspaces modified in the specified time range
case ListDataspaces(authorizationKey, since, until, state, start, count) =>
CkanGodInterface.database withSession { implicit session: Session =>
val (query, nextPage, currentPage) =
CkanGodInterface.listDataspacesQuery(authorizationKey, since, until, state, start, count)
// Dataspaces do not support iterators thanks to CKAN //
// "nextPage" -> JsString(nextPage.map("/resources/query/results/" + _) getOrElse ""),
// "currentPage" -> JsString(currentPage.map("/resources/query/results/" + _) getOrElse ""),
sender ! HttpResponse(status = StatusCodes.OK,
entity = HttpEntity(ContentType(`application/json`, `UTF-8`),
JsObject("data" -> query.list.toJson).prettyPrint))
}
/// Decodes the iterator data and invokes ListDataspaces
case ListDataspacesFromIterator(authorizationKey, iteratorData) =>
val iterator = IteratorData.fromId(iteratorData).get
receive(ListDataspaces(
authorizationKey,
Some(iterator.since),
Some(iterator.until),
iterator.state,
iterator.start,
iterator.count
))
    /// Gets the metadata for the specified dataspace
case GetDataspaceMetadata(authorizationKey, request) =>
CkanGodInterface.database withSession { implicit session: Session =>
val requestParts = request.split('.')
val id = requestParts.head
val format = if (requestParts.size == 2) requestParts(1) else "json"
val mimetype = if (format == "html") `text/html` else `application/json`
val dataspace = CkanGodInterface.getDataspace(authorizationKey, id)
sender ! HttpResponse(
status = StatusCodes.OK,
entity = HttpEntity(
ContentType(mimetype, `UTF-8`),
if (format == "html") {
dataspace.map {
templates.html.dataspace(_).toString
}.getOrElse {
templates.html.error(403, id).toString
}
} else {
dataspace.map {
_.toJson.prettyPrint
}.getOrElse {
""
}
}
)
)
}
// Creates a new dataspace
case CreateDataspace(authorizationKey, dataspace) => {
val originalSender = sender
// Passing the request to CKAN
postRequest("organization_create", dataspace, authorizationKey)
.mapTo[HttpResponse]
.map { response => response.status match {
case StatusCodes.OK =>
// TODO: Try to read dataspace from (ugly) CKAN response, not from db
val createdDataspace = CkanGodInterface.getDataspace(authorizationKey, dataspace.id)
originalSender ! HttpResponse(status = StatusCodes.Created,
entity = HttpEntity(ContentType(`application/json`, `UTF-8`),
createdDataspace.map { _.toJson.prettyPrint}.getOrElse {""}),
headers = List(Location(s"${common.Config.namespace}dataspaces/${dataspace.id}")))
case StatusCodes.BadRequest =>
originalSender ! HttpResponse(status = response.status, entity = response.entity)
case _ =>
originalSender ! HttpResponse(response.status, "Error creating dataspace!")
}
}
}
// Updates the dataspace
case UpdateDataspace(authorizationKey, dataspace) => {
val originalSender = sender
// Passing the request to CKAN
postRequest("organization_update", dataspace, authorizationKey)
.mapTo[HttpResponse]
.map { response => response.status match {
case StatusCodes.OK =>
// TODO: Try to read dataspace from (ugly) CKAN response, not from db
val updatedDataspace = CkanGodInterface.getDataspace(authorizationKey, dataspace.id)
originalSender ! HttpResponse(status = StatusCodes.OK,
entity = HttpEntity(ContentType(`application/json`, `UTF-8`),
updatedDataspace.map { _.toJson.prettyPrint} getOrElse ""))
case StatusCodes.BadRequest =>
originalSender ! HttpResponse(status = response.status, entity = response.entity)
case _ =>
originalSender ! HttpResponse(response.status, "Error updating dataspace!")
}
}
}
// Creates a new package in the dataspace
case CreatePackageInDataspace(authorizationKey, p) => {
val originalSender = sender
postRequest("package_create", p, authorizationKey)
.mapTo[HttpResponse]
.map { response => originalSender ! response}
}
case response: HttpResponse =>
println(s"Sending the response back to the requester $response")
case other =>
println(s"Found an unknown thing: $other")
sender ! other
}
}
|
ivan-cukic/litef-conductor
|
src/main/scala/dataapi/DataspaceActor.scala
|
Scala
|
apache-2.0
| 9,323 |
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Edge, Graph}
import org.apache.spark.rdd.RDD
/**
* Created by xinghao on 3/13/15.
*/
object AuxiliaryFunctions {
def setZeroDegreeToCenter(graph: Graph[Int, Int], zeroDegreeID: Int, centerID: Int): Graph[Int, Int] = {
graph.mapVertices[Int]((vid, attr) => if (attr == zeroDegreeID) centerID else attr)
}
def setCenterAttrToNegativeID(graph: Graph[Int, Int]): Graph[Int, Int] = {
graph.mapVertices[Int]((vid, attr) => if (attr <= 0) -vid.toInt else attr)
}
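  // Note: computeObjective below appears to compute the correlation-clustering disagreement
  // score: the number of vertex pairs that share a cluster but are not joined by an edge
  // (cChoose2 - intraCluster), plus the number of edges that cross cluster boundaries
  // (interCluster).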
def computeObjective(graph: Graph[Int, Int]): Long = {
val g = setCenterAttrToNegativeID(graph)
val cChoose2: Long = g.aggregateMessages[Long](
triplet => {
if (triplet.srcAttr == triplet.dstId)
triplet.sendToDst(1)
},
_ + _
).map(vc => vc._2 * (vc._2 + 1) / 2).fold(0)(_ + _)
val intraCluster: Long = g.aggregateMessages[Long](
triplet => {
if (math.abs(triplet.srcAttr) == math.abs(triplet.dstAttr))
triplet.sendToDst(1)
},
_ + _
).map(vc => vc._2).fold(0)(_ + _) / 2
val interCluster: Long = g.aggregateMessages[Long](
triplet => {
if (math.abs(triplet.srcAttr) != math.abs(triplet.dstAttr))
triplet.sendToDst(1)
},
_ + _
).map(vc => vc._2).fold(0)(_ + _) / 2
cChoose2 - intraCluster + interCluster
}
def main(args: Array[String]) = {
Logger.getLogger("org").setLevel(Level.WARN)
Logger.getLogger("akka").setLevel(Level.WARN)
val sc = new SparkContext()
/* Start with 1, obj = 2 */
val vertexArray = Array(
(1L, -999),
(2L, 1),
(3L, 1),
(4L, -999),
(5L, 1)
)
/* Start with 2, obj = 4 */
// val vertexArray = Array(
// (1L, 2),
// (2L, -999),
// (3L, -999),
// (4L, 2),
// (5L, 2)
// )
/* Start with 3, obj = 2 */
// val vertexArray = Array(
// (1L, 3),
// (2L, -999),
// (3L, -999),
// (4L, 2),
// (5L, 3)
// )
/* Start with 4, obj = 2 */
// val vertexArray = Array(
// (1L, 3),
// (2L, 4),
// (3L, -999),
// (4L, -999),
// (5L, 3)
// )
/* Start with 5, obj = 2 */
// val vertexArray = Array(
// (1L, 5),
// (2L, 5),
// (3L, 5),
// (4L, -999),
// (5L, -999)
// )
/* Every vertex in its own cluster, obj = 6 */
// val vertexArray = Array(
// (1L, -999),
// (2L, -999),
// (3L, -999),
// (4L, -999),
// (5L, -999)
// )
/* {{1,2,3}, {4}, {5}}, obj = 5 */
// val vertexArray = Array(
// (1L, -999),
// (2L, 1),
// (3L, 1),
// (4L, -999),
// (5L, -999)
// )
val edgeArray = Array(
Edge(1, 2, 1),
Edge(1, 3, 1),
Edge(1, 5, 1),
Edge(2, 1, 1),
Edge(2, 4, 1),
Edge(2, 5, 1),
Edge(3, 1, 1),
Edge(3, 5, 1),
Edge(4, 2, 1),
Edge(5, 1, 1),
Edge(5, 2, 1),
Edge(5, 3, 1)
)
val vertexRDD: RDD[(Long, Int)] = sc.parallelize(vertexArray)
val edgeRDD: RDD[Edge[Int]] = sc.parallelize(edgeArray)
var graph: Graph[Int, Int] = Graph(vertexRDD, edgeRDD)
val obj = AuxiliaryFunctions.computeObjective(graph)
System.out.println(s"$obj")
}
}
|
anadim/clusterWild
|
src/main/scala/AuxiliaryFunctions.scala
|
Scala
|
apache-2.0
| 3,342 |
package bad.robot.temperature
import bad.robot.temperature.rrd.{Host, Seconds}
import org.specs2.matcher.DisjunctionMatchers._
import org.specs2.matcher.MatchResult
import org.specs2.mutable.Specification
import org.specs2.specification.AfterAll
import scala.Double._
import scalaz.{\\/, \\/-}
class ErrorOnTemperatureSpikeTest extends Specification with AfterAll {
val SensorError: PartialFunction[Error, MatchResult[Any]] = {
case _: SensorSpikeError => ok
}
"Delegates" >> {
val delegate = new StubWriter
new ErrorOnTemperatureSpike(delegate).write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1))))
)
}
"Errors on spiked value (single sensor)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(51.1))))) must be_-\\/.like(SensorError)
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))),
Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6))))
)
}
"Error on spiked values (multiple sensors)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(5), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(7), List(SensorReading("A", Temperature(51.6))))) must be_-\\/.like(SensorError)
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("B", Temperature(31.4)))))
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("B", Temperature(31.1)))))
writer.write(Measurement(Host("example"), Seconds(6), List(SensorReading("B", Temperature(31.4)))))
writer.write(Measurement(Host("example"), Seconds(8), List(SensorReading("B", Temperature(51.1))))) must be_-\\/.like(SensorError)
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))),
Measurement(Host("example"), Seconds(5), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("B", Temperature(31.4)))),
Measurement(Host("example"), Seconds(4), List(SensorReading("B", Temperature(31.1)))),
Measurement(Host("example"), Seconds(6), List(SensorReading("B", Temperature(31.4))))
)
}
"Recovers from spiked value (single sensor)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(51.1))))) must be_-\\/.like(SensorError)
writer.write(Measurement(Host("example"), Seconds(5), List(SensorReading("A", Temperature(51.2))))) must be_-\\/.like(SensorError)
writer.write(Measurement(Host("example"), Seconds(6), List(SensorReading("A", Temperature(51.5))))) must be_-\\/.like(SensorError)
writer.write(Measurement(Host("example"), Seconds(7), List(SensorReading("A", Temperature(21.7)))))
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))),
Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))),
Measurement(Host("example"), Seconds(7), List(SensorReading("A", Temperature(21.7))))
)
}
"Negative spikes values (single sensor)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(1.1))))) must be_-\\/.like(SensorError)
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.4)))),
Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(21.6))))
)
}
"Negative spikes values (single sensor), example from production" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate, Barrier(1))
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(0.5)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(0.5)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(0.4)))))
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(-0.7))))) must be_-\\/.like(SensorError)
writer.write(Measurement(Host("example"), Seconds(5), List(SensorReading("A", Temperature(0.4)))))
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(0.5)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(0.5)))),
Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(0.4)))),
Measurement(Host("example"), Seconds(5), List(SensorReading("A", Temperature(0.4))))
)
}
"NaN (32.625 - 0.0 / 0.0 * 100 is NaN)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(0.0)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(32.625))))) must be_-\\/.like(SensorError)
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(0.0))))
)
}
"Infinity (0.0 - 32.625 / 0.0 * 100 is -Infinity)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(32.625)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(0.0))))) must be_-\\/.like(SensorError)
delegate.temperatures must_== List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(32.625))))
)
}
"Error message on a spiked value (single sensor)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(51.1))))) must be_-\\/.like {
case e: SensorSpikeError => e.message must_==
"""An unexpected spike was encountered on:
| sensor(s) : A
| previous temperatures : 21.6 °C
| spiked temperatures : 51.1 °C
|""".stripMargin
}
}
"Error message on a spiked value (multiple sensors)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A1", Temperature(21.1)), SensorReading("A2", Temperature(21.3)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("B1", Temperature(21.6)), SensorReading("B2", Temperature(21.8)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A1", Temperature(51.4)), SensorReading("A2", Temperature(51.1))))) must be_-\\/.like {
case e: SensorSpikeError => e.message must_==
"""An unexpected spike was encountered on:
| sensor(s) : A1, A2
| previous temperatures : 21.1 °C, 21.3 °C
| spiked temperatures : 51.4 °C, 51.1 °C
|""".stripMargin
}
}
"What happens with NaN (let it through)" >> {
val delegate = new StubWriter
val writer = new ErrorOnTemperatureSpike(delegate)
writer.write(Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))))
writer.write(Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.6)))))
writer.write(Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(NaN))))) must be_\\/-
writer.write(Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(21.8)))))
delegate.temperatures must containAllOf(List(
Measurement(Host("example"), Seconds(1), List(SensorReading("A", Temperature(21.1)))),
Measurement(Host("example"), Seconds(2), List(SensorReading("A", Temperature(21.6)))),
// Measurement(Host("example"), Seconds(3), List(SensorReading("A", Temperature(NaN)))), // can't do equality check on NaN
Measurement(Host("example"), Seconds(4), List(SensorReading("A", Temperature(21.8))))
))
}
"Toggle the use based on system property" >> {
ErrorOnTemperatureSpike(new StubWriter()) must haveClass[StubWriter]
sys.props += ("avoid.spikes" -> "true")
ErrorOnTemperatureSpike(new StubWriter()) must haveClass[ErrorOnTemperatureSpike]
}
class StubWriter extends TemperatureWriter {
var temperatures: List[Measurement] = List()
def write(measurement: Measurement): \\/[Error, Unit] = {
temperatures = temperatures :+ measurement
\\/-(())
}
}
override def afterAll(): Unit = sys.props -= "avoid.spikes"
}
|
tobyweston/temperature-machine
|
src/test/scala/bad/robot/temperature/ErrorOnTemperatureSpikeTest.scala
|
Scala
|
apache-2.0
| 11,069 |
package com.arcusys.valamis.slide.service
import com.arcusys.learn.liferay.LiferayClasses.LAssetEntry
import com.arcusys.learn.liferay.services.{AssetEntryLocalServiceHelper, CompanyHelper}
import com.arcusys.valamis.liferay.AssetHelper
import com.arcusys.valamis.slide.model.SlideSet
import com.arcusys.valamis.tag.model.ValamisTag
import scala.collection.JavaConverters._
trait SlideSetAssetHelper {
def updateSlideAsset(slideSet: SlideSet, userId: Option[Long]): Long
def getSlideAsset(slideSetId: Long): Option[LAssetEntry]
def getSlideAssets(slideSetIds: Seq[Long]): Seq[LAssetEntry]
def getSlideAssetCategories(slideSetId: Long): Seq[ValamisTag]
  def getSlidesAssetCategories(slideSetIds: Seq[Long]): Seq[(Long, Seq[ValamisTag])]
def deleteSlideAsset(slideSetId: Long): Unit
}
class SlideSetAssetHelperImpl extends AssetHelper[SlideSet] with SlideSetAssetHelper {
private lazy val assetHelper = AssetEntryLocalServiceHelper
def updateSlideAsset(slideSet: SlideSet, userId: Option[Long]): Long = {
updateAssetEntry(
slideSet.id,
userId,
if (slideSet.courseId != -1L) Some(slideSet.courseId) else None,
Some(slideSet.title),
Some(slideSet.description),
slideSet,
Option(CompanyHelper.getCompanyId))
}
def getSlideAsset(slideSetId: Long): Option[LAssetEntry] =
assetHelper.fetchAssetEntry(className, slideSetId)
def getSlideAssets(slideSetIds: Seq[Long]): Seq[LAssetEntry] =
assetHelper.fetchAssetEntries(className, slideSetIds)
def getSlideAssetCategories(slideSetId: Long): Seq[ValamisTag] =
getSlideAsset(slideSetId) map { asset =>
asset.getCategories.asScala.map(c => ValamisTag(c.getCategoryId, c.getName)).toSeq
} getOrElse Seq()
def getSlidesAssetCategories(slideSetIds: Seq[Long]): Seq[(Long, Seq[ValamisTag])] =
getSlideAssets(slideSetIds) map { asset =>
val tags = asset.getCategories.asScala.map(c => ValamisTag(c.getCategoryId, c.getName)).toSeq
(asset.getClassPK, tags)
}
def deleteSlideAsset(slideSetId: Long): Unit = {
getSlideAsset(slideSetId).map { asset =>
assetHelper.deleteAssetEntry(asset.getEntryId)
}
}
}
|
arcusys/Valamis
|
valamis-slide/src/main/scala/com/arcusys/valamis/slide/service/SlideSetAssetHelperImpl.scala
|
Scala
|
gpl-3.0
| 2,181 |
/*
Copyright (c) 2008, 2009 Hanno Braun <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.hannobraun.kong.util
import javax.swing.SwingUtilities
object PiccoUtil {
def updateSG( f: () => Unit ) {
SwingUtilities.invokeAndWait( new Runnable {
def run {
f.apply
}
} )
}
}
|
hannobraun/Kong
|
src/main/scala/com/hannobraun/kong/util/PiccoUtil.scala
|
Scala
|
apache-2.0
| 812 |
package nak.classify
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import breeze.linalg._
/**
*
* @author dlwh
*/
@RunWith(classOf[JUnitRunner])
class SMOTrainerTest
extends ClassifierTrainerTestHarness
with ContinuousTestHarness {
def trainer[L,T]:Classifier.Trainer[L,Counter[T,Double]] =
new SVM.SMOTrainer[L,Counter[T,Double]](10);
}
|
scalanlp/nak
|
src/test/scala/nak/classify/SVMTrainerTest.scala
|
Scala
|
apache-2.0
| 377 |
package io.rout
/**
* Allows the creation of reusable validation rules for [[ReqRead]]s.
*/
object ValidationRule {
/**
* Implicit conversion that allows the same [[ValidationRule]] to be used for required
* and optional values. If the optional value is non-empty, it gets validated (and validation may fail, producing an
* error), but if it is empty, it is always treated as valid.
*
* @param rule the validation rule to adapt for optional values
* @return a new validation rule that applies the specified rule to an optional value in case it is not empty
*/
implicit def toOptionalRule[A](rule: ValidationRule[A]): ValidationRule[Option[A]] = {
ValidationRule(rule.description) {
case Some(value) => rule(value)
case None => true
}
}
/**
* Creates a new reusable [[ValidationRule]] based on the specified predicate.
*
* @param desc text describing the rule being validated
* @param p returns true if the data is valid
*
* @return a new reusable validation rule.
*/
def apply[A](desc: String)(p: A => Boolean): ValidationRule[A] = new ValidationRule[A] {
def description: String = desc
def apply(value: A): Boolean = p(value)
}
}
/**
* A `ValidationRule` enables a reusable way of defining a validation rules in the application domain. It might be
 * composed with [[ReqRead]]s using either `should` or `shouldNot` methods and with other `ValidationRule`s using
* logical methods `and` and `or`.
*
* {{{
* case class User(name: String, age: Int)
* val user: ReqRead[User] = (
* param("name").should(beLongerThan(3)) ::
* param("age").as[Int].should(beGreaterThan(0) and beLessThan(120))
* ).as[User]
* }}}
*/
trait ValidationRule[A] { self =>
/**
* Text description of this validation rule.
*/
def description: String
/**
* Applies the rule to the specified value.
*
* @return true if the predicate of this rule holds for the specified value
*/
def apply(value: A): Boolean
/**
* Combines this rule with another rule such that the new rule only validates if both the combined rules validate.
*
* @param that the rule to combine with this rule
* @return a new rule that only validates if both the combined rules validate
*/
def and(that: ValidationRule[A]): ValidationRule[A] =
ValidationRule(s"${self.description} and ${that.description}") { value => self(value) && that(value) }
/**
* Combines this rule with another rule such that the new rule validates if any one of the combined rules validates.
*
* @param that the rule to combine with this rule
* @return a new rule that validates if any of the combined rules validates
*/
def or(that: ValidationRule[A]): ValidationRule[A] =
ValidationRule(s"${self.description} or ${that.description}") { value => self(value) || that(value) }
}
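
// A minimal, hypothetical usage sketch of the combinators above (not part of the original
// io.rout sources); the object and rule names below are invented for illustration only.
object ValidationRuleUsageSketch {

  val beNonEmpty: ValidationRule[String] =
    ValidationRule("be non-empty")(_.nonEmpty)

  def beShorterThan(n: Int): ValidationRule[String] =
    ValidationRule(s"be shorter than $n characters")(_.length < n)

  // `and` requires both predicates to hold; `or` would accept either one.
  val beAReasonableName: ValidationRule[String] =
    beNonEmpty and beShorterThan(64)

  // The implicit toOptionalRule conversion treats an empty Option as always valid.
  val optionalName: ValidationRule[Option[String]] = beAReasonableName

  def demo(): Unit = {
    assert(beAReasonableName("Alice"))   // passes both rules
    assert(!beAReasonableName(""))       // fails beNonEmpty
    assert(optionalName(None))           // None is always valid
    assert(optionalName(Some("Alice")))  // non-empty values are validated as usual
  }
}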
|
teodimoff/rOut
|
core/src/io/rout/ValidationRule.scala
|
Scala
|
apache-2.0
| 2,873 |
trait T[T] { def apply(x: Int): T }
class C(val x: Int) extends AnyVal { override def toString = s"$x" }
object Test {
def main(args: Array[String]): Unit = {
{
val t: A[String] = s => s
assert((t: A[_]).apply("there") == "there")
}
{
var u = 0
val t: T[Unit] = x => u = x
t.apply(1)
assert(u == 1)
}
{
val t: T[C] = x => new C(x)
assert(t.apply(1) == new C(1))
}
}
}
|
lrytz/scala
|
test/files/run/t10334b/Test.scala
|
Scala
|
apache-2.0
| 445 |
package models
case class Inspectee(uri: String)
|
ukwa/interject
|
interject-webapp/app/models/Inspectee.scala
|
Scala
|
apache-2.0
| 51 |
package com.telegram.api
/**
* Sticker
*
* This object represents a sticker.
*
* @param fileId Unique identifier for this file
* @param width Sticker width
* @param height Sticker height
* @param thumb Optional. Sticker thumbnail in .webp or .jpg format
* @param fileSize Optional. File size
*/
case class Sticker(
fileId : String,
width : Int,
height : Int,
thumb : Option[PhotoSize] = None,
fileSize : Option[Int] = None
)
|
rub0/tbot
|
src/main/scala/com/telegram/api/Sticker.scala
|
Scala
|
gpl-3.0
| 589 |
package edu.gemini.spModel.core
import scalaz.{Order, Monoid, Show}
/**
* Offset in P.
*/
case class OffsetP(toAngle: Angle) extends AnyVal
object OffsetP {
val Zero = OffsetP(Angle.zero)
implicit val IsoAngleP = new IsoAngle[OffsetP] {
override def toDegrees(p: OffsetP): Double = Angle.signedDegrees(p.toAngle.toDegrees)
override def fromDegrees(d: Double): OffsetP = OffsetP(Angle.fromDegrees(d))
}
import AngleSyntax._
implicit val ShowP: Show[OffsetP] =
Show.shows(p => f"${p.arcsecs}%4.03f arcsecs")
implicit val MonoidP: Monoid[OffsetP] =
new Monoid[OffsetP] {
val zero = Zero
def append(a: OffsetP, b: => OffsetP): OffsetP = IsoAngleP.add(a, b)
}
implicit val OrderP: Order[OffsetP] =
Order.orderBy(_.degrees)
}
|
arturog8m/ocs
|
bundle/edu.gemini.spModel.core/src/main/scala/edu/gemini/spModel/core/OffsetP.scala
|
Scala
|
bsd-3-clause
| 782 |
package streaming
import java.io.File
import org.apache.spark.streaming._
import org.apache.spark.{SparkConf, SparkContext}
import scala.util.Random
//
// A utility for creating a sequence of files of integers in the file system
// so that Spark can treat them like a stream.
//
class FileMaker {
private val root =
new File("c:" + File.separator + "temp" + File.separator + "streamFiles")
makeExist(root)
private val prep =
new File(root.getAbsolutePath() + File.separator + "prep")
makeExist(prep)
val dest =
new File(root.getAbsoluteFile() + File.separator + "dest")
makeExist(dest)
// fill a file with integers
private def writeOutput(f: File) : Unit = {
val p = new java.io.PrintWriter(f)
try {
for (i <- 1 to 100) {
p.println(Random.nextInt)
}
} finally {
p.close()
}
}
private def makeExist(dir: File) : Unit = {
dir.mkdir()
}
// make the sequence of files by creating them in one place and renaming
// them into the directory where Spark is looking for them
// (file-based streaming requires "atomic" creation of the files)
def makeFiles() = {
for (n <- 1 to 10) {
val f = File.createTempFile("Spark_", ".txt", prep)
writeOutput(f)
val nf = new File(dest + File.separator + f.getName())
f renameTo nf
nf.deleteOnExit()
Thread.sleep(500)
}
}
}
object FileBasedStreaming {
def main (args: Array[String]) {
val conf = new SparkConf().setAppName("FileBasedStreaming").setMaster("local[4]")
val sc = new SparkContext(conf)
// streams will produce data every second
val ssc = new StreamingContext(sc, Seconds(1))
val fm = new FileMaker()
// create the stream
val stream = ssc.textFileStream(fm.dest.getAbsolutePath())
// register for data
stream.foreachRDD(r => {
println(r.count())
})
// start streaming
ssc.start()
// start producing files
fm.makeFiles()
while (true) {
Thread.sleep(100)
}
}
}
|
IMCG/https-github.com-spirom-LearningSpark
|
src/main/scala/streaming/FileBased.scala
|
Scala
|
mit
| 2,030 |
package io.plasmap.model
case class OsmProperties(
osmId: OsmId,
user: Option[OsmUser] = None,
version: OsmVersion
) {
override def toString = StringBuilder.newBuilder.++=(osmId.toString).++=(user.getOrElse("unknown").toString).++=(version.toString).toString()
}
|
plasmap/geow
|
src/main/scala/io/plasmap/model/OsmProperties.scala
|
Scala
|
apache-2.0
| 370 |
package com.signalcollect.dcop.test
import com.signalcollect.dcop.termination.ConvergenceHistory
object ConvergenceHistoryTest extends App {
val h : ConvergenceHistory[Int] = new ConvergenceHistory(3)
h.push(5)
h.push(12)
h.push(3)
h.push(45)
println(h)
println(h.isFull)
h.push(10)
println(h)
}
|
gmazlami/dcop-maxsum
|
src/main/scala/com/signalcollect/dcop/test/ConvergenceHistoryTest.scala
|
Scala
|
apache-2.0
| 334 |
package scuff
import java.util.concurrent.{ Callable, Executor, ScheduledExecutorService }
import scala.concurrent._
import scala.concurrent.duration.{ DurationInt, FiniteDuration }
import scala.util.Try
import scala.util.control.NoStackTrace
import java.util.concurrent.ScheduledFuture
import java.util.concurrent.TimeoutException
import scala.annotation.implicitNotFound
package object concurrent {
implicit def exeCtxToExecutor(ec: ExecutionContext): Executor = ec match {
case exe: Executor => exe
case _ => new Executor with ExecutionContext {
def execute(runnable: Runnable): Unit = ec.execute(runnable)
def reportFailure(th: Throwable): Unit = ec.reportFailure(th)
}
}
implicit class ScuffExecutor(private val ec: { def execute(run: Runnable): Unit }) extends AnyVal {
@inline private def execute(r: Runnable): Unit =
ec match {
case ec: ExecutionContext =>
ec execute r
case exe: Executor =>
exe execute r
case _ =>
ec execute r
}
def execute(thunk: => Unit): Unit = this execute new Runnable {
def run = thunk
}
def submit(runnable: Runnable): Future[Unit] = {
val promise = Promise[Unit]()
this execute new Runnable {
def run = promise complete Try(runnable.run)
override def hashCode = runnable.hashCode
}
promise.future
}
def submit[T](callable: Callable[T]): Future[T] = {
val promise = Promise[T]()
this execute new Runnable {
def run = promise complete Try(callable.call)
override def hashCode = callable.hashCode
}
promise.future
}
def submit[T](thunk: => T): Future[T] = {
val promise = Promise[T]()
this execute new Runnable {
def run = promise complete Try(thunk)
}
promise.future
}
}
implicit class ScuffScheduledExecutor(private val scheduler: ScheduledExecutorService) extends AnyVal {
def schedule[T](delay: FiniteDuration)(thunk: => T): ScheduledFuture[T] = {
val c = new Callable[T] {
def call = thunk
}
scheduler.schedule(c, delay.length, delay.unit)
}
def scheduleAtFixedRate(initDelay: FiniteDuration, period: FiniteDuration)(thunk: => Unit): ScheduledFuture[Unit] = {
val r = new Runnable {
def run = thunk
}
val initialDelay = period.unit.convert(initDelay.length, initDelay.unit)
scheduler.scheduleAtFixedRate(r, initialDelay, period.length, period.unit).asInstanceOf[ScheduledFuture[Unit]]
}
def scheduleWithFixedDelay(initDelay: FiniteDuration, period: FiniteDuration)(thunk: => Unit): ScheduledFuture[Unit] = {
val r = new Runnable {
def run = thunk
}
val initialDelay = period.unit.convert(initDelay.length, initDelay.unit)
scheduler.scheduleWithFixedDelay(r, initialDelay, period.length, period.unit).asInstanceOf[ScheduledFuture[Unit]]
}
}
private val DefaultTimeout = 30.seconds
implicit class ScuffScalaFuture[T](private val f: Future[T]) extends AnyVal {
def await: T = await(DefaultTimeout)
def await(maxWait: FiniteDuration, reportFailureAfterTimeout: Throwable => Unit = null): T =
if (f.isCompleted) {
f.value.get.get
} else {
try Await.result(f, maxWait) catch {
case timeout: TimeoutException if reportFailureAfterTimeout != null =>
f.failed.foreach(reportFailureAfterTimeout)(Threads.PiggyBack)
throw timeout
}
}
def flatten[A](implicit ev: T <:< Future[A]): Future[A] = {
assert(ev != null) // Remove warning
f.asInstanceOf[Future[Future[A]]].flatMap(identity)(Threads.PiggyBack)
}
def withTimeout(timeout: FiniteDuration)(implicit scheduler: ScheduledExecutorService): Future[T] = {
if (f.isCompleted) f
else {
val promise = Promise[T]()
val cmd = new Runnable {
def run(): Unit = {
promise tryFailure new TimeoutException(s"Timed out after $timeout") with NoStackTrace
}
}
val timeoutFuture = scheduler.schedule(cmd, timeout.length, timeout.unit)
f.onComplete {
case result =>
if (promise tryComplete result) {
timeoutFuture.cancel(false)
}
}(Threads.PiggyBack)
promise.future
}
}
}
implicit def typedFutureConv[T](implicit untyped: JavaFutureConverter) =
untyped.asInstanceOf[java.util.concurrent.Future[T] => Future[T]]
implicit class ScuffJavaFuture[T](private val f: java.util.concurrent.Future[T]) extends AnyVal {
@implicitNotFound(msg = "No java.util.concurrent.Future => scala.concurrent.Future function found. Try an instance of JavaFutureConverter")
def asScala(implicit conv: java.util.concurrent.Future[T] => Future[T]): Future[T] = conv(f)
}
implicit class ScuffLock(private val lock: java.util.concurrent.locks.Lock) extends AnyVal {
def tryFor[T](dur: FiniteDuration)(thunk: => T): Option[T] = {
if (lock.tryLock(dur.length, dur.unit)) try {
Some(thunk)
} finally {
lock.unlock()
}
else None
}
def apply[T](thunk: => T): T = {
lock.lockInterruptibly()
try {
thunk
} finally {
lock.unlock()
}
}
def uninterruptibly[T](thunk: => T) = {
lock.lock()
try {
thunk
} finally {
lock.unlock()
}
}
}
implicit class ScuffCondition(private val cond: java.util.concurrent.locks.Condition) extends AnyVal {
def await(condition: => Boolean): Unit = while (!condition) cond.await()
def signalIf(condition: Boolean): Unit = if (condition) cond.signal()
def signalAllIf(condition: Boolean): Unit = if (condition) cond.signalAll()
}
implicit class ScuffConcurrentMap[K, V](private val cmap: collection.concurrent.Map[K, V]) extends AnyVal {
/** Update if present; return updated value. */
def updateIfPresent(k: K)(update: V => V): Option[V] = {
cmap.get(k) flatMap { oldvalue =>
val newvalue = update(oldvalue)
if (cmap.replace(k, oldvalue, newvalue)) Some(newvalue)
else updateIfPresent(k)(update)
}
}
/** Update or insert; return upserted value. */
def upsert(k: K, putIfAbsent: V)(updateIfPresent: V => V): V = {
cmap.putIfAbsent(k, putIfAbsent) match {
case None => putIfAbsent
case Some(present) =>
val update = updateIfPresent(present)
if (cmap.replace(k, present, update)) {
update
} else {
upsert(k, putIfAbsent)(updateIfPresent)
}
}
}
}
private[this] val NoFuture = Future successful None
private[this] val NilFuture = Future successful Nil
private[this] val UnitFuture = Future successful (())
private[this] val TrueFuture = Future successful true
private[this] val FalseFuture = Future successful false
implicit final class ScuffFutureObject(private val f: Future.type) extends AnyVal {
def none: Future[None.type] = NoFuture
def nil: Future[Nil.type] = NilFuture
def unit: Future[Unit] = UnitFuture
def True: Future[Boolean] = TrueFuture
def False: Future[Boolean] = FalseFuture
}
}
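
// A small, hypothetical usage sketch (not part of the original scuff sources) of a few of the
// extension methods defined above; the object name and the values `hits`, `slow`, `bounded`
// are invented for illustration only.
object ConcurrentUsageSketch {
  import java.util.concurrent.Executors
  import scala.collection.concurrent.TrieMap
  import scuff.concurrent._

  def demo(): Unit = {
    // ScuffConcurrentMap.upsert: atomically insert-or-update a counter value.
    val hits = TrieMap.empty[String, Int]
    hits.upsert("home", 1)(_ + 1) // key absent: inserts 1
    hits.upsert("home", 1)(_ + 1) // key present: updates to 2
    assert(hits("home") == 2)

    // ScuffScalaFuture.withTimeout: fail a never-completing future after a bounded delay.
    implicit val scheduler: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor()
    val slow: Future[Int] = Promise[Int]().future
    val bounded: Future[Int] = slow.withTimeout(100.millis)
    // `bounded` will eventually fail with a TimeoutException; by default the scheduler still
    // runs already-scheduled delayed tasks after shutdown().
    scheduler.shutdown()
  }
}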
|
nilskp/scuff
|
src/main/scala/scuff/concurrent/package.scala
|
Scala
|
mit
| 7,275 |
package model.estate
import model.Url
import model.Id
import model.Identifiable
/**
* Data object for storing all the estate related data
*/
case class Estate[+Domain] (override val id: Option[Id] = None,
url: Url,
address: Option[EstateAddress] = None,
fee: Option[EstateFee] = None,
basicInfo: Option[EstateBasicInfo] = None,
statusInfo: EstateStatusInfo = new EstateStatusInfo(),
images: List[EstateImage] = List()
) extends Identifiable {
override def updateId(newId: Id) = this.copy( id = Some(newId))
}
|
tiborbotos/domino
|
domino-crawler/src/main/scala/model/estate/Estate.scala
|
Scala
|
lgpl-3.0
| 532 |
package vultura.factor
/** An instance of this class represents a sum-product operation on some factors. The only thing that may change
  * between invocations is the values of the involved factors. Representing the operation as a reusable object
  * allows it to be executed very quickly.
* @see sumProduct
*
  * @param remainingVars   the variables to keep; all other variables are summed (marginalized) out
  * @param domainSizes     the domain size of each variable, indexed by variable number
  * @param factorVariables the variable scope of each involved factor
*/
case class SumProductTask(remainingVars: Array[Int],
domainSizes: Array[Int],
factorVariables: Array[Array[Int]],
ring: Ring[Double]){
val numFactors: Int = factorVariables.length
val remainSize: Int = Factor.mapMultiply(remainingVars,domainSizes)
//collect all variables
val (cliqueOrdering: Array[Int], margVars: Array[Int]) = {
val allVars: Array[Int] = factorVariables.flatten.distinct
//reorder, so that all margVars are at beginning
val mv: Array[Int] = allVars.filterNot(remainingVars.contains)
(mv ++ remainingVars,mv)
}
val lookups: Array[Array[Int]] = factorVariables.map(Factor.buildLookup(cliqueOrdering,domainSizes,_))(collection.breakOut)
val margSize: Int = Factor.mapMultiply(margVars,domainSizes)
//domain sizes ordered by the appearance of variables in the clique ordering (for efficient access when counting)
val cliqueDomains: Array[Int] = cliqueOrdering.map(domainSizes)
val counterSize: Int = cliqueOrdering.length
/** Holds the temporary values that get summed, get overwritten before being read in sumProduct. */
val margTemp: Array[Double] = new Array[Double](margSize)
/** Holds the temporary values that get multiplied. Get written to before being read in sumProduct. */
val prodTemp: Array[Double] = new Array[Double](numFactors)
val counter: Array[Int] = new Array[Int](counterSize)
val factorPointers: Array[Int] = new Array[Int](numFactors)
final def sumProduct(factorValues: IndexedSeq[Array[Double]], result: Array[Double]) {
//TODO maybe the clearing is not needed
java.util.Arrays.fill(counter,0)
java.util.Arrays.fill(factorPointers,0)
var remainIdx = 0
var cnt = 0
while(remainIdx < remainSize){
var margIdx = 0
while(margIdx < margSize){
//increment counter
val overflow = increment(cnt)
cnt += 1
//calculate the factor contributions
//NOTE: we collect the factor values for the counter state before its update in this loop!
var fi = 0
while(fi < numFactors){
prodTemp(fi) = factorValues(fi)(factorPointers(fi))
factorPointers(fi) += lookups(fi)(overflow)
fi += 1
}
//multiply factor values
margTemp(margIdx) = ring.prodA(prodTemp)
margIdx += 1
}
//now sum over marginalized variables for one assignment to the remaining variables
result(remainIdx) = ring.sumA(margTemp)
remainIdx += 1
}
}
final def sumProductNormalize(factorValues: IndexedSeq[Array[Double]], result: Array[Double]) {
//TODO maybe the clearing is not needed
java.util.Arrays.fill(counter,0)
java.util.Arrays.fill(factorPointers,0)
var remainIdx = 0
var cnt = 0
while(remainIdx < remainSize){
var margIdx = 0
while(margIdx < margSize){
//increment counter
val overflow = increment(cnt)
cnt += 1
//calculate the factor contributions
//NOTE: we collect the factor values for the counter state before its update in this loop!
var fi = 0
while(fi < numFactors){
prodTemp(fi) = factorValues(fi)(factorPointers(fi))
factorPointers(fi) += lookups(fi)(overflow)
fi += 1
}
//multiply factor values
margTemp(margIdx) = ring.prodA(prodTemp)
margIdx += 1
}
//now sum over marginalized variables for one assignment to the remaining variables
result(remainIdx) = ring.sumA(margTemp)
remainIdx += 1
}
ring.normalizeInplace(result)
}
/** Increments the counter.
* Mutates the `counter` member.
* @param count The current step number of the counter.
    * @return The position of the first counter digit that did not overflow (used to index the lookup tables).
*/
@inline
private final def increment(count: Int): Int = {
var overflow = 0
while(overflow < counterSize){
counter(overflow) += 1
if(counter(overflow) == cliqueDomains(overflow)){
counter(overflow) = 0
overflow += 1
} else {
return overflow
}
}
overflow
}
}
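
// A hypothetical wiring sketch (not part of the original file): marginalize variable 1 out of
// two binary factors over {0,1} and {1,2}, keeping variables 0 and 2. No particular Ring[Double]
// implementation is assumed; the caller supplies one. The object name and values are invented.
object SumProductTaskSketch {
  def marginalExample(ring: Ring[Double]): Array[Double] = {
    val task = SumProductTask(
      remainingVars   = Array(0, 2),
      domainSizes     = Array(2, 2, 2),             // all three variables are binary
      factorVariables = Array(Array(0, 1), Array(1, 2)),
      ring            = ring)
    // One flat value table per factor, laid out consistently with factorVariables.
    val factorValues = IndexedSeq(
      Array(1.0, 2.0, 3.0, 4.0),                    // values of the factor over {0,1}
      Array(0.5, 0.5, 0.25, 0.75))                  // values of the factor over {1,2}
    val result = new Array[Double](task.remainSize) // 2 * 2 entries, one per assignment to {0,2}
    task.sumProduct(factorValues, result)
    result
  }
}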
|
ziggystar/vultura-factor
|
src/main/scala/vultura/factor/SumProductTask.scala
|
Scala
|
mit
| 4,523 |
package com.mesosphere.cosmos.http
case class Authorization(token: String) {
val headerValue: String = token
}
|
movicha/cosmos
|
cosmos-server/src/main/scala/com/mesosphere/cosmos/http/Authorization.scala
|
Scala
|
apache-2.0
| 114 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import java.io.PrintStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.CountDownLatch
import java.util.{Locale, Properties, Random}
import com.typesafe.scalalogging.LazyLogging
import joptsimple._
import kafka.api.OffsetRequest
import kafka.common.{MessageFormatter, StreamEndException}
import kafka.consumer._
import kafka.message._
import kafka.metrics.KafkaMetricsReporter
import kafka.utils._
import kafka.utils.Implicits._
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord, KafkaConsumer}
import org.apache.kafka.common.errors.{AuthenticationException, WakeupException}
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, Deserializer}
import org.apache.kafka.common.utils.Utils
import scala.collection.JavaConversions
import scala.collection.JavaConverters._
/**
* Consumer that dumps messages to standard out.
*/
object ConsoleConsumer extends Logging {
var messageCount = 0
private val shutdownLatch = new CountDownLatch(1)
def main(args: Array[String]) {
val conf = new ConsumerConfig(args)
try {
run(conf)
} catch {
case e: AuthenticationException =>
error("Authentication failed: terminating consumer process", e)
Exit.exit(1)
case e: Throwable =>
error("Unknown error when running consumer: ", e)
Exit.exit(1)
}
}
def run(conf: ConsumerConfig) {
val consumer =
if (conf.useOldConsumer) {
checkZk(conf)
val props = getOldConsumerProps(conf)
checkAndMaybeDeleteOldPath(conf, props)
new OldConsumer(conf.filterSpec, props)
} else {
val timeoutMs = if (conf.timeoutMs >= 0) conf.timeoutMs else Long.MaxValue
val consumer = new KafkaConsumer(getNewConsumerProps(conf), new ByteArrayDeserializer, new ByteArrayDeserializer)
if (conf.partitionArg.isDefined)
new NewShinyConsumer(Option(conf.topicArg), conf.partitionArg, Option(conf.offsetArg), None, consumer, timeoutMs)
else
new NewShinyConsumer(Option(conf.topicArg), None, None, Option(conf.whitelistArg), consumer, timeoutMs)
}
addShutdownHook(consumer, conf)
try {
process(conf.maxMessages, conf.formatter, consumer, System.out, conf.skipMessageOnError)
} finally {
consumer.cleanup()
conf.formatter.close()
reportRecordCount()
// if we generated a random group id (as none specified explicitly) then avoid polluting zookeeper with persistent group data, this is a hack
if (conf.useOldConsumer && !conf.groupIdPassed)
ZkUtils.maybeDeletePath(conf.options.valueOf(conf.zkConnectOpt), "/consumers/" + conf.consumerProps.get("group.id"))
shutdownLatch.countDown()
}
}
def checkZk(config: ConsumerConfig) {
if (!checkZkPathExists(config.options.valueOf(config.zkConnectOpt), "/brokers/ids")) {
System.err.println("No brokers found in ZK.")
Exit.exit(1)
}
if (!config.options.has(config.deleteConsumerOffsetsOpt) && config.options.has(config.resetBeginningOpt) &&
checkZkPathExists(config.options.valueOf(config.zkConnectOpt), "/consumers/" + config.consumerProps.getProperty("group.id") + "/offsets")) {
System.err.println("Found previous offset information for this group " + config.consumerProps.getProperty("group.id")
+ ". Please use --delete-consumer-offsets to delete previous offsets metadata")
Exit.exit(1)
}
}
def addShutdownHook(consumer: BaseConsumer, conf: ConsumerConfig) {
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
consumer.stop()
shutdownLatch.await()
if (conf.enableSystestEventsLogging) {
System.out.println("shutdown_complete")
}
}
})
}
def process(maxMessages: Integer, formatter: MessageFormatter, consumer: BaseConsumer, output: PrintStream, skipMessageOnError: Boolean) {
while (messageCount < maxMessages || maxMessages == -1) {
val msg: BaseConsumerRecord = try {
consumer.receive()
} catch {
case _: StreamEndException =>
trace("Caught StreamEndException because consumer is shutdown, ignore and terminate.")
// Consumer is already closed
return
case _: WakeupException =>
trace("Caught WakeupException because consumer is shutdown, ignore and terminate.")
// Consumer will be closed
return
case e: Throwable =>
error("Error processing message, terminating consumer process: ", e)
// Consumer will be closed
return
}
messageCount += 1
try {
formatter.writeTo(new ConsumerRecord(msg.topic, msg.partition, msg.offset, msg.timestamp,
msg.timestampType, 0, 0, 0, msg.key, msg.value, msg.headers), output)
} catch {
case e: Throwable =>
if (skipMessageOnError) {
error("Error processing message, skipping this message: ", e)
} else {
// Consumer will be closed
throw e
}
}
if (checkErr(output, formatter)) {
// Consumer will be closed
return
}
}
}
def reportRecordCount() {
System.err.println(s"Processed a total of $messageCount messages")
}
def checkErr(output: PrintStream, formatter: MessageFormatter): Boolean = {
val gotError = output.checkError()
if (gotError) {
// This means no one is listening to our output stream any more, time to shutdown
System.err.println("Unable to write to standard out, closing consumer.")
}
gotError
}
def getOldConsumerProps(config: ConsumerConfig): Properties = {
val props = new Properties
props ++= config.consumerProps
props ++= config.extraConsumerProps
setAutoOffsetResetValue(config, props)
props.put("zookeeper.connect", config.zkConnectionStr)
if (config.timeoutMs >= 0)
props.put("consumer.timeout.ms", config.timeoutMs.toString)
props
}
def checkAndMaybeDeleteOldPath(config: ConsumerConfig, props: Properties) = {
val consumerGroupBasePath = "/consumers/" + props.getProperty("group.id")
if (config.options.has(config.deleteConsumerOffsetsOpt)) {
ZkUtils.maybeDeletePath(config.options.valueOf(config.zkConnectOpt), consumerGroupBasePath)
} else {
val resetToBeginning = OffsetRequest.SmallestTimeString == props.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
if (resetToBeginning && checkZkPathExists(config.options.valueOf(config.zkConnectOpt), consumerGroupBasePath + "/offsets")) {
System.err.println("Found previous offset information for this group " + props.getProperty("group.id")
+ ". Please use --delete-consumer-offsets to delete previous offsets metadata")
Exit.exit(1)
}
}
}
private[tools] def getNewConsumerProps(config: ConsumerConfig): Properties = {
val props = new Properties
props ++= config.consumerProps
props ++= config.extraConsumerProps
setAutoOffsetResetValue(config, props)
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, config.bootstrapServer)
props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, config.isolationLevel)
props
}
/**
* Used by both getNewConsumerProps and getOldConsumerProps to retrieve the correct value for the
* consumer parameter 'auto.offset.reset'.
* Order of priority is:
* 1. Explicitly set parameter via --consumer.property command line parameter
* 2. Explicit --from-beginning given -> 'earliest'
* 3. Default value of 'latest'
*
* In case both --from-beginning and an explicit value are specified an error is thrown if these
* are conflicting.
*/
def setAutoOffsetResetValue(config: ConsumerConfig, props: Properties) {
val (earliestConfigValue, latestConfigValue) = if (config.useOldConsumer)
(OffsetRequest.SmallestTimeString, OffsetRequest.LargestTimeString)
else
("earliest", "latest")
if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)) {
// auto.offset.reset parameter was specified on the command line
val autoResetOption = props.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
if (config.options.has(config.resetBeginningOpt) && earliestConfigValue != autoResetOption) {
// conflicting options - latest and earliest, throw an error
System.err.println(s"Can't simultaneously specify --from-beginning and 'auto.offset.reset=$autoResetOption', " +
"please remove one option")
Exit.exit(1)
}
// nothing to do, checking for valid parameter values happens later and the specified
// value was already copied when the consumer properties were merged into props
} else {
// no explicit value for auto.offset.reset was specified
// if --from-beginning was specified use earliest, otherwise default to latest
val autoResetOption = if (config.options.has(config.resetBeginningOpt)) earliestConfigValue else latestConfigValue
props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoResetOption)
}
}
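// Hedged illustration (not part of the upstream tool): the priority order documented above,
// reduced to a pure function. The name and signature below are hypothetical.
//   effectiveAutoOffsetReset(explicit = None, fromBeginning = true)            // "earliest"
//   effectiveAutoOffsetReset(explicit = Some("latest"), fromBeginning = false) // "latest"
private def effectiveAutoOffsetReset(explicit: Option[String], fromBeginning: Boolean): String =
  explicit.getOrElse(if (fromBeginning) "earliest" else "latest")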
class ConsumerConfig(args: Array[String]) {
val parser = new OptionParser(false)
val topicIdOpt = parser.accepts("topic", "The topic id to consume on.")
.withRequiredArg
.describedAs("topic")
.ofType(classOf[String])
val whitelistOpt = parser.accepts("whitelist", "Whitelist of topics to include for consumption.")
.withRequiredArg
.describedAs("whitelist")
.ofType(classOf[String])
val blacklistOpt = parser.accepts("blacklist", "Blacklist of topics to exclude from consumption.")
.withRequiredArg
.describedAs("blacklist")
.ofType(classOf[String])
val partitionIdOpt = parser.accepts("partition", "The partition to consume from. Consumption " +
"starts from the end of the partition unless '--offset' is specified.")
.withRequiredArg
.describedAs("partition")
.ofType(classOf[java.lang.Integer])
val offsetOpt = parser.accepts("offset", "The offset id to consume from (a non-negative number), or 'earliest' which means from beginning, or 'latest' which means from end")
.withRequiredArg
.describedAs("consume offset")
.ofType(classOf[String])
.defaultsTo("latest")
val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED (only when using old consumer): The connection string for the zookeeper connection in the form host:port. " +
"Multiple URLS can be given to allow fail-over.")
.withRequiredArg
.describedAs("urls")
.ofType(classOf[String])
val consumerPropertyOpt = parser.accepts("consumer-property", "A mechanism to pass user-defined properties in the form key=value to the consumer.")
.withRequiredArg
.describedAs("consumer_prop")
.ofType(classOf[String])
val consumerConfigOpt = parser.accepts("consumer.config", s"Consumer config properties file. Note that ${consumerPropertyOpt} takes precedence over this config.")
.withRequiredArg
.describedAs("config file")
.ofType(classOf[String])
val messageFormatterOpt = parser.accepts("formatter", "The name of a class to use for formatting kafka messages for display.")
.withRequiredArg
.describedAs("class")
.ofType(classOf[String])
.defaultsTo(classOf[DefaultMessageFormatter].getName)
val messageFormatterArgOpt = parser.accepts("property",
"The properties to initialize the message formatter. Default properties include:\\n" +
"\\tprint.timestamp=true|false\\n" +
"\\tprint.key=true|false\\n" +
"\\tprint.value=true|false\\n" +
"\\tkey.separator=<key.separator>\\n" +
"\\tline.separator=<line.separator>\\n" +
"\\tkey.deserializer=<key.deserializer>\\n" +
"\\tvalue.deserializer=<value.deserializer>\\n" +
"\\nUsers can also pass in customized properties for their formatter; more specifically, users " +
"can pass in properties keyed with \\'key.deserializer.\\' and \\'value.deserializer.\\' prefixes to configure their deserializers.")
.withRequiredArg
.describedAs("prop")
.ofType(classOf[String])
val deleteConsumerOffsetsOpt = parser.accepts("delete-consumer-offsets", "If specified, the consumer path in zookeeper is deleted when starting up")
val resetBeginningOpt = parser.accepts("from-beginning", "If the consumer does not already have an established offset to consume from, " +
"start with the earliest message present in the log rather than the latest message.")
val maxMessagesOpt = parser.accepts("max-messages", "The maximum number of messages to consume before exiting. If not set, consumption is continual.")
.withRequiredArg
.describedAs("num_messages")
.ofType(classOf[java.lang.Integer])
val timeoutMsOpt = parser.accepts("timeout-ms", "If specified, exit if no message is available for consumption for the specified interval.")
.withRequiredArg
.describedAs("timeout_ms")
.ofType(classOf[java.lang.Integer])
val skipMessageOnErrorOpt = parser.accepts("skip-message-on-error", "If there is an error when processing a message, " +
"skip it instead of halt.")
val csvMetricsReporterEnabledOpt = parser.accepts("csv-reporter-enabled", "If set, the CSV metrics reporter will be enabled")
val metricsDirectoryOpt = parser.accepts("metrics-dir", "If csv-reporter-enabled is set, and this parameter is " +
"set, the csv metrics will be output here")
.withRequiredArg
.describedAs("metrics directory")
.ofType(classOf[java.lang.String])
val newConsumerOpt = parser.accepts("new-consumer", "Use the new consumer implementation. This is the default, so " +
"this option is deprecated and will be removed in a future release.")
val bootstrapServerOpt = parser.accepts("bootstrap-server", "REQUIRED (unless old consumer is used): The server to connect to.")
.withRequiredArg
.describedAs("server to connect to")
.ofType(classOf[String])
val keyDeserializerOpt = parser.accepts("key-deserializer")
.withRequiredArg
.describedAs("deserializer for key")
.ofType(classOf[String])
val valueDeserializerOpt = parser.accepts("value-deserializer")
.withRequiredArg
.describedAs("deserializer for values")
.ofType(classOf[String])
val enableSystestEventsLoggingOpt = parser.accepts("enable-systest-events",
"Log lifecycle events of the consumer in addition to logging consumed " +
"messages. (This is specific for system tests.)")
val isolationLevelOpt = parser.accepts("isolation-level",
"Set to read_committed in order to filter out transactional messages which are not committed. Set to read_uncommitted" +
"to read all messages.")
.withRequiredArg()
.ofType(classOf[String])
.defaultsTo("read_uncommitted")
val groupIdOpt = parser.accepts("group", "The consumer group id of the consumer.")
.withRequiredArg
.describedAs("consumer group id")
.ofType(classOf[String])
if (args.length == 0)
CommandLineUtils.printUsageAndDie(parser, "The console consumer is a tool that reads data from Kafka and outputs it to standard output.")
var groupIdPassed = true
val options: OptionSet = tryParse(parser, args)
val useOldConsumer = options.has(zkConnectOpt)
val enableSystestEventsLogging = options.has(enableSystestEventsLoggingOpt)
// If using old consumer, exactly one of whitelist/blacklist/topic is required.
// If using new consumer, topic must be specified.
var topicArg: String = null
var whitelistArg: String = null
var filterSpec: TopicFilter = null
val extraConsumerProps = CommandLineUtils.parseKeyValueArgs(options.valuesOf(consumerPropertyOpt).asScala)
val consumerProps = if (options.has(consumerConfigOpt))
Utils.loadProps(options.valueOf(consumerConfigOpt))
else
new Properties()
val zkConnectionStr = options.valueOf(zkConnectOpt)
val fromBeginning = options.has(resetBeginningOpt)
val partitionArg = if (options.has(partitionIdOpt)) Some(options.valueOf(partitionIdOpt).intValue) else None
val skipMessageOnError = options.has(skipMessageOnErrorOpt)
val messageFormatterClass = Class.forName(options.valueOf(messageFormatterOpt))
val formatterArgs = CommandLineUtils.parseKeyValueArgs(options.valuesOf(messageFormatterArgOpt).asScala)
val maxMessages = if (options.has(maxMessagesOpt)) options.valueOf(maxMessagesOpt).intValue else -1
val timeoutMs = if (options.has(timeoutMsOpt)) options.valueOf(timeoutMsOpt).intValue else -1
val bootstrapServer = options.valueOf(bootstrapServerOpt)
val keyDeserializer = options.valueOf(keyDeserializerOpt)
val valueDeserializer = options.valueOf(valueDeserializerOpt)
val isolationLevel = options.valueOf(isolationLevelOpt).toString
val formatter: MessageFormatter = messageFormatterClass.newInstance().asInstanceOf[MessageFormatter]
if (keyDeserializer != null && !keyDeserializer.isEmpty) {
formatterArgs.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
}
if (valueDeserializer != null && !valueDeserializer.isEmpty) {
formatterArgs.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
}
formatter.init(formatterArgs)
if (useOldConsumer) {
if (options.has(bootstrapServerOpt))
CommandLineUtils.printUsageAndDie(parser, s"Option $bootstrapServerOpt is not valid with $zkConnectOpt.")
else if (options.has(newConsumerOpt))
CommandLineUtils.printUsageAndDie(parser, s"Option $newConsumerOpt is not valid with $zkConnectOpt.")
val topicOrFilterOpt = List(topicIdOpt, whitelistOpt, blacklistOpt).filter(options.has)
if (topicOrFilterOpt.size != 1)
CommandLineUtils.printUsageAndDie(parser, "Exactly one of whitelist/blacklist/topic is required.")
topicArg = options.valueOf(topicOrFilterOpt.head)
filterSpec = if (options.has(blacklistOpt)) new Blacklist(topicArg) else new Whitelist(topicArg)
Console.err.println("Using the ConsoleConsumer with old consumer is deprecated and will be removed " +
s"in a future major release. Consider using the new consumer by passing $bootstrapServerOpt instead of ${zkConnectOpt}.")
} else {
val topicOrFilterOpt = List(topicIdOpt, whitelistOpt).filter(options.has)
if (topicOrFilterOpt.size != 1)
CommandLineUtils.printUsageAndDie(parser, "Exactly one of whitelist/topic is required.")
topicArg = options.valueOf(topicIdOpt)
whitelistArg = options.valueOf(whitelistOpt)
}
if (useOldConsumer && (partitionArg.isDefined || options.has(offsetOpt)))
CommandLineUtils.printUsageAndDie(parser, "Partition-offset based consumption is supported in the new consumer only.")
if (partitionArg.isDefined) {
if (!options.has(topicIdOpt))
CommandLineUtils.printUsageAndDie(parser, "The topic is required when partition is specified.")
if (fromBeginning && options.has(offsetOpt))
CommandLineUtils.printUsageAndDie(parser, "Options from-beginning and offset cannot be specified together.")
} else if (options.has(offsetOpt))
CommandLineUtils.printUsageAndDie(parser, "The partition is required when offset is specified.")
def invalidOffset(offset: String): Nothing =
CommandLineUtils.printUsageAndDie(parser, s"The provided offset value '$offset' is incorrect. Valid values are " +
"'earliest', 'latest', or a non-negative long.")
val offsetArg =
if (options.has(offsetOpt)) {
options.valueOf(offsetOpt).toLowerCase(Locale.ROOT) match {
case "earliest" => OffsetRequest.EarliestTime
case "latest" => OffsetRequest.LatestTime
case offsetString =>
val offset =
try offsetString.toLong
catch {
case _: NumberFormatException => invalidOffset(offsetString)
}
if (offset < 0) invalidOffset(offsetString)
offset
}
}
else if (fromBeginning) OffsetRequest.EarliestTime
else OffsetRequest.LatestTime
if (!useOldConsumer) {
CommandLineUtils.checkRequiredArgs(parser, options, bootstrapServerOpt)
if (options.has(newConsumerOpt)) {
Console.err.println("The --new-consumer option is deprecated and will be removed in a future major release. " +
"The new consumer is used by default if the --bootstrap-server option is provided.")
}
}
if (options.has(csvMetricsReporterEnabledOpt)) {
val csvReporterProps = new Properties()
csvReporterProps.put("kafka.metrics.polling.interval.secs", "5")
csvReporterProps.put("kafka.metrics.reporters", "kafka.metrics.KafkaCSVMetricsReporter")
if (options.has(metricsDirectoryOpt))
csvReporterProps.put("kafka.csv.metrics.dir", options.valueOf(metricsDirectoryOpt))
else
csvReporterProps.put("kafka.csv.metrics.dir", "kafka_metrics")
csvReporterProps.put("kafka.csv.metrics.reporter.enabled", "true")
val verifiableProps = new VerifiableProperties(csvReporterProps)
KafkaMetricsReporter.startReporters(verifiableProps)
}
// if the group id is provided in more than one place (through different means) all values must be the same
val groupIdsProvided = Set(
Option(options.valueOf(groupIdOpt)), // via --group
Option(consumerProps.get(ConsumerConfig.GROUP_ID_CONFIG)), // via --consumer.config
Option(extraConsumerProps.get(ConsumerConfig.GROUP_ID_CONFIG)) // via --consumer-property
).flatten
if (groupIdsProvided.size > 1) {
CommandLineUtils.printUsageAndDie(parser, "The group ids provided in different places (directly using '--group', "
+ "via '--consumer-property', or via '--consumer.config') do not match. "
+ s"Detected group ids: ${groupIdsProvided.mkString("'", "', '", "'")}")
}
groupIdsProvided.headOption match {
case Some(group) =>
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, group)
case None =>
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, s"console-consumer-${new Random().nextInt(100000)}")
groupIdPassed = false
}
def tryParse(parser: OptionParser, args: Array[String]): OptionSet = {
try
parser.parse(args: _*)
catch {
case e: OptionException =>
CommandLineUtils.printUsageAndDie(parser, e.getMessage)
}
}
}
def checkZkPathExists(zkUrl: String, path: String): Boolean = {
try {
val zk = ZkUtils.createZkClient(zkUrl, 30 * 1000, 30 * 1000)
zk.exists(path)
} catch {
case _: Throwable => false
}
}
}
class DefaultMessageFormatter extends MessageFormatter {
var printKey = false
var printValue = true
var printTimestamp = false
var keySeparator = "\t".getBytes(StandardCharsets.UTF_8)
var lineSeparator = "\n".getBytes(StandardCharsets.UTF_8)
var keyDeserializer: Option[Deserializer[_]] = None
var valueDeserializer: Option[Deserializer[_]] = None
override def init(props: Properties) {
if (props.containsKey("print.timestamp"))
printTimestamp = props.getProperty("print.timestamp").trim.equalsIgnoreCase("true")
if (props.containsKey("print.key"))
printKey = props.getProperty("print.key").trim.equalsIgnoreCase("true")
if (props.containsKey("print.value"))
printValue = props.getProperty("print.value").trim.equalsIgnoreCase("true")
if (props.containsKey("key.separator"))
keySeparator = props.getProperty("key.separator").getBytes(StandardCharsets.UTF_8)
if (props.containsKey("line.separator"))
lineSeparator = props.getProperty("line.separator").getBytes(StandardCharsets.UTF_8)
// Note that `toString` will be called on the instance returned by `Deserializer.deserialize`
if (props.containsKey("key.deserializer")) {
keyDeserializer = Some(Class.forName(props.getProperty("key.deserializer")).newInstance().asInstanceOf[Deserializer[_]])
keyDeserializer.get.configure(JavaConversions.propertiesAsScalaMap(propertiesWithKeyPrefixStripped("key.deserializer.", props)).asJava, true)
}
// Note that `toString` will be called on the instance returned by `Deserializer.deserialize`
if (props.containsKey("value.deserializer")) {
valueDeserializer = Some(Class.forName(props.getProperty("value.deserializer")).newInstance().asInstanceOf[Deserializer[_]])
valueDeserializer.get.configure(JavaConversions.propertiesAsScalaMap(propertiesWithKeyPrefixStripped("value.deserializer.", props)).asJava, false)
}
}
private def propertiesWithKeyPrefixStripped(prefix: String, props: Properties): Properties = {
val newProps = new Properties()
import scala.collection.JavaConversions._
for ((key, value) <- props if key.startsWith(prefix) && key.length > prefix.length)
newProps.put(key.substring(prefix.length), value)
newProps
}
def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream) {
def writeSeparator(columnSeparator: Boolean): Unit = {
if (columnSeparator)
output.write(keySeparator)
else
output.write(lineSeparator)
}
def write(deserializer: Option[Deserializer[_]], sourceBytes: Array[Byte]) {
val nonNullBytes = Option(sourceBytes).getOrElse("null".getBytes(StandardCharsets.UTF_8))
val convertedBytes = deserializer.map(_.deserialize(null, nonNullBytes).toString.
getBytes(StandardCharsets.UTF_8)).getOrElse(nonNullBytes)
output.write(convertedBytes)
}
import consumerRecord._
if (printTimestamp) {
if (timestampType != TimestampType.NO_TIMESTAMP_TYPE)
output.write(s"$timestampType:$timestamp".getBytes(StandardCharsets.UTF_8))
else
output.write(s"NO_TIMESTAMP".getBytes(StandardCharsets.UTF_8))
writeSeparator(printKey || printValue)
}
if (printKey) {
write(keyDeserializer, key)
writeSeparator(printValue)
}
if (printValue) {
write(valueDeserializer, value)
output.write(lineSeparator)
}
}
}
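// Hedged usage sketch (not part of the upstream tool): configuring the formatter via the same
// "property" keys documented for the --property flag. The values below are illustrative only.
//   val props = new Properties()
//   props.put("print.key", "true")
//   props.put("key.separator", "|")
//   val fmt = new DefaultMessageFormatter
//   fmt.init(props)
//   // fmt.writeTo(record, System.out) then prints "<key>|<value>" followed by the line separator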
class LoggingMessageFormatter extends MessageFormatter with LazyLogging {
private val defaultWriter: DefaultMessageFormatter = new DefaultMessageFormatter
override def init(props: Properties): Unit = defaultWriter.init(props)
def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream): Unit = {
import consumerRecord._
defaultWriter.writeTo(consumerRecord, output)
logger.info({if (timestampType != TimestampType.NO_TIMESTAMP_TYPE) s"$timestampType:$timestamp, " else ""} +
s"key:${if (key == null) "null" else new String(key, StandardCharsets.UTF_8)}, " +
s"value:${if (value == null) "null" else new String(value, StandardCharsets.UTF_8)}")
}
}
class NoOpMessageFormatter extends MessageFormatter {
override def init(props: Properties) {}
def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream){}
}
class ChecksumMessageFormatter extends MessageFormatter {
private var topicStr: String = _
override def init(props: Properties) {
topicStr = props.getProperty("topic")
if (topicStr != null)
topicStr = topicStr + ":"
else
topicStr = ""
}
def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream) {
import consumerRecord._
val chksum =
if (timestampType != TimestampType.NO_TIMESTAMP_TYPE)
new Message(value, key, timestamp, timestampType, NoCompressionCodec, 0, -1, Message.MagicValue_V1).checksum
else
new Message(value, key, Message.NoTimestamp, Message.MagicValue_V0).checksum
output.println(topicStr + "checksum:" + chksum)
}
}
|
sebadiaz/kafka
|
core/src/main/scala/kafka/tools/ConsoleConsumer.scala
|
Scala
|
apache-2.0
| 29,081 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.effect.features
import io.truthencode.ddo.api.model.effect.DetailedEffect
import io.truthencode.ddo.enhancement.BonusType
import io.truthencode.ddo.model.effect
import io.truthencode.ddo.model.effect._
import io.truthencode.ddo.model.stats.BasicStat
import io.truthencode.ddo.support.naming.UsingSearchPrefix
/**
* Affects your Hit Points by the specific Percent
*/
trait HitPointPercentFeature extends Features {
self: SourceInfo =>
protected val hitPointBonusType: BonusType
protected val hitPointBonusPercent: Int
protected[this] val triggerOn: Seq[TriggerEvent]
protected[this] val triggerOff: Seq[TriggerEvent]
protected[this] val hitPointPctCategories: Seq[effect.EffectCategories.Value]
private val src = this
private[this] val hitPointPercent =
new PartModifier[Int, BasicStat] with UsingSearchPrefix {
/**
* Used when qualifying a search with a prefix. Examples include finding "HalfElf" from
* qualified "Race:HalfElf"
*
* @return
* A default or applied prefix
*/
override def searchPrefixSource: String = partToModify.searchPrefixSource
private val eb = EffectParameterBuilder()
.toggleOffValue(triggerOff: _*)
.toggleOnValue(triggerOn: _*)
.addBonusType(hitPointBonusType)
.build
override protected[this] def effectParameters: Seq[ParameterModifier[_]] = eb.modifiers
override protected[this] lazy val partToModify: BasicStat =
BasicStat.HitPoints
/**
* The General Description should be just that. This should not include specific values unless
* all instances will share that value. I.e. a Dodge Effect might state it increases your
* miss-chance, but omit any value such as 20%. Those values will be displayed in the
* effectText of a specific implementation such as the Dodge Feat or Uncanny Dodge
*/
override val generalDescription: String =
"Increases your Total Hit Points by certain percentage"
/**
* a list of Categories useful for menu / UI placement and also for searching / querying for
* Miss-Chance or other desired effects.
*
* This list might be constrained or filtered by an Enumeration or CSV file. The goal is to
* enable quick and advanced searching for specific categories from general (Miss-Chance) to
* specific (evasion). In addition, it may be useful for deep searching such as increasing
* Spot, which should suggest not only +Spot items, but +Wisdom or eventually include a feat
* or enhancement that allows the use of some other value as your spot score.
*/
override def categories: Seq[String] = hitPointPctCategories.map(_.toString)
override lazy val effectDetail: DetailedEffect = DetailedEffect(
id = "HitChance",
description = "Increases your Hit points",
triggersOn = triggerOn.map(_.entryName),
triggersOff = triggerOff.map(_.entryName),
bonusType = hitPointBonusType.entryName
)
override val source: SourceInfo = src
override lazy val value: Int = hitPointBonusPercent
override lazy val effectText: Option[String] = Some(s"Hit Points by $value%")
}
abstract override def features: Seq[Feature[_]] = {
assert(hitPointPercent.value == hitPointBonusPercent)
super.features :+ hitPointPercent
}
}
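// Hedged sketch (not from the original sources): a concrete feature would mix this trait in
// alongside a SourceInfo and supply the abstract members, e.g. (names hypothetical):
//   hitPointBonusPercent = 10, a hitPointBonusType drawn from BonusType, and
//   triggerOn / triggerOff / hitPointPctCategories chosen from the effect enumerations.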
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/effect/features/HitPointPercentFeature.scala
|
Scala
|
apache-2.0
| 4,081 |
/**
* Copyright 2014 www.alaraph.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alaraph.sudoku
abstract class Sudoku(val board: Vector[Vector[Char]],
val square: Int,
val alphabet: Set[Char]) {
val size = square * square
val ncells = size * size
require(board.size == size &&
board.forall(row => row.size == size),
"The board must have the same number of rows and columns: " + size)
require(board.flatMap(row => row.toSet).toSet.subsetOf((alphabet + '-')),
"The symbols in the board must be included in this set: %s".format(alphabet + '-'))
override def toString: String = {
val str = for {
r <- 0 until size
c <- 0 until size
sep = if ((c + 1) % size == 0)
"\\n" + (if ((r + 1) % square == 0) "\\n" else "")
else if ((c + 1) % square == 0) " "
else ""
} yield "%c%s".format(board(r)(c), sep)
str.mkString("")
}
def canEqual(other: Any): Boolean =
other.isInstanceOf[Sudoku]
override def equals(other: Any): Boolean =
other match {
case that: Sudoku => (that canEqual this) &&
(0 until size).forall(x => this.board(x) == that.board(x))
case _ => false
}
override def hashCode: Int = board.hashCode
}
private class Sudoku9(board: Vector[Vector[Char]])
extends Sudoku(board, 3, Sudoku.alphabet(9))
private class Sudoku4(board: Vector[Vector[Char]])
extends Sudoku(board, 2, Sudoku.alphabet(4))
private class Sudoku16(board: Vector[Vector[Char]])
extends Sudoku(board, 4, Sudoku.alphabet(16))
object Sudoku {
def build(board: Vector[Vector[Char]], size: Int = 9): Sudoku =
size match {
case 9 => new Sudoku9(board)
case 4 => new Sudoku4(board)
case 16 => new Sudoku16(board)
case _ => throw new IllegalArgumentException(sizeErrMsg)
}
def build(board: String): Sudoku = {
val ncells = board.size
val sizes = alphabet.keySet.map(x => (x * x, x)).toMap
val size = sizes.get(ncells)
size match {
case Some(s) =>
val vecBoard = (for (row <- board.sliding(s, s))
yield row.toVector).toVector
build(vecBoard, s)
case None => throw new IllegalArgumentException(sizeErrMsg)
}
}
val alphabet = Map(
(9 -> ('1' to '9').toSet),
(4 -> ('1' to '4').toSet),
(16 -> (('1' to '9') ++ ('A' to 'G')).toSet)).withDefaultValue(Set.empty)
private val sizeErrMsg = "Sudoku can only have size %s".format(alphabet.keys.mkString(","))
}
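// Hedged usage sketch (not part of the original sources): building a 4x4 board from a
// 16-character string, where '-' marks an empty cell. The board contents are illustrative
// and are not validated as a solvable Sudoku by this class.
object SudokuUsageExample extends App {
  val board = "1234" + "3412" + "2-4-" + "4321"
  val sudoku = Sudoku.build(board) // size is inferred from the string length (16 -> 4x4)
  println(sudoku)
}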
|
maumorelli/alaraph
|
sudoku/src/main/scala/com/alaraph/sudoku/Sudoku.scala
|
Scala
|
apache-2.0
| 3,105 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.ops.{Rank => RankOperation}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.tf.Context
import org.tensorflow.framework.NodeDef
import scala.reflect.ClassTag
class Rank extends TensorflowOpsLoader {
import Utils._
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
new RankOperation[T]()
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/Rank.scala
|
Scala
|
apache-2.0
| 1,295 |
package org.scaladebugger.tool.backend.functions
import com.sun.jdi.ThreadReference
import org.scaladebugger.api.lowlevel.wrappers.Implicits._
import org.scaladebugger.tool.backend.StateManager
import scala.collection.JavaConverters._
import scala.util.Try
/**
* Represents a collection of functions for managing threads.
*
* @param stateManager The manager whose state to share among functions
* @param writeLine Used to write output to the terminal
*/
class ThreadGroupFunctions(
private val stateManager: StateManager,
private val writeLine: String => Unit
) {
/** Entrypoint for listing thread groups for connected JVMs. */
def threadsGroups(m: Map[String, Any]) = {
val jvms = stateManager.state.scalaVirtualMachines
if (jvms.isEmpty) writeLine("No VM connected!")
jvms.foreach(s => {
val threadGroups = s.threads.map(_.threadGroup)
.groupBy(_.name).map(_._2.head)
writeLine(s"<= JVM ${s.uniqueId} =>")
threadGroups.foreach(tg => {
val rName = tg.referenceType.name
val id = "0x" + tg.uniqueIdHexString
val name = tg.name
writeLine(s"($rName)$id $name")
})
})
}
/** Entrypoint for setting the default thread group. */
def threadGroup(m: Map[String, Any]) = {
val jvms = stateManager.state.scalaVirtualMachines
if (jvms.isEmpty) writeLine("No VM connected!")
val threadGroupName = m.get("threadGroup").map(_.toString)
threadGroupName match {
// If name provided, lookup and set as active thread group
case Some(name) =>
val threadGroup = jvms.view.flatMap(_.threads).map(_.threadGroup)
.collectFirst { case tg if tg.name == name => tg }
if (threadGroup.isEmpty) writeLine(s"No thread group found named '$name'!")
threadGroup.foreach(stateManager.updateActiveThreadGroup)
// No name provided, so clear existing active thread group
case None =>
stateManager.clearActiveThreadGroup()
}
}
}
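// Hedged usage sketch (not from the original sources): wiring the functions the way the
// tool's command layer would. The state manager and output sink are assumptions.
//   val functions = new ThreadGroupFunctions(stateManager, line => Console.out.println(line))
//   functions.threadsGroups(Map.empty[String, Any])      // list thread groups of connected JVMs
//   functions.threadGroup(Map("threadGroup" -> "main"))  // set the active thread group
//   functions.threadGroup(Map.empty[String, Any])        // clear the active thread group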
|
ensime/scala-debugger
|
scala-debugger-tool/src/main/scala/org/scaladebugger/tool/backend/functions/ThreadGroupFunctions.scala
|
Scala
|
apache-2.0
| 1,991 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.coordinator.server
import java.util.concurrent.atomic.AtomicReference
import org.apache.samza.SamzaException
import org.apache.samza.job.model.JobModel
import org.apache.samza.util.Logging
/**
* A servlet that dumps the job model for a Samza job.
*/
class JobServlet(jobModelRef: AtomicReference[JobModel]) extends ServletBase with Logging {
protected def getObjectToWrite() = {
val jobModel = jobModelRef.get()
if (jobModel == null) { // This should never happen because JobServlet is instantiated only after a jobModel is generated and its reference is updated
throw new SamzaException("Job Model is not defined in the JobCoordinator. This indicates that the Samza job is unstable. Exiting...")
}
jobModel
}
}
|
vjagadish/samza-clone
|
samza-core/src/main/scala/org/apache/samza/coordinator/server/JobServlet.scala
|
Scala
|
apache-2.0
| 1,577 |
package sangria.validation.rules
import sangria.renderer.{SchemaRenderer, QueryRenderer}
import sangria.validation.ValidationContext.isValidLiteralValue
import sangria.validation.{BadValueViolation, ValidationContext, ValidationRule}
import sangria.ast
import sangria.ast.AstVisitorCommand._
/**
* Argument values of correct type
*
* A GraphQL document is only valid if all field argument literal values are
* of the type expected by their position.
*/
class ArgumentsOfCorrectType extends ValidationRule {
override def visitor(ctx: ValidationContext) = new AstValidatingVisitor {
override val onEnter: ValidationVisit = {
case ast.Argument(name, value, _) =>
ctx.typeInfo.argument.map { arg =>
if (!isValidLiteralValue(arg.inputValueType, value))
Left(Vector(BadValueViolation(
arg.name,
SchemaRenderer.renderTypeName(arg.inputValueType),
QueryRenderer.render(value),
ctx.sourceMapper,
value.position.toList)))
else
Right(Continue)
} getOrElse Right(Continue)
}
}
}
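// Hedged illustration (not from the sangria sources): for a schema field `user(id: Int)`,
// this rule reports a BadValueViolation on a query such as
//   { user(id: "not-an-int") { name } }
// because the string literal is not a valid value for the Int-typed `id` argument.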
|
narahari92/sangria
|
src/main/scala/sangria/validation/rules/ArgumentsOfCorrectType.scala
|
Scala
|
apache-2.0
| 1,122 |
package setup
import com.datastax.driver.core._
import com.datastax.driver.core.Cluster
import scala.collection.JavaConversions._
object Setup {
def runThenClose(f: => Unit) {
val csc = new CassandraDataSetup("localhost")
csc.populateTable()
csc.querySchema
try f
finally csc.close
}
}
class CassandraDataSetup(node: String) {
protected val cluster = Cluster.builder.addContactPoint(node).build
protected val metaData = cluster.getMetadata
protected val session = cluster.connect
createSchema("test.persons")
createSchema("test.adults" )
createSchema("test.minors" )
val sourceBS = bs("test.persons")
val adultsBS = bs("test.adults" )
val minorsBS = bs("test.minors" )
protected def bs(tableName: String) = new BoundStatement(
session.prepare(s"""INSERT INTO $tableName (id, fname, lname, age) VALUES (?, ?, ?, ?)""")
)
println(s"Connected to cluster: ${metaData.getClusterName}")
metaData.getAllHosts.foreach{ h => println(
s"Datacenter: ${h.getDatacenter}; Host: ${h.getAddress}, Rack: ${h.getRack}"
)}
def createSchema(tableName: String) {
session.execute("""CREATE KEYSPACE IF NOT EXISTS test WITH replication = {'class':'SimpleStrategy', 'replication_factor':1};""")
session.execute(s"""
|CREATE TABLE IF NOT EXISTS $tableName (
| id text PRIMARY KEY,
| fname text,
| lname text,
| age int
|);
""".stripMargin)
session.execute(s"CREATE INDEX IF NOT EXISTS ON $tableName (fname);")
session.execute(s"CREATE INDEX IF NOT EXISTS ON $tableName (lname);")
session.execute(s"CREATE INDEX IF NOT EXISTS ON $tableName (age);" )
}
def populateTable(implicit bs: BoundStatement = sourceBS) {
insert(java.util.UUID.randomUUID.toString, "Matt", "Kew", 40)
insert(java.util.UUID.randomUUID.toString, "Anita", "Kew", 35)
insert(java.util.UUID.randomUUID.toString, "Gavin", "Kew", 6)
insert(java.util.UUID.randomUUID.toString, "Aries", "Kew", 3)
}
def insert(id: String, fname: String, lname: String, age: java.lang.Integer)(implicit bs: BoundStatement) {
session.execute(bs.bind(id, fname, lname, age))
}
def querySchema {
val results = session.execute("""SELECT * FROM test.persons WHERE fname = 'Matt';""")
println(String.format("%-30s\\t%-20s\\t%-20s\\n%s", "fname", "lname", "age", "-------------------------------+-----------------------+--------------------"))
results.foreach{ row => println(String.format("%-30s\\t%-20s\\t%-20s", row.getString("fname"), row.getString("lname"), row.getInt("age").toString)) }
}
def close {
session.execute("DROP TABLE test.persons")
session.execute("DROP TABLE test.adults" )
session.execute("DROP TABLE test.minors" )
cluster.close
}
}
|
mkew/sparkdemo
|
src/main/scala/setup/CassandraDataSetup.scala
|
Scala
|
gpl-2.0
| 2,792 |
import util.control.Breaks._
import scala.collection.parallel._
class DynamicFind (arrCombo: Array[Int]) {
private val arrLen = arrCombo.length
def FindSubset: Unit = {
var a: Int = 0
var b: Int = 0
for (i <- 0 to arrLen - 1){
if(arrCombo(i)>0) b += arrCombo(i) else a += arrCombo(i)
}
val s: Int = (b - a)+1
val matrix = Array.ofDim[Boolean](arrLen,s)
matrix(0)(arrCombo(0) - a) = true
var findIndex: Int = 0
// breakable {
// val v = Vector.range(0, s - 1)
// for (j <- 1 to arrLen - 1){
// v.par.foreach { k =>
// val check = k - arrCombo(j)
// if(s - 1 >= check && check >= 0) {
// if(matrix(j-1)(k) || (k + a) == arrCombo(j) || matrix(j-1)(check)) matrix(j)(k) = true
// }
// else{
// if(matrix(j-1)(k) || (k + a) == arrCombo(j)) matrix(j)(k) = true
// }
// }
// if (matrix(j)(-a)) {
// findIndex = j
// break
// }
// }
// findIndex = arrLen - 1
// }
var check: Int = 0
breakable {
for (j <- 1 to arrLen - 1){
for (k <- 0 to s - 1){
check = k - arrCombo(j)
if(s - 1 >= check && check >= 0) {
if(matrix(j-1)(k) || (k + a) == arrCombo(j) || matrix(j-1)(check)) matrix(j)(k) = true
}
else{
if(matrix(j-1)(k) || (k + a) == arrCombo(j)) matrix(j)(k) = true
}
}
if (matrix(j)(-a)) {
findIndex = j
break
}
}
findIndex = arrLen - 1
}
val result: Boolean = matrix(findIndex)(-a)
println(result)
//println(matrix.deep.mkString("\n"))
if (result){
var findResult = List(arrCombo(findIndex).toDouble/100)
var col: Int = -a - arrCombo(findIndex)
for (i <- findIndex - 1 to 1 by -1){
if (!matrix(i - 1)(col)){
findResult :::= List(arrCombo(i).toDouble/100)
col = col - arrCombo(i)
}
}
if (matrix(0)(col)) findResult :::= List(arrCombo(0).toDouble/100)
println(findResult)
}
}
}
|
matt-bentley/SubsetSumProcessor
|
SubsetSumProcessor/Scala/src/main/scala/DynamicFind.scala
|
Scala
|
gpl-3.0
| 2,227 |
package im.actor.server.mtproto.codecs
import im.actor.server.mtproto.protocol._
import scodec.codecs._
package object protocol {
val EncryptedPackageCodec = (int64 :: bytes).as[EncryptedPackage]
val EncryptionCBCPackageCodec = (bytes :: bytes).as[EncryptionCBCPackage]
val MessageAckCodec = longs.as[MessageAck]
val NewSessionCodec = (int64 :: int64).as[NewSession]
val AuthIdInvalidCodec = provide[AuthIdInvalid](AuthIdInvalid)
val SessionHelloCodec = provide[SessionHello](SessionHello)
val SessionLostCodec = provide[SessionLost](SessionLost)
val RequestAuthIdCodec = provide[RequestAuthId](RequestAuthId)
val RequestResendCodec = int64.as[RequestResend]
val ResponseAuthIdCodec = int64.as[ResponseAuthId]
val RequestStartAuthCodec = int64.as[RequestStartAuth]
val ResponseStartAuthCodec = (int64 :: longs :: bytes).as[ResponseStartAuth]
val RequestGetServerKeyCodec = int64.as[RequestGetServerKey]
val ResponseGetServerKeyCodec = (int64 :: bytes).as[ResponseGetServerKey]
val RequestDHCodec = (int64 :: int64 :: bytes :: bytes).as[RequestDH]
val ResponseDoDHCodec = (int64 :: bytes :: bytes).as[ResponseDoDH]
val ProtoRpcRequestCodec = bytes.as[ProtoRpcRequest]
val ProtoRpcResponseCodec = (int64 :: bytes).as[ProtoRpcResponse]
val UnsentMessageCodec = (int64 :: int32).as[UnsentMessage]
val UnsentResponseCodec = (int64 :: int64 :: int32).as[UnsentResponse]
val ProtoPushCodec = bytes.as[ProtoPush]
}
|
actorapp/actor-platform
|
actor-server/actor-codecs/src/main/scala/im/actor/server/mtproto/codecs/protocol/package.scala
|
Scala
|
agpl-3.0
| 1,452 |
/*
* Copyright 2016-2017 original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Created by [email protected] on 7/11/17.
*/
import sbt._
import Keys._
object LocalSbtSettings {
val githubBaseUrl = "https://github.com/uts-cic/tap"
val scaladocApiBaseUrl = "https://uts-cic.github.io/tap"
val dockerRepoURI = "079464859481.dkr.ecr.ap-southeast-2.amazonaws.com"
}
|
uts-cic/tap
|
project/LocalSbtSettings.scala
|
Scala
|
apache-2.0
| 924 |
import shapeless._
import shapeless.syntax.singleton._
/**
* This addresses:
* FR-01: Dev shall be able to define a style that requires a specific configuration of children such that the
* compiler will enforce that the children are styled.
*
* The most sensible way of doing this is to have a composite style produce a ClassName for each component but then
* the composition order matters which is error-prone and fragile.
* Eg. in (Style,Style) which one is for the label and which is for the checkbox?
*
* This PoC below forces the client-site to acknowledge each style by name.
*/
object NamedChildrenPoC {
trait Style
class Step1 {
val w = Witness('header)
private val s: Style = ???
def apply[X](a: w.T, f: Style => Step2 => X): X = f(s)(new Step2)
}
class Step2 {
val w = Witness('body)
private val s: Style = ???
def apply[X](a: w.T, f: Style => X): X = f(s)
}
val myCompStyle = new Step1
// Example usage:
myCompStyle('header, h =>
_('body, b =>
s"Header = $h, body = $b"))
// Doesn't compile:
// myCompStyle('header, h =>
// _('sidebar, b =>
// s"Header = $h, sidebar = $b"))
// -----------------------------------------------------
val x = (Witness('a), "Aye") :: (Witness('b), "BB") :: (Witness('c), "Sea") :: HNil
val l = x.tail.tail
final class MidStep[W, A, B](a: A, b: B) {
def apply[C](n: W, f: A => B => C): C = f(a)(b)
}
final class LastStep[W, A](a: A) {
def apply[B](n: W, f: A => B): B = f(a)
}
def MidStep[A,B](w: Witness, a: A, b: B) = new MidStep[w.T, A, B](a, b)
def LastStep[A](w: Witness, a: A) = new LastStep[w.T, A](a)
val s3 = LastStep(Witness('c), 3)
val s2 = MidStep(Witness('b), 20, s3)
val s1 = MidStep(Witness('a), 100, s2)
s1('a, a =>
_('b, b =>
_('c, a + b + _)))
// -----------------------------------------------------
case class Named[W,A](a: A)
trait MkStep[L <: HList] {
type Out
def apply(l: L): Out
}
def mkSteps[L <: HList](l: L)(implicit m: MkStep[L]): m.Out = m(l)
type MkStepAux[L <: HList, O] = MkStep[L]{ type Out = O }
trait LowPri {
implicit def mkMidStep[W, A, T <: HList](implicit next: MkStep[T]): MkStepAux[Named[W,A] :: T, MidStep[W,A,next.Out]] = {
type L = Named[W,A] :: T
new MkStep[L] {
override type Out = MidStep[W,A,next.Out]
override def apply(l: L): Out = new MidStep[W,A,next.Out](l.head.a, next(l.tail))
}
}
}
object TopPri extends LowPri {
implicit def mkTailStep[W,A]: MkStepAux[Named[W,A] :: HNil, LastStep[W,A]] = {
type L = Named[W,A] :: HNil
new MkStep[L] {
override type Out = LastStep[W,A]
override def apply(l: L): Out = new LastStep[W,A](l.head.a)
}
}
}
import TopPri._
val wc = Witness('c)
val hc = Named[wc.T, Int](123) :: HNil
val sc = mkSteps(hc)
sc('c, identity)
val wb = Witness('b)
val hb = Named[wb.T, Int](100) :: hc
val sb = mkSteps(hb)
sb('b, b => _('c, _ + b))
}
// =====================================================================================================================
// Again for real
object CompositeStyleStuff {
case class Named[W,A](a: A) {
def map[B](f: A => B): Named[W,B] = Named(f(a))
}
final class UsageH[W, A, B](a: A, b: B) {
def apply[C](n: W, f: A => B => C): C = f(a)(b)
}
final class UsageT[W, A](a: A) {
def apply[B](n: W, f: A => B): B = f(a)
}
sealed abstract class MkUsage[L <: HList] {
type Out
val apply: L => Out
}
object MkUsage extends MkUsageLowPri {
type Aux[L <: HList, O] = MkUsage[L]{ type Out = O }
def apply[L <: HList, O](f: L => O): Aux[L, O] =
new MkUsage[L] {
override type Out = O
override val apply = f
}
implicit def mkUsageT[W,A]: Aux[Named[W,A] :: HNil, UsageT[W,A]] =
MkUsage(l => new UsageT(l.head.a))
}
sealed trait MkUsageLowPri {
implicit def mkUsageH[W, A, T <: HList](implicit t: MkUsage[T]): MkUsage.Aux[Named[W,A] :: T, UsageH[W,A,t.Out]] =
MkUsage(l => new UsageH(l.head.a, t apply l.tail))
}
def usage[L <: HList](l: L)(implicit m: MkUsage[L]): m.Out = m apply l
}
|
beni55/scalacss
|
misc/experiment/named_children.scala
|
Scala
|
apache-2.0
| 4,216 |
package com.github.btmorr.harmonia.models
/* The Vowpal Wabbit implementation of SEARN search requires input in a particular format. This can be submitted
* to a stationary vw model (executing vw in a subprocess with a pretrained model and the test flag, to keep the
* model stationary, equivalent to `echo '| <tag> <word>' | vw -t -i policy.vw -P 1`, or to an active
*/
case object SearnPredictor extends Model[(Word, PosTag), Boolean] {
def apply(wt: (Word, PosTag)) = {
val req = s"| ${wt._2} ${wt._1}"
println(s"Assembled request for VW: $req")
// submit req to vw server, get prediction
false
}
}
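// Hedged usage sketch (not part of the original sources): Word and PosTag are assumed to be
// String-like aliases defined elsewhere in this package; the values are illustrative only.
//   SearnPredictor(("dog", "NN"))   // assembles the request "| NN dog" and currently returns false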
|
btmorr/ideal-spork
|
mastermind/src/main/scala/com/github/btmorr/harmonia/models/SearnPredictor.scala
|
Scala
|
gpl-3.0
| 626 |
package client.rootmodel
import shared.dtos.Introduction
/**
* Created by mandar.k on 7/19/2016.
*/
case class IntroRootModel(introResponse:Seq[Introduction]) {
def updated(newIntroRes: Introduction) = {
introResponse.indexWhere(_.connection.target == newIntroRes.connection.target) match {
case -1 =>
IntroRootModel(introResponse :+ newIntroRes)
case target =>
IntroRootModel(introResponse.updated(target, newIntroRes))
}
}
}
|
LivelyGig/ProductWebUI
|
client/src/main/scala/client/rootmodel/IntroRootModel.scala
|
Scala
|
apache-2.0
| 475 |
package cl.asa.result
class Result(line:String = null) {
var chunks: Seq[Chunk] = Seq.empty[Chunk] // chunks (bunsetsu) in the sentence
var surface: String = line // surface form of the sentence
def addChunk(chunk: Chunk) {
chunks = chunks :+ chunk
}
}
|
Takeuchi-Lab-LM/scala_asa3
|
ASA/src/main/scala/cl/asa/result/Result.scala
|
Scala
|
mit
| 224 |
package mobile.stream
import monix.execution.Ack.Stop
import monix.execution.{Ack, Cancelable}
import monix.reactive.observers.Subscriber
import monix.reactive.{Observable, OverflowStrategy}
import org.scalajs.dom.raw.MessageEvent
import org.scalajs.dom.{CloseEvent, ErrorEvent, Event, WebSocket}
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.control.NonFatal
/** This `Observable` is a simple variant that does not communicate
* using the Reactive Streams back-pressure protocol, like the one
* implemented by [[mobile.stream.BackPressuredWebSocketClient]].
*
* Instead this connection uses a client-side buffer that can overflow. We
* can control what happens on overflow if our subscribers are too slow
* (like dropping events). But the disadvantage is that the server can't
* find out about it.
*/
final class SimpleWebSocketClient private (url: String, os: OverflowStrategy.Synchronous[String])
extends Observable[String] { self =>
/** An `Observable` that upon subscription will open a
* buffered web-socket connection.
*/
private val channel: Observable[String] =
// This `create` builder is safer to use (versus unsafeCreate), because
// the injected subscriber is going to be buffered and you don't
// have to know details about the back-pressure protocol in order to use it.
Observable.create[String](os) { downstream =>
// Reusing this in 2 places
def closeConnection(webSocket: WebSocket): Unit = {
Utils.log(s"Closing connection to $url")
if (webSocket != null && webSocket.readyState <= 1)
try webSocket.close() catch {
case _: Throwable => ()
}
}
try {
Utils.log(s"Connecting to $url")
val webSocket = new WebSocket(url)
// Not doing anything on open
webSocket.onopen = (event: Event) => ()
webSocket.onerror = (event: ErrorEvent) => {
// If error, signal it and it will be the last message
downstream.onError(BackPressuredWebSocketClient.Exception(event.message))
}
webSocket.onclose = (event: CloseEvent) => {
// If close, signal it and it will be the last message
downstream.onComplete()
}
webSocket.onmessage = (event: MessageEvent) => {
// Signal next event as usual, but we need to catch
// Stop acknowledgements. But given this is a synchronous
// (buffered) subscriber, it's a simple if statement.
val ack = downstream.onNext(event.data.asInstanceOf[String])
if (ack == Stop) closeConnection(webSocket)
}
Cancelable(() => closeConnection(webSocket))
} catch {
case NonFatal(ex) =>
// Normally this could be a race condition, meaning that we aren't allowed to
// send `onError` twice and at this point we have no way of knowing if `onError`
// already happened, but this right here is fine, for one because this is Javascript,
// but also because the `downstream` is protected by a concurrent buffer.
downstream.onError(ex)
Cancelable.empty
}
}
override def unsafeSubscribeFn(subscriber: Subscriber[String]): Cancelable =
channel.unsafeSubscribeFn(new Subscriber[String] {
val scheduler = subscriber.scheduler
def onNext(elem: String): Future[Ack] =
subscriber.onNext(elem)
def onError(ex: Throwable): Unit = {
scheduler.reportFailure(ex)
// Retry connection in a couple of secs
self
.delaySubscription(3.seconds)
.unsafeSubscribeFn(subscriber)
}
def onComplete(): Unit = {
// Retry connection in a couple of secs
self
.delaySubscription(3.seconds)
.unsafeSubscribeFn(subscriber)
}
})
}
object SimpleWebSocketClient {
def apply(url: String, os: OverflowStrategy.Synchronous[String]): SimpleWebSocketClient = {
new SimpleWebSocketClient(url, os)
}
case class Exception(msg: String) extends RuntimeException(msg)
}
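// Hedged usage sketch (not part of the original app): subscribing to the buffered client with
// a drop-new overflow strategy. The URL, buffer size and logging are illustrative assumptions.
object SimpleWebSocketClientUsageExample {
  import monix.execution.Scheduler.Implicits.global

  def run(): Cancelable = {
    val lines = SimpleWebSocketClient("ws://localhost:9000/stream", OverflowStrategy.DropNew(1000))
    lines.subscribe { (line: String) =>
      Utils.log(s"received: $line")
      Ack.Continue // keep consuming; returning Stop would close the connection
    }
  }
}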
|
monifu/monix-ionic-sample
|
app-js/src/main/scala/mobile/stream/SimpleWebSocketClient.scala
|
Scala
|
gpl-2.0
| 4,026 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.raft
import java.io.File
import java.nio.ByteBuffer
import java.nio.file.{Files, Path}
import java.util.{Collections, Optional}
import kafka.log.Log
import kafka.server.KafkaRaftServer
import kafka.utils.{MockTime, TestUtils}
import org.apache.kafka.common.errors.{OffsetOutOfRangeException, RecordTooLargeException}
import org.apache.kafka.common.protocol
import org.apache.kafka.common.protocol.{ObjectSerializationCache, Writable}
import org.apache.kafka.common.record.{CompressionType, MemoryRecords, SimpleRecord}
import org.apache.kafka.common.utils.Utils
import org.apache.kafka.raft.internals.BatchBuilder
import org.apache.kafka.raft.{KafkaRaftClient, LogAppendInfo, LogOffsetMetadata, OffsetAndEpoch, RecordSerde, ReplicatedLog, ValidOffsetAndEpoch}
import org.apache.kafka.snapshot.{SnapshotPath, Snapshots}
import org.junit.jupiter.api.Assertions.{assertEquals, assertFalse, assertNotEquals, assertThrows, assertTrue}
import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
final class KafkaMetadataLogTest {
import KafkaMetadataLogTest._
var tempDir: File = _
val mockTime = new MockTime()
@BeforeEach
def setUp(): Unit = {
tempDir = TestUtils.tempDir()
}
@AfterEach
def tearDown(): Unit = {
Utils.delete(tempDir)
}
@Test
def testUnexpectedAppendOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val recordFoo = new SimpleRecord("foo".getBytes())
val currentEpoch = 3
val initialOffset = log.endOffset().offset
log.appendAsLeader(
MemoryRecords.withRecords(initialOffset, CompressionType.NONE, currentEpoch, recordFoo),
currentEpoch
)
// Throw exception for out of order records
assertThrows(
classOf[RuntimeException],
() => {
log.appendAsLeader(
MemoryRecords.withRecords(initialOffset, CompressionType.NONE, currentEpoch, recordFoo),
currentEpoch
)
}
)
assertThrows(
classOf[RuntimeException],
() => {
log.appendAsFollower(
MemoryRecords.withRecords(initialOffset, CompressionType.NONE, currentEpoch, recordFoo)
)
}
)
}
@Test
def testCreateSnapshot(): Unit = {
val numberOfRecords = 10
val epoch = 0
val snapshotId = new OffsetAndEpoch(numberOfRecords, epoch)
val log = buildMetadataLog(tempDir, mockTime)
append(log, numberOfRecords, epoch)
log.updateHighWatermark(new LogOffsetMetadata(numberOfRecords))
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
TestUtils.resource(log.readSnapshot(snapshotId).get()) { snapshot =>
assertEquals(0, snapshot.sizeInBytes())
}
}
@Test
def testReadMissingSnapshot(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
assertEquals(Optional.empty(), log.readSnapshot(new OffsetAndEpoch(10, 0)))
}
@Test
def testUpdateLogStartOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val offset = 10
val epoch = 0
val snapshotId = new OffsetAndEpoch(offset, epoch)
append(log, offset, epoch)
log.updateHighWatermark(new LogOffsetMetadata(offset))
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.deleteBeforeSnapshot(snapshotId))
assertEquals(offset, log.startOffset)
assertEquals(epoch, log.lastFetchedEpoch)
assertEquals(offset, log.endOffset().offset)
assertEquals(offset, log.highWatermark.offset)
val newRecords = 10
append(log, newRecords, epoch + 1)
// Start offset should not change since a new snapshot was not generated
assertFalse(log.deleteBeforeSnapshot(new OffsetAndEpoch(offset + newRecords, epoch)))
assertEquals(offset, log.startOffset)
assertEquals(epoch + 1, log.lastFetchedEpoch)
assertEquals(offset + newRecords, log.endOffset().offset)
assertEquals(offset, log.highWatermark.offset)
}
@Test
def testUpdateLogStartOffsetWillRemoveOlderSnapshot(): Unit = {
val (logDir, log) = buildMetadataLogAndDir(tempDir, mockTime)
val offset = 10
val epoch = 0
append(log, offset, epoch)
val oldSnapshotId = new OffsetAndEpoch(offset, epoch)
TestUtils.resource(log.createSnapshot(oldSnapshotId)) { snapshot =>
snapshot.freeze()
}
append(log, offset, epoch)
val newSnapshotId = new OffsetAndEpoch(offset * 2, epoch)
TestUtils.resource(log.createSnapshot(newSnapshotId)) { snapshot =>
snapshot.freeze()
}
log.updateHighWatermark(new LogOffsetMetadata(offset * 2))
assertTrue(log.deleteBeforeSnapshot(newSnapshotId))
log.close()
mockTime.sleep(log.fileDeleteDelayMs)
// Assert that the log dir doesn't contain any older snapshots
Files
.walk(logDir, 1)
.map[Optional[SnapshotPath]](Snapshots.parse)
.filter(_.isPresent)
.forEach { path =>
assertFalse(path.get.snapshotId.offset < log.startOffset)
}
}
@Test
def testUpdateLogStartOffsetWithMissingSnapshot(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val offset = 10
val epoch = 0
append(log, offset, epoch)
log.updateHighWatermark(new LogOffsetMetadata(offset))
assertFalse(log.deleteBeforeSnapshot(new OffsetAndEpoch(1L, epoch)))
assertEquals(0, log.startOffset)
assertEquals(epoch, log.lastFetchedEpoch)
assertEquals(offset, log.endOffset().offset)
assertEquals(offset, log.highWatermark.offset)
}
@Test
def testFailToIncreaseLogStartPastHighWatermark(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val offset = 10
val epoch = 0
val snapshotId = new OffsetAndEpoch(2 * offset, 1 + epoch)
append(log, offset, epoch)
log.updateHighWatermark(new LogOffsetMetadata(offset))
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
assertThrows(
classOf[OffsetOutOfRangeException],
() => log.deleteBeforeSnapshot(snapshotId)
)
}
@Test
def testTruncateFullyToLatestSnapshot(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 0
val sameEpochSnapshotId = new OffsetAndEpoch(2 * numberOfRecords, epoch)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(sameEpochSnapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.truncateToLatestSnapshot())
assertEquals(sameEpochSnapshotId.offset, log.startOffset)
assertEquals(sameEpochSnapshotId.epoch, log.lastFetchedEpoch)
assertEquals(sameEpochSnapshotId.offset, log.endOffset().offset)
assertEquals(sameEpochSnapshotId.offset, log.highWatermark.offset)
val greaterEpochSnapshotId = new OffsetAndEpoch(3 * numberOfRecords, epoch + 1)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(greaterEpochSnapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.truncateToLatestSnapshot())
assertEquals(greaterEpochSnapshotId.offset, log.startOffset)
assertEquals(greaterEpochSnapshotId.epoch, log.lastFetchedEpoch)
assertEquals(greaterEpochSnapshotId.offset, log.endOffset().offset)
assertEquals(greaterEpochSnapshotId.offset, log.highWatermark.offset)
}
@Test
def testTruncateWillRemoveOlderSnapshot(): Unit = {
val (logDir, log) = buildMetadataLogAndDir(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
append(log, 1, epoch - 1)
val oldSnapshotId1 = new OffsetAndEpoch(1, epoch - 1)
TestUtils.resource(log.createSnapshot(oldSnapshotId1)) { snapshot =>
snapshot.freeze()
}
append(log, 1, epoch)
val oldSnapshotId2 = new OffsetAndEpoch(2, epoch)
TestUtils.resource(log.createSnapshot(oldSnapshotId2)) { snapshot =>
snapshot.freeze()
}
append(log, numberOfRecords - 2, epoch)
val oldSnapshotId3 = new OffsetAndEpoch(numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(oldSnapshotId3)) { snapshot =>
snapshot.freeze()
}
val greaterSnapshotId = new OffsetAndEpoch(3 * numberOfRecords, epoch)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(greaterSnapshotId)) { snapshot =>
snapshot.freeze()
}
assertNotEquals(log.earliestSnapshotId(), log.latestSnapshotId())
assertTrue(log.truncateToLatestSnapshot())
assertEquals(log.earliestSnapshotId(), log.latestSnapshotId())
log.close()
mockTime.sleep(log.fileDeleteDelayMs)
// Assert that the log dir doesn't contain any older snapshots
Files
.walk(logDir, 1)
.map[Optional[SnapshotPath]](Snapshots.parse)
.filter(_.isPresent)
.forEach { path =>
assertFalse(path.get.snapshotId.offset < log.startOffset)
}
}
@Test
def testDoesntTruncateFully(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
append(log, numberOfRecords, epoch)
val olderEpochSnapshotId = new OffsetAndEpoch(numberOfRecords, epoch - 1)
TestUtils.resource(log.createSnapshot(olderEpochSnapshotId)) { snapshot =>
snapshot.freeze()
}
assertFalse(log.truncateToLatestSnapshot())
append(log, numberOfRecords, epoch)
val olderOffsetSnapshotId = new OffsetAndEpoch(numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(olderOffsetSnapshotId)) { snapshot =>
snapshot.freeze()
}
assertFalse(log.truncateToLatestSnapshot())
}
@Test
def testCleanupPartialSnapshots(): Unit = {
val (logDir, log) = buildMetadataLogAndDir(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
val snapshotId = new OffsetAndEpoch(1, epoch)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
log.close()
// Create a few partial snapshots
Snapshots.createTempFile(logDir, new OffsetAndEpoch(0, epoch - 1))
Snapshots.createTempFile(logDir, new OffsetAndEpoch(1, epoch))
Snapshots.createTempFile(logDir, new OffsetAndEpoch(2, epoch + 1))
val secondLog = buildMetadataLog(tempDir, mockTime)
assertEquals(snapshotId, secondLog.latestSnapshotId().get)
assertEquals(0, log.startOffset)
assertEquals(epoch, log.lastFetchedEpoch)
assertEquals(numberOfRecords, log.endOffset().offset)
assertEquals(0, secondLog.highWatermark.offset)
// Assert that the log dir doesn't contain any partial snapshots
Files
.walk(logDir, 1)
.map[Optional[SnapshotPath]](Snapshots.parse)
.filter(_.isPresent)
.forEach { path =>
assertFalse(path.get.partial)
}
}
@Test
def testCleanupOlderSnapshots(): Unit = {
val (logDir, log) = buildMetadataLogAndDir(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
append(log, 1, epoch - 1)
val oldSnapshotId1 = new OffsetAndEpoch(1, epoch - 1)
TestUtils.resource(log.createSnapshot(oldSnapshotId1)) { snapshot =>
snapshot.freeze()
}
append(log, 1, epoch)
val oldSnapshotId2 = new OffsetAndEpoch(2, epoch)
TestUtils.resource(log.createSnapshot(oldSnapshotId2)) { snapshot =>
snapshot.freeze()
}
append(log, numberOfRecords - 2, epoch)
val oldSnapshotId3 = new OffsetAndEpoch(numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(oldSnapshotId3)) { snapshot =>
snapshot.freeze()
}
val greaterSnapshotId = new OffsetAndEpoch(3 * numberOfRecords, epoch)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(greaterSnapshotId)) { snapshot =>
snapshot.freeze()
}
log.close()
val secondLog = buildMetadataLog(tempDir, mockTime)
assertEquals(greaterSnapshotId, secondLog.latestSnapshotId().get)
assertEquals(3 * numberOfRecords, secondLog.startOffset)
assertEquals(epoch, secondLog.lastFetchedEpoch)
mockTime.sleep(log.fileDeleteDelayMs)
// Assert that the log dir doesn't contain any older snapshots
Files
.walk(logDir, 1)
.map[Optional[SnapshotPath]](Snapshots.parse)
.filter(_.isPresent)
.forEach { path =>
assertFalse(path.get.snapshotId.offset < log.startOffset)
}
}
@Test
def testCreateReplicatedLogTruncatesFully(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
val snapshotId = new OffsetAndEpoch(numberOfRecords + 1, epoch + 1)
append(log, numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
log.close()
val secondLog = buildMetadataLog(tempDir, mockTime)
assertEquals(snapshotId, secondLog.latestSnapshotId().get)
assertEquals(snapshotId.offset, secondLog.startOffset)
assertEquals(snapshotId.epoch, secondLog.lastFetchedEpoch)
assertEquals(snapshotId.offset, secondLog.endOffset().offset)
assertEquals(snapshotId.offset, secondLog.highWatermark.offset)
}
@Test
def testMaxBatchSize(): Unit = {
val leaderEpoch = 5
val maxBatchSizeInBytes = 16384
val recordSize = 64
val log = buildMetadataLog(tempDir, mockTime, maxBatchSizeInBytes)
val oversizeBatch = buildFullBatch(leaderEpoch, recordSize, maxBatchSizeInBytes + recordSize)
assertThrows(classOf[RecordTooLargeException], () => {
log.appendAsLeader(oversizeBatch, leaderEpoch)
})
val undersizeBatch = buildFullBatch(leaderEpoch, recordSize, maxBatchSizeInBytes)
val appendInfo = log.appendAsLeader(undersizeBatch, leaderEpoch)
assertEquals(0L, appendInfo.firstOffset)
}
@Test
def testTruncateBelowHighWatermark(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numRecords = 10
val epoch = 5
append(log, numRecords, epoch)
assertEquals(numRecords.toLong, log.endOffset.offset)
log.updateHighWatermark(new LogOffsetMetadata(numRecords))
assertEquals(numRecords.toLong, log.highWatermark.offset)
assertThrows(classOf[IllegalArgumentException], () => log.truncateTo(5L))
assertEquals(numRecords.toLong, log.highWatermark.offset)
}
private def buildFullBatch(
leaderEpoch: Int,
recordSize: Int,
maxBatchSizeInBytes: Int
): MemoryRecords = {
val buffer = ByteBuffer.allocate(maxBatchSizeInBytes)
val batchBuilder = new BatchBuilder[Array[Byte]](
buffer,
new ByteArraySerde,
CompressionType.NONE,
0L,
mockTime.milliseconds(),
false,
leaderEpoch,
maxBatchSizeInBytes
)
val serializationCache = new ObjectSerializationCache
val records = Collections.singletonList(new Array[Byte](recordSize))
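    // Keep appending single records while they still fit in the batch; BatchBuilder.bytesNeeded
    // appears to return a value only once the next append would exceed maxBatchSizeInBytes,
    // which is what terminates this loop with a batch filled close to the limit.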
while (!batchBuilder.bytesNeeded(records, serializationCache).isPresent) {
batchBuilder.appendRecord(records.get(0), serializationCache)
}
batchBuilder.build()
}
@Test
def testValidateEpochGreaterThanLastKnownEpoch(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 1
val epoch = 1
append(log, numberOfRecords, epoch)
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(numberOfRecords, epoch + 1)
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, resultOffsetAndEpoch.kind)
assertEquals(new OffsetAndEpoch(log.endOffset.offset, epoch), resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateEpochLessThanOldestSnapshotEpoch(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
append(log, numberOfRecords, epoch)
log.updateHighWatermark(new LogOffsetMetadata(numberOfRecords))
val snapshotId = new OffsetAndEpoch(numberOfRecords, epoch)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.deleteBeforeSnapshot(snapshotId))
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(numberOfRecords, epoch - 1)
assertEquals(ValidOffsetAndEpoch.Kind.SNAPSHOT, resultOffsetAndEpoch.kind)
assertEquals(snapshotId, resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateOffsetLessThanOldestSnapshotOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val offset = 2
val epoch = 1
append(log, offset, epoch)
log.updateHighWatermark(new LogOffsetMetadata(offset))
val snapshotId = new OffsetAndEpoch(offset, epoch)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.deleteBeforeSnapshot(snapshotId))
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(offset - 1, epoch)
assertEquals(ValidOffsetAndEpoch.Kind.SNAPSHOT, resultOffsetAndEpoch.kind)
assertEquals(snapshotId, resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateOffsetEqualToOldestSnapshotOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val offset = 2
val epoch = 1
append(log, offset, epoch)
log.updateHighWatermark(new LogOffsetMetadata(offset))
val snapshotId = new OffsetAndEpoch(offset, epoch)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
assertTrue(log.deleteBeforeSnapshot(snapshotId))
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(offset, epoch)
assertEquals(ValidOffsetAndEpoch.Kind.VALID, resultOffsetAndEpoch.kind)
assertEquals(snapshotId, resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateUnknownEpochLessThanLastKnownGreaterThanOldestSnapshot(): Unit = {
val offset = 10
val numOfRecords = 5
val log = buildMetadataLog(tempDir, mockTime)
log.updateHighWatermark(new LogOffsetMetadata(offset))
val snapshotId = new OffsetAndEpoch(offset, 1)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
log.truncateToLatestSnapshot()
append(log, numOfRecords, epoch = 1)
append(log, numOfRecords, epoch = 2)
append(log, numOfRecords, epoch = 4)
// offset is not equal to oldest snapshot's offset
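    // Appended epochs cover offsets [10, 15), [15, 20) and [20, 25); the largest known epoch <= 3
    // is 2, whose end offset is 20, so the expected diverging position is (20, 2).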
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(100, 3)
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, resultOffsetAndEpoch.kind)
assertEquals(new OffsetAndEpoch(20, 2), resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateEpochLessThanFirstEpochInLog(): Unit = {
val offset = 10
val numOfRecords = 5
val log = buildMetadataLog(tempDir, mockTime)
log.updateHighWatermark(new LogOffsetMetadata(offset))
val snapshotId = new OffsetAndEpoch(offset, 1)
TestUtils.resource(log.createSnapshot(snapshotId)) { snapshot =>
snapshot.freeze()
}
log.truncateToLatestSnapshot()
append(log, numOfRecords, epoch = 3)
// offset is not equal to oldest snapshot's offset
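    // The requested epoch 2 is smaller than the first epoch in the log (3), so validation should
    // diverge back to the oldest snapshot at (10, 1).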
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(100, 2)
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, resultOffsetAndEpoch.kind)
assertEquals(snapshotId, resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateOffsetGreatThanEndOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 1
val epoch = 1
append(log, numberOfRecords, epoch)
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(numberOfRecords + 1, epoch)
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, resultOffsetAndEpoch.kind)
assertEquals(new OffsetAndEpoch(log.endOffset.offset, epoch), resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateOffsetLessThanLEO(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 10
val epoch = 1
append(log, numberOfRecords, epoch)
append(log, numberOfRecords, epoch + 1)
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(11, epoch)
assertEquals(ValidOffsetAndEpoch.Kind.DIVERGING, resultOffsetAndEpoch.kind)
assertEquals(new OffsetAndEpoch(10, epoch), resultOffsetAndEpoch.offsetAndEpoch())
}
@Test
def testValidateValidEpochAndOffset(): Unit = {
val log = buildMetadataLog(tempDir, mockTime)
val numberOfRecords = 5
val epoch = 1
append(log, numberOfRecords, epoch)
val resultOffsetAndEpoch = log.validateOffsetAndEpoch(numberOfRecords - 1, epoch)
assertEquals(ValidOffsetAndEpoch.Kind.VALID, resultOffsetAndEpoch.kind)
assertEquals(new OffsetAndEpoch(numberOfRecords - 1, epoch), resultOffsetAndEpoch.offsetAndEpoch())
}
}
object KafkaMetadataLogTest {
class ByteArraySerde extends RecordSerde[Array[Byte]] {
override def recordSize(data: Array[Byte], serializationCache: ObjectSerializationCache): Int = {
data.length
}
override def write(data: Array[Byte], serializationCache: ObjectSerializationCache, out: Writable): Unit = {
out.writeByteArray(data)
}
override def read(input: protocol.Readable, size: Int): Array[Byte] = {
val array = new Array[Byte](size)
input.readArray(array)
array
}
}
def buildMetadataLogAndDir(
tempDir: File,
time: MockTime,
maxBatchSizeInBytes: Int = KafkaRaftClient.MAX_BATCH_SIZE_BYTES,
maxFetchSizeInBytes: Int = KafkaRaftClient.MAX_FETCH_SIZE_BYTES
): (Path, KafkaMetadataLog) = {
val logDir = createLogDirectory(
tempDir,
Log.logDirName(KafkaRaftServer.MetadataPartition)
)
val metadataLog = KafkaMetadataLog(
KafkaRaftServer.MetadataPartition,
logDir,
time,
time.scheduler,
maxBatchSizeInBytes,
maxFetchSizeInBytes
)
(logDir.toPath, metadataLog)
}
def buildMetadataLog(
tempDir: File,
time: MockTime,
maxBatchSizeInBytes: Int = KafkaRaftClient.MAX_BATCH_SIZE_BYTES,
maxFetchSizeInBytes: Int = KafkaRaftClient.MAX_FETCH_SIZE_BYTES
): KafkaMetadataLog = {
val (_, log) = buildMetadataLogAndDir(tempDir, time, maxBatchSizeInBytes, maxFetchSizeInBytes)
log
}
def append(log: ReplicatedLog, numberOfRecords: Int, epoch: Int): LogAppendInfo = {
log.appendAsLeader(
MemoryRecords.withRecords(
log.endOffset().offset,
CompressionType.NONE,
epoch,
(0 until numberOfRecords).map(number => new SimpleRecord(number.toString.getBytes)): _*
),
epoch
)
}
private def createLogDirectory(logDir: File, logDirName: String): File = {
val logDirPath = logDir.getAbsolutePath
val dir = new File(logDirPath, logDirName)
if (!Files.exists(dir.toPath)) {
Files.createDirectories(dir.toPath)
}
dir
}
}
|
Chasego/kafka
|
core/src/test/scala/kafka/raft/KafkaMetadataLogTest.scala
|
Scala
|
apache-2.0
| 23,383 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zouzias.spark.lucenerdd.config
import com.typesafe.config.ConfigFactory
/**
* Load typesafe configuration
*/
trait Configurable extends Serializable {
lazy val Config = ConfigFactory.load()
}
|
zouzias/spark-lucenerdd
|
src/main/scala/org/zouzias/spark/lucenerdd/config/Configurable.scala
|
Scala
|
apache-2.0
| 1,011 |
/*
* Copyright 2009-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ccf.transport
class MalformedDataException(s: String) extends Exception(s)
|
akisaarinen/ccf
|
ccf/src/main/scala/ccf/transport/MalformedDataException.scala
|
Scala
|
apache-2.0
| 704 |
package org.rebeam.boxes.persistence
import org.rebeam.boxes.core._
import org.rebeam.boxes.persistence.formats._
import org.rebeam.boxes.persistence.json.JsonPrettyIO
import org.rebeam.boxes.persistence.buffers._
import org.scalacheck.Arbitrary
import org.scalatest._
import org.scalatest.prop.PropertyChecks
import scala.util.Try
class PersistenceSpec extends WordSpec with PropertyChecks with ShouldMatchers {
def duplicate[T: Format](t: T): Unit = {
val s = BufferIO.toTokens(t:T)
val d = BufferIO.fromTokens[T](s)
t shouldBe d
}
"Persistence" should {
"duplicate list of 100000 ints without stack overflow" in {
import PrimFormats._
import CollectionFormats._
val l = Range(1, 100000).toList
duplicate(l)
}
}
}
|
trepidacious/boxes-core
|
src/test/scala/org/rebeam/boxes/persistence/PersistenceSpec.scala
|
Scala
|
gpl-2.0
| 773 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.infinity.server.actions
import scala.concurrent.{Future, future}
import scala.concurrent.ExecutionContext.Implicits.global
import es.tid.cosmos.infinity.common.fs.Path
import es.tid.cosmos.infinity.common.permissions.PermissionsMask
import es.tid.cosmos.infinity.server.actions.Action.Context
import es.tid.cosmos.infinity.server.actions.MetadataAction.Created
import es.tid.cosmos.infinity.server.hadoop.NameNode
case class CreateDirectory(
nameNode: NameNode, on: Path, permissions: PermissionsMask) extends MetadataAction {
override def apply(context: Context): Future[MetadataAction.Result] = future {
nameNode.as(context.user) {
nameNode.createDirectory(on, context.user.username, context.user.groups.head, permissions)
Created(nameNode.pathMetadata(on))
}
}
}
|
telefonicaid/fiware-cosmos-platform
|
infinity/server/src/main/scala/es/tid/cosmos/infinity/server/actions/CreateDirectory.scala
|
Scala
|
apache-2.0
| 1,460 |
package qrygraph.shared.pig
import prickle.{CompositePickler, Pickler}
import qrygraph.shared.pig.Primitives._
/** the typing of a result a node creates */
case class ResultType(name: String, fields: List[PField])
/** represents a typed field as a data type like (username:chararray) */
case class PField(var name: String, typ: DataType)
/** A DataType defines the type of a given column */
sealed trait DataType
/** All Pig primitives data types */
object Primitives {
case object PByteArray extends DataType
case object PScalar extends DataType
case object PInt extends DataType
case object PLong extends DataType
case object PFloat extends DataType
case object PDouble extends DataType
case object PArray extends DataType
case object PCharArray extends DataType
case object PComplex extends DataType
//@todo
case object PDateTime extends DataType
case object PBigInteger extends DataType
case object PBgDecimal extends DataType
}
/** Pig tuple data type */
case class PTuple(fields: List[PField]) extends DataType
/** Pig bag data type */
case class PBag(fields: List[PField]) extends DataType
case class PMap(value: DataType) extends DataType
/** helper to serialize PigDataTypes over webSocket */
object PigDataTypesPickler {
// !!! as the data structure is recursive, we need to define the parent first
implicit var pickler = CompositePickler[DataType]
// then the recursive structure
implicit val fieldPickler: Pickler[PField] = Pickler.materializePickler[PField]
// then add all children
pickler = pickler.concreteType[PByteArray.type]
.concreteType[PScalar.type]
.concreteType[PInt.type]
.concreteType[PLong.type]
.concreteType[PFloat.type]
.concreteType[PDouble.type]
.concreteType[PArray.type]
.concreteType[PCharArray.type]
.concreteType[PComplex.type]
.concreteType[PMap]
.concreteType[PTuple]
.concreteType[PBag]
}
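
// Illustrative usage sketch (hypothetical, not part of the original file): with the picklers above
// in scope, a DataType could in principle be round-tripped through prickle, e.g.
//   import prickle._
//   import qrygraph.shared.pig.PigDataTypesPickler._
//   val json = Pickle.intoString(PTuple(List(PField("user", Primitives.PCharArray))): DataType)
//   val back = Unpickle[DataType].fromString(json)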
|
Starofall/QryGraph
|
qrygraph/shared/src/main/scala/qrygraph/shared/pig/PigDataTypes.scala
|
Scala
|
mit
| 1,932 |
package de.unihamburg.vsis.sddf.test.reading
import org.scalatest.FunSuite
import de.unihamburg.vsis.sddf.reading.SymPair
import de.unihamburg.vsis.sddf.reading.Tuple
import de.unihamburg.vsis.sddf.reading.TupleArray
import de.unihamburg.vsis.sddf.test.util.LocalSparkContext
class SymPairTest extends FunSuite with LocalSparkContext {
test("test equality and hashcode") {
val tuple1: Tuple = new TupleArray(1)
tuple1.addFeature(0, "test")
tuple1.id = 1
val tuple2: Tuple = new TupleArray(1)
    tuple2.addFeature(0, "test2")
tuple2.id = 2
val pair1 = new SymPair(tuple1, tuple2)
val pair2 = new SymPair(tuple2, tuple1)
assert(pair1 === pair1)
assert(pair2 === pair1)
assert(pair1 === pair2)
assert(pair2 === pair2)
assert(pair1.hashCode() === pair1.hashCode())
assert(pair2.hashCode() === pair1.hashCode())
assert(pair1.hashCode() === pair2.hashCode())
assert(pair2.hashCode() === pair2.hashCode())
val tuple3: Tuple = new TupleArray(1)
tuple3.addFeature(0, "test2")
tuple3.id = 2
val pair3 = new SymPair(tuple2, tuple3)
val pair4 = new SymPair(tuple3, tuple2)
assert(pair3 === pair4)
assert(!(pair1 === pair3))
assert(pair3.hashCode() === pair4.hashCode())
assert(!(pair1.hashCode() === pair3.hashCode()))
}
test("test subtraction of RDD[SymPair[Tuple]]") {
val tuple1 = new TupleArray(1)
tuple1.id = 1
val tuple2 = new TupleArray(1)
tuple2.id = 2
val pair1 = new SymPair(tuple1, tuple2)
val pair1swapped = new SymPair(tuple2, tuple1)
val listPair1 = Seq(pair1)
val listPair1Swapped = Seq(pair1swapped)
val list1 = sc.parallelize(listPair1)
val list1Swapped = sc.parallelize(listPair1Swapped)
val subtractionResult = list1.subtract(list1Swapped)
assert(subtractionResult.count() === 0)
}
}
|
numbnut/sddf
|
src/test/scala/de/unihamburg/vsis/sddf/test/reading/SymPairTest.scala
|
Scala
|
gpl-3.0
| 1,856 |
package org.jetbrains.plugins.scala
package codeInsight.intention.booleans
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.openapi.util.TextRange
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.project.Project
import com.intellij.openapi.editor.Editor
import lang.psi.api.expr.ScInfixExpr
import lang.psi.impl.ScalaPsiElementFactory
import extensions._
import com.intellij.psi.{PsiDocumentManager, PsiElement}
/**
* @author Ksenia.Sautina
* @since 4/23/12
*/
object ReplaceEqualsOrEqualityInInfixExprIntention {
def familyName = "Replace equals or equality in infix expression"
}
class ReplaceEqualsOrEqualityInInfixExprIntention extends PsiElementBaseIntentionAction {
def getFamilyName = ReplaceEqualsOrEqualityInInfixExprIntention.familyName
def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
val infixExpr: ScInfixExpr = PsiTreeUtil.getParentOfType(element, classOf[ScInfixExpr], false)
if (infixExpr == null) return false
val oper = infixExpr.operation.nameId.getText
if (oper != "equals" && oper != "==") return false
val range: TextRange = infixExpr.operation.nameId.getTextRange
val offset = editor.getCaretModel.getOffset
if (!(range.getStartOffset <= offset && offset <= range.getEndOffset)) return false
val replaceOper = Map("equals" -> "==", "==" -> "equals")
setText("Replace '" + oper + "' with '" + replaceOper(oper) + "'")
true
}
override def invoke(project: Project, editor: Editor, element: PsiElement) {
val infixExpr: ScInfixExpr = PsiTreeUtil.getParentOfType(element, classOf[ScInfixExpr], false)
if (infixExpr == null || !infixExpr.isValid) return
val start = infixExpr.getTextRange.getStartOffset
val expr = new StringBuilder
val replaceOper = Map("equals" -> "==", "==" -> "equals")
expr.append(infixExpr.getBaseExpr.getText).append(" ").append(replaceOper(infixExpr.operation.nameId.getText)).
append(" ").append(infixExpr.getArgExpr.getText)
val newInfixExpr = ScalaPsiElementFactory.createExpressionFromText(expr.toString(), element.getManager)
val size = newInfixExpr.asInstanceOf[ScInfixExpr].operation.nameId.getTextRange.getStartOffset -
newInfixExpr.getTextRange.getStartOffset
inWriteAction {
infixExpr.replace(newInfixExpr)
editor.getCaretModel.moveToOffset(start + size)
PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument)
}
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/codeInsight/intention/booleans/ReplaceEqualsOrEqualityInInfixExprIntention.scala
|
Scala
|
apache-2.0
| 2,540 |
package org.apache.spark.sql.cassandra
import java.net.InetAddress
import java.sql.Timestamp
import java.util.{UUID, Date}
import java.math.BigInteger
import org.apache.spark.sql.types.UTF8String
import org.apache.spark.sql.types.Decimal
import com.datastax.driver.core.{Row, ProtocolVersion}
import com.datastax.spark.connector.{TupleValue, UDTValue, GettableData}
import com.datastax.spark.connector.rdd.reader.{ThisRowReaderAsFactory, RowReader}
import com.datastax.spark.connector.types.TypeConverter
import org.apache.spark.sql.catalyst.expressions.{Row => SparkRow}
final class CassandraSQLRow(val columnNames: IndexedSeq[String], val columnValues: IndexedSeq[AnyRef])
extends GettableData with SparkRow with Serializable {
protected def fieldNames = columnNames
private[spark] def this() = this(null, null) // required by Kryo for deserialization :(
/** Generic getter for getting columns of any type.
* Looks the column up by its index. First column starts at index 0. */
private def get[T](index: Int)(implicit c: TypeConverter[T]): T =
c.convert(columnValues(index))
override def apply(i: Int) = columnValues(i)
override def copy() = this // immutable
override def size = super.size
override def getDouble(i: Int) = get[Double](i)
override def getFloat(i: Int) = get[Float](i)
override def getLong(i: Int) = get[Long](i)
override def getByte(i: Int) = get[Byte](i)
override def getBoolean(i: Int) = get[Boolean](i)
override def getShort(i: Int) = get[Short](i)
override def getInt(i: Int) = get[Int](i)
override def getString(i: Int) = get[String](i)
override def toSeq: Seq[Any] = columnValues
}
object CassandraSQLRow {
def fromJavaDriverRow(row: Row, columnNames: Array[String])(implicit protocolVersion: ProtocolVersion): CassandraSQLRow = {
val data = new Array[Object](columnNames.length)
for (i <- columnNames.indices) {
data(i) = GettableData.get(row, i)
data.update(i, toSparkSqlType(data(i)))
}
new CassandraSQLRow(columnNames, data)
}
implicit object CassandraSQLRowReader extends RowReader[CassandraSQLRow] with ThisRowReaderAsFactory[CassandraSQLRow] {
override def read(row: Row, columnNames: Array[String])(implicit protocolVersion: ProtocolVersion): CassandraSQLRow =
fromJavaDriverRow(row, columnNames)
override def neededColumns = None
override def targetClass = classOf[CassandraSQLRow]
}
private def toSparkSqlType(value: Any): AnyRef = {
value match {
case date: Date => new Timestamp(date.getTime)
case str: String => UTF8String(str)
case bigInteger: BigInteger => Decimal(bigInteger.toString)
case inetAddress: InetAddress => UTF8String(inetAddress.getHostAddress)
case uuid: UUID => UTF8String(uuid.toString)
case set: Set[_] => set.map(toSparkSqlType).toSeq
case list: List[_] => list.map(toSparkSqlType)
case map: Map[_, _] => map map { case(k, v) => (toSparkSqlType(k), toSparkSqlType(v))}
case udt: UDTValue => UDTValue(udt.columnNames, udt.columnValues.map(toSparkSqlType))
case tupleValue: TupleValue => TupleValue(tupleValue.values.map(toSparkSqlType))
case _ => value.asInstanceOf[AnyRef]
}
}
}
|
viirya/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/org/apache/spark/sql/cassandra/CassandraSQLRow.scala
|
Scala
|
apache-2.0
| 3,228 |
package cssprocessor
import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit
import org.specs2.mutable.Specification
import org.specs2.specification.AfterAll
import cats.effect.IO
import fs2._
import fs2.io.tcp
import spinoco.fs2.http
import http._
import http.websocket._
import spinoco.fs2.http.util._
import spinoco.protocol.http._
import spinoco.protocol.http.Uri
import spinoco.protocol.http.header._
import spinoco.protocol.http.header.value._
import scala.concurrent.duration._
class CssProcessorSpec extends Specification with AfterAll {
import Resources._
import CssProcessor._
def afterAll(): Unit = {
println("shutting down ...")
AG.shutdownNow()
println("awaiting termination...")
AG.awaitTermination(16, TimeUnit.SECONDS)
()
}
val sample1: String =
s"""
|:display a { display: flex; }
""".stripMargin
//TODO: https://github.com/scalavision/scunicorn/issues/1
"CssProcessor" should {
"process incoming css raw text" in {
val startServer = cssProcessor
val cssOutput: Stream[IO, Byte] = {
Sch.sleep[IO](4.seconds) ++ tcp.client[IO](
new InetSocketAddress("127.0.0.1", 5001)
).flatMap { (socket: io.tcp.Socket[IO]) =>
Stream(sample1)
.through(text.utf8Encode).covary[IO]
.to(socket.writes())
.drain.onFinalize(socket.endOfOutput) ++
socket.reads(1024, None)
}
      }.collect { case b: Byte => b } // drop the Unit element emitted by the scheduler sleep
println("starting up the server and sending data ...")
(startServer mergeHaltBoth cssOutput.through(
text.utf8Decode andThen CssStreamHandler.cssBlocks
).to(log("result"))).run.unsafeRunTimed(10.seconds)
println("data processed, finished")
1 === 1
}
}
}
|
scalavision/scunicorn
|
cssProcessing/src/test/scala/cssprocessor/CssProcessorSpec.scala
|
Scala
|
apache-2.0
| 2,050 |
package model.json
/**
* @author Camilo Sampedro <[email protected]>
*/
case class LaboratoryJson(name: String,
location: Option[String],
administration: Option[String])
|
ProjectAton/AtonLab
|
app/model/json/LaboratoryJson.scala
|
Scala
|
gpl-3.0
| 237 |
package net.fehmicansaglam.pide.examples
import java.util.UUID
import akka.util.Timeout
import net.fehmicansaglam.bson.BsonDocument
import net.fehmicansaglam.bson.BsonDsl._
import net.fehmicansaglam.pide.{Dao, Entity, StringPide}
import net.fehmicansaglam.tepkin.{MongoClient, MongoCollection}
import scala.concurrent.Await
import scala.concurrent.duration._
object Example2 extends App {
val client = MongoClient("mongodb://localhost")
val db = client("tepkin")
case class Person(id: String,
name: String,
surname: String,
age: Int) extends Entity[String]
object PersonDao extends Dao[String, Person] {
override val collection: MongoCollection = db("person")
}
implicit object PersonPide extends StringPide[Person] {
override def read(document: BsonDocument): Person = {
Person(
id = document.getAs[String]("_id").get,
name = document.getAs[String]("name").get,
surname = document.getAs[String]("surname").get,
age = document.getAs[Int]("age").get
)
}
override def write(person: Person): BsonDocument = {
("_id" := person.id) ~
("name" := person.name) ~
("surname" := person.surname) ~
("age" := person.age)
}
}
val person1 = Person(UUID.randomUUID().toString, "name1", "surname1", 16)
val person2 = Person(UUID.randomUUID().toString, "name2", "surname2", 32)
import client.ec
implicit val timeout: Timeout = 5.seconds
val result = for {
insert1 <- PersonDao.insert(person1)
insert2 <- PersonDao.insert(person2)
drop <- PersonDao.collection.drop()
} yield drop
Await.ready(result, 30.seconds)
client.shutdown()
}
|
fehmicansaglam/tepkin
|
examples/src/main/scala/net/fehmicansaglam/pide/examples/Example2.scala
|
Scala
|
apache-2.0
| 1,727 |
package org.phenoscape.owl.build
import com.bigdata.journal.Options
import com.bigdata.rdf.sail.{BigdataSail, BigdataSailRepository}
import org.openrdf.query.QueryLanguage
import java.io.{File, FileReader}
import java.util.Properties
import scala.io.Source
object RunSPARQLUpdate extends App {
val BlazegraphProperties = new File(args(0))
val BlazegraphJournal = new File(args(1))
val queryFile = new File(args(2))
val blazegraphProperties = new Properties()
blazegraphProperties.load(new FileReader(BlazegraphProperties))
blazegraphProperties.setProperty(Options.FILE, BlazegraphJournal.getAbsolutePath)
val sail = new BigdataSail(blazegraphProperties)
val repository = new BigdataSailRepository(sail)
repository.initialize()
val blazegraph = repository.getUnisolatedConnection()
val query = blazegraph.prepareUpdate(QueryLanguage.SPARQL, Source.fromFile(queryFile, "utf-8").mkString)
query.execute()
blazegraph.close()
}
|
phenoscape/phenoscape-owl-tools
|
src/main/scala/org/phenoscape/owl/build/RunSPARQLUpdate.scala
|
Scala
|
mit
| 956 |
object Test {
def f[X, Y](x: X, y: Y): Int = ???
f[Int, String](1, "")
f[X = Int, Y = String](1, "")
f[X = Int](1, "")
f[Y = String](1, "")
}
|
som-snytt/dotty
|
tests/pos/namedTypeParams.scala
|
Scala
|
apache-2.0
| 156 |
package com.kifi.franz
import play.api.libs.iteratee.Enumerator
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.concurrent.duration.{FiniteDuration, SECONDS}
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import com.amazonaws.services.sqs.AmazonSQSAsync
import com.amazonaws.services.sqs.model._
import com.amazonaws.handlers.AsyncHandler
case class QueueName(name: String)
case class MessageId(id: String)
case class SQSMessage[T](
id: MessageId,
body: T,
consume: () => Unit,
setVisibilityTimeout: (FiniteDuration) => Unit,
attributes: Map[String,String],
messageAttributes: Map[String, MessageAttributeValue]) {
def consume[K](block: T => K): K = {
val returnValue = block(body)
consume()
returnValue
}
}
object SQSQueue {
val DefaultWaitTimeout = FiniteDuration(10, SECONDS)
}
trait SQSQueue[T]{
import SQSQueue._
val queue: QueueName
protected val sqs: AmazonSQSAsync
protected val createIfNotExists: Boolean
protected implicit def asString(obj: T): String
protected implicit def fromString(s: String): T
protected val queueUrl: String = initQueueUrl()
protected def initQueueUrl() = {
try {
sqs.getQueueUrl(new GetQueueUrlRequest(queue.name)).getQueueUrl
} catch {
case t: com.amazonaws.services.sqs.model.QueueDoesNotExistException if createIfNotExists => sqs.createQueue(new CreateQueueRequest(queue.name)).getQueueUrl
case t: Throwable => throw t
}
}
protected def stringMessageAttribute( attributeValue: String ): MessageAttributeValue = {
val attr = new MessageAttributeValue()
attr.setDataType("String")
attr.setStringValue(attributeValue)
attr
}
def send(msg: T ): Future[MessageId] = {
send (msg, None)
}
def send(msg: T, delay:Int): Future[MessageId] = {
send (msg, None, Some(delay))
}
def send(msg: T, messageAttributes: Option[Map[String, String]] = None, delay:Option[Int] = None): Future[MessageId] = {
val request = new SendMessageRequest
request.setMessageBody(msg)
request.setQueueUrl(queueUrl)
delay.map{ d =>
request.setDelaySeconds(d)
}
// foreach on an Option unfolds Some, and skips if None
messageAttributes.foreach { ma =>
ma.foreach { case (k,v) =>
request.addMessageAttributesEntry(k, stringMessageAttribute(v))
}
}
val p = Promise[MessageId]()
sqs.sendMessageAsync(request, new AsyncHandler[SendMessageRequest,SendMessageResult]{
def onError(exception: Exception) = p.failure(exception)
def onSuccess(req: SendMessageRequest, res: SendMessageResult) = p.success(MessageId(res.getMessageId))
})
p.future
}
def sendBatch(msg: Seq[(T, Option[Map[String, String]])], delay: Option[Int] = None): Future[(Seq[MessageId],Seq[MessageId])] = {
val request = new SendMessageBatchRequest()
request.setQueueUrl(queueUrl)
val entries = msg.zipWithIndex.map { case ((message, attributes), index) =>
val entry = new SendMessageBatchRequestEntry()
delay.foreach(entry.setDelaySeconds(_))
attributes.foreach(m => m.foreach { case (k, v) =>
entry.addMessageAttributesEntry(k, stringMessageAttribute(v))
})
entry.setMessageBody(message)
entry.setId(index.toString)
entry
}
request.setEntries(entries.asJavaCollection)
val p = Promise[(Seq[MessageId], Seq[MessageId])]()
sqs.sendMessageBatchAsync(request, new AsyncHandler[SendMessageBatchRequest,SendMessageBatchResult]{
def onError(exception: Exception) = p.failure(exception)
def onSuccess(req: SendMessageBatchRequest, res: SendMessageBatchResult) = p.success((res.getSuccessful.asScala.map(m => MessageId(m.getMessageId)), res.getFailed.asScala.map(m => MessageId(m.getId))))
})
p.future
}
def attributes(attributeNames:Seq[String]):Future[Map[String,String]]={
val request = new GetQueueAttributesRequest()
request.setQueueUrl(queueUrl)
import scala.collection.JavaConversions._
request.setAttributeNames(attributeNames)
val p = Promise[Map[String,String]]()
sqs.getQueueAttributesAsync(request, new AsyncHandler[GetQueueAttributesRequest, GetQueueAttributesResult]{
def onError(exception: Exception) = p.failure(exception)
def onSuccess(req: GetQueueAttributesRequest, response: GetQueueAttributesResult) = {
try {
val rawMessages = response.getAttributes
p.success(rawMessages.asScala.toMap)
} catch {
case t: Throwable => p.failure(t)
}
}
})
p.future
}
protected def nextBatchRequestWithLock(requestMaxBatchSize: Int, lockTimeout: FiniteDuration, waitTimeout: FiniteDuration): Future[Seq[SQSMessage[T]]] = {
val request = new ReceiveMessageRequest
request.setMaxNumberOfMessages(requestMaxBatchSize)
request.setVisibilityTimeout(lockTimeout.toSeconds.toInt)
request.setWaitTimeSeconds(waitTimeout.toSeconds.toInt)
request.setQueueUrl(queueUrl)
request.withMessageAttributeNames("All")
request.withAttributeNames("All")
val p = Promise[Seq[SQSMessage[T]]]()
sqs.receiveMessageAsync(request, new AsyncHandler[ReceiveMessageRequest, ReceiveMessageResult]{
def onError(exception: Exception) = p.failure(exception)
def onSuccess(req: ReceiveMessageRequest, response: ReceiveMessageResult) = {
try {
val rawMessages = response.getMessages
p.success(rawMessages.asScala.map { rawMessage =>
SQSMessage[T](
id = MessageId(rawMessage.getMessageId),
body = rawMessage.getBody,
consume = {() =>
val request = new DeleteMessageRequest
request.setQueueUrl(queueUrl)
request.setReceiptHandle(rawMessage.getReceiptHandle)
sqs.deleteMessageAsync(request)
},
setVisibilityTimeout = {(timeout: FiniteDuration) =>
val request = (new ChangeMessageVisibilityRequest)
.withQueueUrl(queueUrl)
.withReceiptHandle(rawMessage.getReceiptHandle)
.withVisibilityTimeout(timeout.toSeconds.toInt)
sqs.changeMessageVisibilityAsync(request)
},
attributes = rawMessage.getAttributes.asScala.toMap,
messageAttributes = rawMessage.getMessageAttributes.asScala.toMap)
})
} catch {
case t: Throwable => p.failure(t)
}
}
})
p.future
}
def nextBatchWithLock(maxBatchSize: Int, lockTimeout: FiniteDuration, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Future[Seq[SQSMessage[T]]] = {
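    // A single SQS ReceiveMessage call returns at most 10 messages, so a larger maxBatchSize is
    // split into several concurrent requests below and the combined results are de-duplicated by
    // message id.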
val maxBatchSizePerRequest = 10
val requiredBatchRequests = Seq.fill(maxBatchSize / maxBatchSizePerRequest)(maxBatchSizePerRequest) :+ (maxBatchSize % maxBatchSizePerRequest)
val futureBatches = requiredBatchRequests.collect {
case requestMaxBatchSize if requestMaxBatchSize > 0 => nextBatchRequestWithLock(requestMaxBatchSize, lockTimeout, waitTimeout)
}
Future.sequence(futureBatches).map { batches =>
val messages = batches.flatten
val distinctMessages = messages.map { message => message.id -> message }.toMap.values
distinctMessages.toSeq
}
}
def next(waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Future[Option[SQSMessage[T]]] = nextBatchRequestWithLock(1, FiniteDuration(0, SECONDS), waitTimeout).map(_.headOption)
def nextWithLock(lockTimeout: FiniteDuration, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Future[Option[SQSMessage[T]]] = nextBatchRequestWithLock(1, lockTimeout, waitTimeout).map(_.headOption)
def nextBatch(maxBatchSize: Int, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Future[Seq[SQSMessage[T]]] = nextBatchWithLock(maxBatchSize, FiniteDuration(0, SECONDS), waitTimeout)
def enumerator(waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Enumerator[SQSMessage[T]] = Enumerator.repeatM[SQSMessage[T]]{ loopFuture(next(waitTimeout)(ec)) }
def enumeratorWithLock(lockTimeout: FiniteDuration, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Enumerator[SQSMessage[T]] = Enumerator.repeatM[SQSMessage[T]]{ loopFuture(nextWithLock(lockTimeout, waitTimeout)) }
def batchEnumerator(maxBatchSize:Int, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Enumerator[Seq[SQSMessage[T]]] = Enumerator.repeatM[Seq[SQSMessage[T]]]{ loopFutureBatch(nextBatch(maxBatchSize, waitTimeout)) }
def batchEnumeratorWithLock(maxBatchSize:Int, lockTimeout: FiniteDuration, waitTimeout: FiniteDuration = DefaultWaitTimeout)(implicit ec: ExecutionContext): Enumerator[Seq[SQSMessage[T]]] = Enumerator.repeatM[Seq[SQSMessage[T]]]{ loopFutureBatch(nextBatchWithLock(maxBatchSize, lockTimeout, waitTimeout)) }
private def loopFuture[A](f: => Future[Option[A]], promise: Promise[A] = Promise[A]())(implicit ec: ExecutionContext): Future[A] = {
f.onComplete {
case util.Success(Some(res)) => promise.success(res)
case util.Success(None) => loopFuture(f, promise)
case util.Failure(ex) => promise.failure(ex)
}
promise.future
}
private def loopFutureBatch[A](f: => Future[Seq[A]], promise: Promise[Seq[A]] = Promise[Seq[A]]())(implicit ec: ExecutionContext): Future[Seq[A]] = {
f.onComplete {
case util.Success(res) if res.nonEmpty => promise.success(res)
case util.Success(res) if res.isEmpty => loopFutureBatch(f, promise)
case util.Failure(ex) => promise.failure(ex)
}
promise.future
}
}
|
kifi/franz
|
src/main/scala/com/kifi/franz/SQSQueue.scala
|
Scala
|
mit
| 9,781 |
package app
import service.{AccountService, SystemSettingsService}
import SystemSettingsService._
import util.AdminAuthenticator
import jp.sf.amateras.scalatra.forms._
class SystemSettingsController extends SystemSettingsControllerBase
with SystemSettingsService with AccountService with AdminAuthenticator
trait SystemSettingsControllerBase extends ControllerBase {
self: SystemSettingsService with AccountService with AdminAuthenticator =>
private val form = mapping(
"baseUrl" -> trim(label("Base URL", optional(text()))),
"allowAccountRegistration" -> trim(label("Account registration", boolean())),
"gravatar" -> trim(label("Gravatar", boolean())),
"notification" -> trim(label("Notification", boolean())),
"smtp" -> optionalIfNotChecked("notification", mapping(
"host" -> trim(label("SMTP Host", text(required))),
"port" -> trim(label("SMTP Port", optional(number()))),
"user" -> trim(label("SMTP User", optional(text()))),
"password" -> trim(label("SMTP Password", optional(text()))),
"ssl" -> trim(label("Enable SSL", optional(boolean()))),
"fromAddress" -> trim(label("FROM Address", optional(text()))),
"fromName" -> trim(label("FROM Name", optional(text())))
)(Smtp.apply)),
"ldapAuthentication" -> trim(label("LDAP", boolean())),
"ldap" -> optionalIfNotChecked("ldapAuthentication", mapping(
"host" -> trim(label("LDAP host", text(required))),
"port" -> trim(label("LDAP port", optional(number()))),
"bindDN" -> trim(label("Bind DN", optional(text()))),
"bindPassword" -> trim(label("Bind Password", optional(text()))),
"baseDN" -> trim(label("Base DN", text(required))),
"userNameAttribute" -> trim(label("User name attribute", text(required))),
"fullNameAttribute" -> trim(label("Full name attribute", optional(text()))),
"mailAttribute" -> trim(label("Mail address attribute", text(required))),
"tls" -> trim(label("Enable TLS", optional(boolean()))),
"keystore" -> trim(label("Keystore", optional(text())))
)(Ldap.apply))
)(SystemSettings.apply)
get("/admin/system")(adminOnly {
admin.html.system(loadSystemSettings(), flash.get("info"))
})
post("/admin/system", form)(adminOnly { form =>
saveSystemSettings(form)
    flash += "info" -> "System settings have been updated."
redirect("/admin/system")
})
}
|
wangsicong/gitbucket
|
src/main/scala/app/SystemSettingsController.scala
|
Scala
|
apache-2.0
| 2,795 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
trait FreeSpecLike extends FreeSpecRegistration with ClassicTests
|
cheeseng/scalatest
|
scalatest/src/main/scala/org/scalatest/fixture/FreeSpecLike.scala
|
Scala
|
apache-2.0
| 698 |
package vexriscv.experimental
import spinal.core._
import spinal.lib.eda.bench.{AlteraStdTargets, Bench, Rtl, XilinxStdTargets}
import spinal.lib.eda.icestorm.IcestormStdTargets
import vexriscv.demo.{GenSmallestNoCsr, Murax, MuraxConfig}
import vexriscv.plugin._
import vexriscv.{VexRiscv, VexRiscvConfig, plugin}
/**
* Created by spinalvm on 15.06.17.
*/
object GenMicro extends App{
def cpu() = {
val removeOneFetchStage = true
val pessimisticHazard = true
val writeBackOpt = true
val rspHoldValue = true
val withCompliantCsr = true
val withCompliantCsrPlusEmulation = true
val earlyBranch = false
val noShifter = false
val onlyLoadWords = false
new VexRiscv(
config = VexRiscvConfig(
plugins = List(
// new PcManagerSimplePlugin(
// resetVector = 0x00000000l,
// relaxedPcCalculation = false
// ),
new IBusSimplePlugin(
resetVector = 0x80000000l,
cmdForkOnSecondStage = false,
cmdForkPersistence = false,
prediction = NONE,
catchAccessFault = false,
compressedGen = false,
injectorStage = !removeOneFetchStage,
rspHoldValue = rspHoldValue
),
new DBusSimplePlugin(
catchAddressMisaligned = withCompliantCsr,
catchAccessFault = false,
earlyInjection = writeBackOpt,
onlyLoadWords = onlyLoadWords
),
new DecoderSimplePlugin(
catchIllegalInstruction = withCompliantCsrPlusEmulation
),
new RegFilePlugin(
regFileReadyKind = plugin.SYNC,
zeroBoot = false,
readInExecute = removeOneFetchStage,
writeRfInMemoryStage = writeBackOpt
),
new IntAluPlugin,
new SrcPlugin(
separatedAddSub = false,
executeInsertion = removeOneFetchStage
),
if(!pessimisticHazard)
new HazardSimplePlugin(
bypassExecute = false,
bypassMemory = false,
bypassWriteBack = false,
bypassWriteBackBuffer = false,
pessimisticUseSrc = false,
pessimisticWriteRegFile = false,
pessimisticAddressMatch = false
)
else
new HazardPessimisticPlugin(),
new BranchPlugin(
earlyBranch = earlyBranch,
catchAddressMisaligned = withCompliantCsr,
fenceiGenAsAJump = withCompliantCsr
),
new YamlPlugin("cpu0.yaml")
) ++ (if(noShifter) Nil else List(new LightShifterPlugin))
++ (if(!withCompliantCsr) Nil else List(new CsrPlugin(
config = if(withCompliantCsrPlusEmulation)CsrPluginConfig(
catchIllegalAccess = true,
mvendorid = null,
marchid = null,
mimpid = null,
mhartid = null,
misaExtensionsInit = 0,
misaAccess = CsrAccess.NONE,
mtvecAccess = CsrAccess.NONE,
mtvecInit = 0x80000020l,
mepcAccess = CsrAccess.NONE,
mscratchGen = false,
mcauseAccess = CsrAccess.READ_ONLY,
mbadaddrAccess = CsrAccess.NONE,
mcycleAccess = CsrAccess.NONE,
minstretAccess = CsrAccess.NONE,
ecallGen = false,
ebreakGen = false,
wfiGenAsWait = false,
wfiGenAsNop = false,
ucycleAccess = CsrAccess.NONE,
noCsrAlu = true
) else CsrPluginConfig(
catchIllegalAccess = false,
mvendorid = null,
marchid = null,
mimpid = null,
mhartid = null,
misaExtensionsInit = 0,
misaAccess = CsrAccess.READ_ONLY,
mtvecAccess = CsrAccess.WRITE_ONLY,
mtvecInit = 0x80000020l,
mepcAccess = CsrAccess.READ_WRITE,
mscratchGen = true,
mcauseAccess = CsrAccess.READ_ONLY,
mbadaddrAccess = CsrAccess.READ_ONLY,
mcycleAccess = CsrAccess.NONE,
minstretAccess = CsrAccess.NONE,
ecallGen = true,
ebreakGen = true,
wfiGenAsWait = false,
wfiGenAsNop = true,
ucycleAccess = CsrAccess.NONE
)
)))
)
)
}
SpinalConfig(mergeAsyncProcess = false).generateVerilog(cpu())
}
object GenMicroSynthesis {
def main(args: Array[String]) {
val microNoCsr = new Rtl {
override def getName(): String = "MicroNoCsr"
override def getRtlPath(): String = "MicroNoCsr.v"
SpinalVerilog(GenMicro.cpu().setDefinitionName(getRtlPath().split("\\.").head))
}
val smallestNoCsr = new Rtl {
override def getName(): String = "SmallestNoCsr"
override def getRtlPath(): String = "SmallestNoCsr.v"
SpinalVerilog(GenSmallestNoCsr.cpu().setDefinitionName(getRtlPath().split("\\.").head))
}
val rtls = List(microNoCsr)
// val rtls = List(smallestNoCsr)
val targets = IcestormStdTargets().take(1) ++ XilinxStdTargets(
vivadoArtix7Path = "/eda/Xilinx/Vivado/2017.2/bin"
) ++ AlteraStdTargets(
quartusCycloneIVPath = "/eda/intelFPGA_lite/17.0/quartus/bin/",
quartusCycloneVPath = "/eda/intelFPGA_lite/17.0/quartus/bin/"
)
Bench(rtls, targets, "/eda/tmp/")
}
}
|
SpinalHDL/VexRiscv
|
src/test/scala/vexriscv/experimental/GenMicro.scala
|
Scala
|
mit
| 5,617 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity, Initializable}
import com.intel.analytics.bigdl.optim.Regularizer
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, BigDLModule}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.tensor._
import com.intel.analytics.bigdl.utils.serializer._
import com.intel.analytics.bigdl.utils.serializer.converters.DataConverter
import com.intel.analytics.bigdl.utils.{Shape, T, Table}
import scala.reflect.ClassTag
import scala.reflect.runtime.universe
/**
* Apply a 2D full convolution over an input image.
*
* The input tensor is expected to be a 3D or 4D(with batch) tensor. Note that instead
* of setting adjW and adjH, SpatialFullConvolution[Table, T] also accepts a table input
* with two tensors: T(convInput, sizeTensor) where convInput is the standard input tensor,
* and the size of sizeTensor is used to set the size of the output (will ignore the adjW and
* adjH values used to construct the module). This module can be used without a bias by setting
* parameter noBias = true while constructing the module.
*
* If input is a 3D tensor nInputPlane x height x width,
* owidth = (width - 1) * dW - 2*padW + kW + adjW
* oheight = (height - 1) * dH - 2*padH + kH + adjH
*
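 * As an illustrative example (values chosen here only for the calculation): with
 * width = height = 4, kW = kH = 3, dW = dH = 2, padW = padH = 1 and adjW = adjH = 0,
 * owidth = (4 - 1) * 2 - 2*1 + 3 + 0 = 7, so each 4 x 4 input plane maps to a 7 x 7 output plane.
 *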
* Other frameworks call this operation "In-network Upsampling", "Fractionally-strided convolution",
* "Backwards Convolution," "Deconvolution", or "Upconvolution."
*
* Reference Paper: Long J, Shelhamer E, Darrell T. Fully convolutional networks for semantic
* segmentation[C]//Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition.
* 2015: 3431-3440.
*
* @param nInputPlane The number of expected input planes in the image given into forward()
* @param nOutputPlane The number of output planes the convolution layer will produce.
* @param kW The kernel width of the convolution.
* @param kH The kernel height of the convolution.
* @param dW The step of the convolution in the width dimension. Default is 1.
* @param dH The step of the convolution in the height dimension. Default is 1.
* @param padW The additional zeros added per width to the input planes. Default is 0.
* @param padH The additional zeros added per height to the input planes. Default is 0.
* @param adjW Extra width to add to the output image. Default is 0.
* @param adjH Extra height to add to the output image. Default is 0.
* @param nGroup Kernel group number.
* @param noBias If bias is needed.
* @param wRegularizer: instance of [[Regularizer]]
* (eg. L1 or L2 regularization), applied to the input weights matrices.
* @param bRegularizer: instance of [[Regularizer]]
* applied to the bias.
*/
@SerialVersionUID(- 3110412775551642284L)
class SpatialFullConvolution[T: ClassTag](
val nInputPlane: Int,
val nOutputPlane: Int,
val kW: Int,
val kH: Int,
val dW: Int = 1,
val dH: Int = 1,
val padW: Int = 0,
val padH: Int = 0,
var adjW: Int = 0,
var adjH: Int = 0,
val nGroup: Int = 1,
val noBias: Boolean = false,
var wRegularizer: Regularizer[T] = null,
var bRegularizer: Regularizer[T] = null
)(implicit ev: TensorNumeric[T])
extends AbstractModule[Activity, Tensor[T], T] with Initializable {
  require(adjW <= dW - 1 && adjH <= dH - 1,
    s"SpatialFullConvolution: adjW=$adjW and adjH=$adjH must be no larger than " +
      s"(dW - 1)=${dW - 1} and (dH - 1)=${dH - 1} respectively")
val weight: Tensor[T] = Tensor[T](nGroup, nInputPlane / nGroup,
nOutputPlane / nGroup, kH, kW)
val bias: Tensor[T] = if (noBias) null else Tensor[T](nOutputPlane)
val gradWeight: Tensor[T] = Tensor[T](nGroup, nInputPlane / nGroup, nOutputPlane / nGroup, kH, kW)
val gradBias: Tensor[T] = if (noBias) null else Tensor[T](nOutputPlane)
private val columns: Tensor[T] = Tensor[T]()
private val ones: Tensor[T] = Tensor[T]()
private val zeroScalar: Tensor[T] = Tensor[T]()
protected val onesBias = Tensor[T]()
protected val onesBatch = Tensor[T]()
protected var weightMM: Tensor[T] = null
protected val gradientBiasMT: Tensor[T] = Tensor[T]()
protected val gradWeightMMInBatch: Tensor[T] = Tensor[T]()
protected val _1x1 = if (kH == 1 && kW == 1 && dW == 1 && dH == 1
&& padH == 0 && padW == 0) {
true
} else {
false
}
{
val stdv = 1.0 / math.sqrt(kW * kH * nInputPlane)
val wInit = RandomUniform(-stdv, stdv)
val bInit = RandomUniform(-stdv, stdv)
setInitMethod(wInit, bInit)
}
private var im2colTime = 0L
private var col2imTime = 0L
def getIm2ColTime(): Double = im2colTime
def getCol2ImgTime(): Double = col2imTime
override def reset(): Unit = {
weightInitMethod.init(weight, VariableFormat.GP_IN_OUT_KW_KH)
Option(bias).foreach(biasInitMethod.init(_, VariableFormat.ONE_D))
zeroGradParameters()
}
private def calculateAdj(targetSize : Int, ker : Int, pad : Int, stride : Int) : Int = {
(targetSize + 2 * pad - ker) % stride
}
private def shapeCheck(input : Tensor[T], gradOutput : Tensor[T],
weight : Tensor[T], bias : Tensor[T],
kH : Int, kW : Int,
dH : Int, dW : Int,
padH : Int, padW : Int,
adjH : Int, adjW : Int) : Unit = {
require(kW > 0 && kH > 0, s"SpatialFullConvolution: kernel size should be greater than zero, " +
s"but got kH: $kH kW: $kW")
require(dW > 0 && dH > 0, s"SpatialFullConvolution: stride should be greater than zero, " +
s"but got dH: $dH dW: $dW")
require(weight.nDimension == 3 || weight.nDimension == 5,
s"SpatialFullConvolution: 3D or 5D weight tensor expected, but got size: ${weight.dim()}")
if (null != bias) {
require(bias.nDimension() == 1,
s"SpatialFullConvolution: bias should be 1 dim, but got dim:${bias.nDimension()}")
require(bias.size(1) == weight.size(3) * weight.size(1),
s"SpatialFullConvolution: bias's size equals to weight.size(3) * weight.size(1) " +
s"= ${weight.size(1) * weight.size(3)}, but got size:${bias.size(1)}")
}
val ndim = input.nDimension
val dimf = if (ndim == 4) 2 else 1
val dimh = if (ndim == 4) 3 else 2
val dimw = if (ndim == 4) 4 else 3
require(ndim == 3 || ndim == 4, s"SpatialFullConvolution: 3D or 4D input tensor expected, " +
s"but got size: ${input.dim()}")
val inputHeight = input.size(dimh)
val inputWidth = input.size(dimw)
val outputHeight = (inputHeight - 1) * dH - 2 * padH + kH + adjH
val outputWidth = (inputWidth - 1) * dW - 2 * padW + kW + adjW
    require(outputWidth >= 1 && outputHeight >= 1,
s"SpatialFullConvolution: Given input size: ($nInputPlane x $inputHeight x $inputWidth). " +
s"Calculated output size: ($nOutputPlane x $outputHeight x $outputWidth). " +
s"Output size is too small")
require(input.nDimension() == ndim && input.size(dimf) == nInputPlane,
s"SpatialFullConvolution: input's feature maps should be $nInputPlane, " +
s"but got ${input.size(dimf)}")
if (null != gradOutput) {
require(gradOutput.nDimension() == ndim, s"SpatialFullConvolution: gradOutput should be " +
s"$ndim, but got ${gradOutput.nDimension()}")
require(gradOutput.size(dimf) == nOutputPlane
&& gradOutput.size(dimh) == outputHeight
&& gradOutput.size(dimw) == outputWidth,
s"SpatialFullConvolution: GradOutput's size should be (${nOutputPlane} x ${outputHeight} " +
s"x ${outputWidth}), but got (${gradOutput.size(dimf)} x ${gradOutput.size(dimh)} " +
s"x ${gradOutput.size(dimw)})")
}
}
protected def updateOutputFrame(
input: Tensor[T], output: Tensor[T], weight: Tensor[T],
bias: Tensor[T], columns: Tensor[T],
kW: Int, kH: Int, dW: Int, dH: Int, padW: Int, padH: Int,
nInputPlane: Int,
inputWidth: Int, inputHeight: Int,
nOutputPlane: Int,
outputWidth: Int, outputHeight: Int)(implicit ev: TensorNumeric[T]): Unit = {
val output2d = output.view(nOutputPlane, outputHeight * outputWidth)
// M,N,K are dims of matrix A and B
// (see https://software.intel.com/en-us/node/468480)
val m = weight.size(2)
val n = columns.size(2)
val k = weight.size(1)
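    // With the shapes set up in updateOutput, weight here is a (nInputPlane / nGroup) x
    // (nOutputPlane * kH * kW / nGroup) view, so m = nOutputPlane * kH * kW / nGroup,
    // n = inputHeight * inputWidth and k = nInputPlane / nGroup: the gemm below writes the
    // expanded result into `columns`, which col2im then scatters into the output plane
    // (unless the 1x1 fast path applies).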
// Do GEMM (note: this is a bit confusing because gemm assumes column-major matrices)
DenseTensorBLAS.gemm[T](
'N', 'T',
n, m, k,
ev.one,
input.storage().array(), input.storageOffset() - 1, n,
weight.storage().array(), weight.storageOffset() - 1, m,
ev.zero,
columns.storage().array(), columns.storageOffset() - 1, n
)
if (!_1x1) {
val before = System.nanoTime()
ev.getType() match {
case DoubleType => NNPrimitive.col2imWithDilationDouble(
columns.asInstanceOf[Tensor[Double]], output2d.asInstanceOf[Tensor[Double]],
nOutputPlane, outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case FloatType => NNPrimitive.col2imWithDilationFloat(
columns.asInstanceOf[Tensor[Float]], output2d.asInstanceOf[Tensor[Float]],
nOutputPlane, outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case _ => throw new UnsupportedOperationException(
"SpatialFullConvolution: only Float/Double type supported")
}
col2imTime += System.nanoTime() - before
}
if (null != bias) {
output2d.addr(ev.one, bias, onesBias)
}
}
override def computeOutputShape(inputShape: Shape): Shape = {
val input = inputShape.toSingle().toArray
require(input.length == 4,
s"Deconvolution2D requires 4D input, but got input dim ${input.length}")
val inputHeight = input(2)
val inputWidth = input(3)
val outputHeight = (inputHeight - 1) * dH - 2 * padH + kH + adjH
val outputWidth = (inputWidth - 1) * dW - 2 * padW + kW + adjW
Shape(input(0), nOutputPlane, outputHeight, outputWidth)
}
override def updateOutput(input: Activity): Tensor[T] = {
val inputTensor: Tensor[T] = if (input.isInstanceOf[Table]) {
if (gradInput == null || !gradInput.isInstanceOf[Table]) {
gradInput = T()
}
val targetTensor: Tensor[T] = input.toTable[Tensor[T]](2)
val tDims = targetTensor.dim()
val tH = targetTensor.size(tDims - 1)
val tW = targetTensor.size(tDims)
adjW = calculateAdj(tW, kW, padW, dW)
adjH = calculateAdj(tH, kH, padH, dH)
input.toTable[Tensor[T]](1)
} else {
if (gradInput == null || gradInput.isInstanceOf[Table]) {
gradInput = Tensor[T]()
}
input.toTensor[T]
}
shapeCheck(inputTensor, null, weight, bias, kH, kW, dH, dW, padH, padW, adjH, adjW)
require(inputTensor.isContiguous(), "SpatialFullConvolution: input should be contiguous")
val isBatch = if (inputTensor.nDimension() == 3) {
// Force batch
inputTensor.resize(1, inputTensor.size(1), inputTensor.size(2), inputTensor.size(3))
false
} else {
true
}
val inputHeight = inputTensor.size(3)
val inputWidth = inputTensor.size(4)
val outputHeight = (inputHeight - 1) * dH - 2 * padH + kH + adjH
val outputWidth = (inputWidth - 1) * dW - 2 * padW + kW + adjW
    // Batch size
val batchSize = inputTensor.size(1)
// Resize output
output.resize(batchSize, nOutputPlane, outputHeight, outputWidth)
output.zero()
if (onesBias.dim() != 1 || onesBias.size(1) != outputHeight * outputWidth) {
onesBias.resize(Array(outputHeight * outputWidth)).fill(ev.one)
}
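    // onesBias is a vector of ones with one entry per output position; updateOutputFrame uses it
    // in output2d.addr(ev.one, bias, onesBias) to broadcast each bias value over the whole plane.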
if (_1x1) {
columns.set(inputTensor)
columns.resize(Array(batchSize, nGroup, kW * kH * nOutputPlane / nGroup,
inputHeight * inputWidth))
} else {
columns.resize(Array(batchSize, nGroup, kW * kH * nOutputPlane / nGroup,
inputHeight * inputWidth))
}
// weight's storage might change, so make a view every time
weightMM = weight.view(nGroup, nInputPlane / nGroup,
nOutputPlane * kH * kW / nGroup)
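    // weightMM views the weight as (nGroup, nInputPlane / nGroup, nOutputPlane * kH * kW / nGroup)
    // so that each group slice is a plain 2D matrix ready for the GEMM in updateOutputFrame.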
var elt = 1
// For each element in batch, do:
while(elt <= batchSize) {
      // Matrix multiply per output:
val input_n = inputTensor.select(1, elt)
require(input_n.isContiguous(), s"SpatialFullConvolution: input($elt) should be contiguous")
val output_n = output.select(1, elt)
val columns_n = columns.select(1, elt)
var g = 0
while (g < nGroup) {
val bias_g = if (!noBias) {
bias.narrow(1, g * nOutputPlane / nGroup + 1, nOutputPlane / nGroup)
} else {
null
}
updateOutputFrame(
input_n.narrow(1, g * nInputPlane / nGroup + 1, nInputPlane / nGroup),
output_n.narrow(1, g * nOutputPlane / nGroup + 1, nOutputPlane / nGroup),
weightMM.select(1, g + 1),
bias_g,
columns_n.select(1, g + 1),
kW, kH, dW, dH,
padW, padH,
nInputPlane / nGroup, inputWidth, inputHeight,
nOutputPlane / nGroup, outputWidth, outputHeight)
g += 1
}
elt += 1
}
    // Resize back to 3D tensors if the input was not batched
if(!isBatch) {
output.resize(nOutputPlane, outputHeight, outputWidth)
inputTensor.resize(nInputPlane, inputHeight, inputWidth)
}
output
}
protected def updateGradInputFrame(
gradInput: Tensor[T], gradOutput: Tensor[T],
weight: Tensor[T], columns: Tensor[T],
kW: Int, kH: Int,
dW: Int, dH: Int,
padW: Int, padH: Int,
outputHeight: Int, outputWidth: Int)(implicit ev: TensorNumeric[T]): Unit = {
// Extract columns:
val before = System.nanoTime()
ev.getType() match {
case DoubleType => NNPrimitive.im2colWithDilationDouble(
gradOutput.asInstanceOf[Tensor[Double]], columns.asInstanceOf[Tensor[Double]],
gradOutput.size(1), outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case FloatType => NNPrimitive.im2colWithDilationFloat(
gradOutput.asInstanceOf[Tensor[Float]], columns.asInstanceOf[Tensor[Float]],
gradOutput.size(1), outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case _ => throw new UnsupportedOperationException(
s"SpatialFullConvolution: only Float/Double type supported")
}
im2colTime += System.nanoTime() - before
// M,N,K are dims of matrix A and B
// (see https://software.intel.com/en-us/node/468480)
val m = weight.size(1)
val n = columns.size(2)
val k = weight.size(2)
// Do GEMM (note: this is a bit confusing because gemm assumes column-major matrices)
DenseTensorBLAS.gemm[T](
'N', 'N',
n, m, k,
ev.one,
columns.storage().array(), columns.storageOffset() - 1, n,
weight.storage().array(), weight.storageOffset() - 1, k,
ev.zero,
gradInput.storage().array(), gradInput.storageOffset() - 1, n
)
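    // Net effect: gradInput = weight * im2col(gradOutput), i.e. the backward pass of a transposed
    // convolution is an ordinary convolution of gradOutput with the same weights.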
}
override def updateGradInput(input: Activity, gradOutput: Tensor[T]): Activity = {
val inputTensor: Tensor[T] = if (input.isInstanceOf[Table]) {
input.toTable[Tensor[T]](1)
} else {
input.toTensor[T]
}
val gradInputTensor: Tensor[T] = if (input.isInstanceOf[Table]) {
if (!gradInput.toTable.contains(1)) {
gradInput.toTable(1) = Tensor[T]()
}
gradInput.toTable[Tensor[T]](1)
} else {
gradInput.toTensor[T]
}
shapeCheck(inputTensor, gradOutput, weight, null, kH, kW, dH, dW, padH, padW, adjH, adjW)
val isBatch = if (inputTensor.nDimension() == 3) {
// Force batch
inputTensor.resize(1, inputTensor.size(1), inputTensor.size(2), inputTensor.size(3))
gradOutput.resize(1, gradOutput.size(1), gradOutput.size(2), gradOutput.size(3))
false
} else {
true
}
val inputWidth = inputTensor.size(4)
val inputHeight = inputTensor.size(3)
val outputWidth = (inputWidth - 1) * dW - 2 * padW + kW + adjW
val outputHeight = (inputHeight - 1) * dH - 2 * padH + kH + adjH
    // Batch size
val batchSize = inputTensor.size(1)
gradInputTensor.resizeAs(inputTensor)
gradInputTensor.zero()
if (_1x1) {
columns.set(gradInputTensor)
columns.resize(Array(batchSize, nGroup, kW * kH * nOutputPlane / nGroup,
inputHeight * inputWidth))
} else {
columns.resize(Array(batchSize, nGroup, kW * kH * nOutputPlane / nGroup,
inputHeight * inputWidth))
}
var elt = 1
// For each element in batch, do:
while (elt <= batchSize) {
      // Matrix multiply per sample:
val gradInput_n = gradInputTensor.select(1, elt)
val gradOutput_n = gradOutput.select(1, elt)
val columns_n = columns.select(1, elt)
var g = 0
while (g < nGroup) {
updateGradInputFrame(
gradInput_n.narrow(1, g * nInputPlane / nGroup + 1, nInputPlane / nGroup),
gradOutput_n.narrow(1, g * nOutputPlane / nGroup + 1, nOutputPlane / nGroup),
weightMM.select(1, g + 1),
columns_n.select(1, g + 1),
kW, kH, dW, dH, padW, padH, outputHeight, outputWidth)
g += 1
}
elt += 1
}
    // Resize back to 3D tensors if the input was not batched
if (!isBatch) {
gradOutput.resize(nOutputPlane, outputHeight, outputWidth)
inputTensor.resize(nInputPlane, inputHeight, inputWidth)
gradInputTensor.resize(nInputPlane, inputHeight, inputWidth)
}
if (input.isInstanceOf[Table]) {
val input2 = input.toTable[Tensor[T]](2)
zeroScalar.resizeAs(input2).zero()
ones.resizeAs(input2).fill(ev.one)
val zeroTensor = zeroScalar.view(ones.size()).expandAs(input2)
gradInput.toTable(1) = gradInputTensor
gradInput.toTable(2) = zeroTensor
}
    gradInput
}
protected def calcGradParametersFrame(
input: Tensor[T], gradOutput: Tensor[T], gradWeight: Tensor[T],
gradBias: Tensor[T], columns: Tensor[T],
outputHeight: Int, outputWidth: Int,
scaleW: T, scaleB: T)(implicit ev: TensorNumeric[T]): Unit = {
// Extract columns:
val before = System.nanoTime()
ev.getType() match {
case DoubleType => NNPrimitive.im2colWithDilationDouble(
gradOutput.asInstanceOf[Tensor[Double]], columns.asInstanceOf[Tensor[Double]],
gradOutput.size(1), outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case FloatType => NNPrimitive.im2colWithDilationFloat(
gradOutput.asInstanceOf[Tensor[Float]], columns.asInstanceOf[Tensor[Float]],
gradOutput.size(1), outputHeight, outputWidth,
kH, kW,
padH, padW,
dH, dW,
1, 1
)
case t => throw new NotImplementedError(s"$t is not supported")
}
im2colTime += System.nanoTime() - before
// M,N,K are dims of matrix A and B
// (see https://software.intel.com/en-us/node/468480)
    val n = columns.size(1) // nOutputPlane * kH * kW
var m = input.size(1) // nInputPlane
var k = columns.size(2) // inputHeight * inputWidth
if (scaleW != 0) {
// Do GEMM (note: this is a bit confusing because gemm assumes column-major matrices)
DenseTensorBLAS.gemm[T](
'T', 'N',
n, m, k,
scaleW,
columns.storage().array(), columns.storageOffset() - 1, k,
input.storage().array(), input.storageOffset() - 1, k,
ev.one,
gradWeight.storage().array(), gradWeight.storageOffset() - 1, n
)
}
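    // The GEMM above accumulates gradWeight += scaleW * input * im2col(gradOutput).t, the
    // correlation between the input activations and the unfolded output gradients.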
// Do Bias:
// M,N,K are dims of matrix A and B
// (see https://software.intel.com/en-us/node/468480)
m = gradOutput.size(1)
k = outputHeight * outputWidth
// Do GEMV (note: this is a bit confusing because gemv assumes column-major matrices)
if (null != gradBias && scaleB != 0) {
ev.gemv(
'T',
k, m,
scaleB,
gradOutput.storage().array(), gradOutput.storageOffset() - 1, k,
ones.storage().array(), ones.storageOffset() - 1, 1,
ev.one,
gradBias.storage().array(), gradBias.storageOffset() - 1, 1
)
}
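    // The gemv multiplies gradOutput, viewed as a (planes x outputHeight * outputWidth) matrix,
    // by a vector of ones, summing each plane over all spatial positions, which is exactly the
    // per-plane bias gradient.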
}
override def accGradParameters(input: Activity, gradOutput: Tensor[T]): Unit = {
val inputTensor: Tensor[T] = if (input.isInstanceOf[Table]) {
val targetTensor: Tensor[T] = input.toTable[Tensor[T]](2)
val tDims = targetTensor.dim()
val tH = targetTensor.size(tDims - 1)
val tW = targetTensor.size(tDims)
adjW = calculateAdj(tW, kW, padW, dW)
adjH = calculateAdj(tH, kH, padH, dH)
input.toTable[Tensor[T]](1)
} else {
input.toTensor
}
shapeCheck(inputTensor, gradOutput, gradWeight, gradBias,
kH, kW, dH, dW, padH, padW, adjH, adjW)
val isBatch = if (inputTensor.nDimension() == 3) {
// Force batch
inputTensor.resize(1, inputTensor.size(1), inputTensor.size(2), inputTensor.size(3))
gradOutput.resize(1, gradOutput.size(1), gradOutput.size(2), gradOutput.size(3))
false
} else {
true
}
val inputWidth = inputTensor.size(4)
val inputHeight = inputTensor.size(3)
val outputWidth = (inputWidth - 1) * dW - 2 * padW + kW + adjW
val outputHeight = (inputHeight - 1) * dH - 2 * padH + kH + adjH
    // Batch size
val batchSize = inputTensor.size(1)
gradWeightMMInBatch.resize(Array(batchSize, nGroup, nInputPlane / nGroup,
nOutputPlane * kH * kW / nGroup))
gradWeightMMInBatch.zero()
gradientBiasMT.resize(Array(batchSize, nOutputPlane))
// Define a buffer of ones, for bias accumulation
if (ones.nDimension != 2 || ones.size(1) * ones.size(2) < outputHeight * outputWidth) {
// Resize plane and fill with ones...
ones.resize(outputHeight, outputWidth)
ones.fill(ev.one)
}
if (onesBatch.dim() != 1 || onesBatch.size(1) != batchSize) {
onesBatch.resize(Array(batchSize)).fill(ev.one)
}
var elt = 1
// For each element in batch, do:
while (elt <= batchSize) {
      // Matrix multiply per output:
val input_n = inputTensor.select(1, elt)
val gradOutput_n = gradOutput.select(1, elt)
val column_n = columns.select(1, elt)
var g = 0
while (g < nGroup) {
val gradBias_G = if (noBias) {
null
} else if (isBatch) {
gradientBiasMT.select(1, elt).narrow(1, g * nOutputPlane / nGroup + 1,
nOutputPlane / nGroup)
} else {
gradBias.narrow(1, g * nOutputPlane / nGroup + 1,
nOutputPlane / nGroup)
}
calcGradParametersFrame(
input_n.narrow(1, g * nInputPlane / nGroup + 1, nInputPlane / nGroup),
gradOutput_n.narrow(1, g * nOutputPlane / nGroup + 1, nOutputPlane / nGroup),
gradWeightMMInBatch.select(1, elt).select(1, g + 1),
gradBias_G,
column_n.select(1, g + 1),
outputHeight, outputWidth,
ev.fromType[Double](scaleW),
ev.fromType[Double](scaleB))
g += 1
}
elt += 1
}
val gradView = gradWeightMMInBatch.view(batchSize,
nOutputPlane * nInputPlane * kH * kW / nGroup).t
val grad = gradWeight.view(nOutputPlane * nInputPlane * kH * kW / nGroup)
grad.addmv(ev.one, ev.one, gradView, onesBatch)
if (!noBias) gradBias.addmv(ev.one, ev.one, gradientBiasMT.t, onesBatch)
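    // gradWeightMMInBatch and gradientBiasMT hold one gradient slice per sample; multiplying their
    // (parameters x batchSize) views by the all-ones onesBatch vector sums them over the batch in
    // a single BLAS call instead of a Scala loop.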
    // Resize back to 3D tensors if the input was not batched
if (!isBatch) {
gradOutput.resize(nOutputPlane, outputHeight, outputWidth)
inputTensor.resize(nInputPlane, inputHeight, inputWidth)
}
if (null != wRegularizer) {
wRegularizer.accRegularization(weight, gradWeight, scaleW)
}
if (null != bRegularizer) {
bRegularizer.accRegularization(bias, gradBias, scaleB)
}
}
override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) = {
if (null == bias) {
(Array(this.weight), Array(this.gradWeight))
} else {
(Array(this.weight, this.bias), Array(this.gradWeight, this.gradBias))
}
}
override def clearState() : this.type = {
super.clearState()
columns.set()
ones.set()
zeroScalar.set()
onesBias.set()
onesBatch.set()
weightMM = null
gradientBiasMT.set()
gradWeightMMInBatch.set()
im2colTime = 0L
col2imTime = 0L
this
}
override def equals(obj: Any): Boolean = {
if (!super.equals(obj)) {
return false
}
if (!obj.isInstanceOf[SpatialFullConvolution[T]]) {
return false
}
val other = obj.asInstanceOf[SpatialFullConvolution[T]]
if (this.eq(other)) {
return true
}
nInputPlane == other.nInputPlane &&
nOutputPlane == other.nOutputPlane &&
kW == other.kW &&
kH == other.kH &&
dW == other.dW &&
dH == other.dH &&
padW == other.padW &&
padH == other.padH &&
adjW == other.adjW &&
adjH == other.adjH &&
weight == other.weight &&
bias == other.bias &&
gradWeight == other.gradWeight &&
gradBias == other.gradBias
}
override def hashCode() : Int = {
val seed = 37
var hash = super.hashCode()
hash = hash * seed + nInputPlane.hashCode()
hash = hash * seed + nOutputPlane.hashCode()
hash = hash * seed + kW.hashCode()
hash = hash * seed + kH.hashCode()
hash = hash * seed + dW.hashCode()
hash = hash * seed + dH.hashCode()
hash = hash * seed + padW.hashCode()
hash = hash * seed + padH.hashCode()
hash = hash * seed + adjW.hashCode()
hash = hash * seed + adjH.hashCode()
hash = hash * seed + weight.hashCode()
hash = hash * seed + bias.hashCode()
hash = hash * seed + gradWeight.hashCode()
hash = hash * seed + gradBias.hashCode()
hash
}
override def toString(): String = {
s"${getPrintName}($nInputPlane -> $nOutputPlane, " +
s"$kW x $kH, $dW, $dH, $padW, $padH, $adjW, $adjH)"
}
}
object SpatialFullConvolution extends ModuleSerializable {
def apply[@specialized(Float, Double) T: ClassTag](
nInputPlane: Int,
nOutputPlane: Int,
kW: Int,
kH: Int,
dW: Int = 1,
dH: Int = 1,
padW: Int = 0,
padH: Int = 0,
adjW: Int = 0,
adjH: Int = 0,
nGroup: Int = 1,
noBias: Boolean = false,
wRegularizer: Regularizer[T] = null,
bRegularizer: Regularizer[T] = null
)(implicit ev: TensorNumeric[T]) : SpatialFullConvolution[T] = {
new SpatialFullConvolution[T](nInputPlane, nOutputPlane, kW, kH, dW, dH,
padW, padH, adjW, adjH, nGroup, noBias,
wRegularizer, bRegularizer)
}
override def doLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]) : AbstractModule[Activity, Activity, T] = {
val attrMap = context.bigdlModule.getAttrMap
val intParams = DataConverter.getAttributeValue(context, attrMap.get("intParams")).
asInstanceOf[Array[Int]]
val noBias = DataConverter.getAttributeValue(context, attrMap.get("noBias")).
asInstanceOf[Boolean]
val wRegularizer = DataConverter.getAttributeValue(context, attrMap.get("wRegularizer")).
asInstanceOf[Regularizer[T]]
val bRegularizer = DataConverter.getAttributeValue(context, attrMap.get("bRegularizer")).
asInstanceOf[Regularizer[T]]
val fullConv = SpatialFullConvolution(intParams(0), intParams(1), intParams(2), intParams(3),
intParams(4), intParams(5), intParams(6), intParams(7), intParams(8), intParams(9),
intParams(10), noBias, wRegularizer, bRegularizer)
fullConv
}
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
fullConvBuilder : BigDLModule.Builder)
(implicit ev: TensorNumeric[T]) : Unit = {
val fullConv = context.moduleData.module.asInstanceOf[SpatialFullConvolution[T]]
val intParamsBuilder = AttrValue.newBuilder
val intParams = Array(fullConv.nInputPlane, fullConv.nOutputPlane, fullConv.kW,
fullConv.kH, fullConv.dW, fullConv.dH, fullConv.padW, fullConv.padH, fullConv.adjW,
fullConv.adjH, fullConv.nGroup)
DataConverter.setAttributeValue(context, intParamsBuilder, intParams,
universe.typeOf[Array[Int]])
fullConvBuilder.putAttr("intParams", intParamsBuilder.build)
val biasBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, biasBuilder,
fullConv.noBias, universe.typeOf[Boolean])
fullConvBuilder.putAttr("noBias", biasBuilder.build)
val wRegularizerBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, wRegularizerBuilder,
fullConv.wRegularizer,
ModuleSerializer.regularizerType)
fullConvBuilder.putAttr("wRegularizer", wRegularizerBuilder.build)
val bRegularizerBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context,
bRegularizerBuilder, fullConv.bRegularizer,
ModuleSerializer.regularizerType)
fullConvBuilder.putAttr("bRegularizer", bRegularizerBuilder.build)
}
}
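// Usage sketch (illustrative, not part of the original file; the tensor sizes and the rand() call
// are assumptions): a transposed convolution mapping 3 input planes to 16 output planes with a
// 4 x 4 kernel, stride 2 and padding 1, doubling the spatial resolution per computeOutputShape.
//   val deconv = SpatialFullConvolution[Float](nInputPlane = 3, nOutputPlane = 16,
//     kW = 4, kH = 4, dW = 2, dH = 2, padW = 1, padH = 1)
//   val input = Tensor[Float](8, 3, 16, 16).rand() // batch of 8, 3 x 16 x 16
//   val output = deconv.forward(input)             // expected shape: 8 x 16 x 32 x 32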
|
wzhongyuan/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/SpatialFullConvolution.scala
|
Scala
|
apache-2.0
| 29,702 |
package oauthorize.test
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
import play.api.libs.json._
import oauth2.spec.AccessTokenErrors._
import oauth2.spec.Req._
import oauth2.spec.GrantTypes
import oauthorize.model._
import org.apache.commons.codec.binary.Base64
import java.net.URLDecoder
import java.net.URLEncoder
import play.api.Play
import oauthorize.sample.Oauth
import play.api.libs.ws._
import com.ning.http.client.Realm
import play.api.libs.ws.WS.WSRequestHolder
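// These specs exercise the OAuth2 client_credentials grant end to end against the sample app:
// unregistered or wrongly authenticated clients get 401 invalid_client, a bad scope or an
// unsupported grant type gets 400, and a valid request yields a bearer access token plus a
// refresh token that can itself be exchanged for a new access token.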
@RunWith(classOf[JUnitRunner])
class ClientCredentialsApplicationSpec extends PlaySpecification with TestHelpers {
"Application" should {
s"send 401 if unregistered client" in new WithServer(port = 3333) {
val resp = postfWoRegisteredClient("/oauth/token", grant_type -> GrantTypes.client_credentials)
resp.status must equalTo(401)
(resp.json \ "error") must equalTo(JsString(invalid_client))
(resp.json \ "error_description") must equalTo(JsString("unregistered client"))
}
s"send 401 if bad client credentials" in new WithServer(port = 3333) {
val client = Some(Oauth2Client("the_client", Oauth.hashClientSecret(SecretInfo("wrongpass")), Seq("global"), Seq(GrantTypes.authorization_code), RedirectUri, Seq(), 3600, 3600, None, false))
val resp = postf("/oauth/token", grant_type -> GrantTypes.client_credentials)(client)
resp.status must equalTo(401)
(resp.json \ "error") must equalTo(JsString(invalid_client))
(resp.json \ "error_description") must equalTo(JsString("bad credentials"))
}
"respond with 400 if incorrect scope" in new WithServer(port = 3333) {
import oauth2.spec.AccessTokenResponseParams._
val c = Oauth.storeClient(Oauth2Client("the_client", hash("pass"),
Seq("global"), Seq(GrantTypes.client_credentials, GrantTypes.refresh_token),
RedirectUri, Seq(), 3600, 3600, None, true))
val resp = postf("/oauth/token", grant_type -> GrantTypes.client_credentials, "scope" -> "nonexistent")(Some(c))
resp.status must equalTo(400)
(resp.json \ "error") must equalTo(JsString(invalid_scope))
(resp.json \ "error_description") must equalTo(JsString("unsupported scope"))
}
"respond with 400 if client_credentials unsupported" in new WithServer(port = 3333) {
import oauth2.spec.AccessTokenResponseParams._
val c = Oauth.storeClient(Oauth2Client("the_client", hash("pass"),
Seq("global"), Seq(GrantTypes.authorization_code),
RedirectUri, Seq(), 3600, 3600, None, true))
val resp = postf("/oauth/token", grant_type -> GrantTypes.client_credentials, "scope" -> "global")(Some(c))
resp.status must equalTo(400)
(resp.json \ "error") must equalTo(JsString(unsupported_grant_type))
(resp.json \ "error_description") must equalTo(JsString("unsupported grant type"))
}
"respond with 200 and the access token if request is correct" in new WithServer(port = 3333) {
import oauth2.spec.AccessTokenResponseParams._
val c = Oauth.storeClient(Oauth2Client("the_client", hash("pass"),
Seq("global"), Seq(GrantTypes.client_credentials, GrantTypes.refresh_token),
RedirectUri, Seq(), 3600, 3600, None, true))
val resp = postf("/oauth/token", grant_type -> GrantTypes.client_credentials, "scope" -> "global")(Some(c))
resp.status must equalTo(200)
(resp.json \ access_token).as[String] must beMatching(".{53}")
(resp.json \ refresh_token).as[String] must beMatching(".{53}")
(resp.json \ token_type).as[String] must equalTo("bearer")
(resp.json \ scope).as[String] must equalTo("global")
(resp.json \ expires_in).as[Int] must beGreaterThan(0)
}
"respond with 200 and correct access token to refresh token created by client_credentials" in new WithServer(port = 3333) {
import oauth2.spec.AccessTokenResponseParams._
val c = Oauth.storeClient(Oauth2Client("the_client", hash("pass"),
Seq("global"), Seq(GrantTypes.client_credentials, GrantTypes.refresh_token),
RedirectUri, Seq(), 3600, 3600, None, true))
val resp = postf("/oauth/token", grant_type -> GrantTypes.client_credentials, "scope" -> "global")(Some(c))
val refToken = (resp.json \ refresh_token).as[String]
resp.status must equalTo(200)
val accessResp = postf1("/oauth/token", refresh_token -> refToken, grant_type -> GrantTypes.refresh_token)
accessResp.status must equalTo(200)
(accessResp.json \ access_token).as[String] must beMatching(".{53}")
(accessResp.json \ token_type).as[String] must equalTo("bearer")
(accessResp.json \ scope).as[String] must equalTo("global")
(accessResp.json \ expires_in).as[Int] must beGreaterThan(0)
}
}
}
|
adaptorel/oauthorize
|
oauthorize-play-sample/test/ClientCredentialsApplicationSpec.scala
|
Scala
|
apache-2.0
| 4,829 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600a.v3
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalBigDecimal}
import uk.gov.hmrc.ct.ct600.v3.calculations.LoansToParticipatorsCalculator
import uk.gov.hmrc.ct.ct600a.v3.retriever.CT600ABoxRetriever
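// A20 is a calculated box: its value comes from LoansToParticipatorsCalculator.calculateA20,
// fed by A15, the loans-to-participators data and CP2 retrieved from the box retriever.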
case class A20(value: Option[BigDecimal]) extends CtBoxIdentifier(name = "A20 - Tax chargeable on loans - (Tax due before any relief for loans repaid, released, or written off after the end of the period)")
with CtOptionalBigDecimal
object A20 extends Calculated[A20, CT600ABoxRetriever] with LoansToParticipatorsCalculator {
override def calculate(fieldValueRetriever: CT600ABoxRetriever): A20 = {
calculateA20(fieldValueRetriever.a15(), fieldValueRetriever.loansToParticipators(), fieldValueRetriever.cp2())
}
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600a/v3/A20.scala
|
Scala
|
apache-2.0
| 1,374 |