code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
---|---|---|---|---|---|
package services
import models._
import play.api.Logger
import play.api.libs.json.{JsArray, Json}
/**
* Created by meln1k on 23/08/14.
 */
object LikeService {
def getUsersLikedByUser(user: VkUser): Seq[(VkUser, Int)] = {
???
}
def getUsersWhoLikedUser(user: VkUser): Seq[(VkUser, Int)] = {
val userPhotos = EntityService.getUserPhotos(user).filter(_.likesCount != 0)
val userPosts = EntityService.getUserPosts(user).filter(_.likesCount != 0)
val userPhotoLiked = userPhotos flatMap EntityService.getLikedUsers
val userPostsLiked = userPosts flatMap EntityService.getLikedUsers
(userPhotoLiked ++ userPostsLiked).groupBy(l => l).map(t => (t._1, t._2.length)).toVector.sortBy(-_._2)
}
}
| meln1k/like-o-meter | app/services/LikeService.scala | Scala | apache-2.0 | 727 |
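The ranking in getUsersWhoLikedUser above boils down to a groupBy/count/sort idiom. The following is a minimal, self-contained sketch of that idiom with hypothetical data; it is an illustration only and not part of the original repository.
// Sketch of the frequency-count idiom used in getUsersWhoLikedUser (hypothetical data).
object LikeCountSketch extends App {
  val likers: Seq[String] = Seq("alice", "bob", "alice", "carol", "alice", "bob")
  // Group identical elements, count each group, and sort by descending count.
  val ranked: Vector[(String, Int)] =
    likers.groupBy(identity).map { case (user, hits) => (user, hits.length) }.toVector.sortBy(-_._2)
  println(ranked) // Vector((alice,3), (bob,2), (carol,1))
}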
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import FloodGate.Activate
import akka.actor._
/**
* Holds messages until an Activate is received.
*/
class FloodGate(
target: ActorRef
) extends Actor
with ActorLogging
with Stash {
override def receive = waiting
val waiting: Receive = {
case Activate =>
context.become(pass)
unstashAll()
case message =>
stash()
}
val pass: Receive = {
case message => target forward message
}
}
object FloodGate {
object Activate
def apply(target: ActorRef): Props = Props(new FloodGate(target))
}
| VC1995/ensime-server | core/src/main/scala/org/ensime/core/FloodGate.scala | Scala | gpl-3.0 | 701 |
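A hedged usage sketch of the FloodGate actor above (not part of ensime-server, and assuming the class is on the classpath): messages sent before Activate are stashed and delivered to the target only after activation.
// Hypothetical demo of FloodGate: stash until Activate, then forward everything.
import akka.actor.{Actor, ActorSystem, Props}
import org.ensime.core.FloodGate

object FloodGateDemo extends App {
  val system = ActorSystem("floodgate-demo")

  // A trivial target actor that just prints what it receives.
  val printer = system.actorOf(Props(new Actor {
    def receive = { case msg => println(s"received: $msg") }
  }))

  val gate = system.actorOf(FloodGate(printer))
  gate ! "first"            // stashed
  gate ! "second"           // stashed
  gate ! FloodGate.Activate // releases the stash; both messages reach printer
  system.terminate()
}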
package scala.lms
package epfl
package test3
import test1._
import test2._
trait MatchProg { this: Matching with Extractors =>
case class Success(x: Int)
implicit def successTyp: Typ[Success]
implicit def intTyp: Typ[Int]
implicit def stringTyp: Typ[String]
implicit def listTyp[T:Typ]: Typ[List[T]]
implicit def consTyp[T:Typ]: Typ[::[T]]
object SuccessR {
def apply(x: Rep[Int]): Rep[Success] = construct(classOf[Success], Success.apply, x)
def unapply(x: Rep[Success]): Option[Rep[Int]] = deconstruct(classOf[Success], Success.unapply, x)
}
object :!: {
def apply[A:Typ](x: Rep[A], xs: Rep[List[A]]) = construct(classOf[::[A]], (::.apply[A] _).tupled, tuple(x, xs))
// def unapply[A](x: Rep[::[A]]) = deconstruct2(classOf[::[A]], ::.unapply[A], x) // doesn't work: hd is private in :: !
def unapply[A:Typ](x: Rep[List[A]]): Option[(Rep[A], Rep[List[A]])] =
deconstruct2(classOf[::[A]].asInstanceOf[Class[List[A]]], (x: List[A]) => Some(x.head, x.tail), x)
}
def infix_unapply(o: SuccessR.type, x: Rep[Success]): Option[Rep[Int]] = deconstruct(classOf[Success], Success.unapply, x)
// doesn't work...
def test(x: Rep[Success]): Rep[String] = x switch {
case SuccessR(x) if x guard 7 => unit("yes")
} orElse {
case SuccessR(x) => unit("maybe")
} orElse {
case _ => unit("no")
}
def testXX(x: Rep[Success]): Rep[String] = _match(x)({
case SuccessR(x) if x guard 7 => unit("yes")
},{
case SuccessR(x) => unit("maybe")
},{
case _ => unit("no")
})
}
trait MatchProgExp0 extends common.BaseExp with MatchProg { this: Matching with Extractors =>
implicit def successTyp: Typ[Success] = ManifestTyp(implicitly)
implicit def intTyp: Typ[Int] = ManifestTyp(implicitly)
implicit def stringTyp: Typ[String] = ManifestTyp(implicitly)
implicit def listTyp[T:Typ]: Typ[List[T]] = {
implicit val ManifestTyp(m) = typ[T]
ManifestTyp(implicitly)
}
implicit def consTyp[T:Typ]: Typ[::[T]] = {
implicit val ManifestTyp(m) = typ[T]
ManifestTyp(implicitly)
}
}
class TestMatch extends FileDiffSuite {
val prefix = home + "test-out/epfl/test3-"
/*
println {
object TestMatchString extends TestMatch with Matching with Extractors with MatchingExtractorsRepString
import TestMatchString._
test(SuccessR("7"))
}
*/
def testMatch1 = {
withOutFile(prefix+"match1") {
object MatchProgExp extends MatchProgExp0 with Matching with Extractors
with MatchingExtractorsExp with FunctionsExpUnfoldAll with Control
with FlatResult with DisableCSE
import MatchProgExp._
val r = reifyEffects(test(fresh[Success]))
println(globalDefs.mkString("\n"))
println(r)
val p = new ExtractorsGraphViz { val IR: MatchProgExp.type = MatchProgExp }
p.emitDepGraph(result[Unit](r), prefix+"match1-dot")
}
assertFileEqualsCheck(prefix+"match1")
assertFileEqualsCheck(prefix+"match1-dot")
}
def testMatch2 = {
withOutFile(prefix+"match2") {
object MatchProgExp extends MatchProgExp0 with Matching with Extractors
with MatchingExtractorsExpOpt with FunctionsExpUnfoldAll with Control
with FlatResult
import MatchProgExp._
val r = reifyEffects(test(fresh[Success]))
println(globalDefs.mkString("\n"))
println(r)
val p = new ExtractorsGraphViz { val IR: MatchProgExp.type = MatchProgExp }
p.emitDepGraph(result[Unit](r), prefix+"match2-dot")
}
assertFileEqualsCheck(prefix+"match2")
assertFileEqualsCheck(prefix+"match2-dot")
}
}
| astojanov/virtualization-lms-core | test-src/epfl/test3-parsers/TestMatch.scala | Scala | bsd-3-clause | 3,640 |
package xyz.hyperreal
package object rtcep
{
val EOF = '\u0004'
}
| edadma/rtcep | src/main/scala/rtcep.scala | Scala | mit | 68 |
package hammock
package hi
import java.util.Base64
import cats._
import cats.implicits._
sealed trait Auth
object Auth {
case class BasicAuth(user: String, pass: String) extends Auth
case class OAuth2Bearer(token: String) extends Auth
case class OAuth2Token(token: String) extends Auth
implicit val authShow = new Show[Auth] {
def show(a: Auth): String = a match {
case BasicAuth(user, pass) =>
val toEncode = s"$user:$pass".getBytes
val encoded = Base64.getEncoder.encode(toEncode)
s"Basic ${new String(encoded)}"
case OAuth2Bearer(token) => s"Bearer $token"
case OAuth2Token(token) => s"token $token"
}
}
implicit val authEq = new Eq[Auth] {
def eqv(a: Auth, b: Auth): Boolean = (a, b) match {
case (BasicAuth(u1, p1), BasicAuth(u2, p2)) => u1 === u2 && p1 === p2
case (OAuth2Bearer(t1), OAuth2Bearer(t2)) => t1 === t2
case (OAuth2Token(t1), OAuth2Token(t2)) => t1 === t2
case _ => false
}
}
}
| pepegar/hammock | core/src/main/scala/hammock/hi/Auth.scala | Scala | mit | 1,062 |
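An illustrative sketch (not from the hammock repository) of rendering the Auth values above through the Show instance, e.g. to build an Authorization header value:
// Hypothetical example: render Auth values via the implicit Show[Auth] defined above.
import cats.implicits._
import hammock.hi.Auth
import hammock.hi.Auth._

object AuthShowDemo extends App {
  val basic: Auth = BasicAuth("user", "s3cret")
  val bearer: Auth = OAuth2Bearer("abc123")

  println(basic.show)  // Basic dXNlcjpzM2NyZXQ=
  println(bearer.show) // Bearer abc123
}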
/*
* Copyright 2015 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.circe.middleware
import cats.data._
import cats.effect._
import io.circe._
import io.circe.syntax._
import org.http4s._
import org.http4s.circe._
import org.http4s.headers.Connection
import org.typelevel.ci._
object JsonDebugErrorHandler {
private[this] val messageFailureLogger =
org.log4s.getLogger("org.http4s.circe.middleware.jsondebugerrorhandler.message-failures")
private[this] val serviceErrorLogger =
org.log4s.getLogger("org.http4s.circe.middleware.jsondebugerrorhandler.service-errors")
// Can be made parametric once my other PR is merged.
def apply[F[_]: Concurrent, G[_]](
service: Kleisli[F, Request[G], Response[G]],
redactWhen: CIString => Boolean = Headers.SensitiveHeaders.contains,
): Kleisli[F, Request[G], Response[G]] =
Kleisli { req =>
import cats.syntax.applicative._
import cats.syntax.applicativeError._
implicit def entEnc[M[_]]: EntityEncoder.Pure[JsonErrorHandlerResponse[M]] =
JsonErrorHandlerResponse.entEnc[M](redactWhen)
service
.run(req)
.handleErrorWith {
case mf: MessageFailure =>
messageFailureLogger.debug(mf)(
s"""Message failure handling request: ${req.method} ${req.pathInfo} from ${req.remoteAddr
.getOrElse("<unknown>")}"""
)
val firstResp = mf.toHttpResponse[G](req.httpVersion)
Response[G](
status = firstResp.status,
httpVersion = firstResp.httpVersion,
headers = firstResp.headers.redactSensitive(redactWhen),
).withEntity(JsonErrorHandlerResponse[G](req, mf)).pure[F]
case t =>
serviceErrorLogger.error(t)(
s"""Error servicing request: ${req.method} ${req.pathInfo} from ${req.remoteAddr
.getOrElse("<unknown>")}"""
)
Response[G](
Status.InternalServerError,
req.httpVersion,
Headers(
Connection(ci"close")
),
)
.withEntity(JsonErrorHandlerResponse[G](req, t))
.pure[F]
}
}
private final case class JsonErrorHandlerResponse[F[_]](
req: Request[F],
caught: Throwable,
)
private object JsonErrorHandlerResponse {
def entEnc[F[_]](
redactWhen: CIString => Boolean
): EntityEncoder.Pure[JsonErrorHandlerResponse[F]] =
jsonEncoderOf(
encoder(redactWhen)
)
def encoder[F[_]](
redactWhen: CIString => Boolean
): Encoder[JsonErrorHandlerResponse[F]] =
(a: JsonErrorHandlerResponse[F]) =>
Json.obj(
"request" -> encodeRequest(a.req, redactWhen),
"throwable" -> encodeThrowable(a.caught),
)
}
private def encodeRequest[F[_]](req: Request[F], redactWhen: CIString => Boolean): Json =
Json
.obj(
"method" -> req.method.name.asJson,
"uri" -> Json
.obj(
"scheme" -> req.uri.scheme.map(_.value).asJson,
"authority" -> req.uri.authority
.map(auth =>
Json
.obj(
"host" -> auth.host.toString().asJson,
"port" -> auth.port.asJson,
"user_info" -> auth.userInfo
.map(_.toString())
.asJson,
)
.dropNullValues
)
.asJson,
"path" -> req.uri.path.renderString.asJson,
"query" -> req.uri.query.multiParams.asJson,
)
.dropNullValues,
"headers" -> req.headers
.redactSensitive(redactWhen)
.headers
.map { h =>
Json.obj(
"name" -> h.name.toString.asJson,
"value" -> h.value.asJson,
)
}
.asJson,
"path_info" -> req.pathInfo.renderString.asJson,
"remote_address" -> req.remoteAddr.toString.asJson,
"http_version" -> req.httpVersion.toString.asJson,
)
.dropNullValues
private def encodeThrowable(a: Throwable): Json =
Json
.obj(
"message" -> Option(a.getMessage).asJson,
"stack_trace" -> Option(a.getStackTrace())
.map(_.toList)
.map(_.map(stackTraceElem => stackTraceElem.toString))
.asJson,
"localized_message" ->
Option(a.getLocalizedMessage()).asJson,
"cause" -> Option(a.getCause())
.map(encodeThrowable(_))
.asJson,
"suppressed" -> Option(a.getSuppressed())
.map(_.toList.map(encodeThrowable(_)))
.asJson,
"class_name" -> Option(a.getClass())
.flatMap(c => Option(c.getName()))
.asJson,
)
.dropNullValues
}
| http4s/http4s | circe/src/main/scala/org/http4s/circe/middleware/JsonDebugErrorHandler.scala | Scala | apache-2.0 | 5,426 |
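A hedged sketch (not from the http4s documentation) of wrapping an HttpApp with the middleware above, assuming a recent http4s build with cats-effect 3 and http4s-dsl on the classpath:
// Hypothetical usage: errors raised by the routes are logged and rendered as a JSON debug body.
import cats.effect.IO
import org.http4s._
import org.http4s.dsl.io._
import org.http4s.implicits._
import org.http4s.circe.middleware.JsonDebugErrorHandler

object DebugHandlerDemo {
  val routes: HttpApp[IO] = HttpRoutes.of[IO] {
    case GET -> Root / "boom" => IO.raiseError(new RuntimeException("boom"))
    case GET -> Root / "ok"   => Ok("fine")
  }.orNotFound

  // Exceptions thrown while servicing a request become JSON error responses.
  val wrapped: HttpApp[IO] = JsonDebugErrorHandler(routes)
}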
package katas.scala.pascaltriangle
import org.junit.Test
import org.scalatest.Matchers
/**
* @author DKandalov
*/
class PascalTriangle1 extends Matchers {
@Test def shouldComputeElementsOfPascalsTriangle() {
pascal(0, -1) should equal(0)
pascal(0, 0) should equal(1)
pascal(0, 1) should equal(0)
pascal(1, 0) should equal(1)
pascal(1, 1) should equal(1)
pascal(2, 0) should equal(1)
pascal(2, 1) should equal(2)
pascal(2, 2) should equal(1)
pascal(3, 0) should equal(1)
pascal(3, 1) should equal(3)
pascal(3, 2) should equal(3)
pascal(3, 3) should equal(1)
}
def pascal(depth: Int, pos: Int): Int = depth match {
case 0 => if (pos == 0) 1 else 0
case 1 => if (pos == 0 || pos == 1) 1 else 0
case _ => pascal(depth - 1, pos - 1) + pascal(depth - 1, pos)
}
@Test def shouldComputePascalTriangle() {
// TODO finish
pascalTriangle(0) should equal(List())
pascalTriangle(1) should equal(List(List(1)))
pascalTriangle(2) should equal(List(
List(1),
List(1, 1)))
pascalTriangle(3) should equal(List(
List(1),
List(1, 1),
List(1, 2, 1)))
pascalTriangle(4) should equal(List(
List(1),
List(1, 1),
List(1, 2, 1),
List(1, 3, 3, 1)))
pascalTriangle(5) should equal(List(
List(1),
List(1, 1),
List(1, 2, 1),
List(1, 3, 3, 1),
List(1, 4, 6, 4, 1)))
}
def pascalTriangle(depth: Int): List[List[Int]] = depth match {
case 0 => List()
case 1 => pascalTriangle(depth - 1) ::: List(List(1))
case 2 => pascalTriangle(depth - 1) ::: List(List(1, 1))
case _ => val listOfRows = pascalTriangle(depth - 1)
listOfRows ::: List(computeNextRow(listOfRows.last)) // was confused with nested lists
}
def computeNextRow(list: List[Int]): List[Int] = 1 :: sumupList(list) ::: List(1) // spent some time figuring out that I could do this
def sumupList(list: List[Int]): List[Int] = list match {
case List(a, b) => List(a + b)
case _ => (list.head + list.tail.head) :: sumupList(list.tail)
}
}
| dkandalov/katas | scala/src/katas/scala/pascaltriangle/PascalTriangle1.scala | Scala | unlicense | 2,094 |
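An equivalent way to derive the next Pascal row, shown as a small sketch that is not part of the kata above: pad the previous row with zeros on both sides and sum adjacent pairs.
// Alternative computeNextRow: zip the zero-padded row with itself shifted by one.
object NextRowSketch extends App {
  def nextRow(row: List[Int]): List[Int] =
    (0 :: row).zip(row :+ 0).map { case (a, b) => a + b }

  println(nextRow(List(1, 3, 3, 1))) // List(1, 4, 6, 4, 1)
}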
package mesosphere.marathon
package state
import mesosphere.UnitTest
import mesosphere.marathon.state.VersionInfo.FullVersionInfo
class VersionInfoTest extends UnitTest {
"VersionInfo" should {
"NoVersion upgrades to FullVersion on a scaling change" in {
Given("NoVersion")
val versionInfo = VersionInfo.NoVersion
val versionOfNoVersion = versionInfo.version
When("Applying a scaling change")
val newVersion = versionInfo.withScaleChange(Timestamp(1))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(1),
lastScalingAt = Timestamp(1),
lastConfigChangeAt = versionOfNoVersion
)
)
}
"NoVersion upgrades to FullVersion on a config change" in {
Given("NoVersion")
val versionInfo = VersionInfo.NoVersion
When("Applying a config change")
val newVersion = versionInfo.withConfigChange(Timestamp(1))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(1),
lastScalingAt = Timestamp(1),
lastConfigChangeAt = Timestamp(1)
)
)
}
"OnlyVersion upgrades to FullVersion on a scaling change" in {
Given("An OnlyVersion info")
val versionInfo = VersionInfo.OnlyVersion(Timestamp(1))
When("Applying a scaling change")
val newVersion = versionInfo.withScaleChange(Timestamp(2))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(2),
lastConfigChangeAt = Timestamp(1)
)
)
}
"OnlyVersion upgrades to FullVersion on a restart change" in {
Given("An OnlyVersion info")
val versionInfo = VersionInfo.OnlyVersion(Timestamp(1))
When("Applying a restart change")
val newVersion = versionInfo.withRestartChange(Timestamp(2))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(1),
lastConfigChangeAt = Timestamp(2)
)
)
}
"OnlyVersion upgrades to FullVersion on a config change" in {
Given("An OnlyVersion info")
val versionInfo = VersionInfo.OnlyVersion(Timestamp(1))
When("Applying a config change")
val newVersion = versionInfo.withConfigChange(Timestamp(2))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(2),
lastConfigChangeAt = Timestamp(2)
)
)
}
"A scaling change on FullVersion only changes scalingAt" in {
Given("A FullVersionInfo")
val versionInfo = VersionInfo.FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(2),
lastConfigChangeAt = Timestamp(1)
)
When("Applying a scaling change")
val newVersion = versionInfo.withScaleChange(Timestamp(3))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(3),
lastScalingAt = Timestamp(3),
lastConfigChangeAt = Timestamp(1)
)
)
}
"A restart change on FullVersion only changes lastConfigChangeAt" in {
Given("A FullVersionInfo")
val versionInfo = VersionInfo.FullVersionInfo(
version = Timestamp(1),
lastScalingAt = Timestamp(1),
lastConfigChangeAt = Timestamp(1)
)
When("Applying a restart change")
val newVersion = versionInfo.withRestartChange(Timestamp(2))
Then("lastConfigChangeAt is updated while lastScalingAt is not")
newVersion should be(
FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(1),
lastConfigChangeAt = Timestamp(2)
)
)
newVersion.lastConfigChangeVersion should equal(Timestamp(2))
}
"A config change on FullVersion changes scalingAt, lastConfigChangeAt" in {
Given("A FullVersionInfo")
val versionInfo = VersionInfo.FullVersionInfo(
version = Timestamp(2),
lastScalingAt = Timestamp(2),
lastConfigChangeAt = Timestamp(1)
)
When("Applying a config change")
val newVersion = versionInfo.withConfigChange(Timestamp(3))
Then("The version info is promoted to a FullVersion")
newVersion should be(
FullVersionInfo(
version = Timestamp(3),
lastScalingAt = Timestamp(3),
lastConfigChangeAt = Timestamp(3)
)
)
}
}
}
| mesosphere/marathon | src/test/scala/mesosphere/marathon/state/VersionInfoTest.scala | Scala | apache-2.0 | 4,848 |
import leon.lang._
object Postconditions {
def passing_1(f: BigInt => BigInt, x: BigInt) = {
require(x >= 0 && forall((a: BigInt) => f(a) < 0))
x
} ensuring { res => forall((a: BigInt) => res > f(a)) }
def passing_2(f: BigInt => BigInt, x: BigInt) = {
require(x >= 0 && forall((a: BigInt) => a > 0 ==> f(a) < 0))
x
} ensuring { res => forall((a: BigInt) => a > 0 ==> res > f(a)) }
def passing_3(f: BigInt => BigInt) = {
require(forall((a: BigInt) => f(a) > 0))
f
} ensuring { res => forall((a: BigInt) => res(a) > 0) }
def passing_4() = {
(x: BigInt) => x + 1
} ensuring { res => forall((a: BigInt) => res(a) > a) }
}
| epfl-lara/leon | testcases/verification/quantification/valid/Postcondition.scala | Scala | gpl-3.0 | 667 |
/*
* Copyright Β© 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.util.jsonld
import com.github.jsonldjava.core.{JsonLdOptions, JsonLdProcessor}
import com.github.jsonldjava.utils.JsonUtils
import org.knora.webapi._
import org.knora.webapi.messages.store.triplestoremessages.StringLiteralV2
import org.knora.webapi.util.IriConversions._
import org.knora.webapi.util.{JavaUtil, SmartIri, StringFormatter}
/**
* Constant strings used in JSON-LD.
*/
object JsonLDConstants {
val CONTEXT: String = "@context"
val ID: String = "@id"
val TYPE: String = "@type"
val GRAPH: String = "@graph"
val LANGUAGE: String = "@language"
val VALUE: String = "@value"
}
/**
* Represents a value in a JSON-LD document.
*/
sealed trait JsonLDValue extends Ordered[JsonLDValue] {
/**
* Converts this JSON-LD value to a Scala object that can be passed to [[org.knora.webapi.util.JavaUtil.deepScalaToJava]],
* whose return value can then be passed to the JSON-LD Java library.
*/
def toAny: Any
}
/**
* Represents a string value in a JSON-LD document.
*
* @param value the underlying string.
*/
case class JsonLDString(value: String) extends JsonLDValue {
override def toAny: Any = value
override def compare(that: JsonLDValue): Int = {
that match {
case thatStr: JsonLDString => value.compare(thatStr.value)
case _ => 0
}
}
}
/**
* Represents an integer value in a JSON-LD document.
*
* @param value the underlying integer.
*/
case class JsonLDInt(value: Int) extends JsonLDValue {
override def toAny: Any = value
override def compare(that: JsonLDValue): Int = {
that match {
case thatInt: JsonLDInt => value.compare(thatInt.value)
case _ => 0
}
}
}
/**
* Represents a boolean value in a JSON-LD document.
*
* @param value the underlying boolean value.
*/
case class JsonLDBoolean(value: Boolean) extends JsonLDValue {
override def toAny: Any = value
override def compare(that: JsonLDValue): Int = {
that match {
case thatBoolean: JsonLDBoolean => value.compare(thatBoolean.value)
case _ => 0
}
}
}
/**
* Represents a JSON object in a JSON-LD document.
*
* @param value a map of keys to JSON-LD values.
*/
case class JsonLDObject(value: Map[String, JsonLDValue]) extends JsonLDValue {
override def toAny: Map[String, Any] = value.map {
case (k, v) => (k, v.toAny)
}
/**
* Returns `true` if this JSON-LD object represents an IRI value.
*/
def isIri: Boolean = {
value.keySet == Set(JsonLDConstants.ID)
}
/**
* Returns `true` if this JSON-LD object represents a string with a language tag.
*/
def isStringWithLang: Boolean = {
value.keySet == Set(JsonLDConstants.VALUE, JsonLDConstants.LANGUAGE)
}
/**
* Returns `true` if this JSON-LD object represents a datatype value.
*/
def isDatatypeValue: Boolean = {
value.keySet == Set(JsonLDConstants.TYPE, JsonLDConstants.VALUE)
}
/**
* Converts an IRI value from its JSON-LD object value representation, validating it using the specified validation
* function.
*
* @param validationFun the validation function.
* @tparam T the type returned by the validation function.
* @return the return value of the validation function.
*/
def toIri[T](validationFun: (String, => Nothing) => T): T = {
if (isIri) {
val id: IRI = requireString(JsonLDConstants.ID)
validationFun(id, throw BadRequestException(s"Invalid IRI: $id"))
} else {
throw BadRequestException(s"This JSON-LD object does not represent an IRI: $this")
}
}
/**
* Converts a datatype value from its JSON-LD object value representation, validating it using the specified validation
* function.
*
* @param expectedDatatype the IRI of the expected datatype.
* @param validationFun the validation function.
* @tparam T the type returned by the validation function.
* @return the return value of the validation function.
*/
def toDatatypeValueLiteral[T](expectedDatatype: SmartIri, validationFun: (String, => Nothing) => T): T = {
if (isDatatypeValue) {
val datatype: IRI = requireString(JsonLDConstants.TYPE)
if (datatype != expectedDatatype.toString) {
throw BadRequestException(s"Expected datatype value of type <$expectedDatatype>, found <$datatype>")
}
val value: String = requireString(JsonLDConstants.VALUE)
validationFun(value, throw BadRequestException(s"Invalid datatype value literal: $value"))
} else {
throw BadRequestException(s"This JSON-LD object does not represent a datatype value: $this")
}
}
/**
* Gets a required string value of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not a string.
*
* @param key the key of the required value.
* @return the value.
*/
def requireString(key: String): String = {
value.getOrElse(key, throw BadRequestException(s"No $key provided")) match {
case JsonLDString(str) => str
case other => throw BadRequestException(s"Invalid $key: $other (string expected)")
}
}
/**
* Gets a required string value of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not a string.
* Then parses the value with the specified validation function (see [[org.knora.webapi.util.StringFormatter]]
* for examples of such functions), throwing [[BadRequestException]] if the validation fails.
*
* @param key the key of the required value.
* @param validationFun a validation function that takes two arguments: the string to be validated, and a function
* that throws an exception if the string is invalid. The function's return value is the
* validated string, possibly converted to another type T.
* @tparam T the type of the validation function's return value.
* @return the return value of the validation function.
*/
def requireStringWithValidation[T](key: String, validationFun: (String, => Nothing) => T): T = {
val str: String = requireString(key)
validationFun(str, throw BadRequestException(s"Invalid $key: $str"))
}
/**
* Gets an optional string value of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not a string.
*
* @param key the key of the optional value.
* @return the value, or `None` if not found.
*/
def maybeString(key: String): Option[String] = {
value.get(key).map {
case JsonLDString(str) => str
case other => throw BadRequestException(s"Invalid $key: $other (string expected)")
}
}
/**
* Gets an optional string value of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not a string. Parses the value with the specified validation
* function (see [[org.knora.webapi.util.StringFormatter]] for examples of such functions), throwing
* [[BadRequestException]] if the validation fails.
*
* @param key the key of the optional value.
* @param validationFun a validation function that takes two arguments: the string to be validated, and a function
* that throws an exception if the string is invalid. The function's return value is the
* validated string, possibly converted to another type T.
* @tparam T the type of the validation function's return value.
* @return the return value of the validation function, or `None` if the value was not present.
*/
def maybeStringWithValidation[T](key: String, validationFun: (String, => Nothing) => T): Option[T] = {
maybeString(key).map {
str => validationFun(str, throw BadRequestException(s"Invalid $key: $str"))
}
}
/**
* Gets a required IRI value (contained in a JSON-LD object) of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not a JSON-LD object.
* Then parses the object's ID with the specified validation function (see [[org.knora.webapi.util.StringFormatter]]
* for examples of such functions), throwing [[BadRequestException]] if the validation fails.
*
* @param key the key of the required value.
* @return the validated IRI.
*/
def requireIriInObject[T](key: String, validationFun: (String, => Nothing) => T): T = {
requireObject(key).toIri(validationFun)
}
/**
* Gets an optional IRI value (contained in a JSON-LD object) of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not a JSON-LD object. Parses the object's ID with the
* specified validation function (see [[org.knora.webapi.util.StringFormatter]] for examples of such functions),
* throwing [[BadRequestException]] if the validation fails.
*
* @param key the key of the optional value.
* @param validationFun a validation function that takes two arguments: the string to be validated, and a function
* that throws an exception if the string is invalid. The function's return value is the
* validated string, possibly converted to another type T.
* @tparam T the type of the validation function's return value.
* @return the return value of the validation function, or `None` if the value was not present.
*/
def maybeIriInObject[T](key: String, validationFun: (String, => Nothing) => T): Option[T] = {
maybeObject(key).map(_.toIri(validationFun))
}
/**
* Gets a required datatype value (contained in a JSON-LD object) of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not a JSON-LD object.
* Then parses the object's literal value with the specified validation function (see [[org.knora.webapi.util.StringFormatter]]
* for examples of such functions), throwing [[BadRequestException]] if the validation fails.
*
* @param key the key of the required value.
* @param expectedDatatype the IRI of the expected datatype.
* @tparam T the type of the validation function's return value.
* @return the validated literal value.
*/
def requireDatatypeValueInObject[T](key: String, expectedDatatype: SmartIri, validationFun: (String, => Nothing) => T): T = {
requireObject(key).toDatatypeValueLiteral(expectedDatatype, validationFun)
}
/**
* Gets an optional datatype value (contained in a JSON-LD object) of a property of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not a JSON-LD object. Parses the object's literal value with the
* specified validation function (see [[org.knora.webapi.util.StringFormatter]] for examples of such functions),
* throwing [[BadRequestException]] if the validation fails.
*
* @param key the key of the optional value.
* @param expectedDatatype the IRI of the expected datatype.
* @param validationFun a validation function that takes two arguments: the string to be validated, and a function
* that throws an exception if the string is invalid. The function's return value is the
* validated string, possibly converted to another type T.
* @tparam T the type of the validation function's return value.
* @return the return value of the validation function, or `None` if the value was not present.
*/
def maybeDatatypeValueInObject[T](key: String, expectedDatatype: SmartIri, validationFun: (String, => Nothing) => T): Option[T] = {
maybeObject(key).map(_.toDatatypeValueLiteral(expectedDatatype, validationFun))
}
/**
* Gets the required object value of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not an object.
*
* @param key the key of the required value.
* @return the required value.
*/
def requireObject(key: String): JsonLDObject = {
value.getOrElse(key, throw BadRequestException(s"No $key provided")) match {
case obj: JsonLDObject => obj
case other => throw BadRequestException(s"Invalid $key: $other (object expected)")
}
}
/**
* Gets the optional object value of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not an object.
*
* @param key the key of the optional value.
* @return the optional value.
*/
def maybeObject(key: String): Option[JsonLDObject] = {
value.get(key).map {
case obj: JsonLDObject => obj
case other => throw BadRequestException(s"Invalid $key: $other (object expected)")
}
}
/**
* Gets the required array value of this JSON-LD object. If the value is not an array,
* returns a one-element array containing the value. Throws
* [[BadRequestException]] if the property is not found.
*
* @param key the key of the required value.
* @return the required value.
*/
def requireArray(key: String): JsonLDArray = {
value.getOrElse(key, throw BadRequestException(s"No $key provided")) match {
case obj: JsonLDArray => obj
case other => JsonLDArray(Seq(other))
}
}
/**
* Gets the optional array value of this JSON-LD object. If the value is not an array,
* returns a one-element array containing the value.
*
* @param key the key of the optional value.
* @return the optional value.
*/
def maybeArray(key: String): Option[JsonLDArray] = {
value.get(key).map {
case obj: JsonLDArray => obj
case other => JsonLDArray(Seq(other))
}
}
/**
* Gets the required integer value of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not an integer.
*
* @param key the key of the required value.
* @return the required value.
*/
def requireInt(key: String): Int = {
value.getOrElse(key, throw BadRequestException(s"No $key provided")) match {
case obj: JsonLDInt => obj.value
case other => throw BadRequestException(s"Invalid $key: $other (integer expected)")
}
}
/**
* Gets the optional integer value of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not an integer.
*
* @param key the key of the optional value.
* @return the optional value.
*/
def maybeInt(key: String): Option[Int] = {
value.get(key).map {
case obj: JsonLDInt => obj.value
case other => throw BadRequestException(s"Invalid $key: $other (integer expected)")
}
}
/**
* Gets the required boolean value of this JSON-LD object, throwing
* [[BadRequestException]] if the property is not found or if its value is not a boolean.
*
* @param key the key of the required value.
* @return the required value.
*/
def requireBoolean(key: String): Boolean = {
value.getOrElse(key, throw BadRequestException(s"No $key provided")) match {
case obj: JsonLDBoolean => obj.value
case other => throw BadRequestException(s"Invalid $key: $other (boolean expected)")
}
}
/**
* Gets the optional boolean value of this JSON-LD object, throwing
* [[BadRequestException]] if the property's value is not a boolean.
*
* @param key the key of the optional value.
* @return the optional value.
*/
def maybeBoolean(key: String): Option[Boolean] = {
value.get(key).map {
case obj: JsonLDBoolean => obj.value
case other => throw BadRequestException(s"Invalid $key: $other (boolean expected)")
}
}
override def compare(that: JsonLDValue): Int = 0
/**
* Validates the `@id` of a JSON-LD object as a Knora data IRI.
*
* @return a validated Knora data IRI.
*/
def getIDAsKnoraDataIri: SmartIri = {
implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
val dataIri = requireStringWithValidation(JsonLDConstants.ID, stringFormatter.toSmartIriWithErr)
if (!dataIri.isKnoraDataIri) {
throw BadRequestException(s"Invalid Knora data IRI: $dataIri")
}
dataIri
}
/**
* Validates the `@type` of a JSON-LD object as a Knora type IRI in the API v2 complex schema.
*
* @return a validated Knora type IRI.
*/
def getTypeAsKnoraApiV2ComplexTypeIri: SmartIri = {
implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
val typeIri = requireStringWithValidation(JsonLDConstants.TYPE, stringFormatter.toSmartIriWithErr)
if (!(typeIri.isKnoraEntityIri && typeIri.getOntologySchema.contains(ApiV2Complex))) {
throw BadRequestException(s"Invalid Knora API v2 complex type IRI: $typeIri")
}
typeIri
}
/**
* When called on a JSON-LD object representing a resource, ensures that it contains a single Knora property with
* a single value in the Knora API v2 complex schema.
*
* @return the property IRI and the value.
*/
def getResourcePropertyApiV2ComplexValue: (SmartIri, JsonLDObject) = {
implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
val resourceProps: Map[IRI, JsonLDValue] = value - JsonLDConstants.ID - JsonLDConstants.TYPE
if (resourceProps.isEmpty) {
throw BadRequestException("No value submitted")
}
if (resourceProps.size > 1) {
throw BadRequestException(s"Only one value can be submitted per request using this route")
}
resourceProps.head match {
case (key: IRI, jsonLDValue: JsonLDValue) =>
val propertyIri = key.toSmartIriWithErr(throw BadRequestException(s"Invalid property IRI: $key"))
if (!(propertyIri.isKnoraEntityIri && propertyIri.getOntologySchema.contains(ApiV2Complex))) {
throw BadRequestException(s"Invalid Knora API v2 complex property IRI: $propertyIri")
}
jsonLDValue match {
case jsonLDObject: JsonLDObject => propertyIri -> jsonLDObject
case _ => throw BadRequestException(s"Invalid value for $propertyIri")
}
}
}
}
/**
* Represents a JSON array in a JSON-LD document.
*
* @param value a sequence of JSON-LD values.
*/
case class JsonLDArray(value: Seq[JsonLDValue]) extends JsonLDValue {
implicit private val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
override def toAny: Seq[Any] = value.map(_.toAny)
/**
* Tries to interpret the elements of this array as JSON-LD objects containing `@language` and `@value`,
* and returns the results as a set of [[StringLiteralV2]]. Throws [[BadRequestException]]
* if the array can't be interpreted in this way.
*
* @return a sequence of [[StringLiteralV2]] objects.
*/
def toObjsWithLang: Seq[StringLiteralV2] = {
value.map {
case obj: JsonLDObject =>
val lang = obj.requireStringWithValidation(JsonLDConstants.LANGUAGE, stringFormatter.toSparqlEncodedString)
if (!LanguageCodes.SupportedLanguageCodes(lang)) {
throw BadRequestException(s"Unsupported language code: $lang")
}
val text = obj.requireStringWithValidation(JsonLDConstants.VALUE, stringFormatter.toSparqlEncodedString)
StringLiteralV2(text, Some(lang))
case other => throw BadRequestException(s"Expected JSON-LD object: $other")
}
}
override def compare(that: JsonLDValue): Int = 0
}
/**
* Represents a JSON-LD document.
*
* @param body the body of the JSON-LD document.
* @param context the context of the JSON-LD document.
*/
case class JsonLDDocument(body: JsonLDObject, context: JsonLDObject = JsonLDObject(Map.empty[String, JsonLDValue])) {
/**
* A convenience function that calls `body.requireString`.
*/
def requireString(key: String): String = body.requireString(key)
/**
* A convenience function that calls `body.requireStringWithValidation`.
*/
def requireStringWithValidation[T](key: String, validationFun: (String, => Nothing) => T): T = body.requireStringWithValidation(key, validationFun)
/**
* A convenience function that calls `body.maybeString`.
*/
def maybeString(key: String): Option[String] = body.maybeString(key)
/**
* A convenience function that calls `body.maybeStringWithValidation`.
*/
def maybeStringWithValidation[T](key: String, validationFun: (String, => Nothing) => T): Option[T] = body.maybeStringWithValidation(key, validationFun)
/**
* A convenience function that calls `body.requireIriInObject`.
*/
def requireIriInObject[T](key: String, validationFun: (String, => Nothing) => T): T = body.requireIriInObject(key, validationFun)
/**
* A convenience function that calls `body.maybeIriInObject`.
*/
def maybeIriInObject[T](key: String, validationFun: (String, => Nothing) => T): Option[T] = body.maybeIriInObject(key, validationFun)
/**
* A convenience function that calls `body.requireDatatypeValueInObject`.
*/
def requireDatatypeValueInObject[T](key: String, expectedDatatype: SmartIri, validationFun: (String, => Nothing) => T): T =
body.requireDatatypeValueInObject(
key = key,
expectedDatatype = expectedDatatype,
validationFun = validationFun
)
/**
* A convenience function that calls `body.maybeDatatypeValueInObject`.
*/
def maybeDatatypeValueInObject[T](key: String, expectedDatatype: SmartIri, validationFun: (String, => Nothing) => T): Option[T] =
body.maybeDatatypeValueInObject(
key = key,
expectedDatatype = expectedDatatype,
validationFun = validationFun
)
/**
* A convenience function that calls `body.requireObject`.
*/
def requireObject(key: String): JsonLDObject = body.requireObject(key)
/**
* A convenience function that calls `body.maybeObject`.
*/
def maybeObject(key: String): Option[JsonLDObject] = body.maybeObject(key)
/**
* A convenience function that calls `body.requireArray`.
*/
def requireArray(key: String): JsonLDArray = body.requireArray(key)
/**
* A convenience function that calls `body.maybeArray`.
*/
def maybeArray(key: String): Option[JsonLDArray] = body.maybeArray(key)
/**
* A convenience function that calls `body.requireInt`.
*/
def requireInt(key: String): Int = body.requireInt(key)
/**
* A convenience function that calls `body.maybeInt`.
*/
def maybeInt(key: String): Option[Int] = body.maybeInt(key)
/**
* A convenience function that calls `body.requireBoolean`.
*/
def requireBoolean(key: String): Boolean = body.requireBoolean(key)
/**
* A convenience function that calls `body.maybeBoolean`.
*/
def maybeBoolean(key: String): Option[Boolean] = body.maybeBoolean(key)
/**
* A convenience function that calls `body.getIDAsKnoraDataIri`.
*/
def getIDAsKnoraDataIri: SmartIri = body.getIDAsKnoraDataIri
/**
* A convenience function that calls `body.getTypeAsKnoraApiV2ComplexTypeIri`.
*/
def getTypeAsKnoraTypeIri: SmartIri = body.getTypeAsKnoraApiV2ComplexTypeIri
/**
* A convenience function that calls `body.getResourcePropertyApiV2ComplexValue`.
*/
def getResourcePropertyValue: (SmartIri, JsonLDObject) = body.getResourcePropertyApiV2ComplexValue
/**
* Converts this JSON-LD object to its compacted Java representation.
*/
private def makeCompactedObject: java.util.Map[IRI, AnyRef] = {
val contextAsJava = JavaUtil.deepScalaToJava(context.toAny)
val jsonAsJava = JavaUtil.deepScalaToJava(body.toAny)
JsonLdProcessor.compact(jsonAsJava, contextAsJava, new JsonLdOptions())
}
/**
* Converts this [[JsonLDDocument]] to a pretty-printed JSON-LD string.
*
* @return the formatted document.
*/
def toPrettyString: String = {
JsonUtils.toPrettyString(makeCompactedObject)
}
/**
* Converts this [[JsonLDDocument]] to a compact JSON-LD string.
*
* @return the formatted document.
*/
def toCompactString: String = {
JsonUtils.toString(makeCompactedObject)
}
}
/**
* Utility functions for working with JSON-LD.
*/
object JsonLDUtil {
/**
* Makes a JSON-LD context containing prefixes for Knora and other ontologies.
*
* @param fixedPrefixes a map of fixed prefixes (e.g. `rdfs` or `knora-base`) to namespaces.
* @param knoraOntologiesNeedingPrefixes a set of IRIs of other Knora ontologies that need prefixes.
* @return a JSON-LD context.
*/
def makeContext(fixedPrefixes: Map[String, String], knoraOntologiesNeedingPrefixes: Set[SmartIri] = Set.empty): JsonLDObject = {
/**
* Given a function that makes a prefix from a Knora ontology IRI, returns an association list in which
* each element is a prefix associated with a namespace.
*
* @param prefixFun a function that makes a prefix from a Knora ontology IRI.
* @return an association list in which each element is a prefix associated with a namespace.
*/
def makeKnoraPrefixes(prefixFun: SmartIri => String): Seq[(String, String)] = {
knoraOntologiesNeedingPrefixes.toSeq.map {
ontology => prefixFun(ontology) -> (ontology.toString + '#')
}
}
/**
* Determines whether an association list returned by `makeKnoraPrefixes` contains conflicts,
* including conflicts with `fixedPrefixes`.
*
* @param knoraPrefixes the association list to check.
* @return `true` if the list contains conflicts.
*/
def hasPrefixConflicts(knoraPrefixes: Seq[(String, String)]): Boolean = {
val prefixSeq = knoraPrefixes.map(_._1) ++ fixedPrefixes.keys
prefixSeq.size != prefixSeq.distinct.size
}
// Make an association list of short prefixes to the ontologies in knoraOntologiesNeedingPrefixes.
val shortKnoraPrefixes: Seq[(String, String)] = makeKnoraPrefixes(ontology => ontology.getShortPrefixLabel)
// Are there conflicts in that list?
val knoraPrefixMap: Map[String, String] = if (hasPrefixConflicts(shortKnoraPrefixes)) {
// Yes. Try again with long prefixes.
val longKnoraPrefixes: Seq[(String, String)] = makeKnoraPrefixes(ontology => ontology.getLongPrefixLabel)
// Are there still conflicts?
if (hasPrefixConflicts(longKnoraPrefixes)) {
// Yes. This shouldn't happen, so throw InconsistentTriplestoreDataException.
throw InconsistentTriplestoreDataException(s"Can't make distinct prefixes for ontologies: ${(fixedPrefixes.values ++ knoraOntologiesNeedingPrefixes.map(_.toString)).mkString(", ")}")
} else {
// No. Use the long prefixes.
longKnoraPrefixes.toMap
}
} else {
// No. Use the short prefixes.
shortKnoraPrefixes.toMap
}
// Make a JSON-LD context containing the fixed prefixes as well as the ones generated by this method.
JsonLDObject((fixedPrefixes ++ knoraPrefixMap).map {
case (prefix, namespace) => prefix -> JsonLDString(namespace)
})
}
/**
* Converts an IRI value to its JSON-LD object value representation.
*
* @param iri the IRI to be converted.
* @return the JSON-LD representation of the IRI as an object value.
*/
def iriToJsonLDObject(iri: IRI): JsonLDObject = {
JsonLDObject(Map(JsonLDConstants.ID -> JsonLDString(iri)))
}
/**
* Given a predicate value and a language code, returns a JSON-LD object containing `@value` and `@language`
* predicates.
*
* @param obj a predicate value.
* @return a JSON-LD object containing `@value` and `@language` predicates.
*/
def objectWithLangToJsonLDObject(obj: String, lang: String): JsonLDObject = {
JsonLDObject(Map(
JsonLDConstants.VALUE -> JsonLDString(obj),
JsonLDConstants.LANGUAGE -> JsonLDString(lang)
))
}
/**
* Given a predicate value and a datatype, returns a JSON-LD object containing `@value` and `@type`
* predicates.
*
* @param value a predicate value.
* @param datatype the datatype.
* @return a JSON-LD object containing `@value` and `@type` predicates.
*/
def datatypeValueToJsonLDObject(value: String, datatype: SmartIri): JsonLDObject = {
// Normalise the formatting of decimal values to ensure consistency in tests.
val strValue: String = if (datatype.toString == OntologyConstants.Xsd.Decimal) {
BigDecimal(value).underlying.stripTrailingZeros.toPlainString
} else {
value
}
JsonLDObject(Map(
JsonLDConstants.VALUE -> JsonLDString(strValue),
JsonLDConstants.TYPE -> JsonLDString(datatype.toString)
))
}
/**
* Given a map of language codes to predicate values, returns a JSON-LD array in which each element
* has a `@value` predicate and a `@language` predicate.
*
* @param objectsWithLangs a map of language codes to predicate values.
* @return a JSON-LD array in which each element has a `@value` predicate and a `@language` predicate.
*/
def objectsWithLangsToJsonLDArray(objectsWithLangs: Map[String, String]): JsonLDArray = {
val objects: Seq[JsonLDObject] = objectsWithLangs.toSeq.map {
case (lang, obj) =>
objectWithLangToJsonLDObject(
obj = obj,
lang = lang
)
}
JsonLDArray(objects)
}
/**
* Parses a JSON-LD string as a [[JsonLDDocument]] with an empty context.
*
* @param jsonLDString the string to be parsed.
* @return a [[JsonLDDocument]].
*/
def parseJsonLD(jsonLDString: String): JsonLDDocument = {
val jsonObject: AnyRef = try {
JsonUtils.fromString(jsonLDString)
} catch {
case e: com.fasterxml.jackson.core.JsonParseException => throw BadRequestException(s"Couldn't parse JSON-LD: ${e.getMessage}")
}
val context: java.util.HashMap[String, Any] = new java.util.HashMap[String, Any]()
val options: JsonLdOptions = new JsonLdOptions()
val compact: java.util.Map[IRI, AnyRef] = JsonLdProcessor.compact(jsonObject, context, options)
val scalaColl: Any = JavaUtil.deepJavaToScala(compact)
val scalaMap: Map[String, Any] = try {
scalaColl.asInstanceOf[Map[String, Any]]
} catch {
case _: java.lang.ClassCastException => throw BadRequestException(s"Expected JSON-LD object: $scalaColl")
}
mapToJsonLDDocument(scalaMap)
}
/**
* Converts a map into a [[JsonLDDocument]].
*
* @param docContent a map representing a JSON-LD object.
* @return
*/
private def mapToJsonLDDocument(docContent: Map[String, Any]): JsonLDDocument = {
def anyToJsonLDValue(anyVal: Any): JsonLDValue = {
anyVal match {
case string: String => JsonLDString(string)
case int: Int => JsonLDInt(int)
case bool: Boolean => JsonLDBoolean(bool)
case obj: Map[_, _] =>
val content: Map[String, JsonLDValue] = obj.map {
case (key: String, value: Any) => key -> anyToJsonLDValue(value)
case (otherKey, otherValue) => throw BadRequestException(s"Unexpected types in JSON-LD object: $otherKey, $otherValue")
}
JsonLDObject(content)
case array: Seq[Any] => JsonLDArray(array.map(value => anyToJsonLDValue(value)))
case _ => throw BadRequestException(s"Unexpected type in JSON-LD input: $anyVal")
}
}
anyToJsonLDValue(docContent) match {
case obj: JsonLDObject => JsonLDDocument(body = obj, context = JsonLDObject(Map.empty[IRI, JsonLDValue]))
case _ => throw BadRequestException(s"Expected JSON-LD object: $docContent")
}
}
}
| musicEnfanthen/Knora | webapi/src/main/scala/org/knora/webapi/util/jsonld/JsonLDUtil.scala | Scala | agpl-3.0 | 34,580 |
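A hedged usage sketch of the utilities above (not part of the Knora sources): parse a JSON-LD string and read properties from the resulting document. The input uses absolute property IRIs so that the values survive compaction against the empty context; whether this runs as-is depends on the surrounding Knora setup.
// Hypothetical usage of JsonLDUtil.parseJsonLD and the JsonLDDocument accessors.
import org.knora.webapi.util.jsonld.{JsonLDDocument, JsonLDUtil}

object JsonLDSketch {
  val input: String =
    """{ "@id": "http://example.org/thing/1",
      |  "http://example.org/hasLabel": "A thing",
      |  "http://example.org/hasCount": 3 }""".stripMargin

  val doc: JsonLDDocument = JsonLDUtil.parseJsonLD(input)
  val label: String = doc.requireString("http://example.org/hasLabel") // throws BadRequestException if missing
  val count: Int = doc.requireInt("http://example.org/hasCount")
}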
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.sumologic.sumobot.core.model
import akka.actor.ActorRef
import slack.models.{ActionField => SActionField, Attachment => SAttachment, AttachmentField => SAttachmentField, ConfirmField => SConfirmField}
import java.io.File
case class OutgoingMessage(channel: Channel, text: String, threadTs: Option[String] = None)
// NOTE(mccartney, 2018-11-02): Slack API doesn't allow sending messages with attachments using the RTM client,
// thus modelling it as a separate case class. Although the document structure is consistent with `OutgoingMessage`.
// See https://api.slack.com/rtm#formatting_messages
case class OutgoingMessageWithAttachments(channel: Channel, text: String,
threadTs: Option[String], attachments: Seq[Attachment] = Seq())
case class Attachment(fallback: Option[String] = None,
callbackId: Option[String] = None,
color: Option[String] = None,
pretext: Option[String] = None,
authorName: Option[String] = None,
authorLink: Option[String] = None,
authorIcon: Option[String] = None,
title: Option[String] = None,
titleLink: Option[String] = None,
text: Option[String] = None,
fields: Seq[AttachmentField] = Seq.empty,
imageUrl: Option[String] = None,
thumbUrl: Option[String] = None,
actions: Seq[ActionField] = Seq.empty,
mrkdwnIn: Seq[String] = Seq.empty)
case class AttachmentField(title: String, value: String, short: Boolean)
case class ActionField(name: String,
text: String,
actionType: String,
style: Option[String] = None,
value: Option[String] = None,
confirm: Option[ConfirmField] = None)
case class ConfirmField(text: String,
title: Option[String] = None,
okText: Option[String] = None,
cancelText: Option[String] = None)
case class OpenIM(userId: String, doneRecipient: ActorRef, doneMessage: AnyRef)
case class IncomingMessage(canonicalText: String,
addressedToUs: Boolean,
channel: Channel,
idTimestamp: String,
threadTimestamp: Option[String] = None,
attachments: Seq[IncomingMessageAttachment] = Seq(),
sentBy: Sender)
case class IncomingMessageAttachment(text: String, title: String)
case class OutgoingImage(channel: Channel, image: File, contentType: String, title: String,
comment: Option[String] = None, threadTimestamp: Option[String] = None)
case class Reaction(emoji: String, channelId: String, messageTimestamp: String, addedByUser: String)
sealed abstract class Sender {
def slackReference: String
def plainTextReference: String
}
case class UserSender(slackUser: slack.models.User) extends Sender {
override def slackReference: String = s"<@${slackUser.id}>"
override def plainTextReference: String = slackUser.id
}
case class BotSender(id: String) extends Sender {
override def slackReference: String = s"app: $id"
override def plainTextReference: String = slackReference
}
case class ResponseInProgress(channel: Channel)
object PublicHttpsReference {
def forMessage(baseSlackUrl: String, msg: IncomingMessage) = {
val clearId = msg.idTimestamp.replace(".", "")
val channelId = msg.channel.id
s"$baseSlackUrl/archives/$channelId/p$clearId"
}
}
object Messages {
def convertToSlackModel(attachments: Seq[Attachment]): Option[Seq[SAttachment]] = {
Some(
attachments.map {
a =>
SAttachment(fallback = a.fallback,
callback_id = a.callbackId,
color = a.color,
pretext = a.pretext,
author_name = a.authorName,
author_link = a.authorLink,
title = a.title,
title_link = a.titleLink,
text = a.text,
fields = convertFieldsToSlackModel(a.fields),
image_url = a.imageUrl,
thumb_url = a.thumbUrl,
actions = convertActionsToSlackModel(a.actions),
mrkdwn_in = Some(a.mrkdwnIn))
}
)
}
private def convertFieldsToSlackModel(fields: Seq[AttachmentField]): Option[Seq[SAttachmentField]] = {
Some(fields.map {
f =>
SAttachmentField(title = f.title, value = f.value, short = f.short)
})
}
private def convertActionsToSlackModel(actions: Seq[ActionField]): Option[Seq[SActionField]] = {
Some(actions.map {
a =>
SActionField(name = a.name, text = a.text, `type` = a.actionType, style = a.style, value = a.value,
confirm = a.confirm.map(convertConfirmFieldToSlackModel))
})
}
private def convertConfirmFieldToSlackModel(confirm: ConfirmField): SConfirmField = {
SConfirmField(text = confirm.text, title = confirm.title, ok_text = confirm.okText, cancel_text = confirm.cancelText)
}
}
| SumoLogic/sumobot | src/main/scala/com/sumologic/sumobot/core/model/Messages.scala | Scala | apache-2.0 | 6,059 |
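A brief illustrative sketch (not from sumobot itself) of building one of the Attachment values above and converting it to the Slack client's model with Messages.convertToSlackModel:
// Hypothetical example: convert a sumobot Attachment into the slack-scala-client model.
import com.sumologic.sumobot.core.model.{Attachment, AttachmentField, Messages}

object AttachmentDemo extends App {
  val attachment = Attachment(
    fallback = Some("Build finished"),
    color = Some("#36a64f"),
    title = Some("Build #42"),
    text = Some("All tests passed."),
    fields = Seq(AttachmentField(title = "Duration", value = "3m 12s", short = true))
  )

  // Option[Seq[slack.models.Attachment]], ready to hand to the Slack API client.
  println(Messages.convertToSlackModel(Seq(attachment)))
}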
package org.infinispan.spark.rdd
import java.util.Properties
import org.apache.spark.api.java.{JavaPairRDD, JavaSparkContext}
import org.infinispan.spark._
import scala.reflect.ClassTag
/**
* @author gustavonalle
*/
object InfinispanJavaRDD {
def createInfinispanRDD[K, V](jsc: JavaSparkContext, config: Properties) = {
val infinispanRDD = new InfinispanRDD[K, V](jsc.sc, config, new PerServerSplitter)
implicit val keyClassTag = ClassTag.AnyRef.asInstanceOf[ClassTag[K]]
implicit val valueClassTag = ClassTag.AnyRef.asInstanceOf[ClassTag[V]]
JavaPairRDD.fromRDD(infinispanRDD)
}
def write[K, V](pairRDD: JavaPairRDD[K, V], config: Properties) = pairRDD.rdd.writeToInfinispan(config)
}
| rnowling/infinispan-spark | src/main/scala/org/infinispan/spark/rdd/InfinispanJavaRDD.scala | Scala | apache-2.0 | 729 |
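A hedged sketch of calling the factory above from a local Spark context. The connection property key shown is the standard HotRod client property and is an assumption about what this version of the connector reads from the Properties object.
// Hypothetical usage of InfinispanJavaRDD.createInfinispanRDD (property key is an assumption).
import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext
import org.infinispan.spark.rdd.InfinispanJavaRDD

object InfinispanRDDDemo {
  def main(args: Array[String]): Unit = {
    val jsc = new JavaSparkContext(new SparkConf().setAppName("infinispan-demo").setMaster("local[*]"))

    val config = new Properties()
    config.put("infinispan.client.hotrod.server_list", "127.0.0.1:11222") // assumed key

    val pairRDD = InfinispanJavaRDD.createInfinispanRDD[String, String](jsc, config)
    println(pairRDD.count())
    jsc.stop()
  }
}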
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package scalanlp;
package stage;
package text;
import generic.Filter;
/**
* Filters a set of documents so that all documents contain
* at least minTokens tokens.
*
* @author dramage
*/
case class DocumentMinimumLengthFilter[ID:Manifest](minTokens : Int)
extends Filter[ID,Iterable[String]] {
override def filter(doc : Iterable[String]) =
doc.size >= minTokens;
override def toString = "DocumentMinimumLengthFilter("+minTokens+")";
}
| MLnick/scalanlp-core | data/src/main/scala/scalanlp/stage/text/DocumentFilters.scala | Scala | apache-2.0 | 1,028 |
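A small hedged sketch (not from the scalanlp sources) of applying the filter above to tokenized documents:
// Hypothetical example: keep only documents with at least 3 tokens.
import scalanlp.stage.text.DocumentMinimumLengthFilter

object MinLengthDemo extends App {
  val docs: Seq[Iterable[String]] = Seq(
    Seq("the", "quick", "brown", "fox"),
    Seq("too", "short")
  )

  val minLength = DocumentMinimumLengthFilter[Int](minTokens = 3)
  println(docs.filter(minLength.filter)) // keeps only the 4-token document
}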
/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala.observers
import java.util.concurrent.TimeUnit
import scala.collection.JavaConversions._
import scala.concurrent.duration.Duration
import rx.{Subscriber => JSubscriber, Observer => JObserver, Subscription => JSubscription}
import rx.annotations.Experimental
import rx.observers.{TestSubscriber => JTestSubscriber}
import rx.lang.scala.{Observable, Observer, Subscriber}
/**
* A [[TestSubscriber]] is a variety of [[Subscriber]] that you can use for unit testing, to perform
* assertions, inspect received events, or wrap a mocked [[Subscriber]].
*
* @define experimental
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*/
class TestSubscriber[T] private[scala](jTestSubscriber: JTestSubscriber[T]) extends Subscriber[T] {
private[scala] override val asJavaSubscriber: JSubscriber[_ >: T] = jTestSubscriber
private[scala] override val asJavaObserver: JObserver[_ >: T] = jTestSubscriber
private[scala] override val asJavaSubscription: JSubscription = jTestSubscriber
override def onNext(value: T): Unit = jTestSubscriber.onNext(value)
override def onError(error: Throwable): Unit = jTestSubscriber.onError(error)
override def onCompleted(): Unit = jTestSubscriber.onCompleted()
/**
* Get the `Throwable`s this [[Subscriber]] was notified of via [[onError]]
*
* @return a sequence of the `Throwable`s that were passed to the [[Subscriber.onError]] method
*/
def getOnErrorEvents: Seq[Throwable] = {
jTestSubscriber.getOnErrorEvents()
}
/**
* Get the sequence of items observed by this [[Subscriber]].
*
* @return a sequence of items observed by this [[Subscriber]], in the order in which they were observed
*/
def getOnNextEvents: Seq[T] = {
jTestSubscriber.getOnNextEvents()
}
/**
* Allow calling the protected [[request]] from unit tests.
*
* @param n the maximum number of items you want the Observable to emit to the Subscriber at this time, or
* `Long.MaxValue` if you want the Observable to emit items at its own pace
*/
def requestMore(n: Long): Unit = {
jTestSubscriber.requestMore(n)
}
/**
* Assert that a single terminal event occurred, either `onCompleted` or `onError`.
*
* @throws AssertionError if not exactly one terminal event notification was received
*/
@throws[AssertionError]
def assertTerminalEvent(): Unit = {
jTestSubscriber.assertTerminalEvent()
}
/**
* Assert that this [[Subscriber]] is unsubscribed.
*
* @throws AssertionError if this [[Subscriber]] is not unsubscribed
*/
@throws[AssertionError]
def assertUnsubscribed(): Unit = {
jTestSubscriber.assertUnsubscribed()
}
/**
* Assert that this [[Subscriber]] has received no `onError` notifications.
*
* @throws AssertionError if this [[Subscriber]] has received one or more `onError` notifications
*/
@throws[AssertionError]
def assertNoErrors(): Unit = {
jTestSubscriber.assertNoErrors()
}
/**
* Blocks until this [[Subscriber]] receives a notification that the [[Observable]] is complete
* (either an `onCompleted` or `onError` notification).
*
* @throws RuntimeException if the Subscriber is interrupted before the Observable is able to complete
*/
@throws[RuntimeException]
def awaitTerminalEvent(): Unit = {
jTestSubscriber.awaitTerminalEvent()
}
/**
* Blocks until this [[Subscriber]] receives a notification that the [[Observable]] is complete
* (either an `onCompleted` or `onError` notification), or until a timeout expires.
*
* @param timeout the duration of the timeout
* @throws RuntimeException if the Subscriber is interrupted before the Observable is able to complete
*/
@throws[RuntimeException]
def awaitTerminalEvent(timeout: Duration): Unit = {
jTestSubscriber.awaitTerminalEvent(timeout.toNanos, TimeUnit.NANOSECONDS)
}
/**
* Blocks until this [[Subscriber]] receives a notification that the [[Observable]] is complete
* (either an `onCompleted` or `onError` notification), or until a timeout expires; if the
* [[Subscriber]] is interrupted before either of these events take place, this method unsubscribes the
 * [[Subscriber]] from the [[Observable]].
*
* @param timeout the duration of the timeout
*/
def awaitTerminalEventAndUnsubscribeOnTimeout(timeout: Duration): Unit = {
jTestSubscriber.awaitTerminalEventAndUnsubscribeOnTimeout(timeout.toNanos, TimeUnit.NANOSECONDS)
}
/**
* Returns the last thread that was in use when an item or notification was received by this [[Subscriber]].
*
* @return the `Thread` on which this [[Subscriber]] last received an item or notification from the
* [[Observable]] it is subscribed to
*/
def getLastSeenThread: Thread = {
jTestSubscriber.getLastSeenThread
}
/**
 * $experimental Assert that exactly one completion event was received.
*
* @throws AssertionError if there were zero, or more than one, onCompleted events
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertCompleted(): Unit = {
jTestSubscriber.assertCompleted()
}
/**
 * $experimental Assert that no completion event was received.
 *
 * @throws AssertionError if one or more onCompleted events were received
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertNotCompleted(): Unit = {
jTestSubscriber.assertNotCompleted()
}
/**
 * $experimental Assert that there is exactly one error event and that its error is a subclass of the given class.
*
* @param clazz the class to check the error against.
* @throws AssertionError if there were zero, or more than one, onError events, or if the single onError
* event did not carry an error of a subclass of the given class
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertError(clazz: Class[_ <: Throwable]): Unit = {
jTestSubscriber.assertError(clazz)
}
/**
 * $experimental Assert that there is a single onError event carrying the exact exception.
*
* @param throwable the throwable to check
* @throws AssertionError if there were zero, or more than one, onError events, or if the single onError
* event did not carry an error that matches the specified throwable
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertError(throwable: Throwable): Unit = {
jTestSubscriber.assertError(throwable)
}
/**
 * $experimental Assert that neither an onError nor an onCompleted event was received.
*
* @throws AssertionError if there was either an onError or onCompleted event
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertNoTerminalEvent(): Unit = {
jTestSubscriber.assertNoTerminalEvent()
}
/**
 * $experimental Assert that no onNext events were received.
*
* @throws AssertionError if there were any onNext events
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertNoValues(): Unit = {
jTestSubscriber.assertNoValues()
}
/**
 * $experimental Assert that the given number of onNext events were received.
*
* @param count the expected number of onNext events
* @throws AssertionError if there were more or fewer onNext events than specified by `count`
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertValueCount(count: Int): Unit = {
jTestSubscriber.assertValueCount(count)
}
/**
 * $experimental Assert that the received onNext events, in order, are exactly the specified items.
*
* @param values the items to check
* @throws AssertionError if the items emitted do not exactly match those specified by `values`
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertValues(values: T*): Unit = {
jTestSubscriber.assertValues(values: _*)
}
/**
 * $experimental Assert that only a single onNext event was received and that it carries the specified item.
*
* @param value the item to check
* @throws AssertionError if the [[Observable]] does not emit only the single item specified by `value`
* @since (if this graduates from "Experimental" replace this parenthetical with the release number)
*/
@Experimental
@throws[AssertionError]
def assertValue(value: T): Unit = {
jTestSubscriber.assertValue(value)
}
}
/**
* @define experimental
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*/
object TestSubscriber {
def apply[T](): TestSubscriber[T] =
new TestSubscriber(new JTestSubscriber[T]())
def apply[T](delegate: Observer[T]): TestSubscriber[T] =
new TestSubscriber(new JTestSubscriber[T](delegate.asJavaObserver.asInstanceOf[JObserver[T]]))
def apply[T](delegate: Subscriber[T]): TestSubscriber[T] =
new TestSubscriber(new JTestSubscriber[T](delegate.asJavaSubscriber.asInstanceOf[JSubscriber[T]]))
/**
* $experimental Constructs a [[TestSubscriber]] with the initial request to be requested from upstream.
 * @param initialRequest the initial request value; a negative value reverts to the default unbounded behavior
*/
@Experimental
def apply[T](initialRequest: Long): TestSubscriber[T] = {
new TestSubscriber(new JTestSubscriber[T](initialRequest))
}
/**
 * $experimental Constructs a [[TestSubscriber]] with the initial request to be requested from upstream and a delegate [[Observer]] to wrap.
 * @param initialRequest the initial request value; a negative value reverts to the default unbounded behavior
* @param delegate the Observer instance to wrap
*/
@Experimental
def apply[T](delegate: Observer[T], initialRequest: Long): TestSubscriber[T] = {
new TestSubscriber(new JTestSubscriber[T](delegate.asJavaObserver.asInstanceOf[JObserver[T]], initialRequest))
}
}
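// --- Hypothetical usage sketch (added for illustration, not part of the original
// source). It shows a typical unit-test flow with the API defined above; the
// object name and the sample Observable are made up.
object TestSubscriberUsageSketch {
  def demo(): Unit = {
    val subscriber = TestSubscriber[Int]()
    Observable.just(1, 2, 3).subscribe(subscriber)
    // Wait for the terminal event, then check the received notifications.
    subscriber.awaitTerminalEvent()
    subscriber.assertNoErrors()
    subscriber.assertValues(1, 2, 3)
    subscriber.assertCompleted()
  }
}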
|
zjrstar/RxScala
|
src/main/scala/rx/lang/scala/observers/TestSubscriber.scala
|
Scala
|
apache-2.0
| 11,129 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, Cast}
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.sources.{BaseRelation, HadoopFsRelation, InsertableRelation}
import org.apache.spark.sql.{AnalysisException, SQLContext, SaveMode}
/**
 * Tries to replace [[UnresolvedRelation]]s with [[ResolvedDataSource]].
*/
private[sql] class ResolveDataSource(sqlContext: SQLContext) extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case u: UnresolvedRelation if u.tableIdentifier.database.isDefined =>
try {
val resolved = ResolvedDataSource(
sqlContext,
userSpecifiedSchema = None,
partitionColumns = Array(),
provider = u.tableIdentifier.database.get,
options = Map("path" -> u.tableIdentifier.table))
val plan = LogicalRelation(resolved.relation)
u.alias.map(a => Subquery(u.alias.get, plan)).getOrElse(plan)
} catch {
case e: ClassNotFoundException => u
case e: Exception =>
// the provider is valid, but failed to create a logical plan
u.failAnalysis(e.getMessage)
}
}
}
/**
* A rule to do pre-insert data type casting and field renaming. Before we insert into
* an [[InsertableRelation]], we will use this rule to make sure that
* the columns to be inserted have the correct data type and fields have the correct names.
*/
private[sql] object PreInsertCastAndRename extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan transform {
// Wait until children are resolved.
case p: LogicalPlan if !p.childrenResolved => p
// We are inserting into an InsertableRelation or HadoopFsRelation.
case i @ InsertIntoTable(
l @ LogicalRelation(_: InsertableRelation | _: HadoopFsRelation, _), _, child, _, _) => {
// First, make sure the data to be inserted have the same number of fields with the
// schema of the relation.
if (l.output.size != child.output.size) {
sys.error(
s"$l requires that the query in the SELECT clause of the INSERT INTO/OVERWRITE " +
s"statement generates the same number of columns as its schema.")
}
castAndRenameChildOutput(i, l.output, child)
}
}
/** If necessary, cast data types and rename fields to the expected types and names. */
def castAndRenameChildOutput(
insertInto: InsertIntoTable,
expectedOutput: Seq[Attribute],
child: LogicalPlan): InsertIntoTable = {
val newChildOutput = expectedOutput.zip(child.output).map {
case (expected, actual) =>
val needCast = !expected.dataType.sameType(actual.dataType)
        // We want to make sure the field names in the data to be inserted exactly match
        // the names in the schema.
val needRename = expected.name != actual.name
(needCast, needRename) match {
case (true, _) => Alias(Cast(actual, expected.dataType), expected.name)()
case (false, true) => Alias(actual, expected.name)()
case (_, _) => actual
}
}
if (newChildOutput == child.output) {
insertInto
} else {
insertInto.copy(child = Project(newChildOutput, child))
}
}
}
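// --- Hypothetical, self-contained sketch (added for illustration, not part of
// Spark). It mirrors the (needCast, needRename) decision matrix used in
// castAndRenameChildOutput above with plain values; the object and case class
// names are made up.
object CastAndRenameSketch {
  final case class Field(name: String, dataType: String)

  /** Describe what would be done for each output column. */
  def plan(expected: Seq[Field], actual: Seq[Field]): Seq[String] =
    expected.zip(actual).map { case (e, a) =>
      val needCast = e.dataType != a.dataType
      val needRename = e.name != a.name
      (needCast, needRename) match {
        case (true, _)     => s"Alias(Cast(${a.name}, ${e.dataType}), ${e.name})"
        case (false, true) => s"Alias(${a.name}, ${e.name})"
        case _             => a.name // already has the right type and name
      }
    }
}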
/**
* A rule to do various checks before inserting into or writing to a data source table.
*/
private[sql] case class PreWriteCheck(catalog: Catalog) extends (LogicalPlan => Unit) {
def failAnalysis(msg: String): Unit = { throw new AnalysisException(msg) }
def apply(plan: LogicalPlan): Unit = {
plan.foreach {
case i @ logical.InsertIntoTable(
l @ LogicalRelation(t: InsertableRelation, _), partition, query, overwrite, ifNotExists) =>
// Right now, we do not support insert into a data source table with partition specs.
if (partition.nonEmpty) {
failAnalysis(s"Insert into a partition is not allowed because $l is not partitioned.")
} else {
// Get all input data source relations of the query.
val srcRelations = query.collect {
case LogicalRelation(src: BaseRelation, _) => src
}
if (srcRelations.contains(t)) {
failAnalysis(
"Cannot insert overwrite into table that is also being read from.")
} else {
// OK
}
}
case logical.InsertIntoTable(
LogicalRelation(r: HadoopFsRelation, _), part, query, overwrite, _) =>
// We need to make sure the partition columns specified by users do match partition
// columns of the relation.
val existingPartitionColumns = r.partitionColumns.fieldNames.toSet
val specifiedPartitionColumns = part.keySet
if (existingPartitionColumns != specifiedPartitionColumns) {
failAnalysis(s"Specified partition columns " +
s"(${specifiedPartitionColumns.mkString(", ")}) " +
s"do not match the partition columns of the table. Please use " +
s"(${existingPartitionColumns.mkString(", ")}) as the partition columns.")
} else {
// OK
}
PartitioningUtils.validatePartitionColumnDataTypes(r.schema, part.keySet.toArray)
// Get all input data source relations of the query.
val srcRelations = query.collect {
case LogicalRelation(src: BaseRelation, _) => src
}
if (srcRelations.contains(r)) {
failAnalysis(
"Cannot insert overwrite into table that is also being read from.")
} else {
// OK
}
case logical.InsertIntoTable(l: LogicalRelation, _, _, _, _) =>
// The relation in l is not an InsertableRelation.
failAnalysis(s"$l does not allow insertion.")
case logical.InsertIntoTable(t, _, _, _, _) =>
if (!t.isInstanceOf[LeafNode] || t == OneRowRelation || t.isInstanceOf[LocalRelation]) {
failAnalysis(s"Inserting into an RDD-based table is not allowed.")
} else {
// OK
}
case CreateTableUsingAsSelect(tableIdent, _, _, partitionColumns, mode, _, query) =>
// When the SaveMode is Overwrite, we need to check if the table is an input table of
// the query. If so, we will throw an AnalysisException to let users know it is not allowed.
if (mode == SaveMode.Overwrite && catalog.tableExists(tableIdent)) {
// Need to remove SubQuery operator.
EliminateSubQueries(catalog.lookupRelation(tableIdent)) match {
// Only do the check if the table is a data source table
// (the relation is a BaseRelation).
case l @ LogicalRelation(dest: BaseRelation, _) =>
// Get all input data source relations of the query.
val srcRelations = query.collect {
case LogicalRelation(src: BaseRelation, _) => src
}
if (srcRelations.contains(dest)) {
failAnalysis(
s"Cannot overwrite table $tableIdent that is also being read from.")
} else {
// OK
}
case _ => // OK
}
} else {
// OK
}
PartitioningUtils.validatePartitionColumnDataTypes(query.schema, partitionColumns)
case _ => // OK
}
}
}
|
pronix/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
|
Scala
|
apache-2.0
| 8,384 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package integration.security
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import com.ibm.spark.kernel.protocol.v5._
import com.ibm.spark.kernel.protocol.v5.security.SignatureCheckerActor
import com.ibm.spark.security.Hmac
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import play.api.libs.json.Json
object SignatureCheckerActorSpecForIntegration {
val config = """
akka {
loglevel = "WARNING"
}"""
}
class SignatureCheckerActorSpecForIntegration extends TestKit(
ActorSystem(
"SignatureCheckerActorSpec",
ConfigFactory.parseString(SignatureCheckerActorSpecForIntegration.config)
)
) with ImplicitSender with FunSpecLike with Matchers with BeforeAndAfter
{
private val sigKey = "12345"
private val signature =
"1c4859a7606fd93eb5f73c3d9642f9bc860453ba42063961a00d02ed820147b5"
private val goodMessage =
KernelMessage(
null, signature,
Header("a", "b", "c", "d", "e"),
ParentHeader("f", "g", "h", "i", "j"),
Metadata(),
"<STRING>"
)
private val badMessage =
KernelMessage(
null, "wrong signature",
Header("a", "b", "c", "d", "e"),
ParentHeader("f", "g", "h", "i", "j"),
Metadata(),
"<STRING>"
)
private var signatureChecker: ActorRef = _
before {
val hmac = Hmac(sigKey)
signatureChecker =
system.actorOf(Props(classOf[SignatureCheckerActor], hmac))
}
after {
signatureChecker = null
}
describe("SignatureCheckerActor") {
describe("#receive") {
it("should return true if the kernel message is valid") {
val blob =
Json.stringify(Json.toJson(goodMessage.header)) ::
Json.stringify(Json.toJson(goodMessage.parentHeader)) ::
Json.stringify(Json.toJson(goodMessage.metadata)) ::
goodMessage.contentString ::
Nil
signatureChecker ! ((goodMessage.signature, blob))
expectMsg(true)
}
it("should return false if the kernel message is invalid") {
val blob =
Json.stringify(Json.toJson(badMessage.header)) ::
Json.stringify(Json.toJson(badMessage.parentHeader)) ::
Json.stringify(Json.toJson(badMessage.metadata)) ::
badMessage.contentString ::
Nil
signatureChecker ! ((badMessage.signature, blob))
expectMsg(false)
}
}
}
}
|
bpburns/spark-kernel
|
kernel/src/test/scala/integration/security/SignatureCheckerActorSpecForIntegration.scala
|
Scala
|
apache-2.0
| 3,049 |
package dataflow
import akka.dataflow._
import financial.MockServices
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by IntelliJ IDEA.
* Author: Steve Levine
* Date: 3/23/15
*/
object SimpleDataFlow extends App with MockServices {
val marketCap = flow {
val q = flow {quote("tsla")}
val os = flow {outstandingShares("tsla")}
calculateMarketCap(q(), os())
}
val u = flow {updateQuoteCache()}
  marketCap onComplete {mc ⇒ logger.debug(s"market cap = ${mc.get}")}
while (!u.isCompleted) {}
}
|
slevine/concurrency-scratchpad
|
src/main/scala/dataflow/SimpleDataflow.scala
|
Scala
|
apache-2.0
| 547 |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package com.jspha.maia.internal
import scala.language.higherKinds
import com.jspha.maia._
/**
* A [[ReqTree]] is a binary, non-empty tree of [[Request]]s.
*
* Building [[Query]] values requires a lot of [[Request]] merging. If we
* merge each [[Request]] pairwise that requires `3n` trips between "flat"
* and [[shapeless.HList]] representations.
*
* Instead, we collect the [[Request]]s in a [[ReqTree]] trading a small
* amount of memory so as to only need to collapse the [[Request]]s at the
* end.
*/
sealed trait ReqTree[T[_ <: Dsl]] {
/**
* Pass once through the [[ReqTree]] and compute the combined [[Request]].
*/
def request(implicit merger: typelevel.MergeRequests[T]): Request[T] =
merger.mergeTree(this)
def *(other: ReqTree[T]): ReqTree[T] = ReqTree.Branch[T](this, other)
}
object ReqTree {
def apply[T[_ <: Dsl]](req: Request[T]): ReqTree[T] =
Leaf[T](req)
final case class Leaf[T[_ <: Dsl]](leaf: Request[T]) extends ReqTree[T]
final case class Branch[T[_ <: Dsl]](left: ReqTree[T], right: ReqTree[T])
extends ReqTree[T]
}
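// --- Hypothetical, self-contained analogue (added for illustration, not part of
// maia). It shows the idea described above: accumulate values in a binary tree
// and combine them in a single pass at the end, instead of merging pairwise as
// you go; the names below are made up.
object ReqTreeSketch {
  sealed trait Tree[A] { def *(other: Tree[A]): Tree[A] = Branch(this, other) }
  final case class Leaf[A](a: A) extends Tree[A]
  final case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]

  /** One traversal over the whole tree, analogous to `MergeRequests.mergeTree`. */
  def collapse[A](t: Tree[A])(merge: (A, A) => A): A = t match {
    case Leaf(a)             => a
    case Branch(left, right) => merge(collapse(left)(merge), collapse(right)(merge))
  }
}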
|
MaiaOrg/scala-maia
|
maia/src/main/scala-2.12/com/jspha/maia/internal/ReqTree.scala
|
Scala
|
mpl-2.0
| 1,308 |
package scala.c.engine
import java.nio.file.Paths
import scala.io.Source
class JpegTest extends StandardTest {
"jpeg test" should "print the correct results" in {
val code = """
void main() {
printf("nation emergency\\n");
}"""
val slre = Paths.get("tests", "scala", "c", "engine", "jpeg", "jpeg_encoder.c")
val slreText = Source.fromFile(slre.toFile, "utf-8").mkString
val allCode = Seq(slreText)
checkResults2(allCode, args = List("test.ppm", "10"))
}
}
|
bdwashbu/AstViewer
|
tests/scala/c/engine/JpegTest.scala
|
Scala
|
gpl-3.0
| 506 |
package com.alexitc.coinalerts.models
import com.alexitc.playsonify.models.WrappedString
import play.api.libs.json.{JsPath, Reads}
case class ReCaptchaResponse(string: String) extends AnyVal with WrappedString
object ReCaptchaResponse {
implicit val reads: Reads[ReCaptchaResponse] = JsPath.read[String].map(ReCaptchaResponse.apply)
}
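// --- Hypothetical usage sketch (added for illustration, not part of the original
// source). It assumes play-json is on the classpath, as it already is above; the
// object name and the sample token are made up.
object ReCaptchaResponseDemo extends App {
  import play.api.libs.json.Json

  // The Reads defined above accepts a bare JSON string.
  val response = Json.parse("\"sample-recaptcha-token\"").as[ReCaptchaResponse]
  println(response.string) // sample-recaptcha-token
}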
|
AlexITC/crypto-coin-alerts
|
alerts-server/app/com/alexitc/coinalerts/models/ReCaptchaResponse.scala
|
Scala
|
gpl-3.0
| 339 |
package webhooks
import java.util.UUID
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.testkit.{ImplicitSender, TestKit}
import org.scalatest.{Matchers, WordSpecLike}
import webhooks.HookManager._
import webhooks.models._
class ManagerSpec extends TestKit(ActorSystem()) with WordSpecLike with Matchers with ImplicitSender {
implicit val mat = ActorMaterializer()
"manager" should {
val mgr = childActorOf(HookManager.props())
val id = UUID.randomUUID.toString
"register" in {
val cfg = HookConfigOpt("http://localhost")
mgr ! CreateHook(id, cfg)
expectMsgType[HookCreated]
}
"unregister" in {
mgr ! DeleteHook(id)
expectMsgType[HookDeleted]
}
}
}
|
jw3/awebapi
|
src/test/scala/webhooks/ManagerSpec.scala
|
Scala
|
apache-2.0
| 746 |
package wdl.util
import java.util.regex.Pattern
import scala.annotation.tailrec
/** This is under a `wdl` package because it exists purely to do WDLy stuff, but it's currently being called from the
* WOMmy TaskDefinition. That should get straightened out. */
object StringUtil {
  val Ws = Pattern.compile("[\\ \\t]+")
/**
* 1) Remove all leading newline chars
* 2) Remove all trailing newline AND whitespace chars
* 3) Remove all *leading* whitespace that's common among every line in the input string
*
* For example, the input string:
*
* "
* first line
* second line
* third line
*
* "
*
* Would be normalized to:
*
* "first line
* second line
* third line"
*
* @param s String to process
* @return String which has common leading whitespace removed from each line
*/
def normalize(s: String): String = {
    val trimmed = stripAll(s, "\r\n", "\r\n \t")
    val parts = trimmed.split("\\r?\\n")
val indent = parts.filterNot(_.trim.isEmpty).map(leadingWhitespaceCount).toList match {
case Nil => 0
case nonEmpty => nonEmpty.min
}
    parts.map(_.drop(indent)).mkString("\n")
}
private def leadingWhitespaceCount(s: String): Int = {
val matcher = Ws.matcher(s)
if (matcher.lookingAt) matcher.end else 0
}
def stripAll(s: String, startChars: String, endChars: String): String = {
/* https://stackoverflow.com/questions/17995260/trimming-strings-in-scala */
@tailrec
def start(n: Int): String = {
if (n == s.length) ""
else if (startChars.indexOf(s.charAt(n).toInt) < 0) end(n, s.length)
else start(1 + n)
}
@tailrec
def end(a: Int, n: Int): String = {
if (n <= a) s.substring(a, n)
else if (endChars.indexOf(s.charAt(n - 1).toInt) < 0) s.substring(a, n)
else end(a, n - 1)
}
start(0)
}
}
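// --- Hypothetical usage sketch (added for illustration, not part of the original
// source), running the example from the scaladoc above through normalize; the
// object name is made up.
object StringUtilNormalizeDemo extends App {
  val raw = "\n    first line\n    second line\n    third line\n\n"
  // Prints the three lines with the common four-space indent removed.
  println(StringUtil.normalize(raw))
}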
|
ohsu-comp-bio/cromwell
|
wom/src/main/scala/wdl/util/StringUtil.scala
|
Scala
|
bsd-3-clause
| 1,907 |
package fi.kapsi.kosmik.sfti
import scala.beans.BeanProperty
object Chapter05 {
/**
 * Improve the Counter class in Section 5.1, “Simple Classes and Parameterless
 * Methods,” on page 55 so that it doesn’t turn negative at Int.MaxValue.
*/
object Ex01 {
class Counter {
private var value: Long = 0
def increment() {
value += 1
}
def current: Long = value
}
}
/**
* Write a class BankAccount with methods deposit and withdraw, and a read-only
* property balance.
*/
object Ex02 {
class BankAccount {
private var value: Double = 0.0
      def deposit(amount: Double): Unit = {
        value += amount
      }
      def withdraw(amount: Double): Unit = {
        value -= amount
      }
      def balance: Double = value
}
}
/**
* Write a class Time with read-only properties hours and minutes and a method
* before(other: Time): Boolean that checks whether this time comes before the
* other. A Time object should be constructed as new Time(hrs, min), where hrs is in
* military time format (between 0 and 23).
*/
object Ex03 {
class Time(val hours: Int, val minutes: Int) {
def before(other: Time): Boolean = {
if (hours == other.hours) minutes <= other.minutes
else hours <= other.hours
}
}
}
/**
* Reimplement the Time class from the preceding exercise so that the internal
 * representation is the number of minutes since midnight (between 0 and 24 ×
 * 60 − 1). Do not change the public interface. That is, client code should be
* unaffected by your change.
*/
object Ex04 {
class Time {
private var time = 0
def this(hours: Int, minutes: Int) {
this()
this.time = hours * 60 + minutes
}
private def hours: Int = time / 60
private def minutes: Int = time - hours * 60
def before(other: Time): Boolean = {
if (hours == other.hours) minutes <= other.minutes
else hours <= other.hours
}
override def toString: String = f"$hours:$minutes"
}
}
/**
* <p>
 * Make a class Student with read-write JavaBeans properties name (of type String)
* and id (of type Long). What methods are generated? (Use javap to check.) Can
* you call the JavaBeans getters and setters in Scala? Should you?
* <p>
* Answer: The generated Java class as reported by javap:
* <pre>
* public class fi.kapsi.kosmik.sfti.ch05.Chapter05$Ex05Student {
* public long id();
* public void id_$eq(long);
* public java.lang.String name();
* public void name_$eq(java.lang.String);
* public long getId();
* public void setId(long);
* public java.lang.String getName();
* public void setName(java.lang.String);
* public fi.kapsi.kosmik.sfti.ch05.Chapter05$Ex05Student(long, java.lang.String);
* }
* </pre>
*/
object Ex05 {
class Student(@BeanProperty var id: Long, @BeanProperty var name: String)
}
/**
 * In the Person class of Section 5.1, “Simple Classes and Parameterless Methods,”
* on page 55, provide a primary constructor that turns negative ages to 0.
*/
object Ex06 {
class Person(private val initialAge: Int) {
var age: Int = if (initialAge >= 0) initialAge else 0
}
}
/**
* Write a class Person with a primary constructor that accepts a string containing
* a first name, a space, and a last name, such as new Person("Fred Smith"). Supply
* read-only properties firstName and lastName. Should the primary constructor
* parameter be a var, a val, or a plain parameter? Why?
* <p>
 * Answer: Use a private val because we don't want the parameter to be visible outside
 * the class or to be modifiable.
*/
object Ex07 {
class Person(private val formattedName: String) {
def firstName: String = formattedName.split(" ").head
def lastName: String = formattedName.split(" ").last
}
}
/**
* Make a class Car with read-only properties for manufacturer, model name,
* and model year, and a read-write property for the license plate. Supply four
* constructors. All require the manufacturer and model name. Optionally,
* model year and license plate can also be specified in the constructor. If not,
* the model year is set to -1 and the license plate to the empty string. Which
* constructor are you choosing as the primary constructor? Why?
*/
object Ex08 {
class Car(val manufacturer: String, val model: String, val year: Int = -1, var license: String = "") {
def this(manufacturer: String, model: String, license: String) {
// NOTE: year parameter must be included here so as to avoid ambiguity between the two constructors
this(manufacturer, model, -1, license)
}
}
}
}
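// --- Hypothetical usage sketch (added for illustration, not part of the original
// source), exercising two of the classes defined above; the object name is made up.
object Chapter05Demo extends App {
  val morning = new Chapter05.Ex03.Time(9, 30)
  val evening = new Chapter05.Ex03.Time(21, 15)
  println(morning.before(evening)) // true

  val person = new Chapter05.Ex07.Person("Fred Smith")
  println(s"${person.firstName} ${person.lastName}") // Fred Smith
}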
|
suniala/sfti-exercises
|
src/main/scala/fi/kapsi/kosmik/sfti/Chapter05.scala
|
Scala
|
apache-2.0
| 4,805 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, SqlTimeTypeInfo}
import org.apache.flink.table.api.{DataTypes, TableConfig, TableException, TableSchema}
import org.apache.flink.table.catalog.{CatalogTable, Column, ContextResolvedTable, ObjectIdentifier, ResolvedCatalogTable, ResolvedSchema, UniqueConstraint}
import org.apache.flink.table.connector.ChangelogMode
import org.apache.flink.table.connector.source.{DynamicTableSource, ScanTableSource}
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.plan.stats.{ColumnStats, TableStats}
import org.apache.flink.table.planner.calcite.{FlinkContext, FlinkContextImpl, FlinkTypeFactory, FlinkTypeSystem}
import org.apache.flink.table.planner.plan.schema.{FlinkPreparingTableBase, TableSourceTable}
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.fromTypeInfoToLogicalType
import org.apache.flink.table.types.logical.{BigIntType, DoubleType, IntType, LocalZonedTimestampType, LogicalType, TimestampKind, TimestampType, VarCharType}
import org.apache.flink.table.utils.CatalogManagerMocks
import org.apache.calcite.config.CalciteConnectionConfig
import org.apache.calcite.jdbc.CalciteSchema
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeFactory}
import org.apache.calcite.schema.Schema.TableType
import org.apache.calcite.schema.{Schema, SchemaPlus, Table}
import org.apache.calcite.sql.{SqlCall, SqlNode}
import java.util
import java.util.Collections
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
object MetadataTestUtil {
def initRootSchema(): SchemaPlus = {
val rootSchema = CalciteSchema.createRootSchema(true, false).plus()
rootSchema.add("student", createStudentTable())
rootSchema.add("emp", createEmpTable())
rootSchema.add("MyTable1", createMyTable1())
rootSchema.add("MyTable2", createMyTable2())
rootSchema.add("MyTable3", createMyTable3())
rootSchema.add("MyTable4", createMyTable4())
rootSchema.add("TemporalTable1", createTemporalTable1())
rootSchema.add("TemporalTable2", createTemporalTable2())
rootSchema.add("TemporalTable3", createTemporalTable3())
rootSchema.add("TableSourceTable1", createTableSourceTable1())
rootSchema.add("TableSourceTable2", createTableSourceTable2())
rootSchema.add("TableSourceTable3", createTableSourceTable3())
rootSchema.add("projected_table_source_table", createProjectedTableSourceTable())
rootSchema.add(
"projected_table_source_table_with_partial_pk",
createProjectedTableSourceTableWithPartialCompositePrimaryKey())
rootSchema
}
private def createStudentTable(): Table = {
val schema = new TableSchema(
Array("id", "name", "score", "age", "height", "sex", "class"),
Array(
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO))
val colStatsMap = Map[String, ColumnStats](
"id" -> new ColumnStats(50L, 0L, 8D, 8, null, 0),
"name" -> new ColumnStats(48L, 0L, 7.2, 12, null, null),
"score" -> new ColumnStats(20L, 6L, 8D, 8, 4.8D, 2.7D),
"age" -> new ColumnStats(7L, 0L, 4D, 4, 18, 12),
"height" -> new ColumnStats(35L, null, 8D, 8, 172.1D, 161.0D),
"sex" -> new ColumnStats(2L, 0L, 1D, 1, null, null))
val tableStats = new TableStats(50L, colStatsMap)
val uniqueKeys = Set(Set("id").asJava).asJava
getMetadataTable(schema, new FlinkStatistic(tableStats, uniqueKeys))
}
private def createEmpTable(): Table = {
val schema = new TableSchema(
Array("empno", "ename", "job", "mgr", "hiredate", "sal", "comm", "deptno"),
Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
SqlTimeTypeInfo.DATE,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO))
getMetadataTable(schema, new FlinkStatistic(TableStats.UNKNOWN))
}
private def createMyTable1(): Table = {
val schema = new TableSchema(
Array("a", "b", "c", "d", "e"),
Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.DATE_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(20000000L, 0L, 4D, 4, null, 0),
"b" -> new ColumnStats(800000000L, 0L, 8D, 8, 800000000L, 1L),
"c" -> new ColumnStats(1581L, 0L, 12D, 12, null, null),
"d" -> new ColumnStats(245623352L, 136231L, 88.8D, 140, null, null),
"e" -> new ColumnStats(null, 0L, 4d, 4, 100, 1)
)
val tableStats = new TableStats(800000000L, colStatsMap)
val uniqueKeys = Set(Set("b").asJava).asJava
getMetadataTable(schema, new FlinkStatistic(tableStats, uniqueKeys))
}
private def createMyTable2(): Table = {
val schema = new TableSchema(
Array("a", "b", "c", "d", "e"),
Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.DATE_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(20000000L, 0L, 4D, 4, null, null),
"b" -> new ColumnStats(2556L, 62L, 8D, 8, 5247L, 8L),
"c" -> new ColumnStats(682L, 0L, 12D, 12, null, null),
"d" -> new ColumnStats(125234L, 0L, 10.52, 16, null, null),
"e" -> new ColumnStats(null, 0L, 4d, 4, 300, 200)
)
val tableStats = new TableStats(20000000L, colStatsMap)
getMetadataTable(schema, new FlinkStatistic(tableStats))
}
private def createMyTable3(): Table = {
val schema = new TableSchema(
Array("a", "b", "c"),
Array(
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.STRING_TYPE_INFO))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(10L, 1L, 4D, 4, 5, -5),
"b" -> new ColumnStats(5L, 0L, 8D, 8, 6.1D, 0D),
"c" ->
ColumnStats.Builder.builder().setNdv(100L).setNullCount(1L).setAvgLen(16D).setMaxLen(128)
.setMax("zzzzz").setMin("").build()
)
val tableStats = new TableStats(100L, colStatsMap)
getMetadataTable(schema, new FlinkStatistic(tableStats))
}
private def createMyTable4(): Table = {
val schema = new TableSchema(
Array("a", "b", "c", "d"),
Array(BasicTypeInfo.LONG_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO,
BasicTypeInfo.INT_TYPE_INFO,
BasicTypeInfo.DOUBLE_TYPE_INFO))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(50L, 0L, 8D, 8, 50, 1),
"b" -> new ColumnStats(7L, 0L, 8D, 8, 5.1D, 0D),
"c" -> new ColumnStats(25L, 0L, 4D, 4, 46, 0),
"d" -> new ColumnStats(46L, 0L, 8D, 8, 172.1D, 161.0D)
)
val tableStats = new TableStats(50L, colStatsMap)
val uniqueKeys = Set(Set("a").asJava, Set("a", "b").asJava).asJava
getMetadataTable(schema, new FlinkStatistic(tableStats, uniqueKeys))
}
private def createTemporalTable1(): Table = {
val fieldNames = Array("a", "b", "c", "proctime", "rowtime")
val fieldTypes = Array[LogicalType](
new BigIntType(),
VarCharType.STRING_TYPE,
new IntType(),
new LocalZonedTimestampType(true, TimestampKind.PROCTIME, 3),
new TimestampType(true, TimestampKind.ROWTIME, 3))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(30L, 0L, 4D, 4, 45, 5),
"b" -> new ColumnStats(5L, 0L, 32D, 32, null, null),
"c" -> new ColumnStats(48L, 0L, 8D, 8, 50, 0)
)
val tableStats = new TableStats(50L, colStatsMap)
getMetadataTable(fieldNames, fieldTypes, new FlinkStatistic(tableStats))
}
private def createTemporalTable2(): Table = {
val fieldNames = Array("a", "b", "c", "proctime", "rowtime")
val fieldTypes = Array[LogicalType](
new BigIntType(),
VarCharType.STRING_TYPE,
new IntType(),
new LocalZonedTimestampType(true, TimestampKind.PROCTIME, 3),
new TimestampType(true, TimestampKind.ROWTIME, 3))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(50L, 0L, 8D, 8, 55, 5),
"b" -> new ColumnStats(5L, 0L, 16D, 32, null, null),
"c" -> new ColumnStats(48L, 0L, 4D, 4, 50, 0)
)
val tableStats = new TableStats(50L, colStatsMap)
val uniqueKeys = Set(Set("a").asJava).asJava
getMetadataTable(fieldNames, fieldTypes, new FlinkStatistic(tableStats, uniqueKeys))
}
private def createTemporalTable3(): Table = {
val fieldNames = Array("a", "b", "c", "proctime", "rowtime")
val fieldTypes = Array[LogicalType](
new IntType(),
new BigIntType(),
VarCharType.STRING_TYPE,
new LocalZonedTimestampType(true, TimestampKind.PROCTIME, 3),
new TimestampType(true, TimestampKind.ROWTIME, 3))
val colStatsMap = Map[String, ColumnStats](
"a" -> new ColumnStats(3740000000L, 0L, 4D, 4, null, null),
"b" -> new ColumnStats(53252726L, 1474L, 8D, 8, 100000000L, -100000000L),
"c" -> new ColumnStats(null, 0L, 18.6, 64, null, null)
)
val tableStats = new TableStats(4000000000L, colStatsMap)
getMetadataTable(fieldNames, fieldTypes, new FlinkStatistic(tableStats))
}
private val flinkContext = new FlinkContextImpl(
false,
TableConfig.getDefault,
new ModuleManager,
null,
CatalogManagerMocks.createEmptyCatalogManager,
null)
private def createProjectedTableSourceTable(): Table = {
val resolvedSchema = new ResolvedSchema(
util.Arrays.asList(
Column.physical("a", DataTypes.BIGINT().notNull()),
Column.physical("b", DataTypes.INT()),
Column.physical("c", DataTypes.VARCHAR(2147483647)),
Column.physical("d", DataTypes.BIGINT().notNull())),
Collections.emptyList(),
UniqueConstraint.primaryKey("PK_1", util.Arrays.asList("a", "d")))
val catalogTable = getCatalogTable(resolvedSchema)
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = typeFactory.buildRelNodeRowType(
Seq("a", "c", "d"),
Seq(new BigIntType(false), new DoubleType(), new VarCharType(false, 100)))
new MockTableSourceTable(
rowType,
new TestTableSource(),
true,
ContextResolvedTable.temporary(
ObjectIdentifier.of(
"default_catalog", "default_database", "projected_table_source_table"),
new ResolvedCatalogTable(catalogTable, resolvedSchema)),
flinkContext)
}
private def createTableSourceTable1(): Table = {
val catalogTable = CatalogTable.of(
org.apache.flink.table.api.Schema.newBuilder
.column("a", DataTypes.BIGINT.notNull)
.column("b", DataTypes.INT.notNull)
.column("c", DataTypes.VARCHAR(2147483647).notNull)
.column("d", DataTypes.BIGINT.notNull)
.primaryKeyNamed("PK_1", "a", "b")
.build,
null,
Collections.emptyList(),
Map(
"connector" -> "values",
"bounded" -> "true"
)
)
val resolvedSchema = new ResolvedSchema(
util.Arrays.asList(
Column.physical("a", DataTypes.BIGINT().notNull()),
Column.physical("b", DataTypes.INT().notNull()),
Column.physical("c", DataTypes.STRING().notNull()),
Column.physical("d", DataTypes.BIGINT().notNull())),
Collections.emptyList(),
UniqueConstraint.primaryKey("PK_1", util.Arrays.asList("a", "b")))
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = typeFactory.buildRelNodeRowType(
Seq("a", "b", "c", "d"),
Seq(new BigIntType(false), new IntType(), new VarCharType(false, 100), new BigIntType(false)))
new MockTableSourceTable(
rowType,
new TestTableSource(),
true,
ContextResolvedTable.temporary(
ObjectIdentifier.of("default_catalog", "default_database", "TableSourceTable1"),
new ResolvedCatalogTable(catalogTable, resolvedSchema)
),
flinkContext)
}
private def createTableSourceTable2(): Table = {
val resolvedSchema = new ResolvedSchema(
util.Arrays.asList(
Column.physical("a", DataTypes.BIGINT().notNull()),
Column.physical("b", DataTypes.INT().notNull()),
Column.physical("c", DataTypes.STRING().notNull()),
Column.physical("d", DataTypes.BIGINT().notNull())),
Collections.emptyList(),
UniqueConstraint.primaryKey("PK_1", util.Arrays.asList("b")))
val catalogTable = getCatalogTable(resolvedSchema)
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = typeFactory.buildRelNodeRowType(
Seq("a", "b", "c", "d"),
Seq(new BigIntType(false), new IntType(), new VarCharType(false, 100), new BigIntType(false)))
new MockTableSourceTable(
rowType,
new TestTableSource(),
true,
ContextResolvedTable.temporary(
ObjectIdentifier.of("default_catalog", "default_database", "TableSourceTable2"),
new ResolvedCatalogTable(catalogTable, resolvedSchema)
),
flinkContext)
}
private def createTableSourceTable3(): Table = {
val resolvedSchema = new ResolvedSchema(
util.Arrays.asList(
Column.physical("a", DataTypes.BIGINT().notNull()),
Column.physical("b", DataTypes.INT().notNull()),
Column.physical("c", DataTypes.STRING().notNull()),
Column.physical("d", DataTypes.BIGINT().notNull())),
Collections.emptyList(),
null)
val catalogTable = getCatalogTable(resolvedSchema)
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = typeFactory.buildRelNodeRowType(
Seq("a", "b", "c", "d"),
Seq(new BigIntType(false), new IntType(), new VarCharType(false, 100), new BigIntType(false)))
new MockTableSourceTable(
rowType,
new TestTableSource(),
true,
ContextResolvedTable.temporary(
ObjectIdentifier.of("default_catalog", "default_database", "TableSourceTable3"),
new ResolvedCatalogTable(catalogTable, resolvedSchema)
),
flinkContext)
}
private def createProjectedTableSourceTableWithPartialCompositePrimaryKey(): Table = {
val resolvedSchema = new ResolvedSchema(
util.Arrays.asList(
Column.physical("a", DataTypes.BIGINT().notNull()),
Column.physical("b", DataTypes.BIGINT().notNull())),
Collections.emptyList(),
UniqueConstraint.primaryKey("PK_1", util.Arrays.asList("a", "b")))
val catalogTable = getCatalogTable(resolvedSchema)
val typeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = typeFactory.buildRelNodeRowType(
Seq("a"),
Seq(new BigIntType(false)))
new MockTableSourceTable(
rowType,
new TestTableSource(),
true,
ContextResolvedTable.temporary(
ObjectIdentifier.of(
"default_catalog",
"default_database",
"projected_table_source_table_with_partial_pk"),
new ResolvedCatalogTable(catalogTable, resolvedSchema)
),
flinkContext)
}
private def getCatalogTable(resolvedSchema: ResolvedSchema) = {
CatalogTable.of(
org.apache.flink.table.api.Schema.newBuilder.fromResolvedSchema(resolvedSchema).build,
null,
Collections.emptyList(),
Map(
"connector" -> "values",
"bounded" -> "true"
)
)
}
private def getMetadataTable(
tableSchema: TableSchema,
statistic: FlinkStatistic,
producesUpdates: Boolean = false,
isAccRetract: Boolean = false): Table = {
val names = tableSchema.getFieldNames
val types = tableSchema.getFieldTypes.map(fromTypeInfoToLogicalType)
getMetadataTable(names, types, statistic)
}
private def getMetadataTable(
fieldNames: Array[String],
fieldTypes: Array[LogicalType],
statistic: FlinkStatistic): Table = {
val flinkTypeFactory = new FlinkTypeFactory(new FlinkTypeSystem)
val rowType = flinkTypeFactory.buildRelNodeRowType(fieldNames, fieldTypes)
new MockMetaTable(rowType, statistic)
}
}
/** A mock table used for metadata tests; it implements both [[Table]]
* and [[FlinkPreparingTableBase]]. */
class MockMetaTable(rowType: RelDataType, statistic: FlinkStatistic)
extends FlinkPreparingTableBase(null, rowType,
Collections.singletonList("MockMetaTable"), statistic)
with Table {
override def getRowType(typeFactory: RelDataTypeFactory): RelDataType = rowType
override def getJdbcTableType: Schema.TableType = TableType.TABLE
override def isRolledUp(column: String): Boolean = false
override def rolledUpColumnValidInsideAgg(column: String,
call: SqlCall, parent: SqlNode, config: CalciteConnectionConfig): Boolean = false
}
class TestTableSource extends ScanTableSource {
override def getChangelogMode: ChangelogMode = ChangelogMode.insertOnly()
override def getScanRuntimeProvider(
context: ScanTableSource.ScanContext): ScanTableSource.ScanRuntimeProvider = {
throw new TableException("Unsupported operation")
}
override def copy = new TestTableSource()
override def asSummaryString = "test-source"
}
class MockTableSourceTable(
rowType: RelDataType,
tableSource: DynamicTableSource,
isStreamingMode: Boolean,
contextResolvedTable: ContextResolvedTable,
flinkContext: FlinkContext)
extends TableSourceTable(
null,
rowType,
FlinkStatistic.UNKNOWN,
tableSource,
isStreamingMode,
contextResolvedTable,
flinkContext)
with Table {
override def getRowType(typeFactory: RelDataTypeFactory): RelDataType = rowType
override def getJdbcTableType: Schema.TableType = TableType.TABLE
override def isRolledUp(column: String): Boolean = false
override def rolledUpColumnValidInsideAgg(column: String,
call: SqlCall, parent: SqlNode, config: CalciteConnectionConfig): Boolean = false
}
|
apache/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/MetadataTestUtil.scala
|
Scala
|
apache-2.0
| 19,134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io.File
import java.nio.ByteBuffer
import java.nio.file.Files
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.{Future, TimeoutException}
import scala.concurrent.duration._
import scala.language.implicitConversions
import scala.reflect.ClassTag
import org.apache.commons.lang3.RandomUtils
import org.mockito.{ArgumentCaptor, ArgumentMatchers => mc}
import org.mockito.Mockito.{doAnswer, mock, never, spy, times, verify, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, PrivateMethodTester}
import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
import org.scalatest.concurrent.Eventually._
import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.executor.DataReadMethod
import org.apache.spark.internal.config
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Tests._
import org.apache.spark.memory.{MemoryMode, UnifiedMemoryManager}
import org.apache.spark.network.{BlockDataManager, BlockTransferService, TransportContext}
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer, NioManagedBuffer}
import org.apache.spark.network.client.{RpcResponseCallback, TransportClient}
import org.apache.spark.network.netty.{NettyBlockTransferService, SparkTransportConf}
import org.apache.spark.network.server.{NoOpRpcHandler, TransportServer, TransportServerBootstrap}
import org.apache.spark.network.shuffle.{BlockFetchingListener, DownloadFileManager, ExecutorDiskUtils, ExternalBlockStoreClient}
import org.apache.spark.network.shuffle.protocol.{BlockTransferMessage, RegisterExecutor}
import org.apache.spark.network.util.{MapConfigProvider, TransportConf}
import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEnv}
import org.apache.spark.scheduler.{LiveListenerBus, MapStatus, MergeStatus, SparkListenerBlockUpdated}
import org.apache.spark.scheduler.cluster.{CoarseGrainedClusterMessages, CoarseGrainedSchedulerBackend}
import org.apache.spark.security.{CryptoStreamUtils, EncryptionFunSuite}
import org.apache.spark.serializer.{JavaSerializer, KryoSerializer, SerializerManager}
import org.apache.spark.shuffle.{MigratableResolver, ShuffleBlockInfo, ShuffleBlockResolver, ShuffleManager}
import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.storage.BlockManagerMessages._
import org.apache.spark.util._
import org.apache.spark.util.io.ChunkedByteBuffer
class BlockManagerSuite extends SparkFunSuite with Matchers with BeforeAndAfterEach
with PrivateMethodTester with LocalSparkContext with ResetSystemProperties
with EncryptionFunSuite with TimeLimits with BeforeAndAfterAll {
import BlockManagerSuite._
// Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
implicit val defaultSignaler: Signaler = ThreadSignaler
var conf: SparkConf = null
val allStores = ArrayBuffer[BlockManager]()
val sortShuffleManagers = ArrayBuffer[SortShuffleManager]()
var rpcEnv: RpcEnv = null
var master: BlockManagerMaster = null
var liveListenerBus: LiveListenerBus = null
val securityMgr = new SecurityManager(new SparkConf(false))
val bcastManager = new BroadcastManager(true, new SparkConf(false))
val mapOutputTracker = new MapOutputTrackerMaster(new SparkConf(false), bcastManager, true)
val shuffleManager = new SortShuffleManager(new SparkConf(false))
// Reuse a serializer across tests to avoid creating a new thread-local buffer on each test
val serializer = new KryoSerializer(
new SparkConf(false).set(Kryo.KRYO_SERIALIZER_BUFFER_SIZE.key, "1m"))
// Implicitly convert strings to BlockIds for test clarity.
implicit def StringToBlockId(value: String): BlockId = new TestBlockId(value)
def rdd(rddId: Int, splitId: Int): RDDBlockId = RDDBlockId(rddId, splitId)
private def init(sparkConf: SparkConf): Unit = {
sparkConf
.set("spark.app.id", "test")
.set(IS_TESTING, true)
.set(MEMORY_FRACTION, 1.0)
.set(MEMORY_STORAGE_FRACTION, 0.999)
.set(SERIALIZER, "org.apache.spark.serializer.KryoSerializer")
.set(Kryo.KRYO_SERIALIZER_BUFFER_SIZE.key, "1m")
.set(STORAGE_UNROLL_MEMORY_THRESHOLD, 512L)
.set(Network.RPC_ASK_TIMEOUT, "5s")
.set(PUSH_BASED_SHUFFLE_ENABLED, true)
}
private def makeSortShuffleManager(conf: Option[SparkConf] = None): SortShuffleManager = {
val newMgr = new SortShuffleManager(conf.getOrElse(new SparkConf(false)))
sortShuffleManagers += newMgr
newMgr
}
private def makeBlockManager(
maxMem: Long,
name: String = SparkContext.DRIVER_IDENTIFIER,
master: BlockManagerMaster = this.master,
transferService: Option[BlockTransferService] = Option.empty,
testConf: Option[SparkConf] = None,
shuffleManager: ShuffleManager = shuffleManager): BlockManager = {
val bmConf = testConf.map(_.setAll(conf.getAll)).getOrElse(conf)
bmConf.set(TEST_MEMORY, maxMem)
bmConf.set(MEMORY_OFFHEAP_SIZE, maxMem)
val serializer = new KryoSerializer(bmConf)
val encryptionKey = if (bmConf.get(IO_ENCRYPTION_ENABLED)) {
Some(CryptoStreamUtils.createKey(bmConf))
} else {
None
}
val bmSecurityMgr = new SecurityManager(bmConf, encryptionKey)
val transfer = transferService
.getOrElse(new NettyBlockTransferService(conf, securityMgr, "localhost", "localhost", 0, 1))
val memManager = UnifiedMemoryManager(bmConf, numCores = 1)
val serializerManager = new SerializerManager(serializer, bmConf)
val externalShuffleClient = if (conf.get(config.SHUFFLE_SERVICE_ENABLED)) {
val transConf = SparkTransportConf.fromSparkConf(conf, "shuffle", 0)
Some(new ExternalBlockStoreClient(transConf, bmSecurityMgr,
bmSecurityMgr.isAuthenticationEnabled(), conf.get(config.SHUFFLE_REGISTRATION_TIMEOUT)))
} else {
None
}
val blockManager = new BlockManager(name, rpcEnv, master, serializerManager, bmConf,
memManager, mapOutputTracker, shuffleManager, transfer, bmSecurityMgr, externalShuffleClient)
memManager.setMemoryStore(blockManager.memoryStore)
allStores += blockManager
blockManager.initialize("app-id")
blockManager
}
// Save modified system properties so that we can restore them after tests.
val originalArch = System.getProperty("os.arch")
val originalCompressedOops = System.getProperty(TEST_USE_COMPRESSED_OOPS_KEY)
def reinitializeSizeEstimator(arch: String, useCompressedOops: String): Unit = {
def set(k: String, v: String): Unit = {
if (v == null) {
System.clearProperty(k)
} else {
System.setProperty(k, v)
}
}
set("os.arch", arch)
set(TEST_USE_COMPRESSED_OOPS_KEY, useCompressedOops)
val initialize = PrivateMethod[Unit](Symbol("initialize"))
SizeEstimator invokePrivate initialize()
}
override def beforeEach(): Unit = {
super.beforeEach()
// Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
reinitializeSizeEstimator("amd64", "true")
conf = new SparkConf(false)
init(conf)
rpcEnv = RpcEnv.create("test", conf.get(config.DRIVER_HOST_ADDRESS),
conf.get(config.DRIVER_PORT), conf, securityMgr)
conf.set(DRIVER_PORT, rpcEnv.address.port)
conf.set(DRIVER_HOST_ADDRESS, rpcEnv.address.host)
// Mock SparkContext to reduce the memory usage of tests. It's fine since the only reason we
// need to create a SparkContext is to initialize LiveListenerBus.
sc = mock(classOf[SparkContext])
when(sc.conf).thenReturn(conf)
val blockManagerInfo = new mutable.HashMap[BlockManagerId, BlockManagerInfo]()
liveListenerBus = spy(new LiveListenerBus(conf))
master = spy(new BlockManagerMaster(rpcEnv.setupEndpoint("blockmanager",
new BlockManagerMasterEndpoint(rpcEnv, true, conf,
liveListenerBus, None, blockManagerInfo, mapOutputTracker, isDriver = true)),
rpcEnv.setupEndpoint("blockmanagerHeartbeat",
new BlockManagerMasterHeartbeatEndpoint(rpcEnv, true, blockManagerInfo)), conf, true))
}
override def afterEach(): Unit = {
// Restore system properties and SizeEstimator to their original states.
reinitializeSizeEstimator(originalArch, originalCompressedOops)
try {
conf = null
allStores.foreach(_.stop())
allStores.clear()
sortShuffleManagers.foreach(_.stop())
sortShuffleManagers.clear()
rpcEnv.shutdown()
rpcEnv.awaitTermination()
rpcEnv = null
master = null
liveListenerBus = null
} finally {
super.afterEach()
}
}
override def afterAll(): Unit = {
try {
// Cleanup the reused items.
Option(bcastManager).foreach(_.stop())
Option(mapOutputTracker).foreach(_.stop())
Option(shuffleManager).foreach(_.stop())
} finally {
super.afterAll()
}
}
private def stopBlockManager(blockManager: BlockManager): Unit = {
allStores -= blockManager
blockManager.stop()
}
/**
   * Sets up driverEndpoint, executor-1 (BlockManager) and executor-2 (BlockManager) to simulate
   * a real cluster before the tests. Any request from the driver to executor-1 is answered
   * in time, whereas any request from the driver to executor-2 times out, in order to test
   * the specific handling of `TimeoutException`, which is raised on the driver side.
   *
   * When `withLost` is true, we do not register executor-2 with the driver, so it behaves
   * like a lost executor from the driver's point of view. When `withLost` is false, we
   * register executor-2 normally.
*/
private def setupBlockManagerMasterWithBlocks(withLost: Boolean): Unit = {
// set up a simple DriverEndpoint which simply adds executorIds and
// checks whether a certain executorId has been added before.
val driverEndpoint = rpcEnv.setupEndpoint(CoarseGrainedSchedulerBackend.ENDPOINT_NAME,
new RpcEndpoint {
private val executorSet = mutable.HashSet[String]()
override val rpcEnv: RpcEnv = BlockManagerSuite.this.rpcEnv
override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
case CoarseGrainedClusterMessages.RegisterExecutor(executorId, _, _, _, _, _, _, _) =>
executorSet += executorId
context.reply(true)
case CoarseGrainedClusterMessages.IsExecutorAlive(executorId) =>
context.reply(executorSet.contains(executorId))
}
}
)
def createAndRegisterBlockManager(timeout: Boolean): BlockManagerId = {
val id = if (timeout) "timeout" else "normal"
val bmRef = rpcEnv.setupEndpoint(s"bm-$id", new RpcEndpoint {
override val rpcEnv: RpcEnv = BlockManagerSuite.this.rpcEnv
private def reply[T](context: RpcCallContext, response: T): Unit = {
if (timeout) {
Thread.sleep(conf.getTimeAsMs(Network.RPC_ASK_TIMEOUT.key) + 1000)
}
context.reply(response)
}
override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
case RemoveRdd(_) => reply(context, 1)
case RemoveBroadcast(_, _) => reply(context, 1)
case RemoveShuffle(_) => reply(context, true)
}
})
val bmId = BlockManagerId(s"exec-$id", "localhost", 1234, None)
master.registerBlockManager(bmId, Array.empty, 2000, 0, bmRef)
}
// set up normal bm1
val bm1Id = createAndRegisterBlockManager(false)
// set up bm2, which intentionally takes more time than RPC_ASK_TIMEOUT to
// remove rdd/broadcast/shuffle in order to raise timeout error
val bm2Id = createAndRegisterBlockManager(true)
driverEndpoint.askSync[Boolean](CoarseGrainedClusterMessages.RegisterExecutor(
bm1Id.executorId, null, bm1Id.host, 1, Map.empty, Map.empty,
Map.empty, 0))
if (!withLost) {
driverEndpoint.askSync[Boolean](CoarseGrainedClusterMessages.RegisterExecutor(
bm2Id.executorId, null, bm1Id.host, 1, Map.empty, Map.empty, Map.empty, 0))
}
eventually(timeout(5.seconds)) {
// make sure both bm1 and bm2 are registered at driver side BlockManagerMaster
verify(master, times(2))
.registerBlockManager(mc.any(), mc.any(), mc.any(), mc.any(), mc.any())
assert(driverEndpoint.askSync[Boolean](
CoarseGrainedClusterMessages.IsExecutorAlive(bm1Id.executorId)))
assert(driverEndpoint.askSync[Boolean](
CoarseGrainedClusterMessages.IsExecutorAlive(bm2Id.executorId)) === !withLost)
}
// update RDD block info for bm1 and bm2 (Broadcast and shuffle don't report block
// locations to BlockManagerMaster)
master.updateBlockInfo(bm1Id, RDDBlockId(0, 0), StorageLevel.MEMORY_ONLY, 100, 0)
master.updateBlockInfo(bm2Id, RDDBlockId(0, 1), StorageLevel.MEMORY_ONLY, 100, 0)
}
test("SPARK-36036: make sure temporary download files are deleted") {
val store = makeBlockManager(8000, "executor")
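// Creates a temp download file, registers it for automatic cleanup, and returns only its path
// so that no strong reference to the file object escapes this method.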
def createAndRegisterTempFileForDeletion(): String = {
val transportConf = new TransportConf("test", MapConfigProvider.EMPTY)
val tempDownloadFile = store.remoteBlockTempFileManager.createTempFile(transportConf)
tempDownloadFile.openForWriting().close()
assert(new File(tempDownloadFile.path()).exists(), "The file has been created")
val registered = store.remoteBlockTempFileManager.registerTempFileToClean(tempDownloadFile)
assert(registered, "The file has been successfully registered for auto clean up")
// tempDownloadFile and the channel for writing are local to the function so the references
// are going to be eliminated on exit
tempDownloadFile.path()
}
val filePath = createAndRegisterTempFileForDeletion()
val numberOfTries = 100 // try increasing this if the test becomes flaky
val fileHasBeenDeleted = (1 to numberOfTries).exists { tryNo =>
// System.gc() is only a hint, but on the HotSpot JVM it triggers a collection unless
// -XX:+DisableExplicitGC is set
System.gc()
Thread.sleep(tryNo)
val fileStillExists = new File(filePath).exists()
!fileStillExists
}
assert(fileHasBeenDeleted,
s"The file was supposed to be auto deleted (GC hinted $numberOfTries times)")
}
test("SPARK-32091: count failures from active executors when remove rdd/broadcast/shuffle") {
setupBlockManagerMasterWithBlocks(false)
// these calls fail because bm2 times out and, since it is still registered as alive, its
// failure is counted
assert(intercept[Exception](master.removeRdd(0, true))
.getCause.isInstanceOf[TimeoutException])
assert(intercept[Exception](master.removeBroadcast(0, true, true))
.getCause.isInstanceOf[TimeoutException])
assert(intercept[Exception](master.removeShuffle(0, true))
.getCause.isInstanceOf[TimeoutException])
}
test("SPARK-32091: ignore failures from lost executors when remove rdd/broadcast/shuffle") {
setupBlockManagerMasterWithBlocks(true)
// these calls succeed because bm1 removes the rdd/broadcast/shuffle successfully and bm2's
// timeout is ignored since it is considered lost
master.removeRdd(0, true)
master.removeBroadcast(0, true, true)
master.removeShuffle(0, true)
}
test("StorageLevel object caching") {
val level1 = StorageLevel(false, false, false, 3)
// this should return the same object as level1
val level2 = StorageLevel(false, false, false, 3)
// this should return a different object
val level3 = StorageLevel(false, false, false, 2)
assert(level2 === level1, "level2 is not same as level1")
assert(level2.eq(level1), "level2 is not the same object as level1")
assert(level3 != level1, "level3 is same as level1")
val bytes1 = Utils.serialize(level1)
val level1_ = Utils.deserialize[StorageLevel](bytes1)
val bytes2 = Utils.serialize(level2)
val level2_ = Utils.deserialize[StorageLevel](bytes2)
assert(level1_ === level1, "Deserialized level1 not same as original level1")
assert(level1_.eq(level1), "Deserialized level1 not the same object as original level1")
assert(level2_ === level2, "Deserialized level2 not same as original level2")
assert(level2_.eq(level1), "Deserialized level2 not the same object as original level1")
}
test("BlockManagerId object caching") {
val id1 = BlockManagerId("e1", "XXX", 1)
val id2 = BlockManagerId("e1", "XXX", 1) // this should return the same object as id1
val id3 = BlockManagerId("e1", "XXX", 2) // this should return a different object
assert(id2 === id1, "id2 is not same as id1")
assert(id2.eq(id1), "id2 is not the same object as id1")
assert(id3 != id1, "id3 is same as id1")
val bytes1 = Utils.serialize(id1)
val id1_ = Utils.deserialize[BlockManagerId](bytes1)
val bytes2 = Utils.serialize(id2)
val id2_ = Utils.deserialize[BlockManagerId](bytes2)
assert(id1_ === id1, "Deserialized id1 is not same as original id1")
assert(id1_.eq(id1), "Deserialized id1 is not the same object as original id1")
assert(id2_ === id2, "Deserialized id2 is not same as original id2")
assert(id2_.eq(id1), "Deserialized id2 is not the same object as original id1")
}
test("BlockManagerId.isDriver() with DRIVER_IDENTIFIER (SPARK-27090)") {
assert(BlockManagerId(SparkContext.DRIVER_IDENTIFIER, "XXX", 1).isDriver)
assert(!BlockManagerId("notADriverIdentifier", "XXX", 1).isDriver)
}
test("master + 1 manager interaction") {
val store = makeBlockManager(20000)
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
// Putting a1, a2 and a3 in memory and telling master only about a1 and a2
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
store.putSingle("a2", a2, StorageLevel.MEMORY_ONLY)
store.putSingle("a3", a3, StorageLevel.MEMORY_ONLY, tellMaster = false)
// Checking whether blocks are in memory
assert(store.getSingleAndReleaseLock("a1").isDefined, "a1 was not in store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3").isDefined, "a3 was not in store")
// Checking whether master knows about the blocks or not
assert(master.getLocations("a1").size > 0, "master was not told about a1")
assert(master.getLocations("a2").size > 0, "master was not told about a2")
assert(master.getLocations("a3").size === 0, "master was told about a3")
// Drop a1 and a2 from memory; this should be reported back to the master
store.dropFromMemoryIfExists("a1", () => null: Either[Array[Any], ChunkedByteBuffer])
store.dropFromMemoryIfExists("a2", () => null: Either[Array[Any], ChunkedByteBuffer])
assert(store.getSingleAndReleaseLock("a1") === None, "a1 not removed from store")
assert(store.getSingleAndReleaseLock("a2") === None, "a2 not removed from store")
assert(master.getLocations("a1").size === 0, "master did not remove a1")
assert(master.getLocations("a2").size === 0, "master did not remove a2")
}
test("master + 2 managers interaction") {
val store = makeBlockManager(2000, "exec1")
val store2 = makeBlockManager(2000, "exec2")
val peers = master.getPeers(store.blockManagerId)
assert(peers.size === 1, "master did not return the other manager as a peer")
assert(peers.head === store2.blockManagerId, "peer returned by master is not the other manager")
val a1 = new Array[Byte](400)
val a2 = new Array[Byte](400)
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY_2)
store2.putSingle("a2", a2, StorageLevel.MEMORY_ONLY_2)
assert(master.getLocations("a1").size === 2, "master did not report 2 locations for a1")
assert(master.getLocations("a2").size === 2, "master did not report 2 locations for a2")
}
test("removing block") {
val store = makeBlockManager(20000)
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
// Putting a1, a2 and a3 in memory and telling master only about a1 and a2
store.putSingle("a1-to-remove", a1, StorageLevel.MEMORY_ONLY)
store.putSingle("a2-to-remove", a2, StorageLevel.MEMORY_ONLY)
store.putSingle("a3-to-remove", a3, StorageLevel.MEMORY_ONLY, tellMaster = false)
// Checking whether blocks are in memory and memory size
val memStatus = master.getMemoryStatus.head._2
assert(memStatus._1 == 40000L, "total memory " + memStatus._1 + " should equal 40000")
assert(memStatus._2 <= 32000L, "remaining memory " + memStatus._2 + " should be <= 32000")
assert(store.getSingleAndReleaseLock("a1-to-remove").isDefined, "a1 was not in store")
assert(store.getSingleAndReleaseLock("a2-to-remove").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3-to-remove").isDefined, "a3 was not in store")
// Checking whether master knows about the blocks or not
assert(master.getLocations("a1-to-remove").size > 0, "master was not told about a1")
assert(master.getLocations("a2-to-remove").size > 0, "master was not told about a2")
assert(master.getLocations("a3-to-remove").size === 0, "master was told about a3")
// Remove a1, a2 and a3. Removing a3 should be a no-op since the master was never told about it.
master.removeBlock("a1-to-remove")
master.removeBlock("a2-to-remove")
master.removeBlock("a3-to-remove")
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(!store.hasLocalBlock("a1-to-remove"))
master.getLocations("a1-to-remove") should have size 0
assertUpdateBlockInfoReportedForRemovingBlock(store, "a1-to-remove",
removedFromMemory = true, removedFromDisk = false)
}
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(!store.hasLocalBlock("a2-to-remove"))
master.getLocations("a2-to-remove") should have size 0
assertUpdateBlockInfoReportedForRemovingBlock(store, "a2-to-remove",
removedFromMemory = true, removedFromDisk = false)
}
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(store.hasLocalBlock("a3-to-remove"))
master.getLocations("a3-to-remove") should have size 0
assertUpdateBlockInfoNotReported(store, "a3-to-remove")
}
eventually(timeout(1.second), interval(10.milliseconds)) {
val memStatus = master.getMemoryStatus.head._2
memStatus._1 should equal (40000L)
memStatus._2 should equal (40000L)
}
}
test("removing rdd") {
val store = makeBlockManager(20000)
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
// Putting a1, a2 and a3 in memory.
store.putSingle(rdd(0, 0), a1, StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 1), a2, StorageLevel.MEMORY_ONLY)
store.putSingle("nonrddblock", a3, StorageLevel.MEMORY_ONLY)
master.removeRdd(0, blocking = false)
eventually(timeout(1.second), interval(10.milliseconds)) {
store.getSingleAndReleaseLock(rdd(0, 0)) should be (None)
master.getLocations(rdd(0, 0)) should have size 0
}
eventually(timeout(1.second), interval(10.milliseconds)) {
store.getSingleAndReleaseLock(rdd(0, 1)) should be (None)
master.getLocations(rdd(0, 1)) should have size 0
}
eventually(timeout(1.second), interval(10.milliseconds)) {
store.getSingleAndReleaseLock("nonrddblock") should not be (None)
master.getLocations("nonrddblock") should have size (1)
}
store.putSingle(rdd(0, 0), a1, StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 1), a2, StorageLevel.MEMORY_ONLY)
master.removeRdd(0, blocking = true)
store.getSingleAndReleaseLock(rdd(0, 0)) should be (None)
master.getLocations(rdd(0, 0)) should have size 0
store.getSingleAndReleaseLock(rdd(0, 1)) should be (None)
master.getLocations(rdd(0, 1)) should have size 0
}
test("removing broadcast") {
val store = makeBlockManager(2000)
val driverStore = store
val executorStore = makeBlockManager(2000, "executor")
val a1 = new Array[Byte](400)
val a2 = new Array[Byte](400)
val a3 = new Array[Byte](400)
val a4 = new Array[Byte](400)
val broadcast0BlockId = BroadcastBlockId(0)
val broadcast1BlockId = BroadcastBlockId(1)
val broadcast2BlockId = BroadcastBlockId(2)
val broadcast2BlockId2 = BroadcastBlockId(2, "_")
// insert broadcast blocks in both the stores
Seq(driverStore, executorStore).foreach { case s =>
s.putSingle(broadcast0BlockId, a1, StorageLevel.DISK_ONLY)
s.putSingle(broadcast1BlockId, a2, StorageLevel.DISK_ONLY)
s.putSingle(broadcast2BlockId, a3, StorageLevel.DISK_ONLY)
s.putSingle(broadcast2BlockId2, a4, StorageLevel.DISK_ONLY)
}
// verify whether the blocks exist in both the stores
Seq(driverStore, executorStore).foreach { case s =>
assert(s.hasLocalBlock(broadcast0BlockId))
assert(s.hasLocalBlock(broadcast1BlockId))
assert(s.hasLocalBlock(broadcast2BlockId))
assert(s.hasLocalBlock(broadcast2BlockId2))
}
// remove broadcast 0 block only from executors
master.removeBroadcast(0, removeFromMaster = false, blocking = true)
// only broadcast 0 block should be removed from the executor store
assert(!executorStore.hasLocalBlock(broadcast0BlockId))
assert(executorStore.hasLocalBlock(broadcast1BlockId))
assert(executorStore.hasLocalBlock(broadcast2BlockId))
assertUpdateBlockInfoReportedForRemovingBlock(executorStore, broadcast0BlockId,
removedFromMemory = false, removedFromDisk = true)
// nothing should be removed from the driver store
assert(driverStore.hasLocalBlock(broadcast0BlockId))
assert(driverStore.hasLocalBlock(broadcast1BlockId))
assert(driverStore.hasLocalBlock(broadcast2BlockId))
assertUpdateBlockInfoNotReported(driverStore, broadcast0BlockId)
// remove broadcast 0 block from the driver as well
master.removeBroadcast(0, removeFromMaster = true, blocking = true)
assert(!driverStore.hasLocalBlock(broadcast0BlockId))
assert(driverStore.hasLocalBlock(broadcast1BlockId))
assertUpdateBlockInfoReportedForRemovingBlock(driverStore, broadcast0BlockId,
removedFromMemory = false, removedFromDisk = true)
// remove broadcast 1 block from both the stores asynchronously
// and verify all broadcast 1 blocks have been removed
master.removeBroadcast(1, removeFromMaster = true, blocking = false)
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(!driverStore.hasLocalBlock(broadcast1BlockId))
assert(!executorStore.hasLocalBlock(broadcast1BlockId))
assertUpdateBlockInfoReportedForRemovingBlock(driverStore, broadcast1BlockId,
removedFromMemory = false, removedFromDisk = true)
assertUpdateBlockInfoReportedForRemovingBlock(executorStore, broadcast1BlockId,
removedFromMemory = false, removedFromDisk = true)
}
// remove broadcast 2 from both the stores asynchronously
// and verify all broadcast 2 blocks have been removed
master.removeBroadcast(2, removeFromMaster = true, blocking = false)
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(!driverStore.hasLocalBlock(broadcast2BlockId))
assert(!driverStore.hasLocalBlock(broadcast2BlockId2))
assert(!executorStore.hasLocalBlock(broadcast2BlockId))
assert(!executorStore.hasLocalBlock(broadcast2BlockId2))
assertUpdateBlockInfoReportedForRemovingBlock(driverStore, broadcast2BlockId,
removedFromMemory = false, removedFromDisk = true)
assertUpdateBlockInfoReportedForRemovingBlock(driverStore, broadcast2BlockId2,
removedFromMemory = false, removedFromDisk = true)
assertUpdateBlockInfoReportedForRemovingBlock(executorStore, broadcast2BlockId,
removedFromMemory = false, removedFromDisk = true)
assertUpdateBlockInfoReportedForRemovingBlock(executorStore, broadcast2BlockId2,
removedFromMemory = false, removedFromDisk = true)
}
executorStore.stop()
driverStore.stop()
}
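// Verifies that the store reported the removal of `blockId` to the master via updateBlockInfo
// with StorageLevel.NONE, and that the reported memory/disk sizes match where the block was
// actually removed from.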
private def assertUpdateBlockInfoReportedForRemovingBlock(
store: BlockManager,
blockId: BlockId,
removedFromMemory: Boolean,
removedFromDisk: Boolean): Unit = {
def assertSizeReported(captor: ArgumentCaptor[Long], expectRemoved: Boolean): Unit = {
assert(captor.getAllValues().size() === 1)
if (expectRemoved) {
assert(captor.getValue() > 0)
} else {
assert(captor.getValue() === 0)
}
}
val memSizeCaptor = ArgumentCaptor.forClass(classOf[Long]).asInstanceOf[ArgumentCaptor[Long]]
val diskSizeCaptor = ArgumentCaptor.forClass(classOf[Long]).asInstanceOf[ArgumentCaptor[Long]]
verify(master).updateBlockInfo(mc.eq(store.blockManagerId), mc.eq(blockId),
mc.eq(StorageLevel.NONE), memSizeCaptor.capture(), diskSizeCaptor.capture())
assertSizeReported(memSizeCaptor, removedFromMemory)
assertSizeReported(diskSizeCaptor, removedFromDisk)
}
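// Verifies that no updateBlockInfo with StorageLevel.NONE was sent to the master for `blockId`.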
private def assertUpdateBlockInfoNotReported(store: BlockManager, blockId: BlockId): Unit = {
verify(master, never()).updateBlockInfo(mc.eq(store.blockManagerId), mc.eq(blockId),
mc.eq(StorageLevel.NONE), mc.anyInt(), mc.anyInt())
}
test("reregistration on heart beat") {
val store = makeBlockManager(2000)
val a1 = new Array[Byte](400)
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
assert(store.getSingleAndReleaseLock("a1").isDefined, "a1 was not in store")
assert(master.getLocations("a1").size > 0, "master was not told about a1")
master.removeExecutor(store.blockManagerId.executorId)
assert(master.getLocations("a1").size == 0, "a1 was not removed from master")
val reregister = !master.driverHeartbeatEndPoint.askSync[Boolean](
BlockManagerHeartbeat(store.blockManagerId))
assert(reregister)
}
test("reregistration on block update") {
val store = makeBlockManager(2000)
val a1 = new Array[Byte](400)
val a2 = new Array[Byte](400)
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
assert(master.getLocations("a1").size > 0, "master was not told about a1")
master.removeExecutor(store.blockManagerId.executorId)
assert(master.getLocations("a1").size == 0, "a1 was not removed from master")
store.putSingle("a2", a2, StorageLevel.MEMORY_ONLY)
store.waitForAsyncReregister()
assert(master.getLocations("a1").size > 0, "a1 was not reregistered with master")
assert(master.getLocations("a2").size > 0, "master was not told about a2")
}
test("reregistration doesn't dead lock") {
val store = makeBlockManager(2000)
val a1 = new Array[Byte](400)
val a2 = List(new Array[Byte](400))
// try many times to trigger any deadlocks
for (i <- 1 to 100) {
master.removeExecutor(store.blockManagerId.executorId)
val t1 = new Thread {
override def run(): Unit = {
store.putIterator(
"a2", a2.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
}
val t2 = new Thread {
override def run(): Unit = {
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY)
}
}
val t3 = new Thread {
override def run(): Unit = {
store.reregister()
}
}
t1.start()
t2.start()
t3.start()
t1.join()
t2.join()
t3.join()
store.dropFromMemoryIfExists("a1", () => null: Either[Array[Any], ChunkedByteBuffer])
store.dropFromMemoryIfExists("a2", () => null: Either[Array[Any], ChunkedByteBuffer])
store.waitForAsyncReregister()
}
}
test("correct BlockResult returned from get() calls") {
val store = makeBlockManager(12000)
val list1 = List(new Array[Byte](2000), new Array[Byte](2000))
val list2 = List(new Array[Byte](500), new Array[Byte](1000), new Array[Byte](1500))
val list1SizeEstimate = SizeEstimator.estimate(list1.iterator.toArray)
val list2SizeEstimate = SizeEstimator.estimate(list2.iterator.toArray)
store.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator(
"list2memory", list2.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator(
"list2disk", list2.iterator, StorageLevel.DISK_ONLY, tellMaster = true)
val list1Get = store.get("list1")
assert(list1Get.isDefined, "list1 expected to be in store")
assert(list1Get.get.data.size === 2)
assert(list1Get.get.bytes === list1SizeEstimate)
assert(list1Get.get.readMethod === DataReadMethod.Memory)
val list2MemoryGet = store.get("list2memory")
assert(list2MemoryGet.isDefined, "list2memory expected to be in store")
assert(list2MemoryGet.get.data.size === 3)
assert(list2MemoryGet.get.bytes === list2SizeEstimate)
assert(list2MemoryGet.get.readMethod === DataReadMethod.Memory)
val list2DiskGet = store.get("list2disk")
assert(list2DiskGet.isDefined, "list2disk expected to be in store")
assert(list2DiskGet.get.data.size === 3)
// We don't know the exact size of the data on disk, but it should certainly be > 0.
assert(list2DiskGet.get.bytes > 0)
assert(list2DiskGet.get.readMethod === DataReadMethod.Disk)
}
test("optimize a location order of blocks without topology information") {
val localHost = "localhost"
val otherHost = "otherHost"
val bmMaster = mock(classOf[BlockManagerMaster])
val bmId1 = BlockManagerId("id1", localHost, 1)
val bmId2 = BlockManagerId("id2", localHost, 2)
val bmId3 = BlockManagerId("id3", otherHost, 3)
when(bmMaster.getLocations(mc.any[BlockId])).thenReturn(Seq(bmId1, bmId2, bmId3))
val blockManager = makeBlockManager(128, "exec", bmMaster)
val sortLocations = PrivateMethod[Seq[BlockManagerId]](Symbol("sortLocations"))
val locations = blockManager invokePrivate sortLocations(bmMaster.getLocations("test"))
assert(locations.map(_.host) === Seq(localHost, localHost, otherHost))
}
test("optimize a location order of blocks with topology information") {
val localHost = "localhost"
val otherHost = "otherHost"
val localRack = "localRack"
val otherRack = "otherRack"
val bmMaster = mock(classOf[BlockManagerMaster])
val bmId1 = BlockManagerId("id1", localHost, 1, Some(localRack))
val bmId2 = BlockManagerId("id2", localHost, 2, Some(localRack))
val bmId3 = BlockManagerId("id3", otherHost, 3, Some(otherRack))
val bmId4 = BlockManagerId("id4", otherHost, 4, Some(otherRack))
val bmId5 = BlockManagerId("id5", otherHost, 5, Some(localRack))
when(bmMaster.getLocations(mc.any[BlockId]))
.thenReturn(Seq(bmId1, bmId2, bmId5, bmId3, bmId4))
val blockManager = makeBlockManager(128, "exec", bmMaster)
blockManager.blockManagerId =
BlockManagerId(SparkContext.DRIVER_IDENTIFIER, localHost, 1, Some(localRack))
val sortLocations = PrivateMethod[Seq[BlockManagerId]](Symbol("sortLocations"))
val locations = blockManager invokePrivate sortLocations(bmMaster.getLocations("test"))
assert(locations.map(_.host) === Seq(localHost, localHost, otherHost, otherHost, otherHost))
assert(locations.flatMap(_.topologyInfo)
=== Seq(localRack, localRack, localRack, otherRack, otherRack))
}
test("SPARK-9591: getRemoteBytes from another location when Exception throw") {
conf.set("spark.shuffle.io.maxRetries", "0")
val store = makeBlockManager(8000, "executor1")
val store2 = makeBlockManager(8000, "executor2")
val store3 = makeBlockManager(8000, "executor3")
val list1 = List(new Array[Byte](4000))
store2.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store3.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(store.getRemoteBytes("list1").isDefined, "list1Get expected to be fetched")
stopBlockManager(store2)
assert(store.getRemoteBytes("list1").isDefined, "list1Get expected to be fetched")
stopBlockManager(store3)
// Should return None instead of throwing an exception:
assert(store.getRemoteBytes("list1").isEmpty)
}
Seq(
StorageLevel(useDisk = true, useMemory = false, deserialized = false),
StorageLevel(useDisk = true, useMemory = false, deserialized = true),
StorageLevel(useDisk = true, useMemory = false, deserialized = true, replication = 2)
).foreach { storageLevel =>
test(s"SPARK-27622: avoid the network when block requested from same host, $storageLevel") {
conf.set("spark.shuffle.io.maxRetries", "0")
val sameHostBm = makeBlockManager(8000, "sameHost", master)
val otherHostTransferSrv = spy(sameHostBm.blockTransferService)
doAnswer { _ =>
"otherHost"
}.when(otherHostTransferSrv).hostName
val otherHostBm = makeBlockManager(8000, "otherHost", master, Some(otherHostTransferSrv))
// This test always uses cleanBm to get the block. With replication the block is put via
// otherHostBm, so the direct disk read uses the local disk of sameHostBm, where the block is
// replicated to. Without replication the block must be put via sameHostBm directly.
val bmToPutBlock = if (storageLevel.replication > 1) otherHostBm else sameHostBm
val array = Array.fill(16)(Byte.MinValue to Byte.MaxValue).flatten
val blockId = "list"
bmToPutBlock.putIterator(blockId, List(array).iterator, storageLevel, tellMaster = true)
val sameHostTransferSrv = spy(sameHostBm.blockTransferService)
doAnswer { _ =>
fail("Fetching over network is not expected when the block is requested from same host")
}.when(sameHostTransferSrv).fetchBlockSync(mc.any(), mc.any(), mc.any(), mc.any(), mc.any())
val cleanBm = makeBlockManager(8000, "clean", master, Some(sameHostTransferSrv))
// check getRemoteBytes
val bytesViaStore1 = cleanBm.getRemoteBytes(blockId)
assert(bytesViaStore1.isDefined)
val expectedContent = sameHostBm.getLocalBlockData(blockId).nioByteBuffer().array()
assert(bytesViaStore1.get.toArray === expectedContent)
// check getRemoteValues
val valueViaStore1 = cleanBm.getRemoteValues[List.type](blockId)
assert(valueViaStore1.isDefined)
assert(valueViaStore1.get.data.toList.head === array)
}
}
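// Generates a test where the file backing the block on the same-host executor's disk is deleted
// right after the direct local-disk read, so consuming it fails and the caller falls back to the
// network fetch (served here by mockTransferService).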
private def testWithFileDelAfterLocalDiskRead(level: StorageLevel, getValueOrBytes: Boolean) = {
val testedFunc = if (getValueOrBytes) "getRemoteValue()" else "getRemoteBytes()"
val testNameSuffix = s"$level, $testedFunc"
test(s"SPARK-27622: as file is removed fall back to network fetch, $testNameSuffix") {
conf.set("spark.shuffle.io.maxRetries", "0")
// tracks whether the local disk of the remote executor on the same host was tried
var sameHostExecutorTried: Boolean = false
val store2 = makeBlockManager(8000, "executor2", this.master,
Some(new MockBlockTransferService(0)))
val blockId = "list"
val array = Array.fill(16)(Byte.MinValue to Byte.MaxValue).flatten
store2.putIterator(blockId, List(array).iterator, level, true)
val expectedBlockData = store2.getLocalBytes(blockId)
assert(expectedBlockData.isDefined)
val expectedByteBuffer = expectedBlockData.get.toByteBuffer()
val mockTransferService = new MockBlockTransferService(0) {
override def fetchBlockSync(
host: String,
port: Int,
execId: String,
blockId: String,
tempFileManager: DownloadFileManager): ManagedBuffer = {
assert(sameHostExecutorTried, "the local disk of the remote executor (running on the " +
  "same host) is expected to be tried before falling back to the network")
new NioManagedBuffer(expectedByteBuffer)
}
}
val store1 = makeBlockManager(8000, "executor1", this.master, Some(mockTransferService))
val spiedStore1 = spy(store1)
doAnswer { inv =>
val blockId = inv.getArguments()(0).asInstanceOf[BlockId]
val localDirs = inv.getArguments()(1).asInstanceOf[Array[String]]
val blockSize = inv.getArguments()(2).asInstanceOf[Long]
val res = store1.readDiskBlockFromSameHostExecutor(blockId, localDirs, blockSize)
assert(res.isDefined)
val file = ExecutorDiskUtils.getFile(localDirs, store1.subDirsPerLocalDir, blockId.name)
// delete the file behind the blockId
assert(file.delete())
sameHostExecutorTried = true
res
}.when(spiedStore1).readDiskBlockFromSameHostExecutor(mc.any(), mc.any(), mc.any())
if (getValueOrBytes) {
val valuesViaStore1 = spiedStore1.getRemoteValues(blockId)
assert(sameHostExecutorTried)
assert(valuesViaStore1.isDefined)
assert(valuesViaStore1.get.data.toList.head === array)
} else {
val bytesViaStore1 = spiedStore1.getRemoteBytes(blockId)
assert(sameHostExecutorTried)
assert(bytesViaStore1.isDefined)
assert(bytesViaStore1.get.toByteBuffer === expectedByteBuffer)
}
}
}
Seq(
StorageLevel(useDisk = true, useMemory = false, deserialized = false),
StorageLevel(useDisk = true, useMemory = false, deserialized = true)
).foreach { storageLevel =>
Seq(true, false).foreach { valueOrBytes =>
testWithFileDelAfterLocalDiskRead(storageLevel, valueOrBytes)
}
}
test("SPARK-14252: getOrElseUpdate should still read from remote storage") {
val store = makeBlockManager(8000, "executor1")
val store2 = makeBlockManager(8000, "executor2")
val list1 = List(new Array[Byte](4000))
store2.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(store.getOrElseUpdate(
"list1",
StorageLevel.MEMORY_ONLY,
ClassTag.Any,
() => fail("attempted to compute locally")).isLeft)
}
test("in-memory LRU storage") {
testInMemoryLRUStorage(StorageLevel.MEMORY_ONLY)
}
test("in-memory LRU storage with serialization") {
testInMemoryLRUStorage(StorageLevel.MEMORY_ONLY_SER)
}
test("in-memory LRU storage with off-heap") {
testInMemoryLRUStorage(StorageLevel(
useDisk = false,
useMemory = true,
useOffHeap = true,
deserialized = false, replication = 1))
}
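// The three 4000-byte blocks do not all fit in the 12000-byte store, so putting a3 evicts a1;
// after a2 is accessed again, re-inserting a1 evicts a3 as the least recently used block.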
private def testInMemoryLRUStorage(storageLevel: StorageLevel): Unit = {
val store = makeBlockManager(12000)
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
store.putSingle("a1", a1, storageLevel)
store.putSingle("a2", a2, storageLevel)
store.putSingle("a3", a3, storageLevel)
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3").isDefined, "a3 was not in store")
assert(store.getSingleAndReleaseLock("a1") === None, "a1 was in store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
// At this point a2 was accessed last, so LRU will get rid of a3 when a1 is re-inserted
store.putSingle("a1", a1, storageLevel)
assert(store.getSingleAndReleaseLock("a1").isDefined, "a1 was not in store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3") === None, "a3 was in store")
}
test("in-memory LRU for partitions of same RDD") {
val store = makeBlockManager(12000)
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
store.putSingle(rdd(0, 1), a1, StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 2), a2, StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 3), a3, StorageLevel.MEMORY_ONLY)
// Even though we accessed rdd_0_3 last, it should not have replaced partitions 1 and 2
// from the same RDD
assert(store.getSingleAndReleaseLock(rdd(0, 3)) === None, "rdd_0_3 was in store")
assert(store.getSingleAndReleaseLock(rdd(0, 2)).isDefined, "rdd_0_2 was not in store")
assert(store.getSingleAndReleaseLock(rdd(0, 1)).isDefined, "rdd_0_1 was not in store")
// Check that rdd_0_3 doesn't replace them even after further accesses
assert(store.getSingleAndReleaseLock(rdd(0, 3)) === None, "rdd_0_3 was in store")
assert(store.getSingleAndReleaseLock(rdd(0, 3)) === None, "rdd_0_3 was in store")
assert(store.getSingleAndReleaseLock(rdd(0, 3)) === None, "rdd_0_3 was in store")
}
test("in-memory LRU for partitions of multiple RDDs") {
val store = makeBlockManager(12000)
store.putSingle(rdd(0, 1), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 2), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(1, 1), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
// At this point rdd_1_1 should've replaced rdd_0_1
assert(store.memoryStore.contains(rdd(1, 1)), "rdd_1_1 was not in store")
assert(!store.memoryStore.contains(rdd(0, 1)), "rdd_0_1 was in store")
assert(store.memoryStore.contains(rdd(0, 2)), "rdd_0_2 was not in store")
// Do a get() on rdd_0_2 so that it is the most recently used item
assert(store.getSingleAndReleaseLock(rdd(0, 2)).isDefined, "rdd_0_2 was not in store")
// Put in more partitions from RDD 0; they should replace rdd_1_1
store.putSingle(rdd(0, 3), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(0, 4), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
// Now rdd_1_1 should be dropped to add rdd_0_3, but then rdd_0_2 should *not* be dropped
// when we try to add rdd_0_4.
assert(!store.memoryStore.contains(rdd(1, 1)), "rdd_1_1 was in store")
assert(!store.memoryStore.contains(rdd(0, 1)), "rdd_0_1 was in store")
assert(!store.memoryStore.contains(rdd(0, 4)), "rdd_0_4 was in store")
assert(store.memoryStore.contains(rdd(0, 2)), "rdd_0_2 was not in store")
assert(store.memoryStore.contains(rdd(0, 3)), "rdd_0_3 was not in store")
}
encryptionTest("on-disk storage") { _conf =>
val store = makeBlockManager(1200, testConf = Some(_conf))
val a1 = new Array[Byte](400)
val a2 = new Array[Byte](400)
val a3 = new Array[Byte](400)
store.putSingle("a1", a1, StorageLevel.DISK_ONLY)
store.putSingle("a2", a2, StorageLevel.DISK_ONLY)
store.putSingle("a3", a3, StorageLevel.DISK_ONLY)
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was in store")
assert(store.getSingleAndReleaseLock("a3").isDefined, "a3 was in store")
assert(store.getSingleAndReleaseLock("a1").isDefined, "a1 was in store")
}
encryptionTest("disk and memory storage") { _conf =>
testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK, getAsBytes = false, testConf = _conf)
}
encryptionTest("disk and memory storage with getLocalBytes") { _conf =>
testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK, getAsBytes = true, testConf = _conf)
}
encryptionTest("disk and memory storage with serialization") { _conf =>
testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK_SER, getAsBytes = false, testConf = _conf)
}
encryptionTest("disk and memory storage with serialization and getLocalBytes") { _conf =>
testDiskAndMemoryStorage(StorageLevel.MEMORY_AND_DISK_SER, getAsBytes = true, testConf = _conf)
}
encryptionTest("disk and off-heap memory storage") { _conf =>
testDiskAndMemoryStorage(StorageLevel.OFF_HEAP, getAsBytes = false, testConf = _conf)
}
encryptionTest("disk and off-heap memory storage with getLocalBytes") { _conf =>
testDiskAndMemoryStorage(StorageLevel.OFF_HEAP, getAsBytes = true, testConf = _conf)
}
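// Exercises memory-and-disk storage levels: a1 is evicted from memory to disk by the later puts,
// and reading it back re-caches it in memory unless the level stores deserialized values and the
// block was read back as bytes.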
def testDiskAndMemoryStorage(
storageLevel: StorageLevel,
getAsBytes: Boolean,
testConf: SparkConf): Unit = {
val store = makeBlockManager(12000, testConf = Some(testConf))
val accessMethod =
if (getAsBytes) store.getLocalBytesAndReleaseLock else store.getSingleAndReleaseLock
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
store.putSingle("a1", a1, storageLevel)
store.putSingle("a2", a2, storageLevel)
store.putSingle("a3", a3, storageLevel)
assert(accessMethod("a2").isDefined, "a2 was not in store")
assert(accessMethod("a3").isDefined, "a3 was not in store")
assert(accessMethod("a1").isDefined, "a1 was not in store")
val dataShouldHaveBeenCachedBackIntoMemory = {
if (storageLevel.deserialized) {
!getAsBytes
} else {
// If the block's storage level is serialized, then always cache the bytes in memory, even
// if the caller requested values.
true
}
}
if (dataShouldHaveBeenCachedBackIntoMemory) {
assert(store.memoryStore.contains("a1"), "a1 was not in memory store")
} else {
assert(!store.memoryStore.contains("a1"), "a1 was in memory store")
}
}
encryptionTest("LRU with mixed storage levels") { _conf =>
val store = makeBlockManager(12000, testConf = Some(_conf))
val a1 = new Array[Byte](4000)
val a2 = new Array[Byte](4000)
val a3 = new Array[Byte](4000)
val a4 = new Array[Byte](4000)
// First store a1 and a2, both in memory, and a3, on disk only
store.putSingle("a1", a1, StorageLevel.MEMORY_ONLY_SER)
store.putSingle("a2", a2, StorageLevel.MEMORY_ONLY_SER)
store.putSingle("a3", a3, StorageLevel.DISK_ONLY)
// At this point LRU should not kick in because a3 is only on disk
assert(store.getSingleAndReleaseLock("a1").isDefined, "a1 was not in store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3").isDefined, "a3 was not in store")
// Now let's add in a4, which uses both disk and memory; a1 should drop out
store.putSingle("a4", a4, StorageLevel.MEMORY_AND_DISK_SER)
assert(store.getSingleAndReleaseLock("a1") == None, "a1 was in store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
assert(store.getSingleAndReleaseLock("a3").isDefined, "a3 was not in store")
assert(store.getSingleAndReleaseLock("a4").isDefined, "a4 was not in store")
}
encryptionTest("in-memory LRU with streams") { _conf =>
val store = makeBlockManager(12000, testConf = Some(_conf))
val list1 = List(new Array[Byte](2000), new Array[Byte](2000))
val list2 = List(new Array[Byte](2000), new Array[Byte](2000))
val list3 = List(new Array[Byte](2000), new Array[Byte](2000))
store.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator(
"list2", list2.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator(
"list3", list3.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
assert(store.getAndReleaseLock("list3").isDefined, "list3 was not in store")
assert(store.get("list3").get.data.size === 2)
assert(store.getAndReleaseLock("list1") === None, "list1 was in store")
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
// At this point list2 was accessed last, so LRU will get rid of list3 when list1 is re-inserted
store.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(store.getAndReleaseLock("list1").isDefined, "list1 was not in store")
assert(store.get("list1").get.data.size === 2)
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
assert(store.getAndReleaseLock("list3") === None, "list1 was in store")
}
encryptionTest("LRU with mixed storage levels and streams") { _conf =>
val store = makeBlockManager(12000, testConf = Some(_conf))
val list1 = List(new Array[Byte](2000), new Array[Byte](2000))
val list2 = List(new Array[Byte](2000), new Array[Byte](2000))
val list3 = List(new Array[Byte](2000), new Array[Byte](2000))
val list4 = List(new Array[Byte](2000), new Array[Byte](2000))
// First store list1 and list2, both in memory, and list3, on disk only
store.putIterator(
"list1", list1.iterator, StorageLevel.MEMORY_ONLY_SER, tellMaster = true)
store.putIterator(
"list2", list2.iterator, StorageLevel.MEMORY_ONLY_SER, tellMaster = true)
store.putIterator(
"list3", list3.iterator, StorageLevel.DISK_ONLY, tellMaster = true)
val listForSizeEstimate = new ArrayBuffer[Any]
listForSizeEstimate ++= list1.iterator
val listSize = SizeEstimator.estimate(listForSizeEstimate)
// At this point LRU should not kick in because list3 is only on disk
assert(store.getAndReleaseLock("list1").isDefined, "list1 was not in store")
assert(store.get("list1").get.data.size === 2)
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
assert(store.getAndReleaseLock("list3").isDefined, "list3 was not in store")
assert(store.get("list3").get.data.size === 2)
assert(store.getAndReleaseLock("list1").isDefined, "list1 was not in store")
assert(store.get("list1").get.data.size === 2)
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
assert(store.getAndReleaseLock("list3").isDefined, "list3 was not in store")
assert(store.get("list3").get.data.size === 2)
// Now let's add in list4, which uses both disk and memory; list1 should drop out
store.putIterator(
"list4", list4.iterator, StorageLevel.MEMORY_AND_DISK_SER, tellMaster = true)
assert(store.getAndReleaseLock("list1") === None, "list1 was in store")
assert(store.getAndReleaseLock("list2").isDefined, "list2 was not in store")
assert(store.get("list2").get.data.size === 2)
assert(store.getAndReleaseLock("list3").isDefined, "list3 was not in store")
assert(store.get("list3").get.data.size === 2)
assert(store.getAndReleaseLock("list4").isDefined, "list4 was not in store")
assert(store.get("list4").get.data.size === 2)
}
test("negative byte values in ByteBufferInputStream") {
val buffer = ByteBuffer.wrap(Array[Int](254, 255, 0, 1, 2).map(_.toByte).toArray)
val stream = new ByteBufferInputStream(buffer)
val temp = new Array[Byte](10)
assert(stream.read() === 254, "unexpected byte read")
assert(stream.read() === 255, "unexpected byte read")
assert(stream.read() === 0, "unexpected byte read")
assert(stream.read(temp, 0, temp.length) === 2, "unexpected number of bytes read")
assert(stream.read() === -1, "end of stream not signalled")
assert(stream.read(temp, 0, temp.length) === -1, "end of stream not signalled")
}
test("overly large block") {
val store = makeBlockManager(5000)
store.putSingle("a1", new Array[Byte](10000), StorageLevel.MEMORY_ONLY)
assert(store.getSingleAndReleaseLock("a1") === None, "a1 was in store")
store.putSingle("a2", new Array[Byte](10000), StorageLevel.MEMORY_AND_DISK)
assert(!store.memoryStore.contains("a2"), "a2 was in memory store")
assert(store.getSingleAndReleaseLock("a2").isDefined, "a2 was not in store")
}
test("block compression") {
try {
conf.set(SHUFFLE_COMPRESS, true)
var store = makeBlockManager(20000, "exec1")
store.putSingle(
ShuffleBlockId(0, 0, 0), new Array[Byte](1000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(ShuffleBlockId(0, 0, 0)) <= 100,
"shuffle_0_0_0 was not compressed")
stopBlockManager(store)
conf.set(SHUFFLE_COMPRESS, false)
store = makeBlockManager(20000, "exec2")
store.putSingle(
ShuffleBlockId(0, 0, 0), new Array[Byte](10000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(ShuffleBlockId(0, 0, 0)) >= 10000,
"shuffle_0_0_0 was compressed")
stopBlockManager(store)
conf.set(BROADCAST_COMPRESS, true)
store = makeBlockManager(20000, "exec3")
store.putSingle(
BroadcastBlockId(0), new Array[Byte](10000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(BroadcastBlockId(0)) <= 1000,
"broadcast_0 was not compressed")
stopBlockManager(store)
conf.set(BROADCAST_COMPRESS, false)
store = makeBlockManager(20000, "exec4")
store.putSingle(
BroadcastBlockId(0), new Array[Byte](10000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(BroadcastBlockId(0)) >= 10000, "broadcast_0 was compressed")
stopBlockManager(store)
conf.set(RDD_COMPRESS, true)
store = makeBlockManager(20000, "exec5")
store.putSingle(rdd(0, 0), new Array[Byte](10000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(rdd(0, 0)) <= 1000, "rdd_0_0 was not compressed")
stopBlockManager(store)
conf.set(RDD_COMPRESS, false)
store = makeBlockManager(20000, "exec6")
store.putSingle(rdd(0, 0), new Array[Byte](10000), StorageLevel.MEMORY_ONLY_SER)
assert(store.memoryStore.getSize(rdd(0, 0)) >= 10000, "rdd_0_0 was compressed")
stopBlockManager(store)
// Check that any other block types are also kept uncompressed
store = makeBlockManager(20000, "exec7")
store.putSingle("other_block", new Array[Byte](10000), StorageLevel.MEMORY_ONLY)
assert(store.memoryStore.getSize("other_block") >= 10000, "other_block was compressed")
stopBlockManager(store)
} finally {
System.clearProperty(SHUFFLE_COMPRESS.key)
System.clearProperty(BROADCAST_COMPRESS.key)
System.clearProperty(RDD_COMPRESS.key)
}
}
test("block store put failure") {
// Use the Java serializer so that putting an unserializable object fails.
conf.set(TEST_MEMORY, 1200L)
val transfer = new NettyBlockTransferService(conf, securityMgr, "localhost", "localhost", 0, 1)
val memoryManager = UnifiedMemoryManager(conf, numCores = 1)
val serializerManager = new SerializerManager(new JavaSerializer(conf), conf)
val store = new BlockManager(SparkContext.DRIVER_IDENTIFIER, rpcEnv, master,
serializerManager, conf, memoryManager, mapOutputTracker,
shuffleManager, transfer, securityMgr, None)
allStores += store
store.initialize("app-id")
// The put should fail since a1 is not serializable.
class UnserializableClass
val a1 = new UnserializableClass
intercept[java.io.NotSerializableException] {
store.putSingle("a1", a1, StorageLevel.DISK_ONLY)
}
// Make sure that getting a1 doesn't hang and returns None.
failAfter(1.second) {
assert(store.getSingleAndReleaseLock("a1").isEmpty, "a1 should not be in store")
}
}
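// Streams a serialized value into the block manager via putBlockDataAsStream and checks that the
// resulting block status matches the requested storage level (memory vs. disk) and that the
// stored bytes round-trip.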
def testPutBlockDataAsStream(blockManager: BlockManager, storageLevel: StorageLevel): Unit = {
val message = "message"
val ser = serializer.newInstance().serialize(message).array()
val blockId = new RDDBlockId(0, 0)
val streamCallbackWithId =
blockManager.putBlockDataAsStream(blockId, storageLevel, ClassTag(message.getClass))
streamCallbackWithId.onData("0", ByteBuffer.wrap(ser))
streamCallbackWithId.onComplete("0")
val blockStatusOption = blockManager.getStatus(blockId)
assert(!blockStatusOption.isEmpty)
val blockStatus = blockStatusOption.get
assert((blockStatus.diskSize > 0) === !storageLevel.useMemory)
assert((blockStatus.memSize > 0) === storageLevel.useMemory)
assert(blockManager.getLocalBlockData(blockId).nioByteBuffer().array() === ser)
}
Seq(
"caching" -> StorageLevel.MEMORY_ONLY,
"caching, serialized" -> StorageLevel.MEMORY_ONLY_SER,
"caching on disk" -> StorageLevel.DISK_ONLY
).foreach { case (name, storageLevel) =>
encryptionTest(s"test putBlockDataAsStream with $name") { conf =>
init(conf)
val ioEncryptionKey =
if (conf.get(IO_ENCRYPTION_ENABLED)) Some(CryptoStreamUtils.createKey(conf)) else None
val securityMgr = new SecurityManager(conf, ioEncryptionKey)
val serializerManager = new SerializerManager(serializer, conf, ioEncryptionKey)
val transfer =
new NettyBlockTransferService(conf, securityMgr, "localhost", "localhost", 0, 1)
val memoryManager = UnifiedMemoryManager(conf, numCores = 1)
val blockManager = new BlockManager(SparkContext.DRIVER_IDENTIFIER, rpcEnv, master,
serializerManager, conf, memoryManager, mapOutputTracker,
shuffleManager, transfer, securityMgr, None)
try {
blockManager.initialize("app-id")
testPutBlockDataAsStream(blockManager, storageLevel)
} finally {
blockManager.stop()
}
}
}
test("turn off updated block statuses") {
val conf = new SparkConf()
conf.set(TASK_METRICS_TRACK_UPDATED_BLOCK_STATUSES, false)
val store = makeBlockManager(12000, testConf = Some(conf))
store.registerTask(0)
val list = List.fill(2)(new Array[Byte](2000))
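// Runs the given task body under a fresh TaskContext and returns the block status updates
// recorded in its task metrics.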
def getUpdatedBlocks(task: => Unit): Seq[(BlockId, BlockStatus)] = {
val context = TaskContext.empty()
try {
TaskContext.setTaskContext(context)
task
} finally {
TaskContext.unset()
}
context.taskMetrics.updatedBlockStatuses
}
// 1 updated block (i.e. list1)
val updatedBlocks1 = getUpdatedBlocks {
store.putIterator(
"list1", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
assert(updatedBlocks1.size === 0)
}
test("updated block statuses") {
val conf = new SparkConf()
conf.set(TASK_METRICS_TRACK_UPDATED_BLOCK_STATUSES, true)
val store = makeBlockManager(12000, testConf = Some(conf))
store.registerTask(0)
val list = List.fill(2)(new Array[Byte](2000))
val bigList = List.fill(8)(new Array[Byte](2000))
def getUpdatedBlocks(task: => Unit): Seq[(BlockId, BlockStatus)] = {
val context = TaskContext.empty()
try {
TaskContext.setTaskContext(context)
task
} finally {
TaskContext.unset()
}
context.taskMetrics.updatedBlockStatuses
}
// 1 updated block (i.e. list1)
val updatedBlocks1 = getUpdatedBlocks {
store.putIterator(
"list1", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
assert(updatedBlocks1.size === 1)
assert(updatedBlocks1.head._1 === TestBlockId("list1"))
assert(updatedBlocks1.head._2.storageLevel === StorageLevel.MEMORY_ONLY)
// 1 updated block (i.e. list2)
val updatedBlocks2 = getUpdatedBlocks {
store.putIterator(
"list2", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
}
assert(updatedBlocks2.size === 1)
assert(updatedBlocks2.head._1 === TestBlockId("list2"))
assert(updatedBlocks2.head._2.storageLevel === StorageLevel.MEMORY_ONLY)
// 2 updated blocks - list1 is kicked out of memory while list3 is added
val updatedBlocks3 = getUpdatedBlocks {
store.putIterator(
"list3", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
assert(updatedBlocks3.size === 2)
updatedBlocks3.foreach { case (id, status) =>
id match {
case TestBlockId("list1") => assert(status.storageLevel === StorageLevel.NONE)
case TestBlockId("list3") => assert(status.storageLevel === StorageLevel.MEMORY_ONLY)
case _ => fail("Updated block is neither list1 nor list3")
}
}
assert(store.memoryStore.contains("list3"), "list3 was not in memory store")
// 2 updated blocks - list2 is kicked out of memory (but put on disk) while list4 is added
val updatedBlocks4 = getUpdatedBlocks {
store.putIterator(
"list4", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
assert(updatedBlocks4.size === 2)
updatedBlocks4.foreach { case (id, status) =>
id match {
case TestBlockId("list2") => assert(status.storageLevel === StorageLevel.DISK_ONLY)
case TestBlockId("list4") => assert(status.storageLevel === StorageLevel.MEMORY_ONLY)
case _ => fail("Updated block is neither list2 nor list4")
}
}
assert(store.diskStore.contains("list2"), "list2 was not in disk store")
assert(store.memoryStore.contains("list4"), "list4 was not in memory store")
// No updated blocks - list5 is too big to fit in store and nothing is kicked out
val updatedBlocks5 = getUpdatedBlocks {
store.putIterator(
"list5", bigList.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
assert(updatedBlocks5.size === 0)
// memory store contains only list3 and list4
assert(!store.memoryStore.contains("list1"), "list1 was in memory store")
assert(!store.memoryStore.contains("list2"), "list2 was in memory store")
assert(store.memoryStore.contains("list3"), "list3 was not in memory store")
assert(store.memoryStore.contains("list4"), "list4 was not in memory store")
assert(!store.memoryStore.contains("list5"), "list5 was in memory store")
// disk store contains only list2
assert(!store.diskStore.contains("list1"), "list1 was in disk store")
assert(store.diskStore.contains("list2"), "list2 was not in disk store")
assert(!store.diskStore.contains("list3"), "list3 was in disk store")
assert(!store.diskStore.contains("list4"), "list4 was in disk store")
assert(!store.diskStore.contains("list5"), "list5 was in disk store")
// remove block - list2 should be removed from disk
val updatedBlocks6 = getUpdatedBlocks {
store.removeBlock(
"list2", tellMaster = true)
}
assert(updatedBlocks6.size === 1)
assert(updatedBlocks6.head._1 === TestBlockId("list2"))
assert(updatedBlocks6.head._2.storageLevel == StorageLevel.NONE)
assert(!store.diskStore.contains("list2"), "list2 was in disk store")
}
test("query block statuses") {
val store = makeBlockManager(12000)
val list = List.fill(2)(new Array[Byte](2000))
// Tell master. By LRU, only list2 and list3 remain.
store.putIterator(
"list1", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
store.putIterator(
"list2", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
store.putIterator(
"list3", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
// getLocations and getBlockStatus should yield the same locations
assert(store.master.getLocations("list1").size === 0)
assert(store.master.getLocations("list2").size === 1)
assert(store.master.getLocations("list3").size === 1)
assert(store.master.getBlockStatus("list1", askStorageEndpoints = false).size === 0)
assert(store.master.getBlockStatus("list2", askStorageEndpoints = false).size === 1)
assert(store.master.getBlockStatus("list3", askStorageEndpoints = false).size === 1)
assert(store.master.getBlockStatus("list1", askStorageEndpoints = true).size === 0)
assert(store.master.getBlockStatus("list2", askStorageEndpoints = true).size === 1)
assert(store.master.getBlockStatus("list3", askStorageEndpoints = true).size === 1)
// This time don't tell master and see what happens. By LRU, only list5 and list6 remain.
store.putIterator(
"list4", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = false)
store.putIterator(
"list5", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = false)
store.putIterator(
"list6", list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = false)
// getLocations should return nothing because the master is not informed
// getBlockStatus without asking storage endpoints should have the same result
// getBlockStatus with asking storage endpoints, however, should return the actual statuses
assert(store.master.getLocations("list4").size === 0)
assert(store.master.getLocations("list5").size === 0)
assert(store.master.getLocations("list6").size === 0)
assert(store.master.getBlockStatus("list4", askStorageEndpoints = false).size === 0)
assert(store.master.getBlockStatus("list5", askStorageEndpoints = false).size === 0)
assert(store.master.getBlockStatus("list6", askStorageEndpoints = false).size === 0)
assert(store.master.getBlockStatus("list4", askStorageEndpoints = true).size === 0)
assert(store.master.getBlockStatus("list5", askStorageEndpoints = true).size === 1)
assert(store.master.getBlockStatus("list6", askStorageEndpoints = true).size === 1)
}
test("get matching blocks") {
val store = makeBlockManager(12000)
val list = List.fill(2)(new Array[Byte](100))
// insert some blocks
store.putIterator(
"list1", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
store.putIterator(
"list2", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
store.putIterator(
"list3", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
// getMatchingBlockIds should find the blocks that were reported to the master
assert(store.master.getMatchingBlockIds(
_.toString.contains("list"), askStorageEndpoints = false).size
=== 3)
assert(store.master.getMatchingBlockIds(
_.toString.contains("list1"), askStorageEndpoints = false).size
=== 1)
// insert some more blocks
store.putIterator(
"newlist1", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = true)
store.putIterator(
"newlist2", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = false)
store.putIterator(
"newlist3", list.iterator, StorageLevel.MEMORY_AND_DISK, tellMaster = false)
// without asking the storage endpoints only newlist1 (reported to the master) is matched;
// asking the endpoints finds all three
assert(
store.master.getMatchingBlockIds(
_.toString.contains("newlist"), askStorageEndpoints = false).size
=== 1)
assert(
store.master.getMatchingBlockIds(
_.toString.contains("newlist"), askStorageEndpoints = true).size
=== 3)
val blockIds = Seq(RDDBlockId(1, 0), RDDBlockId(1, 1), RDDBlockId(2, 0))
blockIds.foreach { blockId =>
store.putIterator(
blockId, list.iterator, StorageLevel.MEMORY_ONLY, tellMaster = true)
}
val matchedBlockIds = store.master.getMatchingBlockIds(_ match {
case RDDBlockId(1, _) => true
case _ => false
}, askStorageEndpoints = true)
assert(matchedBlockIds.toSet === Set(RDDBlockId(1, 0), RDDBlockId(1, 1)))
}
test("SPARK-1194 regression: fix the same-RDD rule for cache replacement") {
val store = makeBlockManager(12000)
store.putSingle(rdd(0, 0), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
store.putSingle(rdd(1, 0), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
// Access rdd_1_0 to ensure it's not least recently used.
assert(store.getSingleAndReleaseLock(rdd(1, 0)).isDefined, "rdd_1_0 was not in store")
// According to the same-RDD rule, rdd_1_0 should be replaced here.
store.putSingle(rdd(0, 1), new Array[Byte](4000), StorageLevel.MEMORY_ONLY)
// rdd_1_0 should have been replaced, even though it's not the least recently used block.
assert(store.memoryStore.contains(rdd(0, 0)), "rdd_0_0 was not in store")
assert(store.memoryStore.contains(rdd(0, 1)), "rdd_0_1 was not in store")
assert(!store.memoryStore.contains(rdd(1, 0)), "rdd_1_0 was in store")
}
test("safely unroll blocks through putIterator (disk)") {
val store = makeBlockManager(12000)
val memoryStore = store.memoryStore
val diskStore = store.diskStore
val smallList = List.fill(40)(new Array[Byte](100))
val bigList = List.fill(40)(new Array[Byte](1000))
def smallIterator: Iterator[Any] = smallList.iterator.asInstanceOf[Iterator[Any]]
def bigIterator: Iterator[Any] = bigList.iterator.asInstanceOf[Iterator[Any]]
assert(memoryStore.currentUnrollMemoryForThisTask === 0)
store.putIterator("b1", smallIterator, StorageLevel.MEMORY_AND_DISK)
store.putIterator("b2", smallIterator, StorageLevel.MEMORY_AND_DISK)
// Unroll with not enough space. This should succeed but kick out b1 in the process.
// Memory store should contain b2 and b3, while disk store should contain only b1
val result3 = memoryStore.putIteratorAsValues("b3", smallIterator, MemoryMode.ON_HEAP,
ClassTag.Any)
assert(result3.isRight)
assert(!memoryStore.contains("b1"))
assert(memoryStore.contains("b2"))
assert(memoryStore.contains("b3"))
assert(diskStore.contains("b1"))
assert(!diskStore.contains("b2"))
assert(!diskStore.contains("b3"))
memoryStore.remove("b3")
store.putIterator("b3", smallIterator, StorageLevel.MEMORY_ONLY)
assert(memoryStore.currentUnrollMemoryForThisTask === 0)
// Unroll huge block with not enough space. This should fail and return an iterator so that
// the block may be stored to disk. During the unrolling process, block "b2" should be kicked
// out, so the memory store should contain only b3, while the disk store should contain
// b1, b2 and b4.
val result4 = memoryStore.putIteratorAsValues("b4", bigIterator, MemoryMode.ON_HEAP,
ClassTag.Any)
assert(result4.isLeft)
assert(!memoryStore.contains("b1"))
assert(!memoryStore.contains("b2"))
assert(memoryStore.contains("b3"))
assert(!memoryStore.contains("b4"))
}
test("read-locked blocks cannot be evicted from memory") {
val store = makeBlockManager(12000)
val arr = new Array[Byte](4000)
// First store a1 and a2, both in memory, and a3, on disk only
store.putSingle("a1", arr, StorageLevel.MEMORY_ONLY_SER)
store.putSingle("a2", arr, StorageLevel.MEMORY_ONLY_SER)
assert(store.getSingle("a1").isDefined, "a1 was not in store")
assert(store.getSingle("a2").isDefined, "a2 was not in store")
// This put should fail because both a1 and a2 should be read-locked:
store.putSingle("a3", arr, StorageLevel.MEMORY_ONLY_SER)
assert(store.getSingle("a3").isEmpty, "a3 was in store")
assert(store.getSingle("a1").isDefined, "a1 was not in store")
assert(store.getSingle("a2").isDefined, "a2 was not in store")
// Release both pins of block a2:
store.releaseLock("a2")
store.releaseLock("a2")
// Block a1 is the least-recently accessed, so an LRU eviction policy would evict it before
// block a2. However, a1 is still pinned so this put of a3 should evict a2 instead:
store.putSingle("a3", arr, StorageLevel.MEMORY_ONLY_SER)
assert(store.getSingle("a2").isEmpty, "a2 was in store")
assert(store.getSingle("a1").isDefined, "a1 was not in store")
assert(store.getSingle("a3").isDefined, "a3 was not in store")
}
private def testReadWithLossOfOnDiskFiles(
storageLevel: StorageLevel,
readMethod: BlockManager => Option[_]): Unit = {
val store = makeBlockManager(12000)
assert(store.putSingle("blockId", new Array[Byte](4000), storageLevel))
assert(store.getStatus("blockId").isDefined)
// Directly delete all files from the disk store, triggering failures when reading blocks:
store.diskBlockManager.getAllFiles().foreach(_.delete())
// The BlockManager still thinks that these blocks exist:
assert(store.getStatus("blockId").isDefined)
// Because the BlockManager's metadata claims that the block exists (i.e. that it's present
// in at least one store), the read attempts to read it and fails when the on-disk file is
// missing.
intercept[SparkException] {
readMethod(store)
}
// Subsequent read attempts will succeed; the block isn't present but we return an expected
// "block not found" response rather than a fatal error:
assert(readMethod(store).isEmpty)
// The reason why this second read succeeded is because the metadata entry for the missing
// block was removed as a result of the read failure:
assert(store.getStatus("blockId").isEmpty)
}
test("remove block if a read fails due to missing DiskStore files (SPARK-15736)") {
val storageLevels = Seq(
StorageLevel(useDisk = true, useMemory = false, deserialized = false),
StorageLevel(useDisk = true, useMemory = false, deserialized = true))
val readMethods = Map[String, BlockManager => Option[_]](
"getLocalBytes" -> ((m: BlockManager) => m.getLocalBytes("blockId")),
"getLocalValues" -> ((m: BlockManager) => m.getLocalValues("blockId"))
)
testReadWithLossOfOnDiskFiles(StorageLevel.DISK_ONLY, _.getLocalBytes("blockId"))
for ((readMethodName, readMethod) <- readMethods; storageLevel <- storageLevels) {
withClue(s"$readMethodName $storageLevel") {
testReadWithLossOfOnDiskFiles(storageLevel, readMethod)
}
}
}
test("SPARK-13328: refresh block locations (fetch should fail after hitting a threshold)") {
val mockBlockTransferService =
new MockBlockTransferService(conf.get(BLOCK_FAILURES_BEFORE_LOCATION_REFRESH))
val store =
makeBlockManager(8000, "executor1", transferService = Option(mockBlockTransferService))
store.putSingle("item", 999L, StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(store.getRemoteBytes("item").isEmpty)
}
test("SPARK-13328: refresh block locations (fetch should succeed after location refresh)") {
val maxFailuresBeforeLocationRefresh =
conf.get(BLOCK_FAILURES_BEFORE_LOCATION_REFRESH)
val mockBlockManagerMaster = mock(classOf[BlockManagerMaster])
val mockBlockTransferService =
new MockBlockTransferService(maxFailuresBeforeLocationRefresh)
// make sure we have more than maxFailuresBeforeLocationRefresh locations
// so that we have a chance to do location refresh
val blockManagerIds = (0 to maxFailuresBeforeLocationRefresh)
.map { i => BlockManagerId(s"id-$i", s"host-$i", i + 1) }
when(mockBlockManagerMaster.getLocationsAndStatus(mc.any[BlockId], mc.any[String])).thenReturn(
Option(BlockLocationsAndStatus(blockManagerIds, BlockStatus.empty, None)))
when(mockBlockManagerMaster.getLocations(mc.any[BlockId])).thenReturn(
blockManagerIds)
val store = makeBlockManager(8000, "executor1", mockBlockManagerMaster,
transferService = Option(mockBlockTransferService))
val block = store.getRemoteBytes("item")
.asInstanceOf[Option[ByteBuffer]]
assert(block.isDefined)
verify(mockBlockManagerMaster, times(1))
.getLocationsAndStatus("item", "MockBlockTransferServiceHost")
verify(mockBlockManagerMaster, times(1)).getLocations("item")
}
test("SPARK-17484: block status is properly updated following an exception in put()") {
val mockBlockTransferService = new MockBlockTransferService(maxFailures = 10) {
override def uploadBlock(
hostname: String,
        port: Int,
        execId: String,
blockId: BlockId,
blockData: ManagedBuffer,
level: StorageLevel,
classTag: ClassTag[_]): Future[Unit] = {
throw new InterruptedException("Intentional interrupt")
}
}
val store =
makeBlockManager(8000, "executor1", transferService = Option(mockBlockTransferService))
val store2 =
makeBlockManager(8000, "executor2", transferService = Option(mockBlockTransferService))
intercept[InterruptedException] {
store.putSingle("item", "value", StorageLevel.MEMORY_ONLY_2, tellMaster = true)
}
assert(store.getLocalBytes("item").isEmpty)
assert(master.getLocations("item").isEmpty)
assert(store2.getRemoteBytes("item").isEmpty)
}
test("SPARK-17484: master block locations are updated following an invalid remote block fetch") {
val store = makeBlockManager(8000, "executor1")
val store2 = makeBlockManager(8000, "executor2")
store.putSingle("item", "value", StorageLevel.MEMORY_ONLY, tellMaster = true)
assert(master.getLocations("item").nonEmpty)
store.removeBlock("item", tellMaster = false)
assert(master.getLocations("item").nonEmpty)
assert(store2.getRemoteBytes("item").isEmpty)
assert(master.getLocations("item").isEmpty)
}
test("SPARK-25888: serving of removed file not detected by shuffle service") {
    // Although the existence of the file is checked before serving it, a delete can still happen
    // somewhere after that check.
val store = makeBlockManager(8000, "executor1")
val emptyBlockFetcher = new MockBlockTransferService(0) {
override def fetchBlockSync(
host: String,
port: Int,
execId: String,
blockId: String,
tempFileManager: DownloadFileManager): ManagedBuffer = {
val transConf = SparkTransportConf.fromSparkConf(conf, "shuffle", numUsableCores = 1)
// empty ManagedBuffer
new FileSegmentManagedBuffer(transConf, new File("missing.file"), 0, 0)
}
}
val store2 = makeBlockManager(8000, "executor2", this.master, Some(emptyBlockFetcher))
store.putSingle("item", "value", StorageLevel.DISK_ONLY, tellMaster = true)
assert(master.getLocations("item").nonEmpty)
assert(store2.getRemoteBytes("item").isEmpty)
}
test("test sorting of block locations") {
val localHost = "localhost"
val otherHost = "otherHost"
val store = makeBlockManager(8000, "executor1")
val externalShuffleServicePort = StorageUtils.externalShuffleServicePort(conf)
val port = store.blockTransferService.port
val rack = Some("rack")
val blockManagerWithTopologyInfo = BlockManagerId(
store.blockManagerId.executorId,
store.blockManagerId.host,
store.blockManagerId.port,
rack)
store.blockManagerId = blockManagerWithTopologyInfo
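    // Expected preference order: same-host locations first, then same-rack, then everything else.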
val locations = Seq(
BlockManagerId("executor4", otherHost, externalShuffleServicePort, rack),
BlockManagerId("executor3", otherHost, port, rack),
BlockManagerId("executor6", otherHost, externalShuffleServicePort),
BlockManagerId("executor5", otherHost, port),
BlockManagerId("executor2", localHost, externalShuffleServicePort),
BlockManagerId("executor1", localHost, port))
val sortedLocations = Seq(
BlockManagerId("executor1", localHost, port),
BlockManagerId("executor2", localHost, externalShuffleServicePort),
BlockManagerId("executor3", otherHost, port, rack),
BlockManagerId("executor4", otherHost, externalShuffleServicePort, rack),
BlockManagerId("executor5", otherHost, port),
BlockManagerId("executor6", otherHost, externalShuffleServicePort))
assert(store.sortLocations(locations) === sortedLocations)
}
test("SPARK-20640: Shuffle registration timeout and maxAttempts conf are working") {
val tryAgainMsg = "test_spark_20640_try_again"
val timingoutExecutor = "timingoutExecutor"
val tryAgainExecutor = "tryAgainExecutor"
val succeedingExecutor = "succeedingExecutor"
val failure = new Exception(tryAgainMsg)
val success = ByteBuffer.wrap(new Array[Byte](0))
var secondExecutorFailedOnce = false
var thirdExecutorFailedOnce = false
val handler = new NoOpRpcHandler {
override def receive(
client: TransportClient,
message: ByteBuffer,
callback: RpcResponseCallback): Unit = {
val msgObj = BlockTransferMessage.Decoder.fromByteBuffer(message)
msgObj match {
case exec: RegisterExecutor if exec.execId == timingoutExecutor =>
() // No reply to generate client-side timeout
case exec: RegisterExecutor
if exec.execId == tryAgainExecutor && !secondExecutorFailedOnce =>
secondExecutorFailedOnce = true
callback.onFailure(failure)
case exec: RegisterExecutor if exec.execId == tryAgainExecutor =>
callback.onSuccess(success)
case exec: RegisterExecutor
if exec.execId == succeedingExecutor && !thirdExecutorFailedOnce =>
thirdExecutorFailedOnce = true
callback.onFailure(failure)
case exec: RegisterExecutor if exec.execId == succeedingExecutor =>
callback.onSuccess(success)
}
}
}
val transConf = SparkTransportConf.fromSparkConf(conf, "shuffle", numUsableCores = 0)
Utils.tryWithResource(new TransportContext(transConf, handler, true)) { transCtx =>
      // A shuffle server backed by the handler above: it never replies to the timing-out executor
      // and fails the first registration attempt for the other executors.
def newShuffleServer(port: Int): (TransportServer, Int) = {
(transCtx.createServer(port, Seq.empty[TransportServerBootstrap].asJava), port)
}
val candidatePort = RandomUtils.nextInt(1024, 65536)
val (server, shufflePort) = Utils.startServiceOnPort(candidatePort,
newShuffleServer, conf, "ShuffleServer")
conf.set(SHUFFLE_SERVICE_ENABLED.key, "true")
conf.set(SHUFFLE_SERVICE_PORT.key, shufflePort.toString)
conf.set(SHUFFLE_REGISTRATION_TIMEOUT.key, "40")
conf.set(SHUFFLE_REGISTRATION_MAX_ATTEMPTS.key, "1")
var e = intercept[SparkException] {
makeBlockManager(8000, timingoutExecutor)
}.getMessage
assert(e.contains("TimeoutException"))
conf.set(SHUFFLE_REGISTRATION_TIMEOUT.key, "1000")
conf.set(SHUFFLE_REGISTRATION_MAX_ATTEMPTS.key, "1")
e = intercept[SparkException] {
makeBlockManager(8000, tryAgainExecutor)
}.getMessage
assert(e.contains(tryAgainMsg))
conf.set(SHUFFLE_REGISTRATION_TIMEOUT.key, "1000")
conf.set(SHUFFLE_REGISTRATION_MAX_ATTEMPTS.key, "2")
makeBlockManager(8000, succeedingExecutor)
server.close()
}
}
test("fetch remote block to local disk if block size is larger than threshold") {
conf.set(MAX_REMOTE_BLOCK_SIZE_FETCH_TO_MEM, 1000L)
val mockBlockManagerMaster = mock(classOf[BlockManagerMaster])
val mockBlockTransferService = new MockBlockTransferService(0)
val blockLocations = Seq(BlockManagerId("id-0", "host-0", 1))
val blockStatus = BlockStatus(StorageLevel.DISK_ONLY, 0L, 2000L)
when(mockBlockManagerMaster.getLocationsAndStatus(mc.any[BlockId], mc.any[String])).thenReturn(
Option(BlockLocationsAndStatus(blockLocations, blockStatus, None)))
when(mockBlockManagerMaster.getLocations(mc.any[BlockId])).thenReturn(blockLocations)
val store = makeBlockManager(8000, "executor1", mockBlockManagerMaster,
transferService = Option(mockBlockTransferService))
val block = store.getRemoteBytes("item")
.asInstanceOf[Option[ByteBuffer]]
assert(block.isDefined)
assert(mockBlockTransferService.numCalls === 1)
// assert FileManager is not null if the block size is larger than threshold.
assert(mockBlockTransferService.tempFileManager === store.remoteBlockTempFileManager)
}
test("query locations of blockIds") {
val mockBlockManagerMaster = mock(classOf[BlockManagerMaster])
val blockLocations = Seq(BlockManagerId("1", "host1", 100), BlockManagerId("2", "host2", 200))
when(mockBlockManagerMaster.getLocations(mc.any[Array[BlockId]]))
.thenReturn(Array(blockLocations))
val env = mock(classOf[SparkEnv])
val blockIds: Array[BlockId] = Array(StreamBlockId(1, 2))
val locs = BlockManager.blockIdsToLocations(blockIds, env, mockBlockManagerMaster)
val expectedLocs = Seq("executor_host1_1", "executor_host2_2")
assert(locs(blockIds(0)) == expectedLocs)
}
test("SPARK-30594: Do not post SparkListenerBlockUpdated when updateBlockInfo returns false") {
// update block info for non-existent block manager
val updateInfo = UpdateBlockInfo(BlockManagerId("1", "host1", 100),
BlockId("test_1"), StorageLevel.MEMORY_ONLY, 1, 1)
val result = master.driverEndpoint.askSync[Boolean](updateInfo)
assert(!result)
verify(liveListenerBus, never()).post(SparkListenerBlockUpdated(BlockUpdatedInfo(updateInfo)))
}
test("we reject putting blocks when we have the wrong shuffle resolver") {
val badShuffleManager = mock(classOf[ShuffleManager])
val badShuffleResolver = mock(classOf[ShuffleBlockResolver])
when(badShuffleManager.shuffleBlockResolver).thenReturn(badShuffleResolver)
val shuffleBlockId = ShuffleDataBlockId(0, 0, 0)
val bm = makeBlockManager(100, "exec1", shuffleManager = badShuffleManager)
val message = "message"
val exception = intercept[SparkException] {
bm.putBlockDataAsStream(shuffleBlockId, StorageLevel.DISK_ONLY, ClassTag(message.getClass))
}
assert(exception.getMessage.contains("unsupported shuffle resolver"))
}
test("test decommission block manager should not be part of peers") {
val exec1 = "exec1"
val exec2 = "exec2"
val exec3 = "exec3"
val store1 = makeBlockManager(1000, exec1)
val store2 = makeBlockManager(1000, exec2)
val store3 = makeBlockManager(1000, exec3)
assert(master.getPeers(store3.blockManagerId).map(_.executorId).toSet === Set(exec1, exec2))
val data = new Array[Byte](4)
val blockId = rdd(0, 0)
store1.putSingle(blockId, data, StorageLevel.MEMORY_ONLY_2)
assert(master.getLocations(blockId).size === 2)
master.decommissionBlockManagers(Seq(exec1))
// store1 is decommissioned, so it should not be part of peer list for store3
assert(master.getPeers(store3.blockManagerId).map(_.executorId).toSet === Set(exec2))
}
test("test decommissionRddCacheBlocks should migrate all cached blocks") {
val store1 = makeBlockManager(1000, "exec1")
val store2 = makeBlockManager(1000, "exec2")
val store3 = makeBlockManager(1000, "exec3")
val data = new Array[Byte](4)
val blockId = rdd(0, 0)
store1.putSingle(blockId, data, StorageLevel.MEMORY_ONLY_2)
assert(master.getLocations(blockId).size === 2)
assert(master.getLocations(blockId).contains(store1.blockManagerId))
val decomManager = new BlockManagerDecommissioner(conf, store1)
decomManager.decommissionRddCacheBlocks()
assert(master.getLocations(blockId).size === 2)
assert(master.getLocations(blockId).toSet === Set(store2.blockManagerId,
store3.blockManagerId))
}
test("test decommissionRddCacheBlocks should keep the block if it is not able to migrate") {
val store1 = makeBlockManager(3500, "exec1")
val store2 = makeBlockManager(1000, "exec2")
val dataLarge = new Array[Byte](1500)
val blockIdLarge = rdd(0, 0)
val dataSmall = new Array[Byte](1)
val blockIdSmall = rdd(0, 1)
store1.putSingle(blockIdLarge, dataLarge, StorageLevel.MEMORY_ONLY)
store1.putSingle(blockIdSmall, dataSmall, StorageLevel.MEMORY_ONLY)
assert(master.getLocations(blockIdLarge) === Seq(store1.blockManagerId))
assert(master.getLocations(blockIdSmall) === Seq(store1.blockManagerId))
val decomManager = new BlockManagerDecommissioner(conf, store1)
decomManager.decommissionRddCacheBlocks()
// Smaller block migrated to store2
assert(master.getLocations(blockIdSmall) === Seq(store2.blockManagerId))
// Larger block still present in store1 as it can't be migrated
assert(master.getLocations(blockIdLarge) === Seq(store1.blockManagerId))
}
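  // Writes shuffle data/index files on bm1, registers their map outputs, then triggers shuffle
  // block migration; when willReject is true the test expects the files to move to bm2,
  // otherwise they are expected to stay on bm1 (e.g. because they exceed the size limit).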
private def testShuffleBlockDecommissioning(maxShuffleSize: Option[Int], willReject: Boolean) = {
maxShuffleSize.foreach{ size =>
conf.set(STORAGE_DECOMMISSION_SHUFFLE_MAX_DISK_SIZE.key, s"${size}b")
}
val shuffleManager1 = makeSortShuffleManager(Some(conf))
val bm1 = makeBlockManager(3500, "exec1", shuffleManager = shuffleManager1)
shuffleManager1.shuffleBlockResolver._blockManager = bm1
val shuffleManager2 = makeSortShuffleManager(Some(conf))
val bm2 = makeBlockManager(3500, "exec2", shuffleManager = shuffleManager2)
shuffleManager2.shuffleBlockResolver._blockManager = bm2
val blockSize = 5
val shuffleDataBlockContent = Array[Byte](0, 1, 2, 3, 4)
val shuffleData = ShuffleDataBlockId(0, 0, 0)
val shuffleData2 = ShuffleDataBlockId(1, 0, 0)
Files.write(bm1.diskBlockManager.getFile(shuffleData).toPath(), shuffleDataBlockContent)
Files.write(bm2.diskBlockManager.getFile(shuffleData2).toPath(), shuffleDataBlockContent)
val shuffleIndexBlockContent = Array[Byte](5, 6, 7, 8, 9)
val shuffleIndex = ShuffleIndexBlockId(0, 0, 0)
val shuffleIndexOnly = ShuffleIndexBlockId(0, 1, 0)
val shuffleIndex2 = ShuffleIndexBlockId(1, 0, 0)
Files.write(bm1.diskBlockManager.getFile(shuffleIndex).toPath(), shuffleIndexBlockContent)
Files.write(bm1.diskBlockManager.getFile(shuffleIndexOnly).toPath(), shuffleIndexBlockContent)
Files.write(bm2.diskBlockManager.getFile(shuffleIndex2).toPath(), shuffleIndexBlockContent)
mapOutputTracker.registerShuffle(0, 2, MergeStatus.SHUFFLE_PUSH_DUMMY_NUM_REDUCES)
val decomManager = new BlockManagerDecommissioner(
conf.set(config.STORAGE_DECOMMISSION_SHUFFLE_BLOCKS_ENABLED, true), bm1)
try {
mapOutputTracker.registerMapOutput(0, 0, MapStatus(bm1.blockManagerId, Array(blockSize), 0))
mapOutputTracker.registerMapOutput(0, 1, MapStatus(bm1.blockManagerId, Array(blockSize), 1))
assert(mapOutputTracker.shuffleStatuses(0).mapStatuses(0).location === bm1.blockManagerId)
assert(mapOutputTracker.shuffleStatuses(0).mapStatuses(1).location === bm1.blockManagerId)
val env = mock(classOf[SparkEnv])
when(env.conf).thenReturn(conf)
SparkEnv.set(env)
decomManager.refreshMigratableShuffleBlocks()
if (willReject) {
eventually(timeout(1.second), interval(10.milliseconds)) {
assert(mapOutputTracker.shuffleStatuses(0).mapStatuses(0).location === bm2.blockManagerId)
assert(mapOutputTracker.shuffleStatuses(0).mapStatuses(1).location === bm2.blockManagerId)
}
assert(Files.readAllBytes(bm2.diskBlockManager.getFile(shuffleData).toPath())
=== shuffleDataBlockContent)
assert(Files.readAllBytes(bm2.diskBlockManager.getFile(shuffleIndex).toPath())
=== shuffleIndexBlockContent)
} else {
Thread.sleep(1000)
assert(mapOutputTracker.shuffleStatuses(0).mapStatuses(0).location === bm1.blockManagerId)
}
} finally {
mapOutputTracker.unregisterShuffle(0)
// Avoid thread leak
decomManager.stopMigratingShuffleBlocks()
}
}
test("test migration of shuffle blocks during decommissioning - no limit") {
testShuffleBlockDecommissioning(None, true)
}
test("test migration of shuffle blocks during decommissioning - larger limit") {
testShuffleBlockDecommissioning(Some(10000), true)
}
test("[SPARK-34363]test migration of shuffle blocks during decommissioning - small limit") {
testShuffleBlockDecommissioning(Some(1), false)
}
test("SPARK-32919: Shuffle push merger locations should be bounded with in" +
" spark.shuffle.push.retainedMergerLocations") {
assert(master.getShufflePushMergerLocations(10, Set.empty).isEmpty)
makeBlockManager(100, "execA",
transferService = Some(new MockBlockTransferService(10, "hostA")))
makeBlockManager(100, "execB",
transferService = Some(new MockBlockTransferService(10, "hostB")))
makeBlockManager(100, "execC",
transferService = Some(new MockBlockTransferService(10, "hostC")))
makeBlockManager(100, "execD",
transferService = Some(new MockBlockTransferService(10, "hostD")))
makeBlockManager(100, "execE",
transferService = Some(new MockBlockTransferService(10, "hostA")))
assert(master.getShufflePushMergerLocations(10, Set.empty).size == 4)
assert(master.getShufflePushMergerLocations(10, Set.empty).map(_.host).sorted ===
Seq("hostC", "hostD", "hostA", "hostB").sorted)
assert(master.getShufflePushMergerLocations(10, Set("hostB")).size == 3)
}
test("SPARK-32919: Prefer active executor locations for shuffle push mergers") {
makeBlockManager(100, "execA",
transferService = Some(new MockBlockTransferService(10, "hostA")))
makeBlockManager(100, "execB",
transferService = Some(new MockBlockTransferService(10, "hostB")))
makeBlockManager(100, "execC",
transferService = Some(new MockBlockTransferService(10, "hostC")))
makeBlockManager(100, "execD",
transferService = Some(new MockBlockTransferService(10, "hostD")))
makeBlockManager(100, "execE",
transferService = Some(new MockBlockTransferService(10, "hostA")))
assert(master.getShufflePushMergerLocations(5, Set.empty).size == 4)
assert(master.getExecutorEndpointRef(SparkContext.DRIVER_IDENTIFIER).isEmpty)
makeBlockManager(100, SparkContext.DRIVER_IDENTIFIER,
transferService = Some(new MockBlockTransferService(10, "host-driver")))
assert(master.getExecutorEndpointRef(SparkContext.DRIVER_IDENTIFIER).isDefined)
master.removeExecutor("execA")
master.removeExecutor("execE")
assert(master.getShufflePushMergerLocations(3, Set.empty).size == 3)
assert(master.getShufflePushMergerLocations(3, Set.empty).map(_.host).sorted ===
Seq("hostC", "hostB", "hostD").sorted)
assert(master.getShufflePushMergerLocations(4, Set.empty).map(_.host).sorted ===
Seq("hostB", "hostA", "hostC", "hostD").sorted)
master.removeShufflePushMergerLocation("hostA")
assert(master.getShufflePushMergerLocations(4, Set.empty).map(_.host).sorted ===
Seq("hostB", "hostC", "hostD").sorted)
}
test("SPARK-33387 Support ordered shuffle block migration") {
val blocks: Seq[ShuffleBlockInfo] = Seq(
ShuffleBlockInfo(1, 0L),
ShuffleBlockInfo(0, 1L),
ShuffleBlockInfo(0, 0L),
ShuffleBlockInfo(1, 1L))
val sortedBlocks = blocks.sortBy(b => (b.shuffleId, b.mapId))
val resolver = mock(classOf[MigratableResolver])
when(resolver.getStoredShuffles).thenReturn(blocks)
val bm = mock(classOf[BlockManager])
when(bm.migratableResolver).thenReturn(resolver)
when(bm.getPeers(mc.any())).thenReturn(Seq.empty)
val decomManager = new BlockManagerDecommissioner(conf, bm)
decomManager.refreshMigratableShuffleBlocks()
assert(sortedBlocks.sameElements(decomManager.shufflesToMigrate.asScala.map(_._1)))
}
test("SPARK-34193: Potential race condition during decommissioning with TorrentBroadcast") {
// Validate that we allow putting of broadcast blocks during decommissioning
val exec1 = "exec1"
val store = makeBlockManager(1000, exec1)
master.decommissionBlockManagers(Seq(exec1))
val a = new Array[Byte](1)
// Put a broadcast block, no exception
val broadcast0BlockId = BroadcastBlockId(0)
store.putSingle(broadcast0BlockId, a, StorageLevel.DISK_ONLY)
}
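  // Test-only transfer service: the first maxFailures fetchBlockSync calls throw, after which the
  // real BlockTransferService implementation is used; uploads complete immediately.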
class MockBlockTransferService(
val maxFailures: Int,
override val hostName: String = "MockBlockTransferServiceHost") extends BlockTransferService {
var numCalls = 0
var tempFileManager: DownloadFileManager = null
override def init(blockDataManager: BlockDataManager): Unit = {}
override def fetchBlocks(
host: String,
port: Int,
execId: String,
blockIds: Array[String],
listener: BlockFetchingListener,
tempFileManager: DownloadFileManager): Unit = {
listener.onBlockFetchSuccess("mockBlockId", new NioManagedBuffer(ByteBuffer.allocate(1)))
}
override def close(): Unit = {}
override def port: Int = { 63332 }
override def uploadBlock(
hostname: String,
port: Int,
execId: String,
blockId: BlockId,
blockData: ManagedBuffer,
level: StorageLevel,
classTag: ClassTag[_]): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
Future {}
}
override def fetchBlockSync(
host: String,
port: Int,
execId: String,
blockId: String,
tempFileManager: DownloadFileManager): ManagedBuffer = {
numCalls += 1
this.tempFileManager = tempFileManager
if (numCalls <= maxFailures) {
throw new RuntimeException("Failing block fetch in the mock block transfer service")
}
super.fetchBlockSync(host, port, execId, blockId, tempFileManager)
}
}
}
private object BlockManagerSuite {
// Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
implicit val defaultSignaler: Signaler = ThreadSignaler
private implicit class BlockManagerTestUtils(store: BlockManager) {
def dropFromMemoryIfExists(
blockId: BlockId,
data: () => Either[Array[Any], ChunkedByteBuffer]): Unit = {
store.blockInfoManager.lockForWriting(blockId).foreach { info =>
val newEffectiveStorageLevel = store.dropFromMemory(blockId, data)
if (newEffectiveStorageLevel.isValid) {
// The block is still present in at least one store, so release the lock
// but don't delete the block info
store.releaseLock(blockId)
} else {
// The block isn't present in any store, so delete the block info so that the
// block can be stored again
store.blockInfoManager.removeBlock(blockId)
}
}
}
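    // Wraps a getter so that the read lock acquired by a successful get is released right away,
    // allowing later puts in these tests to evict the block.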
private def wrapGet[T](f: BlockId => Option[T]): BlockId => Option[T] = (blockId: BlockId) => {
val result = f(blockId)
if (result.isDefined) {
store.releaseLock(blockId)
}
result
}
def hasLocalBlock(blockId: BlockId): Boolean = {
getLocalAndReleaseLock(blockId).isDefined
}
val getLocalAndReleaseLock: (BlockId) => Option[BlockResult] = wrapGet(store.getLocalValues)
val getAndReleaseLock: (BlockId) => Option[BlockResult] = wrapGet(store.get)
val getSingleAndReleaseLock: (BlockId) => Option[Any] = wrapGet(store.getSingle)
val getLocalBytesAndReleaseLock: (BlockId) => Option[ChunkedByteBuffer] = {
val allocator = ByteBuffer.allocate _
wrapGet { bid => store.getLocalBytes(bid).map(_.toChunkedByteBuffer(allocator)) }
}
}
}
|
nchammas/spark
|
core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
|
Scala
|
apache-2.0
| 102,860 |
package org.example.json
import spray.json._
/**
* Created by kailianghe on 1/23/15.
*/
// simple example grabbed from https://github.com/spray/spray-json
case class Color(name: String, red: Int, green: Int, blue: Int)
object MyJsonProtocol extends DefaultJsonProtocol {
implicit val colorFormat = jsonFormat4(Color)
}
object SimpleJsonApp extends App {
import MyJsonProtocol._
val color = Color("CadetBlue", 95, 158, 160)
val json = color.toJson
println(json)
println(json.convertTo[Color])
}
|
hekailiang/akka-play
|
actor-samples/src/main/scala/org/example/json/SimpleJsonApp.scala
|
Scala
|
apache-2.0
| 513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.rex.RexNode
import org.apache.calcite.sql.SqlIntervalQualifier
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.tools.RelBuilder
import org.apache.calcite.util.{DateString, TimeString, TimestampString}
import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.typeutils.{RowIntervalTypeInfo, TimeIntervalTypeInfo}
import java.sql.{Date, Time, Timestamp}
import java.util.{Calendar, TimeZone}
import org.apache.commons.lang3.StringEscapeUtils
object Literal {
private[flink] val UTC = TimeZone.getTimeZone("UTC")
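  // Infers the Flink TypeInformation for a literal from the runtime type of the given value.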
private[flink] def apply(l: Any): Literal = l match {
case i: Int => Literal(i, BasicTypeInfo.INT_TYPE_INFO)
case s: Short => Literal(s, BasicTypeInfo.SHORT_TYPE_INFO)
case b: Byte => Literal(b, BasicTypeInfo.BYTE_TYPE_INFO)
case l: Long => Literal(l, BasicTypeInfo.LONG_TYPE_INFO)
case d: Double => Literal(d, BasicTypeInfo.DOUBLE_TYPE_INFO)
case f: Float => Literal(f, BasicTypeInfo.FLOAT_TYPE_INFO)
case str: String => Literal(str, BasicTypeInfo.STRING_TYPE_INFO)
case bool: Boolean => Literal(bool, BasicTypeInfo.BOOLEAN_TYPE_INFO)
case javaDec: java.math.BigDecimal => Literal(javaDec, BasicTypeInfo.BIG_DEC_TYPE_INFO)
case scalaDec: scala.math.BigDecimal =>
Literal(scalaDec.bigDecimal, BasicTypeInfo.BIG_DEC_TYPE_INFO)
case sqlDate: Date => Literal(sqlDate, SqlTimeTypeInfo.DATE)
case sqlTime: Time => Literal(sqlTime, SqlTimeTypeInfo.TIME)
case sqlTimestamp: Timestamp => Literal(sqlTimestamp, SqlTimeTypeInfo.TIMESTAMP)
}
}
case class Literal(value: Any, resultType: TypeInformation[_]) extends LeafExpression {
override def toString: String = resultType match {
case _: BasicTypeInfo[_] => value.toString
    case _@SqlTimeTypeInfo.DATE => value.toString + ".toDate"
    case _@SqlTimeTypeInfo.TIME => value.toString + ".toTime"
    case _@SqlTimeTypeInfo.TIMESTAMP => value.toString + ".toTimestamp"
    case _@TimeIntervalTypeInfo.INTERVAL_MILLIS => value.toString + ".millis"
    case _@TimeIntervalTypeInfo.INTERVAL_MONTHS => value.toString + ".months"
    case _@RowIntervalTypeInfo.INTERVAL_ROWS => value.toString + ".rows"
case _ => s"Literal($value, $resultType)"
}
override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode = {
resultType match {
case BasicTypeInfo.BIG_DEC_TYPE_INFO =>
val bigDecValue = value.asInstanceOf[java.math.BigDecimal]
val decType = relBuilder.getTypeFactory.createSqlType(SqlTypeName.DECIMAL)
relBuilder.getRexBuilder.makeExactLiteral(bigDecValue, decType)
// create BIGINT literals for long type
case BasicTypeInfo.LONG_TYPE_INFO =>
val bigint = java.math.BigDecimal.valueOf(value.asInstanceOf[Long])
relBuilder.getRexBuilder.makeBigintLiteral(bigint)
// date/time
case SqlTimeTypeInfo.DATE =>
val datestr = DateString.fromCalendarFields(valueAsCalendar)
relBuilder.getRexBuilder.makeDateLiteral(datestr)
case SqlTimeTypeInfo.TIME =>
val timestr = TimeString.fromCalendarFields(valueAsCalendar)
relBuilder.getRexBuilder.makeTimeLiteral(timestr, 0)
case SqlTimeTypeInfo.TIMESTAMP =>
val timestampstr = TimestampString.fromCalendarFields(valueAsCalendar)
relBuilder.getRexBuilder.makeTimestampLiteral(timestampstr, 3)
case TimeIntervalTypeInfo.INTERVAL_MONTHS =>
val interval = java.math.BigDecimal.valueOf(value.asInstanceOf[Int])
val intervalQualifier = new SqlIntervalQualifier(
TimeUnit.YEAR,
TimeUnit.MONTH,
SqlParserPos.ZERO)
relBuilder.getRexBuilder.makeIntervalLiteral(interval, intervalQualifier)
case TimeIntervalTypeInfo.INTERVAL_MILLIS =>
val interval = java.math.BigDecimal.valueOf(value.asInstanceOf[Long])
val intervalQualifier = new SqlIntervalQualifier(
TimeUnit.DAY,
TimeUnit.SECOND,
SqlParserPos.ZERO)
relBuilder.getRexBuilder.makeIntervalLiteral(interval, intervalQualifier)
case BasicTypeInfo.STRING_TYPE_INFO =>
relBuilder.getRexBuilder.makeLiteral(
StringEscapeUtils.escapeJava(value.asInstanceOf[String])
)
case _ => relBuilder.literal(value)
}
}
/**
* Convert a Date value to a Calendar. Calcite's fromCalendarField functions use the
* Calendar.get methods, so the raw values of the individual fields are preserved when
* converted to the String formats.
*
* @return get the Calendar value
*/
private def valueAsCalendar: Calendar = {
val date = value.asInstanceOf[java.util.Date]
val cal = Calendar.getInstance
cal.setTime(date)
cal
}
}
case class Null(resultType: TypeInformation[_]) extends LeafExpression {
override def toString = s"null"
override private[flink] def toRexNode(implicit relBuilder: RelBuilder): RexNode = {
val rexBuilder = relBuilder.getRexBuilder
val typeFactory = relBuilder.getTypeFactory.asInstanceOf[FlinkTypeFactory]
rexBuilder
.makeCast(
typeFactory.createTypeFromTypeInfo(resultType, isNullable = true),
rexBuilder.constantNull())
}
}
|
zhangminglei/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/expressions/literals.scala
|
Scala
|
apache-2.0
| 6,265 |
/*******************************************************************************
Copyright 2009,2011, Oracle and/or its affiliates.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.scala_src.useful
import kr.ac.kaist.jsaf.useful.HasAt
import kr.ac.kaist.jsaf.scala_src.useful.Lists._
import kr.ac.kaist.jsaf.exceptions.JSAFError
import kr.ac.kaist.jsaf.exceptions.StaticError
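// Accumulates static errors as they are signalled; duplicates are filtered out when read back.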
class ErrorLog() {
var errors = List[StaticError]()
def signal(msg: String, hasAt: HasAt): Unit =
signal(JSAFError.makeStaticError(msg, hasAt))
def signal(error: StaticError) = {
errors = error :: errors
}
def syntaxError(msg: String, hasAt: HasAt): Unit =
signal(JSAFError.makeSyntaxError(msg, hasAt))
def asList() = {Errors.removeDuplicates(errors)}
def asJavaList() = {toJavaList(asList())}
}
object Errors {
def removeDuplicates(errors: List[StaticError]): List[StaticError] = {
errors match {
case Nil => errors
case fst :: rst =>
if (rst contains fst) {removeDuplicates(rst)}
else {fst :: removeDuplicates(rst)}
}
}
}
/**
* Stores the error and then throws it as an exception. Error messages should be
* printed with nested spacing so that any errors from the tryCheck that are
* actually reported will be nested inside an outer error from the type checker.
*/
class TryErrorLog extends ErrorLog {
override def signal(error: StaticError) = {
super.signal(error)
throw error
}
}
/** Does not maintain any errors; no throwing, no storing. */
object DummyErrorLog extends ErrorLog {
override def signal(error: StaticError) = ()
}
|
darkrsw/safe
|
src/main/scala/kr/ac/kaist/jsaf/scala_src/useful/ErrorLog.scala
|
Scala
|
bsd-3-clause
| 1,801 |
package com.themillhousegroup.edn
import org.specs2.mutable.Specification
import com.themillhousegroup.edn.test.{ StreamChecking, EDNParsing }
class StreamingUsageSpec extends Specification with EDNParsing with StreamChecking {
"Using the Streaming Scala EDN parser" should {
"Allow iteration over a flat keyspace" in new ParserScope(
""":a 1 :b "foo" :c? true """) {
val s = p.asStream(values)
keyValueStreamMustHave(s, "a" -> 1, "b" -> "foo", "c?" -> true)
}
"Allow iteration over a flat keyspace with a map within" in new ParserScope(
""" :a 1 :b "foo" :c? true :d { :da "bar" :db "baz" } """) {
val s = p.asStream(values).toSeq
s must haveSize(4)
s(0) must beEqualTo("a" -> 1)
s(1) must beEqualTo("b" -> "foo")
s(2) must beEqualTo("c?" -> true)
s(3)._1 must beEqualTo("d")
val nestedStream = s(3)._2.asInstanceOf[Stream[(String, AnyRef)]]
keyValueStreamMustHave(nestedStream, "da" -> "bar", "db" -> "baz")
}
"Allow iteration over a flat keyspace with a vector within" in new ParserScope(
""" :a 1 :b "foo" :c? true :d [ 5 6 7 8 ] """) {
val s = p.asStream(values).toSeq
s must haveSize(4)
s(0) must beEqualTo("a" -> 1)
s(1) must beEqualTo("b" -> "foo")
s(2) must beEqualTo("c?" -> true)
s(3)._1 must beEqualTo("d")
val nestedStream = s(3)._2.asInstanceOf[Stream[AnyRef]]
valueStreamMustHave(nestedStream, 5, 6, 7, 8)
}
"Allow iteration over a flat keyspace with a list within" in new ParserScope(
""" :a 1 :b "foo" :c? true :d ( 5 6 7 8 ) """) {
val s = p.asStream(values).toSeq
s must haveSize(4)
s(0) must beEqualTo("a" -> 1)
s(1) must beEqualTo("b" -> "foo")
s(2) must beEqualTo("c?" -> true)
s(3)._1 must beEqualTo("d")
val nestedStream = s(3)._2.asInstanceOf[Stream[AnyRef]]
valueStreamMustHave(nestedStream, 5, 6, 7, 8)
}
"Allow iteration over a flat keyspace with a set within" in new ParserScope(
""" :a 1 :b "foo" :c? true :d #{ 5 6 7 8 } """) {
val s = p.asStream(values).toSeq
s must haveSize(4)
s(0) must beEqualTo("a" -> 1)
s(1) must beEqualTo("b" -> "foo")
s(2) must beEqualTo("c?" -> true)
s(3)._1 must beEqualTo("d")
val nestedStream = s(3)._2.asInstanceOf[Stream[AnyRef]]
valueStreamMustHave(nestedStream, 5, 6, 7, 8)
}
"Allow iteration over a flat keyspace with a nested map within" in new ParserScope(
""" :a 1 :b "foo" :c? true
:d {
:da "bar"
:db "baz"
:dc {
:dc1 "inner"
:dc2 "most"
:dc3 "values"
}
} """) {
val s = p.asStream(values)
s must haveSize(4)
keyStreamMustHave(s, "a", "b", "c?", "d")
val nestedStream = s(3)._2.asInstanceOf[Stream[(String, AnyRef)]]
keyStreamMustHave(nestedStream, "da", "db", "dc")
val mostNestedStream = nestedStream.find {
case (k, v) =>
"dc" == (k)
}.get._2.asInstanceOf[Stream[(String, AnyRef)]]
keyValueStreamMustHave(mostNestedStream,
"dc1" -> "inner",
"dc2" -> "most",
"dc3" -> "values"
)
}
"Allow iteration over an EDN expressed as a map" in new ParserScope(""" {:a 1 :b "foo" :c? true }""") {
val s = p.asStream(values).toSeq
// As per comments on asStream - we view this as a map that happens to have an empty label
s must haveSize(1)
s.head._1 must beEqualTo("")
val nestedStream = s.head._2.asInstanceOf[Stream[(String, AnyRef)]]
keyValueStreamMustHave(nestedStream, "a" -> 1, "b" -> "foo", "c?" -> true)
}
}
}
|
themillhousegroup/edn-scala
|
src/test/scala/com/themillhousegroup/edn/StreamingUsageSpec.scala
|
Scala
|
gpl-2.0
| 3,761 |
package net.kemuridama.kafcon.protocol
import spray.json._
import net.kemuridama.kafcon.model.{APIResponse, APIError}
trait APIResponseJsonProtocol
extends JsonProtocol
with APIErrorJsonProtocol {
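  // Both "data" and "error" are optional members of the wire format; a missing field becomes None.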
implicit def apiResponseFormat[T: JsonFormat] = new RootJsonFormat[APIResponse[T]] {
def read(json: JsValue) = {
val jsObject = json.asJsObject
APIResponse(
jsObject.getFields("data").headOption.map(_.convertTo[T]),
jsObject.getFields("error").headOption.map(_.convertTo[APIError])
)
}
def write(apiResponse: APIResponse[T]) = JsObject(
"data" -> apiResponse.data.toJson,
"error" -> apiResponse.error.toJson
)
}
}
|
kemuridama/kafcon
|
src/main/scala/net/kemuridama/kafcon/protocol/APIResponseProtocol.scala
|
Scala
|
mit
| 689 |
/*
* Copyright (C) 2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cc.spray.json
import org.specs2.mutable._
class ProductFormatsSpec extends Specification {
case class Test2(a: Int, b: Option[Double])
case class Test3[A, B](as: List[A], bs: List[B])
trait TestProtocol {
this: DefaultJsonProtocol =>
implicit val test2Format = jsonFormat2(Test2)
implicit def test3Format[A: JsonFormat, B: JsonFormat] = jsonFormat2(Test3.apply[A, B])
}
object TestProtocol1 extends DefaultJsonProtocol with TestProtocol
object TestProtocol2 extends DefaultJsonProtocol with TestProtocol with NullOptions
"A JsonFormat created with `jsonFormat`, for a case class with 2 elements," should {
import TestProtocol1._
val obj = Test2(42, Some(4.2))
val json = JsObject("a" -> JsNumber(42), "b" -> JsNumber(4.2))
"convert to a respective JsObject" in {
obj.toJson mustEqual json
}
"convert a JsObject to the respective case class instance" in {
json.convertTo[Test2] mustEqual obj
}
"throw a DeserializationException if the JsObject does not all required members" in (
JsObject("b" -> JsNumber(4.2)).convertTo[Test2] must
throwA(new DeserializationException("Object is missing required member 'a'"))
)
"not require the presence of optional fields for deserialization" in {
JsObject("a" -> JsNumber(42)).convertTo[Test2] mustEqual Test2(42, None)
}
"not render `None` members during serialization" in {
Test2(42, None).toJson mustEqual JsObject("a" -> JsNumber(42))
}
"ignore additional members during deserialization" in {
JsObject("a" -> JsNumber(42), "b" -> JsNumber(4.2), "c" -> JsString('no)).convertTo[Test2] mustEqual obj
}
"not depend on any specific member order for deserialization" in {
JsObject("b" -> JsNumber(4.2), "a" -> JsNumber(42)).convertTo[Test2] mustEqual obj
}
"throw a DeserializationException if the JsValue is not a JsObject" in (
JsNull.convertTo[Test2] must throwA(new DeserializationException("Object expected"))
)
}
"A JsonProtocol mixing in NullOptions" should {
"render `None` members to `null`" in {
import TestProtocol2._
Test2(42, None).toJson mustEqual JsObject("a" -> JsNumber(42), "b" -> JsNull)
}
}
"A JsonFormat for a generic case class and created with `jsonFormat`" should {
import TestProtocol1._
val obj = Test3(42 :: 43 :: Nil, "x" :: "y" :: "z" :: Nil)
val json = JsObject(
"as" -> JsArray(JsNumber(42), JsNumber(43)),
"bs" -> JsArray(JsString("x"), JsString("y"), JsString("z"))
)
"convert to a respective JsObject" in {
obj.toJson mustEqual json
}
"convert a JsObject to the respective case class instance" in {
json.convertTo[Test3[Int, String]] mustEqual obj
}
}
}
|
beamly/spray-json
|
src/test/scala/cc/spray/json/ProductFormatsSpec.scala
|
Scala
|
apache-2.0
| 3,389 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.Checkers
import org.scalacheck._
import Arbitrary._
import Prop._
import org.scalatest.exceptions.TestFailedException
import Matchers._
class ShouldIncludeSubstringSpec extends Spec with Checkers with ReturnsNormallyThrowsAssertion {
object `The include substring syntax` {
def `should do nothing if the string includes the specified substring` {
"1.78" should include ("1.7")
"21.7" should include ("1.7")
"21.78" should include ("1.7")
"1.7" should include ("1.7")
check((s: String, t: String, u: String) => returnsNormally(s + t + u should include (t)))
}
def `should do nothing if the string does not include the specified substring when used with not` {
"eight" should not { include ("1.7") }
"eight" should not include ("1.7")
check((s: String, t: String, u: String) => (s + u).indexOf(t) == -1 ==> returnsNormally(s + u should not (include (t))))
check((s: String, t: String, u: String) => (s + u).indexOf(t) == -1 ==> returnsNormally(s + u should not include (t)))
}
def `should do nothing if the string does not include the specified substring when used in a logical-and expression` {
"a1.7" should (include ("1.7") and (include ("1.7")))
"a1.7" should (include ("1.7") and (include ("1.7")))
"a1.7" should (include ("1.7") and (include ("1.7")))
"1.7b" should ((include ("1.7")) and (include ("1.7")))
"1.7b" should ((include ("1.7")) and (include ("1.7")))
"1.7b" should ((include ("1.7")) and (include ("1.7")))
"a1.7b" should (include ("1.7") and include ("1.7"))
"a1.7b" should (include ("1.7") and include ("1.7"))
"a1.7b" should (include ("1.7") and include ("1.7"))
"1.7" should (include ("1.7") and (include ("1.7")))
"1.7" should ((include ("1.7")) and (include ("1.7")))
"1.7" should (include ("1.7") and include ("1.7"))
check((s: String, t: String, u: String) => returnsNormally(s + t + u should (include (s) and include (t) and include (u))))
}
def `should do nothing if the string does not include the specified substring when used in a logical-or expression` {
"a1.7" should (include ("hello") or (include ("1.7")))
"a1.7" should (include ("hello") or (include ("1.7")))
"a1.7" should (include ("hello") or (include ("1.7")))
"1.7b" should ((include ("hello")) or (include ("1.7")))
"1.7b" should ((include ("hello")) or (include ("1.7")))
"1.7b" should ((include ("hello")) or (include ("1.7")))
"a1.7b" should (include ("hello") or include ("1.7"))
"a1.7b" should (include ("hello") or include ("1.7"))
"a1.7b" should (include ("hello") or include ("1.7"))
"1.7" should (include ("hello") or (include ("1.7")))
"1.7" should ((include ("hello")) or (include ("1.7")))
"1.7" should (include ("hello") or include ("1.7"))
check((s: String, t: String, u: String) => returnsNormally(s + t + u should (include ("hi") or include ("ho") or include (t))))
}
def `should do nothing if the string does not include the specified substring when used in a logical-and expression with not` {
"fred" should (not (include ("bob")) and not (include ("1.7")))
"fred" should ((not include ("bob")) and (not include ("1.7")))
"fred" should (not include ("bob") and not include ("1.7"))
check((s: String) => s.indexOf("bob") == -1 && s.indexOf("1.7") == -1 ==> returnsNormally(s should (not include ("bob") and not include ("1.7"))))
}
def `should do nothing if the string does not include the specified substring when used in a logical-or expression with not` {
"fred" should (not (include ("fred")) or not (include ("1.7")))
"fred" should ((not include ("fred")) or (not include ("1.7")))
"fred" should (not include ("fred") or not include ("1.7"))
check((s: String) => s.indexOf("a") == -1 || s.indexOf("b") == -1 ==> returnsNormally(s should (not include ("a") or not include ("b"))))
}
def `should throw TestFailedException if the string does not match the specified substring` {
val caught1 = intercept[TestFailedException] {
"1.7" should include ("1.78")
}
assert(caught1.getMessage === "\\"1.7\\" did not include substring \\"1.78\\"")
val caught2 = intercept[TestFailedException] {
"1.7" should include ("21.7")
}
assert(caught2.getMessage === "\\"1.7\\" did not include substring \\"21.7\\"")
val caught3 = intercept[TestFailedException] {
"-one.eight" should include ("1.7")
}
assert(caught3.getMessage === "\\"-one.eight\\" did not include substring \\"1.7\\"")
val caught6 = intercept[TestFailedException] {
"eight" should include ("1.7")
}
assert(caught6.getMessage === "\\"eight\\" did not include substring \\"1.7\\"")
val caught7 = intercept[TestFailedException] {
"one.eight" should include ("1.7")
}
assert(caught7.getMessage === "\\"one.eight\\" did not include substring \\"1.7\\"")
val caught8 = intercept[TestFailedException] {
"onedoteight" should include ("1.7")
}
assert(caught8.getMessage === "\\"onedoteight\\" did not include substring \\"1.7\\"")
val caught9 = intercept[TestFailedException] {
"***" should include ("1.7")
}
assert(caught9.getMessage === "\\"***\\" did not include substring \\"1.7\\"")
check((s: String) => s.indexOf("1.7") == -1 ==> throwsTestFailedException(s should include ("1.7")))
}
    def `should throw TestFailedException if the string matches the specified substring when used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should not { include ("1.7") }
}
assert(caught1.getMessage === "\\"1.7\\" included substring \\"1.7\\"")
val caught2 = intercept[TestFailedException] {
"1.7" should not { include ("1.7") }
}
assert(caught2.getMessage === "\\"1.7\\" included substring \\"1.7\\"")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { include ("1.8") }
}
assert(caught3.getMessage === "\\"-1.8\\" included substring \\"1.8\\"")
val caught4 = intercept[TestFailedException] {
"8" should not { include ("8") }
}
assert(caught4.getMessage === "\\"8\\" included substring \\"8\\"")
val caught5 = intercept[TestFailedException] {
"1." should not { include (".") }
}
assert(caught5.getMessage === "\\"1.\\" included substring \\".\\"")
val caught11 = intercept[TestFailedException] {
"1.7" should not include ("1.7")
}
assert(caught11.getMessage === "\\"1.7\\" included substring \\"1.7\\"")
val caught13 = intercept[TestFailedException] {
"-1.8" should not include ("-")
}
assert(caught13.getMessage === "\\"-1.8\\" included substring \\"-\\"")
val caught14 = intercept[TestFailedException] {
"8" should not include ("")
}
assert(caught14.getMessage === "\\"8\\" included substring \\"\\"")
val caught15 = intercept[TestFailedException] {
"1." should not include ("1.")
}
assert(caught15.getMessage === "\\"1.\\" included substring \\"1.\\"")
val caught21 = intercept[TestFailedException] {
"a1.7" should not { include ("1.7") }
}
assert(caught21.getMessage === "\\"a1.7\\" included substring \\"1.7\\"")
val caught22 = intercept[TestFailedException] {
"1.7b" should not { include ("1.7") }
}
assert(caught22.getMessage === "\\"1.7b\\" included substring \\"1.7\\"")
val caught23 = intercept[TestFailedException] {
"a-1.8b" should not { include ("1.8") }
}
assert(caught23.getMessage === "\\"a-1.8b\\" included substring \\"1.8\\"")
// substring at the beginning
check((s: String) => s.length != 0 ==> throwsTestFailedException(s should not include (s.substring(0, 1))))
// substring at the end
check((s: String) => s.length != 0 ==> throwsTestFailedException(s should not include (s.substring(s.length - 1, s.length))))
// substring in the middle
check((s: String) => s.length > 1 ==> throwsTestFailedException(s should not include (s.substring(1, 2))))
}
def `should throw TestFailedException if the string includes the specified substring when used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"1.7" should (include ("1.7") and (include ("1.8")))
}
assert(caught1.getMessage === "\\"1.7\\" included substring \\"1.7\\", but \\"1.7\\" did not include substring \\"1.8\\"")
val caught2 = intercept[TestFailedException] {
"1.7" should ((include ("1.7")) and (include ("1.8")))
}
assert(caught2.getMessage === "\\"1.7\\" included substring \\"1.7\\", but \\"1.7\\" did not include substring \\"1.8\\"")
val caught3 = intercept[TestFailedException] {
"1.7" should (include ("1.7") and include ("1.8"))
}
assert(caught3.getMessage === "\\"1.7\\" included substring \\"1.7\\", but \\"1.7\\" did not include substring \\"1.8\\"")
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught4 = intercept[TestFailedException] {
"one.eight" should (include ("1.7") and (include ("1.8")))
}
assert(caught4.getMessage === "\\"one.eight\\" did not include substring \\"1.7\\"")
val caught5 = intercept[TestFailedException] {
"one.eight" should ((include ("1.7")) and (include ("1.8")))
}
assert(caught5.getMessage === "\\"one.eight\\" did not include substring \\"1.7\\"")
val caught6 = intercept[TestFailedException] {
"one.eight" should (include ("1.7") and include ("1.8"))
}
assert(caught6.getMessage === "\\"one.eight\\" did not include substring \\"1.7\\"")
check((s: String, t: String, u: String) => (s + u).indexOf(t) == -1 ==> throwsTestFailedException(s + u should (include (s) and include (t))))
}
def `should throw TestFailedException if the string includes the specified substring when used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"one.seven" should (include ("1.7") or (include ("1.8")))
}
assert(caught1.getMessage === "\\"one.seven\\" did not include substring \\"1.7\\", and \\"one.seven\\" did not include substring \\"1.8\\"")
val caught2 = intercept[TestFailedException] {
"one.seven" should ((include ("1.7")) or (include ("1.8")))
}
assert(caught2.getMessage === "\\"one.seven\\" did not include substring \\"1.7\\", and \\"one.seven\\" did not include substring \\"1.8\\"")
val caught3 = intercept[TestFailedException] {
"one.seven" should (include ("1.7") or include ("1.8"))
}
assert(caught3.getMessage === "\\"one.seven\\" did not include substring \\"1.7\\", and \\"one.seven\\" did not include substring \\"1.8\\"")
check(
(s: String, t: String, u: String, v: String) => {
(t.length != 0 && v.length != 0 && (s + u).indexOf(t) == -1 && (s + u).indexOf(v) == -1) ==>
throwsTestFailedException(s + u should (include (t) or include (v)))
}
)
}
def `should throw TestFailedException if the string includes the specified substring when used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include ("1.8") and (not include ("1.7")))
}
assert(caught1.getMessage === "\\"1.7\\" did not include substring \\"1.8\\", but \\"1.7\\" included substring \\"1.7\\"")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include ("1.8")) and (not include ("1.7")))
}
assert(caught2.getMessage === "\\"1.7\\" did not include substring \\"1.8\\", but \\"1.7\\" included substring \\"1.7\\"")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include ("1.8") and not include ("1.7"))
}
assert(caught3.getMessage === "\\"1.7\\" did not include substring \\"1.8\\", but \\"1.7\\" included substring \\"1.7\\"")
val caught4 = intercept[TestFailedException] {
"a1.7" should (not include ("1.8") and (not include ("1.7")))
}
assert(caught4.getMessage === "\\"a1.7\\" did not include substring \\"1.8\\", but \\"a1.7\\" included substring \\"1.7\\"")
val caught5 = intercept[TestFailedException] {
"1.7b" should ((not include ("1.8")) and (not include ("1.7")))
}
assert(caught5.getMessage === "\\"1.7b\\" did not include substring \\"1.8\\", but \\"1.7b\\" included substring \\"1.7\\"")
val caught6 = intercept[TestFailedException] {
"a1.7b" should (not include ("1.8") and not include ("1.7"))
}
assert(caught6.getMessage === "\\"a1.7b\\" did not include substring \\"1.8\\", but \\"a1.7b\\" included substring \\"1.7\\"")
check(
(s: String, t: String, u: String) =>
(s + t + u).indexOf("hi") == -1 ==>
throwsTestFailedException(s + t + u should (not include ("hi") and not include (t)))
)
}
def `should throw TestFailedException if the string includes the specified substring when used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include ("1.7") or (not include ("1.7")))
}
assert(caught1.getMessage === "\\"1.7\\" included substring \\"1.7\\", and \\"1.7\\" included substring \\"1.7\\"")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include ("1.7")) or (not include ("1.7")))
}
assert(caught2.getMessage === "\\"1.7\\" included substring \\"1.7\\", and \\"1.7\\" included substring \\"1.7\\"")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include ("1.7") or not include ("1.7"))
}
assert(caught3.getMessage === "\\"1.7\\" included substring \\"1.7\\", and \\"1.7\\" included substring \\"1.7\\"")
val caught4 = intercept[TestFailedException] {
"1.7" should (not (include ("1.7")) or not (include ("1.7")))
}
assert(caught4.getMessage === "\\"1.7\\" included substring \\"1.7\\", and \\"1.7\\" included substring \\"1.7\\"")
val caught5 = intercept[TestFailedException] {
"a1.7" should (not include ("1.7") or (not include ("1.7")))
}
assert(caught5.getMessage === "\\"a1.7\\" included substring \\"1.7\\", and \\"a1.7\\" included substring \\"1.7\\"")
val caught6 = intercept[TestFailedException] {
"1.7b" should ((not include ("1.7")) or (not include ("1.7")))
}
assert(caught6.getMessage === "\\"1.7b\\" included substring \\"1.7\\", and \\"1.7b\\" included substring \\"1.7\\"")
val caught7 = intercept[TestFailedException] {
"a1.7b" should (not include ("1.7") or not include ("1.7"))
}
assert(caught7.getMessage === "\\"a1.7b\\" included substring \\"1.7\\", and \\"a1.7b\\" included substring \\"1.7\\"")
val caught8 = intercept[TestFailedException] {
"a1.7b" should (not (include ("1.7")) or not (include ("1.7")))
}
assert(caught8.getMessage === "\\"a1.7b\\" included substring \\"1.7\\", and \\"a1.7b\\" included substring \\"1.7\\"")
check(
(s: String, t: String, u: String) =>
throwsTestFailedException(s + t + u should (not include (s) or not include (t) or not include (u)))
)
}
}
}
|
travisbrown/scalatest
|
src/test/scala/org/scalatest/ShouldIncludeSubstringSpec.scala
|
Scala
|
apache-2.0
| 16,112 |
package util
/**
* GraPHPizer source code analytics engine
* Copyright (C) 2015 Martin Helmich <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import akka.actor.ActorLogging
trait WrappingActorLogging {
this: ActorLogging =>
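  // Usage: withLog("message") exec { body } logs the message, runs the body, then logs "Done".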
sealed trait LogWrapper {
def exec[T](m: => T)
}
def withLog(msg: String): LogWrapper = new LogWrapper {
override def exec[T](m: => T): Unit = {
log.info(msg)
m
log.info("Done")
}
}
}
|
martin-helmich/graphpizer-server
|
app/util/WrappingActorLogging.scala
|
Scala
|
gpl-3.0
| 1,091 |
package org.jetbrains.plugins.scala.debugger.evaluation.evaluator
import com.intellij.debugger.JavaDebuggerBundle
import com.intellij.debugger.engine.DebuggerUtils
import com.intellij.debugger.engine.evaluation.EvaluationContextImpl
import com.intellij.debugger.engine.evaluation.expression.{Evaluator, Modifier}
import com.intellij.debugger.ui.impl.watch.{ArrayElementDescriptorImpl, NodeDescriptorImpl}
import com.intellij.openapi.project.Project
import com.sun.jdi._
import org.jetbrains.plugins.scala.debugger.evaluation.EvaluationException
/**
* User: Alexander Podkhalyuzin
* Date: 08.11.11
*/
class ScalaArrayAccessEvaluator(arrayReferenceEvaluator: Evaluator, indexEvaluator: Evaluator) extends Evaluator {
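  // Evaluates the array reference and index expressions, validates their runtime types, and
  // returns the element at that index; getModifier lets the debugger write a value back.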
override def evaluate(context: EvaluationContextImpl): AnyRef = {
myEvaluatedIndex = 0
myEvaluatedArrayReference = null
val indexValue: Value = indexEvaluator.evaluate(context).asInstanceOf[Value]
val arrayValue: Value = arrayReferenceEvaluator.evaluate(context).asInstanceOf[Value]
if (!arrayValue.isInstanceOf[ArrayReference]) {
throw EvaluationException(JavaDebuggerBundle.message("evaluation.error.array.reference.expected"))
}
myEvaluatedArrayReference = arrayValue.asInstanceOf[ArrayReference]
if (!DebuggerUtils.isInteger(indexValue)) {
throw EvaluationException(JavaDebuggerBundle.message("evaluation.error.invalid.index.expression"))
}
myEvaluatedIndex = indexValue.asInstanceOf[PrimitiveValue].intValue
try {
myEvaluatedArrayReference.getValue(myEvaluatedIndex)
}
catch {
case e: Exception =>
throw EvaluationException(e)
}
}
override def getModifier: Modifier = {
var modifier: Modifier = null
if (myEvaluatedArrayReference != null) {
modifier = new Modifier {
override def canInspect: Boolean = true
override def canSetValue: Boolean = true
override def setValue(value: Value): Unit = {
myEvaluatedArrayReference.setValue(myEvaluatedIndex, value)
}
override def getExpectedType: Type = {
try {
val tp: ArrayType = myEvaluatedArrayReference.referenceType.asInstanceOf[ArrayType]
tp.componentType
}
catch {
case e: ClassNotLoadedException =>
throw EvaluationException(e)
}
}
override def getInspectItem(project: Project): NodeDescriptorImpl = {
new ArrayElementDescriptorImpl(project, myEvaluatedArrayReference, myEvaluatedIndex)
}
}
}
modifier
}
private var myEvaluatedArrayReference: ArrayReference = null
private var myEvaluatedIndex: Int = 0
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/debugger/evaluation/evaluator/ScalaArrayAccessEvaluator.scala
|
Scala
|
apache-2.0
| 2,682 |
package com.twitter.gizzard.nameserver
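// Operational status of a cluster host; the explicit ids give each value a stable numeric encoding.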
object HostStatus extends Enumeration {
val Normal = Value(0)
val Blackholed = Value(1)
val Blocked = Value(2)
}
case class Host(hostname: String, port: Int, cluster: String, status: HostStatus.Value)
|
kmiku7/gizzard
|
src/main/scala/com/twitter/gizzard/nameserver/Host.scala
|
Scala
|
apache-2.0
| 255 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
// TODO: Use this Helper in the matchers/ClassicMatchers.scala
import org.scalatest.matchers._
import java.lang.reflect.Method
import java.lang.reflect.Modifier
import scala.util.matching.Regex
import java.lang.reflect.Field
import scala.reflect.Manifest
import Helper.transformOperatorChars
import scala.collection.Traversable
import Assertions.areEqualComparingArraysStructurally
import org.scalatest.exceptions.TestFailedException
import scala.collection.GenTraversable
import scala.collection.GenSeq
import scala.collection.GenMap
import org.scalautils.Tolerance
import org.scalautils.Interval
import org.scalautils.TripleEqualsInvocation
import scala.annotation.tailrec
import org.scalautils.Equality
import org.scalatest.words.ShouldVerb
import org.scalautils.TripleEqualsInvocationOnInterval
import org.scalautils.EqualityConstraint
import org.scalatest.matchers.HavePropertyMatcher
import org.scalatest.matchers.HavePropertyMatchResult
import org.scalatest.matchers.BePropertyMatcher
import org.scalatest.matchers.BePropertyMatchResult
import org.scalatest.matchers.BeMatcher
import org.scalatest.matchers.Matcher
import org.scalatest.matchers.MatchResult
import Matchers.andMatchersAndApply
import Matchers.orMatchersAndApply
// TODO: drop generic support for be as an equality comparison, in favor of specific ones.
// TODO: mention in JUnit and TestNG docs that you can now mix in ShouldMatchers or MustMatchers
// TODO: Put links from ShouldMatchers to wherever I reveal the matrix and algo of how properties are checked dynamically.
// TODO: double check that I wrote tests for (length (7)) and (size (8)) in parens
// TODO: document how to turn off the === implicit conversion
// TODO: Document you can use JMock, EasyMock, etc.
private[scalatest] object Helper {
// If the symbol passed is 'title, this will look for a field named "title", a method named "title", or a
// method named "getTitle". The method must take no parameters.
//
// F (field) | M (method) | G (get or is method) | Result
// 0 0 0 None
// 0 0 1 Some(G)
// 0 1 0 Some(M)
  //   0           1                1                  Some(M) prefer a Scala-style one over a Java-style one, such as when using the BeanProperty annotation
  //   1           0                0                  Some(F) ignore the field if there's a method; in Java a field and its get method often share the same name
// 1 0 1 Some(G)
// 1 1 0 Some(M)
  //   1           1                1                  Some(M) prefer a Scala-style one over a Java-style one, such as when using the BeanProperty annotation
//
def accessProperty(objectWithProperty: AnyRef, propertySymbol: Symbol, isBooleanProperty: Boolean): Option[Any] = {
// If 'title passed, propertyName would be "title"
val propertyName = propertySymbol.name
// if propertyName is '>, mangledPropertyName would be "$greater"
val mangledPropertyName = transformOperatorChars(propertyName)
// fieldNameToAccess and methodNameToInvoke would also be "title"
val fieldNameToAccess = mangledPropertyName
val methodNameToInvoke = mangledPropertyName
// methodNameToInvokeWithGet would be "getTitle"
val prefix = if (isBooleanProperty) "is" else "get"
val methodNameToInvokeWithGet = prefix + mangledPropertyName(0).toUpper + mangledPropertyName.substring(1)
val firstChar = propertyName(0).toLower
val methodNameStartsWithVowel = firstChar == 'a' || firstChar == 'e' || firstChar == 'i' ||
firstChar == 'o' || firstChar == 'u'
def isFieldToAccess(field: Field): Boolean = field.getName == fieldNameToAccess
// If it is a predicate, I check the result type, otherwise I don't. Maybe I should just do that. Could be a later enhancement.
def isMethodToInvoke(method: Method): Boolean =
method.getName == methodNameToInvoke && method.getParameterTypes.length == 0 && !Modifier.isStatic(method.getModifiers()) &&
(!isBooleanProperty || method.getReturnType == classOf[Boolean])
def isGetMethodToInvoke(method: Method): Boolean =
method.getName == methodNameToInvokeWithGet && method.getParameterTypes.length == 0 && !Modifier.isStatic(method.getModifiers()) &&
(!isBooleanProperty || method.getReturnType == classOf[Boolean])
val fieldOption = objectWithProperty.getClass.getFields.find(isFieldToAccess)
val methodOption = objectWithProperty.getClass.getMethods.find(isMethodToInvoke)
val getMethodOption = objectWithProperty.getClass.getMethods.find(isGetMethodToInvoke)
(fieldOption, methodOption, getMethodOption) match {
case (_, Some(method), _) => Some(method.invoke(objectWithProperty, Array[AnyRef](): _*))
case (_, None, Some(getMethod)) => Some(getMethod.invoke(objectWithProperty, Array[AnyRef](): _*))
case (Some(field), None, None) => Some(field.get(objectWithProperty))
case (None, None, None) => None
}
}
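  // Illustrative usage of accessProperty (a sketch; Book is a hypothetical class, not part of
  // this codebase):
  //   case class Book(title: String)
  //   accessProperty(Book("Dune"), 'title, isBooleanProperty = false)  // Some("Dune")
  //   accessProperty(Book("Dune"), 'author, isBooleanProperty = false) // None

  // Encodes Scala operator characters into their JVM-mangled names,
  // e.g. transformOperatorChars("<=") == "$less$eq".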
def transformOperatorChars(s: String): String = {
val builder = new StringBuilder
for (i <- 0 until s.length) {
val ch = s.charAt(i)
val replacement =
ch match {
case '!' => "$bang"
case '#' => "$hash"
case '~' => "$tilde"
case '|' => "$bar"
case '^' => "$up"
case '\\' => "$bslash"
case '@' => "$at"
case '?' => "$qmark"
case '>' => "$greater"
case '=' => "$eq"
case '<' => "$less"
case ':' => "$colon"
case '/' => "$div"
case '-' => "$minus"
case '+' => "$plus"
case '*' => "$times"
case '&' => "$amp"
case '%' => "$percent"
case _ => ""
}
if (replacement.length > 0)
builder.append(replacement)
else
builder.append(ch)
}
builder.toString
}
}
|
svn2github/scalatest
|
src/main/scala/org/scalatest/Helper.scala
|
Scala
|
apache-2.0
| 6,625 |
package msgpack4z
import scala.util.Random
import scalaprops._
import scalaz._
abstract class UnionSpec(unionGen0: Gen[MsgpackUnion] = UnionGen.unionGen) extends SpecBase {
private implicit def unionGen: Gen[MsgpackUnion] = unionGen0
private def supportExtType: Boolean = unionGen == UnionGen.unionGen
val union = checkLaw(MsgpackUnion.codecInstance, unionGen)
val `equals hashCode` = Property.forAll { (a: MsgpackUnion) =>
val M = MsgpackCodec[MsgpackUnion]
val bytes = M.toBytes(a, packer())
M.unpackAndClose(unpacker(bytes)) match {
case \\/-(b) =>
(a == b) && (a.hashCode == b.hashCode)
case -\\/(e) =>
println(e)
false
}
}.toProperties((), Param.minSuccessful(10000))
val `MsgpackLong and MsgpackULong` = Property.forAll { (a: Long) =>
val x = MsgpackLong(a)
val y = MsgpackULong(java.math.BigInteger.valueOf(a))
(y == x) && (x == y) && (x.hashCode == y.hashCode)
}
val `MsgpackLong pack/unpack MsgpackLong` = Property.forAll { (a: Long) =>
val M = MsgpackCodec[MsgpackUnion]
val b = MsgpackLong(a)
val c = M.toBytes(b, packer())
M.unpackAndClose(unpacker(c)) match {
case \\/-(_: MsgpackLong) =>
true
case other =>
sys.error(other.toString)
}
}
val extEqualsHashcode = Property.forAllG(UnionGen.extGen) { (e1: MsgpackExt) =>
val e2 = e1.copy()
(e1 ne e2) && (e1 == e2) && (e1.## == e2.##)
}
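  // Checks the wire size of msgpack ext values: fixext 1/2/4/8/16 for the fixed payload
  // lengths, otherwise ext 8 / ext 16 / ext 32 with a 1-, 2- or 4-byte length field
  // (see the per-case comments below).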
private def extSizeTest(e: MsgpackExt) = {
val bytes = MsgpackCodec[MsgpackUnion].toBytes(e, packer())
e.data.length match {
case n @ (1 | 2 | 4 | 8 | 16) =>
assert(e.tpe == bytes(1))
bytes.length == (n + 2) // header(1) + type(1) + data(n)
case n if n < (1 << 8) =>
assert(e.tpe == bytes(2))
bytes.length == (n + 3) // header(1) + size(1) + type(1) + data(n)
case n if n < (1 << 16) =>
assert(e.tpe == bytes(3))
bytes.length == (n + 4) // header(1) + size(2) + type(1) + data(n)
case n =>
assert(e.tpe == bytes(5))
bytes.length == (n + 6) // header(1) + size(4) + type(1) + data(n)
}
}
val extSize1 = Property.forAllG(UnionGen.extGen) { e =>
if (supportExtType) {
extSizeTest(e)
} else true
}
val extSize2 = extSize1.toProperties((), Param.maxSize(1 << 18))
val ext16 = Property.forAll {
if (supportExtType) {
val size = 1 << 10
val e = MsgpackUnion.ext((Random.nextInt().toByte, Array.fill[Byte](size)(Random.nextInt().toByte)))
val bytes = MsgpackCodec[MsgpackUnion].toBytes(e, packer())
assert(bytes.length == (size + 4)) // header(1) + size(2) + type(1) + data(n)
assert(bytes(0) == 0xc8.toByte)
MsgpackCodec[MsgpackUnion].unpackAndClose(unpacker(bytes)) match {
case \\/-(a) =>
a == e
case -\\/(a) =>
println(a)
throw a
}
} else true
}
val ext32 = Property.forAll {
if (supportExtType) {
val size = 1 << 17
val e = MsgpackUnion.ext((Random.nextInt().toByte, Array.fill[Byte](size)(Random.nextInt().toByte)))
val bytes = MsgpackCodec[MsgpackUnion].toBytes(e, packer())
assert(bytes.length == (size + 6)) // header(1) + size(4) + type(1) + data(n)
assert(bytes(0) == 0xc9.toByte)
MsgpackCodec[MsgpackUnion].unpackAndClose(unpacker(bytes)) match {
case \\/-(a) =>
a == e
case -\\/(a) =>
println(a)
throw a
}
} else true
}
val `map imap` = Property.forAll { (a: Map[MsgpackUnion, MsgpackUnion]) =>
val b = MsgpackUnion.map(a)
val c = IMap.fromList(a.toList)
val d = MsgpackUnion.imap(c)
assert(b.map == Opt(a))
assert(b.imap == Opt(c))
assert(b.map.map(x => IMap.fromList(x.toList)) == Opt(c))
assert(d.map == Opt(a))
assert(d.imap == Opt(c))
    assert(d.map.map(x => IMap.fromList(x.toList)) == Opt(c))
true
}
}
|
msgpack4z/msgpack4z-core
|
src/test/scala/msgpack4z/UnionSpec.scala
|
Scala
|
mit
| 3,926 |
package handlers.client
import handlers.packets.{PacketReader, PacketsUtils}
import handlers.server.VersionAndCryptKey
import scala.concurrent.Future
/**
* Created by franblas on 26/03/17.
*/
class CryptKeyRequest() extends HandlerProcessor {
override def process(data: Array[Byte]): Future[Array[Byte]] = {
val reader = new PacketReader(data)
    val rc4 = reader.readByte // should be 0 (1 means the requests are encrypted)
val clientTypeTmp = reader.readByte
/*
client_type
unknown = -1
classic = 1
shrouded_isles = 2
trials_of_atlantis = 3
catacombs = 4
darkness_rising = 5
labyrinth_of_the_minotaur = 6
*/
// client_type = hex(int(client_type_tmp) & 0x0F)
// client_addons = hex(int(client_type_tmp) & 0xF0)
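    // A Scala equivalent of the commented pseudo-code above would look roughly like this
    // (kept as a comment here; the names are illustrative only):
    //   val clientType = clientTypeTmp & 0x0F   // expansion level
    //   val clientAddons = clientTypeTmp & 0xF0 // add-on flags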
val major = reader.readByte
val minor = reader.readByte
val build = reader.readByte
val version = PacketsUtils.versionBuilder(major, minor, build)
new VersionAndCryptKey(version).process()
}
}
|
franblas/NAOC
|
src/main/scala/handlers/client/CryptKeyRequest.scala
|
Scala
|
mit
| 1,004 |
package com.github.log0ymxm.mapper.examples.taxi
import org.apache.log4j.{ LogManager, Level }
import org.apache.spark.ml
import org.apache.spark.ml.Pipeline
import org.apache.spark.ml.feature.{ OneHotEncoder, StringIndexer, StandardScaler, VectorAssembler }
import org.apache.spark.mllib
import org.apache.spark.mllib.linalg.distributed.{ IndexedRowMatrix, IndexedRow, CoordinateMatrix, MatrixEntry }
import org.apache.spark.mllib.linalg.{ DenseVector, Vectors, Vector }
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import com.github.log0ymxm.mapper.Mapper
object TaxiDriver {
// Converts from an ml Vector to an mllib Vector to be used with coordinate matrix
def objToVector(features: Object): mllib.linalg.Vector = {
println(s"features $features")
val dense: ml.linalg.DenseVector = features match {
case f: ml.linalg.DenseVector => f
case sparse: ml.linalg.SparseVector => sparse.toDense
}
println(s"dense $dense")
mllib.linalg.Vectors.dense(dense.toArray)
}
def main(args: Array[String]): Unit = {
LogManager.getLogger("org").setLevel(Level.WARN)
val builder = SparkSession.builder()
.appName("Taxi Mapper")
val spark = builder.getOrCreate()
val sc = spark.sparkContext
val df = spark.read
.option("header", "false")
.schema(TaxiConfig.schema)
.csv("s3n://frst-nyc-data/trips/trips_*.csv.gz")
.repartition(3000)
.cache
df.show
println(df.stat.freqItems(Array("vendor_id", "store_and_fwd_flag")))
val taxiDf = df
.withColumn("pickup_timestamp", unix_timestamp(col("pickup_datetime")))
.withColumn("pickup_dayofmonth", dayofmonth(col("pickup_datetime")))
.withColumn("pickup_dayofyear", dayofyear(col("pickup_datetime")))
.withColumn("pickup_hour", hour(col("pickup_datetime")))
.withColumn("pickup_minute", minute(col("pickup_datetime")))
.withColumn("pickup_month", month(col("pickup_datetime")))
.withColumn("pickup_quarter", quarter(col("pickup_datetime")))
.withColumn("pickup_weekofyear", weekofyear(col("pickup_datetime")))
.withColumn("pickup_year", year(col("pickup_datetime")))
.withColumn("pickup_dayofweek", TaxiConfig.int_dayofweek(date_format(col("pickup_datetime"), "E")))
.withColumn("pickup_time_num", TaxiConfig.time_num(col("pickup_hour"), col("pickup_minute")))
.withColumn("pickup_time_cos", TaxiConfig.cos2pi(col("pickup_time_num")))
.withColumn("pickup_time_sin", TaxiConfig.sin2pi(col("pickup_time_num")))
.withColumn("pickup_week_num", TaxiConfig.week_num(col("pickup_dayofweek"), col("pickup_hour"), col("pickup_minute")))
.withColumn("pickup_week_cos", TaxiConfig.cos2pi(col("pickup_week_num")))
.withColumn("pickup_week_sin", TaxiConfig.sin2pi(col("pickup_week_num")))
.withColumn("pickup_month_lastday", dayofmonth(last_day(col("pickup_datetime"))))
.withColumn("pickup_month_cos", TaxiConfig.cos2pi(TaxiConfig.month_num(col("pickup_month"), col("pickup_month_lastday"))))
.withColumn("pickup_month_sin", TaxiConfig.sin2pi(TaxiConfig.month_num(col("pickup_month"), col("pickup_month_lastday"))))
.withColumn("pickup_year_cos", TaxiConfig.cos2pi(TaxiConfig.year_num(col("pickup_year"))))
.withColumn("pickup_year_sin", TaxiConfig.sin2pi(TaxiConfig.year_num(col("pickup_year"))))
.withColumn("pickup_isweekend", TaxiConfig.is_weekend(col("pickup_dayofweek")))
.withColumn("pickup_ispm", TaxiConfig.int_ampm(date_format(col("pickup_datetime"), "a")))
.withColumn("dropoff_timestamp", unix_timestamp(col("dropoff_datetime")))
.withColumn("dropoff_dayofmonth", dayofmonth(col("dropoff_datetime")))
.withColumn("dropoff_dayofyear", dayofyear(col("dropoff_datetime")))
.withColumn("dropoff_hour", hour(col("dropoff_datetime")))
.withColumn("dropoff_minute", minute(col("dropoff_datetime")))
.withColumn("dropoff_month", month(col("dropoff_datetime")))
.withColumn("dropoff_quarter", quarter(col("dropoff_datetime")))
.withColumn("dropoff_weekofyear", weekofyear(col("dropoff_datetime")))
.withColumn("dropoff_year", year(col("dropoff_datetime")))
.withColumn("dropoff_dayofweek", TaxiConfig.int_dayofweek(date_format(col("dropoff_datetime"), "E")))
.withColumn("dropoff_time_num", TaxiConfig.time_num(col("dropoff_hour"), col("dropoff_minute")))
.withColumn("dropoff_time_cos", TaxiConfig.cos2pi(col("dropoff_time_num")))
.withColumn("dropoff_time_sin", TaxiConfig.sin2pi(col("dropoff_time_num")))
.withColumn("dropoff_week_num", TaxiConfig.week_num(col("dropoff_dayofweek"), col("dropoff_hour"), col("dropoff_minute")))
.withColumn("dropoff_week_cos", TaxiConfig.cos2pi(col("dropoff_week_num")))
.withColumn("dropoff_week_sin", TaxiConfig.sin2pi(col("dropoff_week_num")))
.withColumn("dropoff_month_lastday", dayofmonth(last_day(col("dropoff_datetime"))))
.withColumn("dropoff_month_cos", TaxiConfig.cos2pi(TaxiConfig.month_num(col("dropoff_month"), col("dropoff_month_lastday"))))
.withColumn("dropoff_month_sin", TaxiConfig.sin2pi(TaxiConfig.month_num(col("dropoff_month"), col("dropoff_month_lastday"))))
.withColumn("dropoff_year_cos", TaxiConfig.cos2pi(TaxiConfig.year_num(col("dropoff_year"))))
.withColumn("dropoff_year_sin", TaxiConfig.sin2pi(TaxiConfig.year_num(col("dropoff_year"))))
.withColumn("dropoff_isweekend", TaxiConfig.is_weekend(col("dropoff_dayofweek")))
.withColumn("dropoff_ispm", TaxiConfig.int_ampm(date_format(col("dropoff_datetime"), "a")))
taxiDf.printSchema
taxiDf.show
println(s"--- approx count ${taxiDf.rdd.countApprox(100000)}")
val nonnullDf = taxiDf.na.fill("unknown", TaxiConfig.oneHotEncodeColumns)
.na.fill(0, List(
"fare_amount", "extra", "mta_tax", "tip_amount",
"tolls_amount", "improvement_surcharge", "total_amount",
"precipitation", "snow_depth", "snowfall"
))
.na.drop()
.cache
println(s"---- nonull count ${nonnullDf.rdd.count()}")
//val sampleDF = nonnullDf.sample(true, 0.001).cache
val indexers = TaxiConfig.oneHotEncodeColumns.map { c => (c, new StringIndexer().setInputCol(c).setOutputCol(s"${c}_index")) }
val oneHotEncoders = TaxiConfig.oneHotEncodeColumns.map { c => (c, new OneHotEncoder().setInputCol(s"${c}_index").setOutputCol(s"${c}_onehot")) }
val assembler = new VectorAssembler()
.setInputCols(TaxiConfig.oneHotEncodeColumns.map(x => s"${x}_onehot").toArray ++ TaxiConfig.standardizeColumns)
.setOutputCol("assembled")
val scaler = new StandardScaler()
.setInputCol("assembled")
.setOutputCol("features")
.setWithStd(true)
.setWithMean(true)
val pipeStages = (
(indexers.map(_._2) ++ oneHotEncoders.map(_._2))
++ Array(assembler, scaler)
).toArray
val pipeline = new Pipeline().setStages(pipeStages)
val model = pipeline.fit(nonnullDf)
val transformedDf = model.transform(nonnullDf).select("id", "features").cache
transformedDf.show(2)
val matrix = new IndexedRowMatrix(transformedDf.select("id", "features").rdd.map {
case Row(id: Long, features: Object) =>
IndexedRow(id, objToVector(features))
})
val similarities = matrix.toCoordinateMatrix
.transpose()
.toIndexedRowMatrix()
.columnSimilarities()
val dist = new CoordinateMatrix(
similarities
.entries
.map((entry) => new MatrixEntry(entry.i, entry.j, 1 - entry.value))
)
val filtered = new IndexedRowMatrix(transformedDf.rdd.map({
case Row(id: Long, features: Vector) =>
IndexedRow(id, new DenseVector(Array(
Vectors.norm(features, 2)
)))
}))
println("Running Mapper")
val graph = Mapper.mapper(
sc,
dist,
filtered
)
Mapper.writeAsJson(graph, "s3n://frst-nyc-data/graph.json")
spark.stop()
}
}
|
log0ymxm/spark-mapper
|
src/main/scala/com/github/log0ymxm/mapper/examples/taxi/TaxiDriver.scala
|
Scala
|
apache-2.0
| 8,033 |
package scala.pickling.internal
import java.util
import scala.pickling.PicklingErrors.LogicException
import scala.pickling.spi.{RefUnpicklingRegistry, RefPicklingRegistry, RefRegistry}
/** Default implementation of the Ref registry that allows circular dependencies to be handled.
* Uses thread-local caches (per pickler/unpickler thread).
*/
final class DefaultRefRegistry extends RefRegistry {
private object picklerTl extends ThreadLocal[RefPicklingRegistry] {
override def initialValue(): RefPicklingRegistry = new DefaultRefPicklingRegistry
}
private object unpicklerTl extends ThreadLocal[RefUnpicklingRegistry] {
override def initialValue(): RefUnpicklingRegistry = new DefaultRefUnpicklingRegistry()
}
override def pickle: RefPicklingRegistry = picklerTl.get()
override def unpickle: RefUnpicklingRegistry = unpicklerTl.get()
}
class DefaultRefPicklingRegistry extends RefPicklingRegistry {
private val refs = new util.IdentityHashMap[AnyRef, Integer]()
private var nextPicklee: Int = 0
override def registerPicklee(picklee: Any): Int = {
val anyRefPicklee = picklee.asInstanceOf[AnyRef]
// check if `anyRefPicklee` is already in the map.
// if so, obtain its index, else insert at index `nextPicklee`.
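    // A non-negative result is the index of an already-seen object (so the caller can emit a
    // backreference to it); -1 signals a first occurrence that should be pickled in full.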
if (refs.containsKey(anyRefPicklee)) {
refs.get(anyRefPicklee).intValue
} else {
refs.put(anyRefPicklee, new Integer(nextPicklee))
nextPicklee = nextPicklee + 1
-1
}
}
override def clear(): Unit = {
refs.clear()
nextPicklee = 0
}
}
/** Single-threaded unpickling registry. */
class DefaultRefUnpicklingRegistry(maxRefs: Int = 655536) extends RefUnpicklingRegistry {
private var refs: Array[Any] = new Array[Any](maxRefs)
private var idx = 0
override def preregisterUnpicklee(): Int = {
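    // Reserve the next slot, doubling the backing array when it is full, and return its index.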
val index = idx
val len = refs.length
val target = if (index == len) {
val newArr = Array.ofDim[Any](len * 2)
System.arraycopy(refs, 0, newArr, 0, len)
refs = newArr
newArr
} else refs
target(index) = null
idx += 1
index
}
override def clear(): Unit = {
val last = idx
idx = 0
var i = 0
while (i < last) {
refs(i) = null
i += 1
}
}
override def registerUnpicklee(oid: Int, value: Any): Unit = {
refs(oid) = value
}
override def lookupUnpicklee(oid: Int): Any = {
if (oid >= idx) throw new LogicException(
s"Fatal error: invalid index $oid unpicklee cache of length $idx")
val result = refs(oid)
if (result == null) throw new LogicException(
s"Fatal error: unpicklee cache is corrupted at $oid")
result
}
}
|
scala/pickling
|
core/src/main/scala/scala/pickling/internal/DefaultRefRegistry.scala
|
Scala
|
bsd-3-clause
| 2,636 |
/*
 * Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.mongodb
sealed abstract class RenameSemantics
object RenameSemantics {
case object Overwrite extends RenameSemantics
case object FailIfExists extends RenameSemantics
}
|
drostron/quasar
|
mongodb/src/main/scala/quasar/physical/mongodb/RenameSemantics.scala
|
Scala
|
apache-2.0
| 803 |
import sbt.Keys._
import sbt._
object Common {
val _scalaVersion = "2.12.2"
val _scalacOptions = Seq(
"-feature",
"-deprecation",
"-language:postfixOps",
"-language:implicitConversions",
"-Xcheckinit", // Should be removed for production use
// Found this list on tpolecat's github site
"-deprecation",
"-encoding", "UTF-8", // yes, this is 2 args
"-feature",
"-unchecked",
"-Xfatal-warnings",
"-Xlint",
"-Yno-adapted-args",
// "-Ywarn-dead-code", // N.B. doesn't work well with the ??? hole
// "-Ywarn-numeric-widen", // This is annoying af
// "-Ywarn-value-discard",
"-Xfuture",
"-Ywarn-unused-import" // 2.11 only
)
val Settings = Seq(
version := "0.1",
scalacOptions ++= _scalacOptions,
scalaVersion := _scalaVersion
)
}
|
SKNZ/SpinaciCore
|
wow/project/Common.scala
|
Scala
|
mit
| 820 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the Meeting entity.
*/
class MeetingGatlingTest extends Simulation {
val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
// Log all HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
// Log failed HTTP requests
//context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
val httpConf = http
.baseURL(baseURL)
.inferHtmlResources()
.acceptHeader("*/*")
.acceptEncodingHeader("gzip, deflate")
.acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
.connection("keep-alive")
.userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
val headers_http = Map(
"Accept" -> """application/json"""
)
val headers_http_authenticated = Map(
"Accept" -> """application/json""",
"X-CSRF-TOKEN" -> "${csrf_token}"
)
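  // The first unauthenticated call captures the CSRF token from the Set-Cookie header;
  // subsequent requests replay it via the X-CSRF-TOKEN header defined above.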
val scn = scenario("Test the Meeting entity")
.exec(http("First unauthenticated request")
.get("/api/account")
.headers(headers_http)
.check(status.is(401))
.check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
.pause(10)
.exec(http("Authentication")
.post("/api/authentication")
.headers(headers_http_authenticated)
.formParam("j_username", "admin")
.formParam("j_password", "admin")
.formParam("remember-me", "true")
.formParam("submit", "Login"))
.pause(1)
.exec(http("Authenticated request")
.get("/api/account")
.headers(headers_http_authenticated)
.check(status.is(200))
.check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
.pause(10)
.repeat(2) {
exec(http("Get all meetings")
.get("/api/meetings")
.headers(headers_http_authenticated)
.check(status.is(200)))
.pause(10 seconds, 20 seconds)
.exec(http("Create new meeting")
.put("/api/meetings")
.headers(headers_http_authenticated)
.body(StringBody("""{"id":null, "title":"SAMPLE_TEXT", "submitter":"SAMPLE_TEXT", "aliasUsed":"SAMPLE_TEXT", "startDate":"2020-01-01T00:00:00.000Z", "endDate":"2020-01-01T00:00:00.000Z", "uid":"SAMPLE_TEXT"}""")).asJSON
.check(status.is(201))
.check(headerRegex("Location", "(.*)").saveAs("new_meeting_url")))
.pause(10)
.repeat(5) {
exec(http("Get created meeting")
.get("${new_meeting_url}")
.headers(headers_http_authenticated))
.pause(10)
}
.exec(http("Delete created meeting")
.delete("${new_meeting_url}")
.headers(headers_http_authenticated))
.pause(10)
}
val users = scenario("Users").exec(scn)
setUp(
users.inject(rampUsers(100) over (1 minutes))
).protocols(httpConf)
}
|
TransparencyInternationalEU/lobbycal
|
src/test/gatling/simulations/MeetingGatlingTest.scala
|
Scala
|
cc0-1.0
| 3,439 |
package tiger
import org.scalatest.Inside._
import org.scalatest.Matchers._
import org.scalatest._
import tiger.Abs.{FunctionDec, LetExp, VarDec, _}
import tiger.TigerTestUtil.{TigerAbs, TigerEscapes}
class EscapesSpec extends FlatSpec {
"EscapesComponent" should "escapes nested functions" in new TigerAbs("/aditionals/escap0.tig") with TigerEscapes {
inside(tigerProgram()) { case LetExp(List(decX, decY, decsF) , body, _) =>
inside(decX) { case VarDec(name, escape, _, _, _) =>
name should be ("x")
escape should be (true)
}
inside(decY) { case VarDec(name, escape, _, _, _) =>
name should be ("y")
escape should be (false)
}
inside(decsF) { case FunctionDecs(List(decF)) =>
inside(decF) { case FunctionDec(fname, List(Field(arg1_name, arg1_escape,_)), _ ,_ ,_) =>
fname should be ("f")
arg1_name should be ("y")
arg1_escape should be (true)
}
}
}
}
it should "escapes for counter variable" in new TigerAbs("/type/assign-loop-var.tig") with TigerEscapes {
inside(tigerProgram()) { case ForExp(symbol, s, hi, lo, body, _) =>
symbol should be("i")
}
}
// "escape2" should "be correct" in new TigerAbs("/aditionals/escap2.tig") with TigerEscapes {
// inside(tigerProgram()) { case LetExp(List(decF) , body, _) =>
//
// inside(decsF) { case FunctionDecs(List(decF)) =>
// inside(decF) { case FunctionDec(fname, List(Field(arg1_name, arg1_escape,_)), _ ,body ,_) =>
// fname should be ("f")
// arg1_name should be ("i")
// arg1_escape should be (false)
//
// inside(body) { case LetExp(List(iDec, gDec))}
//
//
// }
// }
// }
// }
}
|
joseluisdiaz/tigerc
|
src/test/scala/tiger/EscapesSpec.scala
|
Scala
|
apache-2.0
| 1,764 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs10x.boxes
import uk.gov.hmrc.ct.accounts.frs10x.retriever.Frs10xDirectorsBoxRetriever
import uk.gov.hmrc.ct.box.ValidatableBox._
import uk.gov.hmrc.ct.box._
case class AC8052(value: Option[String]) extends CtBoxIdentifier(name = "Political and charitable donations") with CtOptionalString with Input with ValidatableBox[Frs10xDirectorsBoxRetriever] {
override def validate(boxRetriever: Frs10xDirectorsBoxRetriever): Set[CtValidation] =
validateOptionalStringByLength("AC8052", this, 0, StandardCohoTextFieldLimit) ++ validateCoHoStringReturnIllegalChars("AC8052", this)
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs10x/boxes/AC8052.scala
|
Scala
|
apache-2.0
| 1,221 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util.{Locale, TimeZone}
import scala.reflect.ClassTag
import org.scalatest.Matchers
import org.apache.spark.api.python.PythonEvalType
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.{Count, Sum}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.plans.{Cross, Inner}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, RangePartitioning, RoundRobinPartitioning}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
class AnalysisSuite extends AnalysisTest with Matchers {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("union project *") {
val plan = (1 to 120)
.map(_ => testRelation)
.fold[LogicalPlan](testRelation) { (a, b) =>
a.select(UnresolvedStar(None)).select($"a").union(b.select(UnresolvedStar(None)))
}
assertAnalysisSuccess(plan)
}
test("check project's resolved") {
assert(Project(testRelation.output, testRelation).resolved)
assert(!Project(Seq(UnresolvedAttribute("a")), testRelation).resolved)
val explode = Explode(AttributeReference("a", IntegerType, nullable = true)())
assert(!Project(Seq(Alias(explode, "explode")()), testRelation).resolved)
assert(!Project(Seq(Alias(count(Literal(1)), "count")()), testRelation).resolved)
}
test("analyze project") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("a")), testRelation),
Project(testRelation.output, testRelation))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation))
assertAnalysisError(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Seq("cannot resolve"))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
}
test("resolve sort references - filter/limit") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
// Case 1: one missing attribute is in the leaf node and another is in the unary node
val plan1 = testRelation2
.where($"a" > "str").select($"a", $"b")
.where($"b" > "str").select($"a")
.sortBy($"b".asc, $"c".desc)
val expected1 = testRelation2
.where(a > "str").select(a, b, c)
.where(b > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan1, expected1)
// Case 2: all the missing attributes are in the leaf node
val plan2 = testRelation2
.where($"a" > "str").select($"a")
.where($"a" > "str").select($"a")
.sortBy($"b".asc, $"c".desc)
val expected2 = testRelation2
.where(a > "str").select(a, b, c)
.where(a > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan2, expected2)
}
test("resolve sort references - join") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val h = testRelation3.output(3)
// Case: join itself can resolve all the missing attributes
val plan = testRelation2.join(testRelation3)
.where($"a" > "str").select($"a", $"b")
.sortBy($"c".desc, $"h".asc)
val expected = testRelation2.join(testRelation3)
.where(a > "str").select(a, b, c, h)
.sortBy(c.desc, h.asc)
.select(a, b)
checkAnalysis(plan, expected)
}
test("resolve sort references - aggregate") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val alias_a3 = count(a).as("a3")
val alias_b = b.as("aggOrder")
// Case 1: when the child of Sort is not Aggregate,
// the sort reference is handled by the rule ResolveSortReferences
val plan1 = testRelation2
.groupBy($"a", $"c", $"b")($"a", $"c", count($"a").as("a3"))
.select($"a", $"c", $"a3")
.orderBy($"b".asc)
val expected1 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, b)
.select(a, c, alias_a3.toAttribute, b)
.orderBy(b.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan1, expected1)
// Case 2: when the child of Sort is Aggregate,
// the sort reference is handled by the rule ResolveAggregateFunctions
val plan2 = testRelation2
.groupBy($"a", $"c", $"b")($"a", $"c", count($"a").as("a3"))
.orderBy($"b".asc)
val expected2 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, alias_b)
.orderBy(alias_b.toAttribute.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan2, expected2)
}
test("resolve relations") {
assertAnalysisError(UnresolvedRelation(TableIdentifier("tAbLe")), Seq())
checkAnalysis(UnresolvedRelation(TableIdentifier("TaBlE")), testRelation)
checkAnalysis(
UnresolvedRelation(TableIdentifier("tAbLe")), testRelation, caseSensitive = false)
checkAnalysis(
UnresolvedRelation(TableIdentifier("TaBlE")), testRelation, caseSensitive = false)
}
test("divide should be casted into fractional types") {
val plan = caseInsensitiveAnalyzer.execute(
testRelation2.select(
$"a" / Literal(2) as "div1",
$"a" / $"b" as "div2",
$"a" / $"c" as "div3",
$"a" / $"d" as "div4",
$"e" / $"e" as "div5"))
val pl = plan.asInstanceOf[Project].projectList
assert(pl(0).dataType == DoubleType)
assert(pl(1).dataType == DoubleType)
assert(pl(2).dataType == DoubleType)
assert(pl(3).dataType == DoubleType)
assert(pl(4).dataType == DoubleType)
}
test("pull out nondeterministic expressions from RepartitionByExpression") {
val plan = RepartitionByExpression(Seq(Rand(33)), testRelation, numPartitions = 10)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
RepartitionByExpression(Seq(projected.toAttribute),
Project(testRelation.output :+ projected, testRelation),
numPartitions = 10))
checkAnalysis(plan, expected)
}
test("pull out nondeterministic expressions from Sort") {
val plan = Sort(Seq(SortOrder(Rand(33), Ascending)), false, testRelation)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
Sort(Seq(SortOrder(projected.toAttribute, Ascending)), false,
Project(testRelation.output :+ projected, testRelation)))
checkAnalysis(plan, expected)
}
test("SPARK-9634: cleanup unnecessary Aliases in LogicalPlan") {
val a = testRelation.output.head
var plan = testRelation.select(((a + 1).as("a+1") + 2).as("col"))
var expected = testRelation.select((a + 1 + 2).as("col"))
checkAnalysis(plan, expected)
plan = testRelation.groupBy(a.as("a1").as("a2"))((min(a).as("min_a") + 1).as("col"))
expected = testRelation.groupBy(a)((min(a) + 1).as("col"))
checkAnalysis(plan, expected)
    // CreateStruct is a special case: we should not trim the Alias for it.
plan = testRelation.select(CreateStruct(Seq(a, (a + 1).as("a+1"))).as("col"))
expected = testRelation.select(CreateNamedStruct(Seq(
Literal(a.name), a,
Literal("a+1"), (a + 1))).as("col"))
checkAnalysis(plan, expected)
}
test("Analysis may leave unnecessary aliases") {
val att1 = testRelation.output.head
var plan = testRelation.select(
CreateStruct(Seq(att1, ((att1.as("aa")) + 1).as("a_plus_1"))).as("col"),
att1
)
val prevPlan = getAnalyzer(true).execute(plan)
plan = prevPlan.select(CreateArray(Seq(
CreateStruct(Seq(att1, (att1 + 1).as("a_plus_1"))).as("col1"),
/** alias should be eliminated by [[CleanupAliases]] */
"col".attr.as("col2")
)).as("arr"))
plan = getAnalyzer(true).execute(plan)
val expectedPlan = prevPlan.select(
CreateArray(Seq(
CreateNamedStruct(Seq(
Literal(att1.name), att1,
Literal("a_plus_1"), (att1 + 1))),
Symbol("col").struct(prevPlan.output(0).dataType.asInstanceOf[StructType]).notNull
)).as("arr")
)
checkAnalysis(plan, expectedPlan)
}
test("SPARK-10534: resolve attribute references in order by clause") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val plan = testRelation2.select($"c").orderBy(Floor($"a").asc)
val expected = testRelation2.select(c, a)
.orderBy(Floor(Cast(a, DoubleType, Option(TimeZone.getDefault().getID))).asc).select(c)
checkAnalysis(plan, expected)
}
test("self intersect should resolve duplicate expression IDs") {
val plan = testRelation.intersect(testRelation, isAll = false)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: invalid CAST in NULL IN(...) expression") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(2))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: different types in inlist but can be converted to a common type") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(1.2345))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: check type compatibility error") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(true), Literal(1))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisError(plan, Seq("data type mismatch: Arguments must be same type"))
}
test("SPARK-11725: correctly handle null inputs for ScalaUDF") {
val testRelation = LocalRelation(
AttributeReference("a", StringType)(),
AttributeReference("b", DoubleType)(),
AttributeReference("c", ShortType)(),
AttributeReference("d", DoubleType, nullable = false)())
val string = testRelation.output(0)
val double = testRelation.output(1)
val short = testRelation.output(2)
val nonNullableDouble = testRelation.output(3)
val nullResult = Literal.create(null, StringType)
def checkUDF(udf: Expression, transformed: Expression): Unit = {
checkAnalysis(
Project(Alias(udf, "")() :: Nil, testRelation),
Project(Alias(transformed, "")() :: Nil, testRelation)
)
}
// non-primitive parameters do not need special null handling
val udf1 = ScalaUDF((s: String) => "x", StringType, string :: Nil, false :: Nil)
val expected1 = udf1
checkUDF(udf1, expected1)
// only primitive parameter needs special null handling
val udf2 = ScalaUDF((s: String, d: Double) => "x", StringType, string :: double :: Nil,
false :: true :: Nil)
val expected2 =
If(IsNull(double), nullResult, udf2.copy(children = string :: KnownNotNull(double) :: Nil))
checkUDF(udf2, expected2)
// special null handling should apply to all primitive parameters
val udf3 = ScalaUDF((s: Short, d: Double) => "x", StringType, short :: double :: Nil,
true :: true :: Nil)
val expected3 = If(
IsNull(short) || IsNull(double),
nullResult,
udf3.copy(children = KnownNotNull(short) :: KnownNotNull(double) :: Nil))
checkUDF(udf3, expected3)
// we can skip special null handling for primitive parameters that are not nullable
val udf4 = ScalaUDF(
(s: Short, d: Double) => "x",
StringType,
short :: nonNullableDouble :: Nil,
true :: true :: Nil)
val expected4 = If(
IsNull(short),
nullResult,
udf4.copy(children = KnownNotNull(short) :: nonNullableDouble :: Nil))
checkUDF(udf4, expected4)
}
test("SPARK-24891 Fix HandleNullInputsForUDF rule") {
val a = testRelation.output(0)
val func = (x: Int, y: Int) => x + y
val udf1 = ScalaUDF(func, IntegerType, a :: a :: Nil, false :: false :: Nil)
val udf2 = ScalaUDF(func, IntegerType, a :: udf1 :: Nil, false :: false :: Nil)
val plan = Project(Alias(udf2, "")() :: Nil, testRelation)
comparePlans(plan.analyze, plan.analyze.analyze)
}
test("SPARK-11863 mixture of aliases and real columns in order by clause - tpcds 19,55,71") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val alias1 = a.as("a1")
val alias2 = c.as("a2")
val alias3 = count(a).as("a3")
val plan = testRelation2
.groupBy($"a", $"c")($"a".as("a1"), $"c".as("a2"), count($"a").as("a3"))
.orderBy($"a1".asc, $"c".asc)
val expected = testRelation2
.groupBy(a, c)(alias1, alias2, alias3)
.orderBy(alias1.toAttribute.asc, alias2.toAttribute.asc)
.select(alias1.toAttribute, alias2.toAttribute, alias3.toAttribute)
checkAnalysis(plan, expected)
}
test("Eliminate the unnecessary union") {
val plan = Union(testRelation :: Nil)
val expected = testRelation
checkAnalysis(plan, expected)
}
test("SPARK-12102: Ignore nullablity when comparing two sides of case") {
val relation = LocalRelation(Symbol("a").struct(Symbol("x").int),
Symbol("b").struct(Symbol("x").int.withNullability(false)))
val plan = relation.select(
CaseWhen(Seq((Literal(true), Symbol("a").attr)), Symbol("b")).as("val"))
assertAnalysisSuccess(plan)
}
test("Keep attribute qualifiers after dedup") {
val input = LocalRelation(Symbol("key").int, Symbol("value").string)
val query =
Project(Seq($"x.key", $"y.key"),
Join(
Project(Seq($"x.key"), SubqueryAlias("x", input)),
Project(Seq($"y.key"), SubqueryAlias("y", input)),
Cross, None, JoinHint.NONE))
assertAnalysisSuccess(query)
}
private def assertExpressionType(
expression: Expression,
expectedDataType: DataType): Unit = {
val afterAnalyze =
Project(Seq(Alias(expression, "a")()), OneRowRelation()).analyze.expressions.head
if (!afterAnalyze.dataType.equals(expectedDataType)) {
fail(
s"""
|data type of expression $expression doesn't match expected:
|Actual data type:
|${afterAnalyze.dataType}
|
|Expected data type:
|${expectedDataType}
""".stripMargin)
}
}
test("SPARK-15776: test whether Divide expression's data type can be deduced correctly by " +
"analyzer") {
assertExpressionType(sum(Divide(1, 2)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0f)), DoubleType)
assertExpressionType(sum(Divide(1.0f, 2)), DoubleType)
assertExpressionType(sum(Divide(1, Decimal(2))), DecimalType(22, 11))
assertExpressionType(sum(Divide(Decimal(1), 2)), DecimalType(26, 6))
assertExpressionType(sum(Divide(Decimal(1), 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, Decimal(2.0))), DoubleType)
}
test("SPARK-18058: union and set operations shall not care about the nullability" +
" when comparing column types") {
val firstTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = true))), nullable = false)())
val secondTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = false))), nullable = false)())
val unionPlan = Union(firstTable, secondTable)
assertAnalysisSuccess(unionPlan)
val r1 = Except(firstTable, secondTable, isAll = false)
val r2 = Intersect(firstTable, secondTable, isAll = false)
assertAnalysisSuccess(r1)
assertAnalysisSuccess(r2)
}
test("resolve as with an already existed alias") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("tbl2.a")),
SubqueryAlias("tbl", testRelation).as("tbl2")),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(SubqueryAlias("tbl", testRelation).as("tbl2"), testRelation)
}
test("SPARK-20311 range(N) as alias") {
def rangeWithAliases(args: Seq[Int], outputNames: Seq[String]): LogicalPlan = {
SubqueryAlias("t", UnresolvedTableValuedFunction("range", args.map(Literal(_)), outputNames))
.select(star())
}
assertAnalysisSuccess(rangeWithAliases(3 :: Nil, "a" :: Nil))
assertAnalysisSuccess(rangeWithAliases(1 :: 4 :: Nil, "b" :: Nil))
assertAnalysisSuccess(rangeWithAliases(2 :: 6 :: 2 :: Nil, "c" :: Nil))
assertAnalysisError(
rangeWithAliases(3 :: Nil, "a" :: "b" :: Nil),
Seq("Number of given aliases does not match number of output columns. "
+ "Function name: range; number of aliases: 2; number of output columns: 1."))
}
test("SPARK-20841 Support table column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias("t", UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20962 Support subquery column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"t",
UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20963 Support aliases for join relations in FROM clause") {
def joinRelationWithAliases(outputNames: Seq[String]): LogicalPlan = {
val src1 = LocalRelation(Symbol("id").int, Symbol("v1").string).as("s1")
val src2 = LocalRelation(Symbol("id").int, Symbol("v2").string).as("s2")
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"dst",
src1.join(src2, Inner, Option(Symbol("s1.id") === Symbol("s2.id"))))
).select(star())
}
assertAnalysisSuccess(joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
joinRelationWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-22614 RepartitionByExpression partitioning") {
def checkPartitioning[T <: Partitioning: ClassTag](
numPartitions: Int, exprs: Expression*): Unit = {
val partitioning = RepartitionByExpression(exprs, testRelation2, numPartitions).partitioning
val clazz = implicitly[ClassTag[T]].runtimeClass
assert(clazz.isInstance(partitioning))
}
checkPartitioning[HashPartitioning](numPartitions = 10, exprs = Literal(20))
checkPartitioning[HashPartitioning](numPartitions = 10,
exprs = Symbol("a").attr, Symbol("b").attr)
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder(Literal(10), Ascending))
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder(Symbol("a").attr, Ascending), SortOrder(Symbol("b").attr, Descending))
checkPartitioning[RoundRobinPartitioning](numPartitions = 10, exprs = Seq.empty: _*)
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 0, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = -1, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 10, exprs =
SortOrder(Symbol("a").attr, Ascending), Symbol("b").attr)
}
}
test("SPARK-24208: analysis fails on self-join with FlatMapGroupsInPandas") {
val pythonUdf = PythonUDF("pyUDF", null,
StructType(Seq(StructField("a", LongType))),
Seq.empty,
PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF,
true)
val output = pythonUdf.dataType.asInstanceOf[StructType].toAttributes
val project = Project(Seq(UnresolvedAttribute("a")), testRelation)
val flatMapGroupsInPandas = FlatMapGroupsInPandas(
Seq(UnresolvedAttribute("a")), pythonUdf, output, project)
val left = SubqueryAlias("temp0", flatMapGroupsInPandas)
val right = SubqueryAlias("temp1", flatMapGroupsInPandas)
val join = Join(left, right, Inner, None, JoinHint.NONE)
assertAnalysisSuccess(
Project(Seq(UnresolvedAttribute("temp0.a"), UnresolvedAttribute("temp1.a")), join))
}
test("SPARK-24488 Generator with multiple aliases") {
assertAnalysisSuccess(
listRelation.select(Explode($"list").as("first_alias").as("second_alias")))
assertAnalysisSuccess(
listRelation.select(MultiAlias(MultiAlias(
PosExplode($"list"), Seq("first_pos", "first_val")), Seq("second_pos", "second_val"))))
}
test("SPARK-24151: CURRENT_DATE, CURRENT_TIMESTAMP should be case insensitive") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
val input = Project(Seq(
UnresolvedAttribute("current_date"),
UnresolvedAttribute("CURRENT_DATE"),
UnresolvedAttribute("CURRENT_TIMESTAMP"),
UnresolvedAttribute("current_timestamp")), testRelation)
val expected = Project(Seq(
Alias(CurrentDate(), toPrettySQL(CurrentDate()))(),
Alias(CurrentDate(), toPrettySQL(CurrentDate()))(),
Alias(CurrentTimestamp(), toPrettySQL(CurrentTimestamp()))(),
Alias(CurrentTimestamp(), toPrettySQL(CurrentTimestamp()))()), testRelation).analyze
checkAnalysis(input, expected)
}
}
test("SPARK-25691: AliasViewChild with different nullabilities") {
object ViewAnalyzer extends RuleExecutor[LogicalPlan] {
val batches = Batch("View", Once, EliminateView) :: Nil
}
val relation = LocalRelation(Symbol("a").int.notNull, Symbol("b").string)
val view = View(CatalogTable(
identifier = TableIdentifier("v1"),
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = StructType(Seq(StructField("a", IntegerType), StructField("b", StringType)))),
output = Seq(Symbol("a").int, Symbol("b").string),
child = relation)
val tz = Option(conf.sessionLocalTimeZone)
val expected = Project(Seq(
Alias(Cast(Symbol("a").int.notNull, IntegerType, tz), "a")(),
Alias(Cast(Symbol("b").string, StringType, tz), "b")()),
relation)
val res = ViewAnalyzer.execute(view)
comparePlans(res, expected)
}
test("CTE with non-existing column alias") {
assertAnalysisError(parsePlan("WITH t(x) AS (SELECT 1) SELECT * FROM t WHERE y = 1"),
Seq("cannot resolve '`y`' given input columns: [x]"))
}
test("CTE with non-matching column alias") {
assertAnalysisError(parsePlan("WITH t(x, y) AS (SELECT 1) SELECT * FROM t WHERE x = 1"),
Seq("Number of column aliases does not match number of columns. Number of column aliases: " +
"2; number of columns: 1."))
}
test("SPARK-28251: Insert into non-existing table error message is user friendly") {
assertAnalysisError(parsePlan("INSERT INTO test VALUES (1)"),
Seq("Table not found: test"))
}
test("check CollectMetrics resolved") {
val a = testRelation.output.head
val sum = Sum(a).toAggregateExpression().as("sum")
val random_sum = Sum(Rand(1L)).toAggregateExpression().as("rand_sum")
val literal = Literal(1).as("lit")
// Ok
assert(CollectMetrics("event", literal :: sum :: random_sum :: Nil, testRelation).resolved)
// Bad name
assert(!CollectMetrics("", sum :: Nil, testRelation).resolved)
assertAnalysisError(CollectMetrics("", sum :: Nil, testRelation),
"observed metrics should be named" :: Nil)
// No columns
assert(!CollectMetrics("evt", Nil, testRelation).resolved)
def checkAnalysisError(exprs: Seq[NamedExpression], errors: String*): Unit = {
assertAnalysisError(CollectMetrics("event", exprs, testRelation), errors)
}
// Unwrapped attribute
checkAnalysisError(
a :: Nil,
"Attribute", "can only be used as an argument to an aggregate function")
// Unwrapped non-deterministic expression
checkAnalysisError(
Rand(10).as("rnd") :: Nil,
"non-deterministic expression", "can only be used as an argument to an aggregate function")
// Distinct aggregate
checkAnalysisError(
Sum(a).toAggregateExpression(isDistinct = true).as("sum") :: Nil,
"distinct aggregates are not allowed in observed metrics, but found")
// Nested aggregate
checkAnalysisError(
Sum(Sum(a).toAggregateExpression()).toAggregateExpression().as("sum") :: Nil,
"nested aggregates are not allowed in observed metrics, but found")
// Windowed aggregate
val windowExpr = WindowExpression(
RowNumber(),
WindowSpecDefinition(Nil, a.asc :: Nil,
SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow)))
checkAnalysisError(
windowExpr.as("rn") :: Nil,
"window expressions are not allowed in observed metrics, but found")
}
test("check CollectMetrics duplicates") {
val a = testRelation.output.head
val sum = Sum(a).toAggregateExpression().as("sum")
val count = Count(Literal(1)).toAggregateExpression().as("cnt")
// Same result - duplicate names are allowed
assertAnalysisSuccess(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", count :: Nil, testRelation) :: Nil))
// Same children, structurally different metrics - fail
assertAnalysisError(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", sum :: Nil, testRelation) :: Nil),
"Multiple definitions of observed metrics" :: "evt1" :: Nil)
// Different children, same metrics - fail
val b = Symbol("b").string
val tblB = LocalRelation(b)
assertAnalysisError(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", count :: Nil, tblB) :: Nil),
"Multiple definitions of observed metrics" :: "evt1" :: Nil)
// Subquery different tree - fail
val subquery = Aggregate(Nil, sum :: Nil, CollectMetrics("evt1", count :: Nil, testRelation))
val query = Project(
b :: ScalarSubquery(subquery, Nil).as("sum") :: Nil,
CollectMetrics("evt1", count :: Nil, tblB))
assertAnalysisError(query, "Multiple definitions of observed metrics" :: "evt1" :: Nil)
}
}
|
jkbradley/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
|
Scala
|
apache-2.0
| 29,403 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.service.assets2
import java.io._
import java.net.URI
import com.waz.api.impl.ErrorResponse
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import com.waz.log.LogSE._
import com.waz.log.LogShow
import com.waz.model.errors.NotFoundLocal
import com.waz.model.{AssetId, Mime, Sha256, UploadAssetId}
import com.waz.service.assets2.Asset.UploadGeneral
import com.waz.sync.SyncServiceHandle
import com.waz.sync.client.AssetClient2.{FileWithSha, Retention}
import com.waz.sync.client.{AssetClient2, AssetClient2Impl}
import com.waz.threading.CancellableFuture
import com.waz.utils.{IoUtils, ReactiveStorageImpl2, UnlimitedInMemoryStorage, returning}
import com.waz.{AuthenticationConfig, FilesystemUtils, ZIntegrationMockSpec}
import scala.concurrent.Future
import scala.util.{Failure, Random, Success}
class AssetServiceSpec extends ZIntegrationMockSpec with AuthenticationConfig with DerivedLogTag {
private val assetStorage = mock[AssetStorage]
private val inProgressAssetStorage = mock[DownloadAssetStorage]
private val rawAssetStorage = mock[UploadAssetStorage]
private val assetDetailsService = mock[AssetDetailsService]
private val restrictionsService = mock[AssetRestrictionsService]
private val transformationsService = mock[AssetTransformationsService]
private val previewService = mock[AssetPreviewService]
private val cache = mock[AssetContentCache]
private val rawCache = mock[UploadAssetContentCache]
private val client = mock[AssetClient2]
private val uriHelperMock = mock[UriHelper]
private val syncHandle = mock[SyncServiceHandle]
private val testAssetContent = returning(Array.ofDim[Byte](128))(Random.nextBytes)
private val testAsset = Asset(
id = AssetId(),
token = None,
sha = Sha256.calculate(testAssetContent),
mime = Mime.Default,
encryption = NoEncryption,
localSource = None,
preview = None,
name = "test_content",
size = testAssetContent.length,
details = BlobDetails,
convId = None
)
verbose(l"Test asset: $testAsset")
private def service(rawAssetStorage: UploadAssetStorage = rawAssetStorage,
client: AssetClient2 = client): AssetService =
new AssetServiceImpl(
assetStorage,
rawAssetStorage,
inProgressAssetStorage,
assetDetailsService,
previewService,
transformationsService,
restrictionsService,
uriHelperMock,
cache,
rawCache,
client,
syncHandle
)
feature("Assets") {
scenario("load asset content if it does not exist in cache and asset does not exist in storage") {
val testDir = FilesystemUtils.createDirectoryForTest()
val downloadAssetResult = {
val file = new File(testDir, "asset_content")
IoUtils.write(new ByteArrayInputStream(testAssetContent), new FileOutputStream(file))
FileWithSha(file, Sha256.calculate(testAssetContent))
}
(assetStorage.find _).expects(*).once().returns(Future.successful(None))
(assetStorage.save _).expects(testAsset).once().returns(Future.successful(()))
(client.loadAssetContent _)
.expects(testAsset, *)
.once()
.returns(CancellableFuture.successful(Right(downloadAssetResult)))
(cache.put _).expects(*, *, *).once().returns(Future.successful(()))
(cache.getStream _).expects(*).once().returns(Future.successful(new ByteArrayInputStream(testAssetContent)))
for {
result <- service().loadContent(testAsset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("load asset content if it does not exist in cache") {
val testDir = FilesystemUtils.createDirectoryForTest()
val downloadAssetResult = {
val file = new File(testDir, "asset_content")
IoUtils.write(new ByteArrayInputStream(testAssetContent), new FileOutputStream(file))
FileWithSha(file, Sha256.calculate(testAssetContent))
}
(assetStorage.find _).expects(*).once().returns(Future.successful(Some(testAsset)))
(cache.getStream _).expects(*).once().returns(Future.failed(NotFoundLocal("not found")))
(client.loadAssetContent _)
.expects(testAsset, *)
.once()
.returns(CancellableFuture.successful(Right(downloadAssetResult)))
(cache.put _).expects(*, *, *).once().returns(Future.successful(()))
(cache.getStream _).expects(*).once().returns(Future.successful(new ByteArrayInputStream(testAssetContent)))
for {
result <- service().loadContent(testAsset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("load asset content if it exists in cache") {
(assetStorage.find _).expects(*).once().returns(Future.successful(Some(testAsset)))
(cache.getStream _).expects(*).once().returns(Future.successful(new ByteArrayInputStream(testAssetContent)))
for {
result <- service().loadContent(testAsset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("load asset content if it has not empty local source") {
val asset =
testAsset.copy(localSource = Some(LocalSource(new URI("www.test"), Sha256.calculate(testAssetContent))))
(assetStorage.find _).expects(*).once().returns(Future.successful(Some(asset)))
(uriHelperMock.openInputStream _)
.expects(*)
.once()
.onCall({ _: URI =>
Success(new ByteArrayInputStream(testAssetContent))
})
for {
result <- service().loadContent(asset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("load asset content if it has not empty local source and we can not load content") {
val asset =
testAsset.copy(localSource = Some(LocalSource(new URI("www.test"), Sha256.calculate(testAssetContent))))
val testDir = FilesystemUtils.createDirectoryForTest()
val downloadAssetResult = {
val file = new File(testDir, "asset_content")
IoUtils.write(new ByteArrayInputStream(testAssetContent), new FileOutputStream(file))
FileWithSha(file, Sha256.calculate(testAssetContent))
}
(assetStorage.find _).expects(*).once().returns(Future.successful(Some(asset)))
(uriHelperMock.openInputStream _).expects(*).once().returns(Failure(new IllegalArgumentException))
(assetStorage.save _).expects(asset.copy(localSource = None)).once().returns(Future.successful(()))
(client.loadAssetContent _)
.expects(asset, *)
.once()
.returns(CancellableFuture.successful(Right(downloadAssetResult)))
(cache.put _).expects(*, *, *).once().returns(Future.successful(()))
(cache.getStream _).expects(*).once().returns(Future.successful(new ByteArrayInputStream(testAssetContent)))
for {
result <- service().loadContent(asset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("load asset content if it has not empty local source but local source content has changed") {
val testContentSha = Sha256.calculate(testAssetContent)
val asset = testAsset.copy(localSource = Some(LocalSource(new URI("www.test"), testContentSha)))
val testDir = FilesystemUtils.createDirectoryForTest()
val downloadAssetResult = {
val file = new File(testDir, "asset_content")
IoUtils.write(new ByteArrayInputStream(testAssetContent), new FileOutputStream(file))
FileWithSha(file, testContentSha)
}
(assetStorage.find _).expects(*).once().returns(Future.successful(Some(asset)))
//emulating file changing
(uriHelperMock.openInputStream _)
.expects(*)
.once()
.returns(Success(new ByteArrayInputStream(testAssetContent :+ 1.toByte)))
(assetStorage.save _).expects(asset.copy(localSource = None)).once().returns(Future.successful(()))
(client.loadAssetContent _)
.expects(asset, *)
.once()
.returns(CancellableFuture.successful(Right(downloadAssetResult)))
(cache.put _).expects(*, *, *).once().returns(Future.successful(()))
(cache.getStream _).expects(*).once().returns(Future.successful(new ByteArrayInputStream(testAssetContent)))
for {
result <- service().loadContent(asset, callback = None)
bytes = IoUtils.toByteArray(result)
} yield {
bytes shouldBe testAssetContent
}
}
scenario("upload asset to backend and download it back. check sha") {
val encryption = AES_CBC_Encryption.random
val fakeUri = new URI("https://www.youtube.com")
val contentForUpload = ContentForUpload("test_uri_content", Content.Uri(fakeUri))
(uriHelperMock.openInputStream _).expects(*).anyNumberOfTimes().onCall { _: URI =>
Success(new ByteArrayInputStream(testAssetContent))
}
(uriHelperMock.extractMime _).expects(*).anyNumberOfTimes().returns(Success(Mime.Default))
(uriHelperMock.extractSize _).expects(*).anyNumberOfTimes().returns(Success(testAssetContent.length))
(uriHelperMock.extractFileName _).expects(*).anyNumberOfTimes().returns(Success("test_file_name"))
(assetDetailsService.extract _).expects(*).anyNumberOfTimes().returns((BlobDetails, Mime.Default))
(cache.putStream _).expects(*, *).anyNumberOfTimes().returns(Future.successful(()))
(assetStorage.save _).expects(*).anyNumberOfTimes().returns(Future.successful(()))
(rawCache.remove _).expects(*).anyNumberOfTimes().returns(Future.successful(()))
(transformationsService.getTransformations _).expects(*, *).once().returns(List())
(restrictionsService.validate _).expects(*).once().returns(Success(()))
for {
_ <- Future.successful(())
client = new AssetClient2Impl
rawAssetStorage = new ReactiveStorageImpl2(new UnlimitedInMemoryStorage[UploadAssetId, UploadAsset]()) with UploadAssetStorage
assetService = service(rawAssetStorage, client)
rawAsset <- assetService.createAndSaveUploadAsset(contentForUpload, encryption, public = false, Retention.Persistent, None)
asset <- assetService.uploadAsset(rawAsset.id)
assetContent <- client.loadAssetContent(asset, None)
encryptedContent = IoUtils.toByteArray(rawAsset.encryption.encrypt(new ByteArrayInputStream(testAssetContent), rawAsset.encryptionSalt))
encryptedSha = Sha256.calculate(new ByteArrayInputStream(encryptedContent)).get
} yield {
implicit val AssetResponseShow: LogShow[Either[ErrorResponse, FileWithSha]] = LogShow.create(_.toString)
implicit val StringShow: LogShow[String] = LogShow.create(_.toString)
debug(l"Download asset response: $assetContent")
assetContent shouldBe an[Right[ErrorResponse, FileWithSha]]
val fileWithSha = assetContent.right.get
debug(l"Initial content : ${testAssetContent.mkString(",")}")
debug(l"Expected content: ${encryptedContent.mkString(",")}")
debug(l"Initial content sha: ${Sha256.calculate(testAssetContent)}")
debug(l"Expected content sha: $encryptedSha")
asset.sha shouldBe rawAsset.sha
fileWithSha.sha256 shouldBe asset.sha
}
}
}
}
|
wireapp/wire-android-sync-engine
|
zmessaging/src/test/scala/com/waz/service/assets2/AssetServiceSpec.scala
|
Scala
|
gpl-3.0
| 12,308 |
package endpoints.play.client
import endpoints.algebra.Codec
import play.api.http.ContentTypes
import play.api.libs.ws.{BodyWritable, InMemoryBody}
/**
* Interpreter for [[endpoints.algebra.JsonEntitiesFromCodec]] that encodes JSON requests
* and decodes JSON responses.
*/
trait JsonEntitiesFromCodec extends Endpoints with endpoints.algebra.JsonEntitiesFromCodec {
def jsonRequest[A](implicit codec: Codec[String, A]): RequestEntity[A] = { (a, wsRequest) =>
val playCodec: play.api.mvc.Codec = implicitly[play.api.mvc.Codec]
val writable = BodyWritable((s: String) => InMemoryBody(playCodec.encode(s)), ContentTypes.JSON)
wsRequest.withBody(codec.encode(a))(writable)
}
def jsonResponse[A](implicit codec: Codec[String, A]): Response[A] =
response => codec.decode(response.body)
}
|
Krever/endpoints
|
play/client/src/main/scala/endpoints/play/client/JsonEntitiesFromCodec.scala
|
Scala
|
mit
| 816 |
package repositories
import fixtures.DBSpecBase
import models.Session
import models._
import org.joda.time.DateTime
import play.api.db.slick._
import repositories.SessionSongRepositoryMessages.{SessionNotAcceptingSongs, InvalidSession, RequestSongRequest}
import org.virtuslab.unicorn.LongUnicornPlay.driver.simple._
import scalaz.{Success, Failure}
import play.api.db.slick.{Session => DBSession}
class SessionSongRepositorySpec extends DBSpecBase {
abstract class SessionBase extends SessionSongRepositoryComponent with SessionRepositoryComponent
"SessionSongRepository#requestSong" when {
"the session does not exist" should {
"return an error" in { implicit dbSession =>
new SessionBase {
val singer = Singer(id = Some(SingerId(1)), sessionId = SessionId(5555), name = "Bob")
sessionSongRepository.requestSong(RequestSongRequest("title", "artist"), singer) shouldBe Failure(InvalidSession(SessionId(5555)))
}
}
}
"the session is not accepting requests" should {
"return an error" in { implicit dbSession =>
new SessionBase {
val sessionId = sessionRepository.save(Session(status = AwaitingOpen, name = "TestSession", userId = UserId(1)))
val session = sessionRepository.findById(sessionId).get
val singer = Singer(id = Some(SingerId(1)), sessionId = sessionId, name = "Bob")
sessionSongRepository.requestSong(RequestSongRequest("title", "artist"), singer) shouldBe Failure(SessionNotAcceptingSongs(sessionId))
}
}
}
"every is in order" should {
"return the new saved song" in { implicit dbSession =>
new SessionBase {
val sessionId = sessionRepository.save(Session(status = Open, name = "TestSession", userId = UserId(1)))
val session = sessionRepository.findById(sessionId).get
val singer = Singer(id = Some(SingerId(1)), sessionId = sessionId, name = "Bob")
sessionSongRepository.requestSong(RequestSongRequest("title", "artist"), singer) match {
case Success(s) =>
s.id shouldNot be(None)
s.artist shouldBe "artist"
s.title shouldBe "title"
s.singerId shouldBe singer.id.get
s.sessionId shouldBe sessionId
case Failure(_) => fail()
}
}
}
}
}
"SessionSongRepository#availableSongsByDateQuery" when {
abstract class EmptySession extends SessionBase {
implicit def dbSession: DBSession
val sessionId = sessionRepository.save(Session(status = Open, name = "TestSession", userId = UserId(1)))
val session = sessionRepository.findById(sessionId).get
}
"there are no songs" should {
"return an empty list" in { implicit s =>
new EmptySession {
def dbSession = s
sessionSongRepository.availableSongsByDateQuery(sessionId).list shouldBe Nil
}
}
}
"there are songs" should {
abstract class FullSession extends EmptySession {
val songs = (1 to 5).zip(List(Queued, Queued, Complete, Queued, Queued)).map({ case (index, status) =>
sessionSongRepository.findById(sessionSongRepository.save(SessionSong(sessionId = sessionId, singerId = SingerId(1),
artist = s"artist$index", title = s"title$index", submitDate = DateTime.now.minus(index))))
}).flatten
}
"return an empty list" in { implicit s =>
new FullSession {
def dbSession = s
val expectedOrder = songs.filter(_.status == Queued).sortBy(_.submitDate.getMillis)
sessionSongRepository.availableSongsByDateQuery(sessionId).list shouldBe expectedOrder
}
}
}
}
}
|
nagirrab/Karaoke
|
test/repositories/SessionSongRepositorySpec.scala
|
Scala
|
mit
| 3,740 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io._
object GenerateInput {
def main(args : Array[String]) {
if (args.length != 3) {
println("usage: GenerateInput output-dir n-output-files n-points-per-file")
return;
}
val outputDir = args(0)
val nOutputFiles = args(1).toInt
val pointsPerFile = args(2).toInt
val r = new scala.util.Random
val range = 100.0
for (f <- 0 until nOutputFiles) {
val writer = new PrintWriter(new File(outputDir + "/input." + f))
for (p <- 0 until pointsPerFile) {
val curr = r.nextDouble * range
        writer.write(curr.toString + "\n")
}
writer.close
}
}
}
|
agrippa/spark-swat
|
functional-tests/prim-input-prim-output/src/main/scala/sparksimple/GenerateInput.scala
|
Scala
|
bsd-3-clause
| 2,235 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.avro
import org.geotools.factory.CommonFactoryFinder
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AvroSimpleFeatureFactoryTest extends Specification {
"GeoTools must use AvroSimpleFeatureFactory when hint is set" in {
AvroSimpleFeatureFactory.init
val featureFactory = CommonFactoryFinder.getFeatureFactory(null)
featureFactory.getClass mustEqual classOf[AvroSimpleFeatureFactory]
}
"SimpleFeatureBuilder should return an AvroSimpleFeature when using an AvroSimpleFeatureFactory" in {
AvroSimpleFeatureFactory.init
val featureFactory = CommonFactoryFinder.getFeatureFactory(null)
val sft = SimpleFeatureTypes.createType("testavro", "name:String,geom:Point:srid=4326")
val builder = new SimpleFeatureBuilder(sft, featureFactory)
builder.reset()
builder.add("Hello")
builder.add("POINT (1 1)")
val feature = builder.buildFeature("id")
feature.getClass mustEqual classOf[AvroSimpleFeature]
feature.getAttribute(0) mustEqual "Hello"
feature.getAttribute(1) mustEqual WKTUtils.read("POINT (1 1)")
}
}
|
aheyne/geomesa
|
geomesa-features/geomesa-feature-avro/src/test/scala/org/locationtech/geomesa/features/avro/AvroSimpleFeatureFactoryTest.scala
|
Scala
|
apache-2.0
| 1,850 |
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.flatfile.plain
import fm.lazyseq.LazySeq
import java.io.Reader
import java.lang.{StringBuilder => JavaStringBuilder}
final class LineReader(reader: Reader) extends LazySeq[JavaStringBuilder] {
private[this] val BufferSize: Int = 1024
def foreach[U](f: JavaStringBuilder => U): Unit = {
var sb: JavaStringBuilder = new JavaStringBuilder
val buf: Array[Char] = new Array(BufferSize)
var bufSize: Int = -1
do {
bufSize = reader.read(buf)
var idx: Int = 0
while(idx < bufSize) {
val ch: Char = buf(idx)
if (ch == '\n') {
f(sb)
sb = new JavaStringBuilder
} else if (isValidChar(ch)) {
sb.append(ch)
}
idx += 1
}
} while (bufSize != -1)
// If the last line is empty, skip it
if (sb.length > 0) f(sb)
}
private def isValidChar(ch: Char): Boolean = !ignoreChar(ch)
private def ignoreChar(ch: Char): Boolean = {
'\t' != ch && Character.isISOControl(ch)
}
}
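A minimal usage sketch for the reader above; the input text and the object name are illustrative, not part of the original file. Each newline-terminated chunk is handed to the callback as a java.lang.StringBuilder, with tabs kept and other control characters dropped.
object LineReaderUsageSketch {
  def main(args: Array[String]): Unit = {
    val reader = new java.io.StringReader("first\tline\r\nsecond line\n")
    // Prints "first<TAB>line" then "second line"; the '\r' is an ISO control char and is dropped.
    new LineReader(reader).foreach(sb => println(sb.toString))
  }
}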
|
frugalmechanic/fm-flatfile
|
src/main/scala/fm/flatfile/plain/LineReader.scala
|
Scala
|
apache-2.0
| 1,694 |
package com.blogspot.nhu313.tictactoe
class GameFactory(val io: IO) {
private val playerFactory = new PlayerFactory(io)
def isValidType(selection: Int) = selection > 0 && selection <= types.length
def types() = {
for {
x <- playerFactory.types
y <- playerFactory.types
} yield Tuple2(x, y)
}
def create(gameType: Int): Game = {
val playerTypes = types()(gameType - 1)
val player1 = playerFactory.create(playerTypes._1, Marker.X)
val player2 = playerFactory.create(playerTypes._2, Marker.O)
new Game(Array(player1, player2), io)
}
}
|
nhu313/tic_tac_toe_scala
|
src/main/scala/tictactoe/GameFactory.scala
|
Scala
|
apache-2.0
| 585 |
/*
Copyright 2016 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import java.util.TimeZone
import org.scalatest.{Matchers, WordSpec}
class TimePathedSourceTest extends WordSpec with Matchers {
"TimePathedSource.hdfsWritePath" should {
val dateRange = DateRange(RichDate(0L), RichDate(0L))
val utcTZ = DateOps.UTC
"crib if path == /*" in {
intercept[AssertionError](TestTimePathedSource("/*", dateRange, utcTZ).hdfsWritePath)
}
"crib if path doesn't end with /*" in {
intercept[AssertionError](TestTimePathedSource("/my/invalid/path", dateRange, utcTZ).hdfsWritePath)
}
"work for path ending with /*" in {
TestTimePathedSource("/my/path/*", dateRange, utcTZ).hdfsWritePath.startsWith("/my/path")
}
}
}
case class TestTimePathedSource(p: String, dr: DateRange, t: TimeZone) extends TimePathedSource(p, dr, t)
|
twitter/scalding
|
scalding-core/src/test/scala/com/twitter/scalding/TimePathedSourceTest.scala
|
Scala
|
apache-2.0
| 1,389 |
package io.sqooba.oss.timeseries.entity
/** Identifies an entity that can have time series data attached to it. The different
* possible signals are defined by [[io.sqooba.oss.timeseries.entity.TsLabel!]]s.
*
* @note It is the implementor's responsibility to choose a way of identifying
* entities. Be that by different type, numerical or lexical identifiers. The idea is
* that users define the identifying attributes on their implementation of the trait.
*/
// Not requiring a specific type and shape of identifying property leaves the choice
// to the user and also enables use-cases where multiple identifying mechanisms need
// to be combined with trait polymorphism.
trait TimeSeriesEntityId {
/**
* @param signal the label of the time series signal
   * @return a TsId that fully identifies a time series signal of the entity
* specified by this EntityId
*/
// TODO: the use of this function probably causes AnyVal implementations to require an
// instantiation. At the moment we mostly care about correctness, not performance, but
// we need to keep it in mind. If at some point we want to do things allocation-free,
// have a look at https://docs.scala-lang.org/overviews/core/value-classes.html under
// the extension methods paragraph
def buildTsId(signal: TsLabel): TsId[this.type] = TsId(this, signal)
}
|
Shastick/tslib
|
src/main/scala/io/sqooba/oss/timeseries/entity/TimeSeriesEntityId.scala
|
Scala
|
mit
| 1,366 |
package hearthstone.data.enum
object MinionType extends Enumeration {
type MinionType = Value
val Beast, Murloc = Value
}
|
ukupat/hearthstone
|
src/hearthstone/data/enum/MinionType.scala
|
Scala
|
mit
| 127 |
package com.vorlov.helper.similarity
import org.scalatest.{Matchers, WordSpec}
import com.vorlov.util.TwitterTokenizer._
class CircularSimhashDeduplicationSpec extends WordSpec with Matchers {
implicit def stringify(str: String): String = str.tokens.mkString
"remove similar documents when all documents are similar" in {
val dataset = Seq(
"As collected deficient objection by it discovery sincerity curiosity",
"As collected deficient objection by it discovery sincerity curiosity",
"As collected deficient objection by it discovery sincerity curiosity",
"As collected deficient objection by it discovery sincerity curiosity"
)
CircularSimhashDeduplication.deduplicate(dataset)(stringify).size should === (dataset.distinct.size)
}
"CircularSimhashDeduplication" should {
"remove similar documents" in {
val dataset = Seq(
"As collected deficient objection by it discovery sincerity curiosity", //1
"As collected deficient objection by it discovery sincerity trust", //1
"As collected deficient objection by it discovery sincerity curiosity", //1
"Quiet decay who round three world whole has mrs man", //2
"Assure in adieus wicket it is", //3
"Offending her moonlight men sweetness see unwilling", //4
"Often of it tears whole oh balls share an", //5
"Quiet decay who round three world whole has mrs", //2
"As collected deficient objection by it discovery sincerity curiosity", //1
"Very often of it tears whole oh balls share an" //5
)
CircularSimhashDeduplication.deduplicate(dataset, 0.2)(stringify).size should === (5)
}
"do not fail when there are no documents" in {
CircularSimhashDeduplication.deduplicate(Seq.empty[String])(stringify).size should === (0)
}
}
}
|
VolodymyrOrlov/tweets-opinion-mining
|
src/test/scala/com/vorlov/helper/similarity/CircularSimhashDeduplicationSpec.scala
|
Scala
|
apache-2.0
| 1,854 |
package com.ambrosoft
/**
* Created by jacek on 7/7/16.
*/
object Multiplication extends App {
  // Shift-and-add multiplication: no '*' is used, only bit shifts and addition.
  def multiply(a: Int, b: Int): Int =
    if (a == 0 || b == 0) 0
    else mult(a, b, 0)
  // Invariant: a * b + acc equals the original product. Each step halves b and doubles a;
  // whenever the bit shifted out of b is 1, the current a is added to the accumulator.
  def mult(a: Int, b: Int, acc: Int): Int =
    if (b == 0)
      acc
    else if ((b & 0x01) != 0)
      mult(a << 1, b >> 1, acc + a)
    else
      mult(a << 1, b >> 1, acc)
def test(a: Int, b: Int) = {
val res = multiply(a, b)
println(s"$a * $b = $res")
}
test(3, 5)
test(4, 4)
test(10, 20)
test(0, 10)
test(10, 0)
test(1, 1)
test(1, 123)
test(123, 1)
}
|
JacekAmbroziak/Ambrosoft
|
src/main/scala/com/ambrosoft/Multiplication.scala
|
Scala
|
apache-2.0
| 579 |
package org.aprsdroid.app
import _root_.android.database.Cursor
import _root_.android.content.Context
import _root_.android.graphics.Typeface
import _root_.android.view.View
import _root_.android.widget.SimpleCursorAdapter
import _root_.android.widget.SimpleCursorAdapter.ViewBinder
import _root_.android.widget.TextView
object PostListAdapter {
val LIST_FROM = Array("TSS", StorageDatabase.Post.STATUS,
StorageDatabase.Post.MESSAGE)
val LIST_TO = Array(R.id.listts, R.id.liststatus, R.id.listmessage)
}
class PostListAdapter(context : Context)
extends SimpleCursorAdapter(context, R.layout.listitem,
null, PostListAdapter.LIST_FROM, PostListAdapter.LIST_TO) {
setViewBinder(new PostViewBinder())
}
class PostViewBinder extends ViewBinder {
// post, info, error, incoming, tx
val COLORS = Array(0xff30b030, 0xffc0c080, 0xffffb0b0, 0xff8080b0, 0xff30b030)
override def setViewValue (view : View, cursor : Cursor, columnIndex : Int) : Boolean = {
import StorageDatabase.Post._
columnIndex match {
case COLUMN_MESSAGE =>
val t = cursor.getInt(COLUMN_TYPE)
val m = cursor.getString(COLUMN_MESSAGE)
val v = view.asInstanceOf[TextView]
v.setText(m)
v.setTextColor(COLORS(t))
if (t == TYPE_POST || t == TYPE_INCMG || t == TYPE_TX)
v.setTypeface(Typeface.MONOSPACE)
else
v.setTypeface(Typeface.DEFAULT)
true
case _ => false
}
}
}
|
ge0rg/aprsdroid
|
src/PostListAdapter.scala
|
Scala
|
gpl-2.0
| 1,386 |
package skinny.logging
trait LoggerProvider {
// The logger. Instantiated the first time it's used.
private lazy val _logger = Logger(getClass)
/**
* Get the `Logger` for the class that mixes this trait in. The `Logger`
   * is created the first time this method is called. The other methods (e.g.,
* `error`, `info`, etc.) call this method to get the logger.
*/
protected def logger: Logger = _logger
/**
* Get the name associated with this logger.
*/
protected def loggerName = logger.name
}
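A short sketch of mixing the trait in, assuming the underlying Logger exposes the usual info/error style methods; the service class and its method are hypothetical.
class PaymentServiceSketch extends LoggerProvider {
  def charge(amount: BigDecimal): Unit = {
    // The first use of `logger` here lazily instantiates it for this class.
    logger.info(s"charging $amount")
  }
}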
|
xerial/skinny-micro
|
micro-common/src/main/scala/skinny/logging/LoggerProvider.scala
|
Scala
|
bsd-2-clause
| 524 |
package de.tu_berlin.formic.common.datastructure.persistence
import akka.actor.{ActorSystem, Props}
import de.tu_berlin.formic.common.controlalgo.{ControlAlgorithm, WaveOTServer}
import de.tu_berlin.formic.common.{ClientId, DataStructureInstanceId, OperationId}
import de.tu_berlin.formic.common.datastructure._
import de.tu_berlin.formic.common.datastructure.persistence.AbstractServerDataStructureFactoryPersistenceSpec.AbstractServerDataStructureFactoryPersistenceSpecFactory
import de.tu_berlin.formic.common.message.CreateRequest
import de.tu_berlin.formic.common.server.datastructure.{AbstractServerDataStructure, AbstractServerDataStructureFactory}
import org.scalatest.Assertions._
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * @author Ronny Bräunlich
*/
class AbstractServerDataStructureFactoryPersistenceSpec extends PersistenceSpec(ActorSystem("AbstractServerDataStructureFactoryPersistenceSpec"))
with PersistenceCleanup {
"An AbstractServerDataStructureFactoryPersistenceSpec" should {
"re-apply stored operations after recovery" in {
val factory = system.actorOf(Props(new AbstractServerDataStructureFactoryPersistenceSpecFactory), AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName.name)
val dataTypeInstanceId = DataStructureInstanceId()
val dataTypeInstanceId2 = DataStructureInstanceId()
factory ! CreateRequest(ClientId(), dataTypeInstanceId, AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName)
factory ! CreateRequest(ClientId(), dataTypeInstanceId2, AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName)
receiveN(2)
killActors(factory)
val recoveredFactory = system.actorOf(Props(new AbstractServerDataStructureFactoryPersistenceSpecFactory), AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName.name)
Thread.sleep(2000)
val dataType = system.actorSelection(recoveredFactory.path.child(dataTypeInstanceId.id)).resolveOne(5.seconds)
Await.result(dataType, 5.seconds) shouldNot be(null)
val dataType2 = system.actorSelection(recoveredFactory.path.child(dataTypeInstanceId2.id)).resolveOne(5.seconds)
Await.result(dataType2, 5.seconds) shouldNot be(null)
}
}
}
object AbstractServerDataStructureFactoryPersistenceSpec {
val dataTypeName = DataStructureName("persistenceFactory")
class AbstractServerDataStructureFactoryPersistenceSpecServerDataStructure(id: DataStructureInstanceId, controlAlgorithm: ControlAlgorithm) extends AbstractServerDataStructure(id, controlAlgorithm) {
val transformer = new OperationTransformer {
override def transform(pair: (DataStructureOperation, DataStructureOperation)): DataStructureOperation = pair._1
override def bulkTransform(operation: DataStructureOperation, bridge: List[DataStructureOperation]): List[DataStructureOperation] = bridge
override protected def transformInternal(pair: (DataStructureOperation, DataStructureOperation), withNewContext: Boolean): DataStructureOperation = pair._1
}
var data = ""
override def apply(op: DataStructureOperation): Unit = {
op match {
case _ => fail
}
}
override val dataStructureName: DataStructureName = AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName
override def getDataAsJson: String = data
}
class AbstractServerDataStructureFactoryPersistenceSpecFactory
extends AbstractServerDataStructureFactory[AbstractServerDataStructureFactoryPersistenceSpecServerDataStructure] {
override def create(dataTypeInstanceId: DataStructureInstanceId): AbstractServerDataStructureFactoryPersistenceSpecServerDataStructure = {
new AbstractServerDataStructureFactoryPersistenceSpecServerDataStructure(dataTypeInstanceId, new WaveOTServer())
}
override val name: DataStructureName = AbstractServerDataStructureFactoryPersistenceSpec.dataTypeName
}
}
|
rbraeunlich/formic
|
common/jvm/src/test/scala/de/tu_berlin/formic/common/datastructure/persistence/AbstractServerDataStructureFactoryPersistenceSpec.scala
|
Scala
|
apache-2.0
| 3,934 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources.tsextractors
import java.util
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.{Types, ValidationException}
import org.apache.flink.table.descriptors.Rowtime
import org.apache.flink.table.expressions.utils.ApiExpressionUtils.{unresolvedCall, typeLiteral, valueLiteral}
import org.apache.flink.table.expressions._
import org.apache.flink.table.functions.BuiltInFunctionDefinitions
import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
/**
* Converts an existing [[Long]], [[java.sql.Timestamp]], or
* timestamp formatted [[java.lang.String]] field (e.g., "2018-05-28 12:34:56.000") into
* a rowtime attribute.
*
* @param field The field to convert into a rowtime attribute.
*/
final class ExistingField(val field: String) extends TimestampExtractor {
override def getArgumentFields: Array[String] = Array(field)
@throws[ValidationException]
override def validateArgumentFields(argumentFieldTypes: Array[TypeInformation[_]]): Unit = {
val fieldType = argumentFieldTypes(0)
fieldType match {
case Types.LONG => // OK
case Types.SQL_TIMESTAMP => // OK
case Types.STRING => // OK
case _: TypeInformation[_] =>
throw new ValidationException(
s"Field '$field' must be of type Long or Timestamp or String but is of type $fieldType.")
}
}
/**
* Returns an [[Expression]] that casts a [[Long]], [[java.sql.Timestamp]], or
* timestamp formatted [[java.lang.String]] field (e.g., "2018-05-28 12:34:56.000")
* into a rowtime attribute.
*/
override def getExpression(fieldAccesses: Array[ResolvedFieldReference]): Expression = {
val fieldAccess: ExestingFieldFieldReference = fieldAccesses(0)
.asInstanceOf[ExestingFieldFieldReference]
val fieldReferenceExpr = new FieldReferenceExpression(
fieldAccess.name,
fromLegacyInfoToDataType(fieldAccess.resultType),
0,
fieldAccess.fieldIndex)
fieldAccess.resultType match {
case Types.LONG =>
// access LONG field
val innerDiv = unresolvedCall(
BuiltInFunctionDefinitions.DIVIDE,
fieldReferenceExpr,
valueLiteral(new java.math.BigDecimal(1000)))
unresolvedCall(
BuiltInFunctionDefinitions.CAST,
innerDiv,
typeLiteral(fromLegacyInfoToDataType(Types.SQL_TIMESTAMP)))
case Types.SQL_TIMESTAMP =>
fieldReferenceExpr
case Types.STRING =>
unresolvedCall(
BuiltInFunctionDefinitions.CAST,
fieldReferenceExpr,
typeLiteral(fromLegacyInfoToDataType(Types.SQL_TIMESTAMP)))
}
}
override def equals(other: Any): Boolean = other match {
case that: ExistingField => field == that.field
case _ => false
}
override def hashCode(): Int = {
field.hashCode
}
override def toProperties: util.Map[String, String] = {
val javaMap = new util.HashMap[String, String]()
javaMap.put(Rowtime.ROWTIME_TIMESTAMPS_TYPE, Rowtime.ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)
javaMap.put(Rowtime.ROWTIME_TIMESTAMPS_FROM, field)
javaMap
}
}
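A small sketch of constructing the extractor; the field name "ts" is illustrative. toProperties emits the descriptor entries used by table sources, keyed by the Rowtime constants referenced above.
object ExistingFieldSketch {
  val extractor = new ExistingField("ts")
  // Maps Rowtime.ROWTIME_TIMESTAMPS_TYPE to the from-field marker value and
  // Rowtime.ROWTIME_TIMESTAMPS_FROM to the column name "ts".
  val properties: java.util.Map[String, String] = extractor.toProperties
}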
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/sources/tsextractors/ExistingField.scala
|
Scala
|
apache-2.0
| 3,997 |
package test
import com.julianpeeters.avro.annotations._
//Primitive Types
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest00.avro")
@AvroRecord
case class AvroTypeProviderTest00()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest01.avro")
@AvroRecord
case class AvroTypeProviderTest01()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest02.avro")
@AvroRecord
case class AvroTypeProviderTest02()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest03.avro")
@AvroRecord
case class AvroTypeProviderTest03()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest04.avro")
@AvroRecord
case class AvroTypeProviderTest04()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest05.avro")
@AvroRecord
case class AvroTypeProviderTest05()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest06.avro")
@AvroRecord
case class AvroTypeProviderTest06()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest07.avro")
@AvroRecord
case class AvroTypeProviderTest07()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest08.avro")
@AvroRecord
case class AvroTypeProviderTest08()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest10.avro")
@AvroRecord
case class AvroTypeProviderTest10()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest11.avro")
@AvroRecord
case class AvroTypeProviderTest11()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest12.avro")
@AvroRecord
case class AvroTypeProviderTest12()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest13.avro")
@AvroRecord
case class AvroTypeProviderTest13()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap01.avro")
@AvroRecord
case class AvroTypeProviderTestMap01()//var x: Map[String, Int])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap02.avro")
@AvroRecord
case class AvroTypeProviderTestMap02()//var x: Map[String, String])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap03.avro")
@AvroRecord
case class AvroTypeProviderTestMap03()//var x: Map[String, Option[List[Int]]])
//Primitive, 2-arity records
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest14.avro")
@AvroRecord
case class AvroTypeProviderTest14()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest15.avro")
@AvroRecord
case class AvroTypeProviderTest15()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest16.avro")
@AvroRecord
case class AvroTypeProviderTest16()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest17.avro")
@AvroRecord
case class AvroTypeProviderTest17()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest18.avro")
@AvroRecord
case class AvroTypeProviderTest18()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest19.avro")
@AvroRecord
case class AvroTypeProviderTest19()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest20.avro")
@AvroRecord
case class AvroTypeProviderTest20()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest24.avro")
@AvroRecord
case class AvroTypeProviderTest24()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest25.avro")
@AvroRecord
case class AvroTypeProviderTest25()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest26.avro")
@AvroRecord
case class AvroTypeProviderTest26()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest27.avro")
@AvroRecord
case class AvroTypeProviderTest27()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap04.avro")
@AvroRecord
case class AvroTypeProviderTestMap04()//var x: Map[String, Int], var y: Map[String, Int])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap05.avro")
@AvroRecord
case class AvroTypeProviderTestMap05()//var x: Map[String, String], var y: Map[String, String])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap06.avro")
@AvroRecord
case class AvroTypeProviderTestMap06()//var x: Map[String, Option[List[Int]]], var y: Map[String, Option[List[Int]]])
//Primitive nested
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest28.avro")
@AvroRecord
case class AvroTypeProviderTest28()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest29.avro")
@AvroRecord
case class AvroTypeProviderTest29()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest30.avro")
@AvroRecord
case class AvroTypeProviderTest30()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest31.avro")
@AvroRecord
case class AvroTypeProviderTest31()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest32.avro")
@AvroRecord
case class AvroTypeProviderTest32()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest33.avro")
@AvroRecord
case class AvroTypeProviderTest33()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest34.avro")
@AvroRecord
case class AvroTypeProviderTest34()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest35.avro")
@AvroRecord
case class AvroTypeProviderTest35()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest36.avro")
@AvroRecord
case class AvroTypeProviderTest36()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest37.avro")
@AvroRecord
case class AvroTypeProviderTest37()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap07.avro")
@AvroRecord
case class AvroTypeProviderTestMap07()//var x: Map[String, Map[String, Int]])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap08.avro")
@AvroRecord
case class AvroTypeProviderTestMap08()//var x: List[Map[String, Map[String, String]]])
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap09.avro")
@AvroRecord
case class AvroTypeProviderTestMap09()//var x: Option[Map[String, Option[List[String]]]])
//Primitive nested, 2-arity
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest38.avro")
@AvroRecord
case class AvroTypeProviderTest38()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest39.avro")
@AvroRecord
case class AvroTypeProviderTest39()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest40.avro")
@AvroRecord
case class AvroTypeProviderTest40()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest41.avro")
@AvroRecord
case class AvroTypeProviderTest41()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest42.avro")
@AvroRecord
case class AvroTypeProviderTest42()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest43.avro")
@AvroRecord
case class AvroTypeProviderTest43()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest44.avro")
@AvroRecord
case class AvroTypeProviderTest44()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest45.avro")
@AvroRecord
case class AvroTypeProviderTest45()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest46.avro")
@AvroRecord
case class AvroTypeProviderTest46()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest47.avro")
@AvroRecord
case class AvroTypeProviderTest47()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap10.avro")
@AvroRecord
case class AvroTypeProviderTestMap10()//var x: Map[String, Map[String, Int]], var y: Map[String, Map[String, Int]])
//Primitive, 2-arity, heterogenous members
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest48.avro")
@AvroRecord
case class AvroTypeProviderTest48()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest49.avro")
@AvroRecord
case class AvroTypeProviderTest49()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest50.avro")
@AvroRecord
case class AvroTypeProviderTest50()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest51.avro")
@AvroRecord
case class AvroTypeProviderTest51()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest52.avro")
@AvroRecord
case class AvroTypeProviderTest52()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest53.avro")
@AvroRecord
case class AvroTypeProviderTest53()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest54.avro")
@AvroRecord
case class AvroTypeProviderTest54()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest55.avro")
@AvroRecord
case class AvroTypeProviderTest55()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest56.avro")
@AvroRecord
case class AvroTypeProviderTest56()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest57.avro")
@AvroRecord
case class AvroTypeProviderTest57()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap11.avro")
@AvroRecord
case class AvroTypeProviderTestMap11()//var x: Map[String, Map[String, Int]], var y: List[Map[String, Map[String, String]]])
//User-defined types
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest58.avro")
@AvroRecord
case class AvroTypeProviderTest58()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest59.avro")
@AvroRecord
case class AvroTypeProviderTest59()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest60.avro")
@AvroRecord
case class AvroTypeProviderTest60()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest61.avro")
@AvroRecord
case class AvroTypeProviderTest61()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest62.avro")
@AvroRecord
case class AvroTypeProviderTest62()
/* //TODO make readable file for this class - not very urgent since this field type is tested in other contexts also
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest63.avro")
@AvroRecord
case class AvroTypeProviderTest63()
*/
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest64.avro")
@AvroRecord
case class AvroTypeProviderTest64()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest65.avro")
@AvroRecord
case class AvroTypeProviderTest65()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest66.avro")
@AvroRecord
case class AvroTypeProviderTest66()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest67.avro")
@AvroRecord
case class AvroTypeProviderTest67()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTest68.avro")
@AvroRecord
case class AvroTypeProviderTest68()
@AvroTypeProvider("tests/src/test/resources/AvroTypeProviderTestMap12.avro")
@AvroRecord
case class AvroTypeProviderTestMap12()//var x: Map[String, Map[String, AvroTypeProviderTest00]], var y: Map[String, AvroTypeProviderTest58])
|
rvvincelli/avro-scala-macro-annotations
|
tests/src/test/scala/AvroTypeProviderTestClasses.scala
|
Scala
|
apache-2.0
| 10,594 |
package com.twitter.finagle
import com.twitter.finagle.stats.DefaultStatsReceiver
import com.twitter.finagle.toggle.{StandardToggleMap, ToggleMap}
package object kestrel {
private[this] val LibraryName: String = "com.twitter.finagle.kestrel"
/**
* The [[ToggleMap]] used for finagle-kestrel
*/
private[finagle] val Toggles: ToggleMap =
StandardToggleMap(LibraryName, DefaultStatsReceiver)
}
|
spockz/finagle
|
finagle-kestrel/src/main/scala/com/twitter/finagle/package.scala
|
Scala
|
apache-2.0
| 410 |
package net.rosien.configz
import com.typesafe.config._
import scalaz._
import Scalaz._
/** Reads settings from a [[com.typesafe.config.Config]]. */
sealed trait Configz[A] { self =>
/** Read the settings from a config. */
def settings(config: Config): Settings[A]
/** Validate the computed settings with a predicate.
* @param f predicate function
* @param message failure message if f returns false
* @return a new instance that validates the settings
*/
def validate(f: A => Boolean, message: String): Configz[A] =
new Configz[A] {
def settings(config: Config): Settings[A] =
self.settings(config).ensure(NonEmptyList(new ConfigException.Generic(message)))(f)
}
}
object Configz {
implicit val ConfigzApplicative: Applicative[Configz] =
new Applicative[Configz] {
def point[A](a: => A): Configz[A] = new Configz[A] {
def settings(config: Config) =
try a.success catch {
case e: ConfigException => e.failNel
}
}
def ap[A, B](fa: => Configz[A])(f: => Configz[(A) => B]): Configz[B] =
new Configz[B] {
def settings(config: Config) =
try fa.settings(config) <*> f.settings(config) catch {
case e: ConfigException => e.failNel
}
}
}
implicit val ConfigzFunctor: Functor[Configz] =
new Functor[Configz] {
def map[A, B](fa: Configz[A])(f: A => B) =
new Configz[B] {
def settings(config: Config) = fa.settings(config) map f
}
}
/** Get a value at a path from a [[com.typesafe.config.Config]]. */
def atPath[A](f: Config => String => A): Configz[String => A] =
new Configz[String => A] {
def settings(config: Config): Settings[String => A] = f(config).point[Configz].settings(config)
override def toString = "Configz(atPath)[%s]".format(f)
}
import collection.JavaConversions._
implicit val BooleanAtPath: Configz[String => Boolean] = atPath(config => path => config.getBoolean(path))
implicit val BooleanListAtPath: Configz[String => List[Boolean]] = atPath(config => path => config.getBooleanList(path).toList.map(Boolean.unbox))
implicit val ConfigAtPath: Configz[String => Config] = atPath(config => path => config.getConfig(path))
implicit val ConfigListAtPath: Configz[String => List[Config]] = atPath(config => path => config.getConfigList(path).toList)
implicit val DoubleAtPath: Configz[String => Double] = atPath(config => path => config.getDouble(path))
implicit val DoubleListAtPath: Configz[String => List[Double]] = atPath(config => path => config.getDoubleList(path).toList.map(Double.unbox))
implicit val IntAtPath: Configz[String => Int] = atPath(config => path => config.getInt(path))
implicit val IntListAtPath: Configz[String => List[Int]] = atPath(config => path => config.getIntList(path).toList.map(Int.unbox))
implicit val LongAtPath: Configz[String => Long] = atPath(config => path => config.getLong(path))
implicit val LongListAtPath: Configz[String => List[Long]] = atPath(config => path => config.getLongList(path).toList.map(Long.unbox))
implicit val NumberAtPath: Configz[String => Number] = atPath(config => path => config.getNumber(path))
implicit val NumberListAtPath: Configz[String => List[Number]] = atPath(config => path => config.getNumberList(path).toList)
implicit val StringAtPath: Configz[String => String] = atPath(config => path => config.getString(path))
implicit val StringListAtPath: Configz[String => List[String]] = atPath(config => path => config.getStringList(path).toList)
}
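A rough usage sketch assembled only from what this file defines; real call sites likely go through nicer syntax provided elsewhere in the library, and the config key is illustrative.
object ConfigzSketch {
  import com.typesafe.config.ConfigFactory
  import scalaz._, Scalaz._
  import Configz._
  val config = ConfigFactory.parseString("http.port = 8080")
  // IntAtPath reads an Int at a given path; mapping it onto a concrete path yields Configz[Int].
  val port: Configz[Int] = IntAtPath.map(readAt => readAt("http.port"))
  // Settings[Int] is a scalaz validation: a success carrying 8080 here,
  // or accumulated ConfigExceptions when the path is missing or mistyped.
  val result = port.validate(_ > 0, "port must be positive").settings(config)
}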
|
arosien/configz
|
core/src/main/scala/Configz.scala
|
Scala
|
apache-2.0
| 3,712 |
/*
* Copyright 2013 Sanjin Sehic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.ac.tuwien.infosys
package amber
package util
class ValueSpec extends Spec {
class O
class A extends O
class U extends A
class B
trait Fixture {
val a = new A
val value = new Value(a)
}
"Value" when {
"return the value if asked for a same type" in {
new Fixture {
value.as[A].value should be(a)
}
}
"return the value if asked for a super type" in {
new Fixture {
value.as[O].value should be(a)
}
}
"return None if asked for a different type" in {
new Fixture {
value.as[B] should be(None)
}
}
"return None if asked for a sub type" in {
new Fixture {
value.as[U] should be(None)
}
}
}
}
|
tuwiendsg/CAPA
|
core/test/scala/util/ValueSpec.scala
|
Scala
|
apache-2.0
| 1,336 |
package models.connection
import java.time.OffsetDateTime
abstract class Connection(val id: Option[Long], val child: Long, val parent: Long, val creationDate: OffsetDateTime,
val endDate: Option[OffsetDateTime], val discriminator: String)
object Connection {
def tupled(t: (Option[Long], Long, Long, OffsetDateTime, Option[OffsetDateTime], String)) = new Connection(t._1, t._2,
t._3, t._4, t._5, t._6){}
def unapply(connection: Connection) = Some((connection.id, connection.child, connection.parent,
connection.creationDate, connection.endDate, connection.discriminator))
}
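A small sketch of materialising a row through the companion helpers above; the ids and the "membership" discriminator are made-up values.
object ConnectionSketch {
  import java.time.OffsetDateTime
  val now = OffsetDateTime.now()
  // tupled builds an anonymous concrete Connection; unapply flattens it back into the same tuple.
  val conn = Connection.tupled((None, 1L, 2L, now, None, "membership"))
  val fields = Connection.unapply(conn) // Some((None, 1L, 2L, now, None, "membership"))
}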
|
HackerSchool/Passport
|
app/models/connection/Connection.scala
|
Scala
|
bsd-3-clause
| 616 |
package uk.gov.gds.location.importer.model
/**
* ONS Country codes
*/
object Countries {
val countries = Map(
"S92000003" -> "Scotland",
"E92000001" -> "England",
"W92000004" -> "Wales"
)
def countryForGssCode(gssCode: String) = gssCode.substring(0, 1) match {
case "E" => "England"
case "S" => "Scotland"
case "W" => "Wales"
}
}
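Two illustrative lookups, using codes that appear in the map above: the first character of the GSS code drives the match.
object CountriesSketch {
  val england  = Countries.countryForGssCode("E92000001") // "England"
  val scotland = Countries.countryForGssCode("S92000003") // "Scotland"
}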
|
alphagov/location-data-importer
|
src/main/scala/uk/gov/gds/location/importer/model/Countries.scala
|
Scala
|
mit
| 366 |
package scala
package collection
package immutable
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.mutable.Builder
import scala.language.higherKinds
trait SortedMap[K, +V]
extends Map[K, V]
with collection.SortedMap[K, V]
with SortedMapOps[K, V, SortedMap, SortedMap[K, V]] {
override def sortedMapFactory: SortedMapFactory[SortedMapCC] = SortedMap
/** The same map with a given default function.
* Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
* are not affected by `withDefault`.
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
* @param d the function mapping keys to values, used for non-present keys
* @return a wrapper of the map with a default value
*/
override def withDefault[V1 >: V](d: K => V1): SortedMap.WithDefault[K, V1] = new SortedMap.WithDefault[K, V1](this, d)
/** The same map with a given default value.
* Note: The default is only used for `apply`. Other methods like `get`, `contains`, `iterator`, `keys`, etc.
* are not affected by `withDefaultValue`.
*
* Invoking transformer methods (e.g. `map`) will not preserve the default value.
*
* @param d default value used for non-present keys
* @return a wrapper of the map with a default value
*/
override def withDefaultValue[V1 >: V](d: V1): SortedMap.WithDefault[K, V1] = new SortedMap.WithDefault[K, V1](this, _ => d)
}
trait SortedMapOps[K, +V, +CC[X, +Y] <: Map[X, Y] with SortedMapOps[X, Y, CC, _], +C <: SortedMapOps[K, V, CC, C]]
extends MapOps[K, V, Map, C]
with collection.SortedMapOps[K, V, CC, C] { self =>
protected def coll: C with CC[K, V]
override def keySet: SortedSet[K] = new ImmutableKeySortedSet
/** The implementation class of the set returned by `keySet` */
@SerialVersionUID(3L)
protected class ImmutableKeySortedSet extends AbstractSet[K] with SortedSet[K] with GenKeySet with GenKeySortedSet {
def rangeImpl(from: Option[K], until: Option[K]): SortedSet[K] = {
val map = self.rangeImpl(from, until)
new map.ImmutableKeySortedSet
}
def incl(elem: K): SortedSet[K] = fromSpecificIterable(this).incl(elem)
def excl(elem: K): SortedSet[K] = fromSpecificIterable(this).excl(elem)
}
// We override these methods to fix their return type (which would be `Map` otherwise)
def updated[V1 >: V](key: K, value: V1): CC[K, V1]
@`inline` final override def +[V1 >: V](kv: (K, V1)): CC[K, V1] = updated(kv._1, kv._2)
override def concat[V2 >: V](xs: collection.Iterable[(K, V2)]): CC[K, V2] = {
var result: CC[K, V2] = coll
val it = xs.iterator()
while (it.hasNext) result = result + it.next()
result
}
override def transform[W](f: (K, V) => W): CC[K, W] = map({ case (k, v) => (k, f(k, v)) })
}
object SortedMap extends SortedMapFactory.Delegate[SortedMap](TreeMap) {
@SerialVersionUID(3L)
final class WithDefault[K, +V](underlying: SortedMap[K, V], defaultValue: K => V)
extends Map.WithDefault[K, V](underlying, defaultValue)
with SortedMap[K, V]
with SortedMapOps[K, V, SortedMap, WithDefault[K, V]]
with Serializable {
implicit def ordering: Ordering[K] = underlying.ordering
override def sortedMapFactory: SortedMapFactory[SortedMap] = underlying.sortedMapFactory
def iteratorFrom(start: K): scala.collection.Iterator[(K, V)] = underlying.iteratorFrom(start)
def keysIteratorFrom(start: K): scala.collection.Iterator[K] = underlying.keysIteratorFrom(start)
def rangeImpl(from: Option[K], until: Option[K]): WithDefault[K, V] =
new WithDefault[K, V](underlying.rangeImpl(from, until), defaultValue)
// Need to override following methods to match type signatures of `SortedMap.WithDefault`
// for operations preserving default value
override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] =
new WithDefault[K, V1](underlying.updated(key, value), defaultValue)
override def remove(key: K): WithDefault[K, V] =
new WithDefault[K, V](underlying.remove(key), defaultValue)
override def empty: WithDefault[K, V] = new WithDefault[K, V](underlying.empty, defaultValue)
override protected def fromSpecificIterable(coll: scala.collection.Iterable[(K, V)] @uncheckedVariance): WithDefault[K, V] =
new WithDefault[K, V](sortedMapFactory.from(coll), defaultValue)
override protected def newSpecificBuilder(): Builder[(K, V), WithDefault[K, V]] @uncheckedVariance =
SortedMap.newBuilder().mapResult((p: SortedMap[K, V]) => new WithDefault[K, V](p, defaultValue))
}
}
|
rorygraves/perf_tester
|
corpus/scala-library/src/main/scala/collection/immutable/SortedMap.scala
|
Scala
|
apache-2.0
| 4,749 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.cli
import javax.xml.transform._
import javax.xml.transform.stream._
import com.rackspace.com.papi.components.checker.Config
import com.rackspace.com.papi.components.checker.util.URLResolver
import com.rackspace.com.papi.components.checker.wadl.WADLDotBuilder
import org.clapper.argot.ArgotConverters._
import org.clapper.argot.{ArgotParser, ArgotUsageException}
object Wadl2Dot {
val title = getClass.getPackage.getImplementationTitle
val version = getClass.getPackage.getImplementationVersion
val parser = new ArgotParser("java -jar wadl2dot.jar", preUsage=Some(s"$title v$version"))
val removeDups = parser.flag[Boolean] (List("d", "remove-dups"),
"Remove duplicate nodes. Default: false")
val raxRoles = parser.flag[Boolean] (List("r", "rax-roles"),
"Enable Rax-Roles extension. Default: false")
val raxRolesMask403 = parser.flag[Boolean] (List("M", "rax-roles-mask-403s"),
"When Rax-Roles is enable mask 403 errors with 404 or 405s. Default: false")
val wellFormed = parser.flag[Boolean] (List("w", "well-formed"),
"Add checks to ensure that XML and JSON are well formed. Default: false")
val joinXPaths = parser.flag[Boolean] (List("j", "join-xpaths"),
"Join multiple XPath and XML well-formed checks into a single check. Default: false")
val xsdGrammarTransform = parser.flag[Boolean] (List("g", "xsd-grammar-transform"),
"Transform the XML after validation, to fill in things like default values etc. Default: false")
val preserveRequestBody = parser.flag[Boolean] (List("b", "preserve-req-body"),
"Ensure that the request body is preserved after validating the request.")
val xsdCheck = parser.flag[Boolean] (List("x", "xsd"),
"Add checks to ensure that XML validates against XSD grammar Default: false")
val jsonCheck = parser.flag[Boolean] (List("J", "json"),
"Add checks to ensure that JSON validates against JSON Schema grammar Default: false")
val element = parser.flag[Boolean] (List("l", "element"),
"Add checks to ensure that XML requests use the correct element : false")
val header = parser.flag[Boolean] (List("H", "header"),
"Add checks to ensure that required headers are passed in: false")
val plainParam = parser.flag[Boolean] (List("p", "plain"),
"Add checks for plain parameters : false")
val preProc = parser.flag[Boolean] (List("P", "disable-preproc-ext"),
"Disable preprocess extension : false")
val ignoreXSD = parser.flag[Boolean] (List("i", "disable-ignore-xsd-ext"),
"Disable Ignore XSD extension : false")
val ignoreJSON = parser.flag[Boolean] (List("I", "disable-ignore-json-ext"),
"Disable Ignore JSON Schema extension : false")
val message = parser.flag[Boolean] (List("m", "disable-message-ext"),
"Disable Message extension : false")
val captureHeader = parser.flag[Boolean] (List("c", "disable-capture-header-ext"),
"Disable capture header extension : false")
val showErrors = parser.flag[Boolean] (List("e", "show-errors"),
"Show error nodes. Default: false")
val nfaMode = parser.flag[Boolean] (List("n", "nfa-mode"),
"Display in NFA mode. Default: false")
val help = parser.flag[Boolean] (List("h", "help"),
"Display usage.")
val input = parser.parameter[String]("wadl",
"WADL file/uri to read. If not specified, stdin will be used.",
true)
val output = parser.parameter[String]("output",
"Output file. If not specified, stdout will be used.",
true)
val printVersion = parser.flag[Boolean] (List("version"),
"Display version.")
def getSource: Source = {
var source: Source = null
if (input.value.isEmpty) {
source = new StreamSource(System.in)
} else {
source = new StreamSource(URLResolver.toAbsoluteSystemId(input.value.get))
}
source
}
def getResult: Result = {
var result: Result = null
if (output.value.isEmpty) {
result = new StreamResult(System.out)
} else {
result = new StreamResult(URLResolver.toAbsoluteSystemId(output.value.get))
}
result
}
def handleArgs(args: Array[String]): Unit = {
parser.parse(args)
if (help.value.getOrElse(false)) {
parser.usage()
}
}
def main(args: Array[String]) = {
try {
handleArgs(args)
if (printVersion.value.getOrElse(false)) {
println(s"$title v$version")
} else {
val c = new Config
c.removeDups = removeDups.value.getOrElse(false)
c.enableRaxRolesExtension = raxRoles.value.getOrElse(false)
c.maskRaxRoles403 = raxRolesMask403.value.getOrElse(false)
c.checkWellFormed = wellFormed.value.getOrElse(false)
c.checkXSDGrammar = xsdCheck.value.getOrElse(false)
c.checkJSONGrammar = jsonCheck.value.getOrElse(false)
c.checkElements = element.value.getOrElse(false)
c.checkPlainParams = plainParam.value.getOrElse(false)
c.enablePreProcessExtension = !preProc.value.getOrElse(false)
c.joinXPathChecks = joinXPaths.value.getOrElse(false)
c.checkHeaders = header.value.getOrElse(false)
c.enableIgnoreXSDExtension = !ignoreXSD.value.getOrElse(false)
c.enableIgnoreJSONSchemaExtension = !ignoreJSON.value.getOrElse(false)
c.enableMessageExtension = !message.value.getOrElse(false)
c.enableCaptureHeaderExtension = !captureHeader.value.getOrElse(false)
c.preserveRequestBody = preserveRequestBody.value.getOrElse(false)
c.doXSDGrammarTransform = xsdGrammarTransform.value.getOrElse(false)
c.validateChecker = true
new WADLDotBuilder().build(getSource, getResult,
c,
!showErrors.value.getOrElse(false),
nfaMode.value.getOrElse(false))
}
} catch {
case e: ArgotUsageException => println(e.message)
}
}
}
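// Editorial usage sketch (not part of the original source; file names are made up and the
// flag spellings assume Argot's usual -x / --long-name conventions for the options above):
//   java -jar wadl2dot.jar --remove-dups --well-formed my-api.wadl my-api.dot
// reads my-api.wadl, removes duplicate checker nodes, adds well-formedness checks, and writes
// the resulting dot graph to my-api.dot (stdin/stdout are used when the positional parameters
// are omitted).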
|
tylerroyal/api-checker
|
cli/wadl2dot/src/main/scala/com/rackspace/com/papi/components/checker/cli/Wadl2Dot.scala
|
Scala
|
apache-2.0
| 7,425 |
package gitbucket.core.servlet
import javax.servlet._
import javax.servlet.http.HttpServletRequest
import gitbucket.core.controller.ControllerBase
import gitbucket.core.plugin.PluginRegistry
class PluginControllerFilter extends Filter {
private var filterConfig: FilterConfig = null
override def init(filterConfig: FilterConfig): Unit = {
this.filterConfig = filterConfig
}
override def destroy(): Unit = {
PluginRegistry().getControllers().foreach {
case (controller, _) =>
controller.destroy()
}
}
override def doFilter(request: ServletRequest, response: ServletResponse, chain: FilterChain): Unit = {
val contextPath = request.getServletContext.getContextPath
val requestUri = request.asInstanceOf[HttpServletRequest].getRequestURI.substring(contextPath.length)
PluginRegistry()
.getControllers()
.filter {
case (_, path) =>
          val start = path.replaceFirst("/\\*$", "/")
(requestUri + "/").startsWith(start)
}
.foreach {
case (controller, _) =>
controller match {
case x: ControllerBase if (x.config == null) => x.init(filterConfig)
case _ => ()
}
val mockChain = new MockFilterChain()
controller.doFilter(request, response, mockChain)
if (mockChain.continue == false) {
return ()
}
}
chain.doFilter(request, response)
}
}
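// Editorial note on the path matching in doFilter above (illustrative, not from the original
// source): a controller registered at "/plugin/*" yields start = "/plugin/", so a request to
// "/plugin/users" matches ("/plugin/users/".startsWith("/plugin/")), while "/pluginx" does not.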
|
x-way/gitbucket
|
src/main/scala/gitbucket/core/servlet/PluginControllerFilter.scala
|
Scala
|
apache-2.0
| 1,491 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.internal.Logging
import org.apache.spark.Partition
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession, SQLContext}
import org.apache.spark.sql.jdbc.JdbcDialects
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
/**
* Instructions on how to partition the table among workers.
*/
private[sql] case class JDBCPartitioningInfo(
column: String,
lowerBound: Long,
upperBound: Long,
numPartitions: Int)
private[sql] object JDBCRelation extends Logging {
/**
* Given a partitioning schematic (a column of integral type, a number of
* partitions, and upper and lower bounds on the column's value), generate
* WHERE clauses for each partition so that each row in the table appears
   * exactly once. The lowerBound and upperBound values are advisory in that
   * incorrect values may cause the partitioning to be poor, but no data
   * will fail to be represented.
*
* Null value predicate is added to the first partition where clause to include
* the rows with null value for the partitions column.
*
* @param partitioning partition information to generate the where clause for each partition
* @return an array of partitions with where clause for each partition
*/
def columnPartition(partitioning: JDBCPartitioningInfo): Array[Partition] = {
if (partitioning == null || partitioning.numPartitions <= 1 ||
partitioning.lowerBound == partitioning.upperBound) {
return Array[Partition](JDBCPartition(null, 0))
}
val lowerBound = partitioning.lowerBound
val upperBound = partitioning.upperBound
require (lowerBound <= upperBound,
"Operation not allowed: the lower bound of partitioning column is larger than the upper " +
s"bound. Lower bound: $lowerBound; Upper bound: $upperBound")
val numPartitions =
if ((upperBound - lowerBound) >= partitioning.numPartitions) {
partitioning.numPartitions
} else {
logWarning("The number of partitions is reduced because the specified number of " +
"partitions is less than the difference between upper bound and lower bound. " +
s"Updated number of partitions: ${upperBound - lowerBound}; Input number of " +
s"partitions: ${partitioning.numPartitions}; Lower bound: $lowerBound; " +
s"Upper bound: $upperBound.")
upperBound - lowerBound
}
// Overflow and silliness can happen if you subtract then divide.
// Here we get a little roundoff, but that's (hopefully) OK.
val stride: Long = upperBound / numPartitions - lowerBound / numPartitions
val column = partitioning.column
var i: Int = 0
var currentValue: Long = lowerBound
var ans = new ArrayBuffer[Partition]()
while (i < numPartitions) {
val lBound = if (i != 0) s"$column >= $currentValue" else null
currentValue += stride
val uBound = if (i != numPartitions - 1) s"$column < $currentValue" else null
val whereClause =
if (uBound == null) {
lBound
} else if (lBound == null) {
s"$uBound or $column is null"
} else {
s"$lBound AND $uBound"
}
ans += JDBCPartition(whereClause, i)
i = i + 1
}
ans.toArray
}
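  // Editorial worked example of the clause generation above (not part of the original source):
  // JDBCPartitioningInfo(column = "id", lowerBound = 0, upperBound = 30, numPartitions = 3)
  // gives stride = 30/3 - 0/3 = 10 and the WHERE clauses
  //   partition 0: "id < 10 or id is null"
  //   partition 1: "id >= 10 AND id < 20"
  //   partition 2: "id >= 20"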
}
private[sql] case class JDBCRelation(
parts: Array[Partition], jdbcOptions: JDBCOptions)(@transient val sparkSession: SparkSession)
extends BaseRelation
with PrunedFilteredScan
with InsertableRelation {
override def sqlContext: SQLContext = sparkSession.sqlContext
override val needConversion: Boolean = false
override val schema: StructType = JDBCRDD.resolveTable(jdbcOptions)
// Check if JDBCRDD.compileFilter can accept input filters
override def unhandledFilters(filters: Array[Filter]): Array[Filter] = {
filters.filter(JDBCRDD.compileFilter(_, JdbcDialects.get(jdbcOptions.url)).isEmpty)
}
override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
// Rely on a type erasure hack to pass RDD[InternalRow] back as RDD[Row]
JDBCRDD.scanTable(
sparkSession.sparkContext,
schema,
requiredColumns,
filters,
parts,
jdbcOptions).asInstanceOf[RDD[Row]]
}
override def insert(data: DataFrame, overwrite: Boolean): Unit = {
import scala.collection.JavaConverters._
val options = jdbcOptions.asProperties.asScala +
("url" -> jdbcOptions.url, "dbtable" -> jdbcOptions.table)
val mode = if (overwrite) SaveMode.Overwrite else SaveMode.Append
new JdbcRelationProvider().createRelation(
data.sparkSession.sqlContext, mode, options.toMap, data)
}
override def toString: String = {
val partitioningInfo = if (parts.nonEmpty) s" [numPartitions=${parts.length}]" else ""
// credentials should not be included in the plan output, table information is sufficient.
s"JDBCRelation(${jdbcOptions.table})" + partitioningInfo
}
}
|
bOOm-X/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JDBCRelation.scala
|
Scala
|
apache-2.0
| 5,903 |
/* Copyright 2014 White Label Personal Clouds Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.welcomer.framework
import scala.concurrent.duration._
import java.util.concurrent.TimeUnit
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
private[framework] object Settings {
def apply() = {
new Settings
}
def apply(config: Config) = {
new Settings(Option(config))
}
def apply(configOption: Option[Config]) = {
new Settings(configOption)
}
}
private[framework] class Settings(configOption: Option[Config] = None) {
val config: Config = configOption getOrElse { ConfigFactory.load() }
config.checkValid(ConfigFactory.defaultReference())
// config.checkValid(ConfigFactory.defaultReference(), "simple-lib")
object Database {
object WelcomerFramework {
val uri: String = config.getString("db.welcomerFramework.uri")
// val testUri = config.getString("db.test-uri")
val name: String = config.getString("db.welcomerFramework.name")
object Collections {
val picoContainer: String = config.getString("db.welcomerFramework.collections.picoContainer") // TODO: Rename this to 'picos'?
val picoPds: String = config.getString("db.welcomerFramework.collections.picoPds")
val ecis: String = config.getString("db.welcomerFramework.collections.ecis")
}
}
}
// object EventedGateway {
//
// }
object EventedEntityResolver {
val timeout: FiniteDuration = FiniteDuration(
config.getDuration("eventedEntityResolver.timeout", TimeUnit.MILLISECONDS),
TimeUnit.MILLISECONDS)
val retries: Int = config.getInt("eventedEntityResolver.retries")
val eventTraceLogDepth: Int = config.getInt("eventedEntityResolver.eventTraceLogDepth")
}
object ExternalEventGateway {
object Bind {
val interface: String = config.getString("externalEventGateway.bind.interface")
val port: Int = config.getInt("externalEventGateway.bind.port")
}
object EventedFunction {
val defaultTimeout: FiniteDuration = FiniteDuration(
config.getDuration("externalEventGateway.eventedFunction.defaultTimeout", TimeUnit.MILLISECONDS),
TimeUnit.MILLISECONDS)
val maxTimeout: FiniteDuration = FiniteDuration(
config.getDuration("externalEventGateway.eventedFunction.maxTimeout", TimeUnit.MILLISECONDS),
TimeUnit.MILLISECONDS)
}
}
}
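// Editorial sketch of the application.conf keys read above (values are hypothetical,
// not taken from the project):
//   db.welcomerFramework.uri = "mongodb://localhost:27017"
//   db.welcomerFramework.name = "welcomer"
//   db.welcomerFramework.collections.picoContainer = "picoContainer"
//   eventedEntityResolver.timeout = 5 seconds
//   eventedEntityResolver.retries = 3
//   externalEventGateway.bind.interface = "0.0.0.0"
//   externalEventGateway.bind.port = 8080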
|
welcomer/framework
|
src/main/scala/me/welcomer/framework/Settings.scala
|
Scala
|
apache-2.0
| 2,964 |
/**
* This file is part of objc2swift.
* https://github.com/yahoojapan/objc2swift
*
* Copyright (c) 2015 Yahoo Japan Corporation
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
package org.objc2swift
import org.objc2swift.ObjCParser._
import org.antlr.v4.runtime.RuleContext
import org.antlr.v4.runtime.tree.ParseTreeProperty
import scala.collection.JavaConversions._
/**
* Implements visit methods for enum contexts.
*/
protected trait EnumVisitor extends BaseConverter {
private val identifiers = new ParseTreeProperty[String]()
def findDeclarationSpecifiers(ctx: RuleContext): Option[Declaration_specifiersContext] =
ctx match {
case c: Declaration_specifiersContext => Some(c)
case c: Translation_unitContext => None
case _ => findDeclarationSpecifiers(ctx.parent)
}
def getClassName(ctx: Declaration_specifiersContext): String =
Option(ctx.type_specifier()) match {
case Some(list) if list.size >= 2 =>
Option(list.last.class_name()).map(visit).getOrElse("")
case _ => ""
}
/**
* Get name of enumerator.
* @param ctx parse tree
* @return
*/
def getEnumName(ctx: Enum_specifierContext): String =
Option(ctx.identifier()) match {
case Some(id) => visit(id)
case None => findDeclarationSpecifiers(ctx).map(getClassName).getOrElse("")
}
override def visitEnum_specifier(ctx: Enum_specifierContext): String =
getEnumName(ctx) match {
case id if !id.isEmpty => visitEnum_specifier(ctx, id)
case _ => ""
}
/**
* Return translated text of enum_specifier context.
*
* @param ctx parse tree
* @param identifier enum id
* @return translated text
*/
def visitEnum_specifier(ctx: Enum_specifierContext, identifier: String): String = {
val builder = List.newBuilder[String]
val typeStr = for {
c1 <- Option(ctx.type_name())
c2 <- Option(c1.specifier_qualifier_list())
c3 <- Option(c2.type_specifier())
} yield concatType(c3)
// save this enum id
identifiers.put(ctx, identifier)
builder += s"enum $identifier : ${typeStr.getOrElse("Int")}"
builder += Option(ctx.enumerator_list()).map(visit).getOrElse("")
builder.result().mkString
}
/**
* Returns translated text of enumerator_list context.
*
* @param ctx the parse tree
**/
override def visitEnumerator_list(ctx: Enumerator_listContext): String =
s" {\\n${ctx.enumerator().map(visit).mkString("\\n")}\\n}"
/**
* Returns translated text of enumerator context.
*
* @param ctx the parse tree
**/
override def visitEnumerator(ctx: EnumeratorContext): String =
s"${indent(ctx)}case ${getEnumIdentifier(ctx)}${getEnumConstant(ctx)}"
/**
* Returns translated text of identifier under the enumerator context
*
* @param ctx the parse tree
* @return translated text
*/
private def getEnumIdentifier(ctx: EnumeratorContext): String = {
val origId = visit(ctx.identifier())
val enumId = identifiers.get(ctx.parent.parent)
val digitId = "[0-9].*".r
// Trim duplicate prefix
origId.stripPrefix(enumId) match {
case digitId() => origId
case s => s
}
}
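  // Editorial example of the prefix trimming above (illustrative, not from the original source):
  // inside `enum UITableViewStyle`, the case `UITableViewStylePlain` becomes `Plain`; if
  // stripping the prefix would leave a name starting with a digit (e.g. `Foo3D` in enum `Foo`),
  // the original identifier is kept.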
/**
* Returns translated text of constant_expression under the enumerator context
*
* @param ctx the parse tree
* @return translated text
*/
private def getEnumConstant(ctx: EnumeratorContext): String =
Option(ctx.constant_expression()).map(c => s" = ${visit(c)}").getOrElse("")
}
|
johndpope/objc2swift
|
src/main/scala/org/objc2swift/EnumVisitor.scala
|
Scala
|
mit
| 3,622 |
package com.yammer.metrics.experiments
import collection.JavaConversions._
import java.util.concurrent.TimeUnit
import com.yammer.metrics.stats.{ExponentiallyDecayingSample}
import com.yammer.metrics.stats.{UniformSample}
import java.io.{PrintWriter, FileOutputStream}
/**
* A simple experiment to see how uniform and exponentially-decaying samples
* respond to a linearly-increasing set of measurements.
*
* For two hours, it measures the number of seconds the test has been running
* and places that value in each sample every second.
*
* Then for analysis, compares the mean of the uniform sample with the mean of
* the data set to date and compares the mean of the exponentially-decaying
* sample with the mean of the previous 5 minutes of values.
*/
object RecencyBiasExperiment {
def main(args: Array[String]) {
val expSample = new ExponentiallyDecayingSample(10, 0.015)
val uniSample = new UniformSample(10)
val output = new PrintWriter(new FileOutputStream("timings.csv"), true)
output.println("t,exponential mean,expected exponential mean,uniform mean,expected uniform mean")
for (t <- 1 to TimeUnit.HOURS.toSeconds(2).toInt) {
expSample.update(t)
uniSample.update(t)
val expValues = expSample.values.map {_.longValue}.sorted
val uniValues = uniSample.values.map {_.longValue}.sorted
val expMean = expValues.sum / expValues.size.toDouble
val expExpectedMean = ((t - 300).max(1) to t).sum / 300.0.min(t)
val uniMean = uniValues.sum / uniValues.size.toDouble
val uniExpectedMean = (1 to t).sum / t.toDouble
println("=" * 80)
println("t: " + t)
println("exp: " + expValues.mkString(", "))
      printf( " mean: %2.2f\n", expMean)
      printf( " expected: %2.2f\n", expExpectedMean)
println("uni: " + uniValues.mkString(", "))
      printf(" mean: %2.2f\n", uniMean)
      printf(" expected: %2.2f\n", uniExpectedMean)
output.println("%d,%2.2f,%2.2f,%2.2f,%2.2f".format(t, expMean, expExpectedMean, uniMean, uniExpectedMean))
Thread.sleep(TimeUnit.SECONDS.toMillis(1))
}
output.close()
}
}
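// Editorial restatement of the expected values computed above (not part of the original source):
//   expected uniform mean     = (1 + 2 + ... + t) / t = (t + 1) / 2
//   expected exponential mean = mean of the last min(t, 300) measurements, i.e. roughly t - 150
//                               once the run is past the five-minute mark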
|
box/metrics
|
metrics-scala_2.9.1/src/test/scala/com/yammer/metrics/experiments/RecencyBiasExperiment.scala
|
Scala
|
mit
| 2,158 |
package com.avsystem.scex
package compiler
import java.{lang => jl, util => ju}
import scala.reflect.internal.util.Position
/**
* Created: 13-12-2013
* Author: ghik
*/
class ExpressionSourceFile(
val exprDef: ExpressionDef,
sourceName: String,
val code: String,
startOffset: Int) extends ScexSourceFile(sourceName, code, shared = false) {
require(exprDef != null, "Expression definition cannot be null")
val expressionPos = Position.range(this, startOffset, startOffset, startOffset + exprDef.expression.length)
lazy val bareSource = new ScexSourceFile(sourceName, exprDef.originalExpression, shared = false)
}
|
pnf/scex
|
scex-core/src/main/scala/com/avsystem/scex/compiler/ExpressionSourceFile.scala
|
Scala
|
apache-2.0
| 633 |
package com.lucaswilkins.newtonfractals
import com.lucaswilkins.newtonfractals.complex.Complex
import com.lucaswilkins.newtonfractals.polynomial._
/**
* Newton-Raphson Solver for complex Polynomials
*/
class Newton(val poly: Polynomial, iters: Int = 10) {
val deriv = poly.diff
def solve(x: Complex, remainingIters: Int = iters): Tuple2[Complex, Double] = {
if(remainingIters <= 0){
(x, (poly(x)/deriv(x)).mag)
} else {
solve(x - (poly(x)/deriv(x)), remainingIters - 1)
}
}
}
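// Editorial usage sketch (the Polynomial and Complex constructors shown are assumptions about
// this project's API, not verified against it):
//   val p: Polynomial = ...                       // e.g. z^3 - 1
//   val newton = new Newton(p, iters = 20)
//   val (root, residual) = newton.solve(Complex(1.0, 1.0))
//   // `root` approximates a complex root of p; `residual` is |p(root)/p'(root)| after the run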
|
drlucaswilkins/newtonfractal
|
NewtonFractal/src/main/scala/com/lucaswilkins/newtonfractals/Newton.scala
|
Scala
|
gpl-2.0
| 511 |
package akka.persistence.eventstore.journal
import akka.persistence.journal.JournalPerfSpec
import scala.concurrent.duration._
class JournalPerfIntegrationSpec extends JournalIntegrationSpec with JournalPerfSpec {
override def awaitDurationMillis = 30.seconds.toMillis
}
|
petervdm/EventStore.Akka.Persistence
|
src/test/scala/akka/persistence/eventstore/journal/JournalPerfIntegrationSpec.scala
|
Scala
|
bsd-3-clause
| 274 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.calcite.rel.core.JoinRelType
import org.apache.calcite.sql.fun.SqlStdOperatorTable.{EQUALS, LESS_THAN_OR_EQUAL}
import org.junit.Assert._
import org.junit.Test
import scala.collection.JavaConversions._
class FlinkRelMdColumnOriginNullCountTest extends FlinkRelMdHandlerTestBase {
@Test
def testGetColumnOriginNullCountOnTableScan(): Unit = {
Array(studentLogicalScan, studentFlinkLogicalScan, studentBatchScan, studentStreamScan)
.foreach { scan =>
assertEquals(0.0, mq.getColumnOriginNullCount(scan, 0))
assertEquals(0.0, mq.getColumnOriginNullCount(scan, 1))
assertEquals(6.0, mq.getColumnOriginNullCount(scan, 2))
assertEquals(0.0, mq.getColumnOriginNullCount(scan, 3))
assertNull(mq.getColumnOriginNullCount(scan, 4))
assertEquals(0.0, mq.getColumnOriginNullCount(scan, 5))
assertNull(mq.getColumnOriginNullCount(scan, 6))
}
val ts = relBuilder.scan("MyTable3").build()
assertEquals(1.0, mq.getColumnOriginNullCount(ts, 0))
assertEquals(0.0, mq.getColumnOriginNullCount(ts, 1))
}
@Test
def testGetColumnOriginNullCountOnSnapshot(): Unit = {
(0 until flinkLogicalSnapshot.getRowType.getFieldCount).foreach { idx =>
assertNull(mq.getColumnOriginNullCount(flinkLogicalSnapshot, idx))
}
}
@Test
def testGetColumnOriginNullCountOnProject(): Unit = {
assertEquals(0.0, mq.getColumnOriginNullCount(logicalProject, 0))
assertEquals(0.0, mq.getColumnOriginNullCount(logicalProject, 1))
assertNull(mq.getColumnOriginNullCount(logicalProject, 2))
assertNull(mq.getColumnOriginNullCount(logicalProject, 3))
assertNull(mq.getColumnOriginNullCount(logicalProject, 4))
assertNull(mq.getColumnOriginNullCount(logicalProject, 5))
assertNull(mq.getColumnOriginNullCount(logicalProject, 6))
assertNull(mq.getColumnOriginNullCount(logicalProject, 7))
assertEquals(0.0, mq.getColumnOriginNullCount(logicalProject, 8))
assertEquals(0.0, mq.getColumnOriginNullCount(logicalProject, 9))
assertEquals(0.0, mq.getColumnOriginNullCount(logicalProject, 10))
assertNull(mq.getColumnOriginNullCount(logicalProject, 11))
val ts = relBuilder.scan("MyTable3").build()
relBuilder.push(ts)
val projects = List(
relBuilder.call(EQUALS, relBuilder.field(0), relBuilder.literal(1)),
relBuilder.field(0),
relBuilder.field(1),
relBuilder.literal(true),
relBuilder.literal(null))
val project = relBuilder.project(projects).build()
assertEquals(null, mq.getColumnOriginNullCount(project, 0))
assertEquals(1.0, mq.getColumnOriginNullCount(project, 1))
assertEquals(0.0, mq.getColumnOriginNullCount(project, 2))
assertEquals(0.0, mq.getColumnOriginNullCount(project, 3))
assertEquals(1.0, mq.getColumnOriginNullCount(project, 4))
}
@Test
def testGetColumnOriginNullCountOnCalc(): Unit = {
// only filter
relBuilder.push(studentLogicalScan)
// id <= 2
val expr = relBuilder.call(LESS_THAN_OR_EQUAL, relBuilder.field(0), relBuilder.literal(2))
val calc1 = createLogicalCalc(
studentLogicalScan, studentLogicalScan.getRowType, relBuilder.fields(), List(expr))
(0 until calc1.getRowType.getFieldCount).foreach { idx =>
assertNull(mq.getColumnOriginNullCount(calc1, idx))
}
val ts = relBuilder.scan("MyTable3").build()
relBuilder.push(ts)
val projects = List(
relBuilder.call(EQUALS, relBuilder.field(0), relBuilder.literal(1)),
relBuilder.field(0),
relBuilder.field(1),
relBuilder.literal(true),
relBuilder.literal(null))
val outputRowType = relBuilder.project(projects).build().getRowType
val calc2 = createLogicalCalc(ts, outputRowType, projects, List())
assertEquals(null, mq.getColumnOriginNullCount(calc2, 0))
assertEquals(1.0, mq.getColumnOriginNullCount(calc2, 1))
assertEquals(0.0, mq.getColumnOriginNullCount(calc2, 2))
assertEquals(0.0, mq.getColumnOriginNullCount(calc2, 3))
assertEquals(1.0, mq.getColumnOriginNullCount(calc2, 4))
}
@Test
def testGetColumnOriginNullCountOnJoin(): Unit = {
val innerJoin1 = relBuilder.scan("MyTable3").project(relBuilder.fields().subList(0, 2))
.scan("MyTable4")
.join(JoinRelType.INNER,
relBuilder.call(EQUALS, relBuilder.field(2, 0, 1), relBuilder.field(2, 1, 1)))
.build
assertEquals(1.0, mq.getColumnOriginNullCount(innerJoin1, 0))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin1, 1))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin1, 2))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin1, 3))
val innerJoin2 = relBuilder.scan("MyTable3").project(relBuilder.fields().subList(0, 2))
.scan("MyTable4")
.join(JoinRelType.INNER,
relBuilder.call(EQUALS, relBuilder.field(2, 0, 0), relBuilder.field(2, 1, 0)))
.build
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin2, 0))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin2, 1))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin2, 2))
assertEquals(0.0, mq.getColumnOriginNullCount(innerJoin2, 3))
Array(logicalLeftJoinOnUniqueKeys, logicalRightJoinNotOnUniqueKeys,
logicalFullJoinWithEquiAndNonEquiCond, logicalSemiJoinNotOnUniqueKeys,
logicalSemiJoinWithEquiAndNonEquiCond).foreach { join =>
(0 until join.getRowType.getFieldCount).foreach { idx =>
assertNull(mq.getColumnOriginNullCount(join, idx))
}
}
}
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdColumnOriginNullCountTest.scala
|
Scala
|
apache-2.0
| 6,376 |
import com.twitter.scrooge.ScroogeSBT
import sbt.Keys._
import sbt._
object Searchbird extends Build {
val guavaVersion = "18.0"
val finagleVersion = "6.24.0"
val scroogeVersion = "3.17.0"
val twitterServerVersion = "1.9.0"
val twitterUtilVersion = "6.23.0"
val additionalResolvers = Seq(
resolvers ++= Seq(
"Twitter Maven Repository" at "http://maven.twttr.com"
)
)
val dependencies = Seq(
libraryDependencies ++= Seq(
"com.google.guava" % "guava" % guavaVersion,
"com.twitter" %% "finagle-core" % finagleVersion,
"com.twitter" %% "finagle-http" % finagleVersion,
"com.twitter" %% "finagle-redis" % finagleVersion,
"com.twitter" %% "finagle-stats" % finagleVersion,
"com.twitter" %% "finagle-thrift" % finagleVersion,
"com.twitter" %% "scrooge-core" % scroogeVersion,
"com.twitter" %% "twitter-server" % twitterServerVersion,
"com.twitter" %% "util-app" % twitterUtilVersion,
"com.twitter" %% "util-core" % twitterUtilVersion,
"com.twitter" %% "util-logging" % twitterUtilVersion
)
)
val scalaSettings = Seq(
scalaVersion := "2.10.3",
scalacOptions ++= Seq("-encoding", "utf8"),
scalacOptions += "-deprecation"
)
val scroogeSettings = ScroogeSBT.newSettings
val finagleThriftExample = Project(id = "Searchbird", base = file("."))
.settings(scalaSettings:_*)
.settings(additionalResolvers:_*)
.settings(scroogeSettings:_*)
.settings(dependencies:_*)
}
|
maufee/Searchbird
|
project/Build.scala
|
Scala
|
apache-2.0
| 1,509 |
/*
* Artificial Intelligence for Humans
* Volume 1: Fundamental Algorithms
* Scala Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
* Copyright 2013 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh
/**
* Global constants for AIFH.
*/
object AIFH {
/**
* The default precision.
*/
val DEFAULT_PRECISION = 0.0000001
}
|
PeterLauris/aifh
|
vol1/scala-examples/src/main/scala/com/heatonresearch/aifh/AIFH.scala
|
Scala
|
apache-2.0
| 1,101 |
package pl.newicom.dddd
import pl.newicom.dddd.aggregate.EntityId
trait BusinessEntity {
def id: EntityId
def department: String
}
|
pawelkaczor/akka-ddd
|
akka-ddd-protocol/src/main/scala/pl/newicom/dddd/BusinessEntity.scala
|
Scala
|
mit
| 139 |
/*
 * Copyright 2014–2020 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.local
import quasar.api.destination.DestinationError.{
InitializationError,
malformedConfiguration
}
import quasar.connector.MonadResourceErr
import quasar.connector.destination.{Destination, DestinationModule}
import scala.util.Either
import argonaut.Json
import cats.effect.{Blocker, ContextShift, ConcurrentEffect, Resource, Timer}
trait LocalDestinationModule extends DestinationModule {
def blocker: Blocker
val destinationType = LocalDestinationType
def sanitizeDestinationConfig(config: Json): Json = config
def destination[F[_]: ConcurrentEffect: ContextShift: MonadResourceErr: Timer](
config: Json)
: Resource[F, Either[InitializationError[Json], Destination[F]]] = {
val dest = for {
ld <- attemptConfig[F, LocalDestinationConfig, InitializationError[Json]](
config,
"Failed to decode LocalDestination config: ")(
(c, d) => malformedConfiguration((destinationType, c, d)))
root <- validatedPath(ld.rootDir, "Invalid destination path: ") { d =>
malformedConfiguration((destinationType, config, d))
}
} yield LocalDestination[F](root, blocker): Destination[F]
Resource.liftF(dest.value)
}
}
|
slamdata/quasar
|
impl/src/main/scala/quasar/impl/local/LocalDestinationModule.scala
|
Scala
|
apache-2.0
| 1,822 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml
import org.apache.spark.ml.feature.{HashingTF, IDF, IDFModel, VectorAssembler}
import org.apache.spark.sql.DataFrame
/**
* == Feature transformers ==
*
* The `ml.feature` package provides common feature transformers that help convert raw data or
* features into more suitable forms for model fitting.
* Most feature transformers are implemented as [[Transformer]]s, which transform one [[DataFrame]]
* into another, e.g., [[HashingTF]].
* Some feature transformers are implemented as [[Estimator]]s, because the transformation requires
* some aggregated information of the dataset, e.g., document frequencies in [[IDF]].
* For those feature transformers, calling [[Estimator!.fit]] is required to obtain the model first,
* e.g., [[IDFModel]], in order to apply transformation.
* The transformation is usually done by appending new columns to the input [[DataFrame]], so all
* input columns are carried over.
*
* We try to make each transformer minimal, so it becomes flexible to assemble feature
* transformation pipelines.
* [[Pipeline]] can be used to chain feature transformers, and [[VectorAssembler]] can be used to
* combine multiple feature transformations, for example:
*
* {{{
* import org.apache.spark.ml.feature._
* import org.apache.spark.ml.Pipeline
*
* // a DataFrame with three columns: id (integer), text (string), and rating (double).
* val df = spark.createDataFrame(Seq(
* (0, "Hi I heard about Spark", 3.0),
* (1, "I wish Java could use case classes", 4.0),
* (2, "Logistic regression models are neat", 4.0)
* )).toDF("id", "text", "rating")
*
* // define feature transformers
* val tok = new RegexTokenizer()
* .setInputCol("text")
* .setOutputCol("words")
* val sw = new StopWordsRemover()
* .setInputCol("words")
* .setOutputCol("filtered_words")
* val tf = new HashingTF()
* .setInputCol("filtered_words")
* .setOutputCol("tf")
* .setNumFeatures(10000)
* val idf = new IDF()
* .setInputCol("tf")
* .setOutputCol("tf_idf")
* val assembler = new VectorAssembler()
* .setInputCols(Array("tf_idf", "rating"))
* .setOutputCol("features")
*
* // assemble and fit the feature transformation pipeline
* val pipeline = new Pipeline()
* .setStages(Array(tok, sw, tf, idf, assembler))
* val model = pipeline.fit(df)
*
* // save transformed features with raw data
* model.transform(df)
* .select("id", "text", "rating", "features")
* .write.format("parquet").save("/output/path")
* }}}
*
* Some feature transformers implemented in MLlib are inspired by those implemented in scikit-learn.
* The major difference is that most scikit-learn feature transformers operate eagerly on the entire
* input dataset, while MLlib's feature transformers operate lazily on individual columns,
* which is more efficient and flexible to handle large and complex datasets.
*
* @see [[http://scikit-learn.org/stable/modules/preprocessing.html scikit-learn.preprocessing]]
*/
package object feature
|
gioenn/xSpark
|
mllib/src/main/scala/org/apache/spark/ml/feature/package.scala
|
Scala
|
apache-2.0
| 3,888 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.state
import java.util.Locale
import org.apache.hadoop.conf.Configuration
import org.apache.spark.TaskContext
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression, JoinedRow, Literal, SafeProjection, SpecificInternalRow, UnsafeProjection, UnsafeRow}
import org.apache.spark.sql.execution.streaming.StatefulOperatorStateInfo
import org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinHelper._
import org.apache.spark.sql.types.{BooleanType, LongType, StructField, StructType}
import org.apache.spark.util.NextIterator
/**
* Helper class to manage state required by a single side of
* [[org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinExec]].
* The interface of this class is basically that of a multi-map:
* - Get: Returns an iterator of multiple values for given key
* - Append: Append a new value to the given key
* - Remove Data by predicate: Drop any state using a predicate condition on keys or values
*
* @param joinSide Defines the join side
* @param inputValueAttributes Attributes of the input row which will be stored as value
* @param joinKeys Expressions to generate rows that will be used to key the value rows
* @param stateInfo Information about how to retrieve the correct version of state
* @param storeConf Configuration for the state store.
* @param hadoopConf Hadoop configuration for reading state data from storage
* @param partitionId A partition ID of source RDD.
* @param stateFormatVersion The version of format for state.
*
* Internally, the key -> multiple values is stored in two [[StateStore]]s.
* - Store 1 ([[KeyToNumValuesStore]]) maintains mapping between key -> number of values
* - Store 2 ([[KeyWithIndexToValueStore]]) maintains mapping; the mapping depends on the state
* format version:
* - version 1: [(key, index) -> value]
* - version 2: [(key, index) -> (value, matched)]
* - Put: update count in KeyToNumValuesStore,
* insert new (key, count) -> value in KeyWithIndexToValueStore
* - Get: read count from KeyToNumValuesStore,
* read each of the n values in KeyWithIndexToValueStore
* - Remove state by predicate on keys:
* scan all keys in KeyToNumValuesStore to find keys that do match the predicate,
 *    delete the key from KeyToNumValuesStore, delete values in KeyWithIndexToValueStore
* - Remove state by condition on values:
* scan all elements in KeyWithIndexToValueStore to find values that match
* the predicate, delete corresponding (key, indexToDelete) from KeyWithIndexToValueStore
 *    by overwriting with the value of (key, maxIndex), and removing [(key, maxIndex)],
* decrement corresponding num values in KeyToNumValuesStore
*/
class SymmetricHashJoinStateManager(
val joinSide: JoinSide,
inputValueAttributes: Seq[Attribute],
joinKeys: Seq[Expression],
stateInfo: Option[StatefulOperatorStateInfo],
storeConf: StateStoreConf,
hadoopConf: Configuration,
partitionId: Int,
stateFormatVersion: Int) extends Logging {
import SymmetricHashJoinStateManager._
/*
=====================================================
Public methods
=====================================================
*/
/** Get all the values of a key */
def get(key: UnsafeRow): Iterator[UnsafeRow] = {
val numValues = keyToNumValues.get(key)
keyWithIndexToValue.getAll(key, numValues).map(_.value)
}
/** Append a new value to the key */
def append(key: UnsafeRow, value: UnsafeRow, matched: Boolean): Unit = {
val numExistingValues = keyToNumValues.get(key)
keyWithIndexToValue.put(key, numExistingValues, value, matched)
keyToNumValues.put(key, numExistingValues + 1)
}
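  // Editorial worked example of the two-store layout described in the class doc (illustrative):
  // after three append(k, v_i, matched = false) calls for the same key k,
  //   keyToNumValues      : k -> 3
  //   keyWithIndexToValue : (k, 0) -> v0, (k, 1) -> v1, (k, 2) -> v2
  // so get(k) reads the count 3 and then looks up indices 0, 1 and 2.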
/**
* Get all the matched values for given join condition, with marking matched.
* This method is designed to mark joined rows properly without exposing internal index of row.
*
* @param excludeRowsAlreadyMatched Do not join with rows already matched previously.
* This is used for right side of left semi join in
* [[StreamingSymmetricHashJoinExec]] only.
*/
def getJoinedRows(
key: UnsafeRow,
generateJoinedRow: InternalRow => JoinedRow,
predicate: JoinedRow => Boolean,
excludeRowsAlreadyMatched: Boolean = false): Iterator[JoinedRow] = {
val numValues = keyToNumValues.get(key)
keyWithIndexToValue.getAll(key, numValues).filterNot { keyIdxToValue =>
excludeRowsAlreadyMatched && keyIdxToValue.matched
}.map { keyIdxToValue =>
val joinedRow = generateJoinedRow(keyIdxToValue.value)
if (predicate(joinedRow)) {
if (!keyIdxToValue.matched) {
keyWithIndexToValue.put(key, keyIdxToValue.valueIndex, keyIdxToValue.value,
matched = true)
}
joinedRow
} else {
null
}
}.filter(_ != null)
}
/**
* Remove using a predicate on keys.
*
* This produces an iterator over the (key, value, matched) tuples satisfying condition(key),
* where the underlying store is updated as a side-effect of producing next.
*
* This implies the iterator must be consumed fully without any other operations on this manager
* or the underlying store being interleaved.
*/
def removeByKeyCondition(removalCondition: UnsafeRow => Boolean): Iterator[KeyToValuePair] = {
new NextIterator[KeyToValuePair] {
private val allKeyToNumValues = keyToNumValues.iterator
private var currentKeyToNumValue: KeyAndNumValues = null
private var currentValues: Iterator[KeyWithIndexAndValue] = null
private def currentKey = currentKeyToNumValue.key
private val reusedRet = new KeyToValuePair()
private def getAndRemoveValue(): KeyToValuePair = {
val keyWithIndexAndValue = currentValues.next()
keyWithIndexToValue.remove(currentKey, keyWithIndexAndValue.valueIndex)
reusedRet.withNew(currentKey, keyWithIndexAndValue.value, keyWithIndexAndValue.matched)
}
override def getNext(): KeyToValuePair = {
// If there are more values for the current key, remove and return the next one.
if (currentValues != null && currentValues.hasNext) {
return getAndRemoveValue()
}
// If there weren't any values left, try and find the next key that satisfies the removal
// condition and has values.
while (allKeyToNumValues.hasNext) {
currentKeyToNumValue = allKeyToNumValues.next()
if (removalCondition(currentKey)) {
currentValues = keyWithIndexToValue.getAll(currentKey, currentKeyToNumValue.numValue)
keyToNumValues.remove(currentKey)
if (currentValues.hasNext) {
return getAndRemoveValue()
}
}
}
// We only reach here if there were no satisfying keys left, which means we're done.
finished = true
return null
}
override def close(): Unit = {}
}
}
/**
* Remove using a predicate on values.
*
* At a high level, this produces an iterator over the (key, value, matched) tuples such that
* value satisfies the predicate, where producing an element removes the value from the
* state store and producing all elements with a given key updates it accordingly.
*
* This implies the iterator must be consumed fully without any other operations on this manager
* or the underlying store being interleaved.
*/
def removeByValueCondition(removalCondition: UnsafeRow => Boolean): Iterator[KeyToValuePair] = {
new NextIterator[KeyToValuePair] {
// Reuse this object to avoid creation+GC overhead.
private val reusedRet = new KeyToValuePair()
private val allKeyToNumValues = keyToNumValues.iterator
private var currentKey: UnsafeRow = null
private var numValues: Long = 0L
private var index: Long = 0L
private var valueRemoved: Boolean = false
// Push the data for the current key to the numValues store, and reset the tracking variables
// to their empty state.
private def updateNumValueForCurrentKey(): Unit = {
if (valueRemoved) {
if (numValues >= 1) {
keyToNumValues.put(currentKey, numValues)
} else {
keyToNumValues.remove(currentKey)
}
}
currentKey = null
numValues = 0
index = 0
valueRemoved = false
}
// Find the next value satisfying the condition, updating `currentKey` and `numValues` if
// needed. Returns null when no value can be found.
private def findNextValueForIndex(): ValueAndMatchPair = {
// Loop across all values for the current key, and then all other keys, until we find a
// value satisfying the removal condition.
def hasMoreValuesForCurrentKey = currentKey != null && index < numValues
def hasMoreKeys = allKeyToNumValues.hasNext
while (hasMoreValuesForCurrentKey || hasMoreKeys) {
if (hasMoreValuesForCurrentKey) {
// First search the values for the current key.
val valuePair = keyWithIndexToValue.get(currentKey, index)
if (removalCondition(valuePair.value)) {
return valuePair
} else {
index += 1
}
} else if (hasMoreKeys) {
// If we can't find a value for the current key, cleanup and start looking at the next.
// This will also happen the first time the iterator is called.
updateNumValueForCurrentKey()
val currentKeyToNumValue = allKeyToNumValues.next()
currentKey = currentKeyToNumValue.key
numValues = currentKeyToNumValue.numValue
} else {
// Should be unreachable, but in any case means a value couldn't be found.
return null
}
}
// We tried and failed to find the next value.
return null
}
override def getNext(): KeyToValuePair = {
val currentValue = findNextValueForIndex()
// If there's no value, clean up and finish. There aren't any more available.
if (currentValue == null) {
updateNumValueForCurrentKey()
finished = true
return null
}
// The backing store is arraylike - we as the caller are responsible for filling back in
// any hole. So we swap the last element into the hole and decrement numValues to shorten.
// clean
if (index != numValues - 1) {
val valuePairAtMaxIndex = keyWithIndexToValue.get(currentKey, numValues - 1)
if (valuePairAtMaxIndex != null) {
keyWithIndexToValue.put(currentKey, index, valuePairAtMaxIndex.value,
valuePairAtMaxIndex.matched)
} else {
val projectedKey = getInternalRowOfKeyWithIndex(currentKey)
logWarning(s"`keyWithIndexToValue` returns a null value for index ${numValues - 1} " +
s"at current key $projectedKey.")
}
}
keyWithIndexToValue.remove(currentKey, numValues - 1)
numValues -= 1
valueRemoved = true
return reusedRet.withNew(currentKey, currentValue.value, currentValue.matched)
}
override def close(): Unit = {}
}
}
// Unsafe row to internal row projection for key of `keyWithIndexToValue`.
lazy private val keyProjection = SafeProjection.create(keySchema)
/** Projects the key of unsafe row to internal row for printable log message. */
def getInternalRowOfKeyWithIndex(currentKey: UnsafeRow): InternalRow = keyProjection(currentKey)
/** Commit all the changes to all the state stores */
def commit(): Unit = {
keyToNumValues.commit()
keyWithIndexToValue.commit()
}
/** Abort any changes to the state stores if needed */
def abortIfNeeded(): Unit = {
keyToNumValues.abortIfNeeded()
keyWithIndexToValue.abortIfNeeded()
}
/** Get the combined metrics of all the state stores */
def metrics: StateStoreMetrics = {
val keyToNumValuesMetrics = keyToNumValues.metrics
val keyWithIndexToValueMetrics = keyWithIndexToValue.metrics
def newDesc(desc: String): String = s"${joinSide.toString.toUpperCase(Locale.ROOT)}: $desc"
StateStoreMetrics(
keyWithIndexToValueMetrics.numKeys, // represent each buffered row only once
keyToNumValuesMetrics.memoryUsedBytes + keyWithIndexToValueMetrics.memoryUsedBytes,
keyWithIndexToValueMetrics.customMetrics.map {
case (metric, value) => (metric.withNewDesc(desc = newDesc(metric.desc)), value)
}
)
}
/*
=====================================================
Private methods and inner classes
=====================================================
*/
private val keySchema = StructType(
joinKeys.zipWithIndex.map { case (k, i) => StructField(s"field$i", k.dataType, k.nullable) })
private val keyAttributes = keySchema.toAttributes
private val keyToNumValues = new KeyToNumValuesStore()
private val keyWithIndexToValue = new KeyWithIndexToValueStore(stateFormatVersion)
// Clean up any state store resources if necessary at the end of the task
Option(TaskContext.get()).foreach { _.addTaskCompletionListener[Unit] { _ => abortIfNeeded() } }
/** Helper trait for invoking common functionalities of a state store. */
private abstract class StateStoreHandler(stateStoreType: StateStoreType) extends Logging {
/** StateStore that the subclasses of this class is going to operate on */
protected def stateStore: StateStore
def commit(): Unit = {
stateStore.commit()
logDebug("Committed, metrics = " + stateStore.metrics)
}
def abortIfNeeded(): Unit = {
if (!stateStore.hasCommitted) {
logInfo(s"Aborted store ${stateStore.id}")
stateStore.abort()
}
}
def metrics: StateStoreMetrics = stateStore.metrics
/** Get the StateStore with the given schema */
protected def getStateStore(keySchema: StructType, valueSchema: StructType): StateStore = {
val storeProviderId = StateStoreProviderId(
stateInfo.get, partitionId, getStateStoreName(joinSide, stateStoreType))
val store = StateStore.get(
storeProviderId, keySchema, valueSchema, numColsPrefixKey = 0,
stateInfo.get.storeVersion, storeConf, hadoopConf)
logInfo(s"Loaded store ${store.id}")
store
}
}
/**
   * Helper class for representing data returned by [[KeyToNumValuesStore]].
* Designed for object reuse.
*/
private class KeyAndNumValues(var key: UnsafeRow = null, var numValue: Long = 0) {
def withNew(newKey: UnsafeRow, newNumValues: Long): this.type = {
this.key = newKey
this.numValue = newNumValues
this
}
}
/** A wrapper around a [[StateStore]] that stores [key -> number of values]. */
private class KeyToNumValuesStore extends StateStoreHandler(KeyToNumValuesType) {
private val longValueSchema = new StructType().add("value", "long")
private val longToUnsafeRow = UnsafeProjection.create(longValueSchema)
private val valueRow = longToUnsafeRow(new SpecificInternalRow(longValueSchema))
protected val stateStore: StateStore = getStateStore(keySchema, longValueSchema)
/** Get the number of values the key has */
def get(key: UnsafeRow): Long = {
val longValueRow = stateStore.get(key)
if (longValueRow != null) longValueRow.getLong(0) else 0L
}
/** Set the number of values the key has */
def put(key: UnsafeRow, numValues: Long): Unit = {
require(numValues > 0)
valueRow.setLong(0, numValues)
stateStore.put(key, valueRow)
}
def remove(key: UnsafeRow): Unit = {
stateStore.remove(key)
}
def iterator: Iterator[KeyAndNumValues] = {
val keyAndNumValues = new KeyAndNumValues()
stateStore.iterator().map { pair =>
keyAndNumValues.withNew(pair.key, pair.value.getLong(0))
}
}
}
/**
* Helper class for representing data returned by [[KeyWithIndexToValueStore]].
* Designed for object reuse.
*/
private class KeyWithIndexAndValue(
var key: UnsafeRow = null,
var valueIndex: Long = -1,
var value: UnsafeRow = null,
var matched: Boolean = false) {
def withNew(
newKey: UnsafeRow,
newIndex: Long,
newValue: UnsafeRow,
newMatched: Boolean): this.type = {
this.key = newKey
this.valueIndex = newIndex
this.value = newValue
this.matched = newMatched
this
}
def withNew(
newKey: UnsafeRow,
newIndex: Long,
newValue: ValueAndMatchPair): this.type = {
this.key = newKey
this.valueIndex = newIndex
if (newValue != null) {
this.value = newValue.value
this.matched = newValue.matched
} else {
this.value = null
this.matched = false
}
this
}
}
private trait KeyWithIndexToValueRowConverter {
/** Defines the schema of the value row (the value side of K-V in state store). */
def valueAttributes: Seq[Attribute]
/**
* Convert the value row to (actual value, match) pair.
*
* NOTE: implementations should ensure the result row is NOT reused during execution, so
* that caller can safely read the value in any time.
*/
def convertValue(value: UnsafeRow): ValueAndMatchPair
/**
* Build the value row from (actual value, match) pair. This is expected to be called just
* before storing to the state store.
*
* NOTE: depending on the implementation, the result row "may" be reused during execution
* (to avoid initialization of object), so the caller should ensure that the logic doesn't
* affect by such behavior. Call copy() against the result row if needed.
*/
def convertToValueRow(value: UnsafeRow, matched: Boolean): UnsafeRow
}
private object KeyWithIndexToValueRowConverter {
def create(version: Int): KeyWithIndexToValueRowConverter = version match {
case 1 => new KeyWithIndexToValueRowConverterFormatV1()
case 2 => new KeyWithIndexToValueRowConverterFormatV2()
case _ => throw new IllegalArgumentException("Incorrect state format version! " +
s"version $version")
}
}
private class KeyWithIndexToValueRowConverterFormatV1 extends KeyWithIndexToValueRowConverter {
override val valueAttributes: Seq[Attribute] = inputValueAttributes
override def convertValue(value: UnsafeRow): ValueAndMatchPair = {
if (value != null) ValueAndMatchPair(value, false) else null
}
override def convertToValueRow(value: UnsafeRow, matched: Boolean): UnsafeRow = value
}
private class KeyWithIndexToValueRowConverterFormatV2 extends KeyWithIndexToValueRowConverter {
private val valueWithMatchedExprs = inputValueAttributes :+ Literal(true)
private val indexOrdinalInValueWithMatchedRow = inputValueAttributes.size
private val valueWithMatchedRowGenerator = UnsafeProjection.create(valueWithMatchedExprs,
inputValueAttributes)
override val valueAttributes: Seq[Attribute] = inputValueAttributes :+
AttributeReference("matched", BooleanType)()
// Projection to generate key row from (value + matched) row
private val valueRowGenerator = UnsafeProjection.create(
inputValueAttributes, valueAttributes)
override def convertValue(value: UnsafeRow): ValueAndMatchPair = {
if (value != null) {
ValueAndMatchPair(valueRowGenerator(value).copy(),
value.getBoolean(indexOrdinalInValueWithMatchedRow))
} else {
null
}
}
override def convertToValueRow(value: UnsafeRow, matched: Boolean): UnsafeRow = {
val row = valueWithMatchedRowGenerator(value)
row.setBoolean(indexOrdinalInValueWithMatchedRow, matched)
row
}
}
/**
* A wrapper around a [[StateStore]] that stores the mapping; the mapping depends on the
* state format version - please refer implementations of [[KeyWithIndexToValueRowConverter]].
*/
private class KeyWithIndexToValueStore(stateFormatVersion: Int)
extends StateStoreHandler(KeyWithIndexToValueType) {
private val keyWithIndexExprs = keyAttributes :+ Literal(1L)
private val keyWithIndexSchema = keySchema.add("index", LongType)
private val indexOrdinalInKeyWithIndexRow = keyAttributes.size
// Projection to generate (key + index) row from key row
private val keyWithIndexRowGenerator = UnsafeProjection.create(keyWithIndexExprs, keyAttributes)
// Projection to generate key row from (key + index) row
private val keyRowGenerator = UnsafeProjection.create(
keyAttributes, keyAttributes :+ AttributeReference("index", LongType)())
private val valueRowConverter = KeyWithIndexToValueRowConverter.create(stateFormatVersion)
protected val stateStore = getStateStore(keyWithIndexSchema,
valueRowConverter.valueAttributes.toStructType)
def get(key: UnsafeRow, valueIndex: Long): ValueAndMatchPair = {
valueRowConverter.convertValue(stateStore.get(keyWithIndexRow(key, valueIndex)))
}
/**
* Get all values and indices for the provided key.
* Should not return null.
*/
def getAll(key: UnsafeRow, numValues: Long): Iterator[KeyWithIndexAndValue] = {
val keyWithIndexAndValue = new KeyWithIndexAndValue()
var index = 0
new NextIterator[KeyWithIndexAndValue] {
override protected def getNext(): KeyWithIndexAndValue = {
if (index >= numValues) {
finished = true
null
} else {
val keyWithIndex = keyWithIndexRow(key, index)
val valuePair = valueRowConverter.convertValue(stateStore.get(keyWithIndex))
keyWithIndexAndValue.withNew(key, index, valuePair)
index += 1
keyWithIndexAndValue
}
}
override protected def close(): Unit = {}
}
}
/** Put new value for key at the given index */
def put(key: UnsafeRow, valueIndex: Long, value: UnsafeRow, matched: Boolean): Unit = {
val keyWithIndex = keyWithIndexRow(key, valueIndex)
val valueWithMatched = valueRowConverter.convertToValueRow(value, matched)
stateStore.put(keyWithIndex, valueWithMatched)
}
/**
     * Remove the key and value at the given index. Note that this will create a hole in
     * (key, index) and it is up to the caller to deal with it.
*/
def remove(key: UnsafeRow, valueIndex: Long): Unit = {
stateStore.remove(keyWithIndexRow(key, valueIndex))
}
/** Remove all values (i.e. all the indices) for the given key. */
def removeAllValues(key: UnsafeRow, numValues: Long): Unit = {
var index = 0
while (index < numValues) {
stateStore.remove(keyWithIndexRow(key, index))
index += 1
}
}
def iterator: Iterator[KeyWithIndexAndValue] = {
val keyWithIndexAndValue = new KeyWithIndexAndValue()
stateStore.iterator().map { pair =>
val valuePair = valueRowConverter.convertValue(pair.value)
keyWithIndexAndValue.withNew(
keyRowGenerator(pair.key), pair.key.getLong(indexOrdinalInKeyWithIndexRow), valuePair)
keyWithIndexAndValue
}
}
    /** Generate a row using the key and index */
private def keyWithIndexRow(key: UnsafeRow, valueIndex: Long): UnsafeRow = {
val row = keyWithIndexRowGenerator(key)
row.setLong(indexOrdinalInKeyWithIndexRow, valueIndex)
row
}
}
}
object SymmetricHashJoinStateManager {
val supportedVersions = Seq(1, 2)
val legacyVersion = 1
def allStateStoreNames(joinSides: JoinSide*): Seq[String] = {
val allStateStoreTypes: Seq[StateStoreType] = Seq(KeyToNumValuesType, KeyWithIndexToValueType)
for (joinSide <- joinSides; stateStoreType <- allStateStoreTypes) yield {
getStateStoreName(joinSide, stateStoreType)
}
}
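  // Illustrative example (a sketch; assumes the JoinSide values, e.g. LeftSide/RightSide,
  // render as "left"/"right" via their toString):
  //   allStateStoreNames(LeftSide, RightSide) ==
  //     Seq("left-keyToNumValues", "left-keyWithIndexToValue",
  //         "right-keyToNumValues", "right-keyWithIndexToValue")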
private sealed trait StateStoreType
private case object KeyToNumValuesType extends StateStoreType {
override def toString(): String = "keyToNumValues"
}
private case object KeyWithIndexToValueType extends StateStoreType {
override def toString(): String = "keyWithIndexToValue"
}
private def getStateStoreName(joinSide: JoinSide, storeType: StateStoreType): String = {
s"$joinSide-$storeType"
}
/** Helper class for representing data (value, matched). */
case class ValueAndMatchPair(value: UnsafeRow, matched: Boolean)
/**
   * Helper class for representing the mapping from a key to its (value, matched) pair.
* Designed for object reuse.
*/
case class KeyToValuePair(
var key: UnsafeRow = null,
var value: UnsafeRow = null,
var matched: Boolean = false) {
def withNew(newKey: UnsafeRow, newValue: UnsafeRow, newMatched: Boolean): this.type = {
this.key = newKey
this.value = newValue
this.matched = newMatched
this
}
def withNew(newKey: UnsafeRow, newValue: ValueAndMatchPair): this.type = {
this.key = newKey
if (newValue != null) {
this.value = newValue.value
this.matched = newValue.matched
} else {
this.value = null
this.matched = false
}
this
}
}
}
|
ueshin/apache-spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/state/SymmetricHashJoinStateManager.scala
|
Scala
|
apache-2.0
| 26,435 |
package net.paploo.encountersimulator.sim
import java.util.UUID
import scala.language.implicitConversions
import net.paploo.encountersimulator.sim.Creature.CreatureId
object Creature {
type CreatureId = UUID
def apply(template: CreatureTemplate): Creature = ImmutableCreature(template = template)
def apply(name: Option[String], template: CreatureTemplate): Creature = ImmutableCreature(name = name, template = template)
/**
   * This method is not pure (it generates a fresh random UUID), which is not an ideal property.
   * @return a freshly generated unique CreatureId
*/
def createId: CreatureId = UUID.randomUUID
object Conversions {
implicit def creatureToCreatureId(creature: Creature): CreatureId = creature.id
}
}
trait Creature extends Livable {
def id: CreatureId = Creature.createId
def name: Option[String]
def template: CreatureTemplate
def damage: Int // The total damage.
def life = template.hitPoints - damage
def isAlive: Boolean = life > 0
def isDead: Boolean = !isAlive
def applyDamage(dmg: Int): Creature
}
case class ImmutableCreature(override val id: CreatureId = Creature.createId,
name: Option[String] = None,
template: CreatureTemplate,
damage: Int = 0) extends Creature {
override def applyDamage(dmg: Int): Creature = this.copy(damage = damage+dmg)
}
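// A minimal usage sketch (illustrative only; assumes the caller already has a
// CreatureTemplate instance, here named goblinTemplate, with a known hitPoints value):
object CreatureUsageSketch {
  def demo(goblinTemplate: CreatureTemplate): Unit = {
    val goblin = Creature(Some("Goblin"), goblinTemplate) // assigned a fresh CreatureId
    val wounded = goblin.applyDamage(3)                   // immutable update: goblin itself is unchanged
    println(s"alive=${wounded.isAlive}, remaining life=${wounded.life}")
  }
}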
|
paploo/EncounterSimulator
|
src/main/scala/net/paploo/encountersimulator/sim/Creature.scala
|
Scala
|
bsd-3-clause
| 1,341 |
package mr.merc.battle
import mr.merc.map.GameField
import mr.merc.battle.event._
import mr.merc.unit.Soldier
import mr.merc.map.hex.TerrainHex
import mr.merc.map.pathfind.PathFinder
import mr.merc.unit.Attack
import mr.merc.map.pathfind.PossibleMovesFinder
import mr.merc.unit.BeforeTurnAction
import mr.merc.unit.SoldierTurnState
import mr.merc.unit.NotHisTurn
import mr.merc.unit.HaveAttacked
import mr.merc.unit.CanntMoveAnyMore
import mr.merc.unit.StillCanMove
import mr.merc.unit.HaventMoved
import mr.merc.log.Logging
import mr.merc.players.Player
import mr.merc.unit.SoldierDefence
import scala.util.control.NonFatal
class BattleModel(val map: GameField) extends BattleModelEventHandler with Logging {
private val sides = map.sides
private var currentPlayerIndex = 0
def currentPlayer: Player = map.players(currentPlayerIndex)
private var prevSoldiersWithHexes: Option[List[(Soldier, TerrainHex)]] = None
map.hexField.soldierChangeListener = (_, _) => { prevSoldiersWithHexes = None }
def allSoldiersWithHexes: List[(Soldier, TerrainHex)] = {
prevSoldiersWithHexes match {
case Some(soldiers) => soldiers
case None =>
val sh = map.hexField.hexes.filter(_.soldier.isDefined).map(h => (h.soldier.get, h)).toList
prevSoldiersWithHexes = Some(sh)
prevSoldiersWithHexes.get
}
}
def allSoldiers: List[Soldier] = allSoldiersWithHexes.map(_._1)
setSoldierTurnState()
def handleEvent(event: BattleModelEvent): BattleModelEventResult = {
//debug(s"Battle model recevied event $event")
event match {
case MovementModelEvent(soldier, from, to) => handleMovementEvent(soldier, from, to)
case AttackModelEvent(soldier, from, target, attackNumber) => handleAttackEvent(soldier, from, target, attackNumber)
case EndMoveModelEvent => handleEndTurnEvent()
}
}
private[battle] def handleMovementEvent(soldier: Soldier, from: TerrainHex, to: TerrainHex): MovementModelEventResult = {
require(validateMovementEvent(soldier, from, to), s"movement from $from to $to is invalid")
require(to.soldier.isEmpty)
val path = PathFinder.findPath(map.gridForSoldier(soldier), from, to)
val movePrice = pathPrice(soldier.movementCostFunction, path.get)
soldier.movePointsRemain -= movePrice
from.soldier = None
to.soldier = Some(soldier)
soldier.turnState = soldierTurnState(to)
MovementModelEventResult(soldier, path.get)
}
private def setSoldierTurnState(): Unit = {
map.hexField.hexes.filter(_.soldier.isDefined).map(h => (h, h.soldier.get)).foreach {
case (hex, soldier) =>
soldier.turnState = soldierTurnState(hex)
}
}
def areFriends(player1:Player, player2:Player):Boolean = {
player1 == player2 ||
sides.exists(set => set.contains(player1) && set.contains(player2))
}
def areEnemies(player1:Player, player2:Player):Boolean = {
!areFriends(player1, player2)
}
private[battle] def handleEndTurnEvent(): EndMoveModelEventResult = {
require(validateEndTurn)
nextPlayer()
val beforeTurnActions = map.hexField.hexes.filter(_.soldier.isDefined).map(h => (h.x, h.y, h.soldier.get)).
filter(_._3.owner == currentPlayer).flatMap { case (x, y, s) => s.beforeTurnActions(map.hexField, x, y) }
val filteredActions = BeforeTurnAction.filterActions(beforeTurnActions.toSet).toList
val results = filteredActions flatMap (_.action())
allSoldiers.filter(_.owner == currentPlayer).foreach(_.beforeTurnRenowation())
setSoldierTurnState()
EndMoveModelEventResult(currentPlayer, results)
}
private[battle] def handleAttackEvent(soldier: Soldier, from: TerrainHex, target: TerrainHex, attackNumber: Int): AttackModelEventResult = {
require(validateAttackEvent(soldier, from, target, attackNumber), s"invalid attack attempt from $from to $target with attack $attackNumber")
val defender = target.soldier.get
val attackerAttack = soldier.soldierType.attacks(attackNumber)
val defenderAttack = Attack.selectBestAttackForDefender(soldier, defender, attackerAttack)
soldier.attackedThisTurn = true
val result = Attack.battle(from, target, attackerAttack, defenderAttack)
if (soldier.hp == 0) {
from.soldier = None
} else if (defender.hp == 0) {
target.soldier = None
}
if (soldier.hp != 0) {
soldier.turnState = soldierTurnState(from)
}
new AttackModelEventResult(from, target, soldier, defender, result)
}
def hexBySoldier(soldierOpt: Option[Soldier]): Option[TerrainHex] = soldierOpt map hexBySoldier
def hexBySoldier(soldier: Soldier): TerrainHex = map.hexField.hexes.find(h => h.soldier == Some(soldier)).get
def validateMovementEvent(soldier: Soldier, from: TerrainHex, to: TerrainHex, validatePath: Boolean = true, checkPlayer: Boolean = true): Boolean = {
if (checkPlayer && !(soldier.owner == currentPlayer)) {
return false
}
if (from == to) {
return false
}
if (soldier.attackedThisTurn) {
return false
}
if (soldier.movedThisTurn && map.zoneOfControlForEnemys(soldier.owner).contains(from)) {
return false
}
if (validatePath) {
val pathOpt = PathFinder.findOptimalPath(map.gridForSoldier(soldier), from, to, soldier.movePointsRemain, soldier.movedThisTurn)
pathOpt match {
case Some(path) => {
val price = pathPrice(soldier.movementCostFunction, path)
price <= soldier.movePointsRemain
}
case None => false
}
} else {
true
}
}
def possibleMoves(soldier: Soldier, currentHex: TerrainHex): Set[TerrainHex] = {
val movePoints = if (soldier.owner == currentPlayer) {
soldier.movePointsRemain
} else {
soldier.soldierType.movement
}
val movedThisTurn = if (soldier.owner == currentPlayer) {
soldier.movedThisTurn
} else {
false
}
PossibleMovesFinder.findPossibleMoves(map.gridForSoldier(soldier), currentHex,
movePoints, movedThisTurn).filterNot(h => h == currentHex || h.soldier.nonEmpty)
}
def possibleAttacksWhenThereAreNoMoves(soldier: Soldier, currentHex: TerrainHex): Set[TerrainHex] = {
    val neighbours = map.hexField.neighboursSet(currentHex)
    val enemiesNear = neighbours.filter(_.soldier.exists(s => areEnemies(s.owner, soldier.owner)))
if (enemiesNear.isEmpty) {
return Set()
}
if (soldier.movedThisTurn) {
enemiesNear
} else {
Set()
}
}
// TODO add test
def validateMovementAndAttack(soldier: Soldier, start: TerrainHex, destination: TerrainHex, underAttack: TerrainHex, attackNumber: Int): Boolean = {
if (start != destination) {
validateMovementEvent(soldier, start, destination) && validateAttackEvent(soldier, destination, underAttack, attackNumber)
} else {
validateAttackEvent(soldier, destination, underAttack, attackNumber)
}
}
// TODO allies test
def validateAttackEvent(soldier: Soldier, from: TerrainHex, target: TerrainHex, attackNumber: Int): Boolean = {
if (soldier.attackedThisTurn) {
return false
}
if (soldier.owner != currentPlayer) {
return false
}
if (soldier.soldierType.attacks.size <= attackNumber) {
return false
}
if (!map.hexField.neighbours(from).contains(target)) {
return false
}
target.soldier match {
case Some(enemy) => areEnemies(soldier.owner, enemy.owner)
case None => false
}
}
def validateEndTurn: Boolean = !isOver
private def nextPlayer(): Unit = {
currentPlayerIndex += 1
if (currentPlayerIndex == map.players.size) {
currentPlayerIndex = 0
}
}
private def pathPrice(cost: TerrainHex => Int, path: List[TerrainHex]): Int = {
path.tail.map(cost).sum
}
private[battle] def soldierTurnState(hex: TerrainHex): SoldierTurnState = {
val soldier = hex.soldier.get
if (soldier.owner != currentPlayer) {
return NotHisTurn
}
if (soldier.attackedThisTurn) {
return HaveAttacked
}
val moves = possibleMoves(soldier, hex)
if (moves.isEmpty) {
CanntMoveAnyMore
} else if (soldier.movedThisTurn) {
StillCanMove
} else {
HaventMoved
}
}
def isOver: Boolean = soldiersByAlliance.size == 1
def soldiersByAlliance: Map[Set[Player], List[Soldier]] = {
try {
allSoldiers.groupBy(s => sides.find(_.contains(s.owner)).get)
} catch {
case NonFatal(t) =>
error(s"Couldn't group soldiers: players are $sides and soldiers are $allSoldiers")
throw t
}
}
def defenceForSoldier(soldier: Soldier, hex: TerrainHex): SoldierDefence = Attack.calculateSoldierDefence(soldier, hex)
}
case class BattleResult(winningAlliance: List[Player])
|
RenualdMarch/merc
|
src/main/scala/mr/merc/battle/BattleModel.scala
|
Scala
|
gpl-3.0
| 8,770 |
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.codeStyle.JavaCodeStyleManager
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.refactoring.changeSignature.JavaParameterInfo
import com.intellij.refactoring.util.CanonicalTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import scala.beans.{BeanProperty, BooleanBeanProperty}
/**
* Nikolay.Tropin
* 2014-08-10
*/
class ScalaParameterInfo(@BeanProperty var name: String,
@BeanProperty val oldIndex: Int,
var scType: ScType,
val project: Project,
var isRepeatedParameter: Boolean,
var isByName: Boolean,
@BeanProperty var defaultValue: String = "",
var keywordsAndAnnotations: String = "",
val isIntroducedParameter: Boolean = false)
extends JavaParameterInfo {
def this(p: ScParameter) {
this(p.name, p.index, p.getType(TypingContext.empty).getOrAny, p.getProject, p.isRepeatedParameter, p.isCallByNameParameter,
keywordsAndAnnotations = ScalaParameterInfo.keywordsAndAnnotations(p))
}
var defaultForJava = defaultValue
@BooleanBeanProperty
var useAnySingleVariable: Boolean = false
val wasArrayType: Boolean = scType match {
case JavaArrayType(_) => true
case _ => false
}
  val isVarargType = false // Java overriders of a method with repeated parameters are not varargs
protected def psiType: PsiType = {
if (scType == null) return null
val allScope = GlobalSearchScope.allScope(project)
if (isByName) {
val functionType = ScFunctionType(scType, Seq())(project, allScope)
ScType.toPsi(functionType, project, allScope)
}
else if (isRepeatedParameter) {
val seqType = ScDesignatorType.fromClassFqn("scala.collection.Seq", project, allScope)
ScType.toPsi(ScParameterizedType(seqType, Seq(scType)), project, allScope)
}
else ScType.toPsi(scType, project, allScope)
}
override def createType(context: PsiElement, manager: PsiManager): PsiType = psiType
override def getValue(expr: PsiCallExpression): PsiExpression = {
if (defaultForJava.isEmpty) return null
val defaultText =
if (defaultForJava.contains("$default$")) {
val qual = expr match {
case mc: PsiMethodCallExpression =>
mc.getMethodExpression.getQualifierExpression match {
case s: PsiSuperExpression => ""
case null => ""
case q => q.getText + "."
}
case _ => ""
}
qual + defaultForJava
} else defaultForJava
val expression = JavaPsiFacade.getElementFactory(project).createExpressionFromText(defaultText, expr)
JavaCodeStyleManager.getInstance(project).shortenClassReferences(expression).asInstanceOf[PsiExpression]
}
override def getTypeWrapper: CanonicalTypes.Type = {
if (scType != null) CanonicalTypes.createTypeWrapper(psiType) else null
}
override def getTypeText: String =
if (scType != null) getTypeWrapper.getTypeText else null
def typeText = {
val baseText = Option(scType).fold("")(_.presentableText)
if (isRepeatedParameter) baseText + "*"
else if (isByName) " => " + baseText
else baseText
}
}
object ScalaParameterInfo {
def apply(p: ScParameter) = new ScalaParameterInfo(p)
def apply(project: Project) = new ScalaParameterInfo("", -1, null, project, false, false)
def keywordsAndAnnotations(p: ScParameter) = {
val nameId = p.nameId
val elems = p.children.takeWhile(_ != nameId)
elems.map(_.getText).mkString
}
def allForMethod(methodLike: ScMethodLike): Seq[Seq[ScalaParameterInfo]] = {
def infos(clause: ScParameterClause): Seq[ScalaParameterInfo] = clause.parameters.map(new ScalaParameterInfo(_))
methodLike.parameterList.clauses.map(infos)
}
}
|
LPTK/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
|
Scala
|
apache-2.0
| 4,290 |
package scala.slick.ast
import TypeUtil.typeToTypeUtil
import Util._
/** A SQL comprehension */
final case class Comprehension(from: Seq[(Symbol, Node)] = Seq.empty, where: Seq[Node] = Seq.empty, groupBy: Option[Node] = None, orderBy: Seq[(Node, Ordering)] = Seq.empty, select: Option[Node] = None, fetch: Option[Long] = None, offset: Option[Long] = None) extends DefNode {
type Self = Comprehension
val nodeChildren = from.map(_._2) ++ where ++ groupBy ++ orderBy.map(_._1) ++ select
override def nodeChildNames =
from.map("from " + _._1) ++
where.zipWithIndex.map("where" + _._2) ++
groupBy.map(_ => "groupBy") ++
orderBy.map("orderBy " + _._2) ++
select.map(_ => "select")
protected[this] def nodeRebuild(ch: IndexedSeq[Node]) = {
val newFrom = ch.slice(0, from.length)
val whereOffset = newFrom.length
val newWhere = ch.slice(whereOffset, whereOffset + where.length)
val groupByOffset = whereOffset + newWhere.length
val newGroupBy = ch.slice(groupByOffset, groupByOffset + (if(groupBy.isDefined) 1 else 0))
val orderByOffset = groupByOffset + newGroupBy.length
val newOrderBy = ch.slice(orderByOffset, orderByOffset + orderBy.length)
val selectOffset = orderByOffset + newOrderBy.length
val newSelect = ch.slice(selectOffset, selectOffset + (if(select.isDefined) 1 else 0))
copy(
from = (from, newFrom).zipped.map { case ((s, _), n) => (s, n) },
where = newWhere,
groupBy = if(newGroupBy.isEmpty) None else Some(newGroupBy.head),
orderBy = (orderBy, newOrderBy).zipped.map { case ((_, o), n) => (n, o) },
select = if(newSelect.isEmpty) None else Some(newSelect.head)
)
}
def nodeGenerators = from
override def toString = "Comprehension(fetch = "+fetch+", offset = "+offset+")"
protected[this] def nodeRebuildWithGenerators(gen: IndexedSeq[Symbol]) =
copy(from = (from, gen).zipped.map { case ((_, n), s) => (s, n) })
def nodeWithComputedType2(scope: SymbolScope, typeChildren: Boolean, retype: Boolean): Self = {
// Assign types to all "from" Nodes and compute the resulting scope
val (genScope, f2) = from.foldLeft((scope, Vector.empty[Node])) { case ((sc, n2s), (s, n)) =>
val n2 = n.nodeWithComputedType(sc, typeChildren, retype)
val sc2 = sc + (s -> n2.nodeType.asCollectionType.elementType)
(sc2, n2s :+ n2)
}
// Assign types to "where", "groupBy", "orderBy" and "select" Nodes
val w2 = mapOrNone(where)(_.nodeWithComputedType(genScope, typeChildren, retype))
val g2 = mapOrNone(groupBy)(_.nodeWithComputedType(genScope, typeChildren, retype))
val o = orderBy.map(_._1)
val o2 = mapOrNone(o)(_.nodeWithComputedType(genScope, typeChildren, retype))
val s2 = mapOrNone(select)(_.nodeWithComputedType(genScope, typeChildren, retype))
// Check if the nodes changed
val same = (from, f2).zipped.map(_._2 eq _).forall(identity) &&
w2.isEmpty && g2.isEmpty && o2.isEmpty && s2.isEmpty
val newSel = s2.map(_.headOption).getOrElse(select)
val newType =
if(!nodeHasType || retype) {
newSel match {
case Some(sel) => sel.nodeType
case None =>
val el = f2.last.nodeType.asCollectionType.elementType
val tc = f2.head.nodeType.asCollectionType.cons
CollectionType(tc, el)
}
} else nodeType
if(same && newType == nodeType) this else {
// Compute result type
copy(
from = (from, f2).zipped.map { case ((s, _), n) => (s, n) },
where = w2.getOrElse(where),
groupBy = g2.map(_.headOption).getOrElse(groupBy),
orderBy = o2.map(o2 => (orderBy, o2).zipped.map { case ((_, o), n) => (n, o) }).getOrElse(orderBy),
select = s2.map(_.headOption).getOrElse(select)
).nodeTyped(newType)
}
}
}
/** The row_number window function */
final case class RowNumber(by: Seq[(Node, Ordering)] = Seq.empty) extends TypedNode {
type Self = RowNumber
def tpe = ScalaBaseType.longType
lazy val nodeChildren = by.map(_._1)
protected[this] def nodeRebuild(ch: IndexedSeq[Node]) =
copy(by = by.zip(ch).map{ case ((_, o), n) => (n, o) })
override def nodeChildNames = by.zipWithIndex.map("by" + _._2)
override def toString = "RowNumber"
}
|
retronym/slick
|
src/main/scala/scala/slick/ast/Comprehension.scala
|
Scala
|
bsd-2-clause
| 4,267 |
/*
* This file is part of eCobertura.
*
* Copyright (c) 2009, 2010 Joachim Hofer
* All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package ecobertura.ui.views.session.commands
import org.eclipse.core.commands._
import org.eclipse.swt.SWT
import org.eclipse.swt.widgets.FileDialog
import org.eclipse.ui.handlers.HandlerUtil
import ecobertura.core.cobertura.CoberturaWrapper
import ecobertura.core.data.CoverageSession
import ecobertura.ui.views.session.CoverageSessionModel
class OpenCoverageSessionHandler extends AbstractHandler {
override def execute(event: ExecutionEvent) = {
val sessionFilename = retrieveCoverageSessionFilename(event)
val projectData = CoberturaWrapper.get.projectDataFromFile(sessionFilename)
CoverageSessionModel.get.addCoverageSession(
CoverageSession.fromCoberturaProjectData(projectData))
null // handlers must return null
}
private def retrieveCoverageSessionFilename(event: ExecutionEvent) = {
val parentShell = HandlerUtil.getActiveShell(event)
val dialog = new FileDialog(parentShell, SWT.OPEN)
dialog.setFilterNames(Array("Cobertura Session Files (*.ser)", "All Files (*)"))
dialog.setFilterExtensions(Array("*.ser", "*"))
dialog.open
}
}
|
jmhofer/eCobertura
|
ecobertura.ui/src/main/scala/ecobertura/ui/views/session/commands/OpenCoverageSessionHandler.scala
|
Scala
|
epl-1.0
| 1,419 |
//
// RemoteFuture.scala -- Scala classes RemoteFuture and RemoteFutureReader, and trait RemoteFutureManager
// Project PorcE
//
// Created by jthywiss on Jan 13, 2016.
//
// Copyright (c) 2019 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.run.distrib.porce
import orc.FutureReader
import orc.run.distrib.Logger
import orc.run.distrib.common.RemoteRef
import orc.run.porce.runtime.Future
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary
/** A reference to an Future value at another Location.
*
* @author jthywiss
*/
class RemoteFutureRef(futureManager: RemoteFutureManager, override val remoteRefId: RemoteFutureRef#RemoteRefId, raceFreeResolution: Boolean) extends Future(raceFreeResolution) with RemoteRef {
override def toString: String = f"${getClass.getName}(remoteRefId=$remoteRefId%#x,cachedState=${get},numWaiters=$numWaiters)"
/** Resolve this to a value and call publish and halt on each blocked FutureReader.
*/
@TruffleBoundary(allowInlining = true) @noinline
override def bind(v: AnyRef) = {
if (raceFreeResolution) {
Logger.Futures.finest("Future $futureId%#x: Shortcutting bind communication, since raceFreeResolution=true")
localBind(v)
}
futureManager.sendFutureResolution(remoteRefId, Some(v))
}
/** Resolve this to stop and call halt on each blocked FutureReader.
*/
@TruffleBoundary(allowInlining = true) @noinline
override def stop(): Unit = {
if (raceFreeResolution) {
Logger.Futures.finest("Future $futureId%#x: Shortcutting bind communication, since raceFreeResolution=true")
localStop()
}
futureManager.sendFutureResolution(remoteRefId, None)
}
futureManager.sendReadFuture(remoteRefId)
}
/** A remote reader that is blocked awaiting a local Future value.
*
* @author jthywiss
*/
class RemoteFutureReader(val fut: Future, val futureManager: RemoteFutureManager, futureId: RemoteFutureManager#RemoteFutureId) extends FutureReader {
protected var readerLocations = new java.util.HashSet[PeerLocation](4)
override def toString: String = f"${getClass.getName}(fut=$fut, futureManager=$futureManager, futureId=$futureId%#x)"
def addReader(l: PeerLocation): Unit = synchronized {
readerLocations.add(l)
if (readerLocations.size == 1) {
fut.read(this)
}
}
protected def getAndClearReaders(): Array[PeerLocation] = synchronized {
import scala.collection.JavaConverters._
val readers = readerLocations.asScala.toArray
readerLocations.clear()
readers
}
override def publish(v: AnyRef): Unit = synchronized {
Logger.Futures.entering(getClass.getName, "publish", Seq(s"$this publish $v"))
futureManager.sendFutureResult(getAndClearReaders(), futureId, fut, Some(v))
}
override def halt(): Unit = synchronized {
Logger.Futures.entering(getClass.getName, "halt", Seq(s"$this halt"))
futureManager.sendFutureResult(getAndClearReaders(), futureId, fut, None)
}
}
/** A mix-in to manage remote futures.
*
* @author jthywiss
*/
trait RemoteFutureManager {
execution: DOrcExecution =>
type RemoteFutureId = RemoteFutureRef#RemoteRefId
// These two maps are inverses of each other (sorta)
// TODO: Determine when a served RemoteFutureId is no longer referenced, and remove entries from these maps.
/** Map from a local ("real") future to its assigned RemoteFutureId. */
protected val servingLocalFutures = new java.util.concurrent.ConcurrentHashMap[Future, RemoteFutureId]
/** Map from a RemoteFutureId for a local future to its local proxy for the remote readers. */
protected val servingRemoteFutures = new java.util.concurrent.ConcurrentHashMap[RemoteFutureId, RemoteFutureReader]
/** Map from a RemoteFutureId for a remote future to its local proxy
* (RemoteFutureRef), which local FutureReaders block on.
*/
protected val waitingReaders = new java.util.concurrent.ConcurrentHashMap[RemoteFutureId, RemoteFutureRef]
/** Given a future (local or remote), get its RemoteFutureId. If the
* future is a local future that hasn't been exposed as a remote future
* previously, set it up to be remotely ref'ed, and return its new ID.
*/
def ensureFutureIsRemotelyAccessibleAndGetId(fut: Future): RemoteFutureId = {
//Logger.Futures.entering(getClass.getName, "ensureFutureIsRemotelyAccessibleAndGetId", Seq(fut))
fut match {
case rfut: RemoteFutureRef => rfut.remoteRefId
case _ => servingLocalFutures.computeIfAbsent(fut, fut => {
val newFutureId = execution.freshRemoteRefId()
val newReader = new RemoteFutureReader(fut, execution, newFutureId)
servingRemoteFutures.put(newFutureId, newReader)
newFutureId
})
}
}
/** Get the future for the given ID. If the ID refers to a future at this
* location, that future is returned. Otherwise, a RemoteFutureRef for
* the future is returned.
*/
def futureForId(futureId: RemoteFutureId, raceFreeResolution: Boolean): Future = {
if (execution.homeLocationForRemoteRef(futureId) == execution.runtime.asInstanceOf[DOrcRuntime].here) {
servingRemoteFutures.get(futureId).fut ensuring (_.raceFreeResolution == raceFreeResolution)
} else {
waitingReaders.computeIfAbsent(futureId, new RemoteFutureRef(execution, _, raceFreeResolution))
}
}
/** Send request to be sent the resolution of a remote future. */
def sendReadFuture(futureId: RemoteFutureId): Unit = {
val homeLocation = execution.homeLocationForRemoteRef(futureId)
Tracer.traceFutureReadSend(futureId, execution.runtime.here, homeLocation)
homeLocation.sendInContext(execution)(ReadFutureCmd(executionId, futureId, execution.runtime.runtimeId))
}
/** Record remote request to be sent the resolution of a future we're serving. */
def readFuture(futureId: RemoteFutureId, readerFollowerRuntimeId: DOrcRuntime.RuntimeId): Unit = {
Logger.Futures.fine(f"Posting read on $futureId%#x, with reader at runtime ${readerFollowerRuntimeId.longValue}%x")
servingRemoteFutures.get(futureId).addReader(execution.runtime.locationForRuntimeId(readerFollowerRuntimeId))
}
/** Send the resolution of a future we're serving. */
def sendFutureResult(readers: Traversable[PeerLocation], futureId: RemoteFutureId, fut: Future, value: Option[AnyRef]): Unit = {
Logger.Futures.entering(getClass.getName, "sendFutureResult", Seq(readers, "0x" + futureId.toHexString, value))
readers foreach { reader =>
val mv = value.map(execution.marshalValue(reader)(_))
Tracer.traceFutureResultSend(futureId, execution.runtime.here, reader)
reader.sendInContext(execution)(DeliverFutureResultCmd(execution.executionId, futureId, mv))
}
servingLocalFutures.remove(fut)
// TODO: PERFORMANCE: See servingRemoteFutures declaration
//servingRemoteFutures.remove(futureId)
}
/** Locally deliver the resolution of a remote future. */
def deliverFutureResult(origin: PeerLocation, futureId: RemoteFutureId, value: Option[AnyRef]): Unit = {
val reader = waitingReaders.get(futureId)
if (reader != null) {
value match {
case Some(v) => {
val unmarshalledValue = execution.unmarshalValue(origin)(v)
Logger.Futures.fine(f"Future $futureId%#x (reader $reader) was resolved to $unmarshalledValue")
reader.localBind(unmarshalledValue)
}
case None => {
Logger.Futures.fine(f"Future $futureId%#x (reader $reader) was resolved to stop")
reader.localStop()
}
}
waitingReaders.remove(futureId)
} else {
Logger.Futures.finer(f"deliverFutureResult reader not found, id=$futureId%#x")
}
}
/** Send our resolution of a remote future. */
def sendFutureResolution(futureId: RemoteFutureId, value: Option[AnyRef]): Unit = {
val homeLocation = execution.homeLocationForRemoteRef(futureId)
val marshaledFutureValue = value match {
case Some(v) => Some(execution.marshalValue(homeLocation)(v))
case None => value
}
Tracer.traceFutureResolveSend(futureId, execution.runtime.here, homeLocation)
homeLocation.sendInContext(execution)(ResolveFutureCmd(executionId, futureId, marshaledFutureValue))
}
/** Handle remote resolution of a future we're serving. */
def receiveFutureResolution(origin: PeerLocation, futureId: RemoteFutureId, value: Option[AnyRef]): Unit = {
value match {
case Some(v) => servingRemoteFutures.get(futureId).fut.bind(execution.unmarshalValue(origin)(v))
case None => servingRemoteFutures.get(futureId).fut.stop()
}
}
}
|
orc-lang/orc
|
PorcE/src/orc/run/distrib/porce/RemoteFuture.scala
|
Scala
|
bsd-3-clause
| 8,842 |
/*
* Copyright 2014 Lars Edenbrandt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.nimsa.sbx.dicom.streams
import akka.NotUsed
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import se.nimsa.dicom.data.DicomParts.DicomPart
import se.nimsa.dicom.data.{Tag, TagPath, VR}
import se.nimsa.dicom.streams.DicomFlows.tagFilter
import se.nimsa.dicom.streams.ModifyFlow.{TagInsertion, TagModification, modifyFlow}
import se.nimsa.sbx.anonymization.AnonymizationUtil._
import se.nimsa.sbx.anonymization.{AnonymizationOp, AnonymizationProfile}
import se.nimsa.sbx.dicom.DicomUtil.toAsciiBytes
class AnonymizationFlow(profile: AnonymizationProfile) {
import AnonymizationOp._
def anonFlow: Flow[DicomPart, DicomPart, NotUsed] = {
tagFilter(_ => true) { tagPath =>
!tagPath.toList.map(_.tag).flatMap(profile.opOf)
.exists {
case REMOVE => true
case REMOVE_OR_ZERO => true // always remove (limitation)
case REMOVE_OR_DUMMY => true // always remove (limitation)
case REMOVE_OR_ZERO_OR_DUMMY => true // always remove (limitation)
case REMOVE_OR_ZERO_OR_REPLACE_UID => true // always remove (limitation)
case _ => false
}
}
.via(modifyFlow(
Seq(
TagModification(tagPath =>
profile.opOf(tagPath.tag).contains(REPLACE_UID), _ => createUid()),
TagModification(tagPath =>
profile.opOf(tagPath.tag).exists {
case DUMMY => true // zero instead of replace with dummy (limitation)
case CLEAN => true // zero instead of replace with cleaned value (limitation)
case ZERO => true
case ZERO_OR_DUMMY => true // always zero (limitation)
case _ => false
}, _ => ByteString.empty
)
),
Seq(
TagInsertion(TagPath.fromTag(Tag.DeidentificationMethod), _ => toAsciiBytes(profile.options.map(_.description).mkString(" - "), VR.LO)),
TagInsertion(TagPath.fromTag(Tag.PatientIdentityRemoved), _ => toAsciiBytes("YES", VR.CS)),
)))
}
}
|
slicebox/slicebox
|
src/main/scala/se/nimsa/sbx/dicom/streams/AnonymizationFlow.scala
|
Scala
|
apache-2.0
| 2,631 |
// Copyright (c) 2010, Stephen D. Strowes
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * The author named in the above copyright notice may not be used to
// endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package com.sdstrowes.util
import scala.io.Source
import scala.collection.mutable.{Set, Map}
import scala.actors.Actor
case class DoneDijkstra(origin: Int, dists: Map[Int, Byte])
case class Dijkstra(origin: Int)
case object Continue
case object Terminate
/* ------------------------------------------------------------------------ */
class Spinner(g: Graph[Int]) extends Actor
{
def act() {
var alive = true
while(alive) {
receive {
case Dijkstra(n) => {
try {
val result = g.dijkstra(n)
val distances = result._1
sender ! DoneDijkstra(n, distances)
} catch {
case e:java.lang.AssertionError => {
System.err.println("Source "+n+" not in graph!")
sender ! Continue
}
}
}
case Terminate =>
alive = false
case _ =>
System.err.println("Error in Spinner.")
}
}
}
}
/* ------------------------------------------------------------------------ */
class Master(args: Array[String], graph: Graph[Int]) extends Actor
{
var spinners = List[Spinner]()
def act() {
/* Kick start the spinners. */
for (i <- 1 to java.lang.Runtime.getRuntime().availableProcessors()) {
spinners = (new Spinner(graph)) :: spinners
}
spinners.foreach(s => s.start())
/* Determine the set of origins to run Dijkstra's algorithm from. If
* there is no input file describing a subset of the graph, then assume
* all-pairs. */
var nodes = if (args.size == 1) {
var temp = Set[Int]()
for (line <- Source.fromFile(new java.io.File(args(0))).getLines()) {
val pair = line.trim.split(" ")
val source = pair(0).toInt
val dest = pair(1).toInt
//temp = temp + scala.math.min(source, dest)
temp = temp + source
temp = temp + dest
}
temp.toList
}
else {
graph.vertices.toList
}
/* Retain a copy; I'm going to walk through the full list. */
val allNodes = nodes
val distances = new DistanceTable(graph.vertices.toList)
var total = nodes.size
var completed = 0
val dStartTime = System.currentTimeMillis()
/* Generate the distances table by calculating Dijkstra from all points */
for (i <- 0 until scala.math.min(total, spinners.size)) {
spinners(i) ! Dijkstra(nodes.head)
nodes = nodes.tail
}
/* -- Split ------------------------------------------------------------ */
while(completed < total) {
receive {
case DoneDijkstra(origin, d) => {
d.keys.foreach(destination => {
val distance = d(destination)
distances.set(origin, destination, distance)
})
if (!nodes.isEmpty) {
sender ! Dijkstra(nodes.head)
nodes = nodes.tail
}
completed += 1
}
case Continue => {
if (!nodes.isEmpty) {
sender ! Dijkstra(nodes.head)
nodes = nodes.tail
}
completed += 1
}
case _ => {
System.err.println("2. Error in Master")
}
}
}
/* -- Join ------------------------------------------------------------- */
val dEndTime = System.currentTimeMillis()
/* Output the distances */
if (args.size == 1) {
for (line <- Source.fromFile(new java.io.File(args(0))).getLines()) {
val pair = line.trim.split(" ")
val source = pair(0).toInt
val dest = pair(1).toInt
println(source+" "+dest+" "+distances.dist(source, dest))
}
}
else {
for (nodeA <- allNodes)
for (nodeB <- allNodes)
println(nodeA +" "+ nodeB +" "+distances.dist(nodeA, nodeB))
}
spinners.foreach(s => s ! Terminate)
println("Took "+(dEndTime - dStartTime)+"ms to run Dijkstra from all locations.")
exit
}
}
/* ========== Bootstrap =================================================== */
object ParallelDijkstraTest {
def main(args: Array[String]) : Unit = {
val dijkstraGraph = new Graph[Int]()
val gStartTime = System.currentTimeMillis()
val lines = Source.fromInputStream(System.in).getLines()
while (lines.hasNext) {
val line = lines.next
val temp = line.trim.split(' ')
val asnums = temp.slice(0,2).map(str => str.toInt)
dijkstraGraph.addEdge(asnums(0), asnums(1))
}
val gEndTime = System.currentTimeMillis()
/* Run algorithm. */
val master = new Master(args, dijkstraGraph)
master.start()
/* Output stats. */
println("Processed graph of "+dijkstraGraph.size+" nodes.")
println("Took "+(gEndTime - gStartTime)+"ms to read the graph.")
}
}
|
sdstrowes/scala-util
|
ParallelDijkstra.scala
|
Scala
|
bsd-3-clause
| 7,167 |
package suggestions
package gui
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.swing._
import scala.util.{ Try, Success, Failure }
import scala.swing.event._
import swing.Swing._
import javax.swing.UIManager
import Orientation._
import rx.subscriptions.CompositeSubscription
import rx.lang.scala.Observable
import rx.lang.scala.Subscription
import observablex._
import search._
object WikipediaSuggest extends SimpleSwingApplication with ConcreteSwingApi with ConcreteWikipediaApi {
{
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName())
} catch {
case t: Throwable =>
}
}
def top = new MainFrame {
/* gui setup */
title = "Query Wikipedia"
minimumSize = new Dimension(900, 600)
val button = new Button("Get") {
icon = new javax.swing.ImageIcon(javax.imageio.ImageIO.read(this.getClass.getResourceAsStream("/suggestions/wiki-icon.png")))
}
val searchTermField = new TextField
val suggestionList = new ListView(ListBuffer[String]())
val status = new Label(" ")
val editorpane = new EditorPane {
import javax.swing.border._
border = new EtchedBorder(EtchedBorder.LOWERED)
editable = false
peer.setContentType("text/html")
}
contents = new BoxPanel(orientation = Vertical) {
border = EmptyBorder(top = 5, left = 5, bottom = 5, right = 5)
contents += new BoxPanel(orientation = Horizontal) {
contents += new BoxPanel(orientation = Vertical) {
maximumSize = new Dimension(240, 900)
border = EmptyBorder(top = 10, left = 10, bottom = 10, right = 10)
contents += new BoxPanel(orientation = Horizontal) {
maximumSize = new Dimension(640, 30)
border = EmptyBorder(top = 5, left = 0, bottom = 5, right = 0)
contents += searchTermField
}
contents += new ScrollPane(suggestionList)
contents += new BorderPanel {
maximumSize = new Dimension(640, 30)
add(button, BorderPanel.Position.Center)
}
}
contents += new ScrollPane(editorpane)
}
contents += status
}
val eventScheduler = SchedulerEx.SwingEventThreadScheduler
/**
* Observables
* You may find the following methods useful when manipulating GUI elements:
* `myListView.listData = aList` : sets the content of `myListView` to `aList`
* `myTextField.text = "react"` : sets the content of `myTextField` to "react"
* `myListView.selection.items` returns a list of selected items from `myListView`
* `myEditorPane.text = "act"` : sets the content of `myEditorPane` to "act"
*/
// TO IMPLEMENT
val searchTerms: Observable[String] = ???
// TO IMPLEMENT
val suggestions: Observable[Try[List[String]]] = ???
// TO IMPLEMENT
val suggestionSubscription: Subscription = suggestions.observeOn(eventScheduler) subscribe {
x => ???
}
// TO IMPLEMENT
val selections: Observable[String] = ???
// TO IMPLEMENT
val pages: Observable[Try[String]] = ???
// TO IMPLEMENT
val pageSubscription: Subscription = pages.observeOn(eventScheduler) subscribe {
x => ???
}
}
}
trait ConcreteWikipediaApi extends WikipediaApi {
def wikipediaSuggestion(term: String) = Search.wikipediaSuggestion(term)
def wikipediaPage(term: String) = Search.wikipediaPage(term)
}
trait ConcreteSwingApi extends SwingApi {
type ValueChanged = scala.swing.event.ValueChanged
object ValueChanged {
def unapply(x: Event) = x match {
case vc: ValueChanged => Some(vc.source.asInstanceOf[TextField])
case _ => None
}
}
type ButtonClicked = scala.swing.event.ButtonClicked
object ButtonClicked {
def unapply(x: Event) = x match {
case bc: ButtonClicked => Some(bc.source.asInstanceOf[Button])
case _ => None
}
}
type TextField = scala.swing.TextField
type Button = scala.swing.Button
}
|
vasnake/Principles-of-Reactive-Programming
|
w4/suggestions/src/main/scala/suggestions/gui/WikipediaSuggest.scala
|
Scala
|
gpl-3.0
| 4,113 |
package edu.gemini.model.p1.dtree.inst
import org.specs2.mutable.Specification
import edu.gemini.model.p1.immutable.{GracesReadMode, GracesFiberMode}
class GracesSpec extends Specification {
"The Graces decision tree" should {
"includes Graces fiber modes" in {
val graces = Graces()
graces.title must beEqualTo("Fiber Mode")
graces.choices must have size 2
}
"includes Graces read modes" in {
val graces = Graces()
val readModeChoice = graces.apply(GracesFiberMode.forName("ONE_FIBER"))
readModeChoice must beLeft
val readMode = readModeChoice.left.get
readMode.title must beEqualTo("Read Mode")
readMode.choices must have size 3
}
"build a Graces blueprint" in {
val graces = Graces()
val readModeChoice = graces.apply(GracesFiberMode.forName("ONE_FIBER"))
readModeChoice must beLeft
val readMode = readModeChoice.left.get
val blueprint = readMode.apply(GracesReadMode.forName("SLOW"))
blueprint must beRight
}
}
}
|
fnussber/ocs
|
bundle/edu.gemini.model.p1/src/test/scala/edu/gemini/model/p1/dtree/inst/GracesSpec.scala
|
Scala
|
bsd-3-clause
| 1,039 |
package org.json4s.reflect
import java.lang.reflect.{Field, TypeVariable}
import org.json4s.{JArray, JObject, JValue}
object ScalaType {
private val types = new Memo[Manifest[_], ScalaType]
private val singletonFieldName = "MODULE$"
def apply[T](mf: Manifest[T]): ScalaType = {
/* optimization */
if (mf.runtimeClass == classOf[Int] || mf.runtimeClass == classOf[java.lang.Integer]) ScalaType.IntType
else if (mf.runtimeClass == classOf[Long] || mf.runtimeClass == classOf[java.lang.Long]) ScalaType.LongType
else if (mf.runtimeClass == classOf[Byte] || mf.runtimeClass == classOf[java.lang.Byte]) ScalaType.ByteType
else if (mf.runtimeClass == classOf[Short] || mf.runtimeClass == classOf[java.lang.Short]) ScalaType.ShortType
else if (mf.runtimeClass == classOf[Float] || mf.runtimeClass == classOf[java.lang.Float]) ScalaType.FloatType
else if (mf.runtimeClass == classOf[Double] || mf.runtimeClass == classOf[java.lang.Double]) ScalaType.DoubleType
else if (mf.runtimeClass == classOf[BigInt] || mf.runtimeClass == classOf[java.math.BigInteger])
ScalaType.BigIntType
else if (mf.runtimeClass == classOf[BigDecimal] || mf.runtimeClass == classOf[java.math.BigDecimal])
ScalaType.BigDecimalType
else if (mf.runtimeClass == classOf[Boolean] || mf.runtimeClass == classOf[java.lang.Boolean]) ScalaType.BooleanType
else if (mf.runtimeClass == classOf[String] || mf.runtimeClass == classOf[java.lang.String]) ScalaType.StringType
else if (mf.runtimeClass == classOf[java.util.Date]) ScalaType.DateType
else if (mf.runtimeClass == classOf[java.sql.Timestamp]) ScalaType.TimestampType
else if (mf.runtimeClass == classOf[Symbol]) ScalaType.SymbolType
else if (mf.runtimeClass == classOf[Number]) ScalaType.NumberType
else if (mf.runtimeClass == classOf[JObject]) ScalaType.JObjectType
else if (mf.runtimeClass == classOf[JArray]) ScalaType.JArrayType
else if (mf.runtimeClass == classOf[JValue]) ScalaType.JValueType
/* end optimization */
else {
if (mf.typeArguments.isEmpty) types(mf, new ScalaType(_))
else new ScalaType(mf)
}
}
def apply(erasure: Class[_], typeArgs: Seq[ScalaType] = Seq.empty): ScalaType = {
val mf = ManifestFactory.manifestOf(erasure, typeArgs.map(_.manifest))
ScalaType(mf)
}
def apply(target: TypeInfo): ScalaType = {
target match {
case t: TypeInfo with SourceType => t.scalaType
case t =>
val tArgs =
t.parameterizedType.map(_.getActualTypeArguments.toList.map(Reflector.scalaTypeOf(_))).getOrElse(Nil)
ScalaType(target.clazz, tArgs)
}
}
// Deal with the most common cases as an optimization
/* optimization */
private val IntType: ScalaType = new PrimitiveScalaType(Manifest.Int)
private val NumberType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[Number]))
private val LongType: ScalaType = new PrimitiveScalaType(Manifest.Long)
private val ByteType: ScalaType = new PrimitiveScalaType(Manifest.Byte)
private val ShortType: ScalaType = new PrimitiveScalaType(Manifest.Short)
private val BooleanType: ScalaType = new PrimitiveScalaType(Manifest.Boolean)
private val FloatType: ScalaType = new PrimitiveScalaType(Manifest.Float)
private val DoubleType: ScalaType = new PrimitiveScalaType(Manifest.Double)
private val StringType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[java.lang.String]))
private val SymbolType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[Symbol]))
private val BigDecimalType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[BigDecimal]))
private val BigIntType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[BigInt]))
private val JValueType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[JValue]))
private val JObjectType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[JObject]))
private val JArrayType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[JArray]))
private val DateType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[java.util.Date]))
private val TimestampType: ScalaType = new PrimitiveScalaType(Manifest.classType(classOf[java.sql.Timestamp]))
private[json4s] val ListObject: ScalaType =
new ScalaType(
Manifest.classType(
classOf[List[_]],
Manifest.Object
)
)
private[json4s] val Object: ScalaType =
new ScalaType(Manifest.Object)
private[json4s] val MapStringObject: ScalaType =
new ScalaType(
Manifest.classType(
classOf[Map[_, _]],
Manifest.classType(classOf[String]),
Manifest.Object
)
)
private class PrimitiveScalaType(mf: Manifest[_]) extends ScalaType(mf) {
override val isPrimitive = true
}
private class CopiedScalaType(
mf: Manifest[_],
private[this] var _typeVars: Map[String, ScalaType],
override val isPrimitive: Boolean
) extends ScalaType(mf) {
override def typeVars: Map[String, ScalaType] = {
if (_typeVars == null)
_typeVars = Map.empty ++
erasure.getTypeParameters.map(_.asInstanceOf[TypeVariable[_]].getName).zip(typeArgs)
_typeVars
}
}
/* end optimization */
}
class ScalaType(val manifest: Manifest[_]) extends Equals {
import ScalaType.{types, CopiedScalaType}
val erasure: Class[_] = manifest.runtimeClass
val typeArgs: Seq[ScalaType] = manifest.typeArguments.map(ta => Reflector.scalaTypeOf(ta)) ++ (
if (erasure.isArray) List(Reflector.scalaTypeOf(erasure.getComponentType)) else Nil
)
private[this] var _typeVars: Map[String, ScalaType] = null
def typeVars: Map[String, ScalaType] = {
if (_typeVars == null)
_typeVars = Map.empty ++
erasure.getTypeParameters.map(_.asInstanceOf[TypeVariable[_]].getName).zip(typeArgs)
_typeVars
}
val isArray: Boolean = erasure.isArray
private[this] var _rawFullName: String = null
def rawFullName: String = {
if (_rawFullName == null)
_rawFullName = erasure.getName
_rawFullName
}
private[this] var _rawSimpleName: String = null
def rawSimpleName: String = {
if (_rawSimpleName == null) {
_rawSimpleName = safeSimpleName(erasure)
}
_rawSimpleName
}
lazy val simpleName: String =
rawSimpleName + (
if (typeArgs.nonEmpty) typeArgs.map(_.simpleName).mkString("[", ", ", "]")
else if (typeVars.nonEmpty) typeVars.map(_._2.simpleName).mkString("[", ", ", "]")
else ""
)
lazy val fullName: String =
rawFullName + (if (typeArgs.nonEmpty) typeArgs.map(_.fullName).mkString("[", ", ", "]") else "")
lazy val typeInfo: TypeInfo =
new TypeInfo(
erasure,
if (typeArgs.nonEmpty) Some(Reflector.mkParameterizedType(erasure, typeArgs.map(_.erasure))) else None
) with SourceType {
val scalaType: ScalaType = ScalaType.this
}
val isPrimitive = false
def isMap: Boolean = classOf[collection.immutable.Map[_, _]].isAssignableFrom(erasure) ||
classOf[collection.Map[_, _]].isAssignableFrom(erasure)
def isMutableMap: Boolean = classOf[collection.mutable.Map[_, _]].isAssignableFrom(erasure)
def isCollection: Boolean = erasure.isArray ||
classOf[Iterable[_]].isAssignableFrom(erasure) ||
classOf[java.util.Collection[_]].isAssignableFrom(erasure)
def isOption: Boolean = classOf[Option[_]].isAssignableFrom(erasure)
def isEither: Boolean = classOf[Either[_, _]].isAssignableFrom(erasure)
def <:<(that: ScalaType): Boolean = manifest <:< that.manifest
def >:>(that: ScalaType): Boolean = manifest >:> that.manifest
private def singletonField: Option[Field] = erasure.getFields.find(_.getName.equals(ScalaType.singletonFieldName))
def isSingleton: Boolean = singletonField.isDefined
def singletonInstance: Option[AnyRef] = singletonField.map(_.get(null))
override def hashCode(): Int = manifest.##
override def equals(obj: Any): Boolean = obj match {
case a: ScalaType => manifest == a.manifest
case _ => false
}
def canEqual(that: Any): Boolean = that match {
case s: ScalaType => manifest.canEqual(s.manifest)
case _ => false
}
def copy(
erasure: Class[_] = erasure,
typeArgs: Seq[ScalaType] = typeArgs,
typeVars: Map[String, ScalaType] = _typeVars
): ScalaType = {
/* optimization */
if (erasure == classOf[Int] || erasure == classOf[java.lang.Integer]) ScalaType.IntType
else if (erasure == classOf[Long] || erasure == classOf[java.lang.Long]) ScalaType.LongType
else if (erasure == classOf[Byte] || erasure == classOf[java.lang.Byte]) ScalaType.ByteType
else if (erasure == classOf[Short] || erasure == classOf[java.lang.Short]) ScalaType.ShortType
else if (erasure == classOf[Float] || erasure == classOf[java.lang.Float]) ScalaType.FloatType
else if (erasure == classOf[Double] || erasure == classOf[java.lang.Double]) ScalaType.DoubleType
else if (erasure == classOf[BigInt] || erasure == classOf[java.math.BigInteger]) ScalaType.BigIntType
else if (erasure == classOf[BigDecimal] || erasure == classOf[java.math.BigDecimal]) ScalaType.BigDecimalType
else if (erasure == classOf[Boolean] || erasure == classOf[java.lang.Boolean]) ScalaType.BooleanType
else if (erasure == classOf[String] || erasure == classOf[java.lang.String]) ScalaType.StringType
else if (erasure == classOf[java.util.Date]) ScalaType.DateType
else if (erasure == classOf[java.sql.Timestamp]) ScalaType.TimestampType
else if (erasure == classOf[Symbol]) ScalaType.SymbolType
else if (erasure == classOf[Number]) ScalaType.NumberType
else if (erasure == classOf[JObject]) ScalaType.JObjectType
else if (erasure == classOf[JArray]) ScalaType.JArrayType
else if (erasure == classOf[JValue]) ScalaType.JValueType
/* end optimization */
else {
val mf = ManifestFactory.manifestOf(erasure, typeArgs.map(_.manifest))
val st = new CopiedScalaType(mf, typeVars, isPrimitive)
if (typeArgs.isEmpty) types.replace(mf, st)
else st
}
}
override def toString: String = simpleName
}
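// A minimal usage sketch (illustrative only, not part of json4s): building a ScalaType
// from a Manifest and inspecting the erased class and its resolved type arguments.
object ScalaTypeUsageSketch {
  def demo(): Unit = {
    val listOfInt: ScalaType = ScalaType(manifest[List[Int]])
    println(listOfInt.erasure)                    // the erased runtime class of List
    println(listOfInt.typeArgs.map(_.simpleName)) // the resolved type arguments, e.g. List(int)
    println(listOfInt.isCollection)               // true: List is an Iterable
  }
}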
|
json4s/json4s
|
core/src/main/scala/org/json4s/reflect/ScalaType.scala
|
Scala
|
apache-2.0
| 10,149 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.javadsl.cluster
import scala.concurrent.duration._
import akka.actor.ActorSystem
import com.google.inject.AbstractModule
import com.google.inject.Inject
import com.lightbend.lagom.internal.cluster.JoinClusterImpl
class JoinClusterModule extends AbstractModule {
override def configure(): Unit = {
binder.bind(classOf[JoinCluster]).asEagerSingleton()
}
}
private[lagom] class JoinCluster @Inject() (system: ActorSystem) {
JoinClusterImpl.join(system)
}
|
edouardKaiser/lagom
|
cluster/javadsl/src/main/scala/com/lightbend/lagom/internal/javadsl/cluster/JoinCluster.scala
|
Scala
|
apache-2.0
| 580 |
/*
 * Copyright © 2016 Flipkart.com
 */
package com.flipkart.connekt.commons.iomodels
import com.fasterxml.jackson.annotation.JsonSubTypes.Type
import com.fasterxml.jackson.annotation.{JsonSubTypes, JsonTypeInfo}
import com.flipkart.connekt.commons.dao.HbaseSinkSupport
import com.flipkart.connekt.commons.entities.DeviceCallbackEvent
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.PROPERTY,
property = "type"
)
@JsonSubTypes(Array(
new Type(value = classOf[PNCallbackEvent], name = "PN"),
new Type(value = classOf[EmailCallbackEvent], name = "EMAIL"),
new Type(value = classOf[DeviceCallbackEvent], name = "DEVICE"),
new Type(value = classOf[SmsCallbackEvent], name = "SMS"),
new Type(value = classOf[WACallbackEvent], name = "WA"),
new Type(value = classOf[InboundMessageCallbackEvent], name = "INB")
))
abstract class CallbackEvent extends TopologyInputDatatype {
def contactId: String
def messageId: String
def eventId: String
def eventType: String
def contextId: String
def appName: String
def clientId: String
}
|
Flipkart/connekt
|
commons/src/main/scala/com/flipkart/connekt/commons/iomodels/CallbackEvent.scala
|
Scala
|
mit
| 1,650 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.{Sigmoid => SigmoidOps}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.tf.Context
import org.tensorflow.framework.NodeDef
import scala.reflect.ClassTag
class Sigmoid extends TensorflowOpsLoader {
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
SigmoidOps[T]()
}
}
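// Reading note (an assumption about the surrounding loader framework): each
// TensorflowOpsLoader is presumably looked up by the TensorFlow op name ("Sigmoid"
// here) while importing a graph, and `build` maps the NodeDef to the equivalent
// BigDL module. A minimal sketch of the module it produces, ignoring the TF plumbing:
//
//   val sigmoid = com.intel.analytics.bigdl.dllib.nn.Sigmoid[Float]()
//   // sigmoid.forward(input) applies 1 / (1 + exp(-x)) elementwise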
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/tf/loaders/Sigmoid.scala | Scala | apache-2.0 | 1,215 |
package scala.meta.internal.semanticdb.scalac
import scala.collection.mutable
import scala.meta.internal.inputs._
import scala.meta.internal.io.PathIO
import scala.meta.internal.scalacp._
import scala.meta.internal.semanticdb._
import scala.meta.internal.{semanticdb => s}
import scala.reflect.internal._
import scala.reflect.internal.util._
import scala.reflect.internal.{Flags => gf}
import scala.reflect.io.{PlainFile => GPlainFile}
import scala.{meta => m}
import scala.meta.internal.semanticdb.Scala._
trait TextDocumentOps { self: SemanticdbOps =>
def validateCompilerState(): Unit = {
if (!g.settings.Yrangepos.value) {
sys.error("the compiler instance must have -Yrangepos enabled")
}
if (g.useOffsetPositions) {
sys.error("the compiler instance must use range positions")
}
if (!g.settings.plugin.value.exists(_.contains("semanticdb"))) {
sys.error("the compiler instance must use the semanticdb plugin")
}
val analyzerClassName = g.analyzer.getClass.getName
if (!analyzerClassName.contains("HijackAnalyzer")) {
sys.error(
s"the compiler instance must use a hijacked analyzer, instead of $analyzerClassName")
}
if (g.currentRun.phaseNamed("typer") != NoPhase) {
if (g.phase.id < g.currentRun.phaseNamed("typer").id) {
sys.error("the compiler phase must be not earlier than typer")
}
} else {
sys.error("the compiler instance does not have a typer phase")
}
if (g.currentRun.phaseNamed("patmat") != NoPhase) {
if (g.phase.id > g.currentRun.phaseNamed("patmat").id) {
sys.error("the compiler phase must be not later than patmat")
}
} else {
// do nothing
}
}
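  // Reading note (assumption): this guard is presumably run once before any unit is
  // converted, so that a missing -Yrangepos flag or a non-hijacked analyzer fails fast
  // with a descriptive error instead of producing silently incomplete SemanticDB output.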
implicit class XtensionCompilationUnitDocument(unit: g.CompilationUnit) {
def toTextDocument: s.TextDocument = {
val binders = mutable.Set[m.Position]()
val occurrences = mutable.Map[m.Position, String]()
val symbols = mutable.Map[String, s.SymbolInformation]()
val synthetics = mutable.Map[m.Position, Inferred]().withDefaultValue(Inferred())
      val isVisited = mutable.Set.empty[g.Tree] // macro expandees can have cycles, keep track of visited nodes.
val todo = mutable.Set[m.Name]() // names to map to global trees
val mstarts = mutable.Map[Int, m.Name]() // start offset -> tree
val mends = mutable.Map[Int, m.Name]() // end offset -> tree
val margnames = mutable.Map[Int, List[m.Name]]() // start offset of enclosing apply -> its arg names
val mwithins = mutable.Map[m.Tree, m.Name]() // name of enclosing member -> name of private/protected within
val mwithinctors = mutable.Map[m.Tree, m.Name]() // name of enclosing class -> name of private/protected within for primary ctor
val mctordefs = mutable.Map[Int, m.Name]() // start offset of ctor -> ctor's anonymous name
val mctorrefs = mutable.Map[Int, m.Name]() // start offset of new/init -> new's anonymous name
locally {
object traverser extends m.Traverser {
private def indexName(mname: m.Name): Unit = {
todo += mname
val tok = mname.tokens.dropWhile(_.is[m.Token.LeftParen]).headOption
val mstart1 = tok.map(_.start).getOrElse(mname.pos.start)
val mend1 = tok.map(_.end).getOrElse(mname.pos.end)
if (mstarts.contains(mstart1)) {
val details = syntaxAndPos(mname) + " " + syntaxAndPos(mstarts(mstart1))
sys.error(s"ambiguous mstart $details")
}
if (mends.contains(mend1)) {
val details = syntaxAndPos(mname) + " " + syntaxAndPos(mends(mend1))
sys.error(s"ambiguous mend $details")
}
mstarts(mstart1) = mname
mends(mend1) = mname
}
private def indexArgNames(mapp: m.Tree, mnames: List[m.Name]): Unit = {
if (mnames.isEmpty) return
todo ++= mnames
val mstart1 = mapp.tokens
.dropWhile(_.is[m.Token.LeftParen])
.headOption
.map(_.start)
.getOrElse(-1)
// only add names for the top-level term.apply of a curried function application.
if (!margnames.contains(mstart1))
margnames(mstart1) = mnames
}
private def indexWithin(mname: m.Name.Indeterminate): Unit = {
todo += mname
val mencl = mname.parent.flatMap(_.parent).get
mencl match {
case mencl: m.Ctor.Primary =>
val menclDefn = mencl.parent.get.asInstanceOf[m.Member]
val menclName = menclDefn.name
if (mwithinctors.contains(menclName)) {
val details = syntaxAndPos(mname) + " " + syntaxAndPos(mwithinctors(menclName))
sys.error(s"ambiguous mwithinctors $details")
}
mwithinctors(menclName) = mname
case _ =>
def findBinder(pat: m.Pat) =
pat.collect { case m.Pat.Var(name) => name }.head
val menclName = mencl match {
case mtree: m.Member => mtree.name
case m.Decl.Val(_, pat :: Nil, _) => findBinder(pat)
case m.Decl.Var(_, pat :: Nil, _) => findBinder(pat)
case m.Defn.Val(_, pat :: Nil, _, _) => findBinder(pat)
case m.Defn.Var(_, pat :: Nil, _, _) => findBinder(pat)
}
if (mwithins.contains(menclName)) {
val details = syntaxAndPos(mname) + " " + syntaxAndPos(mwithins(menclName))
sys.error(s"ambiguous mwithins $details")
}
mwithins(menclName) = mname
}
}
override def apply(mtree: m.Tree): Unit = {
mtree match {
case mtree @ m.Term.Apply(_, margs) =>
def loop(term: m.Term): List[m.Term.Name] = term match {
case m.Term.Apply(mfn, margs) =>
margs.toList.collect {
case m.Term.Assign(mname: m.Term.Name, _) => mname
} ++ loop(mfn)
case _ => Nil
}
indexArgNames(mtree, loop(mtree))
case mtree @ m.Mod.Private(mname: m.Name.Indeterminate) =>
indexWithin(mname)
case mtree @ m.Mod.Protected(mname: m.Name.Indeterminate) =>
indexWithin(mname)
case mtree @ m.Importee.Rename(mname, mrename) =>
indexName(mname)
return // NOTE: ignore mrename for now, we may decide to make it a binder
case mtree @ m.Name.Anonymous() =>
()
case mtree: m.Ctor =>
mctordefs(mtree.pos.start) = mtree.name
case mtree: m.Term.New =>
mctorrefs(mtree.pos.start) = mtree.init.name
case mtree: m.Init =>
mctorrefs(mtree.pos.start) = mtree.name
case mtree: m.Name =>
indexName(mtree)
case _ =>
()
}
super.apply(mtree)
}
}
traverser(unit.toSource)
}
locally {
object traverser extends g.Traverser {
private def tryFindMtree(gtree: g.Tree): Unit = {
def success(mtree: m.Name, gsym0: g.Symbol): Unit = {
// We cannot be guaranteed that all symbols have a position, see
// https://github.com/scalameta/scalameta/issues/665
// Instead of crashing with "unsupported file", we ignore these cases.
if (gsym0 == null) return
if (gsym0.isUseless) return
if (mtree.pos == m.Position.None) return
if (occurrences.contains(mtree.pos)) return
val gsym = {
def isClassRefInCtorCall = gsym0.isConstructor && mtree.isNot[m.Name.Anonymous]
if (gsym0 != null && isClassRefInCtorCall) gsym0.owner
else gsym0
}
val symbol = gsym.toSemantic
if (symbol == Symbols.None) return
todo -= mtree
if (mtree.isDefinition) {
val isToplevel = gsym.owner.hasPackageFlag
if (isToplevel) {
unit.source.file match {
case gfile: GPlainFile =>
// FIXME: https://github.com/scalameta/scalameta/issues/1396
val scalaRelPath = m.AbsolutePath(gfile.file).toRelative(config.sourceroot)
val semanticdbRelPath = scalaRelPath + ".semanticdb"
val suri = PathIO.toUnix(semanticdbRelPath.toString)
val ssymbol = symbol
val sinfo = s.SymbolInformation(symbol = ssymbol)
index.append(suri, List(sinfo))
case _ =>
()
}
}
binders += mtree.pos
occurrences(mtree.pos) = symbol
if (config.symbols.isOn) {
def saveSymbol(gs: g.Symbol): Unit = {
if (gs.isUseful) {
symbols(gs.toSemantic) = gs.toSymbolInformation(SymlinkChildren)
}
}
saveSymbol(gsym)
if (gsym.isClass && !gsym.isTrait) {
val gprim = gsym.primaryConstructor
saveSymbol(gprim)
gprim.info.paramss.flatten.foreach(saveSymbol)
}
if (gsym.isGetter) {
val gsetter = gsym.setterIn(gsym.owner)
saveSymbol(gsetter)
gsetter.info.paramss.flatten.foreach(saveSymbol)
}
if (gsym.isUsefulField && gsym.isMutable) {
val getterInfo = symbols(symbol)
val setterInfos = Synthetics.setterInfos(getterInfo, SymlinkChildren)
setterInfos.foreach { info =>
val msymbol = info.symbol
symbols(msymbol) = info
}
}
}
} else {
val selectionFromStructuralType = gsym.owner.isRefinementClass
if (!selectionFromStructuralType) occurrences(mtree.pos) = symbol
}
def tryWithin(map: mutable.Map[m.Tree, m.Name], gsym0: g.Symbol): Unit = {
if (map.contains(mtree)) {
val gsym = gsym0.getterIn(gsym0.owner).orElse(gsym0)
if (!gsym.hasAccessBoundary) return
val within1 = gsym.privateWithin
val within2 = within1.owner.info.member({
if (within1.name.isTermName) within1.name.toTypeName
else within1.name.toTermName
})
success(map(mtree), wrapAlternatives("<within " + symbol + ">", within1, within2))
}
}
tryWithin(mwithins, gsym)
tryWithin(mwithinctors, gsym.primaryConstructor)
}
def tryMstart(start: Int): Boolean = {
if (!mstarts.contains(start)) return false
success(mstarts(start), gtree.symbol)
return true
}
def tryMend(end: Int): Boolean = {
if (!mends.contains(end)) return false
success(mends(end), gtree.symbol)
return true
}
def tryMpos(start: Int, end: Int): Boolean = {
if (!mstarts.contains(start)) return false
val mtree = mstarts(start)
if (mtree.pos.end != end) return false
success(mtree, gtree.symbol)
return true
}
if (gtree.pos == null || gtree.pos == NoPosition) return
val gstart = gtree.pos.start
val gpoint = gtree.pos.point
val gend = gtree.pos.end
if (margnames.contains(gstart) || margnames.contains(gpoint)) {
(margnames.get(gstart) ++ margnames.get(gpoint)).flatten.foreach(margname => {
if (gtree.symbol != null && gtree.symbol.isMethod) {
val gsym = gtree.symbol.paramss.flatten.find(_.name.decoded == margname.value)
gsym.foreach(success(margname, _))
}
})
}
if (mctordefs.contains(gstart)) {
val mname = mctordefs(gstart)
gtree match {
case gtree: g.Template =>
val gctor =
gtree.body.find(x => Option(x.symbol).exists(_.isPrimaryConstructor))
success(mname, gctor.map(_.symbol).getOrElse(g.NoSymbol))
case gtree: g.DefDef if gtree.symbol.isConstructor =>
success(mname, gtree.symbol)
case _ =>
}
}
if (mctorrefs.contains(gpoint)) {
val mname = mctorrefs(gpoint)
gtree match {
case g.Select(_, g.nme.CONSTRUCTOR) => success(mname, gtree.symbol)
case _ =>
}
}
// Ideally, we'd like a perfect match when gtree.pos == mtree.pos.
// Unfortunately, this is often not the case as demonstrated by a bunch of cases above and below.
if (tryMpos(gstart, gend)) return
val gsym = gtree.symbol
gtree match {
case gtree: g.ValDef if gsym.isSelfParameter =>
tryMstart(gstart)
case gtree: g.MemberDef if gtree.symbol.isSynthetic || gtree.symbol.isArtifact =>
if (!gsym.isSemanticdbLocal && !gsym.isUseless) {
symbols(gsym.toSemantic) = gsym.toSymbolInformation(SymlinkChildren)
}
case gtree: g.PackageDef =>
// NOTE: capture PackageDef.pid instead
()
case gtree: g.ModuleDef if gtree.name == g.nme.PACKAGE =>
// NOTE: if a package object comes first in the compilation unit
// then its positions are completely mental, so we just hack around
tryMstart(gpoint + 7)
tryMstart(gpoint)
case gtree: g.ValDef =>
val gsym = gtree.symbol
if (!gsym.isMethod && gsym.getterIn(gsym.owner) != g.NoSymbol) {
// FIXME: https://github.com/scalameta/scalameta/issues/1538
// Skip the field definition in favor of the associated getter.
// This will make sure that val/var class parameters are consistently
// resolved to getter symbols both as definition and references.
} else {
tryMstart(gstart)
tryMstart(gpoint)
}
case gtree: g.MemberDef =>
tryMstart(gpoint)
case gtree: g.DefTree =>
tryMstart(gpoint)
case gtree: g.This =>
tryMstart(gpoint)
case gtree: g.Super =>
tryMend(gend - 1)
case gtree: g.Select if gtree.symbol == g.definitions.NilModule =>
// NOTE: List() gets desugared into mkAttributedRef(NilModule)
tryMstart(gstart)
case gtree: g.RefTree =>
def prohibited(name: String) = {
name.contains(g.nme.DEFAULT_GETTER_STRING)
}
if (prohibited(gtree.name.decoded)) return
tryMstart(gpoint)
case gtree: g.Import =>
val sels = gtree.selectors.flatMap { sel =>
if (sel.name == g.nme.WILDCARD) {
Nil
} else {
mstarts.get(sel.namePos).map(mname => (sel.name, mname))
}
}
sels.foreach {
case (gname, mname) =>
val import1 = gtree.expr.tpe.member(gname.toTermName)
val import2 = gtree.expr.tpe.member(gname.toTypeName)
success(
mname,
wrapAlternatives(
"<import " + gtree.expr + "." + gname + ">",
import1,
import2))
}
case _ =>
}
}
private def tryFindInferred(gtree: g.Tree): Unit = {
if (!config.synthetics.isOn) return
import scala.meta.internal.semanticdb.scalac.{AttributedSynthetic => S}
def success(pos: m.Position, f: Inferred => Inferred): Unit = {
synthetics(pos) = f(synthetics(pos))
}
if (!gtree.pos.isRange) return
object ApplySelect {
def unapply(tree: g.Tree): Option[g.Select] = Option(tree).collect {
case g.Apply(select: g.Select, _) => select
case select: g.Select => select
}
}
object ForComprehensionImplicitArg {
private def isForComprehensionSyntheticName(select: g.Select): Boolean = {
select.pos == select.qualifier.pos && (select.name == g.nme.map ||
select.name == g.nme.withFilter ||
select.name == g.nme.flatMap ||
select.name == g.nme.foreach)
}
private def findSelect(t: g.Tree): Option[g.Tree] = t match {
case g.Apply(fun, _) => findSelect(fun)
case g.TypeApply(fun, _) => findSelect(fun)
case s @ g.Select(qual, _) if isForComprehensionSyntheticName(s) => Some(qual)
case _ => None
}
def unapply(gfun: g.Apply): Option[g.Tree] = findSelect(gfun)
}
gtree match {
case gview: g.ApplyImplicitView =>
val pos = gtree.pos.toMeta
val syntax = showSynthetic(gview.fun) + "(" + S.star + ")"
success(pos, _.copy(conversion = Some(syntax)))
isVisited += gview.fun
case gimpl: g.ApplyToImplicitArgs =>
val args = S.mkString(gimpl.args.map(showSynthetic), ", ")
gimpl.fun match {
case gview: g.ApplyImplicitView =>
isVisited += gview
val pos = gtree.pos.toMeta
val syntax = showSynthetic(gview.fun) + "(" + S.star + ")(" + args + ")"
success(pos, _.copy(conversion = Some(syntax)))
case ForComprehensionImplicitArg(qual) =>
val morePrecisePos = qual.pos.withStart(qual.pos.end).toMeta
val syntax = S("(") + S.star + ")" + "(" + args + ")"
success(morePrecisePos, _.copy(args = Some(syntax)))
case gfun =>
val morePrecisePos = gimpl.pos.withStart(gimpl.pos.end).toMeta
val syntax = S("(") + args + ")"
success(morePrecisePos, _.copy(args = Some(syntax)))
}
case g.TypeApply(fun, targs @ List(targ, _*)) =>
if (targ.pos.isRange) return
val morePrecisePos = fun.pos.withStart(fun.pos.end).toMeta
val args = S.mkString(targs.map(showSynthetic), ", ")
val syntax = S("[") + args + "]"
success(morePrecisePos, _.copy(targs = Some(syntax)))
case ApplySelect(select @ g.Select(qual, nme)) if isSyntheticName(select) =>
val pos = qual.pos.withStart(qual.pos.end).toMeta
val symbol = select.symbol.toSemantic
val name = nme.decoded
val names = List(SyntheticRange(0, name.length, symbol))
val syntax = S(".") + S(nme.decoded, names)
success(pos, _.copy(select = Some(syntax)))
case _ =>
// do nothing
}
}
override def traverse(gtree: g.Tree): Unit = {
if (isVisited(gtree)) return else isVisited += gtree
gtree.attachments.all.foreach {
case att: g.analyzer.MacroExpansionAttachment =>
traverse(att.expandee)
case _ =>
}
gtree match {
case ConstfoldOf(original) =>
traverse(original)
case ClassOf(original) =>
traverse(original)
case NewArrayOf(original) =>
traverse(original)
case SingletonTypeTreeOf(original) =>
traverse(original)
case CompoundTypeTreeOf(original) =>
traverse(original)
case ExistentialTypeTreeOf(original) =>
traverse(original)
case AnnotatedOf(original) =>
traverse(original)
case SelfTypeOf(original) =>
traverse(original)
case SelectOf(original) =>
traverse(original)
case g.Function(params, body) if params.exists(_.name.decoded.startsWith("x$")) =>
traverse(body)
case gtree: g.TypeTree if gtree.original != null =>
traverse(gtree.original)
case gtree: g.TypeTreeWithDeferredRefCheck =>
traverse(gtree.check())
case gtree: g.MemberDef =>
gtree.symbol.annotations.map(ann => traverse(ann.original))
tryFindMtree(gtree)
case _: g.Apply | _: g.TypeApply =>
tryFindInferred(gtree)
case select: g.Select if isSyntheticName(select) =>
tryFindMtree(select.qualifier)
tryFindInferred(select)
case _ =>
tryFindMtree(gtree)
}
super.traverse(gtree)
}
}
traverser.traverse(unit.body)
}
val input = unit.source.toInput
val finalSymbols = symbols.values.toList
val finalOccurrences = {
occurrences.flatMap {
case (pos, sym) =>
sym.asMulti.map { flatSym =>
val role =
if (binders.contains(pos)) s.SymbolOccurrence.Role.DEFINITION
else s.SymbolOccurrence.Role.REFERENCE
s.SymbolOccurrence(Some(pos.toRange), flatSym, role)
}
}.toList
}
val diagnostics = unit.reportedDiagnostics(mstarts)
val finalSynthetics = synthetics.toIterator.map {
case (pos, synthetic) => synthetic.toSynthetic(input, pos)
}.toList
s.TextDocument(
schema = s.Schema.SEMANTICDB4,
uri = unit.source.toUri,
text = unit.source.toText,
md5 = unit.source.toMD5,
language = s.Language.SCALA,
symbols = finalSymbols,
occurrences = finalOccurrences,
diagnostics = diagnostics,
synthetics = finalSynthetics
)
}
}
private def isSyntheticName(select: g.Select): Boolean =
select.pos == select.qualifier.pos &&
(select.name == g.nme.apply ||
select.name == g.nme.foreach ||
select.name == g.nme.withFilter ||
select.name == g.nme.flatMap ||
select.name == g.nme.map ||
select.name == g.nme.unapplySeq ||
select.name == g.nme.unapply)
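  // Informal example for the check above (not from the original source): in a desugared
  // `for (x <- xs) yield x + 1`, the synthesised `xs.map(...)` Select is positioned
  // exactly on its qualifier `xs`, so names like `map`, `flatMap`, `withFilter`, `apply`
  // and `unapply` sitting on the qualifier's position are treated as compiler-generated
  // rather than user-written.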
private def syntaxAndPos(gtree: g.Tree): String = {
if (gtree == g.EmptyTree) "\\u001b[1;31mEmptyTree\\u001b[0m"
else {
val text =
        gtree.toString.substring(0, Math.min(45, gtree.toString.length)).replace("\n", " ")
s"$text [${gtree.pos.start}..${gtree.pos.end})"
}
}
private def syntaxAndPos(mtree: m.Tree): String = {
s"${mtree.pos.syntax} $mtree"
}
private def wrapAlternatives(name: String, alts: g.Symbol*): g.Symbol = {
val normalizedAlts = {
val alts1 = alts.toList.filter(_.exists)
val alts2 = alts1.map(alt => if (alt.isModuleClass) alt.asClass.module else alt)
alts2.distinct
}
normalizedAlts match {
case List(sym) =>
sym
case normalizedAlts =>
val wrapper = g.NoSymbol.newTermSymbol(g.TermName(name))
wrapper.setFlag(gf.OVERLOADED)
wrapper.setInfo(g.OverloadedType(g.NoType, normalizedAlts))
}
}
}
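// Usage sketch (an assumption, not part of this file): inside the semanticdb-scalac
// plugin, a phase running after typer would presumably convert and persist each
// compilation unit roughly like:
//
//   val doc: s.TextDocument = unit.toTextDocument   // via the implicit class above
//   // persistDocument(outputPathFor(unit), doc)    // hypothetical writer helper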
| MasseGuillaume/scalameta | semanticdb/scalac/library/src/main/scala/scala/meta/internal/semanticdb/scalac/TextDocumentOps.scala | Scala | bsd-3-clause | 24,628 |