code | repo_name | path | language | license | size
---|---|---|---|---|---
string (5–1M chars) | string (5–109 chars) | string (6–208 chars) | 1 class (value) | 15 classes (values) | int64 (5–1M)
package info.fotm.clustering.RMClustering
import info.fotm.clustering.Clusterer
import info.fotm.clustering.Clusterer.{V, Cluster}
//import scala.util.Random
// https://en.wikipedia.org/wiki/K-means_clustering
class KMeansClusterer extends Clusterer {
val rng = new scala.util.Random(100)
def clusterize(input: Cluster, clustersCount: Int): Set[Cluster] =
{
val means = initialize_plusplus(input, clustersCount)
//println(means)
//println(Set(input))
process(Set(input),means)
}
def process(clusters: Set[Cluster], means: Seq[V]): Set[Cluster] =
{
//println("Process method")
//println("clusters:"+clusters)
val newClusters = assignment(clusters,means)
//println("newclusters:"+newClusters)
if (newClusters!=clusters)
{
val newMeans = update(newClusters)
//println(newMeans)
process(newClusters,newMeans)
}
else
newClusters
}
/**
* Produces the initial means by sampling clustersCount random points from the input.
* @param input points to cluster
* @param clustersCount number of means to produce
* @return the initial means
*/
def initialize(input: Cluster, clustersCount: Int): Seq[V] =
{
rng.shuffle(input).take(clustersCount)
}
//=====================Plus Plus section=======================
def initialize_plusplus(input: Cluster, clustersCount: Int): Seq[V] =
{
val firstCenterIndex = rng.nextInt(input.length)
val centers = input(firstCenterIndex) :: Nil
findCenters(input.filterNot(x => x == centers(0)), centers, clustersCount)
}
/**
* Recursively adds centers chosen from input with probability proportional to the squared
* distance to the nearest existing center (the k-means++ seeding rule), until centersCount
* centers have been selected.
* @param input candidate points (already-chosen centers excluded)
* @param centers centers selected so far
* @param centersCount total number of centers to select
* @return the selected centers
*/
def findCenters(input: Cluster, centers: List[V], centersCount: Int): Seq[V] =
{
if (centers.length < centersCount && input.length > 0)
{
//? Oddly, fold here requires startValue to be a supertype of the input collection's elements
//? https://coderwall.com/p/4l73-a/scala-fold-foldleft-and-foldright
// inputDistancesToCenters(i) is the (unnormalized) weight of the i-th input point being picked as the next center
val inputDistancesToCenters = input.map(x => centers.foldLeft(Double.MaxValue){
(acc,c) => acc.min(distance(c,x))
})
val newCenter = getRandomValue(input, inputDistancesToCenters)
findCenters(input.filterNot(x => x == newCenter), newCenter :: centers, centersCount)
}
else
centers
}
def getRandomValue(input: Cluster, probabilities: Seq[Double]): V =
{
// overflow protection coefficient
val max = probabilities.max
// make distribution function from probabilities: (5,2,4,5) -> (5,7,11,16)/max = (5,7,11,16)/5
val distribution = probabilities.foldLeft(List[Double](0)){
(list,x) => list.head + x / max :: list
}.init.reverse
val randomInterval = distribution.last
val rand = rng.nextDouble() * randomInterval
// find the index corresponding to rand and return the input element at that index
input(distribution.span(x => x < rand)._1.length)
}
//=====================/Plus Plus section=======================
def assignment(input: Set[Cluster], means: Seq[V]): Set[Cluster] =
{
input.toSeq.flatten.groupBy(v=>means.minBy(distance(_,v))).mapValues(s=>s.toVector).values.toSet
}
def update(clusters: Set[Cluster]): Seq[V] =
{
clusters.map(c=> div(c.reduce(sumOf),c.length)).toSeq
}
def distance(v1: V, v2: V): Double = v1.sqrDistTo(v2) //v1.zip(v2).map(x=>Math.pow(x._1-x._2,2)).sum
def sumOf(v1: V, v2: V): V = v1 + v2 //v1.zip(v2).map({case(x1,x2)=>x1+x2})
def div(v: V, byValue: Double): V = v/ byValue // v.map(x=>x/byValue)
}
| temeer/Fotm-Tem-Clone | core/src/main/scala/info/fotm/clustering/RMClustering/KMeansClusterer.scala | Scala | mit | 3,822 |
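For reference, the D²-weighted sampling that findCenters and getRandomValue implement can be sketched self-contained on plain (Double, Double) points; the data and helper names below are illustrative, not part of the original project:

object KMeansPlusPlusSketch extends App {
  type P = (Double, Double)
  val rng = new scala.util.Random(100)

  def sqrDist(a: P, b: P): Double = {
    val (dx, dy) = (a._1 - b._1, a._2 - b._2)
    dx * dx + dy * dy
  }

  // Pick the next center with probability proportional to the squared
  // distance to the nearest already-chosen center (the "D^2" weighting).
  def nextCenter(points: Seq[P], centers: Seq[P]): P = {
    val weights = points.map(p => centers.map(sqrDist(_, p)).min)
    val cumulative = weights.scanLeft(0.0)(_ + _).tail
    val r = rng.nextDouble() * cumulative.last
    points(cumulative.indexWhere(_ >= r))
  }

  val points = Seq((0.0, 0.0), (0.1, 0.2), (5.0, 5.0), (5.1, 4.9), (9.0, 0.5))
  val first = points(rng.nextInt(points.length))
  val second = nextCenter(points.filterNot(_ == first), Seq(first))
  println(s"seeds: $first, $second") // far-apart points are the likely picks
}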
import sbt._
import Keys._
object FPInScalaBuild extends Build {
val opts = Project.defaultSettings ++ Seq(
scalaVersion := "2.10.3",
resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
)
lazy val root =
Project(id = "fpinscala",
base = file("."),
settings = opts ++ Seq(
onLoadMessage ~= (_ + nio2check())
)) aggregate (chapterCode, exercises, answers)
lazy val chapterCode =
Project(id = "chapter-code",
base = file("chaptercode"),
settings = opts)
lazy val exercises =
Project(id = "exercises",
base = file("exercises"),
settings = opts)
lazy val answers =
Project(id = "answers",
base = file("answers"),
settings = opts)
def nio2check(): String = {
val cls = "java.nio.channels.AsynchronousFileChannel"
try {Class.forName(cls); ""}
catch {case _: ClassNotFoundException =>
("\\nWARNING: JSR-203 \\"NIO.2\\" (" + cls + ") not found.\\n" +
"You are probably running Java < 1.7; answers will not compile.\\n" +
"You seem to be running " + System.getProperty("java.version") + ".\\n" +
"Try `project exercises' before compile, or upgrading your JDK.")
}
}
}
| karolchmist/fpinscala | project/Build.scala | Scala | mit | 1,292 |
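The Build trait used above is from sbt 0.13 and was removed in sbt 1.x; a rough build.sbt equivalent might look like the sketch below (the Scala version and the HTTPS resolver URL are assumptions):

// build.sbt, a sketch of the same multi-project layout for sbt 1.x
ThisBuild / scalaVersion := "2.12.18"
ThisBuild / resolvers += "Typesafe Repository" at "https://repo.typesafe.com/typesafe/releases/"

lazy val chapterCode = project in file("chaptercode")
lazy val exercises = project in file("exercises")
lazy val answers = project in file("answers")

lazy val root = (project in file("."))
  .aggregate(chapterCode, exercises, answers)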
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package bootstrap.liftweb
package checks
import net.liftweb.common._
import com.normation.rudder.domain.RudderDit
import com.normation.inventory.ldap.core.InventoryDit
import com.normation.ldap.sdk.LDAPConnectionProvider
import javax.servlet.UnavailableException
import com.normation.rudder.domain.logger.ApplicationLogger
import com.normation.ldap.sdk.RwLDAPConnection
/**
* This class checks that all DIT entries needed for the application
* to work are present in the LDAP directory.
* If they are not, it will try to create them, and abort
* application startup if that is not possible.
*/
class CheckDIT(
pendingNodesDit:InventoryDit
, acceptedDit:InventoryDit
, removedDit:InventoryDit
, rudderDit:RudderDit
, ldap:LDAPConnectionProvider[RwLDAPConnection]
) extends BootstrapChecks {
@throws(classOf[ UnavailableException ])
override def checks() : Unit = {
def FAIL(msg:String) = {
ApplicationLogger.error(msg)
throw new UnavailableException(msg)
}
//start to check that an LDAP connection is up and running
ldap.map { con =>
con.backed.isConnected
} match {
case e:EmptyBox => FAIL("Can not open LDAP connection")
case _ => //ok
}
//check that all base DN entries are already in the LDAP
val baseDns = pendingNodesDit.BASE_DN :: pendingNodesDit.SOFTWARE_BASE_DN ::
acceptedDit.BASE_DN :: acceptedDit.SOFTWARE_BASE_DN ::
removedDit.BASE_DN :: removedDit.SOFTWARE_BASE_DN ::
rudderDit.BASE_DN :: Nil
ldap.map { con =>
(for {
dn <- baseDns
} yield {
(con.get(dn, "1:1"), dn)
}).filter { //only keep those on error, and check if the resulting list is empty
case(res,dn) => res.isEmpty
} match {
case Nil => //ok
case list =>
FAIL { "There is some required entries missing in the LDAP directory: %s".format(
list.map {
case (Failure(m,_,_), dn) => "%s (error message: %s)".format(dn.toString, m)
case (Empty,dn) => "%s (no error message)".format(dn.toString)
case _ => "" //strange...
}.mkString(" | ")
)}
}
} match {
case Failure(m,_,_) => FAIL("Error when checking for mandatory entries on the DIT. Message was: %s".format(m))
case Empty => FAIL("Error when checking for mandatory entries on the DIT. No message was left.")
case Full(_) => //ok
}
//now, check that all DIT entries are here, add missing ones
val ditEntries = (pendingNodesDit.getDITEntries ++ acceptedDit.getDITEntries ++ removedDit.getDITEntries ++ rudderDit.getDITEntries).toSet
ldap.map { con =>
(for {
e <- ditEntries.toList
} yield {
if(con.exists(e.dn)) {
ApplicationLogger.debug("DIT entry '%s' already in LDAP directory, nothing to do".format(e.dn))
(Full(e),e.dn)
} else {
ApplicationLogger.info("Missing DIT entry '%s', trying to add it".format(e.dn))
(con.save(e), e.dn)
}
}).filter { //only keep those on error, and check if the resulting list is empty
case (result, dn) => result.isEmpty
} match {
case Nil => //ok
case list =>
FAIL { "There is some required entries missing in the LDAP directory: %s".format(
list.map {
case (Failure(m,_,_), dn) => "%s (error message: %s)".format(dn.toString, m)
case (Empty,dn) => "%s (no error message)".format(dn.toString)
case _ => "" //strange...
}.mkString(" | ")
)}
}
} match {
case Failure(m,_,_) => FAIL("Error when checking for mandatory entries on the DIT. Message was: %s".format(m))
case Empty => FAIL("Error when checking for mandatory entries on the DIT. No message was left.")
case Full(_) => //ok
}
ApplicationLogger.info("All the required DIT entries are present in the LDAP directory")
}
}
| jooooooon/rudder | rudder-web/src/main/scala/bootstrap/liftweb/checks/CheckDIT.scala | Scala | agpl-3.0 | 5,655 |
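The fail-fast shape of checks() above (run a probe, pattern-match the result, abort startup on anything but success) can be sketched without the Lift Box dependency by using Either; everything below is illustrative, not Rudder code:

object CheckOrFailSketch extends App {
  final case class StartupFailure(msg: String) extends RuntimeException(msg)

  def FAIL(msg: String): Nothing = {
    System.err.println(msg) // stands in for ApplicationLogger.error
    throw StartupFailure(msg)
  }

  // A probe returns Right on success, Left with a message otherwise.
  def probeConnection(): Either[String, Unit] = Right(())
  def probeEntries(): Either[String, Unit] = Left("missing entry: ou=Nodes")

  probeConnection() match {
    case Left(m) => FAIL(s"Can not open LDAP connection: $m")
    case Right(_) => // ok
  }
  probeEntries() match {
    case Left(m) => FAIL(s"Required entries missing: $m")
    case Right(_) => // ok
  }
}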
package store
import scala.concurrent.{ExecutionContext, Await, Future}
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.{Failure, Success}
object Simulator extends App {
implicit val ec = ExecutionContext.global
val catalog = Catalog()
val store = new Store(catalog)
val receipts = Future.sequence( createReceipts(store, shoppers = 1000) )
receipts onComplete {
case Success(shoppers) =>
println(s"*** Number of shoppers: ${shoppers.length}! ***")
require(shoppers.length == 1000)
case Failure(failure) => println(s"*** Simulation failed: ${failure.getMessage} ***")
}
Await.ready(receipts, 3 seconds)
}
| objektwerks/store | src/main/scala/store/Simulator.scala | Scala | apache-2.0 | 678 |
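Future.sequence, used above, turns Seq[Future[T]] into Future[Seq[T]] and fails fast if any member fails; a minimal self-contained illustration:

import scala.concurrent.{Await, ExecutionContext, Future}
import scala.concurrent.duration._

object SequenceSketch extends App {
  implicit val ec: ExecutionContext = ExecutionContext.global
  val parts: Seq[Future[Int]] = (1 to 3).map(i => Future(i * 10))
  val all: Future[Seq[Int]] = Future.sequence(parts)
  println(Await.result(all, 1.second)) // Vector(10, 20, 30)
}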
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / / / _ |    (c) 2002-2010, LAMP/EPFL              **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
// generated by genprod on Thu Apr 29 17:52:16 CEST 2010
package scala
object Product19 {
def unapply[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](x: Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]): Option[Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] =
Some(x)
}
/** Product19 is a cartesian product of 19 components.
*
* @since 2.3
*/
trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19] extends Product {
/**
* The arity of this product.
* @return 19
*/
override def productArity = 19
/**
* Returns the n-th projection of this product if 0 <= n < productArity,
* otherwise throws an IndexOutOfBoundsException.
*
* @param n number of the projection to be returned
* @return same as _(n+1)
* @throws IndexOutOfBoundsException
*/
@throws(classOf[IndexOutOfBoundsException])
override def productElement(n: Int) = n match {
case 0 => _1
case 1 => _2
case 2 => _3
case 3 => _4
case 4 => _5
case 5 => _6
case 6 => _7
case 7 => _8
case 8 => _9
case 9 => _10
case 10 => _11
case 11 => _12
case 12 => _13
case 13 => _14
case 14 => _15
case 15 => _16
case 16 => _17
case 17 => _18
case 18 => _19
case _ => throw new IndexOutOfBoundsException(n.toString())
}
/** projection of this product */
def _1: T1
/** projection of this product */
def _2: T2
/** projection of this product */
def _3: T3
/** projection of this product */
def _4: T4
/** projection of this product */
def _5: T5
/** projection of this product */
def _6: T6
/** projection of this product */
def _7: T7
/** projection of this product */
def _8: T8
/** projection of this product */
def _9: T9
/** projection of this product */
def _10: T10
/** projection of this product */
def _11: T11
/** projection of this product */
def _12: T12
/** projection of this product */
def _13: T13
/** projection of this product */
def _14: T14
/** projection of this product */
def _15: T15
/** projection of this product */
def _16: T16
/** projection of this product */
def _17: T17
/** projection of this product */
def _18: T18
/** projection of this product */
def _19: T19
}
| cran/rkafkajars | java/scala/Product19.scala | Scala | apache-2.0 | 2,980 |
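Tuple19 is the standard implementation of Product19, so productArity and productElement behave as below:

object Product19Demo extends App {
  val t = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, "nineteen")
  assert(t.productArity == 19)
  assert(t.productElement(18) == "nineteen") // zero-based; same value as t._19
}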
package test
// these classes are in a totally different package in order to ensure that the macro generates
// correct identifiers that can be accessed outside of the `sangria` package.
import sangria.macros.derive._
import sangria.schema.{EnumType, ObjectType, OutputType}
case class CompanionA(b: CompanionB)
object CompanionA {
implicit val graphqlType: ObjectType[Unit, CompanionA] = deriveObjectType[Unit, CompanionA]()
}
case class CompanionB(c: CompanionC)
object CompanionB {
implicit val graphqlType: OutputType[CompanionB] =
deriveObjectType[Unit, CompanionB](RenameField("c", "myC"))
}
case class CompanionC(e: CompanionEnum, e1: AnotherEnum.ValName)
object CompanionC {
implicit def graphqlType[Ctx]: ObjectType[Ctx, CompanionC] = deriveObjectType[Ctx, CompanionC]()
}
sealed trait CompanionEnum
object CompanionEnum1 extends CompanionEnum
object CompanionEnum2 extends CompanionEnum
object CompanionEnum {
implicit val graphqlType: EnumType[CompanionEnum] =
deriveEnumType[CompanionEnum](RenameValue("CompanionEnum1", "first"))
}
object AnotherEnum extends Enumeration {
type ValName = Value
val FOO, BAR, BAZ = Value
implicit val valNameType: EnumType[AnotherEnum.ValName] = deriveEnumType()
}
| sangria-graphql/sangria | modules/derivation/src/test/scala/test/testMacroModelWithCompanions.scala | Scala | apache-2.0 | 1,235 |
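The derived instances above plug into an ordinary Sangria schema; a minimal sketch (the Query field name and the resolver value are illustrative):

import sangria.schema._
import test._

object CompanionSchemaSketch {
  val QueryType = ObjectType("Query", fields[Unit, Unit](
    Field("a", CompanionA.graphqlType,
      resolve = _ => CompanionA(CompanionB(CompanionC(CompanionEnum1, AnotherEnum.FOO))))
  ))
  val schema = Schema(QueryType)
}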
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import exceptions.{GeneratorDrivenPropertyCheckFailedException, TableDrivenPropertyCheckFailedException, TestFailedDueToTimeoutException, TestCanceledException}
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.junit.JUnitTestFailedError
import prop.TableDrivenPropertyChecks
import prop.TableFor1
import time.{Second, Span}
import SharedHelpers.EventRecordingReporter
import TableDrivenPropertyChecks._
/* Uncomment after remove type aliases in org.scalatest package object
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.ModifiableMessage
*/
class ClueSpec extends FlatSpec with Matchers with SeveredStackTraces {
def examples: TableFor1[Throwable with ModifiableMessage[_ <: StackDepth]] =
Table(
"exception",
new TestFailedException("message", 3),
new JUnitTestFailedError("message", 3),
new TestFailedDueToTimeoutException(e => Some("message"), None, e => 3, None, Span(1, Second)),
new TableDrivenPropertyCheckFailedException(e => "message", None, e => 3, None, "undecMsg", List.empty, List.empty, 3),
new GeneratorDrivenPropertyCheckFailedException(e => "message", None, e => 3, None, "undecMsg", List.empty, Option(List.empty), List.empty)
)
// TOTEST: clue object with toString. clue object with null toString. all-whitespace clue string
"The modifyMessage method" should "return the an exception with an equal message option if passed a function that returns the same option passed to it" in {
forAll (examples) { e =>
e.modifyMessage(opt => opt) should equal (e)
}
}
it should "return the new exception with the clue string prepended, separated by a space char if passed a function that does that" in {
forAll (examples) { e =>
val clue = "clue"
val fun: (Option[String] => Option[String]) =
opt => opt match {
case Some(msg) => Some(clue + " " + msg)
case None => Some(clue)
}
e.modifyMessage(fun).message.get should be ("clue message")
}
}
// ******* withClue tests *******
"The withClue construct" should "allow any non-ModifiableMessage exception to pass through" in {
val iae = new IllegalArgumentException
val caught = intercept[IllegalArgumentException] {
withClue("howdy") {
throw iae
}
}
caught should be theSameInstanceAs (iae)
}
it should "given an empty clue string, rethrow the same TFE exception" in {
forAll (examples) { e =>
val caught = intercept[Throwable] {
withClue("") {
throw e
}
}
caught should be theSameInstanceAs (e)
}
}
it should "given an all-whitespace clue string, should throw a new TFE with the white space prepended to the old message" in {
forAll (examples) { e =>
val white = " "
val caught = intercept[Throwable with StackDepth] {
withClue(white) {
throw e
}
}
caught should not be theSameInstanceAs (e)
caught.message should be ('defined)
caught.message.get should equal (white + "message")
caught.getClass should be theSameInstanceAs (e.getClass)
}
}
it should "given a non-empty clue string with no trailing white space, throw a new instance of the caught TFE exception that has all fields the same except a prepended clue string followed by an extra space" in {
forAll (examples) { e =>
val caught = intercept[Throwable with StackDepth] {
withClue("clue") {
throw e
}
}
caught should not be theSameInstanceAs (e)
caught.message should be ('defined)
caught.message.get should equal ("clue message")
caught.getClass should be theSameInstanceAs (e.getClass)
}
}
it should "given a non-empty clue string with a trailing space, throw a new instance of the caught TFE exception that has all fields the same except a prepended clue string (followed by no extra space)" in {
forAll (examples) { e =>
val caught = intercept[Throwable with StackDepth] {
withClue("clue ") { // has a trailing space
throw e
}
}
caught should not be theSameInstanceAs (e)
caught.message should be ('defined)
caught.message.get should equal ("clue message")
caught.getClass should be theSameInstanceAs (e.getClass)
}
}
it should "given a non-empty clue string with a end of line, throw a new instance of the caught TFE exception that has all fields the same except a prepended clue string (followed by no extra space)" in {
forAll (examples) { e =>
val caught = intercept[Throwable with StackDepth] {
withClue("clue\\n") { // has a end of line character
throw e
}
}
caught should not be theSameInstanceAs (e)
caught.message should be ('defined)
caught.message.get should equal ("clue\\nmessage")
caught.getClass should be theSameInstanceAs (e.getClass)
}
}
// ***** tests with objects other than String *****
it should "given an object with a non-empty clue string with no trailing white space, throw a new instance of the caught TFE exception that has all fields the same except a prepended clue string followed by an extra space" in {
forAll (examples) { e =>
val list = List(1, 2, 3)
val caught = intercept[Throwable with StackDepth] {
withClue(list) {
throw e
}
}
caught should not be theSameInstanceAs (e)
caught.message should be ('defined)
caught.message.get should equal ("List(1, 2, 3) message")
caught.getClass should be theSameInstanceAs (e.getClass)
}
}
it should "pass the last value back" in {
val result = withClue("hi") { 3 }
result should equal (3)
}
it should "throw NPE if a null clue object is passed" in {
forAll (examples) { e =>
intercept[NullPointerException] {
withClue (null) {
throw e
}
}
}
}
it should "infer the type of the result of the passed in function" in {
val result: Int = withClue("hi") { 22 }
assert(result === 22)
}
it should "be able to accept by-name payload" in {
val result: String = withClue(() => 128) { "hello" }
assert(result === "hello")
}
it should "work when used in withFixture" in {
forAll(examples) { e =>
val a =
new org.scalatest.fixture.FunSpec {
type FixtureParam = String
override def withFixture(test: OneArgTest) = {
withClue("a clue") {
test("something")
}
}
it("should do something") { p =>
throw e
}
}
val rep = new EventRecordingReporter()
a.run(None, Args(rep))
rep.testFailedEventsReceived.length should be (1)
rep.testFailedEventsReceived(0).message should be ("a clue message")
}
}
it should "return Failed that contains TestFailedException and with prepended clue" in {
val failed = Failed(new TestFailedException("message", 3))
val result = withClue("a clue") { failed }
result shouldBe a [Failed]
result.exception shouldBe a [TestFailedException]
result.exception.getMessage shouldBe "a clue message"
}
it should "return original Failed that contains the RuntimeException and without prepended clue" in {
val failed = Failed(new RuntimeException("message"))
val result = withClue("a clue") { failed }
result should be theSameInstanceAs failed
result.exception.getMessage shouldBe "message"
}
it should "return Canceled that contains TestCanceledException and with prepended clue" in {
val canceled = Canceled(new TestCanceledException("message", 3))
val result = withClue("a clue") { canceled }
result shouldBe a [Canceled]
result.exception shouldBe a [TestCanceledException]
result.exception.getMessage shouldBe "a clue message"
}
it should "return original Canceled that contains the RuntimeException and without prepended clue" in {
val re = new RuntimeException("message")
val canceled = Canceled(re)
val result = withClue("a clue") { canceled }
result.exception.getCause should be theSameInstanceAs re
result.exception.getMessage shouldBe "a clue message"
}
it should "return original Pending" in {
val pending = Pending
val result = withClue("a clue") { pending }
result should be theSameInstanceAs pending
}
it should "return original Succeeded" in {
val succeeded = Succeeded
val result = withClue("a clue") { succeeded }
result should be theSameInstanceAs succeeded
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/ClueSpec.scala | Scala | apache-2.0 | 9,282 |
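In everyday tests, withClue simply wraps assertions to prefix their failure messages; a small usage sketch in the same old-style ScalaTest API as the spec above:

import org.scalatest._

class WithClueUsageSketch extends FlatSpec with Matchers {
  "a lookup" should "find the expected id" in {
    val id = 42
    withClue(s"for id=$id: ") {
      (id % 2) shouldBe 0 // on failure the message is prefixed with "for id=42: "
    }
  }
}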
package roc
package postgresql
package server
import org.specs2._
final class ErrorNoticeMinutiaSpec extends Specification with ScalaCheck { def is = s2"""
ErrorNoticeMessageFields
Severity must equal 'S' ${ENMF().testSeverity}
SQLSTATE Code must equal 'C' ${ENMF().testCode}
Message must equal 'M' ${ENMF().testMessage}
Hint must equal 'H' ${ENMF().testHint}
Position must equal 'P' ${ENMF().testPosition}
InternalPosition must equal 'p' ${ENMF().testInternalPosition}
InternalQuery must equal 'q' ${ENMF().testInternalQuery}
Where must equal 'W' ${ENMF().testWhere}
SchemaName must equal 's' ${ENMF().testSchemaName}
TableName must equal 't' ${ENMF().testTableName}
ColumnName must equal 'c' ${ENMF().testColumnName}
DataTypeName must equal 'd' ${ENMF().testDataTypeName}
ConstraintName must equal 'n' ${ENMF().testConstraintName}
File must equal 'F' ${ENMF().testFile}
Line must equal 'L' ${ENMF().testLine}
Routine must equal 'R' ${ENMF().testRoutine}
Error Class Codes
SuccessCodes must contain all Success values ${ECC().testSuccessCodes}
WarningCodes must contain all Warning values ${ECC().testWarningCodes}
ErrorCodes must contain all Error values ${ECC().testErrorCodes}
Successful Completion must be '00' ${ECC().testSuccessfulCompletion}
Warning must be '01' ${ECC().testWarning}
NoData must be '02' ${ECC().testNoData}
SQLStatementNotYetComplete must be '03' ${ECC().testSQLStatementNotYetComplete}
ConnectionException must be '08' ${ECC().testConnectionException}
TriggeredActionException must be '09' ${ECC().testTriggeredActionException}
FeatureNotSupported must be '0A' ${ECC().testFeatureNotSupported}
InvalidTransactionInitiation must be '0B' ${ECC().testInvalidTransactionInitiation}
LocatorException must be '0F' ${ECC().testLocatorException}
InvalidGrantor must be '0L' ${ECC().testInvalidGrantor}
InvalidRoleSpecification must be '0P' ${ECC().testInvalidRoleSpecification}
DiagnosisException must be '0Z' ${ECC().testDiagnosisException}
CaseNotFound must be '20' ${ECC().testCaseNotFound}
CardinalityViolation must be '21' ${ECC().testCardinalityViolation}
DataException must be '22' ${ECC().testDataException}
IntegrityConstraintViolation must be '23' ${ECC().testIntegrityConstraintViolation}
InvalidCursorState must be '24' ${ECC().testInvalidCursorState}
InvalidTransactionState must be '25' ${ECC().testInvalidTransactionState}
InvalidSQLStatementName must be '26' ${ECC().testInvalidSQLStatementName}
TriggeredDataChangeViolation must be '27' ${ECC().testTriggeredDataChangeViolation}
InvalidAuthorizationSpecification must be '28' ${ECC().testInvalidAuthorizationSpecification}
DependentPrivilegeDescriptorsStillExist must be '2B' ${ECC().testDependentPrivilegeDescriptorsStillExist}
InvalidTransactionTermination must be '2D' ${ECC().testInvalidTransactionTermination}
SQLRoutineException must be '2F' ${ECC().testSQLRoutineException}
InvalidCursorName must be '34' ${ECC().testInvalidCursorName}
ExternalRoutineException must be '38' ${ECC().testExternalRoutineException}
ExternalRoutineInvocationException must be '39' ${ECC().testExternalRoutineInvocationException}
SavepointException must be '3B' ${ECC().testSavepointException}
InvalidCatalogName must be '3D' ${ECC().testInvalidCatalogName}
InvalidSchemaName must be '3F' ${ECC().testInvalidSchemaName}
TransactionRollback must be '40' ${ECC().testTransactionRollback}
SyntaxErrorOrAccessRuleViolation must be '42' ${ECC().testSyntaxErrorOrAccessRuleViolation}
WithCheckOptionViolation must be '44' ${ECC().testWithCheckOptionViolation}
InsufficientResources must be '53' ${ECC().testInsufficientResources}
ProgramLimitExceeded must be '54' ${ECC().testProgramLimitExceeded}
ObjectNotInPrerequisiteState must be '55' ${ECC().testObjectNotInPrerequisiteState}
OperatorIntervention must be '57' ${ECC().testOperatorIntervention}
SystemError must be '58' ${ECC().testSystemError}
ConfigurationFileError must be 'F0' ${ECC().testConfigurationFileError}
ForeignDataWrapperError must be 'HV' ${ECC().testForeignDataWrapperError}
PLpgSQLError must be 'P0' ${ECC().testPLpgSQLError}
InternalError must be 'XX' ${ECC().testInternalError}
"""
case class ENMF() extends ScalaCheck {
import ErrorNoticeMessageFields._
def testSeverity = Severity must_== 'S'
def testCode = Code must_== 'C'
def testMessage = Message must_== 'M'
def testHint = Hint must_== 'H'
def testPosition = Position must_== 'P'
def testInternalPosition = InternalPosition must_== 'p'
def testInternalQuery = InternalQuery must_== 'q'
def testWhere = Where must_== 'W'
def testSchemaName = SchemaName must_== 's'
def testTableName = TableName must_== 't'
def testColumnName = ColumnName must_== 'c'
def testDataTypeName = DataTypeName must_== 'd'
def testConstraintName = ConstraintName must_== 'n'
def testFile = File must_== 'F'
def testLine = Line must_== 'L'
def testRoutine = Routine must_== 'R'
}
case class ECC() extends ScalaCheck {
import ErrorClassCodes._
def testSuccessfulCompletion = SuccessfulCompletion must_== "00"
def testWarning = Warning must_== "01"
def testNoData = NoData must_== "02"
def testSQLStatementNotYetComplete = SQLStatementNotYetComplete must_== "03"
def testConnectionException = ConnectionException must_== "08"
def testTriggeredActionException = TriggeredActionException must_== "09"
def testFeatureNotSupported = FeatureNotSupported must_== "0A"
def testInvalidTransactionInitiation = InvalidTransactionInitiation must_== "0B"
def testLocatorException = LocatorException must_== "0F"
def testInvalidGrantor = InvalidGrantor must_== "0L"
def testInvalidRoleSpecification = InvalidRoleSpecification must_== "0P"
def testDiagnosisException = DiagnosisException must_== "0Z"
def testCaseNotFound = CaseNotFound must_== "20"
def testCardinalityViolation = CardinalityViolation must_== "21"
def testDataException = DataException must_== "22"
def testIntegrityConstraintViolation = IntegrityConstraintViolation must_== "23"
def testInvalidCursorState = InvalidCursorState must_== "24"
def testInvalidTransactionState = InvalidTransactionState must_== "25"
def testInvalidSQLStatementName = InvalidSQLStatementName must_== "26"
def testTriggeredDataChangeViolation = TriggeredDataChangeViolation must_== "27"
def testInvalidAuthorizationSpecification = InvalidAuthorizationSpecification must_== "28"
def testDependentPrivilegeDescriptorsStillExist =
DependentPrivilegeDescriptorsStillExist must_== "2B"
def testInvalidTransactionTermination = InvalidTransactionTermination must_== "2D"
def testSQLRoutineException = SQLRoutineException must_== "2F"
def testInvalidCursorName = InvalidCursorName must_== "34"
def testExternalRoutineException = ExternalRoutineException must_== "38"
def testExternalRoutineInvocationException = ExternalRoutineInvocationException must_== "39"
def testSavepointException = SavepointException must_== "3B"
def testInvalidCatalogName = InvalidCatalogName must_== "3D"
def testInvalidSchemaName = InvalidSchemaName must_== "3F"
def testTransactionRollback = TransactionRollback must_== "40"
def testSyntaxErrorOrAccessRuleViolation = SyntaxErrorOrAccessRuleViolation must_== "42"
def testWithCheckOptionViolation = WithCheckOptionViolation must_== "44"
def testInsufficientResources = InsufficientResources must_== "53"
def testProgramLimitExceeded = ProgramLimitExceeded must_== "54"
def testObjectNotInPrerequisiteState = ObjectNotInPrerequisiteState must_== "55"
def testOperatorIntervention = OperatorIntervention must_== "57"
def testSystemError = SystemError must_== "58"
def testConfigurationFileError = ConfigurationFileError must_== "F0"
def testForeignDataWrapperError = ForeignDataWrapperError must_== "HV"
def testPLpgSQLError = PLpgSQLError must_== "P0"
def testInternalError = InternalError must_== "XX"
def testSuccessCodes = SuccessCodes must_== List(SuccessfulCompletion)
def testWarningCodes = WarningCodes must_== List(Warning, NoData)
def testErrorCodes = {
val expectedCodes = List(SQLStatementNotYetComplete, ConnectionException,
TriggeredActionException, FeatureNotSupported, InvalidTransactionInitiation,
LocatorException, InvalidGrantor, InvalidRoleSpecification, DiagnosisException,
CaseNotFound, CardinalityViolation, DataException, IntegrityConstraintViolation,
InvalidCursorState, InvalidTransactionState, InvalidSQLStatementName,
TriggeredDataChangeViolation, InvalidAuthorizationSpecification,
DependentPrivilegeDescriptorsStillExist, InvalidTransactionTermination, SQLRoutineException,
InvalidCursorName, ExternalRoutineException, ExternalRoutineInvocationException,
SavepointException, InvalidCatalogName, InvalidSchemaName, TransactionRollback,
SyntaxErrorOrAccessRuleViolation, WithCheckOptionViolation, InsufficientResources,
ProgramLimitExceeded, ObjectNotInPrerequisiteState, OperatorIntervention, SystemError,
ConfigurationFileError, ForeignDataWrapperError, PLpgSQLError, InternalError)
ErrorCodes must_== expectedCodes
}
}
}
| penland365/roc | core/src/test/scala/roc/postgresql/server/ErrorNoticeMinutiaSpecs.scala | Scala | bsd-3-clause | 10,755 |
import play.api.{ Application, ApplicationLoader, BuiltInComponentsFromContext }
import play.api.libs.ws.ahc.AhcWSComponents
import com.softwaremill.macwire._
import router.Routes
import com.lightbend.lagom.scaladsl.api._
import com.lightbend.lagom.scaladsl.client.LagomServiceClientComponents
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import scala.collection.immutable
class Loader extends ApplicationLoader {
def load(context: ApplicationLoader.Context): Application = {
new BuiltInComponentsFromContext(context)
with LagomServiceClientComponents
with AhcWSComponents
with LagomDevModeComponents
with controllers.AssetsComponents
{
override lazy val serviceInfo = ServiceInfo("p", Map("p" -> immutable.Seq(
ServiceAcl.forPathRegex("/p"),
ServiceAcl.forPathRegex("/assets/.*")
)))
override lazy val router = {
val prefix = "/"
wire[Routes]
}
override lazy val httpFilters = Nil
lazy val applicationController = wire[controllers.Application]
}.application
}
}
| rstento/lagom | dev/sbt-plugin/src/sbt-test/sbt-plugin/run-all-scaladsl/p/app/Loader.scala | Scala | apache-2.0 | 1,093 |
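wire[Routes] above is MacWire's compile-time constructor injection: the macro looks up values matching the constructor's parameter types in scope and generates the new call. A standalone sketch with illustrative classes:

import com.softwaremill.macwire._

object WireSketch {
  class Database
  class UserService(db: Database)

  lazy val database = new Database
  lazy val userService = wire[UserService] // expands to: new UserService(database)
}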
package com.gx.loan
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import scala.language.reflectiveCalls // the structural type below otherwise raises a feature warning

object using {
def apply[R <: {def close() : Unit}, T](resource: => R)(f: R => T): T = {
val source = Option(resource)
try {
f(source.get)
} finally {
for (s <- source)
s.close()
}
}
}
| josephguan/scala-design-patterns | behavioral/loan/src/main/scala/com/gx/loan/using.scala | Scala | apache-2.0 | 856 |
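Typical use of the loan pattern above: the resource is closed whether or not f throws. The file name is illustrative:

import com.gx.loan.using
import scala.io.Source

object UsingExample extends App {
  // Source has a close(): Unit method, so it satisfies the structural type.
  val firstLine = using(Source.fromFile("build.sbt")) { src =>
    src.getLines().next()
  }
  println(firstLine)
}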
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
class RankTest extends TableTestBase {
private val util = batchTestUtil()
util.addTableSource[(Int, String, Long)]("MyTable", 'a, 'b, 'c)
@Test(expected = classOf[RuntimeException])
def testRowNumberWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT ROW_NUMBER() over (partition by a) FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[RuntimeException])
def testRowNumberWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT ROW_NUMBER() over (partition by a order by b) as a,
| ROW_NUMBER() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testRankWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT RANK() over (partition by a) FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testRankWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT RANK() over (partition by a order by b) as a,
| RANK() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testDenseRankWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT dense_rank() over (partition by a) FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testDenseRankWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT DENSE_RANK() over (partition by a order by b) as a,
| DENSE_RANK() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testRankValueFilterWithUpperValue(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a) rk FROM MyTable) t
|WHERE rk <= 2 AND a > 10
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testRankValueFilterWithRange(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b, c ORDER BY a) rk FROM MyTable) t
|WHERE rk <= 2 AND rk > -2
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testRankValueFilterWithEquals(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a, c) rk FROM MyTable) t
|WHERE rk = 2
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testWithoutPartitionBy(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (ORDER BY a) rk FROM MyTable) t
|WHERE rk < 10
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testMultiSameRankFunctionsWithSameGroup(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b,
| RANK() OVER (PARTITION BY b ORDER BY a) rk1,
| RANK() OVER (PARTITION BY b ORDER BY a) rk2 FROM MyTable) t
|WHERE rk1 < 10
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testDuplicateRankFunctionColumnName(): Unit = {
util.addTableSource[(Int, Long, String)]("MyTable2", 'a, 'b, 'rk)
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a) rk FROM MyTable2) t
|WHERE rk < 10
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testRankFunctionInMiddle(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, RANK() OVER (PARTITION BY a ORDER BY a) rk, b, c FROM MyTable) t
|WHERE rk < 10
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testCreateViewWithRowNumber(): Unit = {
util.addTable(
"""
|CREATE TABLE test_source (
| name STRING,
| eat STRING,
| age BIGINT
|) WITH (
| 'connector' = 'values',
| 'bounded' = 'true'
|)
""".stripMargin)
util.tableEnv.executeSql("create view view1 as select name, eat ,sum(age) as cnt\\n"
+ "from test_source group by name, eat")
util.tableEnv.executeSql("create view view2 as\\n"
+ "select *, ROW_NUMBER() OVER (PARTITION BY name ORDER BY cnt DESC) as row_num\\n"
+ "from view1")
util.addTable(
s"""
|create table sink (
| name varchar,
| eat varchar,
| cnt bigint
|)
|with(
| 'connector' = 'print'
|)
|""".stripMargin
)
util.verifyExecPlanInsert("insert into sink select name, eat, cnt\\n"
+ "from view2 where row_num <= 3")
}
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/RankTest.scala | Scala | apache-2.0 | 5,963 |
package org.vitrivr.adampro.data.index.structures.va.marks
import org.vitrivr.adampro.data.datatypes.vector.Vector
import org.vitrivr.adampro.data.index.IndexingTaskTuple
import org.vitrivr.adampro.data.index.structures.va.VAIndex.Marks
/**
* ADAMpro
*
* Ivan Giangreco
* September 2016
*/
private[va] object VAPlusMarksGenerator extends MarksGenerator with Serializable {
/**
*
* @param samples training samples
* @param maxMarks maximal number of marks (different for every dimension)
* @return
*/
override private[va] def getMarks(samples: Seq[IndexingTaskTuple], maxMarks: Seq[Int]): Marks = {
val dimensionality = maxMarks.length
val EPSILON = 10E-9
val init = EquidistantMarksGenerator.getMarks(samples, maxMarks).map(x => x ++ Seq(Vector.conv_double2vb(x.last + EPSILON)))
(0 until dimensionality).map { dim =>
var marks = init(dim)
var delta = Vector.maxValue
var deltaBar = Vector.maxValue
//TODO: rather than K-means, use DBScan
do {
//K-means
delta = deltaBar
val points = samples.map(_.ap_indexable.apply(dim))
val rjs = marks.sliding(2).toList.map { list => {
val filteredPoints = points.filter(p => p >= list(0) && p < list(1))
if (filteredPoints.isEmpty) {
list.toSeq
} else {
filteredPoints.toSeq
}
}
}.map(fps => (1.0 / fps.length) * fps.sum).map(Vector.conv_double2vb(_))
val cjs = Seq(rjs.head) ++ rjs.sliding(2).map(x => x.sum / x.length).toList
marks = cjs
deltaBar = marks.sliding(2).map { list => points.filter(p => p >= list(0) && p < list(1)) }.toList.zip(rjs).map { case (fps, rj) => fps.map(fp => (fp - rj) * (fp - rj)).sum }.sum
} while (deltaBar / delta < 0.999)
marks
}
}
}
| dbisUnibas/ADAMpro | src/main/scala/org/vitrivr/adampro/data/index/structures/va/marks/VAPlusMarksGenerator.scala | Scala | mit | 1,848 |
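The do/while above is a one-dimensional Lloyd (k-means) pass: cell means become representatives (rjs) and marks move to midpoints between neighbouring representatives (cjs), repeating until the quantization error stops improving. The self-contained sketch below illustrates the idea, though it deliberately differs from the original: it pins the outer marks and runs a fixed number of passes instead of the relative-improvement test.

object LloydMarksSketch extends App {
  val points = Seq(0.1, 0.2, 0.3, 5.0, 5.1, 9.8, 9.9, 10.0)

  // One Lloyd-Max pass: cell means become representatives, interior marks
  // move to midpoints between neighbouring representatives; outer marks stay.
  def refine(marks: Seq[Double]): Seq[Double] = {
    val reps = marks.sliding(2).toSeq.map { case Seq(lo, hi) =>
      val inCell = points.filter(p => p >= lo && p < hi)
      if (inCell.isEmpty) (lo + hi) / 2 else inCell.sum / inCell.length
    }
    val interior = reps.sliding(2).map(pair => pair.sum / 2).toSeq
    (marks.head +: interior) :+ marks.last
  }

  val initial = Seq(0.0, 2.0, 4.0, 10.1) // 3 cells over [0, 10.1)
  val refined = Iterator.iterate(initial)(refine).drop(20).next()
  println(refined) // interior marks settle between the point clusters
}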
package shapeless
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
import shapeless.labelled.{ FieldType, field }
/**
* Provides default values of case class-like types.
*
* The `Out` type parameter is an HList type whose length is the number of fields of `T`. Its elements correspond
* to the fields of `T`, in their original order. It is made of `None.type` (no default value for this field) and
* `Some[...]` (default value available for this field, with `...` the type of the field). Note that `None.type` and
* `Some[...]` are more precise than simply `Option[...]`, so that the availability of default values can be used
* in type level calculations.
*
* The `apply` method returns an HList of type `Out`, with `None` elements corresponding to no default value available,
* and `Some(defaultValue)` to default value available for the corresponding fields.
*
* Use like
* {{{
* case class CC(i: Int, s: String = "b")
*
* val default = Default[CC]
*
* // default.Out is None.type :: Some[String] :: HNil
*
* // default() returns
* // None :: Some("b") :: HNil,
* // typed as default.Out
* }}}
*
* @author Alexandre Archambault
*/
trait Default[T] extends DepFn0 with Serializable {
type Out <: HList
}
object Default {
def apply[T](implicit default: Default[T]): Aux[T, default.Out] = default
def mkDefault[T, Out0 <: HList](defaults: Out0): Aux[T, Out0] =
new Default[T] {
type Out = Out0
def apply() = defaults
}
type Aux[T, Out0 <: HList] = Default[T] { type Out = Out0 }
implicit def materialize[T, L <: HList]: Aux[T, L] = macro DefaultMacros.materialize[T, L]
/**
* Provides default values of case class-like types, as a record.
*
* Type `Out` is a record type, having one element per field with a default value. Labels
* come from the available `DefaultSymbolicLabelling[T]`, and values are the default values
* themselves.
*
* Method `apply` provides the record of default values, typed as `Out`.
*
* Example
* {{{
* case class CC(i: Int, s: String = "b")
*
* val default = Default.AsRecord[CC]
*
* // default.Out is Record.`'s -> String`.T
* // default() returns Record(s = "b")
* }}}
*
* @author Alexandre Archambault
*/
trait AsRecord[T] extends DepFn0 with Serializable {
type Out <: HList
}
object AsRecord {
def apply[T](implicit default: AsRecord[T]): Aux[T, default.Out] = default
type Aux[T, Out0 <: HList] = AsRecord[T] { type Out = Out0 }
trait Helper[L <: HList, Labels <: HList] extends DepFn1[L] with Serializable {
type Out <: HList
}
object Helper {
def apply[L <: HList, Labels <: HList](implicit helper: Helper[L, Labels]): Aux[L, Labels, helper.Out] = helper
type Aux[L <: HList, Labels <: HList, Out0 <: HList] = Helper[L, Labels] { type Out = Out0 }
implicit def hnilHelper: Aux[HNil, HNil, HNil] =
new Helper[HNil, HNil] {
type Out = HNil
def apply(l: HNil) = HNil
}
implicit def hconsSomeHelper[K <: Symbol, H, T <: HList, LabT <: HList, OutT <: HList]
(implicit
tailHelper: Aux[T, LabT, OutT]
): Aux[Some[H] :: T, K :: LabT, FieldType[K, H] :: OutT] =
new Helper[Some[H] :: T, K :: LabT] {
type Out = FieldType[K, H] :: OutT
def apply(l: Some[H] :: T) = field[K](l.head.get) :: tailHelper(l.tail)
}
implicit def hconsNoneHelper[K <: Symbol, T <: HList, LabT <: HList, OutT <: HList]
(implicit
tailHelper: Aux[T, LabT, OutT]
): Aux[None.type :: T, K :: LabT, OutT] =
new Helper[None.type :: T, K :: LabT] {
type Out = OutT
def apply(l: None.type :: T) = tailHelper(l.tail)
}
}
implicit def asRecord[T, Labels <: HList, Options <: HList, Rec <: HList]
(implicit
default: Default.Aux[T, Options],
labelling: DefaultSymbolicLabelling.Aux[T, Labels],
helper: Helper.Aux[Options, Labels, Rec]
): Aux[T, Rec] =
new AsRecord[T] {
type Out = Rec
def apply() = helper(default())
}
}
/**
* Provides default values of case class-like types, as a HList of options.
*
* Unlike `Default`, `Out` is made of elements like `Option[...]` instead of `None.type` and `Some[...]`.
* Thus, the availability of default values cannot be checked through types, only through values (via the `apply`
* method).
*
* This representation can be more convenient to deal with when one only checks the default values at run-time.
*
* Method `apply` provides the HList of default values, typed as `Out`.
*
* Example
* {{{
* case class CC(i: Int, s: String = "b")
*
* val default = Default.AsOptions[CC]
*
* // default.Out is Option[Int] :: Option[String] :: HNil
* // default() returns
* // None :: Some("b") :: HNil
* // typed as default.Out
* }}}
*
* @author Alexandre Archambault
*/
trait AsOptions[T] extends DepFn0 with Serializable {
type Out <: HList
}
object AsOptions {
def apply[T](implicit default: AsOptions[T]): Aux[T, default.Out] = default
type Aux[T, Out0 <: HList] = AsOptions[T] { type Out = Out0 }
trait Helper[L <: HList, Repr <: HList] extends DepFn1[L] with Serializable {
type Out <: HList
}
object Helper {
def apply[L <: HList, Repr <: HList](implicit helper: Helper[L, Repr]): Aux[L, Repr, helper.Out] = helper
type Aux[L <: HList, Repr <: HList, Out0 <: HList] = Helper[L, Repr] { type Out = Out0 }
implicit def hnilHelper: Aux[HNil, HNil, HNil] =
new Helper[HNil, HNil] {
type Out = HNil
def apply(l: HNil) = HNil
}
implicit def hconsSomeHelper[H, T <: HList, ReprT <: HList, OutT <: HList]
(implicit
tailHelper: Aux[T, ReprT, OutT]
): Aux[Some[H] :: T, H :: ReprT, Option[H] :: OutT] =
new Helper[Some[H] :: T, H :: ReprT] {
type Out = Option[H] :: OutT
def apply(l: Some[H] :: T) = l.head :: tailHelper(l.tail)
}
implicit def hconsNoneHelper[H, T <: HList, ReprT <: HList, OutT <: HList]
(implicit
tailHelper: Aux[T, ReprT, OutT]
): Aux[None.type :: T, H :: ReprT, Option[H] :: OutT] =
new Helper[None.type :: T, H :: ReprT] {
type Out = Option[H] :: OutT
def apply(l: None.type :: T) = None :: tailHelper(l.tail)
}
}
implicit def asOption[T, Repr <: HList, Options <: HList, Out0 <: HList]
(implicit
default: Default.Aux[T, Options],
gen: Generic.Aux[T, Repr],
helper: Helper.Aux[Options, Repr, Out0]
): Aux[T, Out0] =
new AsOptions[T] {
type Out = Out0
def apply() = helper(default())
}
}
}
@macrocompat.bundle
class DefaultMacros(val c: whitebox.Context) extends CaseClassMacros {
import c.universe._
def someTpe = typeOf[Some[_]].typeConstructor
def noneTpe = typeOf[None.type]
def materialize[T: WeakTypeTag, L: WeakTypeTag]: Tree = {
val tpe = weakTypeOf[T]
val cls = classSym(tpe)
lazy val companion = companionRef(tpe)
def altCompanion = companion.symbol.info
val none = q"_root_.scala.None"
def some(value: Tree) = q"_root_.scala.Some($value)"
// Symbol.alternatives is missing in Scala 2.10
def overloadsOf(sym: Symbol) =
if (sym.isTerm) sym.asTerm.alternatives
else if (sym.isType) sym :: Nil
else Nil
def hasDefaultParams(method: MethodSymbol) =
method.paramLists.flatten.exists(_.asTerm.isParamWithDefault)
// The existence of multiple apply overloads with default values gets checked
// after the macro runs. Their existence can make the macro expansion fail,
// as multiple overloads can define the functions we look for below, possibly
// with wrong types, making the compilation fail with the wrong error.
// We do this check here to detect that beforehand.
def overloadsWithDefaultParamsIn(tpe: Type) =
overloadsOf(tpe.member(TermName("apply"))).count {
alt => alt.isMethod && hasDefaultParams(alt.asMethod)
}
def defaultsFor(fields: List[(TermName, Type)]) = for {
((_, argTpe), i) <- fields.zipWithIndex
default = tpe.companion.member(TermName(s"apply$$default$$${i + 1}")) orElse
altCompanion.member(TermName(s"$$lessinit$$greater$$default$$${i + 1}"))
} yield if (default.isTerm) {
val defaultTpe = appliedType(someTpe, devarargify(argTpe))
val defaultVal = some(q"$companion.$default")
(defaultTpe, defaultVal)
} else (noneTpe, none)
def mkDefault(defaults: List[(Type, Tree)]) = {
val (types, values) = defaults.unzip
val outTpe = mkHListTpe(types)
val outVal = mkHListValue(values)
q"_root_.shapeless.Default.mkDefault[$tpe, $outTpe]($outVal)"
}
if (isCaseObjectLike(cls)) return mkDefault(Nil)
if (!isCaseClassLike(cls)) abort(s"$tpe is not a case class or case class like")
// ClassSymbol.primaryConstructor is missing in Scala 2.10
val primaryCtor = overloadsOf(tpe.decl(termNames.CONSTRUCTOR)).find {
alt => alt.isMethod && alt.asMethod.isPrimaryConstructor
}.getOrElse {
c.abort(c.enclosingPosition, s"Cannot get primary constructor of $tpe")
}.asMethod
// Checking if the primary constructor has default parameters, and returning
// a Default instance with non-empty types / values only if that holds.
// The apply$default$... methods below may still exist without these, if an additional
// apply method has default parameters. We want to ignore them in this case.
val hasUniqueDefaults = hasDefaultParams(primaryCtor) && {
val k = overloadsWithDefaultParamsIn(tpe.companion)
k == 1 || (k == 0 && overloadsWithDefaultParamsIn(altCompanion) == 1)
}
mkDefault {
val fields = fieldsOf(tpe)
if (hasUniqueDefaults) defaultsFor(fields)
else List.fill(fields.size)(noneTpe, none)
}
}
}
| rorygraves/perf_tester | corpus/shapeless/src/main/scala/shapeless/default.scala | Scala | apache-2.0 | 10,118 |
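Usage, following the scaladoc example above:

import shapeless._

case class CC(i: Int, s: String = "b")

object DefaultUsage extends App {
  val default = Default[CC]
  // default.Out is inferred as None.type :: Some[String] :: HNil
  println(default()) // None :: Some(b) :: HNil
}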
/**
* Copyright 2015 ICT.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.ac.ict.acs.netflow.master
class SubmissionSuite {
}
| DataSysLab/netflow | query/src/test/scala/cn/ac/ict/acs/netflow/query/master/SubmissionSuite.scala | Scala | apache-2.0 | 893 |
/*
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*/
package org.locationtech.geomesa.utils.geotools
import javax.imageio.spi.ServiceRegistry
import com.typesafe.config.{ConfigFactory, ConfigRenderOptions}
import com.typesafe.scalalogging.LazyLogging
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/**
* Provides simple feature types based on configs on the classpath
*/
class ConfigSftProvider extends SimpleFeatureTypeProvider with LazyLogging {
override def loadTypes(): java.util.List[SimpleFeatureType] = {
val config = ConfigFactory.load()
val path = sys.props.getOrElse(ConfigSftProvider.ConfigPathProperty, "geomesa.sfts")
if (!config.hasPath(path)) {
return List.empty[SimpleFeatureType]
}
config.getConfigList(path).flatMap { sft =>
try {
Some(SimpleFeatureTypes.createType(sft, None))
} catch {
case e: Exception =>
logger.error("Error loading simple feature type from config " +
s"${sft.root().render(ConfigRenderOptions.concise())}", e)
None
}
}.asJava
}
}
object ConfigSftProvider {
val ConfigPathProperty = "org.locationtech.geomesa.sft.config.path"
}
object SimpleFeatureTypeLoader {
val sfts: List[SimpleFeatureType] =
ServiceRegistry.lookupProviders(classOf[SimpleFeatureTypeProvider]).flatMap(_.loadTypes()).toList
}
| vpipkt/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/ConfigSftProvider.scala | Scala | apache-2.0 | 1,714 |
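ConfigSftProvider expects a list of SFT configs at geomesa.sfts. The sketch below shows the shape using Typesafe Config directly; the SFT attribute keys (type-name, attributes, srid, default) are assumptions for illustration, not verified against this GeoMesa version:

import com.typesafe.config.ConfigFactory

object SftConfigSketch extends App {
  // The SFT schema keys in this HOCON are assumptions.
  val conf = ConfigFactory.parseString(
    """
      |geomesa.sfts = [
      |  {
      |    type-name = "observation"
      |    attributes = [
      |      { name = "dtg",  type = "Date" }
      |      { name = "geom", type = "Point", srid = 4326, default = true }
      |    ]
      |  }
      |]
    """.stripMargin)
  println(conf.getConfigList("geomesa.sfts").size) // 1
}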
package lila.tournament
package crud
import org.joda.time.{ DateTime, DateTimeZone }
import lila.user.User
final class CrudApi {
def list = TournamentRepo uniques 50
def one(id: String) = TournamentRepo uniqueById id
def editForm(tour: Tournament) = CrudForm.apply fill CrudForm.Data(
name = tour.name,
homepageHours = ~tour.spotlight.flatMap(_.homepageHours),
clockTime = tour.clock.limitInMinutes,
clockIncrement = tour.clock.increment,
minutes = tour.minutes,
variant = tour.variant.id,
date = tour.startsAt,
image = ~tour.spotlight.flatMap(_.iconImg),
headline = tour.spotlight.??(_.headline),
description = tour.spotlight.??(_.description),
conditions = Condition.DataForm.AllSetup(tour.conditions))
def update(old: Tournament, data: CrudForm.Data) =
TournamentRepo update updateTour(old, data) void
def createForm = CrudForm.apply
def create(data: CrudForm.Data, owner: User): Fu[Tournament] = {
val tour = updateTour(empty, data).copy(createdBy = owner.id)
TournamentRepo insert tour inject tour
}
private def empty = Tournament.make(
createdByUserId = "lichess",
clock = chess.Clock.Config(0, 0),
minutes = 0,
system = System.Arena,
variant = chess.variant.Standard,
position = chess.StartingPosition.initial,
mode = chess.Mode.Rated,
`private` = false,
password = None,
waitMinutes = 0)
private def updateTour(tour: Tournament, data: CrudForm.Data) = {
import data._
val clock = chess.Clock.Config((clockTime * 60).toInt, clockIncrement)
val v = chess.variant.Variant.orDefault(variant)
tour.copy(
name = name,
clock = clock,
minutes = minutes,
variant = v,
startsAt = date,
schedule = Schedule(
freq = Schedule.Freq.Unique,
speed = Schedule.Speed.fromClock(clock),
variant = v,
position = chess.StartingPosition.initial,
at = date).some,
spotlight = Spotlight(
headline = headline,
description = description,
homepageHours = homepageHours.some.filterNot(0 ==),
iconFont = none,
iconImg = image.some.filter(_.nonEmpty)).some,
conditions = data.conditions.convert)
}
}
| clarkerubber/lila | modules/tournament/src/main/crud/CrudApi.scala | Scala | agpl-3.0 | 2,248 |
package implementationVoteSimple
import Gvote.AbstractElection
import Gvote.ModeScrutin
import Gvote.Candidat
class Election(_modeScrutin : ModeScrutin) extends AbstractElection(_modeScrutin){
type ImplTour = Tour
type Candidate = Candidat
def this(modeScrutin : String, nbTour : Int, listGagnantParTour : List[Int] ,visibiliteVote : String ){
this(new ModeScrutin(modeScrutin, nbTour, listGagnantParTour,visibiliteVote));
}
}
| DoumbiaAmadou/FrameworkVote | src/implementationVoteSimple/Election.scala | Scala | mit | 443 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.ops
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
class InvSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val inv = Inv[Float, Float]().setName("inv")
val input = Tensor[Float](2, 5).apply1(_ => Random.nextFloat())
runSerializationTest(inv, input)
}
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ops/InvSpec.scala | Scala | apache-2.0 | 1,039 |
package com.zobot.client.packet.definitions.clientbound.play
import com.zobot.client.packet.Packet
case class PlayerListHeaderAndFooter(header: Any, footer: Any) extends Packet {
override lazy val packetId = 0x4A
override lazy val packetData: Array[Byte] =
fromAny(header) ++
fromAny(footer)
}
| BecauseNoReason/zobot | src/main/scala/com/zobot/client/packet/definitions/clientbound/play/PlayerListHeaderAndFooter.scala | Scala | mit | 309 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.iterators
import java.util.Date
import org.geotools.data.memory.MemoryDataStore
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.WithClose
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.concurrent.duration.Duration
@RunWith(classOf[JUnitRunner])
class PlaybackIteratorTest extends Specification {
val sft = SimpleFeatureTypes.createType("test", "name:String,dtg:Date,*geom:Point:srid=4326")
val builder = new SimpleFeatureBuilder(sft)
val features = Seq.tabulate(10) { i =>
builder.addAll(Array[AnyRef](s"name$i", s"2018-01-01T00:00:0${9 - i}.000Z", s"POINT (4$i 55)"))
builder.buildFeature(s"$i")
}
val ds = new MemoryDataStore(features.toArray)
val dtg = Some("dtg")
val interval: (Date, Date) = {
val start = features.last.getAttribute("dtg").asInstanceOf[Date].getTime - 1
val end = features.head.getAttribute("dtg").asInstanceOf[Date].getTime + 1
(new Date(start), new Date(end))
}
"PlaybackIterator" should {
"return features in sorted order" in {
WithClose(new PlaybackIterator(ds, sft.getTypeName, interval, dtg, rate = 100f)) { iter =>
foreach(iter.sliding(2).toSeq) { case Seq(left, right) =>
left.getAttribute("dtg").asInstanceOf[Date].before(right.getAttribute("dtg").asInstanceOf[Date]) must beTrue
}
}
}
"query using windows" in {
val window = Some(Duration("5 seconds"))
WithClose(new PlaybackIterator(ds, sft.getTypeName, interval, dtg, window = window, rate = 100f)) { iter =>
foreach(iter.sliding(2).toSeq) { case Seq(left, right) =>
left.getAttribute("dtg").asInstanceOf[Date].before(right.getAttribute("dtg").asInstanceOf[Date]) must beTrue
}
}
}
"replicate original rate" in {
val filter = Some(ECQL.toFilter("name IN ('name8', 'name7')")) // dates are 1 and 2 seconds into interval
WithClose(new PlaybackIterator(ds, sft.getTypeName, interval, dtg, filter = filter, rate = 10f)) { iter =>
// don't time the first result, as it will be inconsistent due to setup and querying
iter.hasNext must beTrue
iter.next()
val start = System.currentTimeMillis()
iter.hasNext must beTrue
// should block until second feature time has elapsed, 100 millis from first feature (due to 10x rate)
iter.next()
System.currentTimeMillis() - start must beCloseTo(100L, 30)
iter.hasNext must beFalse
}
}
}
}
|
ddseapy/geomesa
|
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/iterators/PlaybackIteratorTest.scala
|
Scala
|
apache-2.0
| 3,184 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.measurenullvalue
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class NullMeasureValueTestCaseAggregate extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table IF EXISTS t3")
sql(
"CREATE TABLE IF NOT EXISTS t3 (ID Int, date Timestamp, country String, name String, " +
"phonetype String, serialname String, salary Int) STORED AS carbondata"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/nullmeasurevalue.csv' into table t3");
}
test("select count(salary) from t3") {
checkAnswer(
sql("select count(salary) from t3"),
Seq(Row(0)))
}
test("select count(ditinct salary) from t3") {
checkAnswer(
sql("select count(distinct salary) from t3"),
Seq(Row(0)))
}
test("select sum(salary) from t3") {
checkAnswer(
sql("select sum(salary) from t3"),
Seq(Row(null)))
}
test("select avg(salary) from t3") {
checkAnswer(
sql("select avg(salary) from t3"),
Seq(Row(null)))
}
test("select max(salary) from t3") {
checkAnswer(
sql("select max(salary) from t3"),
Seq(Row(null)))
}
test("select min(salary) from t3") {
checkAnswer(
sql("select min(salary) from t3"),
Seq(Row(null)))
}
test("select sum(distinct salary) from t3") {
checkAnswer(
sql("select sum(distinct salary) from t3"),
Seq(Row(null)))
}
override def afterAll {
sql("drop table t3")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
}
}
|
zzcclp/carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/measurenullvalue/NullMeasureValueTestCaseAggregate.scala
|
Scala
|
apache-2.0
| 2,780 |
package com.gregghz.ds
import org.specs2.mutable._
import scalaz.State
class GapBufferSpec extends Specification {
"GapBuffer" should {
"insert text correctly" in {
val g = new GapBuffer[Char]()
val machine = for {
_ <- g.insert('h')
_ <- g.insert('e')
_ <- g.insert('l')
_ <- g.insert('l')
_ <- g.insert('o')
result <- g.print
} yield(result)
g.go(machine)._2 mustEqual "hello"
}
"move point left and insert text in middle" in {
val g = new GapBuffer[Char]()
val machine = for {
_ <- g.insert('h')
_ <- g.insert('l')
_ <- g.insert('l')
_ <- g.insert('o')
_ <- g.left
_ <- g.left
_ <- g.left
_ <- g.insert('e')
result <- g.print
} yield(result)
g.go(machine)._2 mustEqual "hello"
}
"move point left nad right and then insert text" in {
val g = new GapBuffer[Char]()
val machine = for {
_ <- g.insert('h')
_ <- g.insert('e')
_ <- g.insert('l')
_ <- g.insert('o')
_ <- g.left
_ <- g.insert('l')
_ <- g.right
_ <- g.insert(' ')
_ <- g.insert('y')
_ <- g.insert('o')
_ <- g.insert('u')
result <- g.print
} yield(result)
g.go(machine)._2 mustEqual "hello you"
}
def checkBuffer[A](str: String, expected: String) = State[GapState[A], Unit] { case state =>
str mustEqual expected
(state, ())
}
"move the point correctly" in {
val size = 30
val g = new GapBuffer[Char](size)
val machine = for {
_ <- g.print.flatMap(checkBuffer(_, ""))
_ <- g.insert('e')
_ <- g.print.flatMap(checkBuffer(_, "e"))
_ <- g.insert('l')
_ <- g.print.flatMap(checkBuffer(_, "el"))
_ <- g.insert('l')
_ <- g.print.flatMap(checkBuffer(_, "ell"))
_ <- g.insert('o')
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.left
_ <- g.print.flatMap(checkBuffer(_, "ello"))
_ <- g.insert('h')
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.right
_ <- g.print.flatMap(checkBuffer(_, "hello"))
_ <- g.insert('!')
_ <- g.print.flatMap(checkBuffer(_, "hello!"))
result <- g.print
} yield(result)
g.go(machine)._2 mustEqual "hello!"
}
"delete correctly" in {
val g = new GapBuffer[Char](30)
val machine = for {
_ <- g.insert('y')
_ <- g.insert('u')
_ <- g.delete
_ <- g.delete
_ <- g.delete
_ <- g.delete
_ <- g.insert('h')
_ <- g.insert('e')
_ <- g.insert('o')
_ <- g.delete
_ <- g.insert('l')
_ <- g.insert('l')
_ <- g.insert('o')
str <- g.print
} yield(str)
g.go(machine)._2 mustEqual "hello"
}
"expand when needed" in {
val g = new GapBuffer[Char](2)
val machine = for {
_ <- g.insert('w')
_ <- g.insert('o')
_ <- g.insert('r')
_ <- g.insert('l')
_ <- g.insert('d')
_ <- g.left
_ <- g.left
_ <- g.left
_ <- g.left
_ <- g.left
_ <- g.insert('h')
_ <- g.insert('e')
_ <- g.insert('l')
_ <- g.insert('l')
_ <- g.insert('o')
_ <- g.insert(' ')
str <- g.print
} yield(str)
g.go(machine)._2 mustEqual "hello world"
}
}
}
|
gregghz/ds
|
src/test/scala/GapBufferSpec.scala
|
Scala
|
mit
| 4,665 |
package dynamite
import java.io.ByteArrayOutputStream
import java.nio.charset.StandardCharsets
import dynamite.Dynamo.DynamoClient
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB
import com.amazonaws.services.dynamodbv2.model.{
AttributeDefinition,
AttributeValue,
KeySchemaElement,
KeyType,
ProvisionedThroughput,
ScalarAttributeType
}
import zio.test.environment.TestConsole
import zio.{Task, ZManaged}
import zio.test._
import zio.test.Assertion._
import scala.jdk.CollectionConverters._
import DynamoDBTestHelpers._
object ScriptSpec extends DefaultRunnableSpec {
private def attributeDefinitions(
attributes: Seq[(String, ScalarAttributeType)]
) =
attributes.map {
case (symbol, attributeType) =>
new AttributeDefinition(symbol, attributeType)
}.asJava
def createTable(
client: AmazonDynamoDB
)(tableName: String)(attributes: (String, ScalarAttributeType)*) =
for {
key <- keySchema(attributes)
result <- Task(
client.createTable(
attributeDefinitions(attributes),
tableName,
key,
new ProvisionedThroughput(1L, 1L)
)
)
} yield result
private def keySchema(attributes: Seq[(String, ScalarAttributeType)]) =
attributes.toList match {
case Nil => Task.fail(new Exception("Invalid key schema"))
case hashKeyWithType :: rangeKeyWithType =>
val keySchemas =
hashKeyWithType._1 -> KeyType.HASH :: rangeKeyWithType.map(
_._1 -> KeyType.RANGE
)
Task.succeed(
keySchemas.map {
case (symbol, keyType) => new KeySchemaElement(symbol, keyType)
}.asJava
)
}
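  // Per the match above: the first attribute becomes the HASH key, and any
  // remaining attributes become RANGE keys.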
def captureStdOut[A](f: => A): (String, A) = {
val os = new ByteArrayOutputStream()
val result = Console.withOut(os)(f)
val output = new String(os.toByteArray, StandardCharsets.UTF_8)
(output, result)
}
val insertRows =
for {
_ <- Dynamo.putItem(
"playlists",
Map(
"id" -> new AttributeValue("123")
)
)
_ <- Dynamo.putItem(
"playlists",
Map(
"id" -> new AttributeValue("456")
)
)
} yield ()
val withPlaylistTable =
ZManaged
.access[DynamoClient](_.get)
.flatMap(client =>
withTable("playlists")("id" -> ScalarAttributeType.S)(
client
).as(client)
)
def spec =
suite("script")(
testM("fail with error message") {
val input = "select * from playlists limit 1"
withPlaylistTable
.use { client =>
for {
_ <- insertRows
() <- Script.eval(Format.Tabular, input)
output <- TestConsole.output
} yield {
assert(output)(
equalTo(
Vector(
"""id
|"456"
|""".stripMargin
)
)
)
}
}
},
testM("render as json") {
val input = "select * from playlists limit 2"
withPlaylistTable.use { client =>
for {
_ <- insertRows
() <- Script.eval(Format.Json, input)
output <- TestConsole.output
} yield {
assert(output)(
equalTo(
Vector(
"""{"id":"456"}
|{"id":"123"}
|""".stripMargin
)
)
)
}
}
},
testM("render as json-pretty") {
withPlaylistTable.use {
client =>
val input = "select * from playlists limit 2"
for {
_ <- insertRows
() <- Script.eval(Format.JsonPretty, input)
output <- TestConsole.output
} yield {
assert(output)(
equalTo(
Vector(
"""{
| "id" : "456"
|}
|{
| "id" : "123"
|}
|""".stripMargin
)
)
)
}
}
},
testM("render an error when parsing fails") {
val input = "select * from playlists limid"
for {
message <- Script
.eval(Format.JsonPretty, input)
.flip
.map(_.getMessage)
} yield {
assert(message)(
equalTo(
"""[error] Failed to parse query
|select * from playlists limid
| ^""".stripMargin
)
)
}
},
testM("create table") {
val input = "create table users (userId string);"
(for {
() <- Script.eval(Format.Tabular, input)
output <- TestConsole.output
} yield {
assert(output)(equalTo(Vector()))
}).ensuring(cleanupTable("users"))
},
testM("support multiple statements") {
val input =
"""
|create table users (userId string);
|create table events (userId string);
|create table notifications (userId string);
|""".stripMargin
(for {
() <- Script.eval(Format.Tabular, input)
output <- TestConsole.output
} yield {
assert(output)(equalTo(Vector()))
}).ensuring(cleanupTable("users"))
.ensuring(cleanupTable("events"))
.ensuring(cleanupTable("notifications"))
},
testM("render table names") {
withPlaylistTable
.flatMap(client =>
withTable("tracks")("id" -> ScalarAttributeType.S)(
client
).as(client)
)
.use { client =>
val input = "show tables"
for {
() <- Script.eval(Format.Tabular, input)
output <- TestConsole.output
} yield {
assert(output)(
equalTo(
Vector(
"""playlists
|tracks
|""".stripMargin
)
)
)
}
}
}
// testM("render paginated table names") {
// val input = "show tables"
// val (out, result) = captureStdOut {
// Script.eval(Opts(), input, client)
// }
// assert(result)(isRight(anything))
// assert(out)(
// equalTo(
// """playlists
// |tracks
// |""".stripMargin
// )
// )
// },
//TODO: pass client as layer to allow mocking errors
// test("render an error when rendering table names fails") {
// val input = "show tables"
// val error = "Error occurred while loading list of tables"
// Script.eval(Opts(), input, client)
// assert(result)(isLeft(equalTo(error)))
// }
) @@ TestAspect.sequential @@
TestAspect.before(TestConsole.clearOutput) provideCustomLayerShared (
(dynamoClient >>> Dynamo.live.passthrough).orDie
)
}
|
joprice/dynamite
|
src/test/scala/dynamite/ScriptSpec.scala
|
Scala
|
apache-2.0
| 7,223 |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sync3k
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives
import akka.stream.ActorMaterializer
import sync3k.routes.{ BaseRoutes, SimpleRoutes, WebSocketRoutes }
object WebServer extends Directives with SimpleRoutes with WebSocketRoutes {
implicit val system = ActorSystem("my-system")
implicit val materializer = ActorMaterializer()
implicit val executionContext = system.dispatcher
implicit var kafkaServer: String = _
def main(args: Array[String]) {
case class Config(bind: String = "0.0.0.0", port: Int = 8080, kafkaServer: String = "localhost:9092")
val parser = new scopt.OptionParser[Config]("sync3k-server") {
head("sync3k-server")
opt[String]('b', "bind")
.action((x, c) => c.copy(bind = x))
.text("interface to bind to. Defaults to 0.0.0.0")
opt[Int]('p', "port")
.action((x, c) => c.copy(port = x))
.text("port number to listen to. Defaults to 8080")
opt[String]('k', "kafkaServer")
.action((x, c) => c.copy(kafkaServer = x))
.text("Kafka bootstrap server. Defaults to localhost:9092")
}
val config = parser.parse(args, Config())
if (config.isEmpty) {
system.terminate()
return
}
kafkaServer = config.get.kafkaServer
// needed for the future flatMap/onComplete in the end
val bindingFuture = Http().bindAndHandle(routes, config.get.bind, config.get.port)
println(s"Server online at http://localhost:${config.get.port}/")
scala.sys.addShutdownHook {
bindingFuture
.flatMap(_.unbind()) // trigger unbinding from the port
.onComplete(_ => system.terminate()) // and shutdown when done
}
}
val routes = BaseRoutes.baseRoutes ~ simpleRoutes ~ webSocketRoutes
}
|
google/sync3k-server
|
src/main/scala/sync3k/WebServer.scala
|
Scala
|
apache-2.0
| 2,403 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.util.Properties
import joptsimple.{OptionParser, OptionSet, OptionSpec}
import scala.collection.Set
/**
* Helper functions for dealing with command line utilities
*/
object CommandLineUtils extends Logging {
/**
* Check if there are no options or `--help` option from command line
*
* @param commandOpts Acceptable options for a command
* @return true on matching the help check condition
*/
def isPrintHelpNeeded(commandOpts: CommandDefaultOptions): Boolean = {
commandOpts.args.length == 0 || commandOpts.options.has(commandOpts.helpOpt)
}
def isPrintVersionNeeded(commandOpts: CommandDefaultOptions): Boolean = {
commandOpts.options.has(commandOpts.versionOpt)
}
/**
* Check and print help message if there is no options or `--help` option
* from command line, if `--version` is specified on the command line
* print version information and exit.
* NOTE: The function name is not strictly speaking correct anymore
* as it also checks whether the version needs to be printed, but
* refactoring this would have meant changing all command line tools
* and unnecessarily increased the blast radius of this change.
*
* @param commandOpts Acceptable options for a command
* @param message Message to display on successful check
*/
def printHelpAndExitIfNeeded(commandOpts: CommandDefaultOptions, message: String) = {
if (isPrintHelpNeeded(commandOpts))
printUsageAndDie(commandOpts.parser, message)
if (isPrintVersionNeeded(commandOpts))
printVersionAndDie()
}
/**
* Check that all the listed options are present
*/
def checkRequiredArgs(parser: OptionParser, options: OptionSet, required: OptionSpec[_]*) {
for (arg <- required) {
if (!options.has(arg))
printUsageAndDie(parser, "Missing required argument \"" + arg + "\"")
}
}
/**
* Check that none of the listed options are present
*/
def checkInvalidArgs(parser: OptionParser, options: OptionSet, usedOption: OptionSpec[_], invalidOptions: Set[OptionSpec[_]]) {
if (options.has(usedOption)) {
for (arg <- invalidOptions) {
if (options.has(arg))
printUsageAndDie(parser, "Option \"" + usedOption + "\" can't be used with option \"" + arg + "\"")
}
}
}
/**
* Check that none of the listed options are present with the combination of used options
*/
def checkInvalidArgsSet(parser: OptionParser, options: OptionSet, usedOptions: Set[OptionSpec[_]], invalidOptions: Set[OptionSpec[_]]) {
if (usedOptions.count(options.has) == usedOptions.size) {
for (arg <- invalidOptions) {
if (options.has(arg))
printUsageAndDie(parser, "Option combination \"" + usedOptions.mkString(",") + "\" can't be used with option \"" + arg + "\"")
}
}
}
/**
* Print usage and exit
*/
def printUsageAndDie(parser: OptionParser, message: String): Nothing = {
System.err.println(message)
parser.printHelpOn(System.err)
Exit.exit(1, Some(message))
}
def printVersionAndDie(): Nothing = {
System.out.println(VersionInfo.getVersionString)
Exit.exit(0)
}
/**
* Parse key-value pairs in the form key=value
* value may contain equals sign
*/
def parseKeyValueArgs(args: Iterable[String], acceptMissingValue: Boolean = true): Properties = {
val splits = args.map(_.split("=", 2)).filterNot(_.length == 0)
val props = new Properties
for (a <- splits) {
if (a.length == 1 || (a.length == 2 && a(1).isEmpty())) {
if (acceptMissingValue) props.put(a(0), "")
else throw new IllegalArgumentException(s"Missing value for key ${a(0)}")
}
else props.put(a(0), a(1))
}
props
}
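  // A hypothetical illustration of the rules above (keys and values are examples only):
  //   parseKeyValueArgs(Seq("retries=3", "path=/a=b"))             // -> {retries=3, path=/a=b} (value may contain '=')
  //   parseKeyValueArgs(Seq("flag"))                               // -> {flag=}  (missing value accepted by default)
  //   parseKeyValueArgs(Seq("flag"), acceptMissingValue = false)   // -> IllegalArgumentException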
/**
* Merge the options into {@code props} for key {@code key}, with the following precedence, from high to low:
* 1) if {@code spec} is specified on {@code options} explicitly, use the value;
* 2) if {@code props} already has {@code key} set, keep it;
* 3) otherwise, use the default value of {@code spec}.
* A {@code null} value means to remove {@code key} from the {@code props}.
*/
def maybeMergeOptions[V](props: Properties, key: String, options: OptionSet, spec: OptionSpec[V]) {
if (options.has(spec) || !props.containsKey(key)) {
val value = options.valueOf(spec)
if (value == null)
props.remove(key)
else
props.put(key, value.toString)
}
}
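  // Precedence sketch for maybeMergeOptions (hypothetical key "linger.ms"):
  //   option given explicitly on the command line -> its value wins, whatever props held
  //   option absent, props already has the key    -> the existing props value is kept
  //   option absent, key absent from props        -> the spec's default is used (null removes the key)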
}
|
KevinLiLu/kafka
|
core/src/main/scala/kafka/utils/CommandLineUtils.scala
|
Scala
|
apache-2.0
| 5,310 |
package benchmark.controllers
import com.twitter.finagle.http.{Request, Response}
import com.twitter.finatra.http.{Controller => HttpController}
import com.twitter.io.Buf
import com.twitter.util.Future
class Controller extends HttpController {
private[this] val helloWorldText = "Hello, World!"
private[this] val helloWorldBuf: Buf = Buf.Utf8(helloWorldText)
get("/json") { request: Request =>
Map("message" -> helloWorldText)
}
get("/plaintext") { request: Request =>
val resp = Response()
resp.content = helloWorldBuf
resp.contentType = response.plainTextContentType
Future.value(resp)
}
}
|
sanjoydesk/FrameworkBenchmarks
|
frameworks/Scala/finatra/src/main/scala/benchmark/controllers/Controller.scala
|
Scala
|
bsd-3-clause
| 628 |
package lila.common
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.stm.Ref
final class WindowCount(timeout: FiniteDuration) {
private val counter = Ref((0, (0, nowMillis)))
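  // Inferred from the transform in add: (count of the previous half-window,
  // (count of the current half-window, millis at which the current half-window started)).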
private val tms = timeout.toMillis
def add {
val current = nowMillis
counter.single.transform {
case (precedent, (count, millis)) if current > millis + tms => (0, (1, current))
case (precedent, (count, millis)) if current > millis + (tms / 2) => (count, (1, current))
case (precedent, (count, millis)) => (precedent, (count + 1, millis))
}
}
def get = {
val current = nowMillis
val (precedent, (count, millis)) = counter.single()
val since = current - millis
if (since <= tms) ((count + precedent) * 1000) / (since + tms / 2)
else 0
  }.toInt
}
|
Happy0/lila
|
modules/common/src/main/WindowCount.scala
|
Scala
|
mit
| 848 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2013-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package rewriting
/**
* Abstract syntax constructs that are common to all nominal rewriters.
* These definitions need to be separate from the NominalRewriter class
* so that the classes here don't get an outer field referring to an
* instance of that class.
*/
object NominalTree {
/**
* A name comprising a base string with an optional integer index. The
* index defaults to being omitted.
*/
case class Name (base : String, opti : Option[Int] = None) {
override def toString : String = base + opti.getOrElse ("")
}
/**
* A generic abstract binding of a name in a term.
*/
case class Bind (name : Name, term : Any)
/**
* A transposition of two names is just a tuple.
*/
type Trans = (Name, Name)
}
/**
* An extension of strategy-based term rewriting with special support for
* nominal rewriting along the lines of FreshML and the FreshLib library
* for Haskell. See Scrap your Nameplate, James Cheney, ICFP 2005 for a
* description of the ideas and the FreshLib library.
*/
class NominalRewriter extends Rewriter {
import NominalTree._
import org.kiama.util.Counter
/**
* Swap two names (given by `tr`) throughout a term `t`.
*/
def swap[T] (tr : Trans) (t : T) : T = {
val s = everywhere (rule[Name] {
case n => if (n == tr._1) tr._2
else if (n == tr._2) tr._1
else n
})
rewrite (s) (t)
}
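    // E.g. (a hypothetical two-name term): swap((Name("a"), Name("b")))(Bind(Name("a"), Name("b")))
    // yields Bind(Name("b"), Name("a")): every occurrence is exchanged, bound or free.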
/**
* Is the name `a` fresh (not free) in a term?
*/
def fresh (a : Name) (t : Any) : Boolean =
t match {
case n : Name => a != n
case Bind (b, t) => (a == b) || fresh (a) (t)
case p : Product => p.productIterator.forall (c => fresh (a) (c))
case _ => true
}
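    // E.g. fresh(Name("a"))(Name("a")) is false (a free occurrence), while
    // fresh(Name("a"))(Bind(Name("a"), Name("a"))) is true, since the binder captures it.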
/**
* Alpha equivalence of two terms.
*/
def alphaequiv (a1 : Any, a2 : Any) : Boolean =
(a1, a2) match {
case (n1 : Name, n2 : Name) =>
n1 == n2
case (Bind (a, x), Bind (b, y)) =>
((a == b) && alphaequiv (x, y)) ||
(fresh (a) (y) && alphaequiv (x, swap (a, b) (y)))
case (p1 : Product, p2 : Product) =>
(p1.productPrefix == p2.productPrefix) &&
p1.productIterator.zip (p2.productIterator).forall {
case (x,y) => alphaequiv (x,y)
}
case _ =>
a1 == a2
}
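    // E.g. Bind(Name("a"), Name("a")) is alpha-equivalent to Bind(Name("b"), Name("b"))
    // (via the fresh/swap branch above), but not to Bind(Name("b"), Name("a")).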
/**
* An extractor pattern for terms that contain a single name child.
*/
object HasVar {
def unapply (t : Product) : Option[Name] =
if (t.productArity == 1)
t.productElement (0) match {
case n : Name => Some (n)
case _ => None
}
else
None
}
/**
* Counter to use to produce unique names.
*/
val uniqueNameCounter = new Counter
/**
* Make a unique name using an old name as the base.
*/
def genName (oldname : Name) : Name = {
Name (oldname.base, Some (uniqueNameCounter.next ()))
}
/**
* Alternative extractor for Bind constructs. Decomposes an abstraction
* returning the components after freshening the bound name.
*/
object Binding {
def unapply (b : Bind) : Option[(Name,Any)] = {
val n = genName (b.name)
Some ((n, swap (n, b.name) (b.term)))
}
}
/**
* Substitution of `t1` for free occurrences of `n` in a term.
*/
def subst[T] (n : Name, t1 : Any) : T => T =
rewrite (alltd (
// We use strategyf here instead of rule since rule uses the
// isDefinedAt method of its argument and we want to avoid
// the pattern matching function being called more than once
// due to the side-effect in Binding.
strategyf {
case HasVar (m) if n == m =>
Some (t1)
case Binding (a, x) =>
val y = subst (n, t1) (x)
Some (Bind (a, y))
case _ =>
None
}
))
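    // Sketch with a hypothetical one-name term type `case class Var(n: Name)` (matched by HasVar):
    //   subst(Name("x"), Var(Name("y"))) (Bind(Name("z"), Var(Name("x"))))
    // replaces the free x under the (freshened) binder z and leaves bound names untouched.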
/**
* Free variables in an term.
*/
def fv (t : Any) : Set[Name] =
t match {
case n : Name => Set (n)
case Bind (b, t) => fv (t) - b
case p : Product => p.productIterator.foldLeft (Set[Name] ()) {
case (s, c) => s | fv (c)
}
case _ => Set ()
}
}
|
adeze/kiama
|
library/src/org/kiama/rewriting/NominalRewriter.scala
|
Scala
|
gpl-3.0
| 5,524 |
import controllers.MicroServiceError.MicroServiceErrorRefererCacheKey
import models.BusinessChooseYourAddressFormModel.BusinessChooseYourAddressCacheKey
import models.DisposeCacheKeyPrefix.CookiePrefix
import models.EnterAddressManuallyFormModel.EnterAddressManuallyCacheKey
import models.VehicleLookupFormModel.VehicleLookupFormModelCacheKey
import uk.gov.dvla.vehicles.presentation.common
import common.model.BruteForcePreventionModel.bruteForcePreventionViewModelCacheKey
import common.model.MicroserviceResponseModel.MsResponseCacheKey
import common.model.SetupTradeDetailsFormModel.setupTradeDetailsCacheKey
import common.model.TraderDetailsModel.traderDetailsCacheKey
import common.model.VehicleAndKeeperDetailsModel
import VehicleAndKeeperDetailsModel.vehicleAndKeeperLookupDetailsCacheKey
package object models {
final val IdentifierCacheKey = s"${CookiePrefix}identifier"
final val DisposeOnlyCacheKeys = Set(
models.DisposeFormModel.DisposeFormModelCacheKey,
models.DisposeFormModel.DisposeFormTransactionIdCacheKey,
models.DisposeFormModel.DisposeFormTimestampIdCacheKey,
models.DisposeFormModel.DisposeFormRegistrationNumberCacheKey
)
final val PrivateDisposeOnlyCacheKeys = Set(
models.PrivateDisposeFormModel.PrivateDisposeFormModelCacheKey,
models.PrivateDisposeFormModel.DisposeFormTransactionIdCacheKey,
models.PrivateDisposeFormModel.DisposeFormTimestampIdCacheKey,
models.PrivateDisposeFormModel.DisposeFormRegistrationNumberCacheKey
)
// Set of cookies related to a single vehicle disposal. Removed once the vehicle is successfully disposed
final val VehicleCacheKeys = Set(
bruteForcePreventionViewModelCacheKey,
vehicleAndKeeperLookupDetailsCacheKey,
MsResponseCacheKey,
VehicleLookupFormModelCacheKey
)
final val DisposeCacheKeys = VehicleCacheKeys ++ DisposeOnlyCacheKeys
final val PrivateDisposeCacheKeys = VehicleCacheKeys ++ PrivateDisposeOnlyCacheKeys
// Set of cookies that store the trade details data. These are retained after a successful disposal
// so the trader does not have to re-enter their details when disposing subsequent vehicles
final val TradeDetailsCacheKeys = Set(setupTradeDetailsCacheKey,
traderDetailsCacheKey,
BusinessChooseYourAddressCacheKey,
EnterAddressManuallyCacheKey)
// The full set of cache keys. These are removed at the start of the process in the "before_you_start" page
final val AllCacheKeys = TradeDetailsCacheKeys.++(DisposeCacheKeys)
.++(Set(models.DisposeFormModel.PreventGoingToDisposePageCacheKey))
.++(Set(models.DisposeFormModel.DisposeOccurredCacheKey))
.+(MicroServiceErrorRefererCacheKey)
.+(IdentifierCacheKey)
final val PrivateAllCacheKeys = TradeDetailsCacheKeys.++(PrivateDisposeCacheKeys)
.++(Set(models.PrivateDisposeFormModel.PreventGoingToDisposePageCacheKey))
.++(Set(models.PrivateDisposeFormModel.DisposeOccurredCacheKey))
.+(MicroServiceErrorRefererCacheKey)
.+(IdentifierCacheKey)
}
|
dvla/vehicles-online
|
app/models/package.scala
|
Scala
|
mit
| 3,004 |
package japgolly.scalajs.react.test
import scala.scalajs.js.Object
import ReactTestUtils.Simulate
/**
* Allows composition and abstraction of `ReactTestUtils.Simulate` procedures.
*/
class Simulation(_run: (() => ReactOrDomNode) => Unit) {
def run(n: => ReactOrDomNode): Unit =
_run(() => n)
def andThen(f: Simulation) =
new Simulation(n => { _run(n); f.run(n()) })
@inline final def >> (f: Simulation) = this andThen f
@inline final def compose(f: Simulation) = f andThen this
final def runN(cs: ReactOrDomNode*): Unit =
cs foreach (run(_))
}
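// A minimal composition sketch (`node` is any ReactOrDomNode obtained elsewhere):
//   val sim = Simulation.focus >> Simulation.keyDown >> Simulation.blur
//   sim.run(node) // fires focus, then keyDown, then blur on the same node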
object Simulation {
def apply(run: (=> ReactOrDomNode) => Unit): Simulation =
new Simulation(n => run(n()))
// Don't use default arguments - they force parentheses on to caller.
// Eg. Simulation.blur >> Simulation.focus becomes Simulation.blur() >> Simulation.focus(). Yuk.
def beforeInput = Simulation(Simulate.beforeInput (_))
def blur = Simulation(Simulate.blur (_))
def change = Simulation(Simulate.change (_))
def click = Simulation(Simulate.click (_))
def compositionEnd = Simulation(Simulate.compositionEnd (_))
def compositionStart = Simulation(Simulate.compositionStart (_))
def compositionUpdate= Simulation(Simulate.compositionUpdate(_))
def contextMenu = Simulation(Simulate.contextMenu (_))
def copy = Simulation(Simulate.copy (_))
def cut = Simulation(Simulate.cut (_))
def doubleClick = Simulation(Simulate.doubleClick (_))
def drag = Simulation(Simulate.drag (_))
def dragEnd = Simulation(Simulate.dragEnd (_))
def dragEnter = Simulation(Simulate.dragEnter (_))
def dragExit = Simulation(Simulate.dragExit (_))
def dragLeave = Simulation(Simulate.dragLeave (_))
def dragOver = Simulation(Simulate.dragOver (_))
def dragStart = Simulation(Simulate.dragStart (_))
def drop = Simulation(Simulate.drop (_))
def error = Simulation(Simulate.error (_))
def focus = Simulation(Simulate.focus (_))
def input = Simulation(Simulate.input (_))
def keyDown = Simulation(Simulate.keyDown (_))
def keyPress = Simulation(Simulate.keyPress (_))
def keyUp = Simulation(Simulate.keyUp (_))
def load = Simulation(Simulate.load (_))
def mouseDown = Simulation(Simulate.mouseDown (_))
def mouseEnter = Simulation(Simulate.mouseEnter (_))
def mouseLeave = Simulation(Simulate.mouseLeave (_))
def mouseMove = Simulation(Simulate.mouseMove (_))
def mouseOut = Simulation(Simulate.mouseOut (_))
def mouseOver = Simulation(Simulate.mouseOver (_))
def mouseUp = Simulation(Simulate.mouseUp (_))
def paste = Simulation(Simulate.paste (_))
def reset = Simulation(Simulate.reset (_))
def scroll = Simulation(Simulate.scroll (_))
def select = Simulation(Simulate.select (_))
def submit = Simulation(Simulate.submit (_))
def touchCancel = Simulation(Simulate.touchCancel (_))
def touchEnd = Simulation(Simulate.touchEnd (_))
def touchMove = Simulation(Simulate.touchMove (_))
def touchStart = Simulation(Simulate.touchStart (_))
def wheel = Simulation(Simulate.wheel (_))
def beforeInput (eventData: Object) = Simulation(Simulate.beforeInput (_, eventData))
def blur (eventData: Object) = Simulation(Simulate.blur (_, eventData))
def change (eventData: Object) = Simulation(Simulate.change (_, eventData))
def click (eventData: Object) = Simulation(Simulate.click (_, eventData))
def compositionEnd (eventData: Object) = Simulation(Simulate.compositionEnd (_, eventData))
def compositionStart (eventData: Object) = Simulation(Simulate.compositionStart (_, eventData))
def compositionUpdate(eventData: Object) = Simulation(Simulate.compositionUpdate(_, eventData))
def contextMenu (eventData: Object) = Simulation(Simulate.contextMenu (_, eventData))
def copy (eventData: Object) = Simulation(Simulate.copy (_, eventData))
def cut (eventData: Object) = Simulation(Simulate.cut (_, eventData))
def doubleClick (eventData: Object) = Simulation(Simulate.doubleClick (_, eventData))
def drag (eventData: Object) = Simulation(Simulate.drag (_, eventData))
def dragEnd (eventData: Object) = Simulation(Simulate.dragEnd (_, eventData))
def dragEnter (eventData: Object) = Simulation(Simulate.dragEnter (_, eventData))
def dragExit (eventData: Object) = Simulation(Simulate.dragExit (_, eventData))
def dragLeave (eventData: Object) = Simulation(Simulate.dragLeave (_, eventData))
def dragOver (eventData: Object) = Simulation(Simulate.dragOver (_, eventData))
def dragStart (eventData: Object) = Simulation(Simulate.dragStart (_, eventData))
def drop (eventData: Object) = Simulation(Simulate.drop (_, eventData))
def error (eventData: Object) = Simulation(Simulate.error (_, eventData))
def focus (eventData: Object) = Simulation(Simulate.focus (_, eventData))
def input (eventData: Object) = Simulation(Simulate.input (_, eventData))
def keyDown (eventData: Object) = Simulation(Simulate.keyDown (_, eventData))
def keyPress (eventData: Object) = Simulation(Simulate.keyPress (_, eventData))
def keyUp (eventData: Object) = Simulation(Simulate.keyUp (_, eventData))
def load (eventData: Object) = Simulation(Simulate.load (_, eventData))
def mouseDown (eventData: Object) = Simulation(Simulate.mouseDown (_, eventData))
def mouseEnter (eventData: Object) = Simulation(Simulate.mouseEnter (_, eventData))
def mouseLeave (eventData: Object) = Simulation(Simulate.mouseLeave (_, eventData))
def mouseMove (eventData: Object) = Simulation(Simulate.mouseMove (_, eventData))
def mouseOut (eventData: Object) = Simulation(Simulate.mouseOut (_, eventData))
def mouseOver (eventData: Object) = Simulation(Simulate.mouseOver (_, eventData))
def mouseUp (eventData: Object) = Simulation(Simulate.mouseUp (_, eventData))
def paste (eventData: Object) = Simulation(Simulate.paste (_, eventData))
def reset (eventData: Object) = Simulation(Simulate.reset (_, eventData))
def scroll (eventData: Object) = Simulation(Simulate.scroll (_, eventData))
def select (eventData: Object) = Simulation(Simulate.select (_, eventData))
def submit (eventData: Object) = Simulation(Simulate.submit (_, eventData))
def touchCancel (eventData: Object) = Simulation(Simulate.touchCancel (_, eventData))
def touchEnd (eventData: Object) = Simulation(Simulate.touchEnd (_, eventData))
def touchMove (eventData: Object) = Simulation(Simulate.touchMove (_, eventData))
def touchStart (eventData: Object) = Simulation(Simulate.touchStart (_, eventData))
def wheel (eventData: Object) = Simulation(Simulate.wheel (_, eventData))
// Helpers for common scenarios
def focusSimBlur(s: Simulation) =
focus >> s >> blur
def focusChangeBlur(newValue: String) =
focusSimBlur(ChangeEventData(value = newValue).simulation)
}
|
beni55/scalajs-react
|
test/src/main/scala/japgolly/scalajs/react/test/Simulation.scala
|
Scala
|
apache-2.0
| 8,166 |
package org.tomahna.scalaresume.resume
import scala.io.Source
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import play.api.libs.json.Json
import java.time.LocalDate
@RunWith(classOf[JUnitRunner])
class PublicationTest extends FlatSpec with Matchers {
"Publication" should "be parsed" in {
val stream = getClass.getResourceAsStream("publication.json")
val content = Source.fromInputStream(stream, "utf-8").mkString
stream.close()
Json.parse(content).as[Publication] shouldBe Publication(
"Video compression for 3d media",
"Hooli",
Some(LocalDate.parse("2014-10-01")),
"http://en.wikipedia.org/wiki/Silicon_Valley_(TV_series)",
"Innovative middle-out compression algorithm that changes the way we store data."
)
}
}
|
Tomahna/scalaresume
|
resume/src/test/scala/org/tomahna/scalaresume/resume/PublicationTest.scala
|
Scala
|
mit
| 858 |
import org.scalatest.FlatSpec
import scalax.file.Path
import edu.udlap.graph.Graph
import edu.udlap.analysis._
class SNASpecs extends FlatSpec {
val testGraphPath = "target/SNASpecsDB"
val graph = new Graph(testGraphPath)
// Insert all persons
graph.insertPerson("ID01", "Franco")
graph.insertPerson("ID02", "Francisco")
graph.insertPerson("ID03", "Ernesto")
graph.insertPerson("ID04", "Esteban")
graph.insertPerson("ID05", "Ofelia")
graph.insertPerson("ID06", "Alfredo")
graph.insertPerson("ID07", "Juan")
graph.insertPerson("ID08", "Pedro")
graph.insertPerson("ID09", "Susana")
graph.insertPerson("ID10", "Julia")
// Relations of Franco
graph.relatePersons("ID01", "ID02")
graph.relatePersons("ID01", "ID03")
graph.relatePersons("ID01", "ID04")
graph.relatePersons("ID01", "ID05")
// Relations of Ofelia
graph.relatePersons("ID05", "ID06")
graph.relatePersons("ID05", "ID07")
graph.relatePersons("ID05", "ID08")
// Relations of Susana
graph.relatePersons("ID09", "ID04")
graph.relatePersons("ID09", "ID06")
// Relations of Ernesto
graph.relatePersons("ID03", "ID10")
val degreeOfID01 = 4.0f/9.0f
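  // Inferred from the fixture above: ID01 relates to 4 of the 9 other nodes
  // (10 persons minus itself), hence 4.0f/9.0f.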
val betweennessOfID01 = 23.0f
val closenessOfID05 = 1.6666666f
"Degree.of(graph, u)" should "compute the right centrality value" in {
val value = Degree.of(graph, "ID01")
assert(value == degreeOfID01)
}
"Betweenness.of(graph, u)" should "compute the right centrality value" in {
val value = Betweenness.of(graph, "ID01")
assert(value == betweennessOfID01)
}
"Closeness.of(graph, u)" should "compute the right centrality value" in {
val value = Closeness.of(graph, "ID05")
assert(value == closenessOfID05)
}
"TEST" should "destroy it's testing database" in {
graph.destroy
val testPath = Path.fromString(testGraphPath)
assert(!testPath.exists)
}
}
|
Innova4DReaumobile/census_prototype
|
src/test/scala/SNASpecs.scala
|
Scala
|
mit
| 1,871 |
package models
import com.github.aselab.activerecord._
import com.github.aselab.activerecord.dsl._
object Tables extends ActiveRecordTables with PlaySupport {
val users = table[User]
}
|
xdougx/scala-activerecord
|
play2Sbt/src/sbt-test/generator/simple/app/models/Tables.scala
|
Scala
|
mit
| 189 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package impl
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IStubElementType, StubElement}
import com.intellij.reference.SoftReference
import com.intellij.util.io.StringRef
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScIdList, ScPatternList}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
/**
* User: Alexander Podkhalyuzin
* Date: 18.10.2008
*/
class ScVariableStubImpl[ParentPsi <: PsiElement](parent: StubElement[ParentPsi],
elemType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement])
extends StubBaseWrapper[ScVariable](parent, elemType) with ScVariableStub {
private var names: Array[StringRef] = _
private var declaration: Boolean = false
private var typeText: StringRef = _
private var bodyText: StringRef = _
private var containerText: StringRef = _
private var myTypeElement: SoftReference[Option[ScTypeElement]] = new SoftReference(null)
private var myBodyExpression: SoftReference[Option[ScExpression]] = new SoftReference(null)
private var myIds: SoftReference[Option[ScIdList]] = new SoftReference(null)
private var myPatterns: SoftReference[Option[ScPatternList]] = new SoftReference(null)
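  // Each SoftReference caches the last PSI fragment parsed from the stub text; it is rebuilt
  // when the GC has cleared it or when the cached element's context is no longer this stub's
  // PSI (see the `getContext eq getPsi` checks below).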
private var local: Boolean = false
def this(parent: StubElement[ParentPsi],
elemType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement],
names: Array[String], isDeclaration: Boolean, typeText: String, bodyText: String,
containerText: String, isLocal: Boolean) = {
this(parent, elemType.asInstanceOf[IStubElementType[StubElement[PsiElement], PsiElement]])
this.names = for (name <- names) yield StringRef.fromString(name)
this.declaration = isDeclaration
this.typeText = StringRef.fromString(typeText)
this.bodyText = StringRef.fromString(bodyText)
this.containerText = StringRef.fromString(containerText)
local = isLocal
}
def isLocal: Boolean = local
def getNames: Array[String] = for (name <- names) yield StringRef.toString(name) //todo: remove it
def isDeclaration = declaration
def getPatternsContainer: Option[ScPatternList] = {
if (isDeclaration) return None
val patterns = myPatterns.get
if (patterns != null && (patterns.isEmpty || (patterns.get.getContext eq getPsi))) return patterns
val res: Option[ScPatternList] =
if (getBindingsContainerText != "") {
Some(ScalaPsiElementFactory.createPatterListFromText(getBindingsContainerText, getPsi, null))
} else None
myPatterns = new SoftReference[Option[ScPatternList]](res)
res
}
def getTypeText: String = StringRef.toString(typeText)
def getBodyExpr: Option[ScExpression] = {
val body = myBodyExpression.get
if (body != null && (body.isEmpty || (body.get.getContext eq getPsi))) return body
val res: Option[ScExpression] =
if (getBodyText != "") Some(ScalaPsiElementFactory.createExpressionWithContextFromText(getBodyText, getPsi, null))
else None
myBodyExpression = new SoftReference[Option[ScExpression]](res)
res
}
def getTypeElement: Option[ScTypeElement] = {
val typeElement = myTypeElement.get
if (typeElement != null && (typeElement.isEmpty || (typeElement.get.getContext eq getPsi))) return typeElement
val res: Option[ScTypeElement] =
if (getTypeText != "") Some(ScalaPsiElementFactory.createTypeElementFromText(getTypeText, getPsi, null))
else None
myTypeElement = new SoftReference(res)
res
}
def getIdsContainer: Option[ScIdList] = {
if (!isDeclaration) return None
val ids = myIds.get
if (ids != null && (ids.isEmpty || (ids.get.getContext eq getPsi))) return ids
val res: Option[ScIdList] =
if (getBindingsContainerText != "") {
Some(ScalaPsiElementFactory.createIdsListFromText(getBindingsContainerText, getPsi, null))
} else None
myIds = new SoftReference[Option[ScIdList]](res)
res
}
def getBodyText: String = StringRef.toString(bodyText)
def getBindingsContainerText: String = StringRef.toString(containerText)
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScVariableStubImpl.scala
|
Scala
|
apache-2.0
| 4,402 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.services.reports
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import scala.collection._
import com.normation.inventory.domain.NodeId
import com.normation.rudder.domain.policies.RuleId
import com.normation.rudder.domain.policies.DirectiveId
import org.joda.time.DateTime
import com.normation.rudder.domain.reports.DirectiveExpectedReports
import com.normation.rudder.domain.reports._
import com.normation.rudder.reports.ComplianceMode
import com.normation.rudder.reports.FullCompliance
import com.normation.rudder.repository.NodeConfigIdInfo
import com.normation.rudder.reports.ChangesOnly
@RunWith(classOf[JUnitRunner])
class ExecutionBatchTest extends Specification {
private implicit def str2directiveId(s:String) = DirectiveId(s)
private implicit def str2ruleId(s:String) = RuleId(s)
private implicit def str2nodeId(s:String) = NodeId(s)
private implicit def str2nodeConfigIds(ss:Seq[String]) = ss.map(s => (NodeId(s), Some(NodeConfigId("version_" + s)))).toMap
def getNodeStatusReportsByRule(
ruleExpectedReports : RuleExpectedReports
, reportsParam : Seq[Reports]
// this is the agent execution interval, in minutes
, complianceMode : ComplianceMode
): Seq[RuleNodeStatusReport] = {
(for {
directiveOnNode <- ruleExpectedReports.directivesOnNodes
(nodeId, version) <- directiveOnNode.nodeConfigurationIds
} yield {
val runTime = reportsParam.headOption.map( _.executionTimestamp).getOrElse(DateTime.now)
val info = NodeConfigIdInfo(NodeConfigId("version1"), DateTime.now.minusDays(1), None)
val runInfo = complianceMode match {
case FullCompliance => CheckCompliance(runTime, info)
case ChangesOnly(heartbeatPeriod) => CheckChanges(runTime, info)
}
val emptyPrevious = scala.collection.immutable.Set[com.normation.rudder.domain.reports.RuleExpectedReports]()
ExecutionBatch.getNodeStatusReports(nodeId, runInfo, Seq(ruleExpectedReports), reportsParam, emptyPrevious)
}).flatten
}
val getNodeStatusByRule = (getNodeStatusReportsByRule _).tupled
//Test the component part
"A component, with two different keys" should {
val executionTimestamp = new DateTime()
val reports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "foo", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "bar", executionTimestamp, "message")
)
val badReports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "foo", executionTimestamp, "message"),
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "foo", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "bar", executionTimestamp, "message")
)
val expectedComponent = new ComponentExpectedReport(
"component"
, 2
, Seq("foo", "bar")
, Seq("foo", "bar")
)
val getComponentStatus = (r:Seq[Reports]) => ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, r, NoAnswerReportType)
"return a component globally repaired " in {
getComponentStatus(reports).compliance === ComplianceLevel(success = 1, repaired = 1)
}
"return a component with two key values " in {
getComponentStatus(reports).componentValues.size === 2
}
"return a component with the key values foo which is repaired " in {
getComponentStatus(reports).componentValues("foo").messages.size === 1 and
getComponentStatus(reports).componentValues("foo").messages.head.reportType === RepairedReportType
}
"return a component with the key values bar which is a success " in {
getComponentStatus(reports).componentValues("bar").messages.size === 1 and
getComponentStatus(reports).componentValues("bar").messages.head.reportType === SuccessReportType
}
"only some missing reports mark them as missing, not unexpected" in {
getComponentStatus(badReports).compliance === ComplianceLevel(success = 1, unexpected = 2)
}
"with bad reports return a component with two key values " in {
getComponentStatus(badReports).componentValues.size === 2
}
"with bad reports return a component with the key values foo which is unknwon " in {
getComponentStatus(badReports).componentValues("foo").messages.size === 2 and
getComponentStatus(badReports).componentValues("foo").messages.head.reportType === UnexpectedReportType
}
"with bad reports return a component with the key values bar which is a success " in {
getComponentStatus(badReports).componentValues("bar").messages.size === 1 and
getComponentStatus(badReports).componentValues("bar").messages.head.reportType === SuccessReportType
}
}
// Test the component part
"A component, with a None keys" should {
val executionTimestamp = new DateTime()
val reports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "None", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "None", executionTimestamp, "message")
)
val badReports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "None", executionTimestamp, "message"),
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "None", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "None", executionTimestamp, "message")
)
val expectedComponent = new ComponentExpectedReport(
"component"
, 2
, Seq("None", "None")
, Seq("None", "None")
)
val withGood = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, reports, NoAnswerReportType)
val withBad = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, badReports, NoAnswerReportType)
"return a component globally repaired " in {
withGood.compliance === ComplianceLevel(success = 1, repaired = 1)
}
"return a component with two key values " in {
withGood.componentValues.size === 1
}
"return a component with both None key repaired " in {
withGood.componentValues("None").messages.size === 2 and
withGood.componentValues("None").compliance === ComplianceLevel(success = 1, repaired = 1)
}
"with bad reports return a component globally unexpected " in {
withBad.compliance === ComplianceLevel(unexpected = 3)
}
"with bad reports return a component with two key values " in {
withBad.componentValues.size === 1
}
"with bad reports return a component with both None key unexpected " in {
withBad.componentValues("None").messages.size === 3 and
withBad.componentValues("None").messages.forall(x => x.reportType === UnexpectedReportType)
}
}
// Test the component part
"A component, with a cfengine keys" should {
val executionTimestamp = new DateTime()
val reports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message")
)
val badReports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message")
)
val expectedComponent = new ComponentExpectedReport("component", 2
, Seq("${sys.bla}", "${sys.foo}")
, Seq("${sys.bla}", "${sys.foo}")
)
val withGood = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, reports, NoAnswerReportType)
    val withBad = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, badReports, NoAnswerReportType)
"return a component globally repaired " in {
withGood.compliance === ComplianceLevel(success = 1, repaired = 1)
}
"return a component with two key values " in {
withGood.componentValues.size === 2
}
"return a component with both cfengine keys repaired " in {
withGood.componentValues("${sys.bla}").messages.size === 1
}
}
// Test the component part
"A component, with distinguishable keys" should {
val executionTimestamp = new DateTime()
val reports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "bar", executionTimestamp, "message")
)
val badReports = Seq[Reports](
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultRepairedReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "bar", executionTimestamp, "message")
)
val expectedComponent = new ComponentExpectedReport("component", 2
, Seq("${sys.bla}", "bar")
, Seq("${sys.bla}", "bar")
)
val withGood = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, reports, NoAnswerReportType)
val withBad = ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, badReports, NoAnswerReportType)
"return a component globally repaired " in {
withGood.compliance === ComplianceLevel(success = 1, repaired = 1)
}
"return a component with two key values " in {
withGood.componentValues.size === 2
}
"return a component with the cfengine keys repaired " in {
withGood.componentValues("${sys.bla}").messages.size === 1 and
withGood.componentValues("${sys.bla}").messages.forall(x => x.reportType === RepairedReportType)
}
"return a component with the bar key success " in {
withGood.componentValues("bar").messages.size === 1 and
withGood.componentValues("bar").messages.forall(x => x.reportType === SuccessReportType)
}
"with some bad reports mark them as unexpected (because the check is not done in checkExpectedComponentWithReports" in {
withBad.compliance === ComplianceLevel(success = 1, unexpected = 1)
}
"with bad reports return a component with two key values " in {
withBad.componentValues.size === 2
}
"with bad reports return a component with bar as a success " in {
withBad.componentValues("bar").messages.size === 1 and
withBad.componentValues("bar").messages.forall(x => x.reportType === SuccessReportType)
}
"with bad reports return a component with the cfengine key as unexpected " in {
withBad.componentValues("${sys.bla}").messages.size === 1 and
withBad.componentValues("${sys.bla}").messages.forall(x => x.reportType === UnexpectedReportType)
}
}
"A detailed execution Batch, with one component, cardinality one, one node" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one")
, Seq(
DirectiveExpectedReports("policy"
, Seq(new ComponentExpectedReport("component", 1, Seq("value"), Seq() ))
)
)
))
)
, Seq[Reports](new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value", DateTime.now(), "message"))
, FullCompliance
)
val nodeStatus = (getNodeStatusReportsByRule _).tupled(param)
"have one detailed reports when we create it with one report" in {
nodeStatus.size ==1
}
"have one detailed success node when we create it with one success report" in {
nodeStatus.head.nodeId === str2nodeId("one")
}
"have one detailed rule success directive when we create it with one success report" in {
nodeStatus.head.directives.head._1 === DirectiveId("policy")
}
"have no detailed rule non-success directive when we create it with one success report" in {
AggregatedStatusReport(nodeStatus.toSet).compliance === ComplianceLevel(success = 1)
}
}
"A detailed execution Batch, with one component, cardinality one, wrong node" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one")
, Seq(
DirectiveExpectedReports("policy"
, Seq(new ComponentExpectedReport("component", 1, Seq("value"), Seq() ))
)
)
))
)
, Seq[Reports](new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value",DateTime.now(), "message"))
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have one detailed reports when we create it with one report" in {
nodeStatus.size === 1
}
"have a pending node when we create it with one wrong success report right now" in {
AggregatedStatusReport(nodeStatus.toSet).compliance === ComplianceLevel(missing = 1) and
nodeStatus.head.nodeId === str2nodeId("one")
}
}
"A detailed execution Batch, with one component, cardinality one, one node" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one")
, Seq(
DirectiveExpectedReports("policy"
, Seq(new ComponentExpectedReport("component", 1, Seq("value"), Seq() ))
)
)
))
)
, Seq[Reports](
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message")
, new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message")
)
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have one detailed reports when we create it" in {
nodeStatus.size ==1
}
"have one unexpected node when we create it with one success report" in {
AggregatedStatusReport(nodeStatus.toSet).compliance === ComplianceLevel(unexpected = 2) and
nodeStatus.head.nodeId === str2nodeId("one")
}
}
"A detailed execution Batch, with one component, cardinality one, two nodes, including one not responding" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one", "two")
, Seq(
DirectiveExpectedReports("policy"
, Seq(new ComponentExpectedReport("component", 1, Seq("value"), Seq() ))
)
)
))
)
, Seq[Reports](new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message"))
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have two detailed reports when we create it" in {
nodeStatus.size === 2
}
"have one success, and one pending node, in the component detail of the rule" in {
AggregatedStatusReport(nodeStatus.toSet).compliance === ComplianceLevel(success = 1, missing = 1)
}
}
"A detailed execution Batch, with one component, cardinality one, three nodes, including one not responding" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one", "two", "three")
, Seq(
DirectiveExpectedReports("policy"
, Seq(new ComponentExpectedReport("component", 1, Seq("value"), Seq() ))
)
)
))
)
, Seq[Reports](
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value", DateTime.now(), "message")
, new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value", DateTime.now(), "message")
)
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have three node rule report" in {
nodeStatus.size === 3
}
"have one detailed rule report with a 67% compliance" in {
AggregatedStatusReport(nodeStatus.toSet).compliance === ComplianceLevel(success = 2, missing = 1)
}
}
"A detailed execution Batch, with two directive, two component, cardinality one, three nodes, including one partly responding and one not responding" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one", "two", "three")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("value"), Seq() )
, new ComponentExpectedReport("component2", 1, Seq("value"), Seq() )
))
, DirectiveExpectedReports("policy2", Seq(
new ComponentExpectedReport("component", 1, Seq("value"), Seq() )
, new ComponentExpectedReport("component2", 1, Seq("value"), Seq() )
))
)
))
)
, Seq[Reports](
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "one", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "one", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "two", 12, "component", "value",DateTime.now(), "message")
)
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
val aggregated = AggregatedStatusReport(nodeStatus.toSet)
"have two detailed node rule report" in {
nodeStatus.size === 3
}
"have detailed rule report for policy of 67% (node 1 and 2), pending for node 3" in {
aggregated.directives("policy").compliance === ComplianceLevel(success = 4, missing = 2)
}
"have detailed rule report for policy2 of 33% (node1), missing (node2 and 3)" in {
aggregated.directives("policy2").compliance === ComplianceLevel(success = 3, missing = 3)
}
}
"A detailed execution Batch, with two directive, two component, cardinality three, three nodes, including two not responding" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one", "two", "three")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("value"), Seq() )
, new ComponentExpectedReport("component2", 1, Seq("value"), Seq() )
))
, DirectiveExpectedReports("policy2",Seq(
new ComponentExpectedReport("component", 1, Seq("value"), Seq() )
, new ComponentExpectedReport("component2", 1, Seq("value"), Seq() )
))
)
))
)
, Seq[Reports](
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "one", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "one", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component2", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy2", "two", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "three", 12, "component", "value",DateTime.now(), "message")
)
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
val aggregated = AggregatedStatusReport(nodeStatus.toSet)
"have 3 detailed node rule report" in {
nodeStatus.size === 3
}
"have detailed rule report for policy of 83%" in {
aggregated.directives("policy").compliance === ComplianceLevel(success = 5, missing = 1)
}
"have detailed rule report for policy2 of 33%" in {
aggregated.directives("policy2").compliance === ComplianceLevel(success = 3, missing = 3)
}
"have detailed rule report for policy-component of 100%" in {
aggregated.directives("policy").components("component").compliance === ComplianceLevel(success = 3)
}
"have detailed rule report for policy-component2 of 67%" in {
aggregated.directives("policy").components("component2").compliance === ComplianceLevel(success = 2, missing = 1)
}
"have detailed rule report for policy2-component2 of 33%" in {
aggregated.directives("policy2").components("component2").compliance === ComplianceLevel(success = 1, missing = 2)
}
}
"A detailed execution Batch, with two directive, two component, cardinality three, three nodes, including two not completely responding" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one", "two", "three")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("value", "value2", "value3"), Seq() )
))
)
))
)
, Seq[Reports](
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value2",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "one", 12, "component", "value3",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "two", 12, "component", "value2",DateTime.now(), "message"),
new ResultSuccessReport(DateTime.now(), "rule", "policy", "three", 12, "component", "value",DateTime.now(), "message")
)
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
val aggregated = AggregatedStatusReport(nodeStatus.toSet)
"have 3 detailed node rule report" in {
nodeStatus.size === 3
}
"have detailed rule report for policy of 67%" in {
aggregated.directives("policy").compliance === ComplianceLevel(success = 6, missing = 3)
}
"have detailed rule report for policy/component/value of 100%" in {
aggregated.directives("policy").components("component").componentValues("value").compliance ===
ComplianceLevel(success = 3)
}
"have detailed rule report for policy/component/value2 of 67%" in {
aggregated.directives("policy").components("component").componentValues("value2").compliance ===
ComplianceLevel(success = 2, missing = 1)
}
"have detailed rule report for policy/component/value3 of 33%" in {
aggregated.directives("policy").components("component").componentValues("value3").compliance ===
ComplianceLevel(success = 1, missing = 2)
}
}
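// The remaining cases exercise value matching when the expected component value
// contains CFEngine variables and/or escaped quotes. An unexpanded ${...} is
// presumably matched as a wildcard (so "${sys.workdir}/inputs/..." matches the
// expanded "/var/cfengine/inputs/..." the node reports back), and the quoting
// cases check that backslash-escaped quotes survive that comparison.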
"An execution Batch, with one component with a quote in its value, cardinality one, one node" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("one")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("""some\\"text"""), Seq("""some\\text""") )
))
)
))
)
, Seq[Reports](new ResultSuccessReport(new DateTime(), "rule", "policy", "one", 12, "component", """some\\"text""",new DateTime(), "message"))
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have one detailed reports when we create it with one report" in {
nodeStatus.size ===1
}
"have one detailed success node when we create it with one success report" in {
nodeStatus.head.nodeId === str2nodeId("one") and
nodeStatus.head.compliance.pc_success === 100
}
}
"An execution Batch, with one component, one node, but with a component value being a cfengine variable with {, and a an escaped quote as well" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("nodeId")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("""${sys.workdir}/inputs/\\"test"""), Seq() )
))
)
))
)
, Seq[Reports](new ResultSuccessReport(new DateTime(), "rule", "policy", "nodeId", 12, "component", """/var/cfengine/inputs/\\"test""", new DateTime(), "message"))
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have one detailed reports when we create it with one report" in {
nodeStatus.size ===1
}
"have one detailed success node when we create it with one success report" in {
nodeStatus.head.nodeId === str2nodeId("nodeId") and
nodeStatus.head.compliance.pc_success === 100
}
}
"An execution Batch, with one component, one node, but with a component value being a cfengine variable with {, and a quote as well" should {
val param = (
RuleExpectedReports(
"rule"
, 12
, Seq(DirectivesOnNodes(42
, Seq("nodeId")
, Seq(
DirectiveExpectedReports("policy", Seq(
new ComponentExpectedReport("component", 1, Seq("""${sys.workdir}/inputs/"test"""), Seq("""${sys.workdir}/inputs/"test""") )
))
)
))
)
, Seq[Reports](new ResultSuccessReport(new DateTime(), "rule", "policy", "nodeId", 12, "component", """/var/cfengine/inputs/"test""", new DateTime(), "message"))
, FullCompliance
)
val nodeStatus = getNodeStatusByRule(param)
"have one detailed reports when we create it with one report" in {
nodeStatus.size === 1
}
"have one detailed success node when we create it with one success report" in {
nodeStatus.head.nodeId === str2nodeId("nodeId") and
nodeStatus.head.compliance.pc_success === 100
}
}
// Test the component part - with NotApplicable
"A component, with two keys and NotApplicable reports" should {
val executionTimestamp = new DateTime()
val reports = Seq[Reports](
new ResultNotApplicableReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "/var/cfengine", executionTimestamp, "message"),
new ResultSuccessReport(executionTimestamp, "cr", "policy", "nodeId", 12, "component", "bar", executionTimestamp, "message")
)
val expectedComponent = new ComponentExpectedReport(
"component"
, 2
, Seq("/var/cfengine", "bar")
, Seq("/var/cfengine", "bar")
)
val getComponentStatus = (r:Seq[Reports]) => ExecutionBatch.checkExpectedComponentWithReports(expectedComponent, r, NoAnswerReportType)
"return a component globally success " in {
getComponentStatus(reports).compliance === ComplianceLevel(success = 1, notApplicable = 1)
}
"return a component with two key values " in {
getComponentStatus(reports).componentValues.size === 2
}
"return a component with the /var/cfengine in NotApplicable " in {
getComponentStatus(reports).componentValues("/var/cfengine").messages.size === 1 and
getComponentStatus(reports).componentValues("/var/cfengine").compliance.pc_notApplicable === 100
}
"return a component with the bar key success " in {
getComponentStatus(reports).componentValues("bar").messages.size == 1 and
getComponentStatus(reports).componentValues("bar").compliance.pc_success === 100
}
}
}
|
Kegeruneku/rudder
|
rudder-core/src/test/scala/com/normation/rudder/services/reports/ExecutionBatchTest.scala
|
Scala
|
agpl-3.0
| 31,846 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.busybees.streams.flows.reponsehandlers
import akka.http.scaladsl.model.HttpResponse
import akka.stream._
import com.fasterxml.jackson.databind.JsonNode
import com.fasterxml.jackson.databind.node.ObjectNode
import com.flipkart.connekt.busybees.models.GCMRequestTracker
import com.flipkart.connekt.commons.entities.MobilePlatform
import com.flipkart.connekt.commons.factories.{ConnektLogger, LogFile, ServiceFactory}
import com.flipkart.connekt.commons.helpers.CallbackRecorder._
import com.flipkart.connekt.commons.iomodels.MessageStatus.{GCMResponseStatus, InternalStatus}
import com.flipkart.connekt.commons.iomodels._
import com.flipkart.connekt.commons.metrics.Instrumented
import com.flipkart.connekt.commons.services.DeviceDetailsService
import com.flipkart.connekt.commons.utils.StringUtils._
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
class GCMResponseHandler(implicit m: Materializer, ec: ExecutionContext) extends PNProviderResponseHandler[(Try[HttpResponse], GCMRequestTracker)](96) with Instrumented {
override val map: ((Try[HttpResponse], GCMRequestTracker)) => Future[List[PNCallbackEvent]] = responseTrackerPair => Future(profile("map") {
val httpResponse = responseTrackerPair._1
val requestTracker = responseTrackerPair._2
val messageId = requestTracker.messageId
val appName = requestTracker.appName
val deviceIds = requestTracker.deviceId
val events = ListBuffer[PNCallbackEvent]()
val eventTS = System.currentTimeMillis()
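// The branches below mirror the GCM HTTP contract: a 200 carries a per-device
// "results" array in request order, where "message_id" marks success, an extra
// "registration_id" is a canonical token that should replace the one we sent, and
// "error" flags invalid or stale tokens; 400 means malformed JSON, 401 an
// authentication failure, and 5xx a retryable server-side error.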
httpResponse match {
case Success(r) =>
try {
val stringResponse = r.entity.getString(m)
ConnektLogger(LogFile.PROCESSORS).info(s"GCMResponseHandler received http response for: $messageId")
ConnektLogger(LogFile.PROCESSORS).trace(s"GCMResponseHandler received http response for: $messageId http response body: $stringResponse")
r.status.intValue() match {
case 200 =>
val responseBody = stringResponse.getObj[ObjectNode]
val deviceIdItr = deviceIds.iterator
responseBody.findValue("results").foreach(rBlock => {
val rDeviceId = deviceIdItr.next()
rBlock match {
case s if s.has("message_id") =>
if (s.has("registration_id"))
DeviceDetailsService.get(appName, rDeviceId).foreach(_.foreach(d => {
ConnektLogger(LogFile.PROCESSORS).info(s"GCMResponseHandler device token update notified on. $messageId of device: $rDeviceId")
DeviceDetailsService.update(d.deviceId, d.copy(token = s.get("registration_id").asText.trim))
}))
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.Received_HTTP)
events += PNCallbackEvent(messageId, requestTracker.clientId, rDeviceId, GCMResponseStatus.Received_HTTP, MobilePlatform.ANDROID, appName, requestTracker.contextId, s.get("message_id").asText(), eventTS)
case f if f.has("error") && List("InvalidRegistration", "NotRegistered").contains(f.get("error").asText.trim) =>
DeviceDetailsService.get(appName, rDeviceId).foreach {
_.foreach(device => if (device.osName == MobilePlatform.ANDROID.toString) {
ConnektLogger(LogFile.PROCESSORS).info(s"GCMResponseHandler device token invalid / not_found, deleting details of device: $rDeviceId.")
DeviceDetailsService.delete(appName, device.deviceId)
})
}
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.InvalidDevice)
events += PNCallbackEvent(messageId, requestTracker.clientId, rDeviceId, GCMResponseStatus.InvalidDevice, MobilePlatform.ANDROID, appName, requestTracker.contextId, f.get("error").asText, eventTS)
case ie if ie.has("error") && List("InternalServerError").contains(ie.get("error").asText.trim) =>
//TODO: Support retry.
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.InternalError)
events += PNCallbackEvent(messageId, requestTracker.clientId, rDeviceId, GCMResponseStatus.InternalError, MobilePlatform.ANDROID, appName, requestTracker.contextId, ie.toString, eventTS)
case e: JsonNode =>
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler unknown for message: $messageId, device: $rDeviceId", e)
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.Error)
events += PNCallbackEvent(messageId, requestTracker.clientId, rDeviceId, GCMResponseStatus.Error, MobilePlatform.ANDROID, appName, requestTracker.contextId, e.toString, eventTS)
}
})
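// Every non-200 status below applies to the whole batch, so one callback event is
// emitted per target device id.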
case 400 =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.InvalidJsonError, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, GCMResponseStatus.InvalidJsonError, MobilePlatform.ANDROID, appName, requestTracker.contextId, stringResponse, eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler http response - invalid json sent for: $messageId response: $stringResponse")
case 401 =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.AuthError, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, GCMResponseStatus.AuthError, MobilePlatform.ANDROID, appName, requestTracker.contextId, "", eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler http response - the sender account used to send a message couldn't be authenticated for: $messageId response: $stringResponse")
case w if 5 == (w / 100) =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.InternalError, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, GCMResponseStatus.InternalError, MobilePlatform.ANDROID, appName, requestTracker.contextId, "", eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler http response - the gcm server encountered an error while trying to process the request for: $messageId code: $w response: $stringResponse")
case w =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, GCMResponseStatus.Error, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, GCMResponseStatus.Error, MobilePlatform.ANDROID, appName, requestTracker.contextId, stringResponse, eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler http response - gcm response unhandled for: $messageId code: $w response: $stringResponse")
}
} catch {
case e: Exception =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, InternalStatus.GcmResponseParseError, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, InternalStatus.GcmResponseParseError, MobilePlatform.ANDROID, appName, requestTracker.contextId, e.getMessage, eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler failed processing http response body for: $messageId", e)
}
case Failure(e2) =>
ServiceFactory.getReportingService.recordPushStatsDelta(requestTracker.clientId, Option(requestTracker.contextId), requestTracker.meta.get("stencilId").map(_.toString), Option(MobilePlatform.ANDROID.toString), requestTracker.appName, InternalStatus.ProviderSendError, deviceIds.size)
events.addAll(deviceIds.map(PNCallbackEvent(messageId, requestTracker.clientId, _, InternalStatus.ProviderSendError, MobilePlatform.ANDROID, appName, requestTracker.contextId, e2.getMessage, eventTS)))
ConnektLogger(LogFile.PROCESSORS).error(s"GCMResponseHandler gcm send failure for: $messageId", e2)
}
events.enqueue
events.toList
})(m.executionContext)
}
|
Flipkart/connekt
|
busybees/src/main/scala/com/flipkart/connekt/busybees/streams/flows/reponsehandlers/GCMResponseHandler.scala
|
Scala
|
mit
| 10,682 |
package org.jetbrains.plugins.scala
package lang
package resolve
import com.intellij.lang.java.JavaLanguage
import com.intellij.psi._
import com.intellij.psi.impl.source.resolve.JavaResolveUtil
import com.intellij.psi.scope.{NameHint, PsiScopeProcessor}
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScTypeVariableTypeElement, ScSelfTypeElement, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScAccessModifier, ScFieldId, ScReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScSuperReference, ScThisReference}
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScClassParameter, ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.packaging.ScPackaging
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.{ScSyntheticClass, ScSyntheticValue}
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.TypeDefinitionMembers
import org.jetbrains.plugins.scala.lang.psi.impl.{ScPackageImpl, ScalaPsiManager}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue._
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil, types}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.ResolveTargets._
import org.jetbrains.plugins.scala.lang.resolve.processor.{BaseProcessor, ResolveProcessor, ResolverEnv}
import _root_.scala.collection.Set
/**
* @author ven
*/
object ResolveUtils {
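/**
* Checks whether `element` may stand for one of the requested resolve target kinds
* (`kinds == null` means "anything goes"). For instance, a plain ScObject matches
* OBJECT or METHOD, while a package object matches PACKAGE.
*/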
def kindMatches(element: PsiElement, kinds: Set[ResolveTargets.Value]): Boolean = kinds == null ||
(element match {
case _: PsiPackage | _: ScPackaging => kinds contains PACKAGE
case obj: ScObject if obj.isPackageObject => kinds contains PACKAGE
case obj: ScObject => (kinds contains OBJECT) || (kinds contains METHOD)
case _: ScTypeVariableTypeElement => kinds contains CLASS
case _: ScTypeParam => kinds contains CLASS
case _: ScTypeAlias => kinds contains CLASS
case _: ScTypeDefinition => kinds contains CLASS
case _: ScSyntheticClass => kinds contains CLASS
case c: PsiClass =>
if (kinds contains CLASS) true
else {
def isStaticCorrect(clazz: PsiClass): Boolean = {
val cclazz = clazz.getContainingClass
cclazz == null || (clazz.hasModifierProperty(PsiModifier.STATIC) && isStaticCorrect(cclazz))
}
(kinds contains OBJECT) && isStaticCorrect(c)
}
case patt: ScBindingPattern =>
val parent = ScalaPsiUtil.getParentOfType(patt, classOf[ScVariable], classOf[ScValue])
parent match {
case x: ScVariable => kinds contains VAR
case _ => kinds contains VAL
}
case patt: ScFieldId =>
if (patt.getParent /*list of ids*/ .getParent.isInstanceOf[ScVariable])
kinds contains VAR else kinds contains VAL
case classParam: ScClassParameter =>
if (classParam.isVar) kinds.contains(VAR) else kinds.contains(VAL)
case param: ScParameter => kinds contains VAL
case _: ScSelfTypeElement => kinds contains VAL
case _: PsiMethod => kinds contains METHOD
case _: ScFun => kinds contains METHOD
case _: ScSyntheticValue => kinds contains VAL
case f: PsiField => (kinds contains VAR) || (f.hasModifierPropertyScala(PsiModifier.FINAL) && kinds.contains(VAL))
case _: PsiParameter => kinds contains VAL //to enable named Parameters resolve in Play 2.0 routing file for java methods
case _ => false
})
def methodType(m : PsiMethod, s : ScSubstitutor, scope: GlobalSearchScope) =
ScFunctionType(s.subst(ScType.create(m.getReturnType, m.getProject, scope)),
m.getParameterList.getParameters.map({
p => val pt = p.getType
//scala hack: Objects in java are modelled as Any in scala
if (pt.equalsToText("java.lang.Object")) types.Any
else s.subst(ScType.create(pt, m.getProject, scope))
}).toSeq)(m.getProject, scope)
def javaMethodType(m: PsiMethod, s: ScSubstitutor, scope: GlobalSearchScope, returnType: Option[ScType] = None): ScMethodType = {
val retType: ScType = (m, returnType) match {
case (f: FakePsiMethod, None) => s.subst(f.retType)
case (_, None) => s.subst(ScType.create(m.getReturnType, m.getProject, scope))
case (_, Some(x)) => x
}
new ScMethodType(retType,
m match {
case f: FakePsiMethod => f.params.toSeq
case _ =>
m.getParameterList.getParameters.map { param =>
val scType = s.subst(param.exactParamType())
new Parameter("", None, scType, scType, false, param.isVarArgs, false, param.index, Some(param))
}
}, false)(m.getProject, scope)
}
def javaPolymorphicType(m: PsiMethod, s: ScSubstitutor, scope: GlobalSearchScope = null, returnType: Option[ScType] = None): NonValueType = {
if (m.getTypeParameters.isEmpty) javaMethodType(m, s, scope, returnType)
else {
ScTypePolymorphicType(javaMethodType(m, s, scope, returnType), m.getTypeParameters.map(new TypeParameter(_)))
}
}
def isAccessible(memb: PsiMember, _place: PsiElement, forCompletion: Boolean = false): Boolean = {
var place = _place
memb match {
case b: ScBindingPattern =>
b.nameContext match {
case memb: ScMember => return isAccessible(memb, place)
case _ => return true
}
//todo: ugly workaround, probably FakePsiMethod is better to remove?
case f: FakePsiMethod => f.navElement match {
case memb: PsiMember => return isAccessible(memb, place)
case _ =>
}
case _ =>
}
if (place.getLanguage == JavaLanguage.INSTANCE) {
return JavaResolveUtil.isAccessible(memb, memb.containingClass, memb.getModifierList, place, null, null)
}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil.getPlaceTd
//this is to make place and member on same level (resolve from library source)
var member: PsiMember = memb
memb.getContainingFile match {
case file: ScalaFile if file.isCompiled =>
place.getContainingFile match {
case file: ScalaFile if file.isCompiled =>
case _ if !member.isInstanceOf[ScMember] =>
member = member.getOriginalElement.asInstanceOf[PsiMember]
case _ => //todo: is it necessary? added to avoid performance and other problems
}
case _ =>
}
if (forCompletion && place != null) {
val originalFile: PsiFile = place.getContainingFile.getOriginalFile
if (originalFile == member.getContainingFile) {
val newPlace = originalFile.findElementAt(place.getTextRange.getStartOffset)
place = newPlace
}
}
member match {
case f: ScFunction if f.isBridge => return false
case _ =>
}
if (member.hasModifierProperty("public")) return true
def checkProtected(td: PsiClass, withCompanion: Boolean): Boolean = {
val isConstr = member match {
case m: PsiMethod => m.isConstructor
case _ => false
}
var placeTd: ScTemplateDefinition = getPlaceTd(place, isConstr)
if (isConstr) {
if (placeTd != null && !placeTd.isInstanceOf[ScTypeDefinition] && placeTd.extendsBlock.templateBody == None) {
placeTd = getPlaceTd(placeTd)
} else if (placeTd != null) {
if (td != null && isInheritorOrSelfOrSame(placeTd, td)) return true
}
while (placeTd != null) {
if (td == placeTd) return true
val companion: ScTemplateDefinition = ScalaPsiUtil.getCompanionModule(placeTd).getOrElse(null: ScTemplateDefinition)
if (companion != null && companion == td) return true
placeTd = getPlaceTd(placeTd)
}
return false
}
while (placeTd != null) {
if (td != null && isInheritorOrSelfOrSame(placeTd, td)) return true
val companion: ScTemplateDefinition = ScalaPsiUtil.
getCompanionModule(placeTd).getOrElse(null: ScTemplateDefinition)
if (withCompanion && companion != null && td != null &&
ScalaPsiUtil.cachedDeepIsInheritor(companion, td)) return true
placeTd = getPlaceTd(placeTd)
}
false
}
member match {
case scMember: ScMember => scMember.getModifierList.accessModifier match {
case None => true
case Some(am: ScAccessModifier) =>
if (am.isPrivate) {
if (am.access == ScAccessModifier.Type.THIS_PRIVATE) {
/*
ScalaReference.pdf:
A member M marked with this modifier can be accessed only from
within the object in which it is defined.
*/
place match {
case ref: ScReferenceElement =>
ref.qualifier match {
case None =>
val enclosing = PsiTreeUtil.getContextOfType(scMember, true, classOf[ScTemplateDefinition])
if (enclosing == null) return true
return PsiTreeUtil.isContextAncestor(enclosing, place, false)
case Some(t: ScThisReference) =>
val enclosing = PsiTreeUtil.getContextOfType(scMember, true, classOf[ScTemplateDefinition])
if (enclosing == null) return true
t.refTemplate match {
case Some(t) => return t == enclosing
case _ => return PsiTreeUtil.isContextAncestor(enclosing, place, false)
}
case Some(ref: ScReferenceElement) =>
val enclosing = PsiTreeUtil.getContextOfType(scMember, true, classOf[ScTemplateDefinition])
if (enclosing == null) return false
val resolve = ref.resolve()
if (enclosing.extendsBlock.selfTypeElement == Some(resolve)) return true
else return false
case _ => return false
}
case _ =>
val enclosing = PsiTreeUtil.getContextOfType(scMember, true, classOf[ScTemplateDefinition])
if (enclosing == null) return true
return PsiTreeUtil.isContextAncestor(enclosing, place, false)
}
}
val ref = am.getReference
if (ref != null) {
val bind = ref.resolve
if (bind == null) return true
def processPackage(packageName: String): Boolean = {
def context(place: PsiElement): PsiElement =
ScalaPsiUtil.getContextOfType(place, true, classOf[ScPackaging],
classOf[ScObject], classOf[ScalaFile])
var placeEnclosing: PsiElement = context(place)
while (placeEnclosing != null && placeEnclosing.isInstanceOf[ScObject] &&
!placeEnclosing.asInstanceOf[ScObject].isPackageObject)
placeEnclosing = context(placeEnclosing)
if (placeEnclosing == null) return false //not Scala
val placePackageName = placeEnclosing match {
case file: ScalaFile => ""
case obj: ScObject => obj.qualifiedName
case pack: ScPackaging => pack.fqn
}
packageContains(packageName, placePackageName)
}
bind match {
case td: ScTemplateDefinition =>
PsiTreeUtil.isContextAncestor(td, place, false) ||
PsiTreeUtil.isContextAncestor(ScalaPsiUtil.getCompanionModule(td).getOrElse(null: PsiElement),
place, false) || (td.isInstanceOf[ScObject] &&
td.asInstanceOf[ScObject].isPackageObject && processPackage(td.qualifiedName))
case pack: PsiPackage =>
val packageName = pack.getQualifiedName
processPackage(packageName)
case _ => true
}
}
else {
/*
ScalaReference.pdf:
Such members can be accessed only from within the directly enclosing
template and its companion module or companion class
*/
val enclosing = ScalaPsiUtil.getContextOfType(scMember, true,
classOf[ScalaFile], classOf[ScPackaging], classOf[ScTemplateDefinition])
enclosing match {
case td: ScTemplateDefinition =>
PsiTreeUtil.isContextAncestor(td, place, false) || PsiTreeUtil.isContextAncestor(ScalaPsiUtil.
getCompanionModule(td).getOrElse(null: PsiElement), place, false)
case file: ScalaFile if file.isScriptFile() =>
PsiTreeUtil.isContextAncestor(file, place, false)
case _ =>
val packageName = enclosing match {
case file: ScalaFile => ""
case packaging: ScPackaging => packaging.getPackageName
}
val placeEnclosing: PsiElement = ScalaPsiUtil.
getContextOfType(place, true, classOf[ScPackaging], classOf[ScalaFile])
if (placeEnclosing == null) return false //not Scala
val placePackageName = placeEnclosing match {
case file: ScalaFile => ""
case pack: ScPackaging => pack.getPackageName
}
packageContains(packageName, placePackageName)
}
}
} else if (am.isProtected) { //todo: this is wrong if the reference follows an inappropriate class type
val withCompanion = am.access != ScAccessModifier.Type.THIS_PROTECTED
val ref = am.getReference
if (ref != null) {
val bind = ref.resolve
if (bind == null) return true
def processPackage(packageName: String): Option[Boolean] = {
def context(place: PsiElement): PsiElement =
ScalaPsiUtil.getContextOfType(place, true, classOf[ScPackaging],
classOf[ScObject], classOf[ScalaFile])
var placeEnclosing: PsiElement = context(place)
while (placeEnclosing != null && placeEnclosing.isInstanceOf[ScObject] &&
!placeEnclosing.asInstanceOf[ScObject].isPackageObject)
placeEnclosing = context(placeEnclosing)
if (placeEnclosing == null) return Some(false) //not Scala
val placePackageName = placeEnclosing match {
case file: ScalaFile => ""
case obj: ScObject => obj.qualifiedName
case pack: ScPackaging => pack.fqn
}
if (packageContains(packageName, placePackageName)) return Some(true)
None
}
bind match {
case td: ScTemplateDefinition =>
if (PsiTreeUtil.isContextAncestor(td, place, false) || PsiTreeUtil.isContextAncestor(ScalaPsiUtil.
getCompanionModule(td).getOrElse(null: PsiElement), place, false)) return true
td match {
case o: ScObject if o.isPackageObject =>
processPackage(o.qualifiedName) match {
case Some(x) => return x
case None =>
}
case _ =>
}
case pack: PsiPackage => //like private (nothing related to real life)
val packageName = pack.getQualifiedName
processPackage(packageName) match {
case Some(x) => return x
case None =>
}
case _ => return true
}
}
val enclosing = ScalaPsiUtil.getContextOfType(scMember, true,
classOf[ScalaFile], classOf[ScTemplateDefinition], classOf[ScPackaging])
assert(enclosing != null, s"Enclosing is null in file ${scMember.getContainingFile.getName}:\n${scMember.getContainingFile.getText}")
if (am.isThis) {
place match {
case ref: ScReferenceElement =>
ref.qualifier match {
case None =>
case Some(t: ScThisReference) =>
case Some(s: ScSuperReference) =>
case Some(ref: ScReferenceElement) =>
val enclosing = PsiTreeUtil.getContextOfType(scMember, true, classOf[ScTemplateDefinition])
if (enclosing == null) return false
val resolve = ref.resolve()
if (enclosing.extendsBlock.selfTypeElement != Some(resolve)) return false
case _ => return false
}
case _ =>
}
}
enclosing match {
case td: ScTypeDefinition =>
if (PsiTreeUtil.isContextAncestor(td, place, false) ||
(withCompanion && PsiTreeUtil.isContextAncestor(ScalaPsiUtil.getCompanionModule(td).
getOrElse(null: PsiElement), place, false))) return true
checkProtected(td, withCompanion)
case td: ScTemplateDefinition =>
//it's an anonymous class, accessible only from inside it
PsiTreeUtil.isContextAncestor(td, place, false)
case _ =>
//same as for private
val packageName = enclosing match {
case file: ScalaFile => ""
case packaging: ScPackaging => packaging.fullPackageName
}
val placeEnclosing: PsiElement = ScalaPsiUtil.
getContextOfType(place, true, classOf[ScPackaging], classOf[ScalaFile])
if (placeEnclosing == null) return false //not Scala
val placePackageName = placeEnclosing match {
case file: ScalaFile => ""
case pack: ScPackaging => pack.fullPackageName
}
packageContains(packageName, placePackageName)
}
} else true
}
case _ =>
if (member.hasModifierProperty("public")) true
else if (member.hasModifierProperty("private")) false
else if (member.hasModifierProperty("protected") &&
checkProtected(member.containingClass, withCompanion = true)) true
else {
val packageName = member.getContainingFile match {
case s: ScalaFile => ""
case f: PsiClassOwner => f.getPackageName
case _ => return false
}
val placeEnclosing: PsiElement = ScalaPsiUtil.
getContextOfType(place, true, classOf[ScPackaging], classOf[ScalaFile])
if (placeEnclosing == null) return false
val placePackageName = placeEnclosing match {
case file: ScalaFile => ""
case pack: ScPackaging => pack.fullPackageName
}
packageContains(packageName, placePackageName)
}
}
}
def processSuperReference(superRef: ScSuperReference, processor : BaseProcessor, place : ScalaPsiElement) {
if (superRef.isHardCoded) {
superRef.drvTemplate match {
case Some(c) => processor.processType(ScThisType(c), place)
case None =>
}
} else {
superRef.staticSuper match {
case Some(t) => processor.processType(t, place)
case None => superRef.drvTemplate match {
case Some(c) =>
TypeDefinitionMembers.processSuperDeclarations(c, processor, ResolveState.initial.put(ScSubstitutor.key, ScSubstitutor.empty), null, place)
case None =>
}
}
}
}
def getPlacePackage(place: PsiElement): String = {
val pack: ScPackaging = ScalaPsiUtil.getContextOfType(place, true, classOf[ScPackaging]) match {
case pack: ScPackaging => pack
case _ => null
}
if (pack == null) return ""
pack.fullPackageName
}
private def isInheritorOrSelfOrSame(placeTd: ScTemplateDefinition, td: PsiClass): Boolean = {
if (ScalaPsiUtil.cachedDeepIsInheritor(placeTd, td)) return true
placeTd.selfTypeElement match {
case Some(te: ScSelfTypeElement) => te.typeElement match {
case Some(te: ScTypeElement) =>
def isInheritorOrSame(tp: ScType): Boolean = {
ScType.extractClass(tp) match {
case Some(clazz) =>
if (clazz == td) return true
if (ScalaPsiUtil.cachedDeepIsInheritor(clazz, td)) return true
case _ =>
}
false
}
te.getType(TypingContext.empty) match {
case Success(ctp: ScCompoundType, _) =>
for (tp <- ctp.components) {
if (isInheritorOrSame(tp)) return true
}
case Success(tp: ScType, _) =>
if (isInheritorOrSame(tp)) return true
case _ =>
}
case _ =>
}
case _ =>
}
false
}
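/**
* True when `potentialChild` is `packageName` itself or is nested below it:
* packageContains("a.b", "a.b") and packageContains("a.b", "a.b.c") hold, while
* packageContains("a.b", "a.bc") does not. Note that under this definition the
* root package "" only contains itself.
*/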
def packageContains(packageName: String, potentialChild: String): Boolean = {
potentialChild == packageName || potentialChild.startsWith(packageName + ".")
}
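/**
* Feeds the contents of a package to `processor`, taking one of three paths:
* implicit-search processors only see the package's cached implicit objects; a
* processor carrying a name hint resolves that single class or subpackage
* (retrying with backtick-quoted segments when a name clashes with a keyword);
* any other processor gets every class and subpackage enumerated.
*/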
def packageProcessDeclarations(pack: PsiPackage, processor: PsiScopeProcessor,
state: ResolveState, lastParent: PsiElement, place: PsiElement): Boolean = {
processor match {
case b: BaseProcessor if b.isImplicitProcessor =>
val objectsIterator = ScalaPsiManager.instance(pack.getProject).
getPackageImplicitObjects(pack.getQualifiedName, place.getResolveScope).iterator
while (objectsIterator.hasNext) {
val obj = objectsIterator.next()
if (!processor.execute(obj, state)) return false
}
true
case base: BaseProcessor =>
val nameHint = base.getHint(NameHint.KEY)
val name = if (nameHint == null) "" else nameHint.getName(state)
if (name != null && name != "" && base.getClassKind) {
try {
base.setClassKind(classKind = false)
if (base.getClassKindInner) {
val manager = ScalaPsiManager.instance(pack.getProject)
val qName = pack.getQualifiedName
def calcForName(name: String): Boolean = {
val fqn = if (qName.length() > 0) qName + "." + name else name
val scope = base match {
case r: ResolveProcessor => r.getResolveScope
case _ => place.getResolveScope
}
var classes: Array[PsiClass] = manager.getCachedClasses(scope, fqn)
if (classes.isEmpty) {
//todo: fast fix for the problem with classes, should be fixed in indexes
val improvedFqn = fqn.split('.').map { s =>
if (ScalaNamesUtil.isKeyword(s)) s"`$s`" else s
}.mkString(".")
if (improvedFqn != fqn) {
classes = manager.getCachedClasses(scope, improvedFqn)
}
}
for (clazz <- classes if clazz.containingClass == null) {
if (!processor.execute(clazz, state)) return false
}
true
}
if (!calcForName(name)) return false
val scalaName = { //todo: fast fix for the problem with classes, should be fixed in indexes
base match {
case r: ResolveProcessor =>
val stateName = state.get(ResolverEnv.nameKey)
if (stateName == null) r.name else stateName
case _ => name
}
}
if (scalaName != name && !calcForName(scalaName)) return false
}
//process subpackages
if (base.kinds.contains(ResolveTargets.PACKAGE)) {
val psiPack = pack match {
case s: ScPackageImpl => s.pack
case _ => pack
}
val qName: String = psiPack.getQualifiedName
val subpackageQName: String = if (qName.isEmpty) name else qName + "." + name
val subPackage = ScalaPsiManager.instance(psiPack.getProject).getCachedPackage(subpackageQName)
if (subPackage != null) {
if (!processor.execute(subPackage, state)) return false
}
true
} else true
} finally {
base.setClassKind(classKind = true)
}
} else {
try {
if (base.getClassKindInner) {
base.setClassKind(classKind = false)
val manager = ScalaPsiManager.instance(pack.getProject)
val scope = base match {
case r: ResolveProcessor => r.getResolveScope
case _ => place.getResolveScope
}
val iterator = manager.getClasses(pack, scope).iterator
while (iterator.hasNext) {
val clazz = iterator.next()
if (clazz.containingClass == null && !processor.execute(clazz, state)) return false
}
}
if (base.kinds.contains(ResolveTargets.PACKAGE)) {
//process subpackages
pack match {
case s: ScPackageImpl =>
s.pack.processDeclarations(processor, state, lastParent, place)
case _ =>
pack.processDeclarations(processor, state, lastParent, place)
}
} else true
} finally {
base.setClassKind(classKind = true)
}
}
case _ => pack.processDeclarations(processor, state, lastParent, place)
}
}
}
|
SergeevPavel/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
|
Scala
|
apache-2.0
| 26,946 |
package de.m7w3.signal.store.model
import org.whispersystems.libsignal.SignalProtocolAddress
import slick.jdbc.H2Profile.api._
case class Address(id: Int, signalAddress: SignalProtocolAddress)
object Address {
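// The row id is derived from SignalProtocolAddress#hashCode, so the same
// (name, deviceId) pair always maps to the same primary key. Distinct addresses
// could in principle hash-collide, in which case an upsert would silently
// overwrite the other row.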
def apply(address: SignalProtocolAddress): Address = {
new Address(address.hashCode(), address)
}
}
class Addresses(tag: Tag) extends Table[Address](tag, "ADDRESSES") {
def id = column[Int]("ID", O.PrimaryKey)
def name = column[String]("NAME")
def deviceId = column[Int]("DEVICE_ID")
override def * = (id, name, deviceId) <> (
(tuple: (Int, String, Int)) => Address(tuple._1, new SignalProtocolAddress(tuple._2, tuple._3)),
(address: Address) => Some((address.id, address.signalAddress.getName, address.signalAddress.getDeviceId))
)
}
object Addresses {
val addresses = TableQuery[Addresses]
def upsert(address: SignalProtocolAddress) = {
addresses.insertOrUpdate(Address(address))
}
}
|
ayoub-benali/signal-desktop-client
|
src/main/scala/de/m7w3/signal/store/model/Addresses.scala
|
Scala
|
apache-2.0
| 940 |
package scala.xml.quote
package internal
import fastparse.all._
import scala.xml.parsing.TokenTests
import internal.{ast => p}
private[internal] class XmlParser(Hole: P[p.Placeholder]) extends TokenTests {
import XmlParser._
private val S = CharsWhile(isSpace).opaque("whitespace")
val XmlExpr: P[Seq[p.Node]] = P( S.? ~ Xml.XmlContent.rep(min = 1, sep = S.?) ~ S.? ~ End )
val XmlPattern: P[p.Node] = P( S.? ~ Xml.ElemPattern ~ S.? ~ End )
private[this] object Xml {
val Elem: P[p.Node] = P( Index ~ TagHeader ~/ TagRest ).map {
case (pos, (name, atts), children: Seq[p.Node @unchecked]) =>
p.Elem(name, atts, minimizeEmpty = false, children, pos)
case (pos, (name, atts), _) =>
p.Elem(name, atts, minimizeEmpty = true, Nil, pos)
}
val TagHeader = P( "<" ~ Name ~/ (S ~ Attribute).rep ~/ S.? )
val TagRest = P( "/>" | ">" ~/ Content ~/ ETag ): P[Any] // P[Unit | Seq[p.Node]]
val ETag = P( "</" ~ Name ~ S.? ~ ">" ).toP0
// // This parser respect tag's balance but reports wrong positions on failure
// val Elem = P(
// for {
// pos <- Index
// (name, atts) <- TagHeader
// children <- TagRest(name)
// } yield children match {
// case cs: Seq[p.Node @unchecked] => p.Elem(name, atts, minimizeEmpty = false, cs, 0)
// case _ => p.Elem(name, atts, minimizeEmpty = false, Nil, 0)
// }
// )
//
// val TagHeader = P( "<" ~ Name ~/ (WL ~ Attribute).rep ~/ WL.? )
// def TagRest(name: String) = P( "/>" | ">" ~/ Content ~/ ETag(name) ): P[Any] // P[Unit | Seq[p.Node]]
// def ETag(name: String) = P( "</" ~ name ~ WL.? ~ ">" )
val Attribute = P( Index ~ Name ~/ Eq ~/ AttValue ).map {
case (pos, name, value) => p.Attribute(name, value, pos)
}
val Eq = P( S.? ~ "=" ~ S.? )
val AttValue = P(
"\\"" ~/ (CharQ | Reference).rep.!.map(Left.apply) ~ "\\"" |
"'" ~/ (CharA | Reference).rep.!.map(Left.apply) ~ "'" |
ScalaExpr.map(Right.apply)
): P[p.Attribute.AttValue]
val Content = P( (CharData | Reference | ScalaExpr | XmlContent).rep )
val XmlContent: P[p.Node] = P( Unparsed | CDSect | PI | Comment | Elem )
val ScalaExpr = Hole
val Unparsed = P( Index ~ UnpStart ~/ UnpData.! ~ UnpEnd ).map { case (pos, data) => p.Unparsed(data, pos) }
val UnpStart = P( "<xml:unparsed" ~ (S ~ Attribute).rep ~ S.? ~ ">" ).toP0
val UnpEnd = P( "</xml:unparsed>" )
val UnpData = P( (!UnpEnd ~ Char).rep )
val CDSect = P( Index ~ CDStart ~/ CData.! ~ CDEnd ).map { case (pos, data) => p.PCData(data, pos) }
val CDStart = P( "<![CDATA[" )
val CData = P( (!"]]>" ~ Char).rep )
val CDEnd = P( "]]>" )
val Comment = P( Index ~ "<!--" ~/ ComText.! ~ "-->" ).map { case (pos, text) => p.Comment(text, pos) }
val ComText = P( (!"-->" ~ Char).rep )
val PI = P( Index ~ "<?" ~ Name ~ S.? ~ PIProcText.! ~ "?>" ).map {
case (pos, target, text) => p.ProcInstr(target, text, pos)
}
val PIProcText = P( (!"?>" ~ Char).rep )
val Reference = P( EntityRef | CharRef )
val EntityRef = P( Index ~ "&" ~ Name ~/ ";" ).map { case (pos, name) => p.EntityRef(name, pos) }
val CharRef = P( Index ~ ("&#" ~ Num ~ ";" | "&#x" ~ HexNum ~ ";") ).map {
case (pos, cr) => p.Text(cr.toString, pos)
}
val Num = P( CharIn('0' to '9').rep.! ).map(n => charValueOf(n))
val HexNum = P( CharIn('0' to '9', 'a' to 'f', 'A' to 'F').rep.! ).map(n => charValueOf(n, 16))
val CharData = P( Index ~ Char1.rep(1).! ).map { case (pos, text) => p.Text(text, pos) }
val Char = P( !Hole ~ AnyChar )
val Char1 = P( !("<" | "&") ~ Char )
val CharQ = P( !"\\"" ~ Char1 )
val CharA = P( !"'" ~ Char1 )
val Name = P( NameStart ~ NameChar.rep ).!.filter(_.last != ':').opaque("Name")
val NameStart = P( CharPred(isNameStart) )
val NameChar = P( CharPred(isNameChar) )
val ElemPattern: P[p.Node] = P( Index ~ TagPHeader ~ TagPRest ).map {
case (pos, name, children: Seq[p.Node @unchecked]) =>
p.Elem(name, Nil, minimizeEmpty = false, children, pos)
case (pos, name, _) =>
p.Elem(name, Nil, minimizeEmpty = true, Nil, pos)
}
val TagPHeader = P( "<" ~ Name ~/ S.? )
val TagPRest = P( "/>" | ">" ~/ ContentP ~/ ETag ): P[Any] // P[Unit | Seq[p.Node]]
val ContentP = P( (ScalaPatterns | ElemPattern | CharDataP ).rep )
// matches weirdness of scalac parser on xml reference.
val CharDataP = P( Index ~ ("&" ~ CharData.? | CharData).! ).map { case (pos, text) => p.Text(text, pos) }
val ScalaPatterns = ScalaExpr
}
}
private[internal] object XmlParser {
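/** Parses the body of a (hexa)decimal character reference, e.g.
* charValueOf("65") == 'A' and charValueOf("41", 16) == 'A'; the empty
* string maps to the NUL character. */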
def charValueOf(cr: String, radix: Int = 10): Char =
if (cr.isEmpty) 0.toChar
else java.lang.Integer.parseInt(cr, radix).toChar
private implicit class ParserOps[T](val self: P[T]) extends AnyVal {
/** Discard the result of this parser */
def toP0: P0 = self.map(_ => ())
}
}
|
densh/scala-xml-quote
|
src/main/scala/scala/xml/quote/internal/XmlParser.scala
|
Scala
|
bsd-3-clause
| 5,068 |
package org.shelmet.heap.server
import org.shelmet.heap.model.{Snapshot, JavaClass, JavaHeapObject}
object FinalizerSummaryPage {
private class HistogramElement(val clazz: JavaClass) extends Ordered[HistogramElement] {
var count: Long = 0L
def updateCount() {
this.count += 1
}
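// Reversed comparison on purpose: sorting HistogramElements yields descending
// instance counts.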
override def compare(other: HistogramElement): Int = java.lang.Long.compare(other.count, count)
}
}
class FinalizerSummaryPage(snapshot : Snapshot) extends AbstractPage(snapshot) {
import FinalizerSummaryPage.HistogramElement
override def run() {
html("Finalizer Summary") {
out.println("<p>")
out.println("<b><a href='/'>All Classes (excluding platform)</a></b>")
out.println("</p>")
val objects = snapshot.getFinalizerObjects
printFinalizerSummary(objects.map(_.get))
}
}
private def printFinalizerSummary(objects: List[JavaHeapObject]) {
var count: Int = 0
var map = Map[JavaClass, FinalizerSummaryPage.HistogramElement]()
objects foreach {
obj =>
count += 1
val clazz: JavaClass = obj.getClazz
if (!map.contains(clazz))
map += (clazz -> new HistogramElement(clazz))
val element: HistogramElement = map.get(clazz).get
element.updateCount()
}
out.println("<p>")
out.println("<b>")
out.println("Total ")
if (count != 0)
out.print("<a href='/finalizerObjects/'>instances</a>")
else
out.print("instances")
out.println(" pending finalization: ")
out.print(count)
out.println("</b></p><hr/>")
if (count > 0) {
val elements = map.values.toList.sorted
table {
tableRow {
out.println("<th>Count</th><th>Class</th>")
}
elements foreach { element =>
tableRow {
tableData(out.println(element.count))
tableData(printClass(element.clazz))
}
}
}
}
}
}
|
rorygraves/shelmet
|
src/main/scala/org/shelmet/heap/server/FinalizerSummaryPage.scala
|
Scala
|
gpl-2.0
| 1,920 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.models.json.graph
import spray.json._
import io.deepsense.commons.datetime.DateTimeConverter
import io.deepsense.commons.exception.{DeepSenseFailure, FailureCode, FailureDescription}
import io.deepsense.commons.models.Entity
import io.deepsense.graph.nodestate._
class NodeStatusJsonProtocolSpec extends GraphJsonTestSupport {
import io.deepsense.commons.json.DateTimeJsonProtocol._
import io.deepsense.models.json.graph.NodeStatusJsonProtocol._
"NodeStateJsonProtocol" should {
"transform Draft to Json" in {
toJs(Draft(results)) shouldBe draftJson
}
"read Draft from Json" in {
fromJs(draftJson) shouldBe Draft(results)
}
"transform Queued to Json" in {
toJs(Queued(results)) shouldBe queuedJson
}
"read Queued from Json" in {
fromJs(queuedJson) shouldBe Queued(results)
}
"transform Running to Json" in {
toJs(running) shouldBe
runningJson
}
"read Running from Json" in {
fromJs(runningJson) shouldBe running
}
"transform Completed to Json" in {
toJs(completed) shouldBe completedJson
}
"read Completed from Json" in {
fromJs(completedJson) shouldBe completed
}
"transform Failed to Json" in {
toJs(failed) shouldBe failedJson
}
"read Failed from Json" in {
fromJs(failedJson) shouldBe failed
}
"transform Aborted to Json" in {
toJs(Aborted(results)) shouldBe abortedJson
}
"read Aborted from Json" in {
fromJs(abortedJson) shouldBe Aborted(results)
}
}
def fromJs(queuedJson: JsObject): NodeStatus = {
queuedJson.convertTo[NodeStatus]
}
def toJs(state: NodeStatus): JsValue = state.toJson
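/** Builds the full five-field status object, defaulting every field that is not
* supplied to JsNull; e.g. js("DRAFT", "results" -> results.toJson) yields
* {"status": "DRAFT", "started": null, "ended": null, "results": [...], "error": null},
* assuming the protocol's field-name constants are these lowercase names (the
* overriding in this helper relies on that). */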
def js(state: String, fields: (String, JsValue)*): JsObject = {
val emptyMap = Seq(
NodeStatusJsonProtocol.Status,
NodeStatusJsonProtocol.Started,
NodeStatusJsonProtocol.Ended,
NodeStatusJsonProtocol.Results,
NodeStatusJsonProtocol.Error).map(key => key -> None).toMap[String, Option[JsValue]]
val jsFields = (emptyMap ++ fields.toMap.mapValues(Some(_)) +
(NodeStatusJsonProtocol.Status -> Some(JsString(state)))).mapValues {
case None => JsNull
case Some(v) => v
}
JsObject(jsFields)
}
val started = DateTimeConverter.now
val ended = started.plusDays(1)
val error = FailureDescription(
DeepSenseFailure.Id.randomId,
FailureCode.CannotUpdateRunningWorkflow,
"This is a test FailureDescription",
Some("This is a long test description"),
Map("detail1" -> "value1", "detail2" -> "value2")
)
val results = Seq(Entity.Id.randomId, Entity.Id.randomId, Entity.Id.randomId)
val failed = Failed(started, ended, error)
val completed = Completed(started, ended, results)
val running: Running = Running(started, results)
val failedJson: JsObject = js("FAILED",
"started" -> started.toJson,
"ended" -> ended.toJson,
"error" -> error.toJson)
val completedJson: JsObject = js("COMPLETED",
"started" -> started.toJson,
"ended" -> ended.toJson,
"results" -> results.toJson)
val runningJson: JsObject =
js("RUNNING", "started" -> started.toJson, "results" -> results.toJson)
val abortedJson: JsObject = js("ABORTED", "results" -> results.toJson)
val queuedJson: JsObject = js("QUEUED", "results" -> results.toJson)
val draftJson: JsObject = js("DRAFT", "results" -> results.toJson)
}
|
deepsense-io/seahorse-workflow-executor
|
workflowjson/src/test/scala/io/deepsense/models/json/graph/NodeStatusJsonProtocolSpec.scala
|
Scala
|
apache-2.0
| 4,037 |
// scalac: -Xlint:type-parameter-shadow -language:higherKinds -Xfatal-warnings
//
class B {
type T = Int
trait D
// method parameter shadows some other type
def foobar[D](in: D) = in.toString
// type member's parameter shadows some other type
type MySeq[D] = Seq[D]
// class parameter shadows some other type
class Foo[T](t: T) {
// a type parameter shadows another type parameter
def bar[T](w: T) = w.toString
}
// even deeply nested...
class C[M[List[_]]]
type E[M[List[_]]] = Int
def foo[N[M[List[_]]]] = ???
// ...but not between type parameters in the same list
class F[A, M[L[A]]] // no warning!
type G[A, M[L[A]]] = Int // no warning!
def bar[A, N[M[L[A]]]] = ??? // no warning!
}
|
scala/scala
|
test/files/neg/t5691.scala
|
Scala
|
apache-2.0
| 747 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.dataset
import java.lang.Iterable
import java.lang.{Boolean => JBool}
import org.apache.calcite.plan._
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.{JoinInfo, JoinRelType}
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.rel.{BiRel, RelNode, RelWriter}
import org.apache.calcite.rex.RexNode
import org.apache.calcite.util.mapping.IntPair
import org.apache.flink.api.common.functions.{FilterFunction, FlatJoinFunction, GroupReduceFunction, JoinFunction}
import org.apache.flink.api.common.operators.Order
import org.apache.flink.api.common.operators.base.JoinOperatorBase.JoinHint
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.DataSet
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.internal.BatchTableEnvImpl
import org.apache.flink.table.api.{BatchQueryConfig, TableConfig, TableException, Types}
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.codegen.{FunctionCodeGenerator, GeneratedFunction}
import org.apache.flink.table.plan.nodes.CommonJoin
import org.apache.flink.table.runtime._
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
import scala.collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
/**
 * Flink RelNode that translates a join into a DataSet JoinOperator and its related operations.
*/
class DataSetJoin(
cluster: RelOptCluster,
traitSet: RelTraitSet,
leftNode: RelNode,
rightNode: RelNode,
rowRelDataType: RelDataType,
joinCondition: RexNode,
joinRowType: RelDataType,
joinInfo: JoinInfo,
keyPairs: List[IntPair],
joinType: JoinRelType,
joinHint: JoinHint,
ruleDescription: String)
extends BiRel(cluster, traitSet, leftNode, rightNode)
with CommonJoin
with DataSetRel {
override def deriveRowType(): RelDataType = rowRelDataType
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new DataSetJoin(
cluster,
traitSet,
inputs.get(0),
inputs.get(1),
getRowType,
joinCondition,
joinRowType,
joinInfo,
keyPairs,
joinType,
joinHint,
ruleDescription)
}
override def toString: String = {
joinToString(
joinRowType,
joinCondition,
joinType,
getExpressionString)
}
override def explainTerms(pw: RelWriter): RelWriter = {
joinExplainTerms(
super.explainTerms(pw),
joinRowType,
joinCondition,
joinType,
getExpressionString)
}
  override def computeSelfCost(planner: RelOptPlanner, metadata: RelMetadataQuery): RelOptCost = {
val leftRowCnt = metadata.getRowCount(getLeft)
val leftRowSize = estimateRowSize(getLeft.getRowType)
val rightRowCnt = metadata.getRowCount(getRight)
val rightRowSize = estimateRowSize(getRight.getRowType)
val ioCost = (leftRowCnt * leftRowSize) + (rightRowCnt * rightRowSize)
val cpuCost = leftRowCnt + rightRowCnt
val rowCnt = leftRowCnt + rightRowCnt
planner.getCostFactory.makeCost(rowCnt, cpuCost, ioCost)
}
override def translateToPlan(
tableEnv: BatchTableEnvImpl,
queryConfig: BatchQueryConfig): DataSet[Row] = {
val config = tableEnv.getConfig
val returnType = FlinkTypeFactory.toInternalRowTypeInfo(getRowType)
// get the equality keys
val leftKeys = ArrayBuffer.empty[Int]
val rightKeys = ArrayBuffer.empty[Int]
if (keyPairs.isEmpty) {
// if no equality keys => not supported
throw new TableException(
"Joins should have at least one equality condition.\\n" +
s"\\tLeft: ${left.toString},\\n" +
s"\\tRight: ${right.toString},\\n" +
s"\\tCondition: (${joinConditionToString(joinRowType,
joinCondition, getExpressionString)})"
)
}
else {
// at least one equality expression
val leftFields = left.getRowType.getFieldList
val rightFields = right.getRowType.getFieldList
keyPairs.foreach(pair => {
val leftKeyType = leftFields.get(pair.source).getType.getSqlTypeName
val rightKeyType = rightFields.get(pair.target).getType.getSqlTypeName
// check if keys are compatible
if (leftKeyType == rightKeyType) {
// add key pair
leftKeys.add(pair.source)
rightKeys.add(pair.target)
} else {
throw new TableException(
"Equality join predicate on incompatible types.\\n" +
s"\\tLeft: ${left.toString},\\n" +
s"\\tRight: ${right.toString},\\n" +
s"\\tCondition: (${joinConditionToString(joinRowType,
joinCondition, getExpressionString)})"
)
}
})
}
val leftDataSet = left.asInstanceOf[DataSetRel].translateToPlan(tableEnv, queryConfig)
val rightDataSet = right.asInstanceOf[DataSetRel].translateToPlan(tableEnv, queryConfig)
joinType match {
case JoinRelType.INNER =>
addInnerJoin(
leftDataSet,
rightDataSet,
leftKeys.toArray,
rightKeys.toArray,
returnType,
config)
case JoinRelType.LEFT =>
addLeftOuterJoin(
leftDataSet,
rightDataSet,
leftKeys.toArray,
rightKeys.toArray,
returnType,
config)
case JoinRelType.RIGHT =>
addRightOuterJoin(
leftDataSet,
rightDataSet,
leftKeys.toArray,
rightKeys.toArray,
returnType,
config)
case JoinRelType.FULL =>
addFullOuterJoin(
leftDataSet,
rightDataSet,
leftKeys.toArray,
rightKeys.toArray,
returnType,
config)
}
}
private def addInnerJoin(
left: DataSet[Row],
right: DataSet[Row],
leftKeys: Array[Int],
rightKeys: Array[Int],
resultType: TypeInformation[Row],
config: TableConfig): DataSet[Row] = {
val generator = new FunctionCodeGenerator(
config,
false,
left.getType,
Some(right.getType))
val conversion = generator.generateConverterResultExpression(
resultType,
joinRowType.getFieldNames)
val condition = generator.generateExpression(joinCondition)
val body =
s"""
|${condition.code}
|if (${condition.resultTerm}) {
| ${conversion.code}
| ${generator.collectorTerm}.collect(${conversion.resultTerm});
|}
|""".stripMargin
val genFunction = generator.generateFunction(
ruleDescription,
classOf[FlatJoinFunction[Row, Row, Row]],
body,
resultType)
val joinFun = new FlatJoinRunner[Row, Row, Row](
genFunction.name,
genFunction.code,
genFunction.returnType)
left.join(right)
.where(leftKeys: _*)
.equalTo(rightKeys: _*)
.`with`(joinFun)
.name(getJoinOpName)
}
private def addLeftOuterJoin(
left: DataSet[Row],
right: DataSet[Row],
leftKeys: Array[Int],
rightKeys: Array[Int],
resultType: TypeInformation[Row],
config: TableConfig): DataSet[Row] = {
if (!config.getNullCheck) {
throw new TableException("Null check in TableConfig must be enabled for outer joins.")
}
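    // Strategy sketch (summarizing the steps below): fold duplicate left rows into
    // (row, count) pairs, outer-join them with the right input while evaluating the
    // join predicate, then group the join pairs by the left row and reduce each
    // group to emit either the matched rows or a padded (left + null) row.
    // The right and full outer variants below mirror this strategy.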
val joinOpName = getJoinOpName
// replace field names by indexed names for easier key handling
val leftType = new RowTypeInfo(left.getType.asInstanceOf[RowTypeInfo].getFieldTypes: _*)
val rightType = right.getType.asInstanceOf[RowTypeInfo]
// partition and sort left input
// this step ensures we can reuse the sorting for all following operations
// (groupBy->join->groupBy)
val partitionedSortedLeft: DataSet[Row] = partitionAndSort(left, leftKeys)
// fold identical rows of the left input
val foldedRowsLeft: DataSet[Row] = foldIdenticalRows(partitionedSortedLeft, leftType)
// create JoinFunction to evaluate join predicate
val predFun = generatePredicateFunction(leftType, rightType, config)
val joinOutType = new RowTypeInfo(leftType, rightType, Types.INT)
val joinFun = new LeftOuterJoinRunner(predFun.name, predFun.code, joinOutType)
// join left and right inputs, evaluate join predicate, and emit join pairs
val nestedLeftKeys = leftKeys.map(i => s"f0.f$i")
val joinPairs = foldedRowsLeft.leftOuterJoin(right, JoinHint.REPARTITION_SORT_MERGE)
.where(nestedLeftKeys: _*)
.equalTo(rightKeys: _*)
.`with`(joinFun)
.withForwardedFieldsFirst("f0->f0")
.name(joinOpName)
// create GroupReduceFunction to generate the join result
val convFun = generateConversionFunction(leftType, rightType, resultType, config)
val reduceFun = new LeftOuterJoinGroupReduceRunner(
convFun.name,
convFun.code,
convFun.returnType)
// convert join pairs to result.
// This step ensures we preserve the rows of the left input.
joinPairs
.groupBy("f0")
.reduceGroup(reduceFun)
.name(joinOpName)
.returns(resultType)
}
private def addRightOuterJoin(
left: DataSet[Row],
right: DataSet[Row],
leftKeys: Array[Int],
rightKeys: Array[Int],
resultType: TypeInformation[Row],
config: TableConfig): DataSet[Row] = {
if (!config.getNullCheck) {
throw new TableException("Null check in TableConfig must be enabled for outer joins.")
}
val joinOpName = getJoinOpName
// replace field names by indexed names for easier key handling
val leftType = left.getType.asInstanceOf[RowTypeInfo]
val rightType = new RowTypeInfo(right.getType.asInstanceOf[RowTypeInfo].getFieldTypes: _*)
// partition and sort right input
// this step ensures we can reuse the sorting for all following operations
// (groupBy->join->groupBy)
val partitionedSortedRight: DataSet[Row] = partitionAndSort(right, rightKeys)
// fold identical rows of the right input
val foldedRowsRight: DataSet[Row] = foldIdenticalRows(partitionedSortedRight, rightType)
// create JoinFunction to evaluate join predicate
val predFun = generatePredicateFunction(leftType, rightType, config)
val joinOutType = new RowTypeInfo(leftType, rightType, Types.INT)
val joinFun = new RightOuterJoinRunner(predFun.name, predFun.code, joinOutType)
// join left and right inputs, evaluate join predicate, and emit join pairs
val nestedRightKeys = rightKeys.map(i => s"f0.f$i")
val joinPairs = left.rightOuterJoin(foldedRowsRight, JoinHint.REPARTITION_SORT_MERGE)
.where(leftKeys: _*)
.equalTo(nestedRightKeys: _*)
.`with`(joinFun)
.withForwardedFieldsSecond("f0->f1")
.name(joinOpName)
// create GroupReduceFunction to generate the join result
val convFun = generateConversionFunction(leftType, rightType, resultType, config)
val reduceFun = new RightOuterJoinGroupReduceRunner(
convFun.name,
convFun.code,
convFun.returnType)
// convert join pairs to result
// This step ensures we preserve the rows of the right input.
joinPairs
.groupBy("f1")
.reduceGroup(reduceFun)
.name(joinOpName)
.returns(resultType)
}
private def addFullOuterJoin(
left: DataSet[Row],
right: DataSet[Row],
leftKeys: Array[Int],
rightKeys: Array[Int],
resultType: TypeInformation[Row],
config: TableConfig): DataSet[Row] = {
if (!config.getNullCheck) {
throw new TableException("Null check in TableConfig must be enabled for outer joins.")
}
val joinOpName = getJoinOpName
// replace field names by indexed names for easier key handling
val leftType = new RowTypeInfo(left.getType.asInstanceOf[RowTypeInfo].getFieldTypes: _*)
val rightType = new RowTypeInfo(right.getType.asInstanceOf[RowTypeInfo].getFieldTypes: _*)
// partition and sort left and right input
// this step ensures we can reuse the sorting for all following operations
// (groupBy->join->groupBy), except the second grouping to preserve right rows.
val partitionedSortedLeft: DataSet[Row] = partitionAndSort(left, leftKeys)
val partitionedSortedRight: DataSet[Row] = partitionAndSort(right, rightKeys)
// fold identical rows of the left and right input
val foldedRowsLeft: DataSet[Row] = foldIdenticalRows(partitionedSortedLeft, leftType)
val foldedRowsRight: DataSet[Row] = foldIdenticalRows(partitionedSortedRight, rightType)
// create JoinFunction to evaluate join predicate
val predFun = generatePredicateFunction(leftType, rightType, config)
val joinOutType = new RowTypeInfo(leftType, rightType, Types.INT, Types.INT)
val joinFun = new FullOuterJoinRunner(predFun.name, predFun.code, joinOutType)
// join left and right inputs, evaluate join predicate, and emit join pairs
val nestedLeftKeys = leftKeys.map(i => s"f0.f$i")
val nestedRightKeys = rightKeys.map(i => s"f0.f$i")
val joinPairs = foldedRowsLeft
.fullOuterJoin(foldedRowsRight, JoinHint.REPARTITION_SORT_MERGE)
.where(nestedLeftKeys: _*)
.equalTo(nestedRightKeys: _*)
.`with`(joinFun)
.withForwardedFieldsFirst("f0->f0")
.withForwardedFieldsSecond("f0->f1")
.name(joinOpName)
// create GroupReduceFunctions to generate the join result
val convFun = generateConversionFunction(leftType, rightType, resultType, config)
val leftReduceFun = new LeftFullOuterJoinGroupReduceRunner(
convFun.name,
convFun.code,
convFun.returnType)
val rightReduceFun = new RightFullOuterJoinGroupReduceRunner(
convFun.name,
convFun.code,
convFun.returnType)
// compute joined (left + right) and left preserved (left + null)
val joinedAndLeftPreserved = joinPairs
// filter for pairs with left row
.filter(new FilterFunction[Row](){
override def filter(row: Row): Boolean = row.getField(0) != null})
.groupBy("f0")
.reduceGroup(leftReduceFun)
.name(joinOpName)
.returns(resultType)
// compute right preserved (null + right)
val rightPreserved = joinPairs
// filter for pairs with right row
.filter(new FilterFunction[Row](){
override def filter(row: Row): Boolean = row.getField(1) != null})
.groupBy("f1")
.reduceGroup(rightReduceFun)
.name(joinOpName)
.returns(resultType)
// union joined (left + right), left preserved (left + null), and right preserved (null + right)
joinedAndLeftPreserved.union(rightPreserved)
}
private def getJoinOpName: String = {
s"where: (${joinConditionToString(joinRowType, joinCondition, getExpressionString)}), " +
s"join: (${joinSelectionToString(joinRowType)})"
}
  /** Returns an array of all field indices, with the given key indices as a prefix. */
  private def getFullIndicesWithPrefix(keys: Array[Int], numFields: Int): Array[Int] = {
// get indices of all fields which are not keys
val nonKeys = (0 until numFields).filter(!keys.contains(_))
// return all field indices prefixed by keys
keys ++ nonKeys
}
/**
   * Partitions the data set on the join keys and sorts it on all fields, with the join keys being a
* prefix.
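   *
   * For example (hypothetical indices): for partitionKeys = [2] and an arity of 4,
   * the resulting sort key order is [2, 0, 1, 3].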
*/
private def partitionAndSort(
dataSet: DataSet[Row],
partitionKeys: Array[Int]): DataSet[Row] = {
// construct full sort keys with partitionKeys being a prefix
    val sortKeys = getFullIndicesWithPrefix(partitionKeys, dataSet.getType.getArity)
// partition
val partitioned: DataSet[Row] = dataSet.partitionByHash(partitionKeys: _*)
// sort on all fields
sortKeys.foldLeft(partitioned: DataSet[Row]) { (d, i) =>
d.sortPartition(i, Order.ASCENDING).asInstanceOf[DataSet[Row]]
}
}
/**
* Folds identical rows of a data set into a single row with a duplicate count.
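   *
   * For example (hypothetical rows), an input of (a), (a), (b) is folded into
   * Row((a), 2) and Row((b), 1): the original row in field 0 and its duplicate
   * count in field 1.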
*/
private def foldIdenticalRows(
dataSet: DataSet[Row],
dataSetType: TypeInformation[Row]): DataSet[Row] = {
val resultType = new RowTypeInfo(dataSetType, Types.INT)
val groupKeys = 0 until dataSetType.getArity
dataSet
// group on all fields of the input row
.groupBy(groupKeys: _*)
// fold identical rows
.reduceGroup(new GroupReduceFunction[Row, Row] {
val outTuple = new Row(2)
override def reduce(values: Iterable[Row], out: Collector[Row]): Unit = {
// count number of duplicates
var cnt = 0
val it = values.iterator()
while (it.hasNext) {
// set output row
outTuple.setField(0, it.next())
cnt += 1
}
// set count
outTuple.setField(1, cnt)
// emit folded row with count
out.collect(outTuple)
}
})
.returns(resultType)
.withForwardedFields("*->f0")
.name("fold identical rows")
}
/**
* Generates a [[GeneratedFunction]] of a [[JoinFunction]] to evaluate the join predicate.
* The function returns the result of the predicate as [[JBool]].
*/
private def generatePredicateFunction(
leftType: TypeInformation[Row],
rightType: TypeInformation[Row],
config: TableConfig): GeneratedFunction[JoinFunction[Row, Row, JBool], JBool] = {
val predGenerator = new FunctionCodeGenerator(config, false, leftType, Some(rightType))
val condition = predGenerator.generateExpression(joinCondition)
val predCode =
s"""
|${condition.code}
|return (${condition.resultTerm});
|""".stripMargin
predGenerator.generateFunction(
"OuterJoinPredicate",
classOf[JoinFunction[Row, Row, JBool]],
predCode,
Types.BOOLEAN)
}
/**
* Generates a [[GeneratedFunction]] of a [[JoinFunction]] to produce the join result.
*/
private def generateConversionFunction(
leftType: TypeInformation[Row],
rightType: TypeInformation[Row],
resultType: TypeInformation[Row],
config: TableConfig): GeneratedFunction[JoinFunction[Row, Row, Row], Row] = {
val conversionGenerator = new FunctionCodeGenerator(config, true, leftType, Some(rightType))
val conversion = conversionGenerator.generateConverterResultExpression(
resultType,
joinRowType.getFieldNames)
val convCode =
s"""
|${conversion.code}
|return ${conversion.resultTerm};
|""".stripMargin
conversionGenerator.generateFunction(
"OuterJoinConverter",
classOf[JoinFunction[Row, Row, Row]],
convCode,
resultType)
}
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetJoin.scala
|
Scala
|
apache-2.0
| 19,469 |
/*
* RichVector2bSpec.scala
*
* Copyright (c) 2013 Lonnie Pryor III
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fulcrum.math
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
/**
* Test case for [[fulcrum.math.RichVector2b]].
*
* @author Lonnie Pryor III ([email protected])
*/
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class RichVector2bSpec extends FunSpec with ShouldMatchers {
describe("RichVector2b") {
it("should support unary operators from Boolean") {
!Vector2(true, false) should equal(Vector2(false, true))
}
it("should support comparison operators from Boolean") {
import Vector2._
val v = Vector2(true, false)
v === true should equal(Vector2(true, false))
v === Vector2(true, false) should equal(Vector2(true, true))
v === false should equal(Vector2(false, true))
v === Vector2(false, true) should equal(Vector2(false, false))
v =!= true should equal(Vector2(false, true))
v =!= Vector2(true, false) should equal(Vector2(false, false))
v =!= false should equal(Vector2(true, false))
v =!= Vector2(false, true) should equal(Vector2(true, true))
}
it("should support logical operators from Boolean") {
val v = Vector2(true, false)
v || true should equal(Vector2(true, true))
v || Vector2(true, true) should equal(Vector2(true, true))
v || false should equal(Vector2(true, false))
v || Vector2(false, false) should equal(Vector2(true, false))
v && true should equal(Vector2(true, false))
v && Vector2(true, true) should equal(Vector2(true, false))
v && false should equal(Vector2(false, false))
v && Vector2(false, false) should equal(Vector2(false, false))
}
it("should support bitwise operators from Boolean") {
val v = Vector2(true, false)
v | true should equal(Vector2(true, true))
v | Vector2(true, true) should equal(Vector2(true, true))
v | false should equal(Vector2(true, false))
v | Vector2(false, false) should equal(Vector2(true, false))
v & true should equal(Vector2(true, false))
v & Vector2(true, true) should equal(Vector2(true, false))
v & false should equal(Vector2(false, false))
v & Vector2(false, false) should equal(Vector2(false, false))
v ^ true should equal(Vector2(false, true))
v ^ Vector2(true, true) should equal(Vector2(false, true))
v ^ false should equal(Vector2(true, false))
v ^ Vector2(false, false) should equal(Vector2(true, false))
}
it("should create immutable copies by converting elements") {
val v = Vector2(true, false)
v.toVector2f should equal(immutable.Vector2(1f, 0f))
v.toVector2i should equal(immutable.Vector2(1, 0))
}
}
}
|
lpryor/fulcrum
|
math/src/test/scala/fulcrum/math/RichVector2bSpec.scala
|
Scala
|
apache-2.0
| 3,337 |
package edu.gemini.model.p1.targetio.table
import edu.gemini.spModel.core.{Declination, RightAscension, MagnitudeSystem, Magnitude}
object Serializers {
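  // Each serializer supplies three views of a value: a binary form (null or NaN
  // when the value is absent), the primitive class expected by the table layer,
  // and a text rendering ("INDEF" or "" for missing values).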
implicit object StringWriter extends StilSerializer[String] {
def asBinary(value: String) = value
def primitiveClass = classOf[String]
def asText(value: String) = value
}
implicit object IntWriter extends StilSerializer[Int] {
def asBinary(value: Int) = value
def primitiveClass = classOf[java.lang.Integer]
def asText(value: Int) = value.toString
}
implicit object OptionRaWriter extends StilSerializer[Option[RightAscension]] {
def asBinary(value: Option[RightAscension]) = value.map(_.toAngle.toDegrees).orNull
def primitiveClass = classOf[java.lang.Double]
def asText(value: Option[RightAscension]) = value.map(_.toAngle.formatHMS).getOrElse("INDEF")
}
implicit object OptionDecWriter extends StilSerializer[Option[Declination]] {
def asBinary(value: Option[Declination]) = value.map(_.toDegrees).orNull
def primitiveClass = classOf[java.lang.Double]
def asText(value: Option[Declination]) = value.map(_.formatDMS).getOrElse("INDEF")
}
implicit object OptionalDoubleWriter extends StilSerializer[Option[Double]] {
def asBinary(value: Option[Double]) = value.getOrElse(Double.NaN)
def primitiveClass = classOf[java.lang.Double]
def asText(value: Option[Double]) = value.map("%.3f".format(_)).getOrElse("INDEF")
}
implicit object OptionalMagnitudeWriter extends StilSerializer[Option[Magnitude]] {
private def toDouble(value: Option[Magnitude]): Option[Double] = value.map(_.value)
def asBinary(value: Option[Magnitude]) = OptionalDoubleWriter.asBinary(toDouble(value))
def primitiveClass = classOf[java.lang.Double]
def asText(value: Option[Magnitude]) = OptionalDoubleWriter.asText(toDouble(value))
}
implicit object OptionalMagnitudeSystemWriter extends StilSerializer[Option[MagnitudeSystem]] {
def asBinary(value: Option[MagnitudeSystem]) = value.map(_.name).orNull
def primitiveClass = classOf[String]
def asText(value: Option[MagnitudeSystem]) = value.map(_.name).getOrElse("")
}
}
|
arturog8m/ocs
|
bundle/edu.gemini.model.p1.targetio/src/main/scala/edu/gemini/model/p1/targetio/table/Serializers.scala
|
Scala
|
bsd-3-clause
| 2,167 |
package com.computableideas.lib.extractors.test
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.prop._
class CommonSuite extends FlatSpec with Matchers with PropertyChecks
|
MrBogomips/ScalaExtractorsLib
|
lib/src/test/scala/com/computableideas/lib/extractors/test/CommonSuite.scala
|
Scala
|
apache-2.0
| 206 |
package java.lang
import scala.scalajs.js
class Throwable(s: String, private var e: Throwable) extends Object with java.io.Serializable {
def this() = this(null, null)
def this(s: String) = this(s, null)
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
private[this] var stackTrace: Array[StackTraceElement] = _
fillInStackTrace()
def initCause(cause: Throwable): Throwable = {
e = cause
this
}
def getMessage(): String = s
def getCause(): Throwable = e
def getLocalizedMessage(): String = getMessage()
def fillInStackTrace(): Throwable = {
scala.scalajs.runtime.StackTrace.captureState(this)
this
}
def getStackTrace(): Array[StackTraceElement] = {
if (stackTrace eq null)
stackTrace = scala.scalajs.runtime.StackTrace.extract(this)
stackTrace
}
def setStackTrace(stackTrace: Array[StackTraceElement]): Unit = {
var i = 0
while (i < stackTrace.length) {
if (stackTrace(i) eq null)
throw new NullPointerException()
i += 1
}
this.stackTrace = stackTrace.clone()
}
def printStackTrace(): Unit = printStackTrace(System.err)
def printStackTrace(s: java.io.PrintStream): Unit =
printStackTraceImpl(s.println(_))
def printStackTrace(s: java.io.PrintWriter): Unit =
printStackTraceImpl(s.println(_))
private[this] def printStackTraceImpl(sprintln: String => Unit): Unit = {
getStackTrace() // will init it if still null
// Message
sprintln(toString)
// Trace
if (stackTrace.length != 0) {
var i = 0
while (i < stackTrace.length) {
sprintln(" at "+stackTrace(i))
i += 1
}
} else {
sprintln(" <no stack trace available>")
}
// Causes
var wCause: Throwable = this
while ((wCause ne wCause.getCause) && (wCause.getCause ne null)) {
val parentTrace = wCause.getStackTrace
wCause = wCause.getCause
val thisTrace = wCause.getStackTrace
val thisLength = thisTrace.length
val parentLength = parentTrace.length
sprintln("Caused by: " + wCause.toString)
if (thisLength != 0) {
/* Count how many frames are shared between this stack trace and the
* parent stack trace, so that we can omit them when printing.
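         * E.g. (hypothetical frames) this = [x, b, c, d], parent = [a, b, c, d]:
         * three trailing frames match, one of them is kept for context, so x and b
         * are printed, followed by "... 2 more".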
*/
var sameFrameCount: Int = 0
while (sameFrameCount < thisLength && sameFrameCount < parentLength &&
thisTrace(thisLength-sameFrameCount-1) == parentTrace(parentLength-sameFrameCount-1)) {
sameFrameCount += 1
}
/* If at least one, decrement so that the first common frame is still
* printed. According to Harmony this is spec'ed and common practice.
*/
if (sameFrameCount > 0)
sameFrameCount -= 1
// Print the non-common frames
val lengthToPrint = thisLength - sameFrameCount
var i = 0
while (i < lengthToPrint) {
sprintln(" at "+thisTrace(i))
i += 1
}
if (sameFrameCount > 0)
sprintln(" ... " + sameFrameCount + " more")
} else {
sprintln(" <no stack trace available>")
}
}
}
override def toString(): String = {
val className = getClass.getName
val message = getMessage()
if (message eq null) className
else className + ": " + message
}
}
class ThreadDeath() extends Error()
/* java.lang.*Error.java */
class AbstractMethodError(s: String) extends IncompatibleClassChangeError(s) {
def this() = this(null)
}
class AssertionError private (s: String) extends Error(s) {
def this() = this(null)
def this(o: Object) = this(o.toString)
def this(b: scala.Boolean) = this(b.toString)
def this(c: scala.Char) = this(c.toString)
def this(i: scala.Int) = this(i.toString)
def this(l: scala.Long) = this(l.toString)
def this(f: scala.Float) = this(f.toString)
def this(d: scala.Double) = this(d.toString)
}
class BootstrapMethodError(s: String, e: Throwable) extends LinkageError(s) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class ClassCircularityError(s: String) extends LinkageError(s) {
def this() = this(null)
}
class ClassFormatError(s: String) extends LinkageError(s) {
def this() = this(null)
}
class Error(s: String, e: Throwable) extends Throwable(s, e) {
def this() = this(null, null)
def this(s: String) = this(s, null)
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
}
class ExceptionInInitializerError private (s: String, private val e: Throwable) extends LinkageError(s) {
def this(thrown: Throwable) = this(null, thrown)
def this(s: String) = this(s, null)
def this() = this(null, null)
def getException(): Throwable = e
override def getCause(): Throwable = e
}
class IllegalAccessError(s: String) extends IncompatibleClassChangeError(s) {
def this() = this(null)
}
class IncompatibleClassChangeError(s: String) extends LinkageError(s) {
def this() = this(null)
}
class InstantiationError(s: String) extends IncompatibleClassChangeError(s) {
def this() = this(null)
}
class InternalError(s: String) extends VirtualMachineError(s) {
def this() = this(null)
}
class LinkageError(s: String) extends Error(s) {
def this() = this(null)
}
class NoClassDefFoundError(s: String) extends LinkageError(s) {
def this() = this(null)
}
class NoSuchFieldError(s: String) extends IncompatibleClassChangeError(s) {
def this() = this(null)
}
class NoSuchMethodError(s: String) extends IncompatibleClassChangeError(s) {
def this() = this(null)
}
class OutOfMemoryError(s: String) extends VirtualMachineError(s) {
def this() = this(null)
}
class StackOverflowError(s: String) extends VirtualMachineError(s) {
def this() = this(null)
}
class UnknownError(s: String) extends VirtualMachineError(s) {
def this() = this(null)
}
class UnsatisfiedLinkError(s: String) extends LinkageError(s) {
def this() = this(null)
}
class UnsupportedClassVersionError(s: String) extends ClassFormatError(s) {
def this() = this(null)
}
class VerifyError(s: String) extends LinkageError(s) {
def this() = this(null)
}
abstract class VirtualMachineError(s: String) extends Error(s) {
def this() = this(null)
}
/* java.lang.*Exception.java */
class ArithmeticException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class ArrayIndexOutOfBoundsException(s: String) extends IndexOutOfBoundsException(s) {
def this(index: Int) = this("Array index out of range: " + index)
def this() = this(null)
}
class ArrayStoreException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class ClassCastException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class ClassNotFoundException(s: String, e: Throwable) extends ReflectiveOperationException(s) {
def this(s: String) = this(s, null)
def this() = this(null, null)
def getException(): Throwable = e
override def getCause(): Throwable = e
}
class CloneNotSupportedException(s: String) extends Exception(s) {
def this() = this(null)
}
class EnumConstantNotPresentException(e: Class[_ <: Enum[_]], c: String)
extends RuntimeException(e.getName() + "." + c) {
def enumType(): Class[_ <: Enum[_]] = e
def constantName(): String = c
}
class Exception(s: String, e: Throwable) extends Throwable(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class IllegalAccessException(s: String) extends ReflectiveOperationException(s) {
def this() = this(null)
}
class IllegalArgumentException(s: String, e: Throwable) extends RuntimeException(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class IllegalMonitorStateException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class IllegalStateException(s: String, e: Throwable) extends RuntimeException(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class IllegalThreadStateException(s: String) extends IllegalArgumentException(s) {
def this() = this(null)
}
class IndexOutOfBoundsException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class InstantiationException(s: String) extends ReflectiveOperationException(s) {
def this() = this(null)
}
class InterruptedException(s: String) extends Exception(s) {
def this() = this(null)
}
class NegativeArraySizeException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class NoSuchFieldException(s: String) extends ReflectiveOperationException(s) {
def this() = this(null)
}
class NoSuchMethodException(s: String) extends ReflectiveOperationException(s) {
def this() = this(null)
}
class NullPointerException(s: String) extends RuntimeException(s) {
def this() = this(null)
}
class NumberFormatException(s: String) extends IllegalArgumentException(s) {
def this() = this(null)
}
class ReflectiveOperationException(s: String, e: Throwable) extends Exception(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class RejectedExecutionException(s: String, e: Throwable) extends RuntimeException(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class RuntimeException(s: String, e: Throwable) extends Exception(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class SecurityException(s: String, e: Throwable) extends RuntimeException(s, e) {
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
def this(s: String) = this(s, null)
def this() = this(null, null)
}
class StringIndexOutOfBoundsException(s: String) extends IndexOutOfBoundsException(s) {
def this(index: Int) = this("String index out of range: " + index)
def this() = this(null)
}
class TypeNotPresentException(t: String, e: Throwable)
extends RuntimeException("Type " + t + " not present", e) {
def typeName(): String = t
}
class UnsupportedOperationException(s: String, e: Throwable) extends RuntimeException(s, e) {
def this() = this(null, null)
def this(s: String) = this(s, null)
def this(e: Throwable) = this(if (e == null) null else e.toString, e)
}
|
lrytz/scala-js
|
javalanglib/src/main/scala/java/lang/Throwables.scala
|
Scala
|
bsd-3-clause
| 10,683 |
package util.plugins
import models.Asset
import collins.provisioning.ProvisionerConfig
import collins.softlayer.{SoftLayerConfig, SoftLayerPlugin}
import play.api.Play
object SoftLayer {
def plugin: Option[SoftLayerPlugin] = pluginEnabled
def pluginEnabled: Option[SoftLayerPlugin] = {
Play.maybeApplication.flatMap { app =>
app.plugin[SoftLayerPlugin].filter(_.enabled)
}
}
def pluginEnabled[T](fn: SoftLayerPlugin => T): Option[T] = {
pluginEnabled.map { p =>
fn(p)
}
}
def ticketLink(id: String): Option[String] = {
pluginEnabled.flatMap { p =>
try {
Some(p.ticketUrl(id.toLong))
} catch {
        case _: Exception => None // e.g. NumberFormatException from id.toLong
}
}
}
def assetLink(asset: models.asset.AssetView): Option[String] = asset match {
case a: models.Asset => {
pluginEnabled.flatMap { p =>
p.softLayerUrl(a)
}
}
case _ => None
}
def canCancel(asset: Asset): Boolean = {
validAsset(asset) && SoftLayerConfig.allowedCancelStatus.contains(asset.status)
}
def canActivate(asset: Asset): Boolean = {
validAsset(asset) && asset.isIncomplete
}
protected def validAsset(asset: Asset): Boolean = {
plugin.isDefined &&
plugin.map(_.isSoftLayerAsset(asset)).getOrElse(false) &&
ProvisionerConfig.allowedType(asset.asset_type)
}
}
|
Shopify/collins
|
app/util/plugins/SoftLayer.scala
|
Scala
|
apache-2.0
| 1,343 |
/*
option.scala
*/
object OptionBisect {
// Part A
def findRootOpt(low: Double, high: Double)(f: Double => Double): Option[Double] = ???
// Part B
def solveQuad(a: Double): Option[Double] = ???
}
/* eof */
|
darrenjw/scala-course
|
exercises/option/src/main/scala/option.scala
|
Scala
|
gpl-3.0
| 226 |
package com.logimethods.smartmeter.generate
import com.logimethods.smartmeter.generate._
import org.scalatest._
import java.time._
import Math._
class TemperatureProviderTest extends FunSuite {
test("temperature") {
val firstDay = LocalDateTime.of(2017, 1, 1, 0, 0)
val lastDay = LocalDateTime.of(2017, 12, 31, 23, 59)
var day = LocalDateTime.of(2017, 1, 1, 0, 0)
var temp = TemperatureProvider.temperature(firstDay, 0)
while (day.isBefore(lastDay)) {
day = day.plusHours(1)
val newTemp = TemperatureProvider.temperature(day, 0)
// println(day + " (" + day.getDayOfWeek.ordinal + ") : " + newTemp + " / " + abs(newTemp - temp))
// println(newTemp)
assert(abs(newTemp - temp) < 2)
temp = newTemp
}
}
}
|
Logimethods/smart-meter
|
dockerfile-app_inject/src/test/scala/com/logimethods/smartmeter/generate/TemperatureProviderTest.scala
|
Scala
|
mit
| 765 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs/contributors
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.sexp
import scala.collection.immutable.ListMap
import shapeless.{ :: => :*:, _ }
import shapeless.labelled._
trait DerivedSexpReader[Base, A] {
def readFields(t: ListMap[SexpSymbol, Sexp]): A
}
object DerivedSexpReader {
def gen[A, Repr](
implicit
C: SexpConfig[A],
G: LabelledGeneric.Aux[A, Repr],
CR: Cached[Strict[DerivedSexpReader[A, Repr]]]
): SexpReader[A] = {
case (s: SexpSymbol) =>
G.from(CR.value.value.readFields(ListMap(s -> SexpNil)))
case SexpData(m) => G.from(CR.value.value.readFields(m))
case x => throw new DeserializationException(x)
}
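  // Readers are derived by structural recursion over the generic representation:
  // hnil/hcons walk a labelled HList (case class fields), while cnil/ccons walk
  // a labelled Coproduct (sealed trait alternatives).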
implicit def hnil[A]: DerivedSexpReader[A, HNil] = (_ => HNil)
implicit def hcons[A, Key <: Symbol, Value, Remaining <: HList](
implicit
C: SexpConfig[A],
Key: Witness.Aux[Key],
LV: Lazy[SexpReader[Value]],
DR: DerivedSexpReader[A, Remaining]
): DerivedSexpReader[A, FieldType[Key, Value] :*: Remaining] = { m =>
val key = C.hint(Key.value.name)
val read = LV.value.read(m.getOrElse(key, SexpNil))
field[Key](read) :: DR.readFields(m)
}
implicit def cnil[A]: DerivedSexpReader[A, CNil] =
(_ => throw new DeserializationException(SexpNil))
implicit def ccons[A, Name <: Symbol, Instance, Remaining <: Coproduct](
implicit
C: SexpConfig[A],
Name: Witness.Aux[Name],
LI: Lazy[SexpReader[Instance]],
DR: DerivedSexpReader[A, Remaining]
): DerivedSexpReader[A, FieldType[Name, Instance] :+: Remaining] = { obj =>
val key = Name.value.name
obj.get(C.hint(key)) match {
case None => Inr(DR.readFields(obj))
case Some(value) => Inl(field[Name](LI.value.read(value)))
}
}
}
|
fommil/ensime-server
|
s-express/src/main/scala/org/ensime/sexp/DerivedSexpReader.scala
|
Scala
|
gpl-3.0
| 1,846 |
/*
* Channel.scala
* (ScalaOSC)
*
* Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.osc
import java.io.{Closeable, IOException, PrintStream}
import java.net.{InetAddress, InetSocketAddress, SocketAddress}
import java.nio.channels.{Channel => NIOChannel}
object Channel {
trait ConfigLike {
/** Queries the buffer size used for coding or decoding OSC messages.
* This is the maximum size an OSC packet (bundle or message) can grow to.
*
* @return the buffer size in bytes.
*
     * @see ConfigBuilder#bufferSize_=
*/
def bufferSize: Int
/** Queries the transport protocol used by this communicator.
*
* @return the transport, such as <code>UDP</code> or <code>TCP</code>
*
* @see #UDP
* @see #TCP
*/
def transport: Transport
def codec: PacketCodec
}
trait Config extends ConfigLike
trait ConfigBuilder extends ConfigLike {
/** Adjusts the buffer size used by the future channel.
* The minimum allowed size is 16 bytes. Typically, OSC
* applications handle packets up to 8 KB. SuperCollider
     * Server handles packets up to 64 KB by default (?).
*/
def bufferSize_=(size: Int): Unit
def codec_=(codec: PacketCodec): Unit
def build: Config
}
object Net {
trait ConfigLike extends Channel.ConfigLike {
def localSocketAddress: InetSocketAddress
final def localPort : Int = localSocketAddress.getPort
final def localAddress : InetAddress = localSocketAddress.getAddress
final def localIsLoopback : Boolean = localSocketAddress.getAddress.isLoopbackAddress
}
trait Config extends Channel.Config with ConfigLike
trait ConfigBuilder extends Channel.ConfigBuilder with ConfigLike {
def localPort_=(port: Int): Unit
def localAddress_=(address: InetAddress): Unit
def localSocketAddress_=(address: InetSocketAddress): Unit
def localIsLoopback_=(loopback: Boolean): Unit
override def build: Config
}
}
trait Net extends Channel with Net.ConfigLike {
def channel: NIOChannel
}
object Undirected {
object Input {
object Net {
type Action = (Packet, SocketAddress) => Unit
val NoAction: Action = (_, _) => ()
}
trait Net extends Channel {
def action: Net.Action
def action_=(value: Net.Action): Unit
}
}
}
object Directed {
trait Net extends Channel.Net {
/** The remote socket address of this channel. Returns `null` if the
* channel has not yet been connected.
*
* @see #connect()
*/
def remoteSocketAddress: InetSocketAddress
final def remotePort : Int = remoteSocketAddress.getPort
final def remoteAddress: InetAddress = remoteSocketAddress.getAddress
}
object Input {
type Action = Packet => Unit
val NoAction: Action = _ => ()
}
trait Input extends Channel {
def action: Input.Action
def action_=(fun: Input.Action): Unit
}
trait Output extends Channel {
def ! (p: Packet): Unit
}
}
trait Bidi extends Channel {
def dumpIn(mode: Dump = Dump.Text,
stream: PrintStream = Console.err,
filter: Dump.Filter = Dump.AllPackets): Unit
def dumpOut(mode: Dump = Dump.Text,
stream: PrintStream = Console.err,
filter: Dump.Filter = Dump.AllPackets): Unit
}
}
trait Channel extends Channel.ConfigLike with Closeable {
def bufferSize: Int
def codec: PacketCodec
/** Queries whether the channel is still open. */
def isOpen: Boolean
/** Establishes connection for transports requiring
* connectivity (e.g. TCP). For transports that do not require connectivity (e.g. UDP),
* this ensures the communication channel is created and bound.
* <P>
* When a <B>UDP</B> transmitter
* is created without an explicit <code>DatagramChannel</code> – say by
* calling <code>Transmitter.newUsing( "udp" )</code>, you are required
* to call <code>connect()</code> so that an actual <code>DatagramChannel</code> is
* created and bound. For a <B>UDP</B> transmitter which was created with an explicit
   * <code>DatagramChannel</code>, this method does nothing, so it is always safe
* to call <code>connect()</code>. However, for <B>TCP</B> transmitters,
* this may throw an <code>IOException</code> if the transmitter
* was already connected, therefore be sure to check <code>isConnected()</code> before.
*
   * @throws IOException if a networking error occurs. Possible reasons: the underlying
   *         network channel has been closed by the server; the transport
   *         is TCP and the server is not available; or the transport is TCP
   *         and a <code>Receiver</code> sharing the same socket was stopped before (unable to revive).
*
* @see #isConnected()
*/
@throws(classOf[IOException])
def connect(): Unit
def isConnected: Boolean
/** Changes the way processed OSC messages are printed to the standard err console.
* By default messages are not printed.
*
* @param mode one of `Dump.Off` (don't dump, default),
* `Dump.Text` (dump human readable string),
* `Dump.Hex` (hexdump), or
* `Dump.Both` (both text and hex)
* @param stream the stream to print on
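   *
   * A minimal usage sketch (assuming a concrete channel instance `c`):
   * {{{
   * c.dump(Dump.Both, Console.out)
   * }}}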
*/
def dump(mode: Dump = Dump.Text,
stream: PrintStream = Console.err,
filter: Dump.Filter = Dump.AllPackets): Unit
}
|
Sciss/ScalaOSC
|
shared/src/main/scala/de/sciss/osc/Channel.scala
|
Scala
|
lgpl-2.1
| 5,857 |
package edu.gemini.itc.shared
import edu.gemini.spModel.core.Peer
import edu.gemini.util.trpc.client.TrpcClient
import scala.collection.JavaConversions._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.swing.Color
import scalaz._
import Scalaz._
/** The data structures here are an attempt to unify the results produced by the different instrument recipes.
* Results are either a few simple numbers in case of imaging or some numbers and a set of charts made up by
* data series with (x,y) value pairs for spectroscopy. Note that some of the results are given on a per CCD
* basis. For now GMOS is the only instrument with more than one CCD, GHOST may be the next one to support
* several CCDs.
*
* The main purpose of the classes here is to serve as data transfer objects and to decouple the internal ITC
* result representation (which contains many data types which are only relevant to ITC) from the service interface.
* The internal result representations (ImagingResult and SpectroscopyResult) can potentially be replaced with
* the result objects here in the future.
*/
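// A minimal sketch (hypothetical numbers) of how these DTOs compose, using the
// case classes defined below:
//   ItcImagingResult(List(ItcCcd(
//     singleSNRatio = 10.0, totalSNRatio = 30.0, peakPixelFlux = 1.0e4,
//     wellDepth = 1.0e5, ampGain = 2.0, warnings = Nil)))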
/** Representation of relevant ITC calculation results for an instrument's CCD.
* In particular the well depth and amp gain are CCD specific and can be different for different CCDs in the
* same instrument (e.g. GMOS).
*/
final case class ItcCcd(
singleSNRatio: Double, // the final SN ratio for a single image
totalSNRatio: Double, // the total SN ratio for all images
peakPixelFlux: Double, // the highest e- count for all pixels on the CCD
wellDepth: Double, // the well depth (max e- count per pixel) for this CCD
ampGain: Double, // the amplifier gain for this CCD (used to calculate ADU)
warnings: List[ItcWarning] // the warnings provided by ITC for this CCD
) {
val percentFullWell: Double = peakPixelFlux / wellDepth * 100.0 // the max percentage of the well saturation for peak pixel
val adu: Int = (peakPixelFlux / ampGain).toInt // the ADU value
}
sealed trait ItcResult extends Serializable {
def ccds: List[ItcCcd]
/**
* Max value for a property across the CCDs
*/
private def maxP[A](f: ItcCcd => A)(implicit a: scala.Ordering[A]): A = ccds.map(f).max
def ccd(i: Int = 0): Option[ItcCcd] = ccds.index(i % ccds.length)
def peakPixelFlux(ccdIx: Int = 0): Option[Int] = ccd(ccdIx).map(_.peakPixelFlux.toInt)
def maxPeakPixelFlux: Int = maxP(_.peakPixelFlux).toInt
def maxAdu: Int = maxP(_.adu)
def maxPercentFullWell: Double = maxP(_.percentFullWell)
def maxSingleSNRatio: Double = maxP(_.singleSNRatio)
def maxTotalSNRatio: Double = maxP(_.totalSNRatio)
val warnings: List[ItcWarning] = {
def compositeWarnings(i: (ItcCcd, Int)): List[ItcWarning] = i._1.warnings.map(w => ItcWarning(s"CCD ${i._2}: ${w.msg}"))
def concatWarnings =
ccds.zipWithIndex >>= compositeWarnings
if (ccds.length > 1) concatWarnings
else ccds.head.warnings
}
}
// === IMAGING RESULTS
final case class ItcImagingResult(ccds: List[ItcCcd]) extends ItcResult
// === SPECTROSCOPY RESULTS
// There are two different types of charts
sealed trait SpcChartType
case object SignalChart extends SpcChartType { val instance: SpcChartType = this } // signal and background over wavelength [nm]
case object S2NChart extends SpcChartType { val instance: SpcChartType = this } // single and final S2N over wavelength [nm]
case object SignalPixelChart extends SpcChartType { val instance: SpcChartType = this } // signal and background over pixels
// There are four different data sets
sealed trait SpcDataType
case object SignalData extends SpcDataType { val instance: SpcDataType = this } // signal over wavelength [nm]
case object BackgroundData extends SpcDataType { val instance: SpcDataType = this } // background over wavelength [nm]
case object SingleS2NData extends SpcDataType { val instance: SpcDataType = this } // single S2N over wavelength [nm]
case object FinalS2NData extends SpcDataType { val instance: SpcDataType = this } // final S2N over wavelength [nm]
case object PixSigData extends SpcDataType { val instance: SpcDataType = this } // signal over pixels
case object PixBackData extends SpcDataType { val instance: SpcDataType = this } // background over pixels
/** Series of (x,y) data points used to create charts and text data files. */
final case class SpcSeriesData(dataType: SpcDataType, title: String, data: Array[Array[Double]], color: Option[Color] = None) {
def x(i: Int): Double = xValues(i)
def y(i: Int): Double = yValues(i)
def xValues: Array[Double] = data(0)
def yValues: Array[Double] = data(1)
}
final case class ChartAxisRange(start: Double, end: Double)
final case class ChartAxis(label: String, inverted: Boolean = false, range: Option[ChartAxisRange] = None)
object ChartAxis {
// Java helper
def apply(label: String) = new ChartAxis(label)
}
/** Multiple charts can be grouped.
* This is for example useful to stack IFU results for different offsets on top of each other in the OT.
* (At a later stage we maybe also want to add group labels like IFU offsets etc instead of repeating that
* information in every chart title.)*/
final case class SpcChartGroup(charts: List[SpcChartData])
/** Charts are made up of a set of data series which are all plotted in the same XY-plot. */
final case class SpcChartData(chartType: SpcChartType, title: String, xAxis: ChartAxis, yAxis: ChartAxis, series: List[SpcSeriesData], axes: List[ChartAxis] = List()) {
// JFreeChart requires a unique name for each series
require(series.map(_.title).distinct.size == series.size, "titles of series are not unique")
/** Gets all data series for the given type. */
def allSeries(t: SpcDataType): List[SpcSeriesData] = series.filter(_.dataType == t)
/** Gets all data series for the given type as Java lists. */
def allSeriesAsJava(t: SpcDataType): java.util.List[SpcSeriesData] = series.filter(_.dataType == t)
}
/** The result of a spectroscopy ITC calculation contains some numbers per CCD and a set of groups of charts.
* Individual charts and data series can be referenced by their types and group index. For most instruments there
* is only one chart and data series of each type, however for NIFS and GMOS there will be several charts
* of each type for each IFU element. */
final case class ItcSpectroscopyResult(ccds: List[ItcCcd], chartGroups: List[SpcChartGroup]) extends ItcResult {
/** Gets chart data by type and its group index.
* This method will fail if the result you're looking for does not exist.
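    * For example, `result.chart(S2NChart)` returns the first group's S2N chart,
    * and `result.chart(SignalChart, 1)` the signal chart of the second group.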
*/
def chart(t: SpcChartType, i: Int = 0): SpcChartData = chartGroups(i).charts.filter(_.chartType == t).head
/** Gets all data series by chart type and data type.
* This method will fail if the result (chart/data) you're looking for does not exist.
*/
def allSeries(ct: SpcChartType, dt: SpcDataType): List[SpcSeriesData] = chart(ct).allSeries(dt)
}
object SpcChartData {
def apply(chartType: SpcChartType, title: String, xAxisLabel: String, yAxisLabel: String, series: List[SpcSeriesData]) =
new SpcChartData(chartType, title, ChartAxis(xAxisLabel), ChartAxis(yAxisLabel), series, List())
}
object ItcResult {
import edu.gemini.itc.shared.ItcService._
/** Creates an ITC result in case of an error. */
def forException(e: Throwable): Result = ItcError(e.getMessage).left
/** Creates an ITC result with a single problem/error message. */
def forMessage(msg: String): Result = ItcError(msg).left
/** Creates an ITC result for a result. */
def forResult(result: ItcResult): Result = result.right
}
/**
* Service interface for ITC calculations.
*/
trait ItcService {
import edu.gemini.itc.shared.ItcService._
def calculate(p: ItcParameters): Result
}
sealed trait ItcMessage
final case class ItcError(msg: String) extends ItcMessage
final case class ItcWarning(msg: String) extends ItcMessage
case class ItcParameters(
source: SourceDefinition,
observation: ObservationDetails,
conditions: ObservingConditions,
telescope: TelescopeDetails,
instrument: InstrumentDetails)
object ItcService {
type Result = ItcError \\/ ItcResult
/** Performs an ITC call on the given host. */
def calculate(peer: Peer, inputs: ItcParameters): Future[Result] =
TrpcClient(peer).withoutKeys future { r =>
r[ItcService].calculate(inputs)
}
}
|
spakzad/ocs
|
bundle/edu.gemini.itc.shared/src/main/scala/edu/gemini/itc/shared/ItcService.scala
|
Scala
|
bsd-3-clause
| 8,780 |
/* Copyright 2009-2021 EPFL, Lausanne */
object Nested16 {
def foo(i: BigInt): BigInt = {
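    // rec2 is only entered via rec1, which passes k = j, so its precondition
    // (j > 0 || j == k) holds; and whenever k != 0 we have j > 0, so the
    // recursive call rec1(j - 1) satisfies rec1's precondition j - 1 >= 0.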
def rec1(j: BigInt): BigInt = {
require(j >= 0)
def rec2(k: BigInt): BigInt = {
require(j > 0 || j == k)
if(k == 0) 0 else rec1(j-1)
}
rec2(j)
}
rec1(3)
} ensuring(0 == _)
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/MicroTests/Nested16.scala
|
Scala
|
apache-2.0
| 323 |
package org.openapitools.server.model
/**
* @param `class` for example: ''null''
* @param links for example: ''null''
* @param jenkinsOrganizationPipeline for example: ''null''
* @param name for example: ''null''
*/
final case class GithubOrganization (
`class`: Option[String],
links: Option[GithubOrganizationlinks],
jenkinsOrganizationPipeline: Option[Boolean],
name: Option[String]
)
|
cliffano/swaggy-jenkins
|
clients/scala-akka-http-server/generated/src/main/scala/org/openapitools/server/model/GithubOrganization.scala
|
Scala
|
mit
| 407 |
package util
import java.util.stream.Collectors
import org.jsoup.nodes.Element
import org.jsoup.select.Elements
/**
* <pre>
* Created on 6/3/15.
* </pre>
* @author K.Sakamoto
*/
object JsoupHelper {
implicit def elementsToElements4Scala(elements: Elements): Elements4Scala = {
new Elements4Scala(elements)
}
}
class Elements4Scala(that: Elements) {
def toElementArray: Array[Element] = {
val list: java.util.List[Element] = that.stream.collect(Collectors.toList[Element])
list.toArray(new Array[Element](list.size()))
}
}
|
ktr-skmt/FelisCatusZero
|
src/main/scala/util/JsoupHelper.scala
|
Scala
|
apache-2.0
| 550 |
package pl.touk.nussknacker.openapi
import io.swagger.v3.oas.models.media.Schema
package object parser {
type SwaggerRefSchemas = Map[SwaggerRef, Schema[_]]
}
|
TouK/nussknacker
|
components/openapi/src/main/scala/pl/touk/nussknacker/openapi/parser/package.scala
|
Scala
|
apache-2.0
| 166 |
package ohnosequences.stuff
/**
=Functions on functions=
@groupprio basic 0
  @groupname basic Basic functions
  @groupdesc basic The identity function and other basic combinators.
@groupprio ccc 1
@groupname ccc Cartesian-closed structure
  @groupdesc ccc These methods correspond to the Cartesian-closed structure on [[Scala]].
*/
object functions {
/**
the identity function on `A`
@group basic
*/
@inline
final def identity[A]: A -> A =
a => a
/** a constant function from Y to X given a value of X */
@inline
final def const[Y, X]: X -> (Y -> X) =
x => _ => x
@inline
final def point[X]: X -> (∗ -> X) =
x => _ => x
@inline
final def force[X]: (∗ -> X) -> X =
_(∗)
}
private[stuff] abstract class Function[X, Y] {
def apply(a: X): Y
}
object Function {
implicit final class Syntax[X, Y](val f: X -> Y) extends CompileTime {
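    // Forward composition: (f >-> g)(x) == g(f(x)).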
def >->[Z](g: Y -> Z): X -> Z =
x => g(f(x))
def at(x: X): Y =
f(x)
}
}
|
ohnosequences/stuff
|
src/main/scala/functions.scala
|
Scala
|
agpl-3.0
| 939 |
/*
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*/
package org.locationtech.geomesa.accumulo.index
import org.geotools.factory.CommonFactoryFinder
import org.geotools.filter.AttributeExpression
import org.geotools.filter.text.ecql.ECQL
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.data.tables.AvailableTables
import org.locationtech.geomesa.accumulo.index.Strategy.StrategyType
import org.locationtech.geomesa.filter
import org.locationtech.geomesa.utils.geotools.SftBuilder.Opts
import org.locationtech.geomesa.utils.geotools.{SftBuilder, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.stats.Cardinality
import org.opengis.filter._
import org.specs2.matcher.MatchResult
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class QueryFilterSplitterTest extends Specification {
val sft = new SftBuilder()
.stringType("attr1")
.stringType("attr2", index = true)
.stringType("high", Opts(index = true, cardinality = Cardinality.HIGH))
.stringType("low", Opts(index = true, cardinality = Cardinality.LOW))
.date("dtg", default = true)
.point("geom", default = true)
.withIndexes(AvailableTables.DefaultTablesStr)
.build("QueryFilterSplitterTest")
val ff = CommonFactoryFinder.getFilterFactory2
val splitter = new QueryFilterSplitter(sft)
val geom = "BBOX(geom,40,40,50,50)"
val geom2 = "BBOX(geom,60,60,70,70)"
val geomOverlap = "BBOX(geom,35,35,55,55)"
val dtg = "dtg DURING 2014-01-01T00:00:00Z/2014-01-01T23:59:59Z"
val dtg2 = "dtg DURING 2014-01-02T00:00:00Z/2014-01-02T23:59:59"
val dtgOverlap = "dtg DURING 2014-01-01T00:00:00Z/2014-01-02T23:59:59Z"
val nonIndexedAttr = "attr1 = 'test'"
val nonIndexedAttr2 = "attr1 = 'test2'"
val indexedAttr = "attr2 = 'test'"
val indexedAttr2 = "attr2 = 'test2'"
val highCardinaltiyAttr = "high = 'test'"
val lowCardinaltiyAttr = "low = 'test'"
val wholeWorld = "BBOX(geom,-180,-90,180,90)"
def and(clauses: String*) = ff.and(clauses.map(ECQL.toFilter))
def or(clauses: String*) = ff.or(clauses.map(ECQL.toFilter))
def not(clauses: String*) = filter.andFilters(clauses.map(ECQL.toFilter).map(ff.not))(ff)
def f(filter: String) = ECQL.toFilter(filter)
implicit def filterToString(f: Filter): String = ECQL.toCQL(f)
implicit def stringToFilter(f: String): Filter = ECQL.toFilter(f)
"QueryFilterSplitter" should {
"return for filter include" >> {
val filter = Filter.INCLUDE
val options = splitter.getQueryOptions(Filter.INCLUDE)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.RECORD
options.head.filters.head.primary mustEqual Seq(filter)
options.head.filters.head.secondary must beNone
}
"return none for filter exclude" >> {
val options = splitter.getQueryOptions(Filter.EXCLUDE)
options must beEmpty
}
"return none for exclusive anded geoms" >> {
val options = splitter.getQueryOptions(and(geom, geom2, dtg))
options must beEmpty
}.pendingUntilFixed("not implemented")
"return none for exclusive anded dates" >> {
val options = splitter.getQueryOptions(and(geom, dtg, dtg2))
options must beEmpty
}.pendingUntilFixed("not implemented")
"work for spatio-temporal queries" >> {
"with a simple and" >> {
val filter = and(geom, dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(f(geom), f(dtg))
options.head.filters.head.secondary must beNone
}
"with multiple geometries" >> {
val filter = and(geom, geom2, dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(f(geom), f(geom2), f(dtg))
options.head.filters.head.secondary must beNone
}
"with multiple dates" >> {
val filter = and(geom, dtg, dtgOverlap)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(f(geom), f(dtg), f(dtgOverlap))
options.head.filters.head.secondary must beNone
}
"with multiple geometries and dates" >> {
val filter = and(geom, geomOverlap, dtg, dtgOverlap)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary must containTheSameElementsAs(Seq(f(geom), f(geomOverlap), f(dtg), f(dtgOverlap)))
options.head.filters.head.secondary must beNone
}
"with simple ors" >> {
val filter = or(and(geom, dtg), and(geom2, dtg2))
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(2)
forall(options.head.filters)(_.strategy mustEqual StrategyType.Z3)
options.head.filters.map(_.primary) must
containTheSameElementsAs(Seq(Seq(f(geom), f(dtg)), Seq(f(geom2), f(dtg2))))
options.head.filters.map(_.secondary).filter(_.isDefined) must haveLength(1)
options.head.filters.map(_.secondary).filter(_.isDefined).head.get must beAnInstanceOf[Not]
}
"while ignoring world-covering geoms" >> {
val filter = f(wholeWorld)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.RECORD
options.head.filters.head.primary mustEqual Seq(Filter.INCLUDE)
options.head.filters.head.secondary must beNone
}
"while ignoring world-covering geoms when other filters are present" >> {
val filter = and(wholeWorld, geom, dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(f(geom), f(dtg))
options.head.filters.head.secondary must beNone
}
}
"work for single clause filters" >> {
"spatial" >> {
val filter = f(geom)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ST
options.head.filters.head.primary mustEqual Seq(filter)
options.head.filters.head.secondary must beNone
}
"temporal" >> {
val filter = f(dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(dtg).map(f)
options.head.filters.head.secondary must beNone
}
"non-indexed attributes" >> {
val filter = f(nonIndexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.RECORD
options.head.filters.head.primary mustEqual Seq(Filter.INCLUDE)
options.head.filters.head.secondary must beSome(filter)
}
"indexed attributes" >> {
val filter = f(indexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ATTRIBUTE
options.head.filters.head.primary mustEqual Seq(filter)
options.head.filters.head.secondary must beNone
}
"low-cardinality attributes" >> {
      val filter = f(lowCardinalityAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ATTRIBUTE
options.head.filters.head.primary mustEqual Seq(filter)
options.head.filters.head.secondary must beNone
}
"high-cardinality attributes" >> {
      val filter = f(highCardinalityAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ATTRIBUTE
options.head.filters.head.primary mustEqual Seq(filter)
options.head.filters.head.secondary must beNone
}
}
"work for simple ands" >> {
"spatial" >> {
val filter = and(geom, geom2)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ST
options.head.filters.head.primary mustEqual Seq(geom, geom2).map(f)
options.head.filters.head.secondary must beNone
}
"temporal" >> {
val filter = and(dtg, dtgOverlap)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(dtg, dtgOverlap).map(f)
options.head.filters.head.secondary must beNone
}
"non-indexed attributes" >> {
val filter = and(nonIndexedAttr, nonIndexedAttr2)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.RECORD
options.head.filters.head.primary mustEqual Seq(Filter.INCLUDE)
options.head.filters.head.secondary must beSome(filter)
}
"indexed attributes" >> {
val filter = and(indexedAttr, indexedAttr2)
val options = splitter.getQueryOptions(filter)
options must haveLength(2)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ATTRIBUTE
options.head.filters.head.primary.head mustEqual f(indexedAttr)
options.head.filters.head.secondary.head mustEqual f(indexedAttr2)
}
"low-cardinality attributes" >> {
      val filter = and(lowCardinalityAttr, nonIndexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ATTRIBUTE
      options.head.filters.head.primary mustEqual Seq(f(lowCardinalityAttr))
options.head.filters.head.secondary must beSome(f(nonIndexedAttr))
}
}
"split filters on AND" >> {
"with spatiotemporal clauses" >> {
val filter = and(geom, dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(geom, dtg).map(f)
options.head.filters.head.secondary must beNone
}
"filters with spatiotemporal and non-indexed attributes clauses" >> {
val filter = and(geom, dtg, nonIndexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(geom, dtg).map(f)
options.head.filters.head.secondary must beSome(f(nonIndexedAttr))
}
"with spatiotemporal and indexed attributes clauses" >> {
val filter = and(geom, dtg, indexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(2)
forall(options)(_.filters must haveLength(1))
val z3 = options.find(_.filters.head.strategy == StrategyType.Z3)
z3 must beSome
z3.get.filters.head.primary mustEqual Seq(geom, dtg).map(f)
z3.get.filters.head.secondary must beSome(f(indexedAttr))
val attr = options.find(_.filters.head.strategy == StrategyType.ATTRIBUTE)
attr must beSome
attr.get.filters.head.primary mustEqual Seq(f(indexedAttr))
attr.get.filters.head.secondary must beSome(and(geom, dtg))
}
"with spatiotemporal, indexed and non-indexed attributes clauses" >> {
val filter = and(geom, dtg, indexedAttr, nonIndexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(2)
forall(options)(_.filters must haveLength(1))
val z3 = options.find(_.filters.head.strategy == StrategyType.Z3)
z3 must beSome
z3.get.filters.head.primary mustEqual Seq(geom, dtg).map(f)
z3.get.filters.head.secondary must beSome(and(indexedAttr, nonIndexedAttr))
val attr = options.find(_.filters.head.strategy == StrategyType.ATTRIBUTE)
attr must beSome
attr.get.filters.head.primary mustEqual Seq(f(indexedAttr))
attr.get.filters.head.secondary must beSome(and(geom, dtg, nonIndexedAttr))
}
}
"split filters on OR" >> {
"with spatiotemporal clauses" >> {
val filter = or(geom, dtg)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(2)
val z3 = options.head.filters.find(_.strategy == StrategyType.Z3)
z3 must beSome
z3.get.primary mustEqual Seq(dtg).map(f)
z3.get.secondary must beSome(not(geom))
val st = options.head.filters.find(_.strategy == StrategyType.ST)
st must beSome
st.get.primary mustEqual Seq(f(geom))
st.get.secondary must beNone
}
"with multiple spatial clauses" >> {
val filter = or(geom, geom2)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(2)
forall(options.head.filters)(_.strategy mustEqual StrategyType.ST)
options.head.filters.head.primary mustEqual Seq(f(geom))
options.head.filters.head.secondary must beNone
options.head.filters.tail.head.primary mustEqual Seq(f(geom2))
options.head.filters.tail.head.secondary must beSome(not(geom))
}
"with spatiotemporal and indexed attribute clauses" >> {
val filter = or(geom, indexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(2)
options.head.filters.map(_.strategy) must containTheSameElementsAs(Seq(StrategyType.ST, StrategyType.ATTRIBUTE))
options.head.filters.find(_.strategy == StrategyType.ST).get.primary mustEqual Seq(f(geom))
options.head.filters.find(_.strategy == StrategyType.ST).get.secondary must beNone
options.head.filters.find(_.strategy == StrategyType.ATTRIBUTE).get.primary mustEqual Seq(f(indexedAttr))
options.head.filters.find(_.strategy == StrategyType.ATTRIBUTE).get.secondary must beSome(not(geom))
}
"and collapse overlapping query filters" >> {
"with spatiotemporal and non-indexed attribute clauses" >> {
val filter = or(geom, nonIndexedAttr)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.RECORD
options.head.filters.head.primary mustEqual Seq(Filter.INCLUDE)
options.head.filters.head.secondary must beSome(filter)
}
"with overlapping geometries" >> {
val filter = or(geom, geomOverlap)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.ST
options.head.filters.head.primary mustEqual Seq(f(geomOverlap))
options.head.filters.head.secondary must beNone
}.pendingUntilFixed("not implemented")
"with overlapping dates" >> {
val filter = or(dtg, dtgOverlap)
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(wholeWorld, dtgOverlap).map(f)
options.head.filters.head.secondary must beNone
}.pendingUntilFixed("not implemented")
"with overlapping geometries and dates" >> {
val filter = or(and(geom, dtg), and(geomOverlap, dtgOverlap))
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(geomOverlap, dtgOverlap).map(f)
options.head.filters.head.secondary must beNone
}.pendingUntilFixed("not implemented")
}
}
"split nested filters" >> {
"with ANDs" >> {
val filter = and(geom, and(dtg, nonIndexedAttr))
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(geom, dtg).map(f)
options.head.filters.head.secondary must beSome(f(nonIndexedAttr))
}
"with ORs" >> {
val filter = or(geom, or(dtg, indexedAttr))
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(3)
options.head.filters.map(_.strategy) must
containTheSameElementsAs(Seq(StrategyType.Z3, StrategyType.ST, StrategyType.ATTRIBUTE))
options.head.filters.map(_.primary) must
containTheSameElementsAs(Seq(Seq(f(geom)), Seq(f(dtg)), Seq(f(indexedAttr))))
options.head.filters.map(_.secondary) must
containTheSameElementsAs(Seq(None, Some(not(geom)), Some(not(geom, dtg))))
}
"with ANDs and ORs" >> {
"with spatiotemporal clauses and non-indexed attributes" >> {
val filter = f(and(geom, dtg, or(nonIndexedAttr, nonIndexedAttr2)))
val options = splitter.getQueryOptions(filter)
options must haveLength(1)
options.head.filters must haveLength(1)
options.head.filters.head.strategy mustEqual StrategyType.Z3
options.head.filters.head.primary mustEqual Seq(geom, dtg).map(f)
options.head.filters.head.secondary must beSome(or(nonIndexedAttr, nonIndexedAttr2))
}
}
}
"support indexed date attributes" >> {
val sft = SimpleFeatureTypes.createType("dtgIndex", "dtg:Date:index=full,*geom:Point:srid=4326")
val splitter = new QueryFilterSplitter(sft)
val filter = f("dtg TEQUALS 2014-01-01T12:30:00.000Z")
val options = splitter.getQueryOptions(filter)
options must haveLength(2)
val z3 = options.find(_.filters.exists(_.strategy == StrategyType.Z3))
z3 must beSome
z3.get.filters must haveLength(1)
z3.get.filters.head.primary mustEqual Seq(filter)
z3.get.filters.head.secondary must beNone
val attr = options.find(_.filters.exists(_.strategy == StrategyType.ATTRIBUTE))
attr must beSome
attr.get.filters must haveLength(1)
attr.get.filters.head.primary mustEqual Seq(filter)
attr.get.filters.head.secondary must beNone
}
"provide only one option on OR queries of high cardinality indexed attributes" >> {
def testHighCard(attrPart: String): MatchResult[Any] = {
val filterString = s"($attrPart) AND BBOX(geom, 40.0,40.0,50.0,50.0) AND dtg DURING 2014-01-01T00:00:00+00:00/2014-01-01T23:59:59+00:00"
val options = splitter.getQueryOptions(f(filterString))
options must haveLength(2)
val attrOpt = options.find(_.filters.exists(_.strategy == StrategyType.ATTRIBUTE)).get
attrOpt.filters.length mustEqual 1
val attrQueryFilter = attrOpt.filters.head
attrQueryFilter.strategy mustEqual StrategyType.ATTRIBUTE
attrQueryFilter.primary.length mustEqual 5
attrQueryFilter.primary.forall(_ must beAnInstanceOf[PropertyIsEqualTo])
val attrProps = attrQueryFilter.primary.map(_.asInstanceOf[PropertyIsEqualTo])
foreach(attrProps) {_.getExpression1.asInstanceOf[AttributeExpression].getPropertyName mustEqual "high" }
attrQueryFilter.secondary.isDefined mustEqual true
attrQueryFilter.secondary.get must beAnInstanceOf[And]
attrQueryFilter.secondary.get.asInstanceOf[And].getChildren.length mustEqual 2
val z3Opt = options.find(_.filters.exists(_.strategy == StrategyType.Z3)).get
z3Opt.filters.length mustEqual 1
val z3QueryFilters = z3Opt.filters.head
z3QueryFilters.strategy mustEqual StrategyType.Z3
z3QueryFilters.primary.length mustEqual 2
z3QueryFilters.secondary.get must beAnInstanceOf[Or]
val z3Props = z3QueryFilters.secondary.get.asInstanceOf[Or].getChildren.map(_.asInstanceOf[PropertyIsEqualTo])
foreach (z3Props) { _.getExpression1.asInstanceOf[AttributeExpression].getPropertyName mustEqual "high" }
}
val orQuery = (0 until 5).map( i => s"high = 'h$i'").mkString(" OR ")
val inQuery = s"high in (${(0 until 5).map( i => s"'h$i'").mkString(",")})"
Seq(orQuery, inQuery).forall(testHighCard)
}
}
}
|
drackaer/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/QueryFilterSplitterTest.scala
|
Scala
|
apache-2.0
| 22,999 |
/*
* Copyright (c) 2013-2014 Erik van Oosten
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.grons.metrics.scala
import org.scalatest.Matchers._
import org.junit.runner.RunWith
import org.scalatest.OneInstancePerTest
import org.scalatest.FunSpec
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MetricNameSpec extends FunSpec with OneInstancePerTest {
describe("MetricName object") {
it("concatenates names with a period as separator") {
MetricName(classOf[MetricName], "part1", "part2").name should equal ("nl.grons.metrics.scala.MetricName.part1.part2")
}
it("skips nulls") {
MetricName(classOf[MetricName], "part1", null, "part3").name should equal ("nl.grons.metrics.scala.MetricName.part1.part3")
}
it("supports closures") {
      val foo: String => MetricName = _ => MetricName(this.getClass)
foo("").name should equal ("nl.grons.metrics.scala.MetricNameSpec")
}
it("supports objects") {
MetricNameSpec.ref.name should equal ("nl.grons.metrics.scala.MetricNameSpec")
}
it("supports nested objects") {
MetricNameSpec.nestedRef.name should equal ("nl.grons.metrics.scala.MetricNameSpec.Nested")
}
it("supports packages") {
nl.grons.metrics.scala.ref.name should equal ("nl.grons.metrics.scala")
}
}
describe("MetricName") {
it("appends names with a period as separator") {
MetricName(classOf[MetricName]).append("part1", "part2").name should equal ("nl.grons.metrics.scala.MetricName.part1.part2")
}
it("skips nulls") {
MetricName(classOf[MetricName]).append("part1", null, "part3").name should equal ("nl.grons.metrics.scala.MetricName.part1.part3")
}
}
}
object MetricNameSpec {
object Nested {
val ref: MetricName = MetricName(this.getClass)
}
private val ref: MetricName = MetricName(this.getClass)
private val nestedRef: MetricName = Nested.ref
}
|
scullxbones/metrics-scala
|
src/test/scala/nl/grons/metrics/scala/MetricNameSpec.scala
|
Scala
|
apache-2.0
| 2,453 |
package com.eharmony.aloha.semantics.compiled.plugin.proto.codegen
object MapType extends Enumeration {
type MapType = Value
val MAP = Value("map")
val FLAT_MAP = Value("flatMap")
val NONE = Value
}
|
eHarmony/aloha
|
aloha-io-proto/src/main/scala/com/eharmony/aloha/semantics/compiled/plugin/proto/codegen/MapType.scala
|
Scala
|
mit
| 216 |
/*
* Copyright 2017 TabMo http://tabmo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.tabmo.avro
import org.apache.avro.Schema
import org.apache.avro.generic.GenericData
trait AvroEncoder[A] {
def encode(a: A): AvroValue
}
trait ObjectEncoder[A] extends AvroEncoder[A] {
def encode(a: A): AvroObject
}
object AvroEncoder {
import shapeless._
import shapeless.labelled._
import shapeless.ops.hlist.IsHCons
private def instance[A](f: A => AvroValue) =
new AvroEncoder[A] {
override def encode(a: A): AvroValue = f(a)
}
implicit def hnilEncoder(implicit schema: Schema): AvroEncoder[HNil] =
instance[HNil](_ => AvroObject(new GenericData.Record(schema)))
implicit val booleanEncoder: AvroEncoder[Boolean] = instance(AvroBoolean)
implicit val intEncoder: AvroEncoder[Int] = instance(AvroInt)
implicit val doubleEncoder: AvroEncoder[Double] = instance(AvroDouble)
implicit val longEncoder: AvroEncoder[Long] = instance(AvroLong)
implicit val floatEncoder: AvroEncoder[Float] = instance(AvroFloat)
implicit val stringEncoder: AvroEncoder[String] = instance(AvroString)
implicit def optionEncoder[A](
implicit encoder: AvroEncoder[A]): AvroEncoder[Option[A]] =
instance {
case Some(o) => encoder.encode(o)
case None => AvroNull
}
implicit def seqEncoder[A](
implicit enc: AvroEncoder[A]): AvroEncoder[Seq[A]] =
instance(seq => AvroArray(seq))
implicit def listEncoder[A](
implicit enc: AvroEncoder[A]): AvroEncoder[List[A]] =
instance[List[A]](seq => AvroArray(seq))
implicit def mapEncoder[K, V](implicit encK: AvroEncoder[K],
encV: AvroEncoder[V]): AvroEncoder[Map[K, V]] =
    instance(map => AvroMap(map))
implicit def hlistEncoder[K <: Symbol, H, T <: shapeless.HList](
implicit witness: Witness.Aux[K],
isHCons: IsHCons.Aux[H :: T, H, T],
hEncoder: Lazy[AvroEncoder[H]],
tEncoder: Lazy[AvroEncoder[T]]
): AvroEncoder[FieldType[K, H] :: T] =
instance[FieldType[K, H] :: T] { o =>
val v = hEncoder.value.encode(isHCons.head(o))
tEncoder.value.encode(isHCons.tail(o)) match {
case res @ AvroObject(record) =>
record.put(witness.value.name, v.value)
res
case _ => AvroNull
}
}
implicit def objectEncoder[A, Repr <: HList](
implicit gen: LabelledGeneric.Aux[A, Repr],
hlistEncoder: Lazy[AvroEncoder[Repr]]
): AvroEncoder[A] = instance { o =>
hlistEncoder.value.encode(gen.to(o))
}
def apply[A](implicit enc: AvroEncoder[A]): AvroEncoder[A] = enc
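  // Usage sketch (hypothetical case class; assumes an implicit Schema for the
  // record is in scope so that hnilEncoder can create the GenericData.Record):
  //   case class User(name: String, age: Int)
  //   implicit val schema: Schema = ???
  //   val encoded: AvroValue = AvroEncoder[User].encode(User("ada", 36))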
}
|
tabmo/parquet-avro-shapeless
|
src/main/scala/io/tabmo/avro/AvroEncoders.scala
|
Scala
|
apache-2.0
| 3,214 |
package com.microsoft.awt.directives
import com.microsoft.awt.components.{EmoticonSupport, SessionFactory}
import org.scalajs.angularjs.Directive._
import org.scalajs.angularjs.sanitize.Sce
import org.scalajs.angularjs.{Attributes, Directive, JQLite, Scope, injected}
import org.scalajs.nodejs.util.ScalaJsHelper._
import org.scalajs.sjs.JsUnderOrHelper._
import scala.scalajs.js
/**
* Censorable Directive
* @author [email protected]
* @example {{{ <censorable text="{{ myText }}"></censorable> }}}
*/
class CensorableDirective($sce: Sce, @injected("SessionFactory") sessionFactory: SessionFactory) extends Directive
with ElementRestriction with EmoticonSupport with LinkSupport[CensorableDirectiveScope] with TemplateSupport {
private val CensorBlock = """<span class="sk_censored">censored</span>"""
private val SeqStart = "[["
private val SeqEnd = "]]"
override val scope = CensorableDirectiveScope(text = "@text")
override val template = """<span ng-bind-html="html"></span>"""
override def link(scope: CensorableDirectiveScope, element: JQLite, attrs: Attributes): Unit = {
scope.$watch("text", (newText: js.UndefOr[String], oldText: js.UndefOr[String]) => {
scope.html = newText.flat map replaceTags map enrichWithEmoticons
})
}
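  // e.g. (sketch): for an anonymous session, "call [[555-0100]]" becomes
  // "call <span class="sk_censored">censored</span>"; an authenticated session
  // sees the raw contents instead: "call 555-0100".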
private def replaceTags(text: String) = {
val isAnonymous = sessionFactory.session.flatMap(_.isAnonymous).getOrElse(true)
val sb = new StringBuilder(text)
var lastPos = -1
do {
val start = sb.indexOf(SeqStart, lastPos)
val end = sb.indexOf(SeqEnd, start + SeqStart.length)
if (start != -1 && end != -1) {
val limit = end + SeqEnd.length
val replacement = if (isAnonymous) CensorBlock else sb.substring(start, limit).drop(SeqStart.length).dropRight(SeqEnd.length)
sb.replace(start, limit, replacement)
lastPos = end
}
else lastPos = -1
} while (lastPos != -1)
sb.toString()
}
}
/**
* Censorable Directive Scope
* @author [email protected]
*/
@js.native
trait CensorableDirectiveScope extends Scope {
// input fields
var text: js.UndefOr[String] = js.native
/// output fields
var html: js.UndefOr[String] = js.native
}
/**
* Censorable Directive Scope Companion
* @author [email protected]
*/
object CensorableDirectiveScope {
def apply(text: String) = {
val scope = New[CensorableDirectiveScope]
scope.text = text
scope
}
}
|
ldaniels528/awt
|
app-angularjs/src/main/scala/com/microsoft/awt/directives/CensorableDirective.scala
|
Scala
|
apache-2.0
| 2,464 |
package bound
import scalaz._
import Scalaz._
/**
* A value of type `Scope[B,F,A]` is an `F` expression with bound variables in `B`
* and free variables in `A`.
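 *
 * A quick sketch with `List` as the expression functor (using the `abstract1`
 * helper, as in the `instantiate` example below):
 * {{{
 * scala> abstract1('a', "banana".toList).instantiate1("oo".toList).mkString
 * res0: String = boonoonoo
 * }}}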
*/
abstract class Scope[B,F[_],A] {
def unscope: F[Var[B,F[A]]]
def map[C](f: A => C)(implicit M: Functor[F]): Scope[B,F,C] =
Scope(unscope map (_ map (_ map f)))
def flatMap[C,D >: B](f: A => Scope[D,F,C])(implicit M: Monad[F]): Scope[D,F,C] =
Scope(unscope flatMap {
case B(b) => M.pure(B(b))
case F(a) => a flatMap (v => f(v).unscope)
})
def traverse[M[_],C](f: A => M[C])(implicit M: Applicative[M], T: Traverse[F]): M[Scope[B,F,C]] = {
    val ertraverse = Traverse[({type λ[α] = Var[B, α]})#λ] // \/#traverse broken until scalaz 7.1
unscope traverse (ertraverse.traverse(_)(_ traverse f)) map (Scope(_))
}
def foldMap[M](f: A => M)(implicit F: Foldable[F], M: Monoid[M]): M =
unscope foldMap (_ foldMap (_ foldMap f))
def foldRight[M](m: M)(f: (A, => M) => M)(implicit F: Foldable[F]) =
unscope.foldRight(m)((v, b) => v.foldRight(b)((fa, bz) => fa.foldRight(bz)(f)))
/** Bind a variable in a scope. */
def bind[C](f: A => F[C])(implicit M: Monad[F]): Scope[B,F,C] =
Scope[B,F,C](unscope map (_.map(_ flatMap f)))
/**
* Enter a scope, instantiating all bound variables.
*
* {{{
* scala> abstract1('a', "abracadabra".toList).instantiate(_ => "foo".toList).mkString
* res0: String = foobrfoocfoodfoobrfoo
* }}}
*/
def instantiate(k: B => F[A])(implicit M: Monad[F]): F[A] =
unscope flatMap {
case B(b) => k(b)
case F(a) => a
}
/** Enter a scope that binds one variable, instantiating it. */
def instantiate1(e: F[A])(implicit M: Monad[F]): F[A] =
instantiate(_ => e)
/**
* Quotients out the possible placements of `F` in this `Scope` by distributing them all
* to the leaves. This yields a more traditional de Bruijn indexing scheme for bound
* variables.
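   *
   * e.g. (sketch): `abstract1('a', "ab".toList).toDeBruijn` is a plain
   * `List[Var[Unit, Char]]`, roughly of the shape `List(B(()), F('b'))`.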
*/
def toDeBruijn(implicit M: Monad[F]): F[Var[B, A]] =
unscope flatMap {
case F(e) => e.map(F(_))
case B(b) => M.pure(B(b))
}
/** Simultaneously substitute bound and free variables. */
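  // e.g. (sketch): abstract1('a', "ab".toList).splat(_ => "X".toList, c => List(c))
  // replaces the bound 'a' with "X" and keeps the free 'b', giving List('X', 'b').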
def splat[C](fb: B => F[C], fv: A => F[C])(implicit M: Monad[F]): F[C] =
unscope flatMap {
      case -\/(b) => fb(b)
      case \/-(tm) => tm flatMap fv
}
import Show._
override def toString = Scope.scopeShow[Any,Any,Any](showA, showA, showA, showA).shows(this.asInstanceOf[Scope[Any,Any,Any]])
}
sealed abstract class ScopeInstances0 {
implicit def scopeFunctor[F[_]:Functor,D]: Functor[({type λ[α] = Scope[D,F,α]})#λ] =
new Functor[({type λ[α] = Scope[D,F,α]})#λ] {
val M = Functor[F]
override def map[A,B](a: Scope[D,F,A])(f: A => B) = a map f
}
}
sealed abstract class ScopeInstances extends ScopeInstances0 {
implicit def scopeMonad[F[_]:Monad,D]: Monad[({type λ[α] = Scope[D,F,α]})#λ] =
new Monad[({type λ[α] = Scope[D,F,α]})#λ] {
val M = Monad[F]
override def map[A,B](a: Scope[D,F,A])(f: A => B) = a map f
def point[A](a: => A) = Scope(M.pure(F(M.pure(a))))
def bind[A,B](e: Scope[D,F,A])(f: A => Scope[D,F,B]) = e flatMap f
}
implicit def scopeFoldable[F[_]:Foldable,D]: Foldable[({type λ[α] = Scope[D,F,α]})#λ] =
new Foldable[({type λ[α] = Scope[D,F,α]})#λ] {
val T = Foldable[F]
override def foldMap[A,M:Monoid](a: Scope[D,F,A])(f: A => M) = a foldMap f
def foldRight[A,B](a: Scope[D,F,A], z: => B)(f: (A, => B) => B) = a.foldRight(z)(f)
}
}
object Scope extends ScopeInstances {
def apply[B,F[_],A](f: F[Var[B,F[A]]]): Scope[B,F,A] = new Scope[B,F,A] {
def unscope = f
}
implicit def scopeShow[B,F[_],A](implicit B: Show[B],
F: Show[F[Var[B,F[A]]]],
A: Show[A],
FA: Show[F[A]]): Show[Scope[B,F,A]] =
new Show[Scope[B,F,A]] {
override def shows(s: Scope[B,F,A]) = "Scope(%s)".format(s.unscope.shows)
}
implicit def scopeEqual[B,F[_],A](implicit EB: Equal[B], M: Monad[F], EF: Equal1[F], EA: Equal[A]): Equal[Scope[B,F,A]] =
new Equal[Scope[B,F,A]] {
def equal(a: Scope[B,F,A], b: Scope[B,F,A]): Boolean = scopeEqual1[B,F].equal(a, b)
}
implicit def scopeEqual1[B,F[_]](implicit EB: Equal[B], M: Monad[F], E1F: Equal1[F]): Equal1[({type λ[α] = Scope[B,F,α]})#λ] =
new Equal1[({type λ[α] = Scope[B,F,α]})#λ] {
def equal[A](a: Scope[B,F,A], b: Scope[B,F,A])(implicit EA: Equal[A]): Boolean =
E1F.equal(fromScope(a), fromScope(b))
}
implicit def scopeTraverse[F[_]:Traverse,D]: Traverse[({type λ[α] = Scope[D,F,α]})#λ] =
new Traverse[({type λ[α] = Scope[D,F,α]})#λ] {
def traverseImpl[M[_]:Applicative,A,B](a: Scope[D,F,A])(f: A => M[B]) = a traverse f
}
implicit def scopeMonadTrans[B]: MonadTrans[({type λ[φ[_],α] = Scope[B,φ,α]})#λ] =
new MonadTrans[({type λ[φ[_],α] = Scope[B,φ,α]})#λ] {
def liftM[M[_]:Monad,A](m: M[A]) = Scope[B,M,A](Monad[M].point(F(m)))
def apply[M[_]:Monad]: Monad[({type λ[α] = Scope[B,M,α]})#λ] = scopeMonad[M,B]
}
implicit def scopeBound[B]: Bound[({type λ[φ[_],α] = Scope[B,φ,α]})#λ] =
new Bound[({type λ[φ[_],α] = Scope[B,φ,α]})#λ] {
def bind[F[_]:Monad,A,C](m: Scope[B,F,A])(f: A => F[C]): Scope[B,F,C] = m bind f
}
}
|
Chattered/scala-bound
|
core/src/main/scala/bound/Scope.scala
|
Scala
|
mit
| 5,464 |
package org.scalaide.ui.internal.completion
import org.scalaide.core.completion.ScalaCompletions
import org.eclipse.jface.text.contentassist.ICompletionProposal
import org.eclipse.jface.text.contentassist.IContextInformation
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.jdt.ui.text.java.IJavaCompletionProposalComputer
import org.eclipse.jdt.ui.text.java.ContentAssistInvocationContext
import org.eclipse.jdt.ui.text.java.JavaContentAssistInvocationContext
import org.scalaide.core.internal.jdt.model.ScalaCompilationUnit
import org.scalaide.util.ScalaWordFinder
import org.scalaide.ui.completion.ScalaCompletionProposal
class ScalaCompletionProposalComputer extends ScalaCompletions with IJavaCompletionProposalComputer {
override def sessionStarted(): Unit = {}
override def sessionEnded(): Unit = {}
override def getErrorMessage() = null
override def computeContextInformation(context : ContentAssistInvocationContext,
monitor : IProgressMonitor): java.util.List[IContextInformation] = {
// Currently not supported
java.util.Collections.emptyList()
}
override def computeCompletionProposals(context : ContentAssistInvocationContext,
monitor : IProgressMonitor): java.util.List[ICompletionProposal] = {
import java.util.Collections.{ emptyList => javaEmptyList }
val position = context.getInvocationOffset()
context match {
case jc : JavaContentAssistInvocationContext => jc.getCompilationUnit match {
case scu : ScalaCompilationUnit =>
findCompletions(position, context, scu)
case _ => javaEmptyList()
}
case _ => javaEmptyList()
}
}
private def findCompletions(position: Int, context: ContentAssistInvocationContext, scu: ScalaCompilationUnit): java.util.List[ICompletionProposal] = {
val chars = context.getDocument
val region = ScalaWordFinder.findCompletionPoint(chars, position)
val res = findCompletions(region, position, scu)
import collection.JavaConverters._
res.map(ScalaCompletionProposal(_): ICompletionProposal).asJava
}
}
|
stephenh/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/completion/ScalaCompletionProposalComputer.scala
|
Scala
|
bsd-3-clause
| 2,092 |
package com.tribbloids.spookystuff.testutils
/**
* Created by peng on 17/05/16.
*/
trait LocalURIDocsFixture extends LocalPathDocsFixture {
override def HTML_URL = "file://" + super.HTML_URL
override def JSON_URL = "file://" + super.JSON_URL
override def PNG_URL = "file://" + super.PNG_URL
override def PDF_URL = "file://" + super.PDF_URL
override def XML_URL = "file://" + super.XML_URL
override def CSV_URL = "file://" + super.CSV_URL
override def DIR_URL = "file://" + super.DIR_URL
override def DEEP_DIR_URL = "file://" + super.DEEP_DIR_URL
}
|
tribbloid/spookystuff
|
core/src/test/scala/com/tribbloids/spookystuff/testutils/LocalURIDocsFixture.scala
|
Scala
|
apache-2.0
| 571 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package base
package literals
import java.lang
import java.lang.{Float => JFloat}
import com.intellij.lang.ASTNode
import com.intellij.openapi.project.Project
import com.intellij.psi.util.PsiLiteralUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.base.literals.ScFloatLiteral
import org.jetbrains.plugins.scala.lang.psi.types.{ScType, api}
final class ScFloatLiteralImpl(node: ASTNode,
override val toString: String)
extends NumericLiteralImplBase(node, toString)
with ScFloatLiteral {
override protected def wrappedValue(value: JFloat): ScLiteral.Value[lang.Float] =
ScFloatLiteralImpl.Value(value)
override protected def parseNumber(text: String): JFloat =
PsiLiteralUtil.parseFloat(text)
override private[psi] def unwrappedValue(value: JFloat) =
value.floatValue
}
object ScFloatLiteralImpl {
final case class Value(override val value: JFloat)
extends NumericLiteralImplBase.Value(value) {
override def negate: NumericLiteralImplBase.Value[JFloat] = Value(-value)
override def presentation: String = super.presentation + 'f'
override def wideType(implicit project: Project): ScType = api.Float
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/base/literals/ScFloatLiteralImpl.scala
|
Scala
|
apache-2.0
| 1,327 |
package org.pfcoperez.dailyalgorithm.datastructures.graphs.undirected
object labeled {
case class Edge[L, Node: Ordering, W](label: L, a: Node, b: Node, weight: W) extends Link[Node, W] {
override val nodeOrdering = implicitly[Ordering[Node]]
override def key: Any = label
}
object Edge {
def apply[L, Node: Ordering](label: L, a: Node, b: Node): Edge[L, Node, NoWeight] =
Edge(label, a, b, ())
}
object UndirectedWeighedGraph {
/**
     * Build a new undirected graph given its nodes and edges.
* O(m), m = number of edges provided
*
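     * e.g. (sketch):
     * {{{
     * val g = UndirectedWeighedGraph(Set("a", "b"), Seq(Edge("ab", "a", "b", 1)))
     * }}}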
*/
def apply[L, Node: Ordering, W](nodes: Set[Node], edges: Seq[Edge[L, Node, W]]): UndirectedWeighedGraph[L, Node, W] = {
type EdgeType = Edge[L, Node, W]
(new UndirectedWeighedGraph(nodes, Map.empty[Node, Map[Node, EdgeType]], Set.empty[EdgeType]) /: edges)(_ + _)
}
}
object UndirectedGraph {
def apply[L, Node: Ordering](
nodes: Set[Node],
edges: Seq[Edge[L, Node, NoWeight]]): UndirectedWeighedGraph[L, Node, NoWeight] = UndirectedWeighedGraph(nodes, edges)
}
class UndirectedWeighedGraph[L, Node, W] private (
val nodes: Set[Node],
private val relations: Map[Node, Map[Node, Edge[L, Node, W]]],
private val edgeSet: Set[Edge[L, Node, W]]) extends UndirectedGraphOps[Node, W, Edge[L, Node, W], UndirectedWeighedGraph[L, Node, W]] {
/**
* Provide the list of edges in the graph.
* O(1)
*/
def edges(): Seq[Edge[L, Node, W]] = edgeSet.view.toSeq
/**
* Provide the list of edges related to the given node
* O(1)
*/
def edges(node: Node): Seq[Edge[L, Node, W]] =
relations.getOrElse(node, Map.empty).values.toSeq
/**
* Provide the list of adjacent nodes to the given one.
* O(1)
*/
def adjacentTo(node: Node): Set[Node] =
relations.getOrElse(node, Map.empty).keySet
/**
* Add an edge between two nodes in the graph.
* O(1)
*/
def +(edge: Edge[L, Node, W]): UndirectedWeighedGraph[L, Node, W] = {
val Edge(_, a, b, _) = edge
val newRelations = (relations /: Seq(a, b).zip(Seq(b, a))) {
case (updatedRelations, (from, to)) =>
updatedRelations + (from -> (relations.getOrElse(from, Map.empty[Node, Edge[L, Node, W]]) + (to -> edge)))
}
new UndirectedWeighedGraph(nodes, newRelations, edgeSet + edge)
}
/**
* Add a node the graph.
* O(1)
*/
def +(node: Node): UndirectedWeighedGraph[L, Node, W] =
new UndirectedWeighedGraph(nodes + node, relations, edgeSet)
/**
* Remove an edge from the graph
* O(1)
*/
def -(edge: Edge[L, Node, W]): UndirectedWeighedGraph[L, Node, W] = {
val Edge(_, a, b, _) = edge
require(Seq(a, b).forall(nodes contains _), invalidEdgeError)
val newRelations = (relations /: Seq(a, b).zip(Seq(b, a))) {
case (updatedRelations, (from, to)) =>
updatedRelations.get(from) map { adjacentEntries =>
updatedRelations.updated(from, adjacentEntries - to)
} getOrElse updatedRelations
}
new UndirectedWeighedGraph(nodes, newRelations, edgeSet - edge)
}
/**
* Remove a node from the graph
     * O(n + m), n = number of nodes, m = number of edges in the graph
*/
def -(node: Node): UndirectedWeighedGraph[L, Node, W] = {
val newEdgeSet = edgeSet filter {
case Edge(_, a, b, _) => !Set(a, b).contains(node)
}
val newRelations = (relations - node) mapValues (_ - node) filter (_._2.nonEmpty)
new UndirectedWeighedGraph(nodes - node, newRelations, newEdgeSet)
}
}
}
|
pfcoperez/algorithmaday
|
src/main/scala/org/pfcoperez/dailyalgorithm/datastructures/graphs/undirected/labeledUndirectedGraph.scala
|
Scala
|
gpl-3.0
| 3,659 |
class Test {
def res(x: quoted.Expr[Int])(using quoted.Reflection): quoted.Expr[Int] = x match { // error
case '{ 1 + $b } => // error: Type must be fully defined. Consider annotating the splice using a type ascription: (${b}: XYZ).
b // error: Not found: b
}
}
|
dotty-staging/dotty
|
tests/neg-macros/i6324.scala
|
Scala
|
apache-2.0
| 275 |
package us.feliscat.ir.fulltext.indri
import java.nio.charset.{CodingErrorAction, StandardCharsets}
import java.nio.file.Path
import us.feliscat.text.StringNone
import us.feliscat.util.LibrariesConfig
import us.feliscat.util.process._
import scala.collection.mutable.ListBuffer
import scala.concurrent.duration._
import scala.sys.process.Process
/**
* <pre>
* Created on 2017/01/13.
* </pre>
*
* @author K.Sakamoto
*/
class IndriIndexer(inputPath: Path, indexPath: Path) {
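  // Assembles the IndriBuildIndex argument vector, e.g. (illustrative values):
  //   IndriBuildIndex -field.name=TITLE -memory=2G -corpus.path=/data/trec -corpus.class=trectext -index=/data/index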
private def command: Seq[String] = {
"IndriBuildIndex" ::
"-field.name=TITLE" ::
"-memory=".concat(LibrariesConfig.indriMemory) ::
"-corpus.path=".concat(inputPath.toAbsolutePath.toString) ::
"-corpus.class=trectext" ::
"-index=".concat(indexPath.toAbsolutePath.toString) :: Nil
}
def index(): Unit = {
val buffer = ListBuffer.empty[String]
command.foreach(buffer.+=)
Process(buffer.result).lineStream(
StandardCharsets.UTF_8,
CodingErrorAction.IGNORE,
CodingErrorAction.IGNORE,
StringNone,
LibrariesConfig.indriBuildIndexTimeout.minute
).foreach(println)
}
}
|
ktr-skmt/FelisCatusZero-multilingual
|
libraries/src/main/scala/us/feliscat/ir/fulltext/indri/IndriIndexer.scala
|
Scala
|
apache-2.0
| 1,140 |
package com.scalaAsm.x86
package Instructions
package General
// Description: Store String
// Category: general/datamovstring
trait STOSD extends InstructionDefinition {
val mnemonic = "STOSD"
}
object STOSD extends ZeroOperands[STOSD] with STOSDImpl
trait STOSDImpl extends STOSD {
implicit object _0 extends NoOp{
val opcode: OneOpcode = 0xAB
override def hasImplicitOperand = true
}
}
|
bdwashbu/scala-x86-inst
|
src/main/scala/com/scalaAsm/x86/Instructions/General/STOSD.scala
|
Scala
|
apache-2.0
| 410 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.workbench.lift.comet
import xml.NodeSeq
import net.liftweb.http.{SHtml, CometActor}
import net.liftweb.http.js.{JsCmd, JsCmds}
import net.liftweb.http.js.JE.{Call, JsRaw}
import de.fuberlin.wiwiss.silk.workbench.learning._
import de.fuberlin.wiwiss.silk.learning.individual.{Population, Individual}
import de.fuberlin.wiwiss.silk.evaluation.statistics.LinkageRuleComplexity
import net.liftweb.http.js.JsCmds.{Confirm, OnLoad, SetHtml, Script}
import de.fuberlin.wiwiss.silk.workspace.{TaskDataListener, User}
import de.fuberlin.wiwiss.silk.workbench.lift.util.{LinkageRuleTree, JS}
import de.fuberlin.wiwiss.silk.evaluation.LinkageRuleEvaluator
import java.util.logging.Logger
import de.fuberlin.wiwiss.silk.util.Timer
/**
* Widget which shows the current population.
*/
class PopulationContent extends CometActor {
/** The individuals to be rendered. */
private def individuals = CurrentPopulation().individuals
/** The number of links shown on one page. */
private val pageSize = 20
/** Redraw the widget on every view, because the current learning task may change. */
override protected val dontCacheRendering = true
/**
* Redraw the widget whenever the current population is updated.
*/
private val populationListener = new TaskDataListener(CurrentPopulation) {
override def onUpdate(population: Population) {
partialUpdate(updateListCmd)
}
}
/**
* Renders this widget.
*/
override def render = {
val showListFunc = JsCmds.Function("showList", "page" :: Nil, SHtml.ajaxCall(JsRaw("page"), (pageStr) => showList(pageStr.toInt))._2.cmd)
bind("entry", defaultHtml,
"script" -> Script(OnLoad(updateListCmd) & showListFunc),
"list" -> <div id="results" />)
}
private def updateListCmd: JsCmd = {
JsRaw("initPagination(" + individuals.size + ");").cmd
}
private def showList(page: Int): JsCmd = {
val sortedIndividuals = individuals.toSeq.sortBy(-_.fitness)
val pageIndividuals = sortedIndividuals.view(page * pageSize, (page + 1) * pageSize)
SetHtml("results", renderPopulation(pageIndividuals)) & Call("initTrees").cmd & Call("updateResultsWidth").cmd
}
/**
* Renders the population.
*/
private def renderPopulation(individuals: Seq[Individual]) = {
<div>
<div class="individual">
<div class="individual-header heading">
<div class="individual-desc">Description</div>
<div class="individual-score">Score</div>
<div class="individual-mcc">MCC</div>
<div class="individual-f1">F-Measure</div>
<div class="individual-buttons">Actions</div>
</div>
</div> {
for((individual, count) <- individuals.zipWithIndex) yield {
renderIndividual(individual, count)
}
}
</div>
}
/**
* Renders a single individual.
*/
private def renderIndividual(individual: Individual, counter: Int) = {
<div class="individual" id={getId(individual)} >
{ renderIndividualHeader(individual, counter) }
{ renderIndividualContent(individual) }
<div style="clear:both"></div>
</div>
}
/**
* Renders the list header of a single individual.
*/
private def renderIndividualHeader(individual: Individual, counter: Int) = {
val scores = LinkageRuleEvaluator(individual.node.build, User().linkingTask.cache.entities)
<div class={if (counter%2==0) "individual-header grey" else "individual-header" }
onmouseover="$(this).addClass('individual-over');"
onmouseout="$(this).removeClass('individual-over');">
<div id={getId(individual, "toggle")}><span class="ui-icon ui-icon ui-icon-triangle-1-e"></span></div>
<div class="individual-desc">{renderDescription(individual)}</div>
<div class="individual-score">{renderScore(individual.fitness)}</div>
<div class="individual-mcc">{renderScore(scores.mcc)}</div>
<div class="individual-f1">{renderScore(scores.fMeasure)}</div>
<div class="individual-buttons">{renderButtons(individual)}</div>
</div>
}
/**
* Renders the description of an individual.
*/
private def renderDescription(individual: Individual) = {
val complexity = LinkageRuleComplexity(individual.node.build)
complexity.comparisonCount + " Comparisons and " + complexity.transformationCount + " Transformations"
}
/**
* Renders a score between -1.0 and 1.0.
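   * e.g. a score of 0.875 renders as "87.5%" (subject to the default locale's decimal separator).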
*/
private def renderScore(score: Double): NodeSeq = {
<div class="confidencebar">
<div class="confidence">{"%.1f".format(score * 100)}%</div>
</div>
}
/**
* Renders the action buttons for an individual.
*/
private def renderButtons(individual: Individual) = {
val image = <img src="./static/img/learn/load.png" title="Load this linkage rule in the editor" />
SHtml.a(() => loadIndividualCmd(individual), image)
}
/**
   * Renders the content of a single individual.
*/
private def renderIndividualContent(individual: Individual) = {
implicit val prefixes = User().project.config.prefixes
<div class="individual-details" id={getId(individual, "details")}>
{ LinkageRuleTree.render(individual.node.build) }
</div>
}
def loadIndividualCmd(individual: Individual) = {
def load() = {
val linkingTask = User().linkingTask
val linkSpec = linkingTask.linkSpec
val newLinkageRule = individual.node.build
User().task = linkingTask.updateLinkSpec(linkSpec.copy(rule = newLinkageRule), User().project)
JS.Redirect("editor.html")
}
Confirm("This will overwrite the current linkage rule!", SHtml.ajaxInvoke(load)._2.cmd)
}
/**
* Generates a new id based on an individual.
*/
private def getId(individual : Individual, prefix : String = "") = {
prefix + individual.hashCode
}
}
|
fusepoolP3/p3-silk
|
silk-workbench-outdated/src/main/scala/de/fuberlin/wiwiss/silk/workbench/lift/comet/PopulationContent.scala
|
Scala
|
apache-2.0
| 6,410 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.validation
import com.typesafe.config.Config
/**
* Verifies that the name and values are within the specified length bounds. The limits for
* the value of the name key can be set independently from the values of other keys. Sample
* config:
*
* ```
* name {
* min-length = 2
* max-length = 80
* }
* others {
* min-length = 2
* max-length = 60
* }
* ```
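 *
 * With this config, `validate("name", v)` enforces the 2-80 bounds, while the
 * values of all other keys are checked against the 2-60 bounds.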
*/
case class NameValueLengthRule(nameRule: ValueLengthRule, valueRule: ValueLengthRule)
extends TagRule {
override def validate(k: String, v: String): String = {
if (k == "name") nameRule.validate(k, v) else valueRule.validate(k, v)
}
}
object NameValueLengthRule {
def apply(config: Config): NameValueLengthRule = {
val nameRule = ValueLengthRule(config.getConfig("name"))
val valueRule = ValueLengthRule(config.getConfig("others"))
apply(nameRule, valueRule)
}
}
|
Netflix/atlas
|
atlas-core/src/main/scala/com/netflix/atlas/core/validation/NameValueLengthRule.scala
|
Scala
|
apache-2.0
| 1,523 |
package ranking.common
import org.scalatest.FunSuite
class RankingElementSuite extends FunSuite {
test("ranking ids are unique") {
val ids = RankingType.values.flatMap { typ =>
typ.rankings.map(_.id)
}
assert(ids.distinct.length === ids.length)
}
}
|
b-wind/MyFleetGirls
|
server/test/ranking/common/RankingElementSuite.scala
|
Scala
|
mit
| 273 |
package crakken.actor
import akka.actor._
import akka.event._
import scala.concurrent.duration._
import crakken.data.repository._
import crakken.actor.Testing.{GetState, Idle, Processing}
import crakken.data.model.{CrawlRequest, PageFetchRequest}
class CrawlRequestActor(val pageFetchActor: ActorRef, val repositoryActor: ActorRef) extends Actor with UnboundedStash with ActorLogging {
import context._
val receive = idle
override def preStart() = {
}
override def postStop() = {
}
def idle: Receive = LoggingReceive {
case (request: CrawlRequest) => {
log.debug(s"Request received. Becoming processing.")
context.setReceiveTimeout(10.seconds)
become(processing(request, List.empty))
unstashAll()
repositoryActor ! CrawlRequestMessages.create(request)
self ! PageFetchRequest(None, request.id , request.origin, None, None, request.initialRecursionLevel, request.includeExternalLinks)
}
case GetState() => sender ! Idle()
case _ => stash()
}
def processing(crawlRequest: CrawlRequest, history: List[String]): Receive = LoggingReceive {
//PageFetchRequest Messages
case PageFetchSuccess(request) => {
log.info(s"Page fetch for ${request.url} completed with code ${request.statusCode.getOrElse("unknown")}.")
repositoryActor ! PageFetchRequestMessages.update(request)
}
case PageFetchFailure(request, ex) => {
log.error(ex, "Page fetch failure received.")
}
case (request: PageFetchRequest) => {
log.info(s"Page fetch for ${request.url} received.")
if (!history.contains(request.url)) {
become(processing(crawlRequest, history :+ request.url))
repositoryActor ! PageFetchRequestMessages.create(request)
pageFetchActor ! request
}
}
case GetState() => sender ! Processing()
case ReceiveTimeout => {
log.debug("No activity received for 10 seconds. Becoming idle.")
context.setReceiveTimeout(Duration.Undefined)
become(idle)
unstashAll()
}
case _ => stash()
}
}
|
CrakkenCrawler/crakken
|
app/crakken/actor/CrawlRequestActor.scala
|
Scala
|
apache-2.0
| 2,408 |
package persistence_fsm
import actors.{DisplayOrderActor, ProductQuantityActor}
import akka.actor.{ActorRef, ActorSystem}
import akka.persistence.fsm.PersistentFSM
import akka.testkit.{ImplicitSender, TestKit, TestKitBase}
import db.populators.Seeder
import domain.models.{DeliveryMethod, PaymentMethod}
import domain._
import domain.models.response.FSMProcessInfoResponse
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}
import shared.models.Product
class OrderingProcessFSMTest extends FunSuiteLike with TestKitBase with ImplicitSender with BeforeAndAfterAll {
override implicit lazy val system: ActorSystem = ActorSystem(getClass.getSimpleName)
private val product1: Product = Product(1, "iPhone", 1)
private val product2: Product = Product(3, "Computer", 1)
private implicit val orderId = 1123523L
test("Ordering process FSM") {
val displayOrderActor = system.actorOf(DisplayOrderActor.props, "DisplayOrderActor")
val productQuantityActor = system.actorOf(ProductQuantityActor.props, "ProductQuantityActor")
val fsm = system.actorOf(OrderingProcessFSM.props(displayOrderActor, productQuantityActor), "order")
fsm ! CreateOrderCommand(orderId)
expectMsg(FSMProcessInfoResponse(Idle.toString, EmptyShoppingCart.toString, "order created!"))
processStepsOfFSMOrderingProcess(fsm)
fsm ! PersistentFSM.StateTimeout
processStepsOfFSMOrderingProcess(fsm)
fsm ! CheckoutCommand(orderId)
expectMsg(FSMProcessInfoResponse(OrderReadyToCheckout.toString, DataWithPaymentMethod(NonEmptyShoppingCart(Seq(product1, product2)), DeliveryMethod.Courier, PaymentMethod.CreditCard).toString, "order processed!"))
}
private def processStepsOfFSMOrderingProcess(fsm: ActorRef): Unit = {
val responseMessage = "adding item to shopping cart!"
fsm ! AddItemToShoppingCartCommand(product1)
expectMsg(FSMProcessInfoResponse(InShoppingCart.toString, EmptyShoppingCart.toString, responseMessage))
//To ensure order of added items
Thread.sleep(300)
fsm ! AddItemToShoppingCartCommand(product2)
expectMsg(FSMProcessInfoResponse(InShoppingCart.toString, NonEmptyShoppingCart(Seq(product1)).toString, responseMessage))
//To ensure order of added items
Thread.sleep(300)
fsm ! AddItemToShoppingCartCommand(product2)
expectMsg(FSMProcessInfoResponse(InShoppingCart.toString, NonEmptyShoppingCart(Seq(product1, product2)).toString, responseMessage))
fsm ! ConfirmShoppingCartCommand(orderId)
expectMsg(FSMProcessInfoResponse(InShoppingCart.toString, NonEmptyShoppingCart(Seq(product1, product2)).toString, "confirm shopping cart!"))
fsm ! ChooseDeliveryMethodCommand(DeliveryMethod.Courier)
expectMsg(FSMProcessInfoResponse(WaitingForChoosingDeliveryMethod.toString, NonEmptyShoppingCart(Seq(product1, product2)).toString, "delivery method chosen!"))
fsm ! ChoosePaymentMethodCommand(PaymentMethod.CreditCard)
expectMsg(FSMProcessInfoResponse(WaitingForChoosingPaymentMethod.toString, DataWithDeliveryMethod(NonEmptyShoppingCart(Seq(product1, product2)), DeliveryMethod.Courier).toString, "payment method chosen!"))
}
override protected def beforeAll(): Unit = {
Seeder.run()
}
override def afterAll() {
TestKit.shutdownActorSystem(system)
}
}
|
kkrzys/eShop
|
eShop-core/src/test/scala/persistence_fsm/OrderingProcessFSMTest.scala
|
Scala
|
apache-2.0
| 3,317 |
package at.logic.gapt.provers.veriT
import at.logic.gapt.algorithms.rewriting.TermReplacement
import at.logic.gapt.formats.veriT._
import at.logic.gapt.proofs.HOLSequent
import at.logic.gapt.proofs.expansionTrees._
import at.logic.gapt.utils.traits.ExternalProgram
import at.logic.gapt.utils.runProcess
import java.io._
import at.logic.gapt.provers._
import at.logic.gapt.expr._
object VeriT extends VeriT
class VeriT extends Prover with ExternalProgram {
override def isValid( s: HOLSequent ): Boolean = {
    // Generate the SMT-LIB input for veriT
val veritInput = SmtLibExporter( renameConstantsToFi( s )._1 )
val veritOutput = runProcess( Seq( "veriT" ), veritInput )
// Parse the output
VeriTParser.isUnsat( new StringReader( veritOutput ) )
}
private def withRenamedConstants( seq: HOLSequent )( f: HOLSequent => Option[ExpansionSequent] ): Option[ExpansionSequent] = {
val ( renamedSeq, _, invertRenaming ) = renameConstantsToFi( seq )
f( renamedSeq ) map { renamedExpSeq =>
renamedExpSeq map { TermReplacement( _, invertRenaming.toMap[LambdaExpression, LambdaExpression] ) }
}
}
/*
* Given a sequent A1, ..., An |- B1, ..., Bm, veriT's proof is actually of
* the sequent A1, ..., An, not B1, ..., not Bm |-.
* Currently there is no way to recover the antecedent/succedent formulas from
* veriT's output, so in this method we re-build the expansion sequent by
* taking the quantified equality axioms from the proof returned by veriT and
* merging them with the original end-sequent.
*/
override def getExpansionSequent( s: HOLSequent ): Option[ExpansionSequent] = withRenamedConstants( s ) { s =>
val smtBenchmark = SmtLibExporter( s )
val output = runProcess( Seq( "veriT", "--proof=-", "--proof-version=1" ), smtBenchmark )
VeriTParser.getExpansionProof( new StringReader( output ) ) match {
case Some( exp_seq ) =>
val exp_seq_quant = new ExpansionSequent(
exp_seq.antecedent.filter( f => isQuantified( f ) ),
exp_seq.succedent.filter( f => isQuantified( f ) )
)
val ant_prop = s.antecedent.map( f => formulaToExpansionTree( f, false ) )
val suc_prop = s.succedent.map( f => formulaToExpansionTree( f, true ) )
val quasi_taut = new ExpansionSequent( exp_seq_quant.antecedent ++ ant_prop, exp_seq_quant.succedent ++ suc_prop )
val taut = addSymmetry( quasi_taut )
Some( taut )
case None => None
}
}
override def getLKProof( s: HOLSequent ) = getExpansionSequent( s ) map { ExpansionProofToLK( _ ) }
val isInstalled: Boolean =
try {
runProcess( Seq( "veriT", "--version" ) )
true
} catch {
case ex: IOException => false
}
}
|
loewenheim/gapt
|
src/main/scala/at/logic/gapt/provers/veriT/VeriT.scala
|
Scala
|
gpl-3.0
| 2,753 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import java.util.UUID
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.apache.spark.{SparkEnv, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryNode}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.{Identifier, StagedTable, StagingTableCatalog, SupportsWrite, Table, TableCatalog}
import org.apache.spark.sql.connector.expressions.Transform
import org.apache.spark.sql.connector.write.{BatchWrite, DataWriterFactory, LogicalWriteInfoImpl, PhysicalWriteInfoImpl, V1Write, Write, WriterCommitMessage}
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.execution.{SparkPlan, UnaryExecNode}
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.{LongAccumulator, Utils}
/**
* Deprecated logical plan for writing data into data source v2. This is being replaced by more
* specific logical plans, like [[org.apache.spark.sql.catalyst.plans.logical.AppendData]].
*/
@deprecated("Use specific logical plans like AppendData instead", "2.4.0")
case class WriteToDataSourceV2(
relation: Option[DataSourceV2Relation],
batchWrite: BatchWrite,
query: LogicalPlan) extends UnaryNode {
override def child: LogicalPlan = query
override def output: Seq[Attribute] = Nil
override protected def withNewChildInternal(newChild: LogicalPlan): WriteToDataSourceV2 =
copy(query = newChild)
}
/**
* Physical plan node for v2 create table as select when the catalog does not support staging
* the table creation.
*
* A new table will be created using the schema of the query, and rows from the query are appended.
* If either table creation or the append fails, the table will be deleted. This implementation is
 * not atomic; for an atomic variant for catalogs that support the appropriate features, see
 * AtomicCreateTableAsSelectExec.
*/
case class CreateTableAsSelectExec(
catalog: TableCatalog,
ident: Identifier,
partitioning: Seq[Transform],
plan: LogicalPlan,
query: SparkPlan,
properties: Map[String, String],
writeOptions: CaseInsensitiveStringMap,
ifNotExists: Boolean) extends TableWriteExecHelper {
override protected def run(): Seq[InternalRow] = {
if (catalog.tableExists(ident)) {
if (ifNotExists) {
return Nil
}
throw QueryCompilationErrors.tableAlreadyExistsError(ident)
}
val schema = CharVarcharUtils.getRawSchema(query.schema).asNullable
val table = catalog.createTable(ident, schema,
partitioning.toArray, properties.asJava)
writeToTable(catalog, table, writeOptions, ident)
}
override protected def withNewChildInternal(newChild: SparkPlan): CreateTableAsSelectExec =
copy(query = newChild)
}
/**
* Physical plan node for v2 create table as select, when the catalog is determined to support
* staging table creation.
*
* A new table will be created using the schema of the query, and rows from the query are appended.
* The CTAS operation is atomic. The creation of the table is staged and the commit of the write
* should bundle the commitment of the metadata and the table contents in a single unit. If the
* write fails, the table is instructed to roll back all staged changes.
*/
case class AtomicCreateTableAsSelectExec(
catalog: StagingTableCatalog,
ident: Identifier,
partitioning: Seq[Transform],
plan: LogicalPlan,
query: SparkPlan,
properties: Map[String, String],
writeOptions: CaseInsensitiveStringMap,
ifNotExists: Boolean) extends TableWriteExecHelper {
override protected def run(): Seq[InternalRow] = {
if (catalog.tableExists(ident)) {
if (ifNotExists) {
return Nil
}
throw QueryCompilationErrors.tableAlreadyExistsError(ident)
}
val schema = CharVarcharUtils.getRawSchema(query.schema).asNullable
val stagedTable = catalog.stageCreate(
ident, schema, partitioning.toArray, properties.asJava)
writeToTable(catalog, stagedTable, writeOptions, ident)
}
override protected def withNewChildInternal(newChild: SparkPlan): AtomicCreateTableAsSelectExec =
copy(query = newChild)
}
/**
* Physical plan node for v2 replace table as select when the catalog does not support staging
* table replacement.
*
* A new table will be created using the schema of the query, and rows from the query are appended.
* If the table exists, its contents and schema should be replaced with the schema and the contents
* of the query. This is a non-atomic implementation that drops the table and then runs non-atomic
 * CTAS. For an atomic implementation for catalogs with the appropriate support, see
 * AtomicReplaceTableAsSelectExec.
*/
case class ReplaceTableAsSelectExec(
catalog: TableCatalog,
ident: Identifier,
partitioning: Seq[Transform],
plan: LogicalPlan,
query: SparkPlan,
properties: Map[String, String],
writeOptions: CaseInsensitiveStringMap,
orCreate: Boolean,
invalidateCache: (TableCatalog, Table, Identifier) => Unit) extends TableWriteExecHelper {
override protected def run(): Seq[InternalRow] = {
// Note that this operation is potentially unsafe, but these are the strict semantics of
// RTAS if the catalog does not support atomic operations.
//
// There are numerous cases we concede to where the table will be dropped and irrecoverable:
//
// 1. Creating the new table fails,
// 2. Writing to the new table fails,
// 3. The table returned by catalog.createTable doesn't support writing.
if (catalog.tableExists(ident)) {
val table = catalog.loadTable(ident)
invalidateCache(catalog, table, ident)
catalog.dropTable(ident)
} else if (!orCreate) {
throw QueryCompilationErrors.cannotReplaceMissingTableError(ident)
}
val schema = CharVarcharUtils.getRawSchema(query.schema).asNullable
val table = catalog.createTable(
ident, schema, partitioning.toArray, properties.asJava)
writeToTable(catalog, table, writeOptions, ident)
}
override protected def withNewChildInternal(newChild: SparkPlan): ReplaceTableAsSelectExec =
copy(query = newChild)
}
/**
* Physical plan node for v2 replace table as select when the catalog supports staging
* table replacement.
*
* A new table will be created using the schema of the query, and rows from the query are appended.
* If the table exists, its contents and schema should be replaced with the schema and the contents
* of the query. This implementation is atomic. The table replacement is staged, and the commit
* operation at the end should perform the replacement of the table's metadata and contents. If the
* write fails, the table is instructed to roll back staged changes and any previously written table
* is left untouched.
*/
case class AtomicReplaceTableAsSelectExec(
catalog: StagingTableCatalog,
ident: Identifier,
partitioning: Seq[Transform],
plan: LogicalPlan,
query: SparkPlan,
properties: Map[String, String],
writeOptions: CaseInsensitiveStringMap,
orCreate: Boolean,
invalidateCache: (TableCatalog, Table, Identifier) => Unit) extends TableWriteExecHelper {
override protected def run(): Seq[InternalRow] = {
val schema = CharVarcharUtils.getRawSchema(query.schema).asNullable
if (catalog.tableExists(ident)) {
val table = catalog.loadTable(ident)
invalidateCache(catalog, table, ident)
}
val staged = if (orCreate) {
catalog.stageCreateOrReplace(
ident, schema, partitioning.toArray, properties.asJava)
} else if (catalog.tableExists(ident)) {
try {
catalog.stageReplace(
ident, schema, partitioning.toArray, properties.asJava)
} catch {
case e: NoSuchTableException =>
throw QueryCompilationErrors.cannotReplaceMissingTableError(ident, Some(e))
}
} else {
throw QueryCompilationErrors.cannotReplaceMissingTableError(ident)
}
writeToTable(catalog, staged, writeOptions, ident)
}
override protected def withNewChildInternal(newChild: SparkPlan): AtomicReplaceTableAsSelectExec =
copy(query = newChild)
}
/**
* Physical plan node for append into a v2 table.
*
* Rows in the output data set are appended.
*/
case class AppendDataExec(
query: SparkPlan,
refreshCache: () => Unit,
write: Write) extends V2ExistingTableWriteExec {
override protected def withNewChildInternal(newChild: SparkPlan): AppendDataExec =
copy(query = newChild)
}
/**
* Physical plan node for overwrite into a v2 table.
*
* Overwrites data in a table matched by a set of filters. Rows matching all of the filters will be
* deleted and rows in the output data set are appended.
*
* This plan is used to implement SaveMode.Overwrite. The behavior of SaveMode.Overwrite is to
* truncate the table -- delete all rows -- and append the output data set. This uses the filter
* AlwaysTrue to delete all rows.
*/
case class OverwriteByExpressionExec(
query: SparkPlan,
refreshCache: () => Unit,
write: Write) extends V2ExistingTableWriteExec {
override protected def withNewChildInternal(newChild: SparkPlan): OverwriteByExpressionExec =
copy(query = newChild)
}
/**
* Physical plan node for dynamic partition overwrite into a v2 table.
*
* Dynamic partition overwrite is the behavior of Hive INSERT OVERWRITE ... PARTITION queries, and
* Spark INSERT OVERWRITE queries when spark.sql.sources.partitionOverwriteMode=dynamic. Each
* partition in the output data set replaces the corresponding existing partition in the table or
* creates a new partition. Existing partitions for which there is no data in the output data set
* are not modified.
*/
case class OverwritePartitionsDynamicExec(
query: SparkPlan,
refreshCache: () => Unit,
write: Write) extends V2ExistingTableWriteExec {
override protected def withNewChildInternal(newChild: SparkPlan): OverwritePartitionsDynamicExec =
copy(query = newChild)
}
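// A minimal usage sketch of the dynamic overwrite semantics described above
// (table and view names are hypothetical):
//
//   spark.conf.set("spark.sql.sources.partitionOverwriteMode", "dynamic")
//   sql("INSERT OVERWRITE TABLE t PARTITION (p) SELECT value, p FROM updates")
//
// Only the partitions for which `updates` contains rows are replaced; all
// other partitions of `t` are left untouched.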
case class WriteToDataSourceV2Exec(
batchWrite: BatchWrite,
refreshCache: () => Unit,
query: SparkPlan) extends V2TableWriteExec {
override protected def run(): Seq[InternalRow] = {
val writtenRows = writeWithV2(batchWrite)
refreshCache()
writtenRows
}
override protected def withNewChildInternal(newChild: SparkPlan): WriteToDataSourceV2Exec =
copy(query = newChild)
}
trait V2ExistingTableWriteExec extends V2TableWriteExec {
def refreshCache: () => Unit
def write: Write
override protected def run(): Seq[InternalRow] = {
val writtenRows = writeWithV2(write.toBatch)
refreshCache()
writtenRows
}
}
/**
* The base physical plan for writing data into data source v2.
*/
trait V2TableWriteExec extends V2CommandExec with UnaryExecNode {
def query: SparkPlan
var commitProgress: Option[StreamWriterCommitProgress] = None
override def child: SparkPlan = query
override def output: Seq[Attribute] = Nil
protected def writeWithV2(batchWrite: BatchWrite): Seq[InternalRow] = {
val rdd: RDD[InternalRow] = {
val tempRdd = query.execute()
// SPARK-23271 If we are attempting to write a zero partition rdd, create a dummy single
// partition rdd to make sure we at least set up one write task to write the metadata.
if (tempRdd.partitions.length == 0) {
sparkContext.parallelize(Array.empty[InternalRow], 1)
} else {
tempRdd
}
}
val writerFactory = batchWrite.createBatchWriterFactory(
PhysicalWriteInfoImpl(rdd.getNumPartitions))
val useCommitCoordinator = batchWrite.useCommitCoordinator
val messages = new Array[WriterCommitMessage](rdd.partitions.length)
val totalNumRowsAccumulator = new LongAccumulator()
logInfo(s"Start processing data source write support: $batchWrite. " +
s"The input RDD has ${messages.length} partitions.")
try {
sparkContext.runJob(
rdd,
(context: TaskContext, iter: Iterator[InternalRow]) =>
DataWritingSparkTask.run(writerFactory, context, iter, useCommitCoordinator),
rdd.partitions.indices,
(index, result: DataWritingSparkTaskResult) => {
val commitMessage = result.writerCommitMessage
messages(index) = commitMessage
totalNumRowsAccumulator.add(result.numRows)
batchWrite.onDataWriterCommit(commitMessage)
}
)
logInfo(s"Data source write support $batchWrite is committing.")
batchWrite.commit(messages)
logInfo(s"Data source write support $batchWrite committed.")
commitProgress = Some(StreamWriterCommitProgress(totalNumRowsAccumulator.value))
} catch {
case cause: Throwable =>
logError(s"Data source write support $batchWrite is aborting.")
try {
batchWrite.abort(messages)
} catch {
case t: Throwable =>
logError(s"Data source write support $batchWrite failed to abort.")
cause.addSuppressed(t)
throw QueryExecutionErrors.writingJobFailedError(cause)
}
logError(s"Data source write support $batchWrite aborted.")
cause match {
// Only wrap non fatal exceptions.
case NonFatal(e) => throw QueryExecutionErrors.writingJobAbortedError(e)
case _ => throw cause
}
}
Nil
}
}
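// A sketch of the driver-side protocol that writeWithV2 implements, assuming
// a BatchWrite `bw` and the commit messages gathered from the tasks:
//
//   val factory = bw.createBatchWriterFactory(PhysicalWriteInfoImpl(numParts))
//   // every task: writer = factory.createWriter(partId, taskId);
//   //             writer.write(row)* ; writer.commit() -> WriterCommitMessage
//   bw.commit(messages)   // all tasks succeeded
//   bw.abort(messages)    // some task failed; abort errors are suppressed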
object DataWritingSparkTask extends Logging {
def run(
writerFactory: DataWriterFactory,
context: TaskContext,
iter: Iterator[InternalRow],
useCommitCoordinator: Boolean): DataWritingSparkTaskResult = {
val stageId = context.stageId()
val stageAttempt = context.stageAttemptNumber()
val partId = context.partitionId()
val taskId = context.taskAttemptId()
val attemptId = context.attemptNumber()
val dataWriter = writerFactory.createWriter(partId, taskId)
var count = 0L
// write the data and commit this writer.
Utils.tryWithSafeFinallyAndFailureCallbacks(block = {
while (iter.hasNext) {
        // Count each row before handing it to the writer.
count += 1
dataWriter.write(iter.next())
}
val msg = if (useCommitCoordinator) {
val coordinator = SparkEnv.get.outputCommitCoordinator
val commitAuthorized = coordinator.canCommit(stageId, stageAttempt, partId, attemptId)
if (commitAuthorized) {
logInfo(s"Commit authorized for partition $partId (task $taskId, attempt $attemptId, " +
s"stage $stageId.$stageAttempt)")
dataWriter.commit()
} else {
val commitDeniedException = QueryExecutionErrors.commitDeniedError(
partId, taskId, attemptId, stageId, stageAttempt)
logInfo(commitDeniedException.getMessage)
// throwing CommitDeniedException will trigger the catch block for abort
throw commitDeniedException
}
} else {
logInfo(s"Writer for partition ${context.partitionId()} is committing.")
dataWriter.commit()
}
logInfo(s"Committed partition $partId (task $taskId, attempt $attemptId, " +
s"stage $stageId.$stageAttempt)")
DataWritingSparkTaskResult(count, msg)
})(catchBlock = {
// If there is an error, abort this writer
logError(s"Aborting commit for partition $partId (task $taskId, attempt $attemptId, " +
s"stage $stageId.$stageAttempt)")
dataWriter.abort()
logError(s"Aborted commit for partition $partId (task $taskId, attempt $attemptId, " +
s"stage $stageId.$stageAttempt)")
}, finallyBlock = {
dataWriter.close()
})
}
}
private[v2] trait TableWriteExecHelper extends V2TableWriteExec with SupportsV1Write {
protected def writeToTable(
catalog: TableCatalog,
table: Table,
writeOptions: CaseInsensitiveStringMap,
ident: Identifier): Seq[InternalRow] = {
Utils.tryWithSafeFinallyAndFailureCallbacks({
table match {
case table: SupportsWrite =>
val info = LogicalWriteInfoImpl(
queryId = UUID.randomUUID().toString,
query.schema,
writeOptions)
val writeBuilder = table.newWriteBuilder(info)
val write = writeBuilder.build()
val writtenRows = write match {
case v1: V1Write => writeWithV1(v1.toInsertableRelation)
case v2 => writeWithV2(v2.toBatch)
}
table match {
case st: StagedTable => st.commitStagedChanges()
case _ =>
}
writtenRows
case _ =>
          // Table does not support writes: if it is a staged table, the
          // staged changes are rolled back in the catch block below.
throw QueryExecutionErrors.unsupportedTableWritesError(ident)
}
})(catchBlock = {
table match {
// Failure rolls back the staged writes and metadata changes.
case st: StagedTable => st.abortStagedChanges()
case _ => catalog.dropTable(ident)
}
})
}
}
private[v2] case class DataWritingSparkTaskResult(
numRows: Long,
writerCommitMessage: WriterCommitMessage)
/**
* Sink progress information collected after commit.
*/
private[sql] case class StreamWriterCommitProgress(numOutputRows: Long)
|
wangmiao1981/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Exec.scala
|
Scala
|
apache-2.0
| 18,441 |
package io.github.tailhq.dynaml.probability.distributions
import breeze.stats.distributions._
/**
* Univariate Truncated Gaussian Distribution
*
* @param mu The mean of the base gaussian.
* @param sigma Std Deviation of the base gaussian.
* @param a Lower limit of truncation.
* @param b Upper limit of truncation.
* @author tailhq date 03/03/2017.
*
* */
case class TruncatedGaussian(mu: Double, sigma: Double, a: Double, b: Double)(
implicit rand: RandBasis = Rand) extends
AbstractContinuousDistr[Double] with
HasCdf with HasInverseCdf with Moments[Double, Double] {
require(sigma > 0.0, "Std Dev must be positive.")
  require(a < b, "The lower limit a must be strictly less than the upper limit b.")
private val baseGaussian = Gaussian(mu, sigma)
private val z = baseGaussian.cdf(b) - baseGaussian.cdf(a)
private val y = baseGaussian.pdf(b) - baseGaussian.pdf(a)
private val (alpha, beta) = ((a-mu)/sigma, (b-mu)/sigma)
override def probability(x: Double, y: Double) = {
    require(
      x <= y,
      "The lower limit x must be less than or equal to the upper limit y in P(x <= a <= y)")
cdf(y) - cdf(x)
}
override def cdf(x: Double) =
if(x <= a) 0.0
else if(x >= b) 1.0
    else (baseGaussian.cdf(x) - baseGaussian.cdf(a))/z
override def inverseCdf(p: Double) = baseGaussian.inverseCdf(baseGaussian.cdf(a) + p*z)
override def unnormalizedLogPdf(x: Double) =
if(x <= b && x >= a) baseGaussian.logPdf(x)
else Double.NegativeInfinity
override def logNormalizer = math.log(z)
override def draw() = {
inverseCdf(rand.uniform.draw())
}
  // breeze's Gaussian.pdf is the density of N(mu, sigma), i.e. phi((x-mu)/sigma)/sigma,
  // hence the extra sigma factors relative to the standard-normal formulas.
  override def mean = mu - sigma*sigma*(baseGaussian.pdf(b) - baseGaussian.pdf(a))/z
  override def variance =
    sigma*sigma*(1.0 - sigma*(beta*baseGaussian.pdf(b) - alpha*baseGaussian.pdf(a))/z - math.pow(sigma*y/z, 2.0))
  override def entropy =
    sigma*(alpha*baseGaussian.pdf(a) - beta*baseGaussian.pdf(b))/(2*z) + math.log(math.sqrt(2*math.Pi*math.exp(1.0))*sigma*z)
override def mode = if (mu < a) a else if (mu > b) b else mu
}
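// A minimal usage sketch, assuming breeze's implicit default RandBasis:
//
//   val tg = TruncatedGaussian(mu = 0.0, sigma = 1.0, a = -1.0, b = 2.0)
//   val x  = tg.draw()                // inverse-CDF sampling within [-1, 2]
//   val p  = tg.probability(0.0, 1.0) // P(0 <= X <= 1) under truncation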
|
mandar2812/DynaML
|
dynaml-core/src/main/scala/io/github/tailhq/dynaml/probability/distributions/TruncatedGaussian.scala
|
Scala
|
apache-2.0
| 2,009 |
package edu.knowitall.cluewebextractor
import de.l3s.boilerpipe.extractors
import edu.washington.cs.knowitall.tool.sentence.OpenNlpSentencer
import edu.washington.cs.knowitall.common.Timing
import edu.washington.cs.knowitall.common.Resource
import scala.collection.JavaConverters._
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.io.FileWriter
import java.io.PrintStream
import java.io.InputStream
import java.io.FileInputStream
import java.io.File
import java.io.PrintWriter
import java.io.BufferedInputStream
import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.DataInputStream
import java.util.zip.GZIPInputStream
/**
* Copyright 2013 David H Jung
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ----------------------- END OF LICENSE INFO --------------------------------
*
* CLI that takes as input either a .warc file or directory containing .warc
* files and outputs the extracted payload content in either a default or
* specified directory.
*
* If the user inputs a directory that contains a .warc file with an already-
* existing corresponding output file, it will be skipped.
*
* If the user inputs a single .warc file with an already-existing
* corresponding output file, it will be overwritten.
*
* On bad input or unexpected errors, this program will choose to log the error
* and skip over the file (or document, depending on the granularity of the
* error) rather than stop execution.
*/
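// A minimal invocation sketch (file and directory names are hypothetical):
//
//   java -jar cluewebextractor.jar segment-00.warc.gz --output-dir out/
//
// which writes out/segment-00.sentences, one tab-separated record per
// sentence: trec-id, uri, date, sentence index, sentence text.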
object CluewebExtractorMain extends App {
val logger = LoggerFactory.getLogger(this.getClass)
// What we use to process the warc records
val garbager = new GarbageFilter()
val nlpSentencer = new OpenNlpSentencer()
val bp = new extractors.DefaultExtractor()
case class Config(
inputFiles: Seq[File] = Seq.empty,
outputDirectory: Option[File] = None) {}
// Defines the command line arguments.
val parser = new scopt.immutable.OptionParser[Config]("cweb") {
def options = Seq(
arglist("<input-files>", "pattern file") { (path: String, config: Config) =>
val file = new File(path)
require(file.exists(), "file does not exist: " + path)
config.copy(inputFiles = (config.inputFiles :+ file))
},
opt("output-dir", "output directory") { (path: String, config: Config) =>
val file = new File(path)
require(file.exists, "directory does not exist: " + path)
require(file.isDirectory, "file is not a directory: " + path)
config.copy(outputDirectory = Some(file))
})
}
parser.parse(args, Config()) match {
case Some(config) => run(config)
case None => System.err.println(usage)
}
def run(config: Config) {
// Files contains (inputFile, outputFile) pairs.
val files: Iterable[(File, File)] = getInputOutputFiles(config)
// For each (input, output) pair, get a warc record iterator for the input
// and write the corresponding extracted payload to the output
for ((inputFile, outputFile) <- files) {
try {
processWarcFile(inputFile, outputFile)
} catch {
case e: Throwable =>
        logger.error("Error while processing warc file: " + inputFile +
          ". Skipping file. \n\t" + e + ": " + e.getStackTraceString)
}
}
}
// Given an input warc file and its corresponding output file, processes the
// input and writes out the payloads to outputFile.
def processWarcFile(inputFile: File, outputFile: File) = {
val ns = Timing.time {
Resource.using(openInputStream(inputFile)) { is =>
Resource.using(new PrintWriter(outputFile, "UTF8")) { writer =>
val warcIt = new WarcRecordIterator(
new DataInputStream(
new BufferedInputStream(is)))
logger.info("Successfully created new warc iterator")
var lastDocument = 0
var nanos = System.nanoTime()
// Iterate over warc records
for (warc <- warcIt.flatten) {
if (warc.warcType.equals("response") &&
!warc.payload.equals("")) {
// If this document is a multiple of a thousand, note it in the log
// and the current documents / second
if (warcIt.currentDocument % 1000 == 0 &&
lastDocument != warcIt.currentDocument) {
logger.info("Processing document: " + warcIt.currentDocument +
" (" +
("%.2f" format (warcIt.currentDocument.toDouble /
((System.nanoTime - nanos).toDouble /
Timing.Seconds.divisor.toDouble))) + " doc/sec)")
lastDocument = warcIt.currentDocument
}
try {
processWarcRecord(warc, writer)
} catch {
case e: Throwable =>
logger.error("Error while processing warc record: " +
              warc.warcTrecId + "\n\t" + e + ": " + e.getStackTraceString)
}
}
}
}}}
logger.info("Processed file '" + inputFile.getName + "' -> '"
+ outputFile.getName + "' in: " + Timing.Seconds.format(ns))
}
// Given a warc record, processes it using boilerpipe and writes each
// sentences out to writer
def processWarcRecord(warc: WarcRecord, writer: PrintWriter) = {
// piped stores the payload after being passed through boilerpipe
val piped = try {
bp.getText(warc.payload.trim)
} catch {
case e: Throwable =>
        logger.error("Error during boilerpipe extraction. " +
          "Skipping document: " + warc.warcTrecId + "\n\t" +
          e + ": " + e.getStackTraceString)
""
}
val sentences = nlpSentencer.segmentTexts(piped)
// iterate over sentences
var i = 0
for (s <- sentences) {
try {
if (processSentence(s, warc, writer, i))
i += 1
} catch {
case e: Throwable =>
logger.error("Error while processing sentence " +
            warc.warcTrecId + ":" + i + "\n\t" + e + ": " +
e.getStackTraceString)
}
}
}
// Processes a given warc sentence with some filters. If the sentence passes
// through the filters, it gets written into writer.
def processSentence(sent: String,
warc: WarcRecord,
writer: PrintWriter,
i: Int) = {
val sentence = garbager.removeWhitespace(sent)
if (!garbager.containsHtml(sentence) &&
!garbager.tooLong(sentence) &&
!garbager.tooShort(sentence)) {
      writer.println(warc.warcTrecId + "\t" +
        warc.warcUri + "\t" +
        warc.warcDate + "\t" +
        i + "\t" +
sentence)
true
} else {
false
}
}
// Given a Config object with input files, returns an Iterable over pairs of
// (inputfile, outputfile), where inputfile corresponds to a File in config,
// and outputfile is the corresponding output file.
def getInputOutputFiles(config: Config): Iterable[(File, File)] = {
import org.apache.commons.io.FileUtils
import scala.collection.JavaConverters._
config.inputFiles.flatMap {
file =>
// if it's a directory, search subdirectories
if (file.isDirectory) {
val files: Iterable[File] =
FileUtils.listFiles(file, Array("gz", "warc"), true).asScala
files.flatMap { inputFile =>
val subdirectory = inputFile.getParentFile.getPath.drop(file.getParentFile.getPath.length).drop(1)
// build the output file
val outputDirectory = config.outputDirectory match {
case Some(dir) => new File(dir, subdirectory)
case None => new File(subdirectory)
}
// create the file's parent directory if it doesn't exist
outputDirectory.mkdirs
val outputFileName = makeOutputFileName(inputFile)
val outputFile = new File(outputDirectory, outputFileName)
// if the output file already exists, skip by returning None
if (outputFile.exists) {
None
} else {
Some(inputFile, outputFile)
}
}
} else {
// the user input a simple .warc file
val outputFileName = makeOutputFileName(file)
val outputFile = config.outputDirectory match {
case Some(dir) => new File(dir, outputFileName)
case None => new File(outputFileName)
}
Some(file, outputFile)
}
}
}
// Output filename is the input filename up to and including the first dot
// with "sentences" as the extension.
def makeOutputFileName(inputFile: File) = {
inputFile.getName().takeWhile(_ != '.') + ".sentences"
}
  def usage: String =
    "Usage: java -jar <this.jar> <input.warc(.gz)> [--output-dir <dir>]"
def openInputStream(file: File): InputStream = {
if (file.getName endsWith ".gz") {
// then the user has passed in .warc.gz file
logger.info("Opening zip file " + file)
new GZIPInputStream(new FileInputStream(file))
} else {
// then the user has passed in .warc file
logger.info("Opening file " + file)
new FileInputStream(file)
}
}
}
|
xdavidjung/cluewebextractor
|
src/main/scala/edu/knowitall/cluewebextractor/CluewebExtractorMain.scala
|
Scala
|
apache-2.0
| 9,717 |
/*
* #%L
* vaadinutil
* %%
* Copyright (C) 2014 KangWoo, Lee
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.imcharsi.vaadin.vaadinutil.converter
import java.sql.Date
import java.util.Locale
import com.vaadin.data.util.converter.Converter.ConversionException
import org.apache.commons.lang3.time.FastDateFormat
import scala.util.{Failure, Success, Try}
/**
* Created by KangWoo,Lee on 14. 7. 30.
*/
class OptionDateConverter(pattern: String = "yyyy-MM-dd") extends AbstractOptionStringConverter[Date] {
val fastDateFormat: FastDateFormat = FastDateFormat.getInstance(pattern, Locale.getDefault)
override protected def innerConvertToModel(value: String, targetType: Class[_ <: Option[Date]], locale: Locale): Option[Date] = {
Try(Some(fastDateFormat.parse(value)).map(_.getTime).map(new Date(_))) match {
case Success(x) ⇒ x
case Failure(x) ⇒ throw new ConversionException(x)
}
}
override protected def innerConvertToPresentation(value: Option[Date], targetType: Class[_ <: String], locale: Locale): String = {
Try(value.map(fastDateFormat.format)) match {
case Success(Some(x)) ⇒ x
case Success(None) ⇒ null
case Failure(x) ⇒ throw new ConversionException(x)
}
}
}
object OptionDateConverter extends OptionDateConverter("yyyy-MM-dd")
class OptionDateConverter2(pattern: String = "yyyy-MM-dd") extends AbstractConverter[java.util.Date, Option[Date]] {
val fastDateFormat: FastDateFormat = FastDateFormat.getInstance(pattern, Locale.getDefault)
override def convertToModel(p1: java.util.Date, p2: Class[_ <: Option[Date]], p3: Locale): Option[Date] = {
if (p1 == null)
None
else
Some(new Date(p1.getTime))
}
override def convertToPresentation(p1: Option[Date], p2: Class[_ <: java.util.Date], p3: Locale): java.util.Date = {
p1 match {
case Some(x) ⇒ x
case None ⇒ null
}
}
}
object OptionDateConverter2 extends OptionDateConverter2("yyyy-MM-dd")
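// A minimal usage sketch for binding Vaadin fields to Option[java.sql.Date]
// properties (the field names are hypothetical):
//
//   textField.setConverter(OptionDateConverter)        // String <-> Option[Date]
//   popupDateField.setConverter(OptionDateConverter2)  // java.util.Date <-> Option[Date]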
|
imcharsi/vaadinutil
|
src/main/scala/com/github/imcharsi/vaadin/vaadinutil/converter/OptionDateConverter.scala
|
Scala
|
apache-2.0
| 2,528 |
package models
/**
* Created by alper on 26/11/14.
*/
case class LoginCombination(pin:String,pwd:String, fp:String, card:String, face:String)
object LoginCombination{
def fromParser(
pin:String = "",
pwd:String = "",
fp:String = "",
card:String = "",
face:String = ""
):LoginCombination = {
new LoginCombination(
pin,
pwd,
fp,
card,
face
)
}
}
|
ZKTecoEu/ZKRestApi
|
ZKRestServer/app/models/LoginCombination.scala
|
Scala
|
mit
| 506 |
package controllers
import javax.inject.Inject
import actors.{ContainersActor, CreateContainer, Port}
import akka.actor.ActorSystem
import play.api.Mode
import play.api.Play.current
import play.api.libs.json._
import play.api.libs.ws.WSClient
import play.api.mvc.{Action, Controller, WebSocket}
import play.api.mvc.WebSocket.FrameFormatter
import scala.concurrent.ExecutionContext.Implicits._
class ContainersController @Inject() (ws: WSClient, system: ActorSystem) extends Controller {
/*Json formatters to get data from websocket*/
implicit val PortFormat = Json.format[Port]
implicit val PortFormatter = FrameFormatter.jsonFrame[Port]
implicit val CreateContainerFormat = Json.format[CreateContainer]
implicit val CreateContainerFormatter = FrameFormatter.jsonFrame[CreateContainer]
/*Create a container with the CreateContainer object*/
def create = WebSocket.acceptWithActor[CreateContainer, String] { request => out =>
ContainersActor.props(out, ws)
}
  /* Get the swarm master address: the IP configured in vapp.swarm-master.ip in Dev mode, a fixed external address in Prod mode */
val swarmMaster = current.mode match {
case Mode.Dev => current.configuration.getString("vapp.swarm-master.ip").get+":8080"
case Mode.Prod => "192.168.2.100:3376"
}
/*
* Start a container with the id
*/
def start(id : String) = Action.async {
ws.url("https://"+swarmMaster+"/containers/"+id+"/start").post("").map(response =>
{
Redirect(routes.Application.dashboard())
})
}
/*
* Stop a container with the id
*/
def stop(id : String) = Action.async {
ws.url("https://"+swarmMaster+"/containers/"+id+"/stop").post("").map(response =>
{
Redirect(routes.Application.dashboard())
})
}
/*
   * Delete a container with the id
*/
def delete(id : String) = Action.async {
ws.url("https://"+swarmMaster+"/containers/"+id).delete.map(response =>
{
Redirect(routes.Application.dashboard())
})
}
}
|
snigle/FrontDockerOrchestrator
|
PlayProject/app/controllers/ContainersController.scala
|
Scala
|
mit
| 1,974 |
package nz.wicker.autoencoder.math.structure
import math._
/**
* Vector space structure with `Double` as field,
* for everything that looks like Real^n for some
* natural n.
*
* We are aware of the fact that Real numbers is
* not the only field, and we are also aware that
* scalar product structure does not necessary
* exist for every vector space. Real^n is really
* all we need at the moment.
*/
trait VectorSpace[V <: VectorSpace[V]] {
self: V =>
def +(v: V): V
def unary_- : V
def zero: V
def -(v: V): V = this + (-v)
def *(d: Double): V
def /(d: Double): V = this * (1/d)
def dot(v: V): Double
def normSq: Double = this.dot(this)
def norm: Double = sqrt(normSq)
def normalized = this / norm
def isNaN: Boolean
def isInfinite: Boolean
def isInvalid = isNaN || isInfinite
}
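/**
 * A minimal example instance (illustrative only): plain 2D vectors.
 */
case class Vec2(x: Double, y: Double) extends VectorSpace[Vec2] {
  def +(v: Vec2): Vec2 = Vec2(x + v.x, y + v.y)
  def unary_- : Vec2 = Vec2(-x, -y)
  def zero: Vec2 = Vec2(0.0, 0.0)
  def *(d: Double): Vec2 = Vec2(x * d, y * d)
  def dot(v: Vec2): Double = x * v.x + y * v.y
  def isNaN: Boolean = x.isNaN || y.isNaN
  def isInfinite: Boolean = x.isInfinite || y.isInfinite
}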
|
joergwicker/autoencoder
|
src/main/scala/nz/wicker/autoencoder/math/structure/VectorSpace.scala
|
Scala
|
gpl-3.0
| 826 |
package pl.touk.nussknacker.ui.api
import akka.http.scaladsl.server.Directive1
import pl.touk.nussknacker.engine.api.process.ProcessName
import pl.touk.nussknacker.restmodel.process.{ProcessIdWithName, ProcessIdWithNameAndCategory}
import pl.touk.nussknacker.restmodel.processdetails.{BaseProcessDetails, ProcessShapeFetchStrategy}
import pl.touk.nussknacker.ui.process.repository.FetchingProcessRepository
import pl.touk.nussknacker.ui.process.repository.ProcessDBQueryRepository.ProcessNotFoundError
import pl.touk.nussknacker.ui.security.api.LoggedUser
import scala.concurrent.{ExecutionContext, Future}
trait ProcessDirectives {
import akka.http.scaladsl.server.Directives._
val processRepository: FetchingProcessRepository[Future]
implicit val ec: ExecutionContext
def processId(processName: String): Directive1[ProcessIdWithName] = {
handleExceptions(EspErrorToHttp.espErrorHandler).tflatMap { _ =>
onSuccess(processRepository.fetchProcessId(ProcessName(processName))).flatMap {
case Some(processId) => provide(ProcessIdWithName(processId, ProcessName(processName)))
case None => failWith(ProcessNotFoundError(processName))
}
}
}
def processIdWithCategory(processName: String): Directive1[ProcessIdWithNameAndCategory] = {
handleExceptions(EspErrorToHttp.espErrorHandler).tflatMap { _ =>
onSuccess(processRepository.fetchProcessDetails(ProcessName(processName))).flatMap {
case Some(details) => provide(ProcessIdWithNameAndCategory(details.id, ProcessName(processName), details.processCategory))
case None => failWith(ProcessNotFoundError(processName))
}
}
}
def processDetailsForName[PS: ProcessShapeFetchStrategy](processName: String)(implicit loggedUser: LoggedUser): Directive1[BaseProcessDetails[PS]] = {
processId(processName).tflatMap { processId =>
handleExceptions(EspErrorToHttp.espErrorHandler).tflatMap { _ =>
onSuccess(processRepository.fetchLatestProcessDetailsForProcessId[PS](processId._1.id)).flatMap {
case Some(process) => provide(process)
case None => failWith(ProcessNotFoundError(processName))
}
}
}
}
}
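// A minimal usage sketch inside an Akka HTTP route (the route layout is
// hypothetical):
//
//   path("processes" / Segment) { name =>
//     processId(name) { idWithName =>
//       complete(idWithName.name.value)
//     }
//   }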
|
TouK/nussknacker
|
ui/server/src/main/scala/pl/touk/nussknacker/ui/api/ProcessDirectives.scala
|
Scala
|
apache-2.0
| 2,185 |
package drx.interface
import java.util.concurrent.ThreadLocalRandom
import VarMap.mapmapOps
import drx.concreteplatform
import drx.interface.DSL.{Owner, BagVar, Val, Var}
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable
class BagBuilder[X](private val underlying: mutable.Map[X, Int] = mutable.Map[X, Int]()) extends mutable.Builder[(X, Int), Bag[X]] {
//println("before " + underlying.toIndexedSeq)
override def +=(elem: (X, Int)): this.type = {
val next = underlying.getOrElse(elem._1, 0) + elem._2
//println(" += " + (elem._1, underlying.getOrElse(elem._1, 0), elem._2, next))
if (next != 0) underlying(elem._1) = next
else underlying.remove(elem._1)
this }
override def clear(): Unit = underlying.clear()
override def result(): Bag[X] = {
//println("now " + underlying.toIndexedSeq + "\\n")
new Bag(underlying.toMap)
}
}
class Bag[X](private val underlying: Map[X, Int] = Map[X, Int]()) {
def size: Int = underlying.values.sum
def isEmpty: Boolean = underlying.isEmpty
def nonEmpty: Boolean = underlying.nonEmpty
def apply(key: X): Int = underlying.getOrElse(key, 0)
def merge(that: Bag[X], debug: Boolean = false)(op: (Int, Int) => Int): Bag[X] = {
val keys = this.underlying.keys ++ that.underlying.keys
VarMap.idebug("merge ", keys.map[(X, Int), Bag[X]] { k =>
VarMap.idebug("%s = %s + %s = m(%s)".format(op(this (k), that(k)), this (k), that(k), k),
k -> op(this(k), that(k)))
}(Bag.canBuildBagFromIterable))
}
def mapValues(f: Int => Int): Bag[X] =
new Bag(underlying.mapValues(f))
def mapI[Y](f: ((X, Int)) => Y): Bag[Y] =
flatMapI[Y](x => new Bag[Y](Map[Y, Int](f(x) -> 1)))
def map[Y](f: X => Y): Bag[Y] =
flatMap[Y](x => new Bag[Y](Map[Y, Int](f(x) -> 1)))
def flatMapI[Y](f: ((X, Int)) => Bag[Y]): Bag[Y] = {
val bagBuilder = new BagBuilder[Y]()
underlying.foreach { case (k, v) => f(k, v).foreachI { case (k2, v2) =>
bagBuilder += k2 -> v
} }
bagBuilder.result()
}
def flatMap[Y](f: X => Bag[Y]): Bag[Y] = {
val bagBuilder = new BagBuilder[Y]()
underlying.foreach { case (k, v) => f(k).foreachI { case (k2, v2) =>
bagBuilder += k2 -> (v * v2)
} }
bagBuilder.result()
}
def fold[Y](init: Y)(f: (Y, X) => Y): Y = {
var state = init
for (elem <- this.underlying) { for (_ <- (0 until elem._2)) {
state = f(state, elem._1)
} }
state
}
def foreachI[Y](f: ((X, Int)) => Y): Unit = underlying.foreach(f)
def foreach[Y](f: X => Y): Unit = underlying.foreach(x => f(x._1))
def iteratorI: Iterator[(X, Int)] = underlying.iterator
def iterator: Iterator[X] = underlying.iterator.flatMap { case (k,v) => (0 until v).map(_ => k) }
}
object Bag {
def canBuildBagFromIterable[X]: CanBuildFrom[Iterable[X], (X, Int), Bag[X]] = new CanBuildFrom[Iterable[X], (X, Int), Bag[X]] {
override def apply(): mutable.Builder[(X, Int), Bag[X]] = new BagBuilder[X]
override def apply(from: Iterable[X]): mutable.Builder[(X, Int), Bag[X]] = apply()
}
def canBuildBagFromMap[X]: CanBuildFrom[Iterable[(X, Int)], (X, Int), Bag[X]] = new CanBuildFrom[Iterable[(X, Int)], (X, Int), Bag[X]] {
override def apply(): mutable.Builder[(X, Int), Bag[X]] = new BagBuilder[X]
override def apply(from: Iterable[(X, Int)]): mutable.Builder[(X, Int), Bag[X]] = apply()
}
implicit def traversableBag[X](bag: Bag[X]): Traversable[X] = new Traversable[X] {
override def foreach[U](f: X => U): Unit = bag.foreach(f) }
implicit def traversableBagI[X](bag: Bag[X]): Traversable[(X, Int)] = new Traversable[(X, Int)] {
override def foreach[U](f: ((X, Int)) => U): Unit = bag.foreachI(f) }
}
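// A minimal usage sketch: a Bag tracks (possibly negative) multiplicities,
// and merge combines the counts of two bags pointwise (here by addition):
//
//   val b1 = new Bag(Map("a" -> 2, "b" -> 1))
//   val b2 = new Bag(Map("b" -> 3))
//   b1.merge(b2)(_ + _)   // counts: "a" -> 2, "b" -> 4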
final class VarMap[X <: Owner] extends Owner {
val diffs: BagVar[X] = BagVar[X]()
val aggregate: Val[Bag[X]] = diffs.mkAggregate(this)
def sampleAggregate: Bag[X] = aggregate.sample
def update(deltas: Bag[X]): Unit = diffs.update(deltas)
def filterInPlace(func: X => Boolean): Unit = update(sampleAggregate.flatMap(x =>
new Bag(Map(x -> (if (func(x)) 0 else -1))) ))
}
object VarMap {
var i = 2
var empty = true
def idebug[X](msg: String, x: => X): X = {
if (msg != "") println(" ".repeat(i) + msg)
i+=1
empty = true
val result = x
i-=1
if (!empty) println(" ".repeat(i) + "end")
empty = false
result
}
def apply[X <: Owner]() = new VarMap[X]()
implicit class flattenOps[X](val rx: Val[Bag[Val[Bag[X]]]]) extends AnyVal {
def flatten(owner: Owner): Val[Bag[X]] =
rx.map { ev => idebug("flatten", {
val bagBuilder = new BagBuilder[X]()
ev.foreachI { case (k, v) => k.get.foreachI { case (k2, v2) =>
bagBuilder += k2 -> (v * v2)
} }
bagBuilder.result()
}) }
}
implicit class mapmapOps[X](val rx: Val[Bag[X]]) extends AnyVal {
def mapFilter(fun: X => Boolean)(owner: Owner): Val[Bag[X]] =
rx.map { bag => bag map { v: X => Val(new Bag(Map(v -> (if (fun(v)) 1 else 0)))) } }.flatten(owner)
def mapMap[Y](fun: X => Y)(owner: Owner): Val[Bag[Y]] =
rx.map { bag => bag map { v: X => Val(new Bag(Map(fun(v) -> 1))) } }.flatten(owner)
def mapFlatMap[Y](fun: X => Bag[Y])(owner: Owner): Val[Bag[Y]] =
rx.map { bag => bag map { v: X => Val(fun(v)) } }.flatten(owner)
def mkAggregate(owner: Owner, debug: Boolean = false): Val[Bag[X]] =
rx.scan(new Bag[X]())( (state,ev) => state.merge(ev, debug)(_+_))(owner)
}
def hexRnd(): String = ThreadLocalRandom.current().nextLong().toHexString
}
class Table[This <: Owner : reflect.ClassTag] extends Owner { self: This =>
val id: String = VarMap.hexRnd()
Table.getRaw[This].update(new Bag(Map(this -> 1)))
def Vary[X](init: X): Var[X] = drx.interface.DSL.Var(init)
def delete(): Unit = Table.getRaw[This].filterInPlace(_ != this)
}
object Table extends Owner {
private val tables: mutable.Map[Class[_], VarMap[_ <: Owner]] = mutable.Map()
private[interface] def getRaw[C <: Owner : reflect.ClassTag]: VarMap[C] =
tables.getOrElseUpdate(implicitly[reflect.ClassTag[C]].runtimeClass, { VarMap[C]() }).asInstanceOf[VarMap[C]]
def getData[C <: Owner : reflect.ClassTag]: Val[Bag[C]] = getRaw[C].aggregate
def getDelta[C <: Owner : reflect.ClassTag]: Val[Bag[C]] = getRaw[C].diffs
def extend[X <: Owner : reflect.ClassTag, Y](func: X => Y): X => Y = {
val cache = concreteplatform.WeakMap[X, Y]()
val accessor = (x: X) => cache.get(x).getOrElse {
val tmp = func(x)
cache.set(x, tmp)
tmp
}
Table.getRaw[X].aggregate.sample.foreach(accessor)
Table.getRaw[X].diffs.mapMap(accessor)(this).enable()
accessor
}
}
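// A minimal usage sketch (the domain class is hypothetical): constructing an
// instance of a Table subclass registers it in the per-class VarMap, and
// getData exposes the aggregated bag of live instances reactively.
//
//   class Person(name0: String) extends Table[Person] { val name = Vary(name0) }
//   val everyone: Val[Bag[Person]] = Table.getData[Person]
//   new Person("ada")   // `everyone` now aggregates one Person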
|
drcicero/drx
|
shared/src/main/scala/drx/interface/Table.scala
|
Scala
|
mit
| 6,685 |
package org.langmeta.internal
package semanticdb
package vfs
import java.io.File
import java.io.FileOutputStream
import org.langmeta.io._
import org.langmeta.internal.semanticdb.{vfs => v}
import org.langmeta.internal.semanticdb.{schema => s}
object Database {
def load(classpath: Classpath): v.Database = {
val fentries = classpath.deep.filter(e => v.SemanticdbPaths.isSemanticdb(e.name))
val ventries = fentries.map(fentry => v.Entry.OnDisk(fentry))
v.Database(ventries)
}
}
final case class Database(entries: List[Entry]) {
def toSchema: s.Database = {
val sentries = entries.flatMap { ventry =>
val sdb = s.Database.parseFrom(ventry.bytes)
sdb.mergeMessageOnlyDocuments.documents
}
s.Database(sentries)
}
def save(append: Boolean): Unit = {
entries.foreach(ventry => {
val file = new File(ventry.uri)
file.getParentFile.mkdirs()
val fos = new FileOutputStream(file, append)
try fos.write(ventry.bytes)
finally fos.close()
})
}
}
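// A minimal usage sketch, assuming a classpath containing *.semanticdb files:
//
//   val vdb = Database.load(Classpath("target/classes"))
//   val sdb = vdb.toSchema   // parse each entry's bytes into the protobuf schema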
|
DavidDudson/scalameta
|
langmeta/langmeta/shared/src/main/scala/org/langmeta/semanticdb/internal/vfs/Database.scala
|
Scala
|
bsd-3-clause
| 1,023 |
package com.blinkbox.books.auth
import com.blinkbox.security.jwt.{InvalidTokenException, UnsupportedTokenException, TokenDecoder}
import com.blinkbox.security.jwt.encryption.{RSA_OAEP, A128GCM, EncryptionMethod}
import com.blinkbox.security.jwt.signatures.{ES256, SignatureAlgorithm}
import java.security._
import java.security.spec.{X509EncodedKeySpec, InvalidKeySpecException, PKCS8EncodedKeySpec}
import java.io.IOException
import java.nio.file.{Paths, Files}
import scala.collection.concurrent.TrieMap
class ZuulTokenDecoder(val keysFolder: String) extends TokenDecoder {
private val privateKeys = new TrieMap[String, Array[Byte]]()
private val publicKeys = new TrieMap[String, Array[Byte]]()
override def getDecrypter(header: java.util.Map[String, AnyRef]): EncryptionMethod =
if (header.get("enc") == A128GCM.NAME && header.get("alg") == RSA_OAEP.NAME) {
header.get("kid") match {
case keyId: String => new A128GCM(new RSA_OAEP(privateKey(keyId)))
case _ => super.getDecrypter(header)
}
} else super.getDecrypter(header)
override def getVerifier(header: java.util.Map[String, AnyRef]): SignatureAlgorithm =
if (header.get("alg") == ES256.NAME) {
header.get("kid") match {
case keyId: String => new ES256(publicKey(keyId))
case _ => super.getVerifier(header)
}
} else super.getVerifier(header)
private def privateKey(keyId: String): PrivateKey = try {
val keyData = getKeyData(keyId, "private.key", cache = privateKeys)
KeyFactory.getInstance("RSA").generatePrivate(new PKCS8EncodedKeySpec(keyData))
} catch {
case e: NoSuchAlgorithmException => throw new UnsupportedTokenException("The RSA encryption algorithm is not supported.", e)
case e: InvalidKeySpecException => throw new InvalidKeyException(s"The private key '$keyId' is invalid.", e)
}
private def publicKey(keyId: String): PublicKey = try {
val keyData = getKeyData(keyId, "public.key", cache = publicKeys)
KeyFactory.getInstance("EC").generatePublic(new X509EncodedKeySpec(keyData))
} catch {
case e: NoSuchAlgorithmException => throw new UnsupportedTokenException("The ECDSA signature algorithm is not supported.", e)
case e: InvalidKeySpecException => throw new InvalidKeyException(s"The public key '$keyId' is invalid.", e)
}
private def getKeyData(keyId: String, name: String, cache: TrieMap[String, Array[Byte]]): Array[Byte] =
// not using getOrElseUpdate as this is inherited from MapLike and isn't thread-safe
// see this bug report for more info: https://issues.scala-lang.org/browse/SI-7943
// also note that this method may read the file more than once, but that's alright
cache.get(keyId).getOrElse({
try {
val keyData = Files.readAllBytes(Paths.get(keysFolder, keyId, name))
cache.putIfAbsent(keyId, keyData).getOrElse(keyData)
} catch {
case e: IOException => throw new InvalidTokenException(s"Invalid key identifier '$keyId'.")
}
})
}
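// A sketch of the on-disk key layout this decoder expects (key identifiers
// are hypothetical):
//
//   <keysFolder>/<keyId>/private.key   -- PKCS#8-encoded RSA key, for decryption
//   <keysFolder>/<keyId>/public.key    -- X.509-encoded EC key, for signature checks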
|
blinkboxbooks/common-spray-auth.scala
|
src/main/scala/com/blinkbox/books/auth/ZuulTokenDecoder.scala
|
Scala
|
mit
| 3,012 |
package scala.meta
package semantic
import org.scalameta.adt._
import org.scalameta.annotations._
import org.scalameta.invariants._
import org.scalameta.unreachable
import scala.{Seq => _}
import scala.annotation.compileTimeOnly
import scala.collection.immutable.Seq
import scala.reflect.{ClassTag, classTag}
import scala.meta.semantic.{Context => SemanticContext}
import scala.meta.internal.{ast => impl} // necessary only to implement APIs, not to define them
import scala.meta.internal.{semantic => s} // necessary only to implement APIs, not to define them
import scala.meta.internal.ui.Summary // necessary only to implement APIs, not to define them
import scala.reflect.runtime.{universe => ru} // necessary only for a very hacky approximation of hygiene
private[meta] trait Api {
// ===========================
// PART 1: ATTRIBUTES
// ===========================
implicit class XtensionSemanticTermDesugar(tree: Term) {
@hosted def desugar: Term = implicitly[SemanticContext].desugar(tree)
}
implicit class XtensionSemanticTermTpe(tree: Term) {
@hosted def tpe: Type = implicitly[SemanticContext].tpe(tree)
}
implicit class XtensionSemanticTypeTpe(tree: Type) {
@hosted def tpe: Type = tree
}
implicit class XtensionSemanticMemberTpe(tree: Member) {
@hosted private def SeqRef: impl.Type.Name = {
val iScala = s.Symbol.Global(s.Symbol.Root, "scala", s.Signature.Term)
val iCollection = s.Symbol.Global(iScala, "collection", s.Signature.Term)
val iSeq = s.Symbol.Global(iCollection, "Seq", s.Signature.Type)
impl.Type.Name("Seq", s.Denotation.Single(s.Prefix.Zero, iSeq))
}
@hosted private def dearg(tpe: Type.Arg): Type = tpe.require[impl.Type.Arg] match {
case impl.Type.Arg.ByName(tpe) => impl.Type.Apply(SeqRef, List(tpe))
case impl.Type.Arg.Repeated(tpe) => tpe
case tpe: impl.Type => tpe
}
@hosted def tpe: Type = tree.require[impl.Member] match {
case tree: impl.Pat.Var.Term => tree.name.tpe
case tree: impl.Pat.Var.Type => tree.name
case tree: impl.Decl.Def => tree.decltpe
case tree: impl.Decl.Type => tree.name
case tree: impl.Defn.Def => tree.decltpe.getOrElse(tree.body.tpe)
case tree: impl.Defn.Macro => tree.tpe
case tree: impl.Defn.Type => tree.name
case tree: impl.Defn.Class => tree.name
case tree: impl.Defn.Trait => tree.name
case tree: impl.Defn.Object => impl.Type.Singleton(tree.name)
case impl.Pkg(name: impl.Term.Name, _) => impl.Type.Singleton(name)
case impl.Pkg(impl.Term.Select(_, name: impl.Term.Name), _) => impl.Type.Singleton(name)
case tree: impl.Pkg.Object => impl.Type.Singleton(tree.name)
case tree: impl.Term.Param if tree.parent.map(_.isInstanceOf[impl.Template]).getOrElse(false) => ??? // TODO: don't forget to intersect with the owner type
case tree: impl.Term.Param => dearg(implicitly[SemanticContext].tpe(tree))
case tree: impl.Type.Param => tree.name.require[Type.Name]
case tree: impl.Ctor.Primary => tree.owner.require[meta.Member].tpe
case tree: impl.Ctor.Secondary => tree.owner.require[meta.Member].tpe
}
}
implicit class XtensionSemanticRefDefn(tree: Ref) {
@hosted def defns: Seq[Member] = implicitly[SemanticContext].defns(tree)
@hosted def defn: Member = {
defns match {
case Seq(single) => single
case Seq(_, _*) => throw new SemanticException(s"multiple definitions found for ${showSummary(tree)}")
case Seq() => unreachable(debug(tree, tree.show[Raw]))
}
}
}
implicit class XtensionSemanticTermRefDefn(tree: Term.Ref) {
@hosted def defns: Seq[Member.Term] = (tree: Ref).defns.map(_.require[Member.Term])
@hosted def defn: Member.Term = (tree: Ref).defn.require[Member.Term]
}
// NOTE: the types here are intentionally just Member, not Member.Type
// because Type.Refs can refer to both type members (obviously) and term members (singleton types)
implicit class XtensionSemanticTypeRefDefn(tree: Type.Ref) {
@hosted def defns: Seq[Member] = (tree: Ref).defns
@hosted def defn: Member = (tree: Ref).defn
}
// ===========================
// PART 2: TYPES
// ===========================
implicit class XtensionSemanticType(tree: Type) {
@hosted def <:<(other: Type): Boolean = implicitly[SemanticContext].isSubType(tree, other)
@hosted def weak_<:<(other: Type): Boolean = ???
    @hosted def =:=(other: Type): Boolean = (tree <:< other) && (other <:< tree)
@hosted def widen: Type = implicitly[SemanticContext].widen(tree)
@hosted def dealias: Type = implicitly[SemanticContext].dealias(tree)
@hosted def companion: Type.Ref = ???
@hosted def parents: Seq[Type] = implicitly[SemanticContext].parents(tree)
}
@hosted def lub(tpes: Seq[Type]): Type = implicitly[SemanticContext].lub(tpes)
@hosted def glb(tpes: Seq[Type]): Type = implicitly[SemanticContext].glb(tpes)
// ===========================
// PART 3: MEMBERS
// ===========================
trait XtensionSemanticMemberLike {
@hosted protected def tree: Member
// TODO: An alternative design for typeSignatureIn that is very much worth exploring
// consists in lazy recalculation of signatures produced by Scope.members.
// Much like we plan to remember lexical contexts, we could also remember type parameters to be instantiated.
// For example, `t"List[Int]".defs("head")` would give us `def head: A = ...`,
// with A carrying information about the fact that it should be substituted for Int.
// My only immediate concern here is what to do with `show[Code]`, but that we can figure out.
// Even though this design looks more principled and arguably more elegant that eager recalculation,
// I ended up not going for it, because it is much less straightforward implementation-wise,
// and any time savings are worth very much at this stage of the project.
@hosted def source: Member = {
def stripPrefix(denot: s.Denotation) = denot match {
case s.Denotation.Zero => s.Denotation.Zero
case denot: s.Denotation.Single => denot.copy(prefix = s.Prefix.Zero)
}
val prefixlessName = tree.name match {
case name: impl.Name.Anonymous => name
case name: impl.Name.Indeterminate => name
case name: impl.Term.Name => name.copy(denot = stripPrefix(name.denot))
case name: impl.Type.Name => name.copy(denot = stripPrefix(name.denot))
case name: impl.Ctor.Name => name.copy(denot = stripPrefix(name.denot))
}
prefixlessName.defn
}
@hosted def name: Name = {
tree.require[impl.Member] match {
case tree: impl.Pat.Var.Term => tree.name
case tree: impl.Pat.Var.Type => tree.name
case tree: impl.Decl.Def => tree.name
case tree: impl.Decl.Type => tree.name
case tree: impl.Defn.Def => tree.name
case tree: impl.Defn.Macro => tree.name
case tree: impl.Defn.Type => tree.name
case tree: impl.Defn.Class => tree.name
case tree: impl.Defn.Trait => tree.name
case tree: impl.Defn.Object => tree.name
case impl.Pkg(name: impl.Term.Name, _) => name
case impl.Pkg(impl.Term.Select(_, name: impl.Term.Name), _) => name
case tree: impl.Pkg.Object => tree.name
case tree: impl.Term.Param => tree.name
case tree: impl.Type.Param => tree.name
case tree: impl.Ctor.Primary => tree.name
case tree: impl.Ctor.Secondary => tree.name
}
}
@hosted def parents: Seq[Member] = implicitly[SemanticContext].parents(tree)
@hosted def children: Seq[Member] = implicitly[SemanticContext].children(tree)
@hosted def companion: Member = {
val candidates = {
if (tree.isClass || tree.isTrait) tree.owner.members.filter(m => m.isObject && m.name.toString == tree.name.toString)
else if (tree.isObject) tree.owner.members.filter(m => (m.isClass || m.isTrait) && m.name.toString == tree.name.toString)
else throw new SemanticException(s"can't have companions for ${showSummary(tree)}")
}
require(candidates.length < 2)
candidates match {
case Seq(companion) => companion
case Seq() => throw new SemanticException(s"no companions for ${showSummary(tree)}")
case _ => unreachable(debug(tree, tree.show[Raw]))
}
}
@hosted def mods: Seq[Mod] = {
def fieldMods(tree: impl.Pat.Var.Term): Seq[Mod] = {
firstNonPatParent(tree) match {
case Some(parent: impl.Decl.Val) => parent.mods
case Some(parent: impl.Decl.Var) => parent.mods
case Some(parent: impl.Defn.Val) => parent.mods
case Some(parent: impl.Defn.Var) => parent.mods
case _ => Nil
}
}
tree.require[impl.Member] match {
case tree: impl.Pat.Var.Term => fieldMods(tree)
case tree: impl.Pat.Var.Type => Nil
case tree: impl.Decl.Def => tree.mods
case tree: impl.Decl.Type => tree.mods
case tree: impl.Defn.Def => tree.mods
case tree: impl.Defn.Macro => tree.mods
case tree: impl.Defn.Type => tree.mods
case tree: impl.Defn.Class => tree.mods
case tree: impl.Defn.Trait => tree.mods
case tree: impl.Defn.Object => tree.mods
case tree: impl.Pkg => Nil
case tree: impl.Pkg.Object => tree.mods
case tree: impl.Term.Param => tree.mods
case tree: impl.Type.Param => tree.mods
case tree: impl.Ctor.Primary => tree.mods
case tree: impl.Ctor.Secondary => tree.mods
}
}
@hosted def annots: Seq[Term] = tree.mods.collect{ case impl.Mod.Annot(ref) => ref }
@hosted private def firstNonPatParent(pat: Pat): Option[Tree] = {
pat.parent.collect{case pat: Pat => pat}.flatMap(firstNonPatParent).orElse(pat.parent)
}
@hosted def isVal: Boolean = {
val patVarTerm = Some(tree).collect{case tree: impl.Pat.Var.Term => tree}
val relevantParent = patVarTerm.flatMap(firstNonPatParent)
relevantParent.map(s => s.isInstanceOf[impl.Decl.Val] || s.isInstanceOf[impl.Defn.Val]).getOrElse(false)
}
@hosted def isVar: Boolean = {
val patVarTerm = Some(tree).collect{case tree: impl.Pat.Var.Term => tree}
val relevantParent = patVarTerm.flatMap(firstNonPatParent)
relevantParent.map(s => s.isInstanceOf[impl.Decl.Var] || s.isInstanceOf[impl.Defn.Var]).getOrElse(false)
}
@hosted def isDef: Boolean = tree.isInstanceOf[impl.Decl.Def] || tree.isInstanceOf[impl.Defn.Def]
@hosted def isCtor: Boolean = tree.isInstanceOf[impl.Ctor.Primary] || tree.isInstanceOf[impl.Ctor.Secondary]
@hosted def isPrimaryCtor: Boolean = tree.isInstanceOf[impl.Ctor.Primary]
@hosted def isMacro: Boolean = tree.isInstanceOf[impl.Defn.Macro]
@hosted def isAbstractType: Boolean = tree.isInstanceOf[impl.Decl.Type]
@hosted def isAliasType: Boolean = tree.isInstanceOf[impl.Defn.Type]
@hosted def isClass: Boolean = tree.isInstanceOf[impl.Defn.Class]
@hosted def isTrait: Boolean = tree.isInstanceOf[impl.Defn.Trait]
@hosted def isObject: Boolean = tree.isInstanceOf[impl.Defn.Object]
@hosted def isPackage: Boolean = tree.isInstanceOf[impl.Pkg]
@hosted def isPackageObject: Boolean = tree.isInstanceOf[impl.Pkg.Object]
@hosted def isPrivate: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Private])
@hosted def isProtected: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Protected])
@hosted def isPublic: Boolean = !tree.isPrivate && !tree.isProtected
@hosted def accessBoundary: Option[Name.Qualifier] = tree.mods.collectFirst { case impl.Mod.Private(name) => name; case impl.Mod.Protected(name) => name }
@hosted def isImplicit: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Implicit])
@hosted def isFinal: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Final]) || tree.isObject
@hosted def isSealed: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Sealed])
@hosted def isOverride: Boolean = {
def isSyntacticOverride = !isAbstract && tree.mods.exists(_.isInstanceOf[impl.Mod.Override])
def isSemanticOverride = {
def isEligible = isVal || isVar || isDef || isMacro || isAbstractType || isAliasType
def overridesSomething = parents.nonEmpty
isEligible && overridesSomething
}
isSyntacticOverride || isSemanticOverride
}
@hosted def isCase: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Case])
@hosted def isAbstract: Boolean = {
val isAbstractClass = !isAbstractOverride && tree.mods.exists(_.isInstanceOf[impl.Mod.Abstract])
val isAbstractMember = tree.isInstanceOf[impl.Decl]
isAbstractClass || isAbstractMember
}
@hosted def isCovariant: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Covariant])
@hosted def isContravariant: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Contravariant])
@hosted def isLazy: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.Lazy])
@hosted def isAbstractOverride: Boolean = (
tree.mods.exists(_.isInstanceOf[impl.Mod.Abstract]) &&
tree.mods.exists(_.isInstanceOf[impl.Mod.Override])
)
@hosted def isTermBind: Boolean = !tree.isVal && !tree.isVar && tree.isInstanceOf[impl.Pat.Var.Term]
@hosted def isTypeBind: Boolean = tree.isInstanceOf[impl.Pat.Var.Type]
@hosted def isTermParam: Boolean = tree.isInstanceOf[impl.Term.Param]
@hosted def isTypeParam: Boolean = tree.isInstanceOf[impl.Type.Param]
@hosted def isAnonymous: Boolean = {
tree.require[impl.Member] match {
case tree: impl.Term.Param => tree.name.isInstanceOf[impl.Name.Anonymous]
case tree: impl.Type.Param => tree.name.isInstanceOf[impl.Name.Anonymous]
case _ => false
}
}
@hosted def isByNameParam: Boolean = tree match { case impl.Term.Param(_, _, Some(impl.Type.Arg.ByName(_)), _) => true; case _ => false }
@hosted def isVarargParam: Boolean = tree match { case impl.Term.Param(_, _, Some(impl.Type.Arg.Repeated(_)), _) => true; case _ => false }
@hosted def isValParam: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.ValParam])
@hosted def isVarParam: Boolean = tree.mods.exists(_.isInstanceOf[impl.Mod.VarParam])
@hosted def ffi: Option[String] = tree.mods.collectFirst { case impl.Mod.Ffi(signature) => signature }
}
implicit class XtensionSemanticMember(member: Member) extends XtensionSemanticMemberLike {
@hosted protected def tree: Member = member
}
implicit class XtensionSemanticRefMemberLike(ref: Ref) extends XtensionSemanticMemberLike {
@hosted protected def tree: Member = ref.defn
}
implicit class XtensionSemanticTermMember(tree: Member.Term) {
@hosted def source: Member.Term = new XtensionSemanticMember(tree).source.require[Member.Term]
@hosted def name: Term.Name = new XtensionSemanticMember(tree).name.require[Term.Name]
@hosted def parents: Seq[Member.Term] = new XtensionSemanticMember(tree).parents.require[Seq[Member.Term]]
@hosted def children: Seq[Member.Term] = new XtensionSemanticMember(tree).children.require[Seq[Member.Term]]
@hosted def companion: Member.Type = new XtensionSemanticMember(tree).companion.require[Member.Type]
}
implicit class XtensionSemanticTermRefMemberLike(tree: Term.Ref) {
@hosted def source: Member.Term = new XtensionSemanticRefMemberLike(tree).source.require[Member.Term]
@hosted def name: Term.Name = new XtensionSemanticRefMemberLike(tree).name.require[Term.Name]
@hosted def parents: Seq[Member.Term] = new XtensionSemanticRefMemberLike(tree).parents.require[Seq[Member.Term]]
@hosted def children: Seq[Member.Term] = new XtensionSemanticRefMemberLike(tree).children.require[Seq[Member.Term]]
@hosted def companion: Member.Type = new XtensionSemanticRefMemberLike(tree).companion.require[Member.Type]
}
implicit class XtensionSemanticTypeMember(tree: Member.Type) {
@hosted def source: Member.Type = new XtensionSemanticMember(tree).source.require[Member.Type]
@hosted def name: Type.Name = new XtensionSemanticMember(tree).name.require[Type.Name]
@hosted def parents: Seq[Member.Type] = new XtensionSemanticMember(tree).parents.require[Seq[Member.Type]]
@hosted def children: Seq[Member.Type] = new XtensionSemanticMember(tree).children.require[Seq[Member.Type]]
@hosted def companion: Member.Term = new XtensionSemanticMember(tree).companion.require[Member.Term]
}
// NOTE: this class gets no additional methods, unlike XtensionSemanticTermRefMemberLike,
// because Type.Refs can refer both to type members (obviously) and to term members (singleton types)
implicit class XtensionSemanticTypeRefMemberLike(tree: Type.Ref)
implicit class XtensionSemanticTermParam(tree: Term.Param) {
@hosted def source: Term.Param = new XtensionSemanticMember(tree).source.require[Term.Param]
@hosted def name: Term.Param.Name = new XtensionSemanticMember(tree).name.require[Term.Param.Name]
@hosted def default: Option[Term] = tree.require[impl.Term.Param].default
@hosted def field: Member.Term = tree.owner.owner.members(tree.name).require[Member.Term]
}
implicit class XtensionSemanticTypeParam(tree: Type.Param) {
@hosted def source: Type.Param = new XtensionSemanticMember(tree).source.require[Type.Param]
@hosted def name: Type.Param.Name = new XtensionSemanticMember(tree).name.require[Type.Param.Name]
@hosted def contextBounds: Seq[Type] = tree.require[impl.Type.Param].contextBounds
@hosted def viewBounds: Seq[Type] = tree.require[impl.Type.Param].viewBounds
@hosted def lo: Type = tree.require[impl.Type.Param].lo
@hosted def hi: Type = tree.require[impl.Type.Param].hi
}
// ===========================
// PART 4: SCOPES
// ===========================
// TODO: so what I wanted to do with Scope.members is to have three overloads:
// * () => Seq[Member]
// * Name => Member
// * T <: Member => T <: Member
// unfortunately, if I try to introduce all three overloads, the scalac compiler gets seriously confused
// whenever I try to call members for any kind of name
// therefore, I'm essentially forced to use a type class here
// another good idea would be to name these methods differently
sealed trait XtensionMembersSignature[T, U]
object XtensionMembersSignature {
implicit def NameToMember[T <: Name]: XtensionMembersSignature[T, Member] = null
implicit def MemberToMember[T <: Member]: XtensionMembersSignature[T, T] = null
}
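// A hedged usage sketch of the three `members` shapes enabled by the type class above
// (trees and names below are illustrative, not from this codebase):
//   someMember.members                    // (): Seq[Member]        - all members
//   someMember.members(aTermOrTypeName)   // Name => Member         - lookup by name
//   someMember.members(aMemberPrototype)  // T <: Member => T       - lookup by prototype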
trait XtensionSemanticScopeLike {
@hosted protected def tree: Scope
@hosted def owner: Scope = {
def fromSyntax(tree: Tree): Option[Scope] = {
tree.parent.flatMap(_ match {
case scope: Scope => Some(scope)
case other => fromSyntax(other)
})
}
def fromPrefix(prefix: s.Prefix): Option[Member] = {
// TODO: this should account for type arguments of the prefix!
// TODO: also prefix types are probably more diverse than what's supported now
prefix match {
case s.Prefix.Type(ref: impl.Type.Ref) => Some(ref.defn)
case s.Prefix.Type(impl.Type.Apply(tpe, _)) => fromPrefix(s.Prefix.Type(tpe))
case s.Prefix.Type(impl.Type.ApplyInfix(_, tpe, _)) => fromPrefix(s.Prefix.Type(tpe))
case _ => None
}
}
tree.require[impl.Scope] match {
case member: impl.Member => fromSyntax(member).orElse(fromPrefix(member.name.require[impl.Name].denot.prefix)).get
case term: impl.Term => fromSyntax(term).get
case pat: impl.Pat => fromSyntax(pat).get
case cas: impl.Case => fromSyntax(cas).get
case tpe: impl.Type.Ref => tpe.defn.owner
case tpe: impl.Type => ???
case _ => unreachable(debug(tree))
}
}
@hosted private[meta] def deriveEvidences(tparam: Type.Param): Seq[Term.Param] = {
def deriveEvidence(evidenceTpe: Type): Term.Param = {
// TODO: it's almost a decent parameter except for the facts that:
//       1) the tree doesn't have a parent or a denotation, and we'd have to remember that it comes from tparam,
//          otherwise tree.owner is going to crash (luckily, I can't imagine this tree participating in other semantic calls)
// 2) the name is anonymous, but it's not actually a correct way of modelling it,
// because the user can refer to that name via implicit search
// so far we strip off all desugarings and, hence, all inferred implicit arguments, so that's not a problem for us, but it will be
// NOTE: potential solution would involve having the symbol of the parameter to be of a special, new kind
// Symbol.Synthetic(origin: Symbol, generator: ???)
impl.Term.Param(List(impl.Mod.Implicit()), impl.Name.Anonymous(), Some(evidenceTpe.require[impl.Type]), None)
}
def deriveViewEvidence(tpe: Type) = deriveEvidence(impl.Type.Function(List(tparam.name.require[impl.Type]), tpe.require[impl.Type]))
def deriveContextEvidence(tpe: Type) = deriveEvidence(impl.Type.Apply(tpe.require[impl.Type], List(tparam.name.require[impl.Type])))
tparam.viewBounds.map(deriveViewEvidence) ++ tparam.contextBounds.map(deriveContextEvidence)
}
@hosted private[meta] def mergeEvidences(paramss: Seq[Seq[Term.Param]], evidences: Seq[Term.Param]): Seq[Seq[Term.Param]] = {
paramss match {
case init :+ last if last.exists(_.isImplicit) => init :+ (last ++ evidences)
case init :+ last => init :+ last :+ evidences
case Nil => List(evidences)
}
}
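// Worked example (illustrative): for `def f[T: Ordering](x: Int)(implicit y: Y)`, the context
// bound T: Ordering derives an anonymous implicit evidence parameter of type Ordering[T]
// (via deriveEvidences), and mergeEvidences appends it to the trailing implicit parameter list:
//   paramss   = List(List(x: Int), List(implicit y: Y))
//   evidences = List(implicit _: Ordering[T])
//   result    = List(List(x: Int), List(implicit y: Y, implicit _: Ordering[T]))
// When no implicit parameter list is present, a fresh implicit list is appended instead.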
@hosted private[meta] def internalAll: Seq[Member] = {
def membersOfStats(stats: Seq[impl.Tree]) = stats.collect{
case member: Member => member
}
def membersOfEnumerator(enum: impl.Enumerator) = enum match {
case impl.Enumerator.Generator(pat, _) => membersOfPat(pat)
case impl.Enumerator.Val(pat, _) => membersOfPat(pat)
case impl.Enumerator.Guard(_) => Nil
}
def membersOfPatType(ptpe: impl.Pat.Type): Seq[impl.Member] = ptpe match {
case impl.Pat.Type.Wildcard() => Nil
case ptpe @ impl.Pat.Var.Type(_) => List(ptpe)
case impl.Type.Name(_) => Nil
case impl.Type.Select(_, _) => Nil
case impl.Pat.Type.Project(ptpe, _) => membersOfPatType(ptpe)
case impl.Type.Singleton(_) => Nil
case impl.Pat.Type.Apply(tpe, args) => membersOfPatType(tpe) ++ args.flatMap(membersOfPatType)
case impl.Pat.Type.ApplyInfix(lhs, _, rhs) => membersOfPatType(lhs) ++ membersOfPatType(rhs)
case impl.Pat.Type.Function(params, res) => params.flatMap(membersOfPatType) ++ membersOfPatType(res)
case impl.Pat.Type.Tuple(elements) => elements.flatMap(membersOfPatType)
case impl.Pat.Type.Compound(tpes, _) => tpes.flatMap(membersOfPatType)
case impl.Pat.Type.Existential(tpe, _) => membersOfPatType(tpe)
case impl.Pat.Type.Annotate(tpe, _) => membersOfPatType(tpe)
case impl.Type.Placeholder(_) => Nil
case _: impl.Lit => Nil
}
def membersOfPat(pat: impl.Pat.Arg): Seq[impl.Member] = pat match {
case impl.Pat.Wildcard() => Nil
case pat @ impl.Pat.Var.Term(name) => List(pat)
case impl.Pat.Bind(lhs, rhs) => membersOfPat(lhs) ++ membersOfPat(rhs)
case impl.Pat.Alternative(lhs, rhs) => membersOfPat(lhs) ++ membersOfPat(rhs)
case impl.Pat.Tuple(elements) => elements.flatMap(membersOfPat)
case impl.Pat.Extract(_, _, elements) => elements.flatMap(membersOfPat)
case impl.Pat.ExtractInfix(lhs, _, rhs) => membersOfPat(lhs) ++ rhs.flatMap(membersOfPat)
case impl.Pat.Interpolate(_, _, args) => args.flatMap(membersOfPat)
case impl.Pat.Typed(lhs, ptpe) => membersOfPat(lhs) ++ membersOfPatType(ptpe)
case impl.Pat.Arg.SeqWildcard() => Nil
case impl.Term.Name(_) => Nil
case impl.Term.Select(_, _) => Nil
case _: impl.Lit => Nil
}
tree.require[impl.Scope] match {
case tree: impl.Term.Block => membersOfStats(tree.stats)
case tree: impl.Term.Function => tree.params
case tree: impl.Term.For => tree.enums.flatMap(membersOfEnumerator)
case tree: impl.Term.ForYield => tree.enums.flatMap(membersOfEnumerator)
case tree: impl.Term.Param => Nil
case tree: impl.Type => implicitly[SemanticContext].members(tree)
case tree: impl.Type.Param => tree.tparams
case tree: impl.Pat.Var.Term => Nil
case tree: impl.Pat.Var.Type => Nil
case tree: impl.Decl.Def => tree.tparams ++ mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences)).flatten
case tree: impl.Decl.Type => tree.tparams
case tree: impl.Defn.Def => tree.tparams ++ mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences)).flatten
case tree: impl.Defn.Macro => tree.tparams ++ mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences)).flatten
case tree: impl.Defn.Type => tree.tparams
case tree: impl.Defn.Class => tree.tparams ++ tree.tpe.members
case tree: impl.Defn.Trait => tree.tparams ++ tree.tpe.members
case tree: impl.Defn.Object => Nil ++ tree.tpe.members
case tree: impl.Pkg => tree.tpe.members
case tree: impl.Pkg.Object => Nil ++ tree.tpe.members
case tree: impl.Ctor.Primary => mergeEvidences(tree.paramss, tree.owner.tparams.flatMap(deriveEvidences)).flatten
case tree: impl.Ctor.Secondary => mergeEvidences(tree.paramss, tree.owner.tparams.flatMap(deriveEvidences)).flatten
case tree: impl.Case => membersOfPat(tree.pat)
}
}
@hosted private[meta] def internalFilter[T: ClassTag](filter: T => Boolean): Seq[T] = {
internalAll.collect{ case x: T => x }.filter(filter)
}
@hosted private[meta] def internalSingle[T <: Member : ClassTag](filter: T => Boolean, diagnostic: String): T = {
val filtered = internalFilter[T](filter)
filtered match {
case Seq() => throw new SemanticException(s"""no $diagnostic found in ${showSummary(tree)}""")
case Seq(single) => single
case Seq(_, _*) => throw new SemanticException(s"""multiple $diagnostic found in ${showSummary(tree)}""")
}
}
@hosted private[meta] def internalSingleNamed[T <: Member : ClassTag](name: String, filter: T => Boolean, diagnostic: String): T = {
val filtered = internalFilter[T](x => x.name.toString == name && filter(x))
filtered match {
case Seq() => throw new SemanticException(s"""no $diagnostic named "$name" found in ${showSummary(tree)}""")
case Seq(single) => single
case Seq(_, _*) => throw new SemanticException(s"""multiple $diagnostic named "$name" found in ${showSummary(tree)}""")
}
}
@hosted private[meta] def internalMulti[T <: Member : ClassTag](name: String, filter: T => Boolean, diagnostic: String): Seq[T] = {
val filtered = internalFilter[T](x => x.name.toString == name && filter(x))
filtered match {
case Seq() => throw new SemanticException(s"""no $diagnostic named "$name" found in ${showSummary(tree)}""")
case Seq(single) => List(single)
case Seq(multi @ _*) => multi.toList
}
}
@hosted def members: Seq[Member] = internalFilter[Member](_ => true)
@hosted def members[T : ClassTag, U : ClassTag](param: T)(implicit ev: XtensionMembersSignature[T, U]): U = param match {
case name: Name =>
name match {
case name: Term.Name => internalSingleNamed[Member.Term](name.toString, _ => true, "term members").require[U]
case name: Type.Name => internalSingleNamed[Member.Type](name.toString, _ => true, "type members").require[U]
case _ => throw new SemanticException(s"""no member named $name found in ${showSummary(tree)}""")
}
case member: Member =>
member.name match {
case thisName: impl.Name =>
internalFilter[T](that => {
def thisDenot = thisName.denot.require[s.Denotation.Single]
def thatDenot = that.require[impl.Member].name.require[impl.Name].denot.require[s.Denotation.Single]
scala.util.Try(thisDenot.symbol == thatDenot.symbol).getOrElse(false)
}) match {
case Seq() => throw new SemanticException(s"no prototype for $member found in ${showSummary(tree)}")
case Seq(single) => single.require[U]
case _ => unreachable(debug(member, member.show[Raw]))
}
}
case _ =>
unreachable(debug(param, param.getClass))
}
@hosted def packages: Seq[Member.Term] = internalFilter[Member.Term](_.isPackage)
@hosted def packages(name: String): Member.Term = internalSingleNamed[Member.Term](name, _.isPackage, "packages")
@hosted def packages(name: scala.Symbol): Member.Term = packages(name.toString)
@hosted def packageObject: Member.Term = internalSingle[impl.Pkg.Object](_.isPackageObject, "package objects")
@hosted def ctor: Member.Term = internalSingle[Member.Term](_.isPrimaryCtor, "primary constructors")
@hosted def ctors: Seq[Member.Term] = internalFilter[Member.Term](_.isCtor)
@hosted def classes: Seq[Member.Type] = internalFilter[Member.Type](_.isClass)
@hosted def classes(name: String): Member.Type = internalSingleNamed[Member.Type](name, _.isClass, "classes")
@hosted def classes(name: scala.Symbol): Member.Type = classes(name.toString)
@hosted def traits: Seq[Member.Type] = internalFilter[Member.Type](_.isTrait)
@hosted def traits(name: String): Member.Type = internalSingleNamed[Member.Type](name, _.isTrait, "traits")
@hosted def traits(name: scala.Symbol): Member.Type = traits(name.toString)
@hosted def objects: Seq[Member.Term] = internalFilter[Member.Term](_.isObject)
@hosted def objects(name: String): Member.Term = internalSingleNamed[Member.Term](name, _.isObject, "objects")
@hosted def objects(name: scala.Symbol): Member.Term = objects(name.toString)
@hosted def vars: Seq[Member.Term] = internalFilter[impl.Pat.Var.Term](_.isVar)
@hosted def vars(name: String): Member.Term = internalSingleNamed[impl.Pat.Var.Term](name, _.isVar, "vars")
@hosted def vars(name: scala.Symbol):Member.Term = vars(name.toString)
@hosted def vals: Seq[Member.Term] = internalFilter[impl.Pat.Var.Term](_.isVal)
@hosted def vals(name: String): Member.Term = internalSingleNamed[impl.Pat.Var.Term](name, _.isVal, "vals")
@hosted def vals(name: scala.Symbol): Member.Term = vals(name.toString)
@hosted def defs: Seq[Member.Term] = internalFilter[Member.Term](_.isDef)
@hosted def defs(name: String): Member.Term = internalSingleNamed[Member.Term](name, _.isDef, "defs")
@hosted def defs(name: scala.Symbol): Member.Term = defs(name.toString)
@hosted def overloads(name: String): Seq[Member.Term] = internalMulti[Member.Term](name, _.isDef, "defs")
@hosted def overloads(name: scala.Symbol): Seq[Member.Term] = overloads(name.toString)
@hosted def types: Seq[Member.Type] = internalFilter[Member.Type](m => m.isAbstractType || m.isAliasType)
@hosted def types(name: String): Member.Type = internalSingleNamed[Member.Type](name, m => m.isAbstractType || m.isAliasType, "types")
@hosted def types(name: scala.Symbol): Member.Type = types(name.toString)
@hosted def params: Seq[Term.Param] = internalFilter[Term.Param](_ => true)
@hosted def paramss: Seq[Seq[Term.Param]] = tree match {
case tree: impl.Decl.Def => mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences))
case tree: impl.Defn.Def => mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences))
case tree: impl.Defn.Macro => mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences))
case tree: impl.Ctor.Primary => mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences))
case tree: impl.Ctor.Secondary => mergeEvidences(tree.paramss, tree.tparams.flatMap(deriveEvidences))
case _ => Nil
}
@hosted def params(name: String): Term.Param = internalSingleNamed[Term.Param](name, _ => true, "parameters")
@hosted def params(name: scala.Symbol): Term.Param = params(name.toString)
@hosted def tparams: Seq[Type.Param] = internalFilter[Type.Param](_ => true)
@hosted def tparams(name: String): Type.Param = internalSingleNamed[Type.Param](name, _ => true, "type parameters")
@hosted def tparams(name: scala.Symbol): Type.Param = tparams(name.toString)
}
implicit class XtensionSemanticScope(scope: Scope) extends XtensionSemanticScopeLike {
@hosted protected def tree: Scope = scope
}
implicit class XtensionSemanticRefScopeLike(ref: Ref) extends XtensionSemanticScopeLike {
@hosted protected def tree: Scope = ref.defn
}
implicit class XtensionSemanticTypeRefScopeLike(ref: Type.Ref) extends XtensionSemanticScopeLike {
@hosted protected def tree: Scope = ref
}
// ===========================
// PART 5: BINDINGS
// ===========================
implicit class XtensionSemanticName(tree: Name) {
def isBinder: Boolean = tree.parent.map(_.isInstanceOf[impl.Member]).getOrElse(false)
def isReference: Boolean = !isBinder
}
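// For example (illustrative): in `val x = 42` the name `x` is introduced by a member
// definition, so `x.isBinder == true`; in the expression `x + 1` the same name merely
// refers to that definition, so `x.isReference == true`.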
private object Fresh {
private var id = new java.util.concurrent.atomic.AtomicInteger()
def nextId() = id.incrementAndGet()
}
implicit class XtensionSemanticTermModule(tree: Term.type) {
def fresh(): Term.Name = fresh("fresh")
def fresh(prefix: String): Term.Name = impl.Term.Name(prefix + Fresh.nextId())
}
implicit class XtensionSemanticTypeModule(tree: Type.type) {
def fresh(): Type.Name = fresh("fresh")
def fresh(prefix: String): Type.Name = impl.Type.Name(prefix + Fresh.nextId())
}
implicit class XtensionSemanticPatModule(tree: Pat.type) {
def fresh(): Pat with Member.Term = impl.Pat.Var.Term(Term.fresh().require[impl.Term.Name])
def fresh(prefix: String): Pat with Member.Term = impl.Pat.Var.Term(Term.fresh(prefix).require[impl.Term.Name])
}
implicit class XtensionSemanticPatTypeModule(tree: Pat.Type.type) {
def fresh(): Pat.Type with Member.Type = impl.Pat.Var.Type(Type.fresh().require[impl.Type.Name])
def fresh(prefix: String): Pat.Type with Member.Type = impl.Pat.Var.Type(Type.fresh(prefix).require[impl.Type.Name])
}
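// Hedged usage sketch: all `fresh` helpers share the single Fresh counter above, so
// generated names are unique across Term/Type/Pat (exact ids below are illustrative):
//   Term.fresh()       // e.g. Term.Name("fresh1")
//   Type.fresh("T")    // e.g. Type.Name("T2")
//   Pat.fresh("x")     // e.g. Pat.Var.Term(Term.Name("x3"))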
// ===========================
// PART 6: REPRESENTATION CONVERSIONS
// ===========================
implicit class XtensionTypeToPatType(tree: Type) {
@hosted def pat: Pat.Type = {
def loop(tpe: impl.Type): impl.Pat.Type = tpe match {
case tpe: impl.Type.Name => tpe
case tpe: impl.Type.Select => tpe
case impl.Type.Project(qual, name) => impl.Pat.Type.Project(loop(qual), name)
case tpe: impl.Type.Singleton => tpe
case impl.Type.Apply(tpe, args) => impl.Pat.Type.Apply(loop(tpe), args.map(loop))
case impl.Type.ApplyInfix(lhs, op, rhs) => impl.Pat.Type.ApplyInfix(loop(lhs), op, loop(rhs))
case impl.Type.Function(params, res) => impl.Pat.Type.Function(params.map(param => loop(param.require[impl.Type])), loop(res))
case impl.Type.Tuple(elements) => impl.Pat.Type.Tuple(elements.map(loop))
case impl.Type.Compound(tpes, refinement) => impl.Pat.Type.Compound(tpes.map(loop), refinement)
case impl.Type.Existential(tpe, quants) => impl.Pat.Type.Existential(loop(tpe), quants)
case impl.Type.Annotate(tpe, annots) => impl.Pat.Type.Annotate(loop(tpe), annots)
case tpe: impl.Type.Placeholder => tpe
case tpe: impl.Lit => tpe
}
loop(tree.require[impl.Type])
}
}
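// Illustrative mapping (hypothetical trees): the Type `Map[Int, String]` converts to
// Pat.Type.Apply(Type.Name("Map"), List(Type.Name("Int"), Type.Name("String"))),
// while names, selections, singletons, placeholders and literals map to themselves.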
implicit class XtensionPatTypeToType(tree: Pat.Type) {
@hosted def tpe: Type = {
def loop(tpe: impl.Pat.Type): impl.Type = tpe match {
case tpe: impl.Type.Name => tpe
case tpe: impl.Type.Select => tpe
case tpe: impl.Type.Singleton => tpe
case tpe: impl.Type.Placeholder => tpe
case tpe: impl.Pat.Var.Type => ???
case tpe: impl.Pat.Type.Wildcard => impl.Type.Placeholder(impl.Type.Bounds(None, None))
case impl.Pat.Type.Project(qual, name) => impl.Type.Project(loop(qual), name)
case impl.Pat.Type.Apply(tpe, args) => impl.Type.Apply(loop(tpe), args.map(loop))
case impl.Pat.Type.ApplyInfix(lhs, op, rhs) => impl.Type.ApplyInfix(loop(lhs), op, loop(rhs))
case impl.Pat.Type.Function(params, res) => impl.Type.Function(params.map(loop), loop(res))
case impl.Pat.Type.Tuple(elements) => impl.Type.Tuple(elements.map(loop))
case impl.Pat.Type.Compound(tpes, refinement) => impl.Type.Compound(tpes.map(loop), refinement)
case impl.Pat.Type.Existential(tpe, quants) => impl.Type.Existential(loop(tpe), quants)
case impl.Pat.Type.Annotate(tpe, annots) => impl.Type.Annotate(loop(tpe), annots)
case tpe: impl.Lit => tpe
}
loop(tree.require[impl.Pat.Type])
}
}
implicit class XtensionSemanticPatTypeWithTypeToType(tree: Pat.Type with Type) {
@hosted def tpe: Type = tree
}
implicit class XtensionTypeToCtorRef(tree: Type) {
@hosted def ctorRef(ctor: Ctor.Name): Term = {
def loop(tpe: impl.Type, ctor: impl.Ctor.Name): impl.Term = {
object Types {
def unapply(tpes: Seq[impl.Type.Arg]): Option[Seq[impl.Type]] = {
if (tpes.forall(_.isInstanceOf[impl.Type])) Some(tpes.map(_.require[impl.Type]))
else None
}
}
def adjustValue(ctor: impl.Ctor.Name, value: String) = impl.Ctor.Name(value, ctor.denot)
tpe match {
case impl.Type.Name(value) => adjustValue(ctor, value)
case impl.Type.Select(qual, impl.Type.Name(value)) => impl.Ctor.Ref.Select(qual, adjustValue(ctor, value))
case impl.Type.Project(qual, impl.Type.Name(value)) => impl.Ctor.Ref.Project(qual, adjustValue(ctor, value))
case impl.Type.Function(Types(params), ret) => impl.Term.ApplyType(impl.Ctor.Ref.Function(ctor), params :+ ret)
case impl.Type.Annotate(tpe, annots) => impl.Term.Annotate(loop(tpe, ctor), annots)
case impl.Type.Apply(tpe, args) => impl.Term.ApplyType(loop(tpe, ctor), args)
case impl.Type.ApplyInfix(lhs, op, rhs) => impl.Term.ApplyType(loop(op, ctor), List(lhs, rhs))
case _ => unreachable(debug(tree, tree.show[Raw], tpe, tpe.show[Raw]))
}
}
// TODO: if we uncomment this, that'll lead to a stackoverflow in scalahost
// it's okay, but at least we could verify that ctor's prefix is coherent with tree
// val prefixedCtor = tree.members(ctor.defn).name.require[Ctor.Name]
loop(tree.require[impl.Type], ctor.require[impl.Ctor.Name])
}
}
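// Illustrative example (hypothetical trees): calling `ctorRef` on the Type
// `Map[Int, String]` with the ctor name `Map` yields
// Term.ApplyType(Ctor.Name("Map"), List(Type.Name("Int"), Type.Name("String"))),
// i.e. a constructor reference that can then be applied to arguments.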
// TODO: Previously, we had a `dialectFromSemanticContext` implicit, which obviated the need for this method.
// However, this dialect materializer was really half-hearted in the sense that it worked for prettyprinting
// but not for quasiquotes (since quasiquotes need a dialect at compile time, not a potentially runtime dialect).
// Until this problem is fixed, I'm disabling the materializer altogether.
private def showSummary(tree: Tree)(implicit c: SemanticContext): String = {
implicit val d: Dialect = c.dialect
tree.show[Summary]
}
}
|
mdemarne/scalameta
|
scalameta/src/main/scala/scala/meta/semantic/Api.scala
|
Scala
|
bsd-3-clause
| 39,404 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.irw
import java.nio.ByteBuffer
import java.util.NoSuchElementException
import java.util.concurrent.TimeUnit
import akka.NotUsed
import akka.stream.alpakka.cassandra.scaladsl.CassandraSource
import akka.stream.scaladsl.Source
import cmwell.common.metrics.WithMetrics
import cmwell.domain._
import cmwell.driver.{Dao, DaoExecution}
import cmwell.util.collections.partitionWith
import cmwell.util.concurrent.{FutureTimeout, travector}
import cmwell.util.jmx._
import cmwell.util.{Box, BoxedFailure, EmptyBox, FullBox}
import cmwell.zstore.ZStore
import com.datastax.driver.core._
import com.datastax.driver.core.querybuilder.Truncate
import com.datastax.driver.core.utils.Bytes
import com.google.common.cache.{Cache, CacheBuilder}
import com.typesafe.scalalogging.LazyLogging
import org.joda.time.DateTime
import scala.collection.JavaConversions._
import scala.concurrent._
import scala.concurrent.duration.{Duration => SDuration, _}
import scala.util.{Failure, Success}
class IRWServiceNativeImpl2(
storageDao: Dao,
maxReadSize: Int = 25,
disableReadCache: Boolean = false,
readCacheDuration: FiniteDuration = 120.seconds
)(implicit val defaultCasTimeout: SDuration = 10.seconds)
extends IRWService
with LazyLogging
with WithMetrics
with DaoExecution {
val delayOnError = 40.millis
val cacheSize: Long = 50000L
val sysQuad = InfotonSerializer.sysQuad
// TODO: make both configurable
val dataCahce: Cache[String, Infoton] = CacheBuilder
.newBuilder()
.maximumSize(cacheSize)
.expireAfterWrite(readCacheDuration.toMillis, TimeUnit.MILLISECONDS)
.recordStats()
.build[String, Infoton]()
val dataCacheMBean = new GuavaCacheJMX[String, Infoton](dataCahce)
jmxRegister(dataCacheMBean, "cmwell.irw:type=IRWServiceImpl2")
val getFromCacheFunc: String => Option[Infoton] = { uuid =>
Option(dataCahce.getIfPresent(uuid))
}
val zStore = ZStore(storageDao)
val infotonsIRWReadDataTimer = metrics.meter("IRW Read Data Time")
val infotonsIRWWriteDataTimer = metrics.meter("IRW Write Data Time")
val infotonsIRWReadPathTimer = metrics.meter("IRW Read Path Time")
val infotonsIRWWritePathTimer = metrics.meter("IRW Write Path Time")
// here we prepare all the statements we are going to use
val getInfotonUUID: PreparedStatement = storageDao.getSession.prepare("SELECT * FROM infoton WHERE uuid = ?")
val getSystemFields: PreparedStatement = storageDao.getSession.prepare(s"SELECT quad,field,value FROM infoton WHERE uuid = ? AND quad = '$sysQuad'")
val getLastInPath: PreparedStatement = storageDao.getSession.prepare("SELECT uuid, last_modified FROM path WHERE path = ? LIMIT 1")
val getAllHistory: PreparedStatement =
storageDao.getSession.prepare("SELECT last_modified,uuid FROM path WHERE path = ?")
val getHistory = storageDao.getSession.prepare("SELECT last_modified,uuid FROM path WHERE path = ? LIMIT ?")
val getHistoryNeighbourhoodAsc = storageDao.getSession.prepare("SELECT last_modified,uuid FROM path WHERE path = ? AND last_modified >= ? LIMIT ?")
val getHistoryNeighbourhoodDesc = storageDao.getSession.prepare("SELECT last_modified,uuid FROM path WHERE path = ? AND last_modified <= ? LIMIT ?")
val getIndexTime: PreparedStatement = storageDao.getSession.prepare(
s"SELECT uuid,quad,field,value FROM infoton WHERE uuid = ? AND quad = '$sysQuad' AND field = 'indexTime'"
)
val setInfotonData: PreparedStatement =
storageDao.getSession.prepare("INSERT INTO infoton (uuid,quad,field,value) VALUES (?,?,?,?)")
val setInfotonFileData: PreparedStatement =
storageDao.getSession.prepare("INSERT INTO infoton (uuid,quad,field,value,data) VALUES (?,?,?,?,?)")
val setDc: PreparedStatement =
storageDao.getSession.prepare(s"INSERT INTO infoton (uuid,quad,field,value) VALUES (?,'$sysQuad','dc',?)")
val setIndexTime: PreparedStatement =
storageDao.getSession.prepare(s"INSERT INTO infoton (uuid,quad,field,value) VALUES (?,'$sysQuad','indexTime',?)")
val setPathLast: PreparedStatement =
storageDao.getSession.prepare("INSERT INTO path (path,last_modified,uuid) VALUES (?,?,?)")
val delIndexTime: PreparedStatement =
storageDao.getSession.prepare(s"DELETE FROM infoton WHERE uuid = ? AND quad = '$sysQuad' AND field = 'indexTime'")
val purgeInfotonByUuid: PreparedStatement = storageDao.getSession.prepare("DELETE FROM infoton WHERE uuid = ?")
val purgeHistoryEntry: PreparedStatement =
storageDao.getSession.prepare("DELETE FROM path WHERE path = ? AND last_modified = ?")
val purgeAllHistory: PreparedStatement = storageDao.getSession.prepare("DELETE FROM path WHERE path = ?")
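// Inferred storage layout (a sketch derived from the statements above, not authoritative):
//   infoton(uuid, quad, field, value, data) -- partition key uuid; clustering on (quad, field, value)
//   path(path, last_modified, uuid)         -- partition key path; clustering on last_modified,
//                                              apparently descending, so `LIMIT 1` yields the latest version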
implicit val daoProxy: Dao = storageDao
case class UnmatchingReadUUID(requestedUuid: String, receivedInfoton: Infoton)
extends IllegalStateException(
s"received infoton with uuid=[${receivedInfoton.uuid}] for requested uuid=[$requestedUuid]."
)
private def convert(uuid: String)(result: ResultSet): Box[Infoton] = {
if (result.isExhausted()) Box.empty[Infoton]
else
try {
val i = InfotonSerializer.deserialize2(
uuid,
new Iterator[(String, String, (String, Array[Byte]))] {
override def hasNext: Boolean = !result.isExhausted()
override def next(): (String, String, (String, Array[Byte])) = {
val r: Row = result.one()
val q = r.getString("quad")
val f = r.getString("field")
val v = r.getString("value")
val d = {
if (f != "data") null
else Bytes.getArray(r.getBytes("data"))
}
(q, f, v -> d)
}
}
)
if (i.uuid != uuid) BoxedFailure(UnmatchingReadUUID(uuid, i))
else FullBox(i)
} catch {
case t: Throwable => BoxedFailure(t)
}
}
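// Sketch of the triples fed to InfotonSerializer.deserialize2 above (field names and values
// are illustrative assumptions, not taken from this file):
//   ("sysQuad", "path", ("/example/foo", null))
//   ("sysQuad", "lastModified", ("2015-01-01T00:00:00.000Z", null))
//   ("someQuad", "data", ("<hash>", Array[Byte](...)))  // `data` is the only field carrying bytes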
def rawReadSystemFields(uuid: String, lvl: ConsistencyLevel): Future[Seq[(String,String,String)]] = {
// TODO this was copied from convert method, we need to stay DRY
import scala.concurrent.ExecutionContext.Implicits.global
executeAsyncInternal(getSystemFields.bind(uuid).setConsistencyLevel(lvl)).map { result =>
new Iterator[(String, String, String)] {
override def hasNext: Boolean = !result.isExhausted
override def next(): (String, String, String) = {
val r: Row = result.one()
val q = r.getString("quad")
val f = r.getString("field")
val v = r.getString("value")
(q, f, v)
}
}.toSeq
}
}
def rawReadUuidAsync(uuid: String, lvl: ConsistencyLevel): Future[Seq[(String,String,(String,Array[Byte]))]] = {
// TODO this was copied from convert method, we need to stay DRY
import scala.concurrent.ExecutionContext.Implicits.global
executeAsyncInternal(getInfotonUUID.bind(uuid).setConsistencyLevel(lvl)).map { result =>
new Iterator[(String, String, (String, Array[Byte]))] {
override def hasNext: Boolean = !result.isExhausted
override def next(): (String, String, (String, Array[Byte])) = {
val r: Row = result.one()
val q = r.getString("quad")
val f = r.getString("field")
val v = r.getString("value")
val d = {
if (f != "data") null
else Bytes.getArray(r.getBytes("data"))
}
(q, f, v -> d)
}
}.toSeq
}
}
def readUUIDSAsync(uuids: Seq[String],
level: ConsistencyLevel = ONE)(implicit ec: ExecutionContext): Future[Seq[Box[Infoton]]] = {
def getUuidsFromCas(us: Seq[String]) = travector(us) { uuid =>
readUUIDAsync(uuid, level)(ec).recover {
case err => {
logger.error(s"could not retrieve UUID [$uuid] from cassandra")
BoxedFailure(err)
}
}
}
if (!disableReadCache) {
val all = uuids.map(uuid => uuid -> getFromCacheFunc(uuid))
if (all.forall(_._2.isDefined)) Future.successful(all.map { case (_, i) => Box.wrap(i.get) })
else {
val (unCachedUuids, cachedInfotons) = partitionWith(all) {
case (_, Some(i)) => Right(Box.wrap(i))
case (uuid, None) => Left(uuid)
}
getUuidsFromCas(unCachedUuids).map(_ ++ cachedInfotons)
}
} else getUuidsFromCas(uuids)
}
def readUUIDAsync(uuid: String, level: ConsistencyLevel = ONE, dontFetchPayload: Boolean = false)(
implicit ec: ExecutionContext
): Future[Box[Infoton]] = {
def getFromCas(lvl: ConsistencyLevel, isARetry: Boolean = false): Future[Box[Infoton]] = {
val stmt = getInfotonUUID.bind(uuid).setConsistencyLevel(lvl)
val resultedInfotonFuture =
cmwell.util.concurrent.retry(5, delayOnError, 2)(executeAsyncInternal(stmt).map(convert(uuid)))(ec)
resultedInfotonFuture
.recover {
case UnmatchingReadUUID(_, i) => FullBox(i)
case err: Throwable => {
logger.error(s"could not read/convert uuid [$uuid] to infoton. returning None", err)
BoxedFailure(err)
}
}
.flatMap {
case b: Box[Infoton] if b.isEmpty =>
if (isARetry) Future.successful(b)
else cmwell.util.concurrent.SimpleScheduler.scheduleFuture(delayOnError)(getFromCas(QUORUM, true))
case io @ FullBox(i) =>
if (isARetry) {
logger.warn(s"The uuid $uuid is only available in QUORUM")
if (i.uuid != uuid)
logger.error(
s"The infoton [${i.path}] retrieved with different uuid [${i.uuid}] from requested uuid [$uuid]"
)
}
if (i.uuid != uuid && !isARetry) getFromCas(QUORUM, true)
else Future.successful(io)
}(ec)
}
def populateDataIfNeeded(infotonBoxFut: Future[Box[Infoton]]): Future[Box[Infoton]] = {
def populateDataIfNeededInternal(infoton: Infoton): Future[Infoton] = infoton match {
case fi: FileInfoton if fi.hasDataPointer && !dontFetchPayload => fi.populateDataFromPointerBy(zStore.get)(ec)
case i => Future.successful(i)
}
infotonBoxFut.flatMap {
case FullBox(i) => populateDataIfNeededInternal(i).map(Box.wrap(_))
case box => Future.successful(box)
}
}
if (disableReadCache) populateDataIfNeeded(getFromCas(level))
else
getFromCacheFunc(uuid).fold(populateDataIfNeeded(getFromCas(level)).andThen {
case Success(FullBox(i)) if i.uuid == uuid => dataCahce.put(uuid, i)
})(futureBox)
}
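// Hedged usage sketch (names illustrative): reads start at the requested consistency level
// and transparently retry once at QUORUM when the uuid is missing or mismatched:
//   irw.readUUIDAsync("someUuid", ONE).map {
//     case FullBox(i)      => ... // found (possibly only at QUORUM; a warning is logged)
//     case EmptyBox        => ... // not found even at QUORUM
//     case BoxedFailure(e) => ... // read or deserialization failed
//   }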
def executeAsync(statementToExec: Statement,
identifierForFailures: => String,
retries: Int = 5,
delay: FiniteDuration = delayOnError,
casTimeout: SDuration = defaultCasTimeout)(implicit ec: ExecutionContext): Future[ResultSet] =
cmwell.util.concurrent.retryWithDelays(Vector.iterate(delay, retries)(_ * 2): _*) {
if (casTimeout.isFinite())
cmwell.util.concurrent
.timeoutFuture(executeAsyncInternal(statementToExec), casTimeout.asInstanceOf[FiniteDuration])
.andThen {
case Failure(FutureTimeout(f)) => {
val givingUpTimestamp = System.currentTimeMillis()
val msg = identifierForFailures
val id = cmwell.util.numeric.Radix64.encodeUnsigned(givingUpTimestamp) + "_" + cmwell.util.numeric.Radix64
.encodeUnsigned(msg.hashCode())
logger.error(s"[$id] FutureTimeout for [ $msg ], and givingUpTimestamp = [ $givingUpTimestamp ]")
f.onComplete {
case Success(_) =>
logger.error(
s"executeAsync for [$id] returned successfully ${System.currentTimeMillis() - givingUpTimestamp}ms after giving up"
)
case Failure(t) =>
logger.error(
s"executeAsync for [$id] returned with failure ${System.currentTimeMillis() - givingUpTimestamp}ms after giving up",
t
)
}
}
} else
executeAsyncInternal(statementToExec)
}(ec)
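// For example, with the defaults (delay = delayOnError = 40.millis, retries = 5) the
// retry schedule is Vector(40, 80, 160, 320, 640).millis, i.e. doubling backoff.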
@deprecated("No need in IRW2, use `setPathLast` instead", "1.5.x")
def setPathHistory(infoton: Infoton, level: ConsistencyLevel = QUORUM): Future[Infoton] =
setPathLast(infoton, level).map(_ => infoton)(scala.concurrent.ExecutionContext.Implicits.global)
def setPathLast(infoton: Infoton, level: ConsistencyLevel = QUORUM): Future[Unit] =
setPathLast(infoton.path, infoton.lastModified.toDate, infoton.uuid, level)
def setPathLast(path: String, lastModified: java.util.Date, uuid: String, level: ConsistencyLevel): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = setPathLast.bind(path, lastModified, uuid).setConsistencyLevel(level)
executeAsync(stmt, s"'${setPathLast.getQueryString}'.bind($path, $lastModified, $uuid)").map { rs =>
logger.trace(s"resultSet from setPathLast: $rs")
}
}
def writeAsyncDataOnly(infoton: Infoton,
level: ConsistencyLevel = QUORUM)(implicit ec: ExecutionContext): Future[Infoton] = {
val p = Promise[Infoton]()
val (uuid, rows) = InfotonSerializer.serialize2(infoton)
// Write the changes for one infoton as an un-logged batch so that the changes are written atomically.
val statements: Seq[(Statement, () => String)] = rows.flatMap {
case (quad, fields) =>
fields.flatMap {
case (field, values) =>
values.map {
case (value, data) =>
if (field == "data")
(setInfotonFileData.bind(uuid, quad, field, value, ByteBuffer.wrap(data)).setConsistencyLevel(level),
() => s"'${setInfotonFileData.getQueryString}'.bind($uuid, $quad, $field, $value, ${data.take(8)})")
else
(setInfotonData.bind(uuid, quad, field, value).setConsistencyLevel(level),
() => s"'${setInfotonData.getQueryString}'.bind($uuid, $quad, $field, $value)")
}
}
}
// This un-logged batch runs against a single partition, so it does not have the performance issue that
// multi-partition unlogged batches do.
// Cassandra limits the number of statements in a batch to 0xFFFF.
val futureResults = statements.grouped(0xFFFF).map { zipped =>
val (stmts, ids) = zipped.unzip(identity)
val failStringFunc = () => {
if (ids.length <= 10)
ids
.foldLeft(new StringBuilder) {
case (sb, f) =>
sb += '\n'
sb ++= f()
}
.result()
else {
val h = ids.take(8).foldLeft(new StringBuilder) {
case (sb, f) =>
sb += '\n'
sb ++= f()
}
h ++= "\\n…\\n"
h ++= ids.last()
h.result
}
}
executeAsync(new BatchStatement(BatchStatement.Type.UNLOGGED).addAll(stmts), failStringFunc())
}
Future.sequence(futureResults).onComplete {
case Failure(t) => p.failure(t)
case Success(l) if l.exists(!_.wasApplied()) =>
p.failure(new RuntimeException(s"some statements was not applied: ${l
.collect { case r if !r.wasApplied() => r.getExecutionInfo.getQueryTrace.toString }
.mkString("\\n\\t", "\\n\\t", "\\n")}"))
case Success(_) => p.success(infoton)
}
p.future.andThen {
case Success(i) if !disableReadCache => dataCahce.put(i.uuid, i)
}
}
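// Note on the batching above: one infoton serializes to one (quad, field, value[, data]) row
// per value, all under the same uuid partition, so e.g. 70,000 rows would be split into two
// unlogged batches (65,535 + 4,465) -- each batch is single-partition and hence atomic,
// but atomicity does not hold across batches.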
def writeSeqAsync(infoton: Seq[Infoton], level: ConsistencyLevel = QUORUM, skipSetPathLast: Boolean = false)(
implicit ec: ExecutionContext
): Future[Seq[Infoton]] =
travector(infoton)(i => writeAsync(i, level, skipSetPathLast))
def writeAsync(infoton: Infoton, level: ConsistencyLevel = QUORUM, skipSetPathLast: Boolean = false)(
implicit ec: ExecutionContext
): Future[Infoton] = {
if (skipSetPathLast) writeAsyncDataOnly(infoton, level)
else
writeAsyncDataOnly(infoton, level).flatMap { i =>
setPathLast(infoton, level).map(_ => i)
}
}
def addIndexTimeToUuid(uuid: String, indexTime: Long, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
if (!disableReadCache) {
Option(dataCahce.getIfPresent(uuid)).foreach { i =>
dataCahce.put(uuid, addIndexTime(i, Some(indexTime)))
}
}
readIndexTimeRowsForUuid(uuid, level).flatMap { indexTimes =>
if (indexTimes.isEmpty) writeIndexTimeToUuid(uuid, indexTime, level)
else if (indexTimes.head == indexTime && indexTimes.tail.isEmpty) {
logger.error(
s"was asked to `addIndexTimeToUuid` for uuid [$uuid], but index time already written [$indexTime]: taking no action and returning Future.successful"
)
Future.successful(())
} else {
logger.error(
s"was asked to `addIndexTimeToUuid` for uuid [$uuid], but different indexTime(s) is already present ${indexTimes
.mkString("[", ",", "]")}: will delete these, and write the new indexTime [$indexTime]"
)
deleteIndexTimeFromUuid(uuid, level).flatMap { _ =>
writeIndexTimeToUuid(uuid, indexTime, level)
}
}
}
}
private def deleteIndexTimeFromUuid(uuid: String, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = delIndexTime.bind(uuid).setConsistencyLevel(level)
executeAsync(stmt, s"'${delIndexTime.getQueryString}'.bind($uuid)").map(rs => if (!rs.wasApplied()) ???)
}
private def writeIndexTimeToUuid(uuid: String, indexTime: Long, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = setIndexTime.bind(uuid, indexTime.toString).setConsistencyLevel(level)
executeAsync(stmt, s"'${setIndexTime.getQueryString}'.bind($uuid, $indexTime)").map(rs => if (!rs.wasApplied()) ???)
}
def readIndexTimeRowsForUuid(uuid: String, level: ConsistencyLevel = QUORUM): Future[Seq[Long]] = {
import scala.concurrent.ExecutionContext.Implicits.global
import scala.collection.JavaConverters._
val stmt = getIndexTime.bind(uuid).setConsistencyLevel(level)
executeAsync(stmt, s"'${getIndexTime.getQueryString}'.bind($uuid)").map { rs =>
if (!rs.wasApplied()) ???
else
rs.all().asScala.map { row =>
row.getString("value").toLong
}
}
}
def addDcToUuid(uuid: String, dc: String, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
if (!disableReadCache) {
Option(dataCahce.getIfPresent(uuid)).foreach { i =>
dataCahce.put(uuid, addDc(i, dc))
}
}
val stmt = setDc.bind(uuid, dc).setConsistencyLevel(level)
executeAsync(stmt, s"'${setDc.getQueryString}'.bind($uuid, $dc)").map(rs => if (!rs.wasApplied()) ???)
}
//FIXME: arghhhhh
private def extractLast(result: ResultSet): Option[String] = {
var s: Option[String] = None
val it = result.iterator()
while (it.hasNext) {
val r: Row = it.next()
val last = r.getString("uuid")
s = Some(last)
}
s
}
private def readPathAndLast(path: String, level: ConsistencyLevel = ONE, retry: Boolean = false)(
implicit ec: ExecutionContext
): Future[Box[(String, String)]] = {
readPathUUIDA(path, level, retry).map(_.map(path -> _))
}
def readPathsAsync(paths: Seq[String],
level: ConsistencyLevel = ONE)(implicit ec: ExecutionContext): Future[Seq[Box[Infoton]]] = {
travector(paths) { p =>
readPathAsync(p, level).recover(recoverAsBoxedFailure)
}
}
/**
* Reads Async infoton according to path
*
* @param path The path of the infoton
* @param level The level of consistency
* @return a Future holding a Box of the infoton; the Box is empty if the infoton does not exist
*/
def readPathAsync(path: String,
level: ConsistencyLevel = ONE)(implicit ec: ExecutionContext): Future[Box[Infoton]] = {
// first, let's read the path to get the uuid
val uuidFuture = readPathUUIDA(path, level)
uuidFuture
.flatMap {
case b: Box[String] if b.isEmpty => Future.successful(b.asInstanceOf[Box[Infoton]])
case FullBox(uuid) => readUUIDAsync(uuid, level)
}
.recover(recoverAsBoxedFailure)
}
def readPathUUIDA(path: String, level: ConsistencyLevel = ONE, retry: Boolean = false)(
implicit ec: ExecutionContext
): Future[Box[String]] = {
val stmt = getLastInPath.bind(path).setConsistencyLevel(level)
executeAsync(stmt, s"'${getLastInPath.getQueryString}'.bind($path)")
.flatMap((extractLast _).andThen {
case None if level == ONE => readPathUUIDA(path, QUORUM, retry = true)
case os =>
if (os.isDefined && retry) logger.warn(s"The path $path is only available in QUORUM")
Future.successful(Box(os))
})
.recover(recoverAsBoxedFailure)
}
/** ********************************************************************************/
//TODO: remove this method and only use the async version (which, by the way, is truly async, not a bluff).
@deprecated("use `historyAsync` instead", "1.4.x")
def history(path: String, limit: Int): Vector[(Long, String)] = {
def executeRetry(statement: Statement, retry: Int = 4, timeToWait: Long = 1000): ResultSet = {
var res: ResultSet = null
var count: Int = 0
while (count != retry) {
try {
res = storageDao.getSession.execute(statement)
// no exception was thrown, so stop retrying
count = retry
} catch {
case e: Throwable =>
count += 1
logger.error(s"in executeRetry $statement retry $count", e)
Thread.sleep(timeToWait * (1 << (count + 1))) // exponential backoff
if (count == retry)
throw e
}
}
res
}
val q = s"select last_modified,uuid from path where path = '${path.replaceAll("'", "''")}' LIMIT $limit"
val res: ResultSet = executeRetry(new SimpleStatement(q))
val it = res.iterator()
val b = Vector.newBuilder[(Long, String)]
while (it.hasNext) {
val r: Row = it.next()
if (r ne null) {
val d = r.getTimestamp("last_modified")
val u = r.getString("uuid")
b += (d.getTime -> u)
}
}
b.result()
}
def historyReactive(path: String, level: ConsistencyLevel): Source[(Long, String), NotUsed] = {
val stmt = getAllHistory.bind(path).setConsistencyLevel(level)
CassandraSource(stmt)(storageDao.getSession).map { r: Row =>
r.getTimestamp("last_modified").getTime -> r.getString("uuid")
}
}
def lastVersion(path: String, level: ConsistencyLevel): Future[Option[(Long, String)]] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = getLastInPath.bind(path).setConsistencyLevel(level) // bound parameters need no CQL escaping
executeAsync(stmt, s"'${getLastInPath.getQueryString}'.bind($path)").map { res =>
val it = res.iterator()
val b = Vector.newBuilder[(Long, String)]
while (it.hasNext) {
val r: Row = it.next()
if (r ne null) {
val d = r.getTimestamp("last_modified") // TODO this Vector.newBuilder should be a private def!!!
val u = r.getString("uuid")
b += (d.getTime -> u)
}
}
b.result().headOption
}
}
override def historyNeighbourhood(path: String, timestamp: Long, desc: Boolean, limit: Int, level: ConsistencyLevel): Future[Vector[(Long, String)]] = {
import scala.concurrent.ExecutionContext.Implicits.global
val getHistoryNeighbourhood = if(desc) getHistoryNeighbourhoodDesc else getHistoryNeighbourhoodAsc
val stmt = getHistoryNeighbourhood.bind(path, new java.util.Date(timestamp), Int.box(limit)).setConsistencyLevel(level) // bound parameters need no CQL escaping
executeAsync(stmt, s"'${getHistoryNeighbourhood.getQueryString}'.bind($path,$timestamp,$limit)").map { res =>
val it = res.iterator()
val b = Vector.newBuilder[(Long, String)]
while (it.hasNext) {
val r: Row = it.next()
if (r ne null) {
val d = r.getTimestamp("last_modified")
val u = r.getString("uuid")
b += (d.getTime -> u)
}
}
b.result()
}
}
def historyAsync(path: String, limit: Int): Future[Vector[(Long, String)]] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = getHistory.bind(path, Int.box(limit)).setConsistencyLevel(ConsistencyLevel.QUORUM) // bound parameters need no CQL escaping
executeAsync(stmt, s"'${getHistory.getQueryString}'.bind($path)").map { res =>
val it = res.iterator()
val b = Vector.newBuilder[(Long, String)]
while (it.hasNext) {
val r: Row = it.next()
if (r ne null) {
val d = r.getTimestamp("last_modified")
val u = r.getString("uuid")
b += (d.getTime -> u)
}
}
b.result()
}
}
def exists(paths: Set[String], level: ConsistencyLevel = ONE): Map[String, Option[String]] = {
import scala.concurrent.ExecutionContext.Implicits.global
if (paths.isEmpty)
Map.empty
else {
val res = travector(paths) { path =>
readPathAndLast(path, level)
}
val r = Await.result(res, 10000.millis)
val data = r.collect { case FullBox(tup) => tup }.toMap
val v2 = paths.map { p =>
p -> data.get(p)
}
v2.toMap
}
}
def purgeHistorical(infoton: Infoton,
isOnlyVersion: Boolean = false,
level: ConsistencyLevel = QUORUM): Future[Unit] = {
purgeHistorical(infoton.path, infoton.uuid, infoton.lastModified.getMillis, isOnlyVersion, level)
}
def purgeHistorical(path: String,
uuid: String,
lastModified: Long,
isOnlyVersion: Boolean,
level: ConsistencyLevel): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val pHistoryEntry = {
val stmt = purgeHistoryEntry.bind(path, new java.util.Date(lastModified)).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeHistoryEntry.getQueryString}'.bind($path, $lastModified)")
.map(rs => if (!rs.wasApplied()) ???)
}
def pAllHistory = {
val stmt = purgeAllHistory.bind(path).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeAllHistory.getQueryString}'.bind($path)").map(rs => if (!rs.wasApplied()) ???)
}
def pInfoton = {
val stmt = purgeInfotonByUuid.bind(uuid).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeInfotonByUuid.getQueryString}'.bind($uuid)").map { rs =>
dataCahce.invalidate(uuid)
if (!rs.wasApplied()) ???
}
}
(if (isOnlyVersion) pAllHistory else pHistoryEntry).flatMap(_ => pInfoton)
}
def purgeUuid(path: String,
uuid: String,
lastModified: Long,
isOnlyVersion: Boolean,
level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
def pAllHistory = {
val stmt = purgeAllHistory.bind(path).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeAllHistory.getQueryString}'.bind($path)").map(rs => if (!rs.wasApplied()) ???)
}
def pHistoryEntry = {
val stmt = purgeHistoryEntry.bind(new java.util.Date(lastModified), path).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeHistoryEntry.getQueryString}'.bind($lastModified, $path)")
.map(rs => if (!rs.wasApplied()) ???)
}
purgeFromInfotonsOnly(uuid, level).flatMap { _ =>
if (isOnlyVersion) pAllHistory else pHistoryEntry
}
}
def purgeFromInfotonsOnly(uuid: String, level: ConsistencyLevel = QUORUM) = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = purgeInfotonByUuid.bind(uuid).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeInfotonByUuid.getQueryString}'.bind($uuid)").map { rs =>
dataCahce.invalidate(uuid)
if (!rs.wasApplied()) ???
}
}
def purgeFromPathOnly(path: String, lastModified: Long, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = purgeHistoryEntry.bind(new java.util.Date(lastModified), path).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeHistoryEntry.getQueryString}'.bind($lastModified, $path)")
.map(rs => if (!rs.wasApplied()) ???)
}
def purgePathOnly(path: String, level: ConsistencyLevel = QUORUM): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
val stmt = purgeAllHistory.bind(path).setConsistencyLevel(level)
executeAsync(stmt, s"'${purgeAllHistory.getQueryString}'.bind($path)").map(rs => if (!rs.wasApplied()) ???)
}
override def purgeAll(): Future[Unit] = {
import scala.concurrent.ExecutionContext.Implicits.global
Future
.sequence(Seq("path", "infoton", "zstore").map { table =>
val truncate = s"TRUNCATE TABLE data2.$table"
executeAsync(new SimpleStatement(truncate), truncate)
})
.map(_ => ()) // `()` is the unit value; bare `Unit` would yield the companion object
}
def fixPath(path: String,
last: (DateTime, String),
history: Seq[(DateTime, String)],
level: ConsistencyLevel = QUORUM): Future[Seq[Infoton]] = ???
/*{
val uuids = history.map(_._2).toVector
val existingInfotonsFut = this.readUUIDSAsync(uuids)
existingInfotonsFut.flatMap { existingInfopts =>
val existingInfotons = existingInfopts.collect{ case Some(i) => i}
if (existingInfotons.length < history.length) {
logger.warn(s"FixCAS for $path: There are only ${existingInfotons.length} in `infoton`, but ES found ${history.length} versions. Cannot fix!")
Future.failed(new Exception("missing data"))
} else {
import scala.collection.JavaConverters._
val historyMap = history.map(e => new java.util.Date(e._1.getMillis) -> e._2).toMap.asJava
val lastUuid = last._2
val bsSetPathLastAndHistory = new BoundStatement(setPathLastAndHistory)
bsSetPathLastAndHistory.setConsistencyLevel(level)
bsSetPathLastAndHistory.bind(path, historyMap, lastUuid)
executeAsync(bsSetPathLastAndHistory).map { _ => existingInfotons }
}
}
}*/
def rowToRecord(r: Row): String = {
val u = r.getString("uuid")
val q = r.getString("quad")
val f = r.getString("field")
val v = org.apache.commons.lang3.StringEscapeUtils.escapeCsv(r.getString("value"))
val d =
Option(r.getBytes("data")).fold("null")(x => cmwell.util.string.Base64.encodeBase64String(Bytes.getArray(x)))
s"$u,$q,$f,$v,$d"
}
def getRawRow(uuid: String, level: ConsistencyLevel)(implicit ec: ExecutionContext): Future[String] = {
val stmt = getInfotonUUID.bind(uuid).setConsistencyLevel(level)
cmwell.util.concurrent.retry(5, delayOnError, 2)(executeAsyncInternal(stmt))(ec).map { result =>
val sb = StringBuilder.newBuilder
sb.append("uuid,quad,field,value,data\\n")
while (!result.isExhausted()) {
val r: Row = result.one()
sb.append(s"${rowToRecord(r)}\\n")
}
sb.result()
}
}
def getReactiveRawRow(uuid: String, level: ConsistencyLevel): Source[String, NotUsed] = {
val stmt = getInfotonUUID.bind(uuid).setConsistencyLevel(level)
val s1 = Source.single("uuid,quad,field,value,data")
val s2 = CassandraSource(stmt)(storageDao.getSession).map(rowToRecord)
s1.concat(s2)
}
}
|
bryaakov/CM-Well
|
server/cmwell-irw/src/main/scala/cmwell/irw/IRWServiceNativeImpl2.scala
|
Scala
|
apache-2.0
| 32,627 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.cache
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.nio.charset.StandardCharsets
import akka.pattern.Patterns
import akka.actor.{ActorRef, ActorSelection}
import net.liftweb.json._
import scala.concurrent._
import net.liftweb.json.Extraction._
import scala.util.Success
import scala.util.Failure
import akka.util.Timeout
import com.webtrends.harness.utils.Loan._
import net.liftweb.json.ext.JodaTimeSerializers
/**
* Trait to help with caching objects in the wookiee-cache
* A cacheable object will be converted to JSON and then stored as Array[Byte] in the cache manager
*
* TODO: Need to implement timeout strategies. Currently an entry simply times out relative to when it
* was inserted, so an object that is rewritten frequently keeps refreshing its insert time and may never
* time out. We also need a strategy where everything times out periodically (e.g. every hour).
*/
trait Cacheable[T] extends Serializable {
this : Serializable =>
@transient implicit def liftJsonFormats:Formats = DefaultFormats.lossless + NoTypeHints ++ JodaTimeSerializers.all
/**
* Gets the ttl for the data in the cache, by default will be set to None which means it will never time out
* The value is in milliseconds. The ttl logic will be up to the specific cache implementation to maintain.
* This can be overridden on individual calls to write
*
* @return Optional milliseconds for ttl of data in cache
*/
def dataTimeout : Option[Long] = None
/**
* The key used to cache the object
*
* @return
*/
def key : String = ???
/**
* The namespace used for the object
*
* @return
*/
def namespace : String = ???
/**
* The extract function gives the cacheable object control over how typed data is recovered from the byte array.
* By default it parses the bytes as a JSON object and extracts it to the class type T.
*
* @param obj the array of bytes
* @return
*/
protected def extract(obj:Array[Byte])(implicit m: Manifest[T]) : Option[T] = {
Some(JsonParser.parse(new String(obj, StandardCharsets.UTF_8)).extract[T])
}
/**
* Convenience method that one can call (after overriding extract) to convert bytes to a Serializable class.
* Be sure to also override getBytes and call serialToBytes.
*/
protected def bytesToSerial(obj:Array[Byte]) : Option[T] = {
loan (new ByteArrayInputStream(obj)) to { ba =>
loan (new ObjectInputStream(ba)) to { os =>
Some(os.readObject().asInstanceOf[T])
}
}
}
/**
* The getBytes function gives the cacheable object control over how its data is written to the cache.
* By default it uses Lift JSON to decompose the object, renders the JSON to a string,
* and then simply calls getBytes on that string.
*/
protected def getBytes : Array[Byte] = {
compactRender(decompose(this)).getBytes(StandardCharsets.UTF_8)
}
/**
* Convenience method that one can call (after overriding getBytes) to convert a Serializable class
* to bytes to store in memcache. Be sure to also override extract and call bytesToSerial.
*/
protected def serialToBytes(obj: Serializable) : Array[Byte] = {
loan (new ByteArrayOutputStream()) to { bs =>
loan (new ObjectOutputStream(bs)) to { os =>
os.writeObject(obj)
}
bs.toByteArray
}
}
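// Hedged sketch of the Java-serialization variant described above (hypothetical class):
//   case class Blob(id: String, payload: Map[String, Int]) extends Cacheable[Blob] {
//     override def key: String = id
//     override def namespace: String = "blobs"
//     override protected def getBytes: Array[Byte] = serialToBytes(this)
//     override protected def extract(obj: Array[Byte])(implicit m: Manifest[Blob]): Option[Blob] =
//       bytesToSerial(obj)
//   }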
def readFromCacheSelect(cacheRef:ActorSelection, cacheKey:Option[CacheKey]=None)
(implicit timeout:Timeout, executor:ExecutionContext, m:Manifest[T]) : Future[Option[T]] = {
val p = Promise[Option[T]]
cacheRef.resolveOne onComplete {
case Success(s) =>
readFromCache(s, cacheKey)(timeout, executor, m) onComplete {
case Success(result) => p success result
case Failure(f) => p failure f
}
case Failure(f) => p failure f
}
p.future
}
/**
* Looks in the supplied cache for the current object
*
* @param cacheRef This is a reference to the cache actor
* @param timeout Timeout for the cache read
* @return
*/
def readFromCache(cacheRef:ActorRef, cacheKey:Option[CacheKey]=None)
(implicit timeout:Timeout, executor:ExecutionContext, m:Manifest[T]) : Future[Option[T]] = {
val ck = getCacheKey(cacheKey)
val p = Promise[Option[T]]
val future = Patterns.ask(cacheRef, Get(namespace, ck), timeout).mapTo[Option[Array[Byte]]]
future onComplete {
case Success(Some(d)) => p success extract(d)
case Success(None) => p success None
case Failure(f) => p failure f
}
p.future
}
def writeInCacheSelect(cacheRef:ActorSelection, cacheKey:Option[CacheKey]=None,
ttlSec: Option[Int] = dataTimeout.map(_.toInt / 1000))
(implicit timeout:Timeout, executor:ExecutionContext) : Future[Boolean] = {
val p = Promise[Boolean]
cacheRef.resolveOne onComplete {
case Success(s) =>
writeInCache(s, cacheKey, ttlSec)(timeout, executor) onComplete {
case Success(_) => p success true
case Failure(f) => p failure f
}
case Failure(f) => p failure f
}
p.future
}
/**
* Writes the current object to the supplied cache
*
* @param cacheRef This is a reference to the cache actor
* @param timeout Timeout for the cache write
* @return
*/
def writeInCache(cacheRef:ActorRef, cacheKey:Option[CacheKey]=None,
ttlSec: Option[Int] = dataTimeout.map(_.toInt / 1000))
(implicit timeout:Timeout, executor:ExecutionContext) : Future[Boolean] = {
val p = Promise[Boolean]
val future = Patterns.ask(
cacheRef, Add(namespace, getCacheKey(cacheKey), this.getBytes, ttlSec), timeout).mapTo[Boolean]
future onComplete {
case Success(_) => p success true
case Failure(f) => p failure f
}
p.future
}
/**
* Deletes the current item from the cache
*
* @param cacheRef This is a reference to the cache actor
* @param cacheKey Optional key, usually this is managed by the object itself
* @param timeout timeout for the cache delete response
* @param executor the executor
* @return true if delete was successful
*/
def deleteFromCache(cacheRef:ActorRef, cacheKey:Option[CacheKey]=None)
(implicit timeout:Timeout, executor:ExecutionContext) : Future[Boolean] = {
val p = Promise[Boolean]
val future = Patterns.ask(cacheRef, Delete(namespace, getCacheKey(cacheKey)), timeout).mapTo[Boolean]
future onComplete {
case Success(_) => p success true
case Failure(f) => p failure f
}
p.future
}
def deleteFromCacheSelect(cacheRef:ActorSelection, cacheKey:Option[CacheKey]=None)
(implicit timeout:Timeout, executor:ExecutionContext) : Future[Boolean] = {
val p = Promise[Boolean]
cacheRef.resolveOne onComplete {
case Success(succ) =>
deleteFromCache(succ, cacheKey)(timeout, executor) onComplete {
case Success(_) => p success true
case Failure(f) => p failure f
}
case Failure(f) => p failure f
}
p.future
}
def deserialize(data:Array[Byte])(implicit m:Manifest[T]) : Option[T] = {
extract(data)
}
protected def getCacheKey(cacheKey:Option[CacheKey]) : String = {
cacheKey match {
case Some(s) => s.toString()
case None => key
}
}
}
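// A hypothetical sketch showing how the serialization hooks above pair up: a
// subclass that opts out of the default JSON round-trip overrides extract to
// call bytesToSerial and getBytes to call serialToBytes. The Session class, its
// fields, and the assumption that key is the only other abstract member are
// illustrative, not part of this trait's contract.
//
// case class Session(id: String, hits: Int) extends Cacheable[Session] with Serializable {
//   def key: String = id
//   override def namespace: String = "sessions"
//   override protected def extract(obj: Array[Byte])(implicit m: Manifest[Session]): Option[Session] =
//     bytesToSerial(obj)
//   override protected def getBytes: Array[Byte] = serialToBytes(this)
// }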
|
Webtrends/wookiee-cache
|
src/main/scala/com/webtrends/harness/component/cache/Cacheable.scala
|
Scala
|
apache-2.0
| 8,200 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.cluster.main
import java.util.concurrent.TimeUnit
import akka.actor._
import akka.cluster.ClusterEvent._
import akka.cluster.{MemberStatus, Member, Cluster}
import akka.cluster.ddata.DistributedData
import akka.cluster.singleton.{ClusterSingletonProxySettings, ClusterSingletonProxy, ClusterSingletonManagerSettings, ClusterSingletonManager}
import com.typesafe.config.ConfigValueFactory
import org.apache.gearpump.cluster.ClusterConfig
import org.apache.gearpump.cluster.master.Master.MasterListUpdated
import org.apache.gearpump.cluster.master.{Master => MasterActor, MasterNode}
import org.apache.gearpump.util.Constants._
import org.apache.gearpump.util.LogUtil.ProcessType
import org.apache.gearpump.util.{AkkaApp, Constants, LogUtil}
import org.slf4j.Logger
import scala.collection.JavaConverters._
import scala.collection.immutable
import scala.concurrent.Await
import scala.concurrent.duration._
object Master extends AkkaApp with ArgumentsParser {
private var LOG: Logger = LogUtil.getLogger(getClass)
override def akkaConfig: Config = ClusterConfig.master()
override val options: Array[(String, CLIOption[Any])] =
Array("ip" -> CLIOption[String]("<master ip address>", required = true),
"port" -> CLIOption("<master port>", required = true))
override val description = "Start Master daemon"
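// Example invocation (launcher path and address are illustrative):
//   bin/master -ip 127.0.0.1 -port 3000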
def main(akkaConf: Config, args: Array[String]): Unit = {
this.LOG = {
LogUtil.loadConfiguration(akkaConf, ProcessType.MASTER)
LogUtil.getLogger(getClass)
}
val config = parse(args)
master(config.getString("ip"), config.getInt("port"), akkaConf)
}
private def verifyMaster(master: String, port: Int, masters: Iterable[String]) = {
masters.exists { hostPort =>
hostPort == s"$master:$port"
}
}
private def master(ip: String, port: Int, akkaConf: Config): Unit = {
val masters = akkaConf.getStringList(Constants.GEARPUMP_CLUSTER_MASTERS).asScala
if (!verifyMaster(ip, port, masters)) {
LOG.error(s"The provided ip $ip and port $port doesn't conform with config at " +
s"gearpump.cluster.masters: ${masters.mkString(", ")}")
System.exit(-1)
}
val masterList = masters.map(master => s"akka.tcp://${MASTER}@$master").toList.asJava
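// Majority quorum: strictly more than half of the configured masters must be up.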
val quorum = masterList.size() / 2 + 1
val masterConfig = akkaConf.
withValue("akka.remote.netty.tcp.port", ConfigValueFactory.fromAnyRef(port)).
withValue(NETTY_TCP_HOSTNAME, ConfigValueFactory.fromAnyRef(ip)).
withValue("akka.cluster.seed-nodes", ConfigValueFactory.fromAnyRef(masterList)).
withValue(s"akka.cluster.role.${MASTER}.min-nr-of-members",
ConfigValueFactory.fromAnyRef(quorum))
LOG.info(s"Starting Master Actor system $ip:$port, master list: ${masters.mkString(";")}")
val system = ActorSystem(MASTER, masterConfig)
val replicator = DistributedData(system).replicator
LOG.info(s"Replicator path: ${replicator.path}")
// Starts singleton manager
val singletonManager = system.actorOf(ClusterSingletonManager.props(
singletonProps = Props(classOf[MasterWatcher], MASTER),
terminationMessage = PoisonPill,
settings = ClusterSingletonManagerSettings(system).withSingletonName(MASTER_WATCHER)
.withRole(MASTER)),
name = SINGLETON_MANAGER)
// Start master proxy
val masterProxy = system.actorOf(ClusterSingletonProxy.props(
singletonManagerPath = s"/user/${SINGLETON_MANAGER}",
// The effective singleton is s"${MASTER_WATCHER}/$MASTER" instead of s"${MASTER_WATCHER}".
// Master is created when there is a majority of machines started.
settings = ClusterSingletonProxySettings(system)
.withSingletonName(s"${MASTER_WATCHER}/$MASTER").withRole(MASTER)),
name = MASTER
)
LOG.info(s"master proxy is started at ${masterProxy.path}")
val mainThread = Thread.currentThread()
Runtime.getRuntime().addShutdownHook(new Thread() {
override def run(): Unit = {
if (!system.whenTerminated.isCompleted) {
LOG.info("Triggering shutdown hook....")
system.stop(masterProxy)
val cluster = Cluster(system)
cluster.leave(cluster.selfAddress)
cluster.down(cluster.selfAddress)
try {
Await.result(system.whenTerminated, Duration(3, TimeUnit.SECONDS))
} catch {
case ex: Exception => // Ignore
}
system.terminate()
mainThread.join()
}
}
})
Await.result(system.whenTerminated, Duration.Inf)
}
}
class MasterWatcher(role: String) extends Actor with ActorLogging {
import context.dispatcher
val cluster = Cluster(context.system)
val config = context.system.settings.config
val masters = config.getList("akka.cluster.seed-nodes")
val quorum = masters.size() / 2 + 1
val system = context.system
// Sorts by age, oldest first
val ageOrdering = Ordering.fromLessThan[Member] { (a, b) => a.isOlderThan(b) }
var membersByAge: immutable.SortedSet[Member] = immutable.SortedSet.empty(ageOrdering)
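// Never invoked: preStart() immediately switches behavior to waitForInit.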
def receive: Receive = null
// Subscribes to MemberEvent, re-subscribe when restart
override def preStart(): Unit = {
cluster.subscribe(self, classOf[MemberEvent])
context.become(waitForInit)
}
override def postStop(): Unit = {
cluster.unsubscribe(self)
}
def matchingRole(member: Member): Boolean = member.hasRole(role)
def waitForInit: Receive = {
case state: CurrentClusterState => {
membersByAge = immutable.SortedSet.empty(ageOrdering) ++ state.members.filter(m =>
m.status == MemberStatus.Up && matchingRole(m))
if (membersByAge.size < quorum) {
log.info(s"We cannot get a quorum, $quorum, " +
s"shutting down...${membersByAge.iterator.mkString(",")}")
context.become(waitForShutdown)
self ! MasterWatcher.Shutdown
} else {
val master = context.actorOf(Props(classOf[MasterActor]), MASTER)
notifyMasterMembersChange(master)
context.become(waitForClusterEvent(master))
}
}
}
def waitForClusterEvent(master: ActorRef): Receive = {
case MemberUp(m) if matchingRole(m) => {
membersByAge += m
notifyMasterMembersChange(master)
}
case mEvent: MemberEvent if (mEvent.isInstanceOf[MemberExited] ||
mEvent.isInstanceOf[MemberRemoved]) && matchingRole(mEvent.member) => {
log.info(s"member removed ${mEvent.member}")
val m = mEvent.member
membersByAge -= m
if (membersByAge.size < quorum) {
log.info(s"We cannot get a quorum, $quorum, " +
s"shutting down...${membersByAge.iterator.mkString(",")}")
context.become(waitForShutdown)
self ! MasterWatcher.Shutdown
} else {
notifyMasterMembersChange(master)
}
}
}
private def notifyMasterMembersChange(master: ActorRef): Unit = {
val masters = membersByAge.toList.map{ member =>
MasterNode(member.address.host.getOrElse("Unknown-Host"),
member.address.port.getOrElse(0))
}
master ! MasterListUpdated(masters)
}
def waitForShutdown: Receive = {
case MasterWatcher.Shutdown => {
cluster.unsubscribe(self)
cluster.leave(cluster.selfAddress)
context.stop(self)
system.scheduler.scheduleOnce(Duration.Zero) {
try {
Await.result(system.whenTerminated, Duration(3, TimeUnit.SECONDS))
} catch {
case ex: Exception => // Ignore
}
system.terminate()
}
}
}
}
object MasterWatcher {
object Shutdown
}
|
manuzhang/incubator-gearpump
|
core/src/main/scala/org/apache/gearpump/cluster/main/Master.scala
|
Scala
|
apache-2.0
| 8,462 |
package ru.wordmetrix.webcrawler
import org.scalatest.{ BeforeAndAfterAll, Matchers, WordSpecLike }
import GMLStorage.{ GMLStorageEstimator, GMLStorageSeed }
import akka.actor.ActorSystem
import akka.testkit.{ DefaultTimeout, ImplicitSender, TestKit, TestProbe }
import ru.wordmetrix.utils.CFG
import ru.wordmetrix.vector.Vector
import java.io.File
import ru.wordmetrix.smartfile.SmartFile.fromFile
import EvaluatePriorityMatrix._
import LinkedVectorsStorage._
class TestLinkedVectorsStorage extends TestKit(ActorSystem("TestStorage"))
with Tools
with DefaultTimeout with ImplicitSender
with WordSpecLike with Matchers with BeforeAndAfterAll {
override def afterAll(): Unit = {
system.terminate()
}
implicit val cfg = CFG(path = new File("/tmp/test"), isdebug = false)
implicit val accuracy = cfg.accuracy
val matrix1 = (new File(".") / "data" / "matrix1.dat").readLines.mkString
val matrix2 = (new File(".") / "data" / "matrix2.dat").readLines.mkString
val vd1 = (new File(".") / "data" / "1.dat").readLines.mkString
val vd2 = (new File(".") / "data" / "2.dat").readLines.mkString
val mids1 = (new File(".") / "data" / "map1.lst").readLines.mkString
val mids2 = (new File(".") / "data" / "map2.lst").readLines.mkString
"A linkedvectorstorage" should {
"dump a vector" in {
val gather = TestProbe()
val seedqueue = TestProbe()
val storage = system.actorOf(
LinkedVectorsStorage.props(cfg), "TestLinkedVectorStorage")
val data = cfg.path / "vectors"
data / "matrix.dat" write("")
data / "1.dat" write("test")
data / "1.dat" write("test")
data / "map.lst" write("")
gather.send(storage, LinkedVectorsStorageSeed(
uri(1),
Set(uri(1), uri(2), uri(3)),
Vector("one" -> 1.0))
)
Thread.sleep(300)
assert((data / "matrix.dat").readLines.mkString == matrix1)
assert((data / "1.dat").readLines.mkString == vd1)
assert((data / "map.lst").readLines.mkString == mids1)
gather.send(storage, LinkedVectorsStorageSeed(
uri(2),
Set(uri(3), uri(4)),
Vector("two" -> 2.0)
))
Thread.sleep(300)
assert((data / "matrix.dat").readLines.mkString == matrix2)
assert((data / "1.dat").readLines.mkString == vd1)
assert((data / "2.dat").readLines.mkString == vd2)
assert((data / "map.lst").readLines.mkString == mids2)
assert(matrix1 != matrix2)
assert(mids1 != mids2)
watch(storage)
gather.send(storage, EvaluatePriorityMatrixStop)
expectTerminated(storage)
}
}
}
|
electricmind/webcrawler
|
src/test/scala/ru/wordmetrix/webcrawler/TestLinkedVectorsStorage.scala
|
Scala
|
apache-2.0
| 2,890 |
package incognito.anonymization.redistribution;
//package uis.cipsi.incognito.anonymization.redistribution
//
//import uis.cipsi.incognito.informationLoss.InformationLoss
//import scala.collection.mutable.ArrayBuffer
//import uis.cipsi.incognito.rdd.Data
//import org.apache.spark.broadcast.Broadcast
//import org.apache.spark.rdd.RDD
//import scala.collection.Map
//import org.apache.spark.storage.StorageLevel
//import java.util.{ Random => JavaRandom }
//import scala.util.hashing.MurmurHash3
//import java.nio.ByteBuffer
//
//class InitializeCenters(k: Int,
// runs: Int,
// initializationSteps: Int,
// seed: Long,
// _taxonomy: Broadcast[Map[String, String]],
// _categoricalQIHeights: Broadcast[Map[Int, Int]],
// pidindex: Int) extends Serializable {
//
// /**
// * Initialize `runs` sets of cluster centers using the k-means|| algorithm by Bahmani et al.
// * (Bahmani et al., Scalable K-Means++, VLDB 2012). This is a variant of k-means++ that tries
// * to find dissimilar cluster centers by starting with a random center and then doing
// * passes where more centers are chosen with probability proportional to their squared distance
// * to the current cluster set. It results in a provable approximation to an optimal clustering.
// *
// * The original paper can be found at http://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf.
// */
// def initKMeansParallel(data: RDD[Data]): Array[Array[Data]] = {
// // Initialize empty centers and point costs.
// val centers = Array.tabulate(runs)(r => ArrayBuffer.empty[Data])
// var costs = data.map(_ => Array.fill(runs)(Double.PositiveInfinity))
//
// // Initialize each run's first center to a random point.
// val seed = new XORShiftRandom(this.seed).nextInt()
// val sample = data.takeSample(true, runs, seed).toSeq
// val newCenters = Array.tabulate(runs)(r => ArrayBuffer(sample(r)))
//
// /** Merges new centers to centers. */
// def mergeNewCenters(): Unit = {
// var r = 0
// while (r < runs) {
// centers(r) ++= newCenters(r)
// newCenters(r).clear()
// r += 1
// }
// }
//
// // On each step, sample 2 * k points on average for each run with probability proportional
// // to their squared distance from that run's centers. Note that only distances between points
// // and new centers are computed in each iteration.
// var step = 0
// while (step < initializationSteps) {
// val bcNewCenters = data.context.broadcast(newCenters)
// val preCosts = costs
// costs = data.zip(preCosts).map {
// case (point, cost) =>
// Array.tabulate(runs) { r =>
// math.min(KMeans.pointCost(bcNewCenters.value(r), point, _taxonomy, _categoricalQIHeights, pidindex), cost(r))
// }
// }.persist(StorageLevel.MEMORY_ONLY)
// val sumCosts = costs
// .aggregate(new Array[Double](runs))(
// seqOp = (s, v) => {
// // s += v
// var r = 0
// while (r < runs) {
// s(r) += v(r)
// r += 1
// }
// s
// },
// combOp = (s0, s1) => {
// // s0 += s1
// var r = 0
// while (r < runs) {
// s0(r) += s1(r)
// r += 1
// }
// s0
// })
// preCosts.unpersist(blocking = false)
// val chosen = data.zip(costs).mapPartitionsWithIndex { (index, pointsWithCosts) =>
// val rand = new XORShiftRandom(seed ^ (step << 16) ^ index)
// pointsWithCosts.flatMap {
// case (p, c) =>
// val rs = (0 until runs).filter { r =>
// rand.nextDouble() < 2.0 * c(r) * k / sumCosts(r)
// }
// if (rs.length > 0) Some(p, rs) else None
// }
// }.collect()
// mergeNewCenters()
// chosen.foreach {
// case (p, rs) =>
// rs.foreach(newCenters(_) += p)
// }
// step += 1
// }
//
// mergeNewCenters()
// costs.unpersist(blocking = false)
//
// // Finally, we might have a set of more than k candidate centers for each run; weigh each
// // candidate by the number of points in the dataset mapping to it and run a local k-means++
// // on the weighted centers to pick just k of them
// val bcCenters = data.context.broadcast(centers)
// val weightMap = data.flatMap { p =>
// Iterator.tabulate(runs) { r =>
// ((r, KMeans.findClosest(bcCenters.value(r), p, _taxonomy, _categoricalQIHeights, pidindex)._1), 1.0)
// }
// }.reduceByKey(_ + _).collectAsMap()
// val finalCenters = (0 until runs).par.map { r =>
// val myCenters = centers(r).toArray
// val myWeights = (0 until myCenters.length).map(i => weightMap.getOrElse((r, i), 0.0)).toArray
// val initialCentroidWiIndex = myCenters.zipWithIndex
// val centroids = (new InformationLoss).getCenter(initialCentroidWiIndex, pidindex, k, _taxonomy, _categoricalQIHeights)
// centroids
// // LocalKMeans.kMeansPlusPlus(r, myCenters, myWeights, k, 30)
// }
//
// finalCenters.toArray
// }
//
// object KMeans {
// /**
// * Returns the index of the closest center to the given point, as well as the squared distance.
// */
// def findClosest(
// centers: TraversableOnce[Data],
// point: Data,
// _taxonomy: Broadcast[Map[String, String]],
// _categoricalQIHeights: Broadcast[Map[Int, Int]],
// pidIndex: Int): (Int, Double) = {
// var bestDistance = Double.PositiveInfinity
// var bestIndex = 0
// var i = 0
// centers.foreach { center =>
// // Since \|a - b\| \geq |\|a\| - \|b\||, we can use this lower bound to avoid unnecessary
// // distance computation.
// var lowerBoundOfSqDist = ((new InformationLoss).distance(point.qisNumeric, center.qisNumeric, pidIndex)
// + (new InformationLoss).distance(point.qisCategorical, center.qisCategorical, _taxonomy, _categoricalQIHeights))
// if (lowerBoundOfSqDist < bestDistance) {
// val distance: Double = (new InformationLoss).distance(point.qisNumeric, center.qisNumeric, pidIndex) +
// (new InformationLoss).distance(point.qisCategorical, center.qisCategorical, _taxonomy, _categoricalQIHeights)
// if (distance < bestDistance) {
// bestDistance = distance
// bestIndex = i
// }
// }
// i += 1
// }
// (bestIndex, bestDistance)
// }
//
// /**
// * Returns the K-means cost of a given point against the given cluster centers.
// */
// def pointCost(
// centers: TraversableOnce[Data],
// point: Data,
// _taxonomy: Broadcast[Map[String, String]],
// _categoricalQIHeights: Broadcast[Map[Int, Int]],
// pidIndex: Int): Double =
// findClosest(centers, point, _taxonomy, _categoricalQIHeights, pidIndex)._2
// }
//}
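// A minimal, self-contained sketch of the k-means|| oversampling step that the
// archived code above implements on Spark RDDs, here written against plain
// Scala collections. The Point alias, the squared-distance function, and the
// oversampling factor l (typically 2 * k) are assumptions for illustration only.
object KMeansParallelSketch {
  type Point = Array[Double]

  private def sqDist(a: Point, b: Point): Double =
    a.zip(b).map { case (x, y) => val d = x - y; d * d }.sum

  // One pass: keep each point with probability l * d^2(x, C) / phi(C), where
  // phi(C) is the total cost of the current center set C.
  def oversample(data: Seq[Point], centers: Seq[Point], l: Double,
                 rng: scala.util.Random): Seq[Point] = {
    val costs = data.map(p => centers.map(c => sqDist(p, c)).min)
    val totalCost = costs.sum
    data.zip(costs).collect {
      case (p, cost) if rng.nextDouble() < l * cost / totalCost => p
    }
  }
}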
|
achak1987/SparkAnonymizationToolkit
|
src/main/scala/incognito/archive/InitializeCenters.scala
|
Scala
|
apache-2.0
| 6,950 |
package sparklyr
class ClassUtils {
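// Prefer the current thread's context class loader; fall back to the loader
// that defined this class when no context loader is set.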
def getClassLoader: ClassLoader = {
Option(Thread.currentThread().getContextClassLoader).getOrElse(getClass.getClassLoader)
}
def classForName(className: String): Class[_] = {
Class.forName(className, true, getClassLoader)
}
def classExists(className: String): Boolean = {
try {
classForName(className)
true
} catch {
case e: ClassNotFoundException => false
}
}
}
|
rstudio/sparklyr
|
java/spark-1.6.0/classutils.scala
|
Scala
|
apache-2.0
| 453 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core.javac
import akka.event.slf4j.SLF4JLogging
import com.sun.source.tree.{ IdentifierTree, Tree }
import com.sun.source.util.TreePath
import javax.lang.model.`type`.{ DeclaredType, PrimitiveType, TypeKind, TypeMirror }
import javax.lang.model.element.{ ElementKind, Element, TypeElement }
import org.ensime.api.deprecating
import org.ensime.core.{ DocFqn, DocSig }
@deprecating("prefer FullyQualifiedName")
final case class JavaFqn(pack: Option[String], typename: Option[String], fieldOrMethod: Option[String]) {
def toDocSig = DocSig(DocFqn(pack.getOrElse(""), typename.getOrElse("")), fieldOrMethod)
def toFqnString = Array(pack, typename, fieldOrMethod).flatten.mkString(".")
def toQueryString = Array(pack, typename.map(_.replace(".", "$")), fieldOrMethod).flatten.mkString(".")
}
object JavaFqn {
def apply(pack: String, tpe: String, fieldOrMethod: Option[String]): JavaFqn = {
JavaFqn(
if (pack.isEmpty) None else Some(pack),
if (tpe.isEmpty) None else Some(tpe),
fieldOrMethod
)
}
}
trait Helpers extends UnsafeHelpers with SLF4JLogging {
def typeMirror(c: Compilation, t: Tree): Option[TypeMirror] = {
Option(c.trees.getTypeMirror(c.trees.getPath(c.compilationUnit, t)))
}
def typeElement(c: Compilation, t: Tree): Option[Element] = {
typeMirror(c, t).map(c.types.asElement)
}
def element(c: Compilation, path: TreePath): Option[Element] = {
Option(c.trees.getElement(path)).orElse(unsafeGetElement(path.getLeaf)).orElse {
Option(c.trees.getTypeMirror(path)).flatMap { t => Option(c.types.asElement(t)) }
}
}
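// Heuristic: all-lowercase dotted segments form the package; the remainder is the type name.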
private def parseFqnAsClass(s: String): Option[JavaFqn] = {
val (front, back) = s.split("\\.").partition { s => s.forall(Character.isLowerCase) }
Some(JavaFqn(front.mkString("."), back.mkString("."), None))
}
def fqn(c: Compilation, el: Element): Option[JavaFqn] = {
el.getKind match {
case ElementKind.LOCAL_VARIABLE | ElementKind.PARAMETER =>
Some(JavaFqn(None, None, Some(el.getSimpleName.toString)))
case ElementKind.CONSTRUCTOR | ElementKind.ENUM_CONSTANT
| ElementKind.METHOD | ElementKind.FIELD =>
Option(el.getEnclosingElement).flatMap(fqn(c, _)).map(_.copy(fieldOrMethod = Some(el.toString)))
case k => parseFqnAsClass(el.toString)
}
}
def fqn(c: Compilation, p: TreePath): Option[JavaFqn] = {
element(c, p).flatMap(fqn(c, _)).orElse({
p.getLeaf match {
case t: IdentifierTree => Some(JavaFqn(None, None, Some(t.getName.toString)))
case t => None
}
}).orElse(fqn(c, c.trees.getTypeMirror(p)))
}
def fqn(c: Compilation, t: Tree): Option[JavaFqn] = {
Option(c.trees.getPath(c.compilationUnit, t)).flatMap { p => fqn(c, p) }
}
def fqn(c: Compilation, tm: TypeMirror): Option[JavaFqn] = {
// "Using instanceof is not necessarily a reliable idiom for
// determining the effective class of an object in this modeling
// hierarchy since an implementation may choose to have a single
// object implement multiple TypeMirror subinterfaces." --
// TypeMirror docs
tm match {
case tm: DeclaredType if tm.getKind == TypeKind.DECLARED => {
tm.asElement match {
case te: TypeElement => parseFqnAsClass(te.getQualifiedName.toString)
case _ => {
None
}
}
}
case tm: PrimitiveType if tm.getKind.isPrimitive => Some(JavaFqn(None, Some(tm.toString), None))
case _ => None
}
}
}
|
d1egoaz/ensime-sbt
|
src/sbt-test/sbt-ensime/ensime-server/core/src/main/scala/org/ensime/core/javac/Helpers.scala
|
Scala
|
apache-2.0
| 3,649 |
/*
* Copyright (C) 2016-2020 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.persistence.cassandra.journal
import akka.actor.{ ExtendedActorSystem, Props }
import akka.persistence.RecoveryCompleted
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec, Persister }
import akka.serialization.BaseSerializer
import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
object CassandraSerializationSpec {
val config = ConfigFactory.parseString(s"""
|akka.actor.serialize-messages=false
|akka.actor.serializers.crap="akka.persistence.cassandra.journal.BrokenDeSerialization"
|akka.actor.serialization-identifiers."akka.persistence.cassandra.journal.BrokenDeSerialization" = 666
|akka.actor.serialization-bindings {
| "akka.persistence.cassandra.Persister$$CrapEvent" = crap
|}
|akka.persistence.journal.max-deletion-batch-size = 3
|akka.persistence.publish-confirmations = on
|akka.persistence.publish-plugin-commands = on
|akka.persistence.cassandra.journal.target-partition-size = 5
|akka.persistence.cassandra.max-result-size = 3
|akka.persistence.cassandra.journal.keyspace=CassandraIntegrationSpec
|akka.persistence.cassandra.snapshot.keyspace=CassandraIntegrationSpecSnapshot
|
""".stripMargin).withFallback(CassandraLifecycle.config)
}
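// A serializer that writes no bytes and always throws on read, used below to
// force a deserialization failure during recovery.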
class BrokenDeSerialization(override val system: ExtendedActorSystem) extends BaseSerializer {
override def includeManifest: Boolean = false
override def toBinary(o: AnyRef): Array[Byte] =
// I was serious with the class name
Array.emptyByteArray
override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef =
throw new RuntimeException("I can't deserialize a single thing")
}
class CassandraSerializationSpec extends CassandraSpec(CassandraSerializationSpec.config) {
import akka.persistence.cassandra.Persister._
"A Cassandra journal" must {
"Fail recovery when deserialization fails" in {
val probe = TestProbe()
val incarnation1 = system.actorOf(Props(new Persister("id1", probe.ref)))
probe.expectMsgType[RecoveryCompleted]
incarnation1 ! CrapEvent(1)
probe.expectMsg(CrapEvent(1))
probe.watch(incarnation1)
system.stop(incarnation1)
probe.expectTerminated(incarnation1)
system.actorOf(Props(new Persister("id1", probe.ref)))
probe.expectMsgType[RuntimeException].getMessage shouldBe "I can't deserialize a single thing"
}
}
}
|
chbatey/akka-persistence-cassandra
|
core/src/test/scala/akka/persistence/cassandra/journal/CassandraSerializationSpec.scala
|
Scala
|
apache-2.0
| 2,579 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.redis.sink.writer
import com.datamountaineer.streamreactor.connect.redis.sink.config.{RedisConfig, RedisConfigConstants, RedisConnectionInfo, RedisSinkSettings}
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}
import org.apache.kafka.connect.sink.SinkRecord
import org.mockito.MockitoSugar
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import redis.clients.jedis.Jedis
import redis.embedded.RedisServer
import scala.collection.JavaConverters._
class RedisInsertSortedSetTest extends AnyWordSpec with Matchers with BeforeAndAfterAll with MockitoSugar {
val redisServer = new RedisServer(6379)
override def beforeAll() = redisServer.start()
override def afterAll() = redisServer.stop()
"Redis INSERT into Sorted Set (SS) writer" should {
"write Kafka records to a Redis Sorted Set" in {
val TOPIC = "cpuTopic"
val KCQL = s"INSERT INTO cpu_stats SELECT * from $TOPIC STOREAS SortedSet(score=ts)"
println("Testing KCQL : " + KCQL)
val props = Map(
RedisConfigConstants.REDIS_HOST->"localhost",
RedisConfigConstants.REDIS_PORT->"6379",
RedisConfigConstants.KCQL_CONFIG->KCQL
).asJava
val config = RedisConfig(props)
val connectionInfo = new RedisConnectionInfo("localhost", 6379, None)
val settings = RedisSinkSettings(config)
val writer = new RedisInsertSortedSet(settings)
writer.createClient(settings)
val schema = SchemaBuilder.struct().name("com.example.Cpu")
.field("type", Schema.STRING_SCHEMA)
.field("temperature", Schema.FLOAT64_SCHEMA)
.field("voltage", Schema.FLOAT64_SCHEMA)
.field("ts", Schema.INT64_SCHEMA).build()
val struct1 = new Struct(schema).put("type", "Xeon").put("temperature", 60.4).put("voltage", 90.1).put("ts", 1482180657010L)
val struct2 = new Struct(schema).put("type", "i7").put("temperature", 62.1).put("voltage", 103.3).put("ts", 1482180657020L)
val struct3 = new Struct(schema).put("type", "i7-i").put("temperature", 64.5).put("voltage", 101.1).put("ts", 1482180657030L)
val sinkRecord1 = new SinkRecord(TOPIC, 0, null, null, schema, struct1, 1)
val sinkRecord2 = new SinkRecord(TOPIC, 0, null, null, schema, struct2, 2)
val sinkRecord3 = new SinkRecord(TOPIC, 0, null, null, schema, struct3, 3)
val jedis = new Jedis(connectionInfo.host, connectionInfo.port)
// Clean up in-memory jedis
jedis.flushAll()
writer.write(Seq(sinkRecord1))
writer.write(Seq(sinkRecord2, sinkRecord3))
// Redis cardinality should now be 3
jedis.zcard("cpu_stats") shouldBe 3
val allSSrecords = jedis.zrange("cpu_stats", 0, 999999999999L)
val results = allSSrecords.asScala.toList
results.head shouldBe """{"type":"Xeon","temperature":60.4,"voltage":90.1,"ts":1482180657010}"""
results(1) shouldBe """{"type":"i7","temperature":62.1,"voltage":103.3,"ts":1482180657020}"""
results(2) shouldBe """{"type":"i7-i","temperature":64.5,"voltage":101.1,"ts":1482180657030}"""
}
}
}
|
datamountaineer/stream-reactor
|
kafka-connect-redis/src/test/scala/com/datamountaineer/streamreactor/connect/redis/sink/writer/RedisInsertSortedSetTest.scala
|
Scala
|
apache-2.0
| 3,796 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.physical.stream
import org.apache.flink.runtime.state.KeyGroupRangeAssignment.DEFAULT_LOWER_BOUND_MAX_PARALLELISM
import org.apache.flink.streaming.api.transformations.PartitionTransformation
import org.apache.flink.streaming.runtime.partitioner.{GlobalPartitioner, KeyGroupStreamPartitioner, StreamPartitioner}
import org.apache.flink.table.api.StreamTableEnvironment
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.plan.nodes.common.CommonPhysicalExchange
import org.apache.flink.table.plan.nodes.exec.{ExecNode, StreamExecNode}
import org.apache.flink.table.plan.util.KeySelectorUtil
import org.apache.flink.table.typeutils.BaseRowTypeInfo
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.{RelDistribution, RelNode}
import java.util
import org.apache.flink.api.dag.Transformation
import scala.collection.JavaConversions._
/**
* Stream physical RelNode for [[org.apache.calcite.rel.core.Exchange]].
*/
class StreamExecExchange(
cluster: RelOptCluster,
traitSet: RelTraitSet,
relNode: RelNode,
relDistribution: RelDistribution)
extends CommonPhysicalExchange(cluster, traitSet, relNode, relDistribution)
with StreamPhysicalRel
with StreamExecNode[BaseRow] {
override def producesUpdates: Boolean = false
override def needsUpdatesAsRetraction(input: RelNode): Boolean = false
override def consumesRetractions: Boolean = false
override def producesRetractions: Boolean = false
override def requireWatermark: Boolean = false
override def copy(
traitSet: RelTraitSet,
newInput: RelNode,
newDistribution: RelDistribution): StreamExecExchange = {
new StreamExecExchange(cluster, traitSet, newInput, newDistribution)
}
//~ ExecNode methods -----------------------------------------------------------
override def getInputNodes: util.List[ExecNode[StreamTableEnvironment, _]] = {
List(getInput.asInstanceOf[ExecNode[StreamTableEnvironment, _]])
}
override def replaceInputNode(
ordinalInParent: Int,
newInputNode: ExecNode[StreamTableEnvironment, _]): Unit = {
replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
}
override protected def translateToPlanInternal(
tableEnv: StreamTableEnvironment): Transformation[BaseRow] = {
val inputTransform = getInputNodes.get(0).translateToPlan(tableEnv)
.asInstanceOf[Transformation[BaseRow]]
val inputTypeInfo = inputTransform.getOutputType.asInstanceOf[BaseRowTypeInfo]
val outputTypeInfo = BaseRowTypeInfo.of(
FlinkTypeFactory.toLogicalRowType(getRowType))
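// SINGLETON sends all rows to a single downstream task via a global
// partitioner; HASH_DISTRIBUTED keys the stream on the distribution fields so
// rows are routed by key group.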
relDistribution.getType match {
case RelDistribution.Type.SINGLETON =>
val partitioner = new GlobalPartitioner[BaseRow]
val transformation = new PartitionTransformation(
inputTransform,
partitioner.asInstanceOf[StreamPartitioner[BaseRow]])
transformation.setOutputType(outputTypeInfo)
transformation
case RelDistribution.Type.HASH_DISTRIBUTED =>
// TODO Eliminate duplicate keys
val selector = KeySelectorUtil.getBaseRowSelector(
relDistribution.getKeys.map(_.toInt).toArray, inputTypeInfo)
val partitioner = new KeyGroupStreamPartitioner(selector,
DEFAULT_LOWER_BOUND_MAX_PARALLELISM)
val transformation = new PartitionTransformation(
inputTransform,
partitioner.asInstanceOf[StreamPartitioner[BaseRow]])
transformation.setOutputType(outputTypeInfo)
transformation
case _ =>
throw new UnsupportedOperationException(
s"Unsupported RelDistribution type: ${relDistribution.getType}")
}
}
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/physical/stream/StreamExecExchange.scala
|
Scala
|
apache-2.0
| 4,597 |
/*
* Copyright © 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.messages
sealed trait AuthMechanism { val asStr : String }
case object MONGODB_X509 extends AuthMechanism { override def toString = asStr; val asStr = "MONGODB-X509"; }
case object MONGODB_CR extends AuthMechanism { override def toString = asStr; val asStr = "MONGODB-CR" }
case object GSSAPI extends AuthMechanism { override def toString = asStr; val asStr = "GSSAPI" }
case object PLAIN extends AuthMechanism { override def toString = asStr; val asStr = "PLAIN" }
object AuthMechanism {
def apply(str : String) : AuthMechanism = {
str match {
case MONGODB_X509.asStr ⇒ MONGODB_X509
case MONGODB_CR.asStr ⇒ MONGODB_CR
case GSSAPI.asStr ⇒ GSSAPI
case PLAIN.asStr ⇒ PLAIN
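// Unknown mechanism names fall back to X.509 rather than failing.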
case _ ⇒ MONGODB_X509
}
}
}
|
reactific/RxMongo
|
messages/src/main/scala/rxmongo/messages/AuthMechanism.scala
|
Scala
|
mit
| 1,918 |