code (string, 5-1M chars) | repo_name (string, 5-109 chars) | path (string, 6-208 chars) | language (1 class) | license (15 classes) | size (int64, 5-1M)
---|---|---|---|---|---
package com.soteradefense.datawake.trails.topology.search.data
import backtype.storm.tuple.Values
import com.soteradefense.datawake.trails.data.StormData
/**
* Helper Class for parsing a new Datawake term
* @param org The org it belongs to
* @param domain The domain it belongs to
* @param trail The trail it belongs to
* @param term The search term.
* @param isRelevant Relevant or Irrelevant?
*/
class DatawakeTerm(org: String, domain: String, trail: String, term: String, isRelevant: Boolean) extends StormData {
override def toValues: Values = {
new Values(org, domain, trail, term, isRelevant.asInstanceOf[java.lang.Boolean])
}
}
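// --- Editor's usage sketch (not part of the original file; names and values are illustrative) ---
// Shows the tuple layout produced by toValues; a real topology would hand this to a Storm collector.
object DatawakeTermExample {
  def main(args: Array[String]): Unit = {
    val term = new DatawakeTerm("demo-org", "example.com", "trail-1", "open source", isRelevant = true)
    println(term.toValues) // [demo-org, example.com, trail-1, open source, true]
  }
}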
| Sotera/datawake-prefetch | trail-specific-search/src/main/scala/com/soteradefense/datawake/trails/topology/search/data/DatawakeTerm.scala | Scala | apache-2.0 | 652 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl
import java.util.LinkedList
import com.amd.aparapi.internal.model.HardCodedClassModels
import com.amd.aparapi.internal.model.Tuple2ClassModel
import com.amd.aparapi.internal.model.DenseVectorClassModel
import com.amd.aparapi.internal.model.SparseVectorClassModel
import com.amd.aparapi.internal.model.ClassModel
import com.amd.aparapi.internal.writer.ScalaArrayParameter
import com.amd.aparapi.internal.writer.ScalaParameter
import com.amd.aparapi.internal.writer.ScalaParameter.DIRECTION
import com.amd.aparapi.internal.model.Entrypoint
object CodeGenUtil {
def isPrimitive(typeString : String) : Boolean = {
return typeString.equals("I") || typeString.equals("D") || typeString.equals("F") || typeString.equals("B")
}
def isPrimitiveArray(typeString : String) : Boolean = {
return typeString.startsWith("[") && isPrimitive(typeString.substring(1))
}
def getPrimitiveTypeForDescriptor(descString : String) : String = {
if (descString.equals("I")) {
return "int"
} else if (descString.equals("D")) {
return "double"
} else if (descString.equals("F")) {
return "float"
} else if (descString.equals("B")) {
return "char"
} else {
return null
}
}
def getClassForDescriptor(descString : String) : Class[_] = {
if (isPrimitive(descString)) {
return null
} else if (isPrimitiveArray(descString)) {
return null
}
var className : String = getTypeForDescriptor(descString)
return Class.forName(className.trim)
}
def getTypeForDescriptor(descString : String) : String = {
var primitive : String = getPrimitiveTypeForDescriptor(descString)
if (primitive == null) {
primitive = ClassModel.convert(descString, "", true)
}
primitive
}
def getParamObjsFromMethodDescriptor(descriptor : String,
expectedNumParams : Int) : LinkedList[ScalaArrayParameter] = {
val arguments : String = descriptor.substring(descriptor.indexOf('(') + 1,
descriptor.lastIndexOf(')'))
val argumentsArr : Array[String] = arguments.split(",")
assert(argumentsArr.length == expectedNumParams)
val params = new LinkedList[ScalaArrayParameter]()
for (i <- 0 until argumentsArr.length) {
val argumentDesc : String = argumentsArr(i)
params.add(ScalaArrayParameter.createArrayParameterFor(
getTypeForDescriptor(argumentDesc),
getClassForDescriptor(argumentDesc), "in" + i, DIRECTION.IN))
}
params
}
def getReturnObjsFromMethodDescriptor(descriptor : String) : ScalaArrayParameter = {
val returnType : String = descriptor.substring(
descriptor.lastIndexOf(')') + 1)
ScalaArrayParameter.createArrayParameterFor(getTypeForDescriptor(returnType),
getClassForDescriptor(returnType), "out", DIRECTION.OUT)
}
def cleanClassName(className : String, objectMangling : Boolean = true) : String = {
if (className.length() == 1) {
// Primitive descriptor
return className
} else if (className.equals("java.lang.Integer")) {
return "I"
} else if (className.equals("java.lang.Float")) {
return "F"
} else if (className.equals("java.lang.Double")) {
return "D"
} else {
if (objectMangling) {
return "L" + className + ";"
} else {
return className
}
}
}
def createCodeGenConfig(dev_ctx : Long) : java.util.Map[String, String] = {
assert(dev_ctx != -1L)
val config : java.util.Map[String, String] = new java.util.HashMap[String, String]()
config.put(Entrypoint.clDevicePointerSize, Integer.toString(
OpenCLBridge.getDevicePointerSizeInBytes(dev_ctx)))
config
}
def createHardCodedDenseVectorClassModel(hardCodedClassModels : HardCodedClassModels) {
val denseVectorClassModel : DenseVectorClassModel = DenseVectorClassModel.create()
hardCodedClassModels.addClassModelFor(
Class.forName("org.apache.spark.mllib.linalg.DenseVector"),
denseVectorClassModel)
}
def createHardCodedSparseVectorClassModel(hardCodedClassModels : HardCodedClassModels) {
val sparseVectorClassModel : SparseVectorClassModel = SparseVectorClassModel.create()
hardCodedClassModels.addClassModelFor(
Class.forName("org.apache.spark.mllib.linalg.SparseVector"),
sparseVectorClassModel)
}
def createHardCodedTuple2ClassModel(obj : Tuple2[_, _],
hardCodedClassModels : HardCodedClassModels,
param : ScalaArrayParameter) {
val inputClassType1 = obj._1.getClass
val inputClassType2 = obj._2.getClass
val inputClassType1Name = CodeGenUtil.cleanClassName(
inputClassType1.getName)
val inputClassType2Name = CodeGenUtil.cleanClassName(
inputClassType2.getName)
val tuple2ClassModel : Tuple2ClassModel = Tuple2ClassModel.create(
inputClassType1Name, inputClassType2Name, param.getDir != DIRECTION.IN)
hardCodedClassModels.addClassModelFor(Class.forName("scala.Tuple2"), tuple2ClassModel)
param.addTypeParameter(inputClassType1Name,
!CodeGenUtil.isPrimitive(inputClassType1Name))
param.addTypeParameter(inputClassType2Name,
!CodeGenUtil.isPrimitive(inputClassType2Name))
}
}
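// --- Editor's usage sketch (not part of the original file) ---
// Concrete outputs of the descriptor helpers defined above, for a few simple inputs.
object CodeGenUtilExample {
  def main(args: Array[String]): Unit = {
    println(CodeGenUtil.isPrimitive("I"))                    // true
    println(CodeGenUtil.isPrimitiveArray("[D"))              // true
    println(CodeGenUtil.getPrimitiveTypeForDescriptor("F"))  // float
    println(CodeGenUtil.cleanClassName("java.lang.Double"))  // D
    println(CodeGenUtil.cleanClassName("scala.Tuple2"))      // Lscala.Tuple2;
  }
}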
| agrippa/spark-swat | swat/src/main/scala/org/apache/spark/rdd/cl/CodeGenUtil.scala | Scala | bsd-3-clause | 6,772 |
package io.skyfii.mandrill.model
// See https://mandrillapp.com/api/docs/users.JSON.html#method=ping
case class Error(status: String,
code: Int,
name: String,
message: String)
| skyfii/skyfii-mandrill | src/main/scala/io/skyfii/mandrill/model/Error.scala | Scala | apache-2.0 | 228 |
package com.github.morikuni.locest.util
/** An identifiable object.
*
* @tparam Id the type of the identifier
* @tparam Prop the type of the property it carries
*/
trait Entity[Id <: Identifier[_], Prop <: Property] {
val id: Id
val property: Prop
override def hashCode: Int = id.hashCode
override def equals(other: Any): Boolean = other match {
case that: Entity[_, _] => this.id == that.id
case _ => false
}
}
| morikuni/locest | util/src/main/scala/com/github/morikuni/locest/util/Entity.scala | Scala | mit | 426 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.execution.internal.collection.ChunkedArrayStack
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
import scala.collection.mutable
import scala.util.control.NonFatal
private[tail] object IterantFoldLeftL {
/**
* Implementation for `Iterant#foldLeftL`
*/
final def apply[F[_], S, A](source: Iterant[F, A], seed: => S)(op: (S, A) => S)(implicit F: Sync[F]): F[S] = {
F.defer {
var catchErrors = true
try {
// handle exception in the seed
val init = seed
catchErrors = false
new Loop(init, op).apply(source)
} catch {
case e if NonFatal(e) && catchErrors =>
F.raiseError(e)
}
}
}
/**
* Implementation for `Iterant#toListL`
*/
def toListL[F[_], A](source: Iterant[F, A])(implicit F: Sync[F]): F[List[A]] = {
IterantFoldLeftL(source, mutable.ListBuffer.empty[A])((acc, a) => acc += a)
.map(_.toList)
}
private final class Loop[F[_], S, A](seed: S, op: (S, A) => S)(implicit F: Sync[F])
extends Iterant.Visitor[F, A, F[S]] { loop =>
/** Current calculated state. */
private[this] var state = seed
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
// Used in visit(Concat)
private[this] var stackRef: ChunkedArrayStack[F[Iterant[F, A]]] = _
private def stackPush(item: F[Iterant[F, A]]): Unit = {
if (stackRef == null) stackRef = ChunkedArrayStack()
stackRef.push(item)
}
private def stackPop(): F[Iterant[F, A]] = {
if (stackRef != null) stackRef.pop()
else null.asInstanceOf[F[Iterant[F, A]]]
}
private[this] val concatContinue: (S => F[S]) =
state =>
stackPop() match {
case null => F.pure(state)
case xs => xs.flatMap(loop)
}
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
def visit(ref: Next[F, A]): F[S] = {
state = op(state, ref.item)
ref.rest.flatMap(loop)
}
def visit(ref: NextBatch[F, A]): F[S] = {
state = ref.batch.foldLeft(state)(op)
ref.rest.flatMap(loop)
}
def visit(ref: NextCursor[F, A]): F[S] = {
state = ref.cursor.foldLeft(state)(op)
ref.rest.flatMap(loop)
}
def visit(ref: Suspend[F, A]): F[S] =
ref.rest.flatMap(loop)
def visit(ref: Concat[F, A]): F[S] = {
stackPush(ref.rh)
ref.lh.flatMap(loop).flatMap(concatContinue)
}
def visit[R](ref: Scope[F, R, A]): F[S] =
ref.runFold(this)
def visit(ref: Last[F, A]): F[S] = {
state = op(state, ref.item)
F.pure(state)
}
def visit(ref: Halt[F, A]): F[S] =
ref.e match {
case None => F.pure(state)
case Some(e) => F.raiseError(e)
}
def fail(e: Throwable): F[S] =
F.raiseError(e)
}
}
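// --- Editor's note (not part of the original file) ---
// This object backs the public Iterant#foldLeftL and Iterant#toListL operators; through the
// public API the behaviour implemented above looks roughly like this (assuming monix.eval.Task):
//
//   import monix.eval.Task
//   import monix.tail.Iterant
//
//   val sum: Task[Int]       = Iterant[Task].of(1, 2, 3, 4).foldLeftL(0)(_ + _)  // yields 10
//   val all: Task[List[Int]] = Iterant[Task].of(1, 2, 3).toListL                 // yields List(1, 2, 3)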
| monifu/monifu | monix-tail/shared/src/main/scala/monix/tail/internal/IterantFoldLeftL.scala | Scala | apache-2.0 | 3,628 |
/*
* Copyright (C) 2011 <mathieu.Mathieu Leclaire at openmole.org>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package prynt.test
import slick.driver.H2Driver.simple._
object PatientResults extends Table[(String, String)]("PATIENT_RESULTS") {
def patientID = column[String]("PATIENT_ID")
def testResultID = column[String]("TEST_RESULT_ID")
def * = patientID ~ testResultID
}
| mathieuleclaire/prynt | src/main/scala/prynt/test/PatientResuts.scala | Scala | lgpl-3.0 | 993 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package com.komanov.serialization.domain.protos.events
@SerialVersionUID(0L)
final case class PageMetaTagAddedPb(
path: String = "",
name: String = "",
value: String = ""
) extends com.trueaccord.scalapb.GeneratedMessage with com.trueaccord.scalapb.Message[PageMetaTagAddedPb] with com.trueaccord.lenses.Updatable[PageMetaTagAddedPb] {
@transient
private[this] var __serializedSizeCachedValue: Int = 0
private[this] def __computeSerializedValue(): Int = {
var __size = 0
if (path != "") { __size += com.google.protobuf.CodedOutputStream.computeStringSize(1, path) }
if (name != "") { __size += com.google.protobuf.CodedOutputStream.computeStringSize(2, name) }
if (value != "") { __size += com.google.protobuf.CodedOutputStream.computeStringSize(3, value) }
__size
}
final override def serializedSize: Int = {
var read = __serializedSizeCachedValue
if (read == 0) {
read = __computeSerializedValue()
__serializedSizeCachedValue = read
}
read
}
def writeTo(output: com.google.protobuf.CodedOutputStream): Unit = {
{
val __v = path
if (__v != "") {
output.writeString(1, __v)
}
};
{
val __v = name
if (__v != "") {
output.writeString(2, __v)
}
};
{
val __v = value
if (__v != "") {
output.writeString(3, __v)
}
};
}
def mergeFrom(__input: com.google.protobuf.CodedInputStream): com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb = {
var __path = this.path
var __name = this.name
var __value = this.value
var _done__ = false
while (!_done__) {
val _tag__ = __input.readTag()
_tag__ match {
case 0 => _done__ = true
case 10 =>
__path = __input.readString()
case 18 =>
__name = __input.readString()
case 26 =>
__value = __input.readString()
case tag => __input.skipField(tag)
}
}
com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb(
path = __path,
name = __name,
value = __value
)
}
def withPath(__v: String): PageMetaTagAddedPb = copy(path = __v)
def withName(__v: String): PageMetaTagAddedPb = copy(name = __v)
def withValue(__v: String): PageMetaTagAddedPb = copy(value = __v)
def getField(__field: com.google.protobuf.Descriptors.FieldDescriptor): scala.Any = {
__field.getNumber match {
case 1 => {
val __t = path
if (__t != "") __t else null
}
case 2 => {
val __t = name
if (__t != "") __t else null
}
case 3 => {
val __t = value
if (__t != "") __t else null
}
}
}
override def toString: String = com.trueaccord.scalapb.TextFormat.printToUnicodeString(this)
def companion = com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb
}
object PageMetaTagAddedPb extends com.trueaccord.scalapb.GeneratedMessageCompanion[PageMetaTagAddedPb] {
implicit def messageCompanion: com.trueaccord.scalapb.GeneratedMessageCompanion[PageMetaTagAddedPb] = this
def fromFieldsMap(__fieldsMap: Map[com.google.protobuf.Descriptors.FieldDescriptor, scala.Any]): com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb = {
require(__fieldsMap.keys.forall(_.getContainingType() == descriptor), "FieldDescriptor does not match message type.")
val __fields = descriptor.getFields
com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb(
__fieldsMap.getOrElse(__fields.get(0), "").asInstanceOf[String],
__fieldsMap.getOrElse(__fields.get(1), "").asInstanceOf[String],
__fieldsMap.getOrElse(__fields.get(2), "").asInstanceOf[String]
)
}
def descriptor: com.google.protobuf.Descriptors.Descriptor = EventsProto.descriptor.getMessageTypes.get(16)
def messageCompanionForField(__field: com.google.protobuf.Descriptors.FieldDescriptor): com.trueaccord.scalapb.GeneratedMessageCompanion[_] = throw new MatchError(__field)
def enumCompanionForField(__field: com.google.protobuf.Descriptors.FieldDescriptor): com.trueaccord.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__field)
lazy val defaultInstance = com.komanov.serialization.domain.protos.events.PageMetaTagAddedPb(
)
implicit class PageMetaTagAddedPbLens[UpperPB](_l: com.trueaccord.lenses.Lens[UpperPB, PageMetaTagAddedPb]) extends com.trueaccord.lenses.ObjectLens[UpperPB, PageMetaTagAddedPb](_l) {
def path: com.trueaccord.lenses.Lens[UpperPB, String] = field(_.path)((c_, f_) => c_.copy(path = f_))
def name: com.trueaccord.lenses.Lens[UpperPB, String] = field(_.name)((c_, f_) => c_.copy(name = f_))
def value: com.trueaccord.lenses.Lens[UpperPB, String] = field(_.value)((c_, f_) => c_.copy(value = f_))
}
final val PATH_FIELD_NUMBER = 1
final val NAME_FIELD_NUMBER = 2
final val VALUE_FIELD_NUMBER = 3
}
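// --- Editor's usage sketch (not part of the original file; field values are made up) ---
// The generated copy-based "with*" API above is used like this:
object PageMetaTagAddedPbExample {
  def main(args: Array[String]): Unit = {
    val pb = PageMetaTagAddedPb()
      .withPath("/index.html")
      .withName("description")
      .withValue("landing page")
    println(pb.serializedSize > 0) // true: all three string fields are non-empty
  }
}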
| dkomanov/scala-serialization | scala-serialization/src/main/scala/com/komanov/serialization/domain/protos/events/PageMetaTagAddedPb.scala | Scala | mit | 5,197 |
package funpep.server
import scala.concurrent.ExecutionContext.Implicits._
import scalaz._
import scalaz.concurrent._
import scalaz.stream._
import scalaz.syntax.functor._
import org.http4s._
import org.http4s.server.middleware._
import org.http4s.server.blaze._
import net.bmjames.opts.{ execParser, info }
import funpep._
import funpep.data._
import funpep.util.functions._
import funpep.util.types._
import funpep.util.ops.foldable._
import service._
object FunpepServer {
def main(args: Array[String]): Unit = {
parseArgs(args).fold(showErrors, runFunpep).run[Task].run
}
def showErrors(errors: NonEmptyList[ErrorMsg]): Process[Task, Unit] =
AsyncP {
val errStr = errors.mkString(identity, "\n")
System.err.println(errStr)
}
// using mapK and flatMap in here is even uglier than this ".apply" stuff
def runFunpep(options: Options): Process[Task, Unit] =
analyzerQueue.apply(options).flatMap[Task, Unit] { queue ⇒
val server = runServer(queue).apply(options)
val analyzer = runAnalyzer(queue).apply(options)
server.merge(analyzer)
}
private def parseArgs(args: Array[String]): ValidationNel[ErrorMsg, Options] =
execParser(args, "funpep-server", info(Options.options))
private def analyzerQueue: KleisliP[Options, AnalyzerQueue[AminoAcid]] =
KleisliP { options ⇒
implicit val strategy = fixedPoolStrategy(options.numThreads)
AnalyzerQueue(Analyzer[AminoAcid](options.database))
}
private def httpRouter(queue: AnalyzerQueue[AminoAcid]): HttpService =
RouterService.service(
s ⇒ CORS(AutoSlash(s)),
AnalyzerService(queue),
DatasetService()
)
private def runServer(queue: AnalyzerQueue[AminoAcid]): KleisliP[Options, Unit] =
KleisliP { options ⇒
val router = httpRouter(queue)
val server = BlazeBuilder.bindHttp(options.httpPort).mountService(router, options.httpPath)
AsyncP(server.run.awaitShutdown)
}
private def runAnalyzer(queue: AnalyzerQueue[AminoAcid]): KleisliP[Options, Unit] =
KleisliP { options ⇒ queue.analyzerLoop.apply(options.clustalo).void }
}
| agjacome/funpep | server/src/main/scala/funpep/server/FunpepServer.scala | Scala | mit | 2,142 |
package org.scalatest.examples.suite.oneargtest
import org.scalatest.fixture
import java.io._
class ExampleSuite extends fixture.Suite {
case class FixtureParam(file: File, writer: FileWriter)
def withFixture(test: OneArgTest) = {
// create the fixture
val file = File.createTempFile("hello", "world")
val writer = new FileWriter(file)
val theFixture = FixtureParam(file, writer)
try {
writer.write("ScalaTest is ") // set up the fixture
withFixture(test.toNoArgTest(theFixture)) // "loan" the fixture to the test
}
finally writer.close() // clean up the fixture
}
def `test: testing should be easy` (f: FixtureParam) {
f.writer.write("easy!")
f.writer.flush()
assert(f.file.length === 18)
}
def `test: testing should be fun` (f: FixtureParam) {
f.writer.write("fun!")
f.writer.flush()
assert(f.file.length === 17)
}
}
| svn2github/scalatest | examples/src/main/scala/org/scalatest/examples/suite/oneargtest/ExampleSuite.scala | Scala | apache-2.0 | 905 |
package sledtr.section
import org.scalatest.FunSuite
import java.text.SimpleDateFormat
class Ch2SectionTest extends FunSuite {
test("レスをバラバラにする") {
var m = Ch2Res("", 0, "俺より強い名無しに会いにいく<><>2010/09/25(土) 20:25:03 ID:G3NorCXq0<> おいw<>")
var f = new SimpleDateFormat("yyyy/MM/dd kk:mm:ss")
(m: @unchecked) match {
case Some(r) =>
expect("俺より強い名無しに会いにいく") { r.name }
expect("G3NorCXq0") { r.id }
expect("2010/09/25 20:25:03") { f.format(r.date) }
expect("おいw") { r.text }
}
m = Ch2Res("", 0, "俺より強い名無しに会いにいく<>sage<>2010/09/25(土) 20:34:34 ID:3a/O5zYI0<> ネモ明日十一試合かよw <> ")
(m: @unchecked) match {
case Some(r) =>
expect("俺より強い名無しに会いにいく") { r.name }
expect("3a/O5zYI0") { r.id }
expect("2010/09/25 20:34:34") { f.format(r.date) }
expect("ネモ明日十一試合かよw") { r.text }
}
}
}
| K2Da/sledtr | src/test/scala/sledtr/section/Ch2SectionTest.scala | Scala | gpl-3.0 | 1,073 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.workbench.lift.snippet
import java.util.logging.Logger
import de.fuberlin.wiwiss.silk.workspace.User
import de.fuberlin.wiwiss.silk.workbench.evaluation.CurrentGenerateLinksTask
import de.fuberlin.wiwiss.silk.workbench.lift.util.{JS, SelectField, Dialog}
import de.fuberlin.wiwiss.silk.config.RuntimeConfig
import de.fuberlin.wiwiss.silk.execution.GenerateLinksTask
object GenerateLinksDialog extends Dialog {
override val title = "Generate Links"
private val noOutputName = "Display only"
private val output = SelectField("Output", "The output where the generated links are written", () => noOutputName :: User().project.outputModule.tasks.map(_.name.toString).toList, () => noOutputName)
override val fields = output :: Nil
override protected def dialogParams = ("autoOpen" -> "false") :: ("width" -> "400") :: ("modal" -> "true") :: Nil
private val logger = Logger.getLogger(getClass.getName)
/** We use a custom runtime config */
private val runtimeConfig = RuntimeConfig(useFileCache = false, partitionSize = 300, generateLinksWithEntities = true)
override protected def onSubmit() = {
val generateLinksTask =
new GenerateLinksTask(
sources = User().project.sourceModule.tasks.map(_.source),
linkSpec = User().linkingTask.linkSpec,
outputs = if(output.value == noOutputName) Traversable.empty else Traversable(User().project.outputModule.task(output.value).output),
runtimeConfig = runtimeConfig
)
CurrentGenerateLinksTask() = generateLinksTask
generateLinksTask.runInBackground()
JS.Empty
}
}
| fusepoolP3/p3-silk | silk-workbench-outdated/src/main/scala/de/fuberlin/wiwiss/silk/workbench/lift/snippet/GenerateLinksDialog.scala | Scala | apache-2.0 | 2,187 |
package com.github.sparkfy.recovery
import com.github.sparkfy.serializer.Serializer
/**
* Implementations of this class can be plugged in as an alternative recovery mode.
*/
abstract class RecoveryFactory(conf: Map[String, String], serializer: Serializer) {
/**
* PersistenceEngine defines how the persistent data (information about workers, drivers, etc.)
* is handled for recovery.
*
*/
def createPersistenceEngine(): PersistenceEngine
/**
* Create an instance of LeaderAgent that decides who gets elected as master.
*/
def createLeaderElectionAgent(master: LeaderElectable): LeaderElectionAgent
}
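// --- Editor's sketch (not part of the original file) ---
// A pluggable recovery mode subclasses RecoveryFactory; the bodies are left as stubs here
// because concrete PersistenceEngine / LeaderElectionAgent implementations are project-specific.
class StubRecoveryFactory(conf: Map[String, String], serializer: Serializer)
  extends RecoveryFactory(conf, serializer) {
  override def createPersistenceEngine(): PersistenceEngine = ???
  override def createLeaderElectionAgent(master: LeaderElectable): LeaderElectionAgent = ???
}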
| sparkfy/sparkfy | sparkfy-common/src/main/scala/com/github/sparkfy/recovery/RecoveryFactory.scala | Scala | apache-2.0 | 626 |
package org.elastic.rest.scala.driver
import org.elastic.rest.scala.driver.RestBase._
import org.elastic.rest.scala.driver.RestBaseRuntimeTyped._
import org.elastic.rest.scala.driver.utils.NoJsonHelpers._
import org.elastic.rest.scala.driver.test_utils.SampleResources._
import org.elastic.rest.scala.driver.test_utils.SampleResourcesTyped._
import org.elastic.rest.scala.driver.utils.MockRestDriver
import utest._
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
object RestBaseAsyncTests extends TestSuite {
val tests = this {
"Check JSON serialization" - {
val handler: PartialFunction[BaseDriverOp, Future[String]] = {
case BaseDriverOp(`/$resource`(index), "GET", None, List(), List()) =>
Future.successful(s"""{"index":"$index"}""")
}
implicit val mockDriver = new MockRestDriver(handler)
val res = Await.result(`/$resource`("test").read().execJ(), Duration("1 second"))
res ==> MockJson("""{"index":"test"}""")
}
"Check JSON de-serialization" - {
val handler: PartialFunction[BaseDriverOp, Future[String]] = {
case BaseDriverOp(`/$resource`(index), method @ _, Some(s @ _), List(), List()) =>
Future.successful(s"$method: $s")
case BaseDriverOp(`/$resource`(index), method @ _, None, List(), List()) =>
Future.successful(s"$method")
case BaseDriverOp(`/$resource_ut`(index), method @ _, None, List(), List()) =>
Future.successful(s"$method")
case BaseDriverOp(`/$resource_ut`(index), method @ _, Some(s @ _), List(), List()) =>
Future.successful(s"$method: $s")
case BaseDriverOp(`/$resource_tu`(index), method @ _, Some(s @ _), List(), List()) =>
Future.successful(s"$method: $s")
case BaseDriverOp(`/$resource_tt`(index), method @ _, Some(s @ _), List(), List()) =>
Future.successful(s"$method: $s")
}
implicit val mockDriver = new MockRestDriver(handler)
// Coverage test for BaseDriverOps and related (resultS / execS / resultJ / execJ / result / exec)
// Read with data
{
val expected = "GET: test"
val res_ss = `/$resource`("test").readS("test").resultS(Duration("1 second")).get
res_ss ==> expected
val res_js = `/$resource`("test").readJ(MockJson("test")).resultS(Duration("1 second")).get
res_js ==> expected
val res_sj = Await.result(`/$resource`("test").readS("test").execS(), Duration("1 second"))
res_sj ==> expected
val res_jj = Await.result(`/$resource`("test").readJ(MockJson("test")).execJ(), Duration("1 second"))
res_jj.s ==> expected
val res_st = `/$resource_ut`("test").readS("test").result(Duration("1 second")).get
res_st.s ==> expected
val res_jt = Await.result(`/$resource_ut`("test").readJ(MockJson("test")).exec(), Duration("1 second"))
res_jt.s ==> expected
val res_ts = `/$resource_tt`("test").read(InWrapper("test")).resultS(Duration("1 second")).get
res_ts ==> expected
val res_tj = `/$resource_tt`("test").read(InWrapper("test")).resultJ(Duration("1 second")).get
res_tj.s ==> expected
val res_tt = `/$resource_tt`("test").read(InWrapper("test")).result(Duration("1 second")).get
res_tt.s ==> expected
}
// Send
{
val expected = "POST: test"
val res_ss = `/$resource`("test").sendS("test").resultS(Duration("1 second")).get
res_ss ==> expected
val res_js = `/$resource`("test").sendJ(MockJson("test")).resultS(Duration("1 second")).get
res_js ==> expected
val res_sj = Await.result(`/$resource`("test").sendS("test").execS(), Duration("1 second"))
res_sj ==> expected
val res_jj = Await.result(`/$resource`("test").sendJ(MockJson("test")).execJ(), Duration("1 second"))
res_jj.s ==> expected
val res_st = `/$resource_ut`("test").sendS("test").result(Duration("1 second")).get
res_st.s ==> expected
val res_jt = Await.result(`/$resource_ut`("test").sendJ(MockJson("test")).exec(), Duration("1 second"))
res_jt.s ==> expected
val res_ts = `/$resource_tt`("test").send(InWrapper("test")).resultS(Duration("1 second")).get
res_ts ==> expected
val res_tj = `/$resource_tt`("test").send(InWrapper("test")).resultJ(Duration("1 second")).get
res_tj.s ==> expected
val res_tt = `/$resource_tt`("test").send(InWrapper("test")).result(Duration("1 second")).get
res_tt.s ==> expected
}
// Write
{
val expected = "PUT: test"
val res_ss = `/$resource`("test").writeS("test").resultS(Duration("1 second")).get
res_ss ==> expected
val res_js = `/$resource`("test").writeJ(MockJson("test")).resultS(Duration("1 second")).get
res_js ==> expected
val res_sj = Await.result(`/$resource`("test").writeS("test").execS(), Duration("1 second"))
res_sj ==> expected
val res_jj = Await.result(`/$resource`("test").writeJ(MockJson("test")).execJ(), Duration("1 second"))
res_jj.s ==> expected
val res_st = `/$resource_ut`("test").writeS("test").result(Duration("1 second")).get
res_st.s ==> expected
val res_jt = Await.result(`/$resource_ut`("test").writeJ(MockJson("test")).exec(), Duration("1 second"))
res_jt.s ==> expected
val res_ts = `/$resource_tt`("test").write(InWrapper("test")).resultS(Duration("1 second")).get
res_ts ==> expected
val res_tj = `/$resource_tt`("test").write(InWrapper("test")).resultJ(Duration("1 second")).get
res_tj.s ==> expected
val res_tt = `/$resource_tt`("test").write(InWrapper("test")).result(Duration("1 second")).get
res_tt.s ==> expected
}
// Delete with data
{
val expected = "DELETE: test"
val res_ss = `/$resource`("test").deleteS("test").resultS(Duration("1 second")).get
res_ss ==> expected
val res_js = `/$resource`("test").deleteJ(MockJson("test")).resultS(Duration("1 second")).get
res_js ==> expected
val res_sj = Await.result(`/$resource`("test").deleteS("test").execS(), Duration("1 second"))
res_sj ==> expected
val res_jj = Await.result(`/$resource`("test").deleteJ(MockJson("test")).execJ(), Duration("1 second"))
res_jj.s ==> expected
val res_st = `/$resource_ut`("test").deleteS("test").result(Duration("1 second")).get
res_st.s ==> expected
val res_jt = Await.result(`/$resource_ut`("test").deleteJ(MockJson("test")).exec(), Duration("1 second"))
res_jt.s ==> expected
val res_ts = `/$resource_tt`("test").delete(InWrapper("test")).resultS(Duration("1 second")).get
res_ts ==> expected
val res_tj = `/$resource_tt`("test").delete(InWrapper("test")).resultJ(Duration("1 second")).get
res_tj.s ==> expected
val res_tt = `/$resource_tt`("test").delete(InWrapper("test")).result(Duration("1 second")).get
res_tt.s ==> expected
}
// (Check the operations without data while we're here)
// Check
{
val expected = "HEAD"
val res_s = Await.result(`/$resource`("test").check().execS(), Duration("1 second"))
val res_j = Await.result(`/$resource`("test").check().execJ(), Duration("1 second"))
val res_t = Await.result(`/$resource_ut`("test").check().exec(), Duration("1 second"))
res_s ==> expected
res_j.s ==> expected
res_t.s ==> expected
}
// Read
{
val expected = "GET"
val res_s = Await.result(`/$resource`("test").read().execS(), Duration("1 second"))
val res_j = Await.result(`/$resource`("test").read().execJ(), Duration("1 second"))
val res_t = Await.result(`/$resource_ut`("test").read().exec(), Duration("1 second"))
res_s ==> expected
res_j.s ==> expected
res_t.s ==> expected
}
// Send
{
val expected = "POST"
val res_s = Await.result(`/$resource`("test").send().execS(), Duration("1 second"))
val res_j = Await.result(`/$resource`("test").send().execJ(), Duration("1 second"))
val res_t = Await.result(`/$resource_ut`("test").send().exec(), Duration("1 second"))
res_s ==> expected
res_j.s ==> expected
res_t.s ==> expected
}
// Write
{
val expected = "PUT"
val res_s = Await.result(`/$resource`("test").write().execS(), Duration("1 second"))
val res_j = Await.result(`/$resource`("test").write().execJ(), Duration("1 second"))
val res_t = Await.result(`/$resource_ut`("test").write().exec(), Duration("1 second"))
res_s ==> expected
res_j.s ==> expected
res_t.s ==> expected
}
// Delete
{
val expected = "DELETE"
val res_s = Await.result(`/$resource`("test").delete().execS(), Duration("1 second"))
val res_j = Await.result(`/$resource`("test").delete().execJ(), Duration("1 second"))
val res_t = Await.result(`/$resource_ut`("test").delete().exec(), Duration("1 second"))
res_s ==> expected
res_j.s ==> expected
res_t.s ==> expected
}
}
}
}
| Alex-At-Home/rest_client_library | rest_scala_core/jvm/src/test/scala/org/elastic/rest/scala/driver/RestBaseAsyncTests.scala | Scala | apache-2.0 | 9,567 |
package nz.ubermouse.hummingbirdsyncer.printers
import nz.ubermouse.hummingbirdsyncer.api.Trakt.TraktActivity
import com.typesafe.scalalogging.slf4j.Logging
/**
* Created by Taylor on 18/07/14.
*/
object TraktActivityPrinter extends Logging {
def apply(activities:List[TraktActivity]) {
for(activity <- activities)
logger.trace(stringRepresentation(activity))
}
def apply(activity:TraktActivity):Unit = apply(List(activity))
def stringRepresentation(activity: TraktActivity) = {
s"""
| ${activity.show.title} (${activity.show.slug})
- Tvdb Id: ${activity.show.tvdb_id}
- Season: ${activity.episode.season}
- Episode: ${activity.episode.episode}
""".stripMargin
}
}
| UberMouse/TraktToHummingbird | src/main/scala/nz/ubermouse/hummingbirdsyncer/printers/TraktActivityPrinter.scala | Scala | mit | 730 |
/*
* The MIT License
*
* Copyright (c) 2016 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
*/
package com.fulcrumgenomics.vcf
import java.util.NoSuchElementException
import com.fulcrumgenomics.FgBioDef._
import com.fulcrumgenomics.fasta.SequenceDictionary
import htsjdk.samtools.util._
import htsjdk.variant.variantcontext.VariantContext
import htsjdk.variant.vcf.VCFFileReader
import scala.annotation.tailrec
object ByIntervalListVariantContextIterator {
/**
* Creates an iterator over variant contexts that overlap any interval in an interval list.
*
* All variants will be read in and compared to the intervals.
*/
def apply(iterator: Iterator[VariantContext],
intervalList: IntervalList,
dict: SequenceDictionary): Iterator[VariantContext] = {
new OverlapDetectionVariantContextIterator(iterator, intervalList, dict)
}
/**
* Creates an iterator over variant contexts that overlap any interval in an interval list.
*
* The interval lists should be sorted and uniqued.
*
* The VCF will be queried when moving to the next variant context, and so may be quite slow
* if we jump around the VCF a lot.
*/
def apply(reader: VCFFileReader,
intervalList: IntervalList): Iterator[VariantContext] = {
new IndexQueryVariantContextIterator(reader, intervalList)
}
}
private class OverlapDetectionVariantContextIterator(val iter: Iterator[VariantContext],
val intervalList: IntervalList,
val dict: SequenceDictionary)
extends Iterator[VariantContext] {
require(dict != null)
private val intervals = intervalList.uniqued(false).iterator().buffered
private var nextVariantContext: Option[VariantContext] = None
this.advance()
def hasNext: Boolean = this.nextVariantContext.isDefined
def next(): VariantContext = {
this.nextVariantContext match {
case None => throw new NoSuchElementException("Called next when hasNext is false")
case Some(ctx) => yieldAndThen(ctx) { this.nextVariantContext = None; this.advance() }
}
}
def remove(): Unit = throw new UnsupportedOperationException
private def contigIdx(locatable: Locatable): Int = dict(locatable.getContig).index
private def coordLessThan(left: Locatable, right: Locatable): Boolean = {
val leftContigIdx = contigIdx(left)
val rightContigIdx = contigIdx(right)
leftContigIdx < rightContigIdx || (leftContigIdx == rightContigIdx && left.getEnd < right.getStart)
}
private def overlaps(left: Locatable, right: Locatable): Boolean = {
contigIdx(left) == contigIdx(right) && CoordMath.overlaps(left.getStart, left.getEnd, right.getStart, right.getEnd)
}
@tailrec
private def advance(): Unit = {
if (this.iter.isEmpty) return
val ctx = iter.next()
// Move to the interval that overlaps or is past this context...
while (intervals.hasNext && coordLessThan(intervals.head, ctx)) {
intervals.next()
}
if (intervals.isEmpty) { } // no more intervals
else if (overlaps(ctx, intervals.head)) { nextVariantContext = Some(ctx) } // overlaps!!!
else if (iter.isEmpty) { } // no more variants
else { this.advance() } // move to the next context
}
}
/** NB: if a variant overlaps multiple intervals, only returns it once. */
private class IndexQueryVariantContextIterator(private val reader: VCFFileReader, intervalList: IntervalList)
extends Iterator[VariantContext] {
private var iter: Option[Iterator[VariantContext]] = None
private val intervals = intervalList.iterator()
private var previousInterval: Option[Interval] = None
this.advance()
def hasNext: Boolean = {
this.iter.exists(_.hasNext)
}
def next(): VariantContext = {
this.iter match {
case None => throw new NoSuchElementException("Called next when hasNext is false")
case Some(i) => yieldAndThen(i.next()) { advance() }
}
}
def remove(): Unit = throw new UnsupportedOperationException
private def overlapsInterval(ctx: VariantContext, interval: Interval): Boolean = {
if (!ctx.getContig.equals(interval.getContig)) false // different contig
else if (interval.getStart <= ctx.getStart && ctx.getStart <= interval.getEnd) true // start falls within this interval, count it
else if (ctx.getStart < interval.getStart && interval.getEnd <= ctx.getEnd) true // the variant encloses the interval
else false
}
private def advance(): Unit = {
while (!this.iter.exists(_.hasNext) && this.intervals.hasNext) {
val interval = this.intervals.next()
// Validate sort order of the intervals
previousInterval.foreach { lastInterval =>
val lastIntervalIdx = intervalList.getHeader.getSequenceIndex(lastInterval.getContig)
val intervalIdx = intervalList.getHeader.getSequenceIndex(interval.getContig)
if (intervalIdx < lastIntervalIdx || (lastIntervalIdx == intervalIdx && interval.getStart <= lastInterval.getEnd)) {
throw new IllegalStateException(s"Intervals are out of order: '$lastInterval' and '$interval'")
}
}
val lastInterval = previousInterval
previousInterval = Some(interval)
// NB: for variants that span an indel, make sure it was not output in the previous interval
val iter = this.reader.query(interval.getContig, interval.getStart, interval.getEnd)
.filter { ctx => overlapsInterval(ctx, interval) }
.filter { ctx => lastInterval.forall { interval => !overlapsInterval(ctx, interval) } }
this.iter = Some(iter)
}
}
}
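// --- Editor's usage sketch (not part of the original file; file names are placeholders) ---
// The two factory overloads above are chosen like this:
//
//   import java.io.File
//   import htsjdk.samtools.util.IntervalList
//   import htsjdk.variant.vcf.VCFFileReader
//
//   val reader    = new VCFFileReader(new File("calls.vcf.gz"))
//   val intervals = IntervalList.fromFile(new File("targets.interval_list"))
//
//   // index-backed: queries the VCF once per interval
//   val byQuery = ByIntervalListVariantContextIterator(reader, intervals)
//
//   // streaming overlap detection: reads every record and needs a SequenceDictionary
//   // (built from the VCF/reference header; the exact call depends on the fgbio version)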
| fulcrumgenomics/fgbio | src/main/scala/com/fulcrumgenomics/vcf/ByIntervalListVariantContextIterator.scala | Scala | mit | 6,710 |
package main.scala.proofdisplay
import org.scalajs.dom
import org.scalajs.dom.html
import dom.document
import scala.scalajs.js.annotation.JSExportTopLevel
import proofdisplay.ProverStub
object TestObject {
def main(args: Array[String]): Unit = {
// appendPar(document.body, "$$\\\\mathcal{test}$$")
}
def appendPar(targetNode: dom.Node, text: String) = {
val parNode = document.createElement("p")
val textNode = document.createTextNode(text)
parNode.appendChild(textNode)
targetNode.appendChild(parNode)
}
def updateText(targetNode: dom.Node, text: String) = {
targetNode.textContent = text
}
@JSExportTopLevel("search")
def search(): Unit = {
val input1 = document.getElementById("goal_id").asInstanceOf[html.Input]
val goal = input1.value
val input2 = document.getElementById("prem_id").asInstanceOf[html.Input]
val premises = input2.value
val splitPrem = premises.split(',')
//pass input to proof facade for processing
//fastOptJS fails on this - 'linking error'
val result = ProverStub.prove(goal, splitPrem:_*)
//but not this
//val result = ProverStub.proveStub(goal, splitPrem:_*)
updateText(document.getElementById("top"), result)
}
}
| bgroenks96/PropLogic | webx/src/main/scala/proofdisplay/TestObject.scala | Scala | mit | 1,263 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.toad.config
import io.vertx.core.Vertx
import monix.execution.Scheduler.{global => scheduler}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable
import scala.collection.mutable.Queue
import scala.concurrent.duration.DurationInt
import scala.jdk.CollectionConverters.IterableHasAsScala
/**
* Configures and initializes the template / routing engines.
* Please see the i.t.t.config package object for important implicits in scope, including:
*   - template engines (Jade / Mustache etc.) via i.t.t.a.Actors
*   - the Akka actor system
*   - the Vertx engine
*   - Camel routing
* Vertx Scala integration isn't quite there for 3.x, so we'll do that portion in Java until then.
*/
object Bootstrap {
val logger: Logger = LoggerFactory.getLogger(getClass.getSimpleName)
logger.info(s"ip: $serverIp port: $serverPort @ $hostName")
val serverInfo = ServerInfo(serverIp, serverPort, hostName)
def init(): Unit = {
logger.info("Bootstrapping")
import Implicits._
// slowInit()
}
// Spin up Vertx
// Let's create a simple vertx route
// val router = Router.router(vertx)
//
// // Bind "/" to our hello message.
// router.route("/")
// .handler(
// new Handler[RoutingContext] {
// override def handle(ctx: RoutingContext) {
// ctx.response().putHeader("content-type", "text/html").end("<h1>Hello from my first Vert.x 3 application</h1>")
// }
// })
// vertx.deployVerticle("io.truthencode.toad.verticle.MyFirstVerticle")
// val rh = requestHandler {
// new Handler[HttpServerRequest] {
// override def handle(httpServerRequest: HttpServerRequest): Unit = {
// router.accept(httpServerRequest)
// }
// }
// }
// val vertx = {
// val options = new VertxOptions().setClusterManager(mgr)
// Vertx.clusteredVertx(options, (evt: AsyncResult[Vertx]) => {
// if (evt.succeeded()) {
// logger.info("Vertx cluster successfully started")
// val v = evt.result()
// v.deployVerticle(new SimpleScalaVerticle, (ar2: AsyncResult[String]) => {
// if (ar2.succeeded())
// logger.info("We have Verticle liftoff :)")
// else {
// logger.error("Verticle-ly challenged!", ar2.cause())
// }
// })
// } else {
// logger.info("Failed to initialize Vertx cluster")
// }
// })
// Vertx.vertx()
// }
// vertx.createHttpServer().requestHandler {
// new Handler[HttpServerRequest] {
// override def handle(httpServerRequest: HttpServerRequest): Unit = {
// router.accept(httpServerRequest)
// }
// }
// }.listen(serverPort.toInt, serverIp, (ar: AsyncResult[HttpServer]) => {
// if (ar.succeeded()) {
// logger.info("Actually listening now")
//
// }
// else {
// logger.error("Failed to attach Vert.X", ar.cause())
// }
//
// })
// Socko Webserver routing (should migrate to camel routing to vertx)
/**
* val routes = Routing.Routes val webServer = new WebServer(WebServerConfig("OpenShift", serverIp, serverPort.toInt),
* routes, system) Runtime.getRuntime.addShutdownHook(new Thread { override def run { vertx.close() webServer.stop() }
* }) webServer.start()
*/
/**
* Displays configured ip, port and hostname to info logger.
*/
def status(): Unit = {
logger.info(s"server configured for @ $serverIp on port $serverPort, and should be available on $hostName")
}
}
| adarro/ddo-calc | incubating/toad-api/src/main/scala/io/truthencode/toad/config/Bootstrap.scala | Scala | apache-2.0 | 4,202 |
package rpgboss.editor.dialog.db
import rpgboss.editor._
import rpgboss.editor.uibase._
import rpgboss.editor.dialog.db.components._
import rpgboss.editor.uibase.SwingUtils._
import scala.swing._
import scala.swing.event._
import rpgboss.editor.dialog._
import rpgboss.model._
import rpgboss.model.Constants._
import net.java.dev.designgridlayout._
import rpgboss.editor.resourceselector._
import rpgboss.editor.Internationalized._
class SkillsPanel(
owner: Window,
sm: StateMaster,
val dbDiag: DatabaseDialog)
extends RightPaneArrayDatabasePanel(
owner,
dbDiag.model.enums.skills) {
def panelName = getMessage("Skills")
def newDefaultInstance() = Skill()
def editPaneForItem(idx: Int, model: Skill) = {
new BoxPanel(Orientation.Horizontal) with DisposableComponent {
val normalFields = new DesignGridPanel {
val fName = textField(
model.name,
v => {
model.name = v
refreshModel()
})
val fScope = enumIdCombo(Scope)(model.scopeId, model.scopeId = _)
val fCost = new NumberSpinner(0, 100, model.cost, model.cost = _)
val fAnimationId = indexedCombo(
dbDiag.model.enums.animations,
model.animationId,
model.animationId = _)
row().grid(lbl(getMessageColon("Name"))).add(fName)
row().grid(lbl(getMessageColon("Scope"))).add(fScope)
row().grid(lbl(getMessageColon("Skill_Point_Cost"))).add(fCost)
row().grid(lbl(getMessageColon("Animation"))).add(fAnimationId)
}
contents += new BoxPanel(Orientation.Vertical) {
contents += normalFields
val damageHelp = new TextArea {
text =
"Damage expressions are JavaScript.\\n" +
"'a' is the attacker and 'b' is the target.\\n\\n" +
"Valid expresions are:\\n" +
"a.atk, a.spd, a.mag, " +
"a.arm, a.mre, a.hp, a.mhp, a.mp, and a.mmp. \\n\\n" +
"Same with b.atk, b.spd, etc.\\n\\n" +
"Examples:\\n" +
"a.atk*1.2\\n" +
"Math.max(a.atk - b.def, 0)\\n" +
"100 + a.mag"
maximumSize = new Dimension(300, 300)
lineWrap = true
wordWrap = true
editable = false
}
contents += damageHelp
}
contents += new BoxPanel(Orientation.Vertical) {
val effectPanel = new EffectPanel(owner, dbDiag, model.effects,
model.effects = _,
EffectContext.Skill)
val damagePanel =
new DamageFormulaArrayPanel(dbDiag, model.damages, model.damages = _)
contents += effectPanel
contents += damagePanel
}
}
}
override def onListDataUpdate() = {
dbDiag.model.enums.skills = dataAsArray
}
}
| DrDub/rpgboss | desktop/src/main/scala/rpgboss/editor/dialog/db/SkillsPanel.scala | Scala | agpl-3.0 | 2,842 |
package at.forsyte.apalache.tla.typecheck.passes
import at.forsyte.apalache.infra.ExceptionAdapter
import at.forsyte.apalache.infra.passes.{Pass, PassOptions, TerminalPassWithTlaModule, WriteablePassOptions}
import at.forsyte.apalache.tla.imp.passes.{SanyParserPass, SanyParserPassImpl}
import com.google.inject.AbstractModule
import com.google.inject.name.Names
class TypeCheckerModule extends AbstractModule {
override def configure(): Unit = {
// the options singleton
bind(classOf[PassOptions])
.to(classOf[WriteablePassOptions])
// exception handler
bind(classOf[ExceptionAdapter])
.to(classOf[EtcTypeCheckerAdapter])
// SanyParserPassImpl is the default implementation of SanyParserPass
bind(classOf[SanyParserPass])
.to(classOf[SanyParserPassImpl])
// and it also the initial pass for PassChainExecutor
bind(classOf[Pass])
.annotatedWith(Names.named("InitialPass"))
.to(classOf[SanyParserPass])
// EtcTypeCheckerPassImpl
bind(classOf[EtcTypeCheckerPass])
.to(classOf[EtcTypeCheckerPassImpl])
// the type checker is the next one after the parser
bind(classOf[Pass])
.annotatedWith(Names.named("AfterParser"))
.to(classOf[EtcTypeCheckerPass])
// the next pass after EtcTypeCheckerPass is the terminal pass
bind(classOf[Pass])
.annotatedWith(Names.named("AfterTypeChecker"))
.to(classOf[TerminalPassWithTlaModule])
}
}
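// --- Editor's usage sketch (not part of the original file) ---
// With Guice, the named bindings above are resolved like this:
//
//   import com.google.inject.{Guice, Key}
//   import com.google.inject.name.Names
//
//   val injector    = Guice.createInjector(new TypeCheckerModule)
//   val initialPass = injector.getInstance(Key.get(classOf[Pass], Names.named("InitialPass")))
//   // initialPass is a SanyParserPass; the "AfterParser" binding then yields an EtcTypeCheckerPass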
| konnov/apalache | tla-types/src/main/scala/at/forsyte/apalache/tla/typecheck/passes/TypeCheckerModule.scala | Scala | apache-2.0 | 1,455 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.objects
case class InsidePolygon(
p1Lat: String,
p1Lng: String,
p2Lat: String,
p2Lng: String,
p3Lat: String,
p3Lng: String
) {
override def toString = s"[$p1Lat,$p1Lng,$p2Lat,$p2Lng,$p3Lat,$p3Lng]"
}
object InsidePolygon {
def apply(
p1Lat: Float,
p1Lng: Float,
p2Lat: Float,
p2Lng: Float,
p3Lat: Float,
p3Lng: Float
): InsidePolygon = {
InsidePolygon(
p1Lat.toString,
p1Lng.toString,
p2Lat.toString,
p2Lng.toString,
p3Lat.toString,
p3Lng.toString
)
}
  }
}
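// --- Editor's usage sketch (not part of the original file) ---
// The Float overload above just stringifies each coordinate before delegating to the case class:
object InsidePolygonExample {
  def main(args: Array[String]): Unit = {
    val triangle = InsidePolygon(1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.5f)
    println(triangle) // [1.0,2.0,3.0,4.0,5.0,6.5]
  }
}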
| algolia/algoliasearch-client-scala | src/main/scala/algolia/objects/InsidePolygon.scala | Scala | mit | 1,756 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.math.{BigDecimal => JavaBigDecimal}
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.unsafe.types.UTF8String.{IntWrapper, LongWrapper}
object Cast {
/**
* Returns true iff we can cast `from` type to `to` type.
*/
def canCast(from: DataType, to: DataType): Boolean = (from, to) match {
case (fromType, toType) if fromType == toType => true
case (NullType, _) => true
case (_, StringType) => true
case (StringType, BinaryType) => true
case (StringType, BooleanType) => true
case (DateType, BooleanType) => true
case (TimestampType, BooleanType) => true
case (_: NumericType, BooleanType) => true
case (StringType, TimestampType) => true
case (BooleanType, TimestampType) => true
case (DateType, TimestampType) => true
case (_: NumericType, TimestampType) => true
case (StringType, DateType) => true
case (TimestampType, DateType) => true
case (StringType, CalendarIntervalType) => true
case (StringType, _: NumericType) => true
case (BooleanType, _: NumericType) => true
case (DateType, _: NumericType) => true
case (TimestampType, _: NumericType) => true
case (_: NumericType, _: NumericType) => true
case (ArrayType(fromType, fn), ArrayType(toType, tn)) =>
canCast(fromType, toType) &&
resolvableNullability(fn || forceNullable(fromType, toType), tn)
case (MapType(fromKey, fromValue, fn), MapType(toKey, toValue, tn)) =>
canCast(fromKey, toKey) &&
(!forceNullable(fromKey, toKey)) &&
canCast(fromValue, toValue) &&
resolvableNullability(fn || forceNullable(fromValue, toValue), tn)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).forall {
case (fromField, toField) =>
canCast(fromField.dataType, toField.dataType) &&
resolvableNullability(
fromField.nullable || forceNullable(fromField.dataType, toField.dataType),
toField.nullable)
}
case (udt1: UserDefinedType[_], udt2: UserDefinedType[_]) if udt1.userClass == udt2.userClass =>
true
case _ => false
}
/**
* Return true if we need to use the `timeZone` information casting `from` type to `to` type.
* The patterns matched reflect the current implementation in the Cast node.
* c.f. usage of `timeZone` in:
* * Cast.castToString
* * Cast.castToDate
* * Cast.castToTimestamp
*/
def needsTimeZone(from: DataType, to: DataType): Boolean = (from, to) match {
case (StringType, TimestampType) => true
case (DateType, TimestampType) => true
case (TimestampType, StringType) => true
case (TimestampType, DateType) => true
case (ArrayType(fromType, _), ArrayType(toType, _)) => needsTimeZone(fromType, toType)
case (MapType(fromKey, fromValue, _), MapType(toKey, toValue, _)) =>
needsTimeZone(fromKey, toKey) || needsTimeZone(fromValue, toValue)
case (StructType(fromFields), StructType(toFields)) =>
fromFields.length == toFields.length &&
fromFields.zip(toFields).exists {
case (fromField, toField) =>
needsTimeZone(fromField.dataType, toField.dataType)
}
case _ => false
}
/**
* Return true iff we may truncate during casting `from` type to `to` type. e.g. long -> int,
* timestamp -> date.
*/
def mayTruncate(from: DataType, to: DataType): Boolean = (from, to) match {
case (from: NumericType, to: DecimalType) if !to.isWiderThan(from) => true
case (from: DecimalType, to: NumericType) if !from.isTighterThan(to) => true
case (from, to) if illegalNumericPrecedence(from, to) => true
case (TimestampType, DateType) => true
case (StringType, to: NumericType) => true
case _ => false
}
private def illegalNumericPrecedence(from: DataType, to: DataType): Boolean = {
val fromPrecedence = TypeCoercion.numericPrecedence.indexOf(from)
val toPrecedence = TypeCoercion.numericPrecedence.indexOf(to)
toPrecedence > 0 && fromPrecedence > toPrecedence
}
def forceNullable(from: DataType, to: DataType): Boolean = (from, to) match {
case (NullType, _) => true
case (_, _) if from == to => false
case (StringType, BinaryType) => false
case (StringType, _) => true
case (_, StringType) => false
case (FloatType | DoubleType, TimestampType) => true
case (TimestampType, DateType) => false
case (_, DateType) => true
case (DateType, TimestampType) => false
case (DateType, _) => true
case (_, CalendarIntervalType) => true
case (_, _: DecimalType) => true // overflow
case (_: FractionalType, _: IntegralType) => true // NaN, infinity
case _ => false
}
private def resolvableNullability(from: Boolean, to: Boolean) = !from || to
}
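// --- Editor's note (not part of the original file) ---
// A few concrete answers from the predicates above, for intuition:
//
//   Cast.canCast(StringType, IntegerType)            // true: strings may cast to numerics
//   Cast.canCast(IntegerType, CalendarIntervalType)  // false: only strings cast to intervals
//   Cast.forceNullable(StringType, IntegerType)      // true: malformed strings become null
//   Cast.mayTruncate(LongType, IntegerType)          // true: numeric precedence decreases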
/**
* Cast the child expression to the target data type.
*
* When cast from/to timezone related types, we need timeZoneId, which will be resolved with
* session local timezone by an analyzer [[ResolveTimeZone]].
*/
@ExpressionDescription(
usage = "_FUNC_(expr AS type) - Casts the value `expr` to the target data type `type`.",
examples = """
Examples:
> SELECT _FUNC_('10' as int);
10
""")
case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String] = None)
extends UnaryExpression with TimeZoneAwareExpression with NullIntolerant {
def this(child: Expression, dataType: DataType) = this(child, dataType, None)
override def toString: String = s"cast($child as ${dataType.simpleString})"
override def checkInputDataTypes(): TypeCheckResult = {
if (Cast.canCast(child.dataType, dataType)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(
s"cannot cast ${child.dataType.simpleString} to ${dataType.simpleString}")
}
}
override def nullable: Boolean = Cast.forceNullable(child.dataType, dataType) || child.nullable
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
// When this cast involves TimeZone, it's only resolved if the timeZoneId is set;
// Otherwise behave like Expression.resolved.
override lazy val resolved: Boolean =
childrenResolved && checkInputDataTypes().isSuccess && (!needsTimeZone || timeZoneId.isDefined)
private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)
// [[func]] assumes the input is no longer null because eval already does the null check.
@inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
// UDFToString
private[this] def castToString(from: DataType): Any => Any = from match {
case BinaryType => buildCast[Array[Byte]](_, UTF8String.fromBytes)
case DateType => buildCast[Int](_, d => UTF8String.fromString(DateTimeUtils.dateToString(d)))
case TimestampType => buildCast[Long](_,
t => UTF8String.fromString(DateTimeUtils.timestampToString(t, timeZone)))
case _ => buildCast[Any](_, o => UTF8String.fromString(o.toString))
}
// BinaryConverter
private[this] def castToBinary(from: DataType): Any => Any = from match {
case StringType => buildCast[UTF8String](_, _.getBytes)
}
// UDFToBoolean
private[this] def castToBoolean(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
if (StringUtils.isTrueString(s)) {
true
} else if (StringUtils.isFalseString(s)) {
false
} else {
null
}
})
case TimestampType =>
buildCast[Long](_, t => t != 0)
case DateType =>
// Hive would return null when cast from date to boolean
buildCast[Int](_, d => null)
case LongType =>
buildCast[Long](_, _ != 0)
case IntegerType =>
buildCast[Int](_, _ != 0)
case ShortType =>
buildCast[Short](_, _ != 0)
case ByteType =>
buildCast[Byte](_, _ != 0)
case DecimalType() =>
buildCast[Decimal](_, !_.isZero)
case DoubleType =>
buildCast[Double](_, _ != 0)
case FloatType =>
buildCast[Float](_, _ != 0)
}
// TimestampConverter
private[this] def castToTimestamp(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, utfs => DateTimeUtils.stringToTimestamp(utfs, timeZone).orNull)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0)
case LongType =>
buildCast[Long](_, l => longToTimestamp(l))
case IntegerType =>
buildCast[Int](_, i => longToTimestamp(i.toLong))
case ShortType =>
buildCast[Short](_, s => longToTimestamp(s.toLong))
case ByteType =>
buildCast[Byte](_, b => longToTimestamp(b.toLong))
case DateType =>
buildCast[Int](_, d => DateTimeUtils.daysToMillis(d, timeZone) * 1000)
// TimestampWritable.decimalToTimestamp
case DecimalType() =>
buildCast[Decimal](_, d => decimalToTimestamp(d))
// TimestampWritable.doubleToTimestamp
case DoubleType =>
buildCast[Double](_, d => doubleToTimestamp(d))
// TimestampWritable.floatToTimestamp
case FloatType =>
buildCast[Float](_, f => doubleToTimestamp(f.toDouble))
}
private[this] def decimalToTimestamp(d: Decimal): Long = {
(d.toBigDecimal * 1000000L).longValue()
}
private[this] def doubleToTimestamp(d: Double): Any = {
if (d.isNaN || d.isInfinite) null else (d * 1000000L).toLong
}
// converting seconds to us
private[this] def longToTimestamp(t: Long): Long = t * 1000000L
// converting us to seconds
private[this] def timestampToLong(ts: Long): Long = math.floor(ts.toDouble / 1000000L).toLong
// converting us to seconds in double
private[this] def timestampToDouble(ts: Long): Double = {
ts / 1000000.0
}
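  // Illustrative sketch (not part of the original source): internal timestamps are microseconds
  // since the epoch, so for example
  //   longToTimestamp(5L)         == 5000000L  // 5 seconds -> 5,000,000 us
  //   timestampToLong(5000000L)   == 5L        // floor back to whole seconds
  //   timestampToDouble(1500000L) == 1.5       // fractional seconds survive as a double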
// DateConverter
private[this] def castToDate(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s).orNull)
case TimestampType =>
      // Throw away any precision finer than seconds, following Hive.
// Timestamp.nanos is in 0 to 999,999,999, no more than a second.
buildCast[Long](_, t => DateTimeUtils.millisToDays(t / 1000L, timeZone))
}
// IntervalConverter
private[this] def castToInterval(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => CalendarInterval.fromString(s.toString))
}
// LongConverter
private[this] def castToLong(from: DataType): Any => Any = from match {
case StringType =>
val result = new LongWrapper()
buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1L else 0L)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t))
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toLong(b)
}
// IntConverter
private[this] def castToInt(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null)
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1 else 0)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toInt)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b)
}
// ShortConverter
private[this] def castToShort(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toShort(result)) {
result.value.toShort
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toShort else 0.toShort)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toShort)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toShort
}
// ByteConverter
private[this] def castToByte(from: DataType): Any => Any = from match {
case StringType =>
val result = new IntWrapper()
buildCast[UTF8String](_, s => if (s.toByte(result)) {
result.value.toByte
} else {
null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1.toByte else 0.toByte)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToLong(t).toByte)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toInt(b).toByte
}
/**
* Change the precision / scale in a given decimal to those set in `decimalType` (if any),
* returning null if it overflows or modifying `value` in-place and returning it if successful.
*
* NOTE: this modifies `value` in-place, so don't call it on external data.
*/
private[this] def changePrecision(value: Decimal, decimalType: DecimalType): Decimal = {
if (value.changePrecision(decimalType.precision, decimalType.scale)) value else null
}
/**
* Create new `Decimal` with precision and scale given in `decimalType` (if any),
* returning null if it overflows or creating a new `value` and returning it if successful.
*/
private[this] def toPrecision(value: Decimal, decimalType: DecimalType): Decimal =
value.toPrecision(decimalType.precision, decimalType.scale)
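  // Illustrative sketch (not part of the original source), assuming the Decimal semantics above:
  //   changePrecision(Decimal("123.456"), DecimalType(6, 3))  // returns the (mutated) value 123.456
  //   changePrecision(Decimal("123.456"), DecimalType(4, 3))  // returns null: the value needs 6 digits
  // toPrecision behaves the same way but leaves the input Decimal untouched.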
private[this] def castToDecimal(from: DataType, target: DecimalType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try {
changePrecision(Decimal(new JavaBigDecimal(s.toString)), target)
} catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
case DateType =>
buildCast[Int](_, d => null) // date can't cast to decimal in Hive
case TimestampType =>
// Note that we lose precision here.
buildCast[Long](_, t => changePrecision(Decimal(timestampToDouble(t)), target))
case dt: DecimalType =>
b => toPrecision(b.asInstanceOf[Decimal], target)
case t: IntegralType =>
b => changePrecision(Decimal(t.integral.asInstanceOf[Integral[Any]].toLong(b)), target)
case x: FractionalType =>
b => try {
changePrecision(Decimal(x.fractional.asInstanceOf[Fractional[Any]].toDouble(b)), target)
} catch {
case _: NumberFormatException => null
}
}
// DoubleConverter
private[this] def castToDouble(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try s.toString.toDouble catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1d else 0d)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t))
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toDouble(b)
}
// FloatConverter
private[this] def castToFloat(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => try s.toString.toFloat catch {
case _: NumberFormatException => null
})
case BooleanType =>
buildCast[Boolean](_, b => if (b) 1f else 0f)
case DateType =>
buildCast[Int](_, d => null)
case TimestampType =>
buildCast[Long](_, t => timestampToDouble(t).toFloat)
case x: NumericType =>
b => x.numeric.asInstanceOf[Numeric[Any]].toFloat(b)
}
private[this] def castArray(fromType: DataType, toType: DataType): Any => Any = {
val elementCast = cast(fromType, toType)
// TODO: Could be faster?
buildCast[ArrayData](_, array => {
val values = new Array[Any](array.numElements())
array.foreach(fromType, (i, e) => {
if (e == null) {
values(i) = null
} else {
values(i) = elementCast(e)
}
})
new GenericArrayData(values)
})
}
private[this] def castMap(from: MapType, to: MapType): Any => Any = {
val keyCast = castArray(from.keyType, to.keyType)
val valueCast = castArray(from.valueType, to.valueType)
buildCast[MapData](_, map => {
val keys = keyCast(map.keyArray()).asInstanceOf[ArrayData]
val values = valueCast(map.valueArray()).asInstanceOf[ArrayData]
new ArrayBasedMapData(keys, values)
})
}
private[this] def castStruct(from: StructType, to: StructType): Any => Any = {
val castFuncs: Array[(Any) => Any] = from.fields.zip(to.fields).map {
case (fromField, toField) => cast(fromField.dataType, toField.dataType)
}
// TODO: Could be faster?
buildCast[InternalRow](_, row => {
val newRow = new GenericInternalRow(from.fields.length)
var i = 0
while (i < row.numFields) {
newRow.update(i,
if (row.isNullAt(i)) null else castFuncs(i)(row.get(i, from.apply(i).dataType)))
i += 1
}
newRow
})
}
private[this] def cast(from: DataType, to: DataType): Any => Any = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the codegen path.
if (DataType.equalsStructurally(from, to)) {
identity
} else {
to match {
case dt if dt == from => identity[Any]
case StringType => castToString(from)
case BinaryType => castToBinary(from)
case DateType => castToDate(from)
case decimal: DecimalType => castToDecimal(from, decimal)
case TimestampType => castToTimestamp(from)
case CalendarIntervalType => castToInterval(from)
case BooleanType => castToBoolean(from)
case ByteType => castToByte(from)
case ShortType => castToShort(from)
case IntegerType => castToInt(from)
case FloatType => castToFloat(from)
case LongType => castToLong(from)
case DoubleType => castToDouble(from)
case array: ArrayType =>
castArray(from.asInstanceOf[ArrayType].elementType, array.elementType)
case map: MapType => castMap(from.asInstanceOf[MapType], map)
case struct: StructType => castStruct(from.asInstanceOf[StructType], struct)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
identity[Any]
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
}
}
private[this] lazy val cast: Any => Any = cast(child.dataType, dataType)
protected override def nullSafeEval(input: Any): Any = cast(input)
override def genCode(ctx: CodegenContext): ExprCode = {
// If the cast does not change the structure, then we don't really need to cast anything.
// We can return what the children return. Same thing should happen in the interpreted path.
if (DataType.equalsStructurally(child.dataType, dataType)) {
child.genCode(ctx)
} else {
super.genCode(ctx)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = child.genCode(ctx)
val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)
ev.copy(code = eval.code +
castCode(ctx, eval.value, eval.isNull, ev.value, ev.isNull, dataType, nullSafeCast))
}
// The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull`
// in parameter list, because the returned code will be put in null safe evaluation region.
private[this] type CastFunction = (String, String, String) => String
private[this] def nullSafeCastFunction(
from: DataType,
to: DataType,
ctx: CodegenContext): CastFunction = to match {
case _ if from == NullType => (c, evPrim, evNull) => s"$evNull = true;"
case _ if to == from => (c, evPrim, evNull) => s"$evPrim = $c;"
case StringType => castToStringCode(from, ctx)
case BinaryType => castToBinaryCode(from)
case DateType => castToDateCode(from, ctx)
case decimal: DecimalType => castToDecimalCode(from, decimal, ctx)
case TimestampType => castToTimestampCode(from, ctx)
case CalendarIntervalType => castToIntervalCode(from)
case BooleanType => castToBooleanCode(from)
case ByteType => castToByteCode(from, ctx)
case ShortType => castToShortCode(from, ctx)
case IntegerType => castToIntCode(from, ctx)
case FloatType => castToFloatCode(from)
case LongType => castToLongCode(from, ctx)
case DoubleType => castToDoubleCode(from)
case array: ArrayType =>
castArrayCode(from.asInstanceOf[ArrayType].elementType, array.elementType, ctx)
case map: MapType => castMapCode(from.asInstanceOf[MapType], map, ctx)
case struct: StructType => castStructCode(from.asInstanceOf[StructType], struct, ctx)
case udt: UserDefinedType[_]
if udt.userClass == from.asInstanceOf[UserDefinedType[_]].userClass =>
(c, evPrim, evNull) => s"$evPrim = $c;"
case _: UserDefinedType[_] =>
throw new SparkException(s"Cannot cast $from to $to.")
}
// Since we need to cast input expressions recursively inside ComplexTypes, such as Map's
// Key and Value, Struct's field, we need to name out all the variable names involved in a cast.
private[this] def castCode(ctx: CodegenContext, input: String, inputIsNull: String,
result: String, resultIsNull: String, resultType: DataType, cast: CastFunction): String = {
s"""
boolean $resultIsNull = $inputIsNull;
${ctx.javaType(resultType)} $result = ${ctx.defaultValue(resultType)};
if (!$inputIsNull) {
${cast(input, result, resultIsNull)}
}
"""
}
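  // Illustrative sketch (not part of the original source) of the Java that castCode emits for,
  // say, an int-to-long cast, where `input`/`inputIsNull` come from the child expression:
  //
  //   boolean resultIsNull = inputIsNull;
  //   long result = -1L;                  // ctx.defaultValue(LongType), typically -1L
  //   if (!inputIsNull) {
  //     result = (long) input;            // body produced by the CastFunction
  //   }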
private[this] def castToStringCode(from: DataType, ctx: CodegenContext): CastFunction = {
from match {
case BinaryType =>
(c, evPrim, evNull) => s"$evPrim = UTF8String.fromBytes($c);"
case DateType =>
(c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString(
org.apache.spark.sql.catalyst.util.DateTimeUtils.dateToString($c));"""
case TimestampType =>
val tz = ctx.addReferenceObj("timeZone", timeZone)
(c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString(
org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($c, $tz));"""
case _ =>
(c, evPrim, evNull) => s"$evPrim = UTF8String.fromString(String.valueOf($c));"
}
}
private[this] def castToBinaryCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) => s"$evPrim = $c.getBytes();"
}
private[this] def castToDateCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val intOpt = ctx.freshName("intOpt")
(c, evPrim, evNull) => s"""
scala.Option<Integer> $intOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDate($c);
if ($intOpt.isDefined()) {
$evPrim = ((Integer) $intOpt.get()).intValue();
} else {
$evNull = true;
}
"""
case TimestampType =>
val tz = ctx.addReferenceObj("timeZone", timeZone)
(c, evPrim, evNull) =>
s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays($c / 1000L, $tz);"
case _ =>
(c, evPrim, evNull) => s"$evNull = true;"
}
private[this] def changePrecision(d: String, decimalType: DecimalType,
evPrim: String, evNull: String): String =
s"""
if ($d.changePrecision(${decimalType.precision}, ${decimalType.scale})) {
$evPrim = $d;
} else {
$evNull = true;
}
"""
private[this] def castToDecimalCode(
from: DataType,
target: DecimalType,
ctx: CodegenContext): CastFunction = {
val tmp = ctx.freshName("tmpDecimal")
from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
Decimal $tmp = Decimal.apply(new java.math.BigDecimal($c.toString()));
${changePrecision(tmp, target, evPrim, evNull)}
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = $c ? Decimal.apply(1) : Decimal.apply(0);
${changePrecision(tmp, target, evPrim, evNull)}
"""
case DateType =>
// date can't cast to decimal in Hive
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
// Note that we lose precision here.
(c, evPrim, evNull) =>
s"""
Decimal $tmp = Decimal.apply(
scala.math.BigDecimal.valueOf(${timestampToDoubleCode(c)}));
${changePrecision(tmp, target, evPrim, evNull)}
"""
case DecimalType() =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = $c.clone();
${changePrecision(tmp, target, evPrim, evNull)}
"""
case x: IntegralType =>
(c, evPrim, evNull) =>
s"""
Decimal $tmp = Decimal.apply((long) $c);
${changePrecision(tmp, target, evPrim, evNull)}
"""
case x: FractionalType =>
// All other numeric types can be represented precisely as Doubles
(c, evPrim, evNull) =>
s"""
try {
Decimal $tmp = Decimal.apply(scala.math.BigDecimal.valueOf((double) $c));
${changePrecision(tmp, target, evPrim, evNull)}
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
}
}
private[this] def castToTimestampCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val tz = ctx.addReferenceObj("timeZone", timeZone)
val longOpt = ctx.freshName("longOpt")
(c, evPrim, evNull) =>
s"""
scala.Option<Long> $longOpt =
org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestamp($c, $tz);
if ($longOpt.isDefined()) {
$evPrim = ((Long) $longOpt.get()).longValue();
} else {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1L : 0L;"
case _: IntegralType =>
(c, evPrim, evNull) => s"$evPrim = ${longToTimeStampCode(c)};"
case DateType =>
val tz = ctx.addReferenceObj("timeZone", timeZone)
(c, evPrim, evNull) =>
s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.daysToMillis($c, $tz) * 1000;"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = ${decimalToTimestampCode(c)};"
case DoubleType =>
(c, evPrim, evNull) =>
s"""
if (Double.isNaN($c) || Double.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * 1000000L);
}
"""
case FloatType =>
(c, evPrim, evNull) =>
s"""
if (Float.isNaN($c) || Float.isInfinite($c)) {
$evNull = true;
} else {
$evPrim = (long)($c * 1000000L);
}
"""
}
private[this] def castToIntervalCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""$evPrim = CalendarInterval.fromString($c.toString());
if(${evPrim} == null) {
${evNull} = true;
}
""".stripMargin
}
private[this] def decimalToTimestampCode(d: String): String =
s"($d.toBigDecimal().bigDecimal().multiply(new java.math.BigDecimal(1000000L))).longValue()"
private[this] def longToTimeStampCode(l: String): String = s"$l * 1000000L"
private[this] def timestampToIntegerCode(ts: String): String =
s"java.lang.Math.floor((double) $ts / 1000000L)"
private[this] def timestampToDoubleCode(ts: String): String = s"$ts / 1000000.0"
private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
case StringType =>
val stringUtils = StringUtils.getClass.getName.stripSuffix("$")
(c, evPrim, evNull) =>
s"""
if ($stringUtils.isTrueString($c)) {
$evPrim = true;
} else if ($stringUtils.isFalseString($c)) {
$evPrim = false;
} else {
$evNull = true;
}
"""
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = $c != 0;"
case DateType =>
// Hive would return null when cast from date to boolean
(c, evPrim, evNull) => s"$evNull = true;"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = !$c.isZero();"
case n: NumericType =>
(c, evPrim, evNull) => s"$evPrim = $c != 0;"
}
private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("intWrapper")
(c, evPrim, evNull) =>
s"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toByte($wrapper)) {
$evPrim = (byte) $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? (byte) 1 : (byte) 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (byte) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toByte();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (byte) $c;"
}
private[this] def castToShortCode(
from: DataType,
ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("intWrapper")
(c, evPrim, evNull) =>
s"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toShort($wrapper)) {
$evPrim = (short) $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? (short) 1 : (short) 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (short) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toShort();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (short) $c;"
}
private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("intWrapper")
(c, evPrim, evNull) =>
s"""
UTF8String.IntWrapper $wrapper = new UTF8String.IntWrapper();
if ($c.toInt($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1 : 0;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (int) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toInt();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (int) $c;"
}
private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
case StringType =>
val wrapper = ctx.freshName("longWrapper")
(c, evPrim, evNull) =>
s"""
UTF8String.LongWrapper $wrapper = new UTF8String.LongWrapper();
if ($c.toLong($wrapper)) {
$evPrim = $wrapper.value;
} else {
$evNull = true;
}
$wrapper = null;
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1L : 0L;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (long) ${timestampToIntegerCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toLong();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (long) $c;"
}
private[this] def castToFloatCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
$evPrim = Float.valueOf($c.toString());
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1.0f : 0.0f;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = (float) (${timestampToDoubleCode(c)});"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toFloat();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (float) $c;"
}
private[this] def castToDoubleCode(from: DataType): CastFunction = from match {
case StringType =>
(c, evPrim, evNull) =>
s"""
try {
$evPrim = Double.valueOf($c.toString());
} catch (java.lang.NumberFormatException e) {
$evNull = true;
}
"""
case BooleanType =>
(c, evPrim, evNull) => s"$evPrim = $c ? 1.0d : 0.0d;"
case DateType =>
(c, evPrim, evNull) => s"$evNull = true;"
case TimestampType =>
(c, evPrim, evNull) => s"$evPrim = ${timestampToDoubleCode(c)};"
case DecimalType() =>
(c, evPrim, evNull) => s"$evPrim = $c.toDouble();"
case x: NumericType =>
(c, evPrim, evNull) => s"$evPrim = (double) $c;"
}
private[this] def castArrayCode(
fromType: DataType, toType: DataType, ctx: CodegenContext): CastFunction = {
val elementCast = nullSafeCastFunction(fromType, toType, ctx)
val arrayClass = classOf[GenericArrayData].getName
val fromElementNull = ctx.freshName("feNull")
val fromElementPrim = ctx.freshName("fePrim")
val toElementNull = ctx.freshName("teNull")
val toElementPrim = ctx.freshName("tePrim")
val size = ctx.freshName("n")
val j = ctx.freshName("j")
val values = ctx.freshName("values")
(c, evPrim, evNull) =>
s"""
final int $size = $c.numElements();
final Object[] $values = new Object[$size];
for (int $j = 0; $j < $size; $j ++) {
if ($c.isNullAt($j)) {
$values[$j] = null;
} else {
boolean $fromElementNull = false;
${ctx.javaType(fromType)} $fromElementPrim =
${ctx.getValue(c, fromType, j)};
${castCode(ctx, fromElementPrim,
fromElementNull, toElementPrim, toElementNull, toType, elementCast)}
if ($toElementNull) {
$values[$j] = null;
} else {
$values[$j] = $toElementPrim;
}
}
}
$evPrim = new $arrayClass($values);
"""
}
private[this] def castMapCode(from: MapType, to: MapType, ctx: CodegenContext): CastFunction = {
val keysCast = castArrayCode(from.keyType, to.keyType, ctx)
val valuesCast = castArrayCode(from.valueType, to.valueType, ctx)
val mapClass = classOf[ArrayBasedMapData].getName
val keys = ctx.freshName("keys")
val convertedKeys = ctx.freshName("convertedKeys")
val convertedKeysNull = ctx.freshName("convertedKeysNull")
val values = ctx.freshName("values")
val convertedValues = ctx.freshName("convertedValues")
val convertedValuesNull = ctx.freshName("convertedValuesNull")
(c, evPrim, evNull) =>
s"""
final ArrayData $keys = $c.keyArray();
final ArrayData $values = $c.valueArray();
${castCode(ctx, keys, "false",
convertedKeys, convertedKeysNull, ArrayType(to.keyType), keysCast)}
${castCode(ctx, values, "false",
convertedValues, convertedValuesNull, ArrayType(to.valueType), valuesCast)}
$evPrim = new $mapClass($convertedKeys, $convertedValues);
"""
}
private[this] def castStructCode(
from: StructType, to: StructType, ctx: CodegenContext): CastFunction = {
val fieldsCasts = from.fields.zip(to.fields).map {
case (fromField, toField) => nullSafeCastFunction(fromField.dataType, toField.dataType, ctx)
}
val rowClass = classOf[GenericInternalRow].getName
val tmpResult = ctx.freshName("tmpResult")
val tmpInput = ctx.freshName("tmpInput")
val fieldsEvalCode = fieldsCasts.zipWithIndex.map { case (cast, i) =>
val fromFieldPrim = ctx.freshName("ffp")
val fromFieldNull = ctx.freshName("ffn")
val toFieldPrim = ctx.freshName("tfp")
val toFieldNull = ctx.freshName("tfn")
val fromType = ctx.javaType(from.fields(i).dataType)
s"""
boolean $fromFieldNull = $tmpInput.isNullAt($i);
if ($fromFieldNull) {
$tmpResult.setNullAt($i);
} else {
$fromType $fromFieldPrim =
${ctx.getValue(tmpInput, from.fields(i).dataType, i.toString)};
${castCode(ctx, fromFieldPrim,
fromFieldNull, toFieldPrim, toFieldNull, to.fields(i).dataType, cast)}
if ($toFieldNull) {
$tmpResult.setNullAt($i);
} else {
${ctx.setColumn(tmpResult, to.fields(i).dataType, i, toFieldPrim)};
}
}
"""
}
val fieldsEvalCodes = ctx.splitExpressions(
expressions = fieldsEvalCode,
funcName = "castStruct",
arguments = ("InternalRow", tmpInput) :: (rowClass, tmpResult) :: Nil)
(input, result, resultIsNull) =>
s"""
final $rowClass $tmpResult = new $rowClass(${fieldsCasts.length});
final InternalRow $tmpInput = $input;
$fieldsEvalCodes
$result = $tmpResult;
"""
}
override def sql: String = dataType match {
// HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this
// type of casting can only be introduced by the analyzer, and can be omitted when converting
// back to SQL query string.
case _: ArrayType | _: MapType | _: StructType => child.sql
case _ => s"CAST(${child.sql} AS ${dataType.sql})"
}
}
/**
 * Cast the child expression to the target data type, but will throw an error if the cast might
 * truncate, e.g. long -> int, timestamp -> date.
*/
case class UpCast(child: Expression, dataType: DataType, walkedTypePath: Seq[String])
extends UnaryExpression with Unevaluable {
override lazy val resolved = false
}
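// Illustrative sketch (not part of the original source): in an analyzed plan one would typically
// see something like
//   Cast(childExpr, TimestampType, timeZoneId = Some("UTC"))
// whereas the Dataset encoder machinery inserts UpCast nodes, which the analyzer later rejects
// when the requested cast could lose information (e.g. long -> int).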
|
saltstar/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
|
Scala
|
apache-2.0
| 40,249 |
package cucumber.runtime
import org.junit.{Test, Assert}
import Assert._
import collection.JavaConverters._
import _root_.gherkin.I18n
import gherkin.formatter.model.Tag
class ScalaDslTest {
@Test
def emptyBefore {
var called = false
object Befores extends ScalaDsl with EN {
Before {
called = true
}
}
assertEquals(1, Befores.beforeHooks.size)
val hook = Befores.beforeHooks.head
assertTrue(hook.matches(List[Tag]().asJava))
hook.execute(null)
assertTrue(called)
assertEquals(Int.MaxValue, hook.getOrder)
}
@Test
def taggedBefore {
var called = false
object Befores extends ScalaDsl with EN {
Before("@foo,@bar", "@zap"){
called = true
}
}
assertEquals(1, Befores.beforeHooks.size)
val hook = Befores.beforeHooks.head
assertFalse(hook.matches(List[Tag]().asJava))
assertTrue(hook.matches(List(new Tag("@bar", 0), new Tag("@zap", 0)).asJava))
assertFalse(hook.matches(List(new Tag("@bar", 1)).asJava))
hook.execute(null)
assertTrue(called)
assertEquals(Int.MaxValue, hook.getOrder)
}
@Test
def orderedBefore {
var called = false
object Befores extends ScalaDsl with EN {
Before(10){
called = true
}
}
val hook = Befores.beforeHooks(0)
assertEquals(10, hook.getOrder)
}
@Test
def taggedOrderedBefore {
var called = false
object Befores extends ScalaDsl with EN {
Before(10, "@foo,@bar", "@zap"){
called = true
}
}
val hook = Befores.beforeHooks(0)
assertEquals(10, hook.getOrder)
}
@Test
def emptyAfter {
var called = false
object Afters extends ScalaDsl with EN {
After {
called = true
}
}
assertEquals(1, Afters.afterHooks.size)
val hook = Afters.afterHooks.head
assertTrue(hook.matches(List[Tag]().asJava))
hook.execute(null)
assertTrue(called)
}
@Test
def taggedAfter {
var called = false
object Afters extends ScalaDsl with EN {
After("@foo,@bar", "@zap"){
called = true
}
}
assertEquals(1, Afters.afterHooks.size)
val hook = Afters.afterHooks.head
assertFalse(hook.matches(List[Tag]().asJava))
assertTrue(hook.matches(List(new Tag("@bar", 0), new Tag("@zap", 0)).asJava))
assertFalse(hook.matches(List(new Tag("@bar", 1)).asJava))
hook.execute(null)
assertTrue(called)
}
@Test
def noArg {
var called = false
object Dummy extends ScalaDsl with EN {
Given("x"){
called = true
}
}
assertEquals(1, Dummy.stepDefinitions.size)
val step = Dummy.stepDefinitions.head
assertEquals("ScalaDslTest.scala:126", step.getLocation) // be careful with formatting or this test will break
assertEquals("x", step.getPattern)
step.execute(new I18n("en"), Array())
assertTrue(called)
}
@Test
def args {
var thenumber = 0
var thecolour = ""
object Dummy extends ScalaDsl with EN {
Given("Oh boy, (\\\\d+) (\\\\s+) cukes"){ (num:Int, colour:String) =>
thenumber = num
thecolour = colour
}
}
assertEquals(1, Dummy.stepDefinitions.size)
val step = Dummy.stepDefinitions(0)
step.execute(new I18n("en"), Array("5", "green"))
assertEquals(5, thenumber)
assertEquals("green", thecolour)
}
@Test
def transformation {
case class Person(name:String)
var person:Person = null
object Dummy extends ScalaDsl with EN {
implicit val transformPerson = Transform(Person(_))
Given("Person (\\\\s+)"){ p:Person =>
person = p
}
}
Dummy.stepDefinitions(0).execute(new I18n("en"), Array("Aslak"))
assertEquals(Person("Aslak"), person)
}
}
|
mbellani/cucumber-jvm
|
scala/src/test/scala/cucumber/runtime/ScalaDslTest.scala
|
Scala
|
mit
| 3,770 |
import scalaz.\/
/**
* Created by denis on 9/4/16.
*/
class GenerationException(number: Long, message: String)
extends Exception(message)
object ScalazDisjunction {
  def queryNextNumberE: Exception \/ Long = {
val source = Math.round(Math.random * 100)
    if (source <= 60) \/.right(source)
    else \/.left(new Exception("The generated number is too big!"))
}
  def queryNextNumberT: Throwable \/ Long = \/.fromTryCatchNonFatal {
val source = Math.round(Math.random * 100)
if (source <= 60) source
else throw new Exception("The generated number is too big!")
}
  def queryNextNumberGE: GenerationException \/ Long = \/.fromTryCatchThrowable[Long, GenerationException] {
val source = Math.round(Math.random * 100)
if (source <= 90) source
else throw new GenerationException(source, "The generated number is too big!")
}
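  // Illustrative note (not part of the original source): all three variants yield a disjunction,
  // e.g. a successful call produces \/-(42L) and a failure produces -\/(exception);
  // fromTryCatchThrowable only converts exceptions of the requested type (here GenerationException)
  // and lets anything else propagate.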
def main(args: Array[String]): Unit = {
val lst = List(queryNextNumberGE, queryNextNumberGE, queryNextNumberGE)
import scalaz._
import Scalaz._
val lstD = lst.sequence
println(lstD)
}
}
|
denisftw/advanced-scala-code
|
scalaz/src/main/scala/ScalazDisjunction.scala
|
Scala
|
apache-2.0
| 1,080 |
package argonaut.internal
import argonaut._
import scala.collection.immutable.{ SortedMap, MapLike }
object Macros extends MacrosCompat {
def materializeCodecImpl[T: c.WeakTypeTag](c: Context): c.Expr[CodecJson[T]] = {
import c.universe._
val tpe = weakTypeOf[T]
val encode = materializeEncodeImpl[T](c)
val decode = materializeDecodeImpl[T](c)
c.Expr[CodecJson[T]](q"""
_root_.argonaut.CodecJson.derived[$tpe]($encode, $decode)
""")
}
def materializeEncodeImpl[T: c.WeakTypeTag](c: Context): c.Expr[EncodeJson[T]] = {
import c.universe._
val tpe = weakTypeOf[T]
val primaryConstructor = getDeclarations(c)(tpe).collectFirst{
case m: MethodSymbol if m.isPrimaryConstructor => m
}
primaryConstructor match {
case Some(constructor) => {
val fieldNames: List[c.universe.Name] = getParameterLists(c)(constructor).flatten.map{field =>
field.name
}
val decodedNames: List[String] = fieldNames.map(_.decodedName.toString)
val fieldTypes: List[c.universe.Type] = getParameterLists(c)(constructor).flatten.map{field =>
getDeclaration(c)(tpe, field.name).typeSignature
}
val fieldCount = fieldNames.size
val invocations = fieldNames.map{fieldName =>
val termName = createTermName(c)(fieldName.toString)
q"toEncode.$termName"
}
val methodName = createTermName(c)("jencode" + (fieldCount.toString) + "L")
val expr = c.Expr[EncodeJson[T]]{q"""
_root_.argonaut.EncodeJson.$methodName[$tpe, ..$fieldTypes](toEncode => (..$invocations))(..$decodedNames)
"""}
//println(expr)
expr
}
case None => c.abort(c.enclosingPosition, "Could not identify primary constructor for " + tpe)
}
}
def materializeDecodeImpl[T: c.WeakTypeTag](c: Context): c.Expr[DecodeJson[T]] = {
import c.universe._
val tpe = weakTypeOf[T]
val primaryConstructor = getDeclarations(c)(tpe).collectFirst{
case m: MethodSymbol if m.isPrimaryConstructor => m
}
primaryConstructor match {
case Some(constructor) => {
val fieldNames: List[c.universe.Name] = getParameterLists(c)(constructor).flatten.map{field =>
field.name
}
val decodedNames: List[String] = fieldNames.map(_.decodedName.toString)
val fieldTypes: List[c.universe.Type] = getParameterLists(c)(constructor).flatten.map{field =>
getDeclaration(c)(tpe, field.name).typeSignature
}
val fieldCount = fieldNames.size
val functionParameters = fieldNames.zip(fieldTypes).map{case (fieldName, fieldType) =>
val termName = createTermName(c)(fieldName.toString)
q"$termName: $fieldType"
}
val parameters = fieldNames.map{fieldName =>
val termName = createTermName(c)(fieldName.toString)
q"$termName"
}
val methodName = createTermName(c)("jdecode" + (fieldCount.toString) + "L")
val expr = c.Expr[DecodeJson[T]]{q"""
_root_.argonaut.DecodeJson.$methodName[..$fieldTypes, $tpe]((..$functionParameters) => new $tpe(..$parameters))(..$decodedNames)
"""}
//println(expr)
expr
}
case None => c.abort(c.enclosingPosition, "Could not identify primary constructor for " + tpe)
}
}
}
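// Illustrative sketch (not part of the original source): these impls are meant to be wired up as
// macro materializers, roughly along the lines of
//   implicit def derivedCodec[T]: CodecJson[T] = macro Macros.materializeCodecImpl[T]
// so that an implicit CodecJson for a case class can be summoned, generated from its primary constructor.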
|
etorreborre/argonaut
|
src/main/scala/argonaut/internal/Macros.scala
|
Scala
|
bsd-3-clause
| 3,364 |
package breeze.linalg
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import operators._
import support._
import support.CanTraverseValues.ValuesVisitor
import breeze.generic.{UFunc}
import breeze.generic.UFunc.{UImpl2, UImpl, InPlaceImpl2}
import breeze.macros.expand
import breeze.math._
import breeze.stats.distributions.Rand
import breeze.storage.{Zero, Storage}
import scala.util.hashing.MurmurHash3
import scala.{specialized=>spec}
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.mutable.ArrayBuilder
import scala.collection.immutable.BitSet
import scala.reflect.ClassTag
/**
* Trait for operators and such used in vectors.
* @author dlwh
*/
trait VectorLike[@spec V, +Self <: Vector[V]] extends Tensor[Int, V] with TensorLike[Int, V, Self] {
def map[V2, That](fn: V=>V2)(implicit canMapValues: CanMapValues[Self @uncheckedVariance, V, V2, That]):That = values map fn
def foreach[U](fn: V=>U): Unit = { values foreach fn }
}
/**
* A Vector represents the mathematical concept of a vector in math.
* @tparam V
*/
trait Vector[@spec(Int, Double, Float) V] extends VectorLike[V, Vector[V]]{
/**
* @return the set of keys in this vector (0 until length)
*/
def keySet: Set[Int] = BitSet( (0 until length) :_*)
def length: Int
override def size = length
def iterator = Iterator.range(0, size).map{i => i -> apply(i)}
def valuesIterator = Iterator.range(0, size).map{i => apply(i)}
def keysIterator = Iterator.range(0, size)
def copy: Vector[V]
override def equals(p1: Any) = p1 match {
case x: Vector[_] =>
this.length == x.length &&
(valuesIterator sameElements x.valuesIterator)
case _ => false
}
def toDenseVector(implicit cm: ClassTag[V]) = {
DenseVector(toArray)
}
/**Returns copy of this [[breeze.linalg.Vector]] as a [[scala.Array]]*/
def toArray(implicit cm: ClassTag[V]) = {
val result = new Array[V](length)
var i = 0
while(i < length) {
result(i) = apply(i)
i += 1
}
result
}
//ToDo 2: Should this be deprecated and changed to `toScalaVector`?
/**Returns copy of this [[breeze.linalg.Vector]] as a [[scala.Vector]]*/
def toVector(implicit cm: ClassTag[V]) = Vector[V]( toArray )
//ToDo 2: implement fold/scan/reduce to operate along one axis of a matrix/tensor
// <editor-fold defaultstate="collapsed" desc=" scala.collection -like padTo, fold/scan/reduce ">
/** See [[scala.collection.mutable.ArrayOps.padTo]].
*/
def padTo(len: Int, elem: V)(implicit cm: ClassTag[V]): Vector[V] = Vector[V]( toArray.padTo(len, elem) )
def exists(f: V=>Boolean) = valuesIterator.exists(f)
override def forall(f: V=>Boolean) = valuesIterator.forall(f)
/** See [[scala.collection.mutable.ArrayOps.fold]].
*/
def fold[E1 >: V](z: E1)(op: (E1, E1) => E1 ): E1 = valuesIterator.fold(z)( op )
/** See [[scala.collection.mutable.ArrayOps.foldLeft]].
*/
def foldLeft[B >: V](z: B)(op: (B, V) => B ): B = valuesIterator.foldLeft(z)( op )
/** See [[scala.collection.mutable.ArrayOps.foldRight]].
*/
def foldRight[B >: V](z: B)(op: (V, B) => B ): B = valuesIterator.foldRight(z)( op )
/** See [[scala.collection.mutable.ArrayOps.reduce]].
*/
def reduce[E1 >: V](op: (E1, E1) => E1 ): E1 = valuesIterator.reduce( op )
/** See [[scala.collection.mutable.ArrayOps.reduceLeft]].
*/
def reduceLeft[B >: V](op: (B, V) => B ): B = {
valuesIterator.reduceLeft( op )
}
/** See [[scala.collection.mutable.ArrayOps.reduceRight]].
*/
def reduceRight[B >: V](op: (V, B) => B ): B = {
valuesIterator.reduceRight( op )
}
/** See [[scala.collection.mutable.ArrayOps.scan]].
*/
def scan[E1 >: V](z: E1)(op: (E1, E1) => E1 )(implicit cm: ClassTag[V], cm1: ClassTag[E1]): Vector[E1] = {
Vector[E1]( toArray.scan(z)( op ))
}
/** See [[scala.collection.mutable.ArrayOps.scanLeft]].
*/
def scanLeft[B >: V](z: B)(op: (B, V) => B )(implicit cm1: ClassTag[B]): Vector[B] = {
Vector[B]( valuesIterator.scanLeft(z)( op ).toArray )
}
/** See [[scala.collection.mutable.ArrayOps.scanRight]].
*/
def scanRight[B >: V](z: B)(op: (V, B) => B )(implicit cm1: ClassTag[B]): Vector[B] = Vector[B]( valuesIterator.scanRight(z)( op ).toArray )
// </editor-fold>
}
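// Illustrative sketch (not part of the original source) of the collection-like helpers above:
//   val v = Vector(1.0, 2.0, 3.0)
//   v.fold(0.0)(_ + _)        // 6.0
//   v.scanLeft(0.0)(_ + _)    // Vector(0.0, 1.0, 3.0, 6.0)
//   v.padTo(5, 0.0)           // Vector(1.0, 2.0, 3.0, 0.0, 0.0)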
object Vector extends VectorConstructors[Vector] with VectorOps {
/**
* Creates a Vector of size size.
* @param size
* @tparam V
* @return
*/
def zeros[V: ClassTag : Zero](size: Int): Vector[V] = DenseVector.zeros(size)
/**
* Creates a vector with the specified elements
* @param values
* @tparam V
* @return
*/
def apply[@spec(Double, Int, Float, Long) V](values: Array[V]): Vector[V] = DenseVector(values)
implicit def canCopy[E]:CanCopy[Vector[E]] = new CanCopy[Vector[E]] {
// Should not inherit from T=>T because those get used by the compiler.
def apply(t: Vector[E]): Vector[E] = t.copy
}
  // There's a bizarre error specializing floats here.
class CanZipMapValuesVector[@spec(Int, Double) V, @spec(Int, Double) RV:ClassTag] extends CanZipMapValues[Vector[V],V,RV,Vector[RV]] {
def create(length : Int) = DenseVector(new Array[RV](length))
/**Maps all corresponding values from the two collection. */
def map(from: Vector[V], from2: Vector[V], fn: (V, V) => RV) = {
require(from.length == from2.length, "Vector lengths must match!")
val result = create(from.length)
var i = 0
while (i < from.length) {
result.data(i) = fn(from(i), from2(i))
i += 1
}
result
}
}
implicit def canMapValues[V, V2](implicit man: ClassTag[V2]):CanMapValues[Vector[V], V, V2, Vector[V2]] = {
new CanMapValues[Vector[V], V, V2, Vector[V2]] {
/**Maps all key-value pairs from the given collection. */
def apply(from: Vector[V], fn: (V) => V2) = {
DenseVector.tabulate(from.length)(i => fn(from(i)))
}
}
}
implicit def canMapActiveValues[V, V2](implicit man: ClassTag[V2]):CanMapActiveValues[Vector[V], V, V2, Vector[V2]] = {
new CanMapActiveValues[Vector[V], V, V2, Vector[V2]] {
/**Maps all key-value pairs from the given collection. */
def apply(from: Vector[V], fn: (V) => V2) = {
DenseVector.tabulate(from.length)(i => fn(from(i)))
}
}
}
implicit def scalarOf[T]: ScalarOf[Vector[T], T] = ScalarOf.dummy
implicit def negFromScale[@spec(Double, Int, Float, Long) V, Double](implicit scale: OpMulScalar.Impl2[Vector[V], V, Vector[V]], ring: Ring[V]) = {
new OpNeg.Impl[Vector[V], Vector[V]] {
override def apply(a : Vector[V]) = {
scale(a, ring.negate(ring.one))
}
}
}
implicit def zipMap[V, R:ClassTag] = new CanZipMapValuesVector[V, R]
implicit val zipMap_d = new CanZipMapValuesVector[Double, Double]
implicit val zipMap_f = new CanZipMapValuesVector[Float, Float]
implicit val zipMap_i = new CanZipMapValuesVector[Int, Int]
class CanZipMapKeyValuesVector[@spec(Double, Int, Float, Long) V, @spec(Int, Double) RV:ClassTag] extends CanZipMapKeyValues[Vector[V],Int, V,RV,Vector[RV]] {
def create(length : Int) = DenseVector(new Array[RV](length))
/**Maps all corresponding values from the two collection. */
def map(from: Vector[V], from2: Vector[V], fn: (Int, V, V) => RV): Vector[RV] = {
require(from.length == from2.length, "Vector lengths must match!")
val result = create(from.length)
var i = 0
while (i < from.length) {
result.data(i) = fn(i, from(i), from2(i))
i += 1
}
result
}
override def mapActive(from: Vector[V], from2: Vector[V], fn: (Int, V, V) => RV): Vector[RV] = {
map(from, from2, fn)
}
}
implicit def zipMapKV[V, R:ClassTag]: CanZipMapKeyValuesVector[V, R] = new CanZipMapKeyValuesVector[V, R]
/**Returns the k-norm of this Vector. */
implicit def canNorm[T](implicit canNormS: norm.Impl[T, Double]): norm.Impl2[Vector[T], Double, Double] = {
new norm.Impl2[Vector[T], Double, Double] {
def apply(v: Vector[T], n: Double): Double = {
import v._
if (n == 1) {
var sum = 0.0
activeValuesIterator foreach (v => sum += canNormS(v) )
sum
} else if (n == 2) {
var sum = 0.0
activeValuesIterator foreach (v => { val nn = canNormS(v); sum += nn * nn })
math.sqrt(sum)
} else if (n == Double.PositiveInfinity) {
var max = 0.0
activeValuesIterator foreach (v => { val nn = canNormS(v); if (nn > max) max = nn })
max
} else {
var sum = 0.0
activeValuesIterator foreach (v => { val nn = canNormS(v); sum += math.pow(nn,n) })
math.pow(sum, 1.0 / n)
}
}
}
}
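  // Illustrative examples (not part of the original source): for v = Vector(3.0, -4.0)
  //   norm(v, 1.0)                      // 7.0  (sum of absolute values)
  //   norm(v, 2.0)                      // 5.0  (Euclidean length)
  //   norm(v, Double.PositiveInfinity)  // 4.0  (largest absolute value)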
implicit def canIterateValues[V]: CanTraverseValues[Vector[V], V] = new CanTraverseValues[Vector[V], V] {
def isTraversableAgain(from: Vector[V]): Boolean = true
def traverse(from: Vector[V], fn: ValuesVisitor[V]): Unit = {
for( v <- from.valuesIterator) {
fn.visit(v)
}
}
}
implicit def canTraverseKeyValuePairs[V]: CanTraverseKeyValuePairs[Vector[V], Int, V] =
new CanTraverseKeyValuePairs[Vector[V], Int, V] {
def isTraversableAgain(from: Vector[V]): Boolean = true
def traverse(from: Vector[V], fn: CanTraverseKeyValuePairs.KeyValuePairsVisitor[Int, V]): Unit = {
for(i <- 0 until from.length)
fn.visit(i, from(i))
}
}
implicit def space[V:Field:Zero:ClassTag]: MutableFiniteCoordinateField[Vector[V], Int, V] = {
val f = implicitly[Field[V]]
import f.normImpl
implicit val _dim = dim.implVDim[V, Vector[V]]
MutableFiniteCoordinateField.make[Vector[V], Int, V]
}
}
trait VectorOps { this: Vector.type =>
import breeze.math.PowImplicits._
@expand.valify
@expand
implicit def v_v_Idempotent_Op[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub) Op <: OpType]
(implicit @expand.sequence[Op]({_ + _}, {_ - _})
op: Op.Impl2[T, T, T]):BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]] = new BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]] {
override def bindingMissing(a: Vector[T], b: Vector[T]): Vector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result = a.copy
for((k,v) <- b.activeIterator) {
result(k) = op(a(k), v)
}
result
}
}
implicit def v_v_Idempotent_OpSub[T:Ring]:OpSub.Impl2[Vector[T], Vector[T], Vector[T]] =
new OpSub.Impl2[Vector[T], Vector[T], Vector[T]] {
val r = implicitly[Ring[T]]
def apply(a: Vector[T], b: Vector[T]): Vector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result = a.copy
for((k,v) <- b.activeIterator) {
result(k) = r.-(a(k), v)
}
result
}
}
implicit def v_v_Idempotent_OpAdd[T:Semiring]:OpAdd.Impl2[Vector[T], Vector[T], Vector[T]] =
new OpAdd.Impl2[Vector[T], Vector[T], Vector[T]] {
val r = implicitly[Semiring[T]]
def apply(a: Vector[T], b: Vector[T]): Vector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result = a.copy
for((k,v) <- b.activeIterator) {
result(k) = r.+(a(k), v)
}
result
}
}
@expand
@expand.valify
implicit def v_v_nilpotent_Op[@expand.args(Int, Double, Float, Long) T]
(implicit @expand.sequence[T](0, 0.0, 0.0f, 0l) zero: T):BinaryRegistry[Vector[T], Vector[T], OpMulScalar.type, Vector[T]] = new BinaryRegistry[Vector[T], Vector[T], OpMulScalar.type, Vector[T]] {
override def bindingMissing(a: Vector[T], b: Vector[T]): Vector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
for((k,v) <- b.activeIterator) {
val r = a(k) * v
if(r != zero)
builder.add(k, r)
}
builder.toVector
}
}
@expand
@expand.valify
implicit def v_v_Op[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
(implicit @expand.sequence[Op]({_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
op: Op.Impl2[T, T, T]):BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]] = new BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]] {
override def bindingMissing(a: Vector[T], b: Vector[T]): Vector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result = Vector.zeros[T](a.length)
var i = 0
while(i < a.length) {
result(i) = op(a(i), b(i))
i += 1
}
result
}
}
/*
@expand
implicit def cast_v_v_Op[V1, V2,
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar,OpDiv, OpSet, OpMod, OpPow) Op <: OpType](implicit v1: V1<:<Vector[T], v2: V2<:<Vector[T]) = {
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].asInstanceOf[Op.Impl2[V1, V2, Vector[T]]]
}
*/
@expand
@expand.valify
implicit def v_s_Op[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
(implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0l)
zero: T):BinaryRegistry[Vector[T], T, Op.type, Vector[T]] = new BinaryRegistry[Vector[T], T, Op.type, Vector[T]] {
override def bindingMissing(a: Vector[T], b: T): Vector[T] = {
val result = Vector.zeros[T](a.length)
var i = 0
while(i < a.length) {
result(i) = op(a(i), b)
i += 1
}
result
}
}
@expand
@expand.valify
implicit def s_v_Op[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
(implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0l)
zero: T):BinaryRegistry[T, Vector[T], Op.type, Vector[T]] = new BinaryRegistry[T, Vector[T], Op.type, Vector[T]] {
override def bindingMissing(b: T, a: Vector[T]): Vector[T] = {
val result = Vector.zeros[T](a.length)
var i = 0
while(i < a.length) {
result(i) = op(b, a(i))
i += 1
}
result
}
}
@expand
implicit def v_sField_Op[@expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpMod, OpPow) Op <: OpType, T:Field:ClassTag]
(implicit @expand.sequence[Op]({f.+(_,_)}, {f.-(_,_)}, {f.*(_,_)}, {f.*(_,_)}, {f./(_,_)}, {f.%(_,_)}, {f.pow(_,_)}) op: Op.Impl2[T, T, T]):
BinaryRegistry[Vector[T], T, Op.type, Vector[T]] = new BinaryRegistry[Vector[T], T, Op.type, Vector[T]] {
val f = implicitly[Field[T]]
override def bindingMissing(a: Vector[T], b: T): Vector[T] = {
val result = Vector.zeros[T](a.length)
var i = 0
while(i < a.length) {
result(i) = op(a(i), b)
i += 1
}
result
}
}
@expand
@expand.valify
implicit def v_v_UpdateOp[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
(implicit @expand.sequence[Op]({_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
op: Op.Impl2[T, T, T]):BinaryUpdateRegistry[Vector[T], Vector[T], Op.type] = new BinaryUpdateRegistry[Vector[T], Vector[T], Op.type] {
override def bindingMissing(a: Vector[T], b: Vector[T]):Unit = {
require(b.length == a.length, "Vectors must be the same length!")
var i = 0
while(i < a.length) {
a(i) = op(a(i), b(i))
i += 1
}
}
}
@expand
@expand.valify
implicit def v_v_Idempotent_UpdateOp[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub) Op <: OpType]
(implicit @expand.sequence[Op]({_ + _}, {_ - _})
op: Op.Impl2[T, T, T]):BinaryUpdateRegistry[Vector[T], Vector[T], Op.type] = new BinaryUpdateRegistry[Vector[T], Vector[T], Op.type] {
override def bindingMissing(a: Vector[T], b: Vector[T]):Unit = {
require(b.length == a.length, "Vectors must be the same length!")
for( (k,v) <- b.activeIterator) {
a(k) = op(a(k), v)
}
}
}
implicit def castUpdateOps[V1, V2, T, Op](implicit v1ev: V1<:<Vector[T],
V2ev: V2<:<Vector[T],
op: UFunc.InPlaceImpl2[Op, Vector[T], Vector[T]]): InPlaceImpl2[Op, V1, V2] = {
op.asInstanceOf[UFunc.InPlaceImpl2[Op, V1, V2]]
}
implicit def castOps[V1, V2, T, Op, VR](implicit v1ev: V1<:<Vector[T],
V2ev: V2<:<Vector[T],
op: UImpl2[Op, Vector[T], Vector[T], VR]): UImpl2[Op, V1, V2, VR] = {
op.asInstanceOf[UFunc.UImpl2[Op, V1, V2, VR]]
}
// implicit def castScalarOps[V1, T, Op, VR](implicit v1ev: V1<:<Vector[T],
// op: UImpl2[Op, Vector[T], T, VR]): UImpl2[Op, V1, T, VR] = {
// op.asInstanceOf[UFunc.UImpl2[Op, V1, T, VR]]
// }
//
// implicit def castScalarLhsOps[V1, T, Op, VR](implicit v1ev: V1<:<Vector[T],
// op: UImpl2[Op, T, Vector[T], VR]): UImpl2[Op, T, V1, VR] = {
// op.asInstanceOf[UFunc.UImpl2[Op, T, V1, VR]]
// }
import shapeless._
implicit def castFunc[V1, T, Op, VR](implicit v1ev: V1<:<Vector[T], v1ne: V1 =:!= Vector[T],
op: UImpl[Op, Vector[T], VR]): UImpl[Op, V1, VR] = {
op.asInstanceOf[UFunc.UImpl[Op, V1, VR]]
}
@expand
@expand.valify
implicit def v_s_UpdateOp[@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
(implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
op: Op.Impl2[T, T, T]):BinaryUpdateRegistry[Vector[T], T, Op.type] = new BinaryUpdateRegistry[Vector[T], T, Op.type] {
override def bindingMissing(a: Vector[T], b: T):Unit = {
var i = 0
while(i < a.length) {
a(i) = op(a(i), b)
i += 1
}
}
}
@expand
implicit def v_s_UpdateOp[@expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod, OpPow) Op <: OpType, T:Field:ClassTag]
(implicit @expand.sequence[Op]({f.+(_,_)}, {f.-(_, _)}, {f.*(_, _)}, {f.*(_, _)}, {f./(_, _)}, {(a,b) => b}, {f.%(_,_)}, {f.pow(_,_)})
op: Op.Impl2[T, T, T]):BinaryUpdateRegistry[Vector[T], T, Op.type] = new BinaryUpdateRegistry[Vector[T], T, Op.type] {
val f = implicitly[Field[T]]
override def bindingMissing(a: Vector[T], b: T):Unit = {
var i = 0
while(i < a.length) {
a(i) = op(a(i), b)
i += 1
}
}
}
@expand
@expand.valify
implicit def canDot_V_V[@expand.args(Int, Long, Float, Double) T](implicit @expand.sequence[T](0, 0l, 0.0f, 0.0) zero: T): BinaryRegistry[Vector[T], Vector[T], breeze.linalg.operators.OpMulInner.type, T] = {
new BinaryRegistry[Vector[T], Vector[T], breeze.linalg.operators.OpMulInner.type, T] {
override def bindingMissing(a: Vector[T], b: Vector[T]):T = {
require(b.length == a.length, "Vectors must be the same length!")
if (a.activeSize > b.activeSize) {
bindingMissing(b, a)
} else {
var result : T = zero
for( (k,v) <- a.activeIterator) {
result += v * b(k)
}
result
}
}
}
}
implicit def canDot_V_V[T:ClassTag:Semiring]: BinaryRegistry[Vector[T], Vector[T], breeze.linalg.operators.OpMulInner.type, T] = {
new BinaryRegistry[Vector[T], Vector[T], breeze.linalg.operators.OpMulInner.type, T] {
val s = implicitly[Semiring[T]]
override def bindingMissing(a: Vector[T], b: Vector[T]):T = {
require(b.length == a.length, "Vectors must be the same length!")
if (a.activeSize > b.activeSize) {
bindingMissing(b, a)
} else {
var result : T = s.zero
for( (k,v) <- a.activeIterator) {
result = s.+(result,s.*(v, b(k)))
}
result
}
}
}
}
@expand
@expand.valify
implicit def axpy[@expand.args(Int, Double, Float, Long) V]: TernaryUpdateRegistry[Vector[V], V, Vector[V], scaleAdd.type] = {
new TernaryUpdateRegistry[Vector[V], V, Vector[V], scaleAdd.type] {
override def bindingMissing(a: Vector[V], s: V, b: Vector[V]) {
require(b.length == a.length, "Vectors must be the same length!")
if(s == 0) return
var i = 0
for( (k, v) <- b.activeIterator) {
a(k) += s * v
i += 1
}
}
}
}
implicit def axpy[V:Semiring:ClassTag]: TernaryUpdateRegistry[Vector[V], V, Vector[V], scaleAdd.type] = {
new TernaryUpdateRegistry[Vector[V], V, Vector[V], scaleAdd.type] {
val sr = implicitly[Semiring[V]]
override def bindingMissing(a: Vector[V], s: V, b: Vector[V]) {
require(b.length == a.length, "Vectors must be the same length!")
if(s == 0) return
var i = 0
for( (k, v) <- b.activeIterator) {
a(k) = sr.+(a(k), sr.*(s, v))
i += 1
}
}
}
}
@expand
@expand.valify
implicit def zipValuesImpl_V_V[@expand.args(Int, Double, Float, Long) T]: BinaryRegistry[Vector[T], Vector[T], zipValues.type, ZippedValues[T, T]] = {
new BinaryRegistry[Vector[T], Vector[T], zipValues.type, ZippedValues[T, T]] {
protected override def bindingMissing(a: Vector[T], b: Vector[T]):ZippedValues[T, T] = {
require(a.length == b.length, "vector dimension mismatch")
ZippedVectorValues(a,b)
}
}
}
implicit def zipValuesSubclass[Vec1, Vec2, T, U](implicit view1: Vec1<:<Vector[T],
view2: Vec2 <:< Vector[U],
op: zipValues.Impl2[Vector[T], Vector[U], ZippedValues[T, U]]) = {
op.asInstanceOf[zipValues.Impl2[Vec1, Vec2, ZippedValues[T, U]]]
}
case class ZippedVectorValues[@spec(Double, Int, Float, Long) T,
@spec(Double, Int, Float, Long) U](a: Vector[T], b: Vector[U]) extends ZippedValues[T, U] {
def foreach(f: (T, U) => Unit): Unit = {
var i = 0
while(i < a.length) {
f(a(i), b(i))
i += 1
}
}
}
implicit def vAddIntoField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpAdd.InPlaceImpl2[Vector[T], Vector[T]] = {
new OpAdd.InPlaceImpl2[Vector[T], Vector[T]] {
override def apply(v: Vector[T], v2: Vector[T]) = {
for(i <- 0 until v.length) v(i) = field.+(v(i), v2(i))
}
}
}
implicit def vSubIntoField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpSub.InPlaceImpl2[Vector[T], Vector[T]] = {
new OpSub.InPlaceImpl2[Vector[T], Vector[T]] {
override def apply(v: Vector[T], v2: Vector[T]) = {
for(i <- 0 until v.length) v(i) = field.-(v(i), v2(i))
}
}
}
implicit def vMulIntoField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpMulScalar.InPlaceImpl2[Vector[T], Vector[T]] = {
new OpMulScalar.InPlaceImpl2[Vector[T], Vector[T]] {
override def apply(v: Vector[T], v2: Vector[T]) = {
for(i <- 0 until v.length) v(i) = field.*(v(i), v2(i))
}
}
}
implicit def vDivIntoField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpDiv.InPlaceImpl2[Vector[T], Vector[T]] = {
new OpDiv.InPlaceImpl2[Vector[T], Vector[T]] {
override def apply(v: Vector[T], v2: Vector[T]) = {
for(i <- 0 until v.length) v(i) = field./(v(i), v2(i))
}
}
}
implicit def vPowInto[T](implicit pow: OpPow.Impl2[T, T, T], zero: Zero[T], ct: ClassTag[T]):OpPow.InPlaceImpl2[Vector[T], Vector[T]] = {
new OpPow.InPlaceImpl2[Vector[T], Vector[T]] {
override def apply(v: Vector[T], v2: Vector[T]) = {
for(i <- 0 until v.length) v(i) = pow(v(i), v2(i))
}
}
}
implicit def vAddIntoSField[T](implicit field: Semiring[T], zero: Zero[T], ct: ClassTag[T]):OpAdd.InPlaceImpl2[Vector[T], T] = {
new OpAdd.InPlaceImpl2[Vector[T], T] {
override def apply(v: Vector[T], v2: T) = {
for(i <- 0 until v.length) v(i) = field.+(v(i), v2)
}
}
}
implicit def vAddSField[T](implicit field: Semiring[T], zero: Zero[T], ct: ClassTag[T]):OpAdd.Impl2[Vector[T], T, Vector[T]] = {
binaryOpFromUpdateOp(implicitly[CanCopy[Vector[T]]], vAddIntoSField, ct)
}
implicit def vSubSField[T](implicit field: Ring[T], zero: Zero[T], ct: ClassTag[T]):OpSub.Impl2[Vector[T], T, Vector[T]] = binaryOpFromUpdateOp(implicitly[CanCopy[Vector[T]]], vSubIntoSField, ct)
implicit def vMulScalarSField[T](implicit field: Semiring[T], zero: Zero[T], ct: ClassTag[T]):OpMulScalar.Impl2[Vector[T], T, Vector[T]] = binaryOpFromUpdateOp(implicitly[CanCopy[Vector[T]]], vMulScalarIntoSField, ct)
implicit def vDivSField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpDiv.Impl2[Vector[T], T, Vector[T]] = binaryOpFromUpdateOp(implicitly[CanCopy[Vector[T]]], vDivIntoSField, ct)
implicit def vPowS[T](implicit pow: OpPow.Impl2[T, T, T], zero: Zero[T], ct: ClassTag[T]):OpPow.Impl2[Vector[T], T, Vector[T]] = binaryOpFromUpdateOp(implicitly[CanCopy[Vector[T]]], vPowIntoS, ct)
implicit def vSubIntoSField[T](implicit field: Ring[T], zero: Zero[T], ct: ClassTag[T]):OpSub.InPlaceImpl2[Vector[T], T] = {
new OpSub.InPlaceImpl2[Vector[T], T] {
override def apply(v: Vector[T], v2: T) = {
for(i <- 0 until v.length) v(i) = field.-(v(i), v2)
}
}
}
implicit def vMulScalarIntoSField[T](implicit field: Semiring[T], zero: Zero[T], ct: ClassTag[T]):OpMulScalar.InPlaceImpl2[Vector[T], T] = {
new OpMulScalar.InPlaceImpl2[Vector[T], T] {
override def apply(v: Vector[T], v2: T) = {
for(i <- 0 until v.length) v(i) = field.*(v(i), v2)
}
}
}
implicit def vDivIntoSField[T](implicit field: Field[T], zero: Zero[T], ct: ClassTag[T]):OpDiv.InPlaceImpl2[Vector[T], T] = {
new OpDiv.InPlaceImpl2[Vector[T], T] {
override def apply(v: Vector[T], v2: T) = {
for(i <- 0 until v.length) v(i) = field./(v(i), v2)
}
}
}
implicit def vPowIntoS[T](implicit pow: OpPow.Impl2[T, T, T], zero: Zero[T], ct: ClassTag[T]):OpPow.InPlaceImpl2[Vector[T], T] = {
new OpPow.InPlaceImpl2[Vector[T], T] {
override def apply(v: Vector[T], v2: T) = {
for(i <- 0 until v.length) v(i) = pow(v(i), v2)
}
}
}
implicit def dotField[T](implicit field: Semiring[T]):OpMulInner.Impl2[Vector[T], Vector[T], T] = {
new OpMulInner.Impl2[Vector[T], Vector[T], T] {
override def apply(v: Vector[T], v2: Vector[T]): T = {
var acc = field.zero
for(i <- 0 until v.length) {
acc = field.+(acc, field.*(v(i), v2(i)))
}
acc
}
}
}
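  // Lifts an in-place update op (a op= b) into a pure binary op (a op b) by copying the left
  // operand and mutating the copy; the scalar variants above (vAddSField, vSubSField, ...) are built this way.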
def binaryOpFromUpdateOp[Op<:OpType, V, Other]
(implicit copy: CanCopy[Vector[V]], op: UFunc.InPlaceImpl2[Op, Vector[V], Other], man: ClassTag[V]):
UFunc.UImpl2[Op, Vector[V], Other, Vector[V]] = {
new UFunc.UImpl2[Op, Vector[V], Other, Vector[V]] {
override def apply(a : Vector[V], b : Other): Vector[V] = {
val c = copy(a)
op(c, b)
c
}
}
}
implicit def implOpSet_V_V_InPlace[V]: OpSet.InPlaceImpl2[Vector[V], Vector[V]] = {
new OpSet.InPlaceImpl2[Vector[V], Vector[V]] {
def apply(a: Vector[V], b: Vector[V]): Unit = {
require(b.length == a.length, "Vectors must be the same length!")
for (i <- 0 until a.length) {
a(i) = b(i)
}
}
}
}
implicit def implOpSet_V_S_InPlace[V]: OpSet.InPlaceImpl2[Vector[V], V] = {
new OpSet.InPlaceImpl2[Vector[V], V] {
def apply(a: Vector[V], b: V): Unit = {
for (i <- 0 until a.length) {
a(i) = b
}
}
}
}
implicit def canGaxpy[V:Semiring]: scaleAdd.InPlaceImpl3[Vector[V], V, Vector[V]] =
new scaleAdd.InPlaceImpl3[Vector[V], V, Vector[V]] {
val ring = implicitly[Semiring[V]]
def apply(a: Vector[V], s: V, b: Vector[V]): Unit = {
require(b.length == a.length, "Vectors must be the same length!")
for (i <- 0 until a.length) {
a(i) = ring.+(a(i), ring.*(s, b(i)))
}
}
}
}
/**
 * Trait that can be mixed into companion objects to enable utility methods for creating vectors.
* @tparam Vec
*/
trait VectorConstructors[Vec[T]<:Vector[T]] {
/**
   * Creates a zero Vector of the given size.
* @param size
* @tparam V
* @return
*/
def zeros[V:ClassTag:Zero](size: Int): Vec[V]
/**
* Creates a vector with the specified elements
* @param values
* @tparam V
* @return
*/
def apply[@spec(Double, Int, Float, Long) V](values: Array[V]): Vec[V]
/**
* Creates a vector with the specified elements
* @param values
* @tparam V
* @return
*/
def apply[V:ClassTag](values: V*): Vec[V] = {
// manual specialization so that we create the right DenseVector specialization... @specialized doesn't work here
val man = implicitly[ClassTag[V]]
if(man == manifest[Double]) apply(values.toArray.asInstanceOf[Array[Double]]).asInstanceOf[Vec[V]]
else if (man == manifest[Float]) apply(values.toArray.asInstanceOf[Array[Float]]).asInstanceOf[Vec[V]]
else if (man == manifest[Int]) apply(values.toArray.asInstanceOf[Array[Int]]).asInstanceOf[Vec[V]]
else apply(values.toArray)
// apply(values.toArray)
}
//ToDo 2: I'm not sure fill/tabulate are really useful outside of the context of a DenseVector?
/**
* Analogous to Array.fill
* @param size
* @param v
* @tparam V
* @return
*/
def fill[@spec(Double, Int, Float, Long) V:ClassTag](size: Int)(v: =>V): Vec[V] = {
apply(Array.fill(size)(v))
}
/**
* Analogous to Array.tabulate
* @param size
* @param f
* @tparam V
* @return
*/
def tabulate[@spec(Double, Int, Float, Long) V:ClassTag](size: Int)(f: Int=>V): Vec[V] = {
apply(Array.tabulate(size)(f))
}
/**
* Analogous to Array.tabulate, but taking a scala.Range to iterate over, instead of an index.
* @param f
* @tparam V
* @return
*/
def tabulate[@spec(Double, Int, Float, Long) V:ClassTag](range: Range)(f: Int=>V):Vec[V]= {
val b = ArrayBuilder.make[V]()
b.sizeHint(range.length)
var i = 0
while (i < range.length) {
b += f( range(i) )
i += 1
}
apply(b.result )
}
implicit def canCreateZeros[V:ClassTag:Zero]: CanCreateZeros[Vec[V], Int] =
new CanCreateZeros[Vec[V], Int] {
def apply(d: Int): Vec[V] = {
zeros[V](d)
}
}
implicit def canTabulate[V:ClassTag:Zero]: CanTabulate[Int, Vec[V], V] = new CanTabulate[Int,Vec[V],V] {
def apply(d: Int, f: (Int) => V): Vec[V] = tabulate(d)(f)
}
/**
   * Creates a Vector of random numbers drawn from `rand` (uniform on (0,1) by default).
* @param size
* @param rand
* @return
*/
def rand[T:ClassTag](size: Int, rand: Rand[T] = Rand.uniform): Vec[T] = {
// Array#fill is slow.
val arr = new Array[T](size)
var i = 0
while(i < arr.length) {
arr(i) = rand.draw()
i += 1
}
apply(arr)
}
def range(start:Int, end: Int): Vec[Int] = range(start,end,1)
def range(start:Int, end: Int, step: Int): Vec[Int] = apply[Int](Array.range(start,end,step))
def rangeF(start: Float, end: Float, step: Float = 1.0f): Vec[Float] = {
import spire.implicits.cfor
require(end > start)
require(end-start > step)
val size: Int = math.floor((end - start)/step).toInt
val data = new Array[Float](size)
cfor(0)(i => i < size, i => i+1)(i => {
data(i) = (start+i*step)
})
apply(data)
}
def rangeD(start: Double, end: Double, step: Double = 1.0): Vec[Double] = {
import spire.implicits.cfor
require(end > start)
require(end-start > step)
val size: Int = math.floor((end - start)/step).toInt
val data = new Array[Double](size)
cfor(0)(i => i < size, i => i+1)(i => {
data(i) = (start+i*step)
})
apply(data)
}
}
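// Illustrative usage sketch (not part of the original source): a companion object that mixes in
// VectorConstructors (for example breeze.linalg.DenseVector's companion) gains constructors such as
//   DenseVector.zeros[Double](3)          // DenseVector(0.0, 0.0, 0.0)
//   DenseVector.tabulate(4)(i => i * i)   // DenseVector(0, 1, 4, 9)
//   DenseVector.rangeD(0.0, 1.0, 0.25)    // DenseVector(0.0, 0.25, 0.5, 0.75)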
trait StorageVector[V] extends Vector[V] with Storage[V]
|
claydonkey/breeze
|
math/src/main/scala/breeze/linalg/Vector.scala
|
Scala
|
apache-2.0
| 32,964 |
package models.jbehave
import java.io.File
import org.clapper.classutil.ClassFinder
import play.api.libs.json.{Json, JsValue}
import org.jbehave.core.steps.{StepCandidate, Steps}
import org.jbehave.core.configuration.MostUsefulConfiguration
import scala.collection.JavaConversions._
import com.technologyconversations.bdd.steps.util.{BddOptionParam, BddParam}
import org.jbehave.core.annotations.{Then, When, Given}
import groovy.lang.GroovyClassLoader
class JBehaveSteps(stepsDir: String = "steps", composites: List[String] = List.empty[String]) {
def stepsToJson: JsValue = {
val stepsMap = stepsCandidates.map { step =>
val stepString = step.toString
val stepType = stepString.split(" ")(0)
Map(
"type" -> stepType,
"step" -> stepString.replaceFirst("GIVEN", "Given").replaceFirst("WHEN", "When").replaceFirst("THEN", "Then")
)
}
Json.toJson(Map("steps" -> Json.toJson(stepsMap)))
}
def classesToJson: JsValue = {
val classesMap = classes.map { className =>
Map(
"name" -> Json.toJson(className.substring(className.lastIndexOf(".") + 1)),
"fullName" -> Json.toJson(className),
"params" -> Json.toJson(classParamsMap(className))
)
}
Json.toJson(Map("classes" -> Json.toJson(classesMap)))
}
private[jbehave] def stepsJars = {
val classPathJars = ClassFinder.classpath.filter(_.getName.toLowerCase.contains("steps")).toList
val stepsJars = new java.io.File(stepsDir).listFiles.toList
classPathJars ::: stepsJars
}
private[jbehave] def classes = {
val jarClasses = ClassFinder(stepsJars).getClasses()
.filter(classInfo => classInfo.name.toLowerCase.contains("steps"))
.filter(classInfo => hasSteps(classInfo.name))
.map(_.name)
.toList.distinct
val compositeClasses = composites.map { composite =>
if (composite.endsWith(".java")) {
composite.replace(".java", "").replace(File.separator, ".")
} else {
composite
}
}
jarClasses ::: compositeClasses
}
private[jbehave] def hasSteps(className: String): Boolean = {
for(method <- Class.forName(className).getMethods) {
if (method.getAnnotation(classOf[Given]) != null ||
method.getAnnotation(classOf[When]) != null ||
method.getAnnotation(classOf[Then]) != null) return true
}
false
}
private[jbehave] def classParams(className: String): List[BddParam] = {
Class.forName(className).getMethods
.filter(_.getAnnotation(classOf[BddParam]) != null)
.map(_.getAnnotation(classOf[BddParam]))
.toList
}
private[jbehave] def classParamsMap(className: String): List[Map[String, JsValue]] = {
classParams(className).map(param => Map(
"key" -> Json.toJson(param.value()),
"value" -> Json.toJson(System.getProperty(className + "." + param.value(), "")),
"description" -> Json.toJson(param.description()),
"options" -> Json.toJson(optionsToJson(param.options().toList))
))
}
private[jbehave] def optionsToJson(options: List[BddOptionParam]): List[Map[String, JsValue]] = {
options.map(option => Map(
"text" -> Json.toJson(option.text()),
"value" -> Json.toJson(option.value()),
"isSelected" -> Json.toJson(option.isSelected())
))
}
private[jbehave] def steps = {
val config = new MostUsefulConfiguration()
classes.map { className =>
if (className.endsWith(".groovy")) {
val instance = new GroovyClassLoader().parseClass(new File(className)).newInstance
new Steps(config, instance)
} else {
val instance = Class.forName(className).newInstance()
new Steps(config, instance)
}
}
}
private[jbehave] def stepsCandidates = {
def stepsCandidates(steps: List[Steps], candidates: List[StepCandidate]): List[StepCandidate] = {
if (steps.isEmpty) {
candidates
} else {
stepsCandidates(steps.tail, candidates ::: steps.head.listCandidates().toList)
}
}
stepsCandidates(steps, List()).sortWith(_.toString < _.toString)
}
}
object JBehaveSteps {
def apply(): JBehaveSteps = new JBehaveSteps
def apply(stepsDir: String = "steps", composites: List[String] = List.empty[String]): JBehaveSteps = new JBehaveSteps(stepsDir, composites)
}
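// Illustrative usage sketch (not part of the original source); the directory and composite path are hypothetical:
//   val steps = JBehaveSteps(stepsDir = "steps", composites = List("composites/WebSteps.java"))
//   val json = steps.stepsToJson  // {"steps":[{"type":"GIVEN","step":"Given ..."}, ...]}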
|
TechnologyConversations/TechnologyConversationsBdd
|
app/models/jbehave/JBehaveSteps.scala
|
Scala
|
apache-2.0
| 4,327 |
/*
* The MIT License
*
* Copyright 2014 Kamnev Georgiy ([email protected]).
*
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software
 * and associated documentation files (hereinafter the "Software"), to deal in the Software
 * without restriction, including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
 * to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and these conditions shall be included in all copies
 * or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
 * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
 * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
 * FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
*/
package xyz.cofe.odtexport.odt.xtest
import xyz.cofe.odtexport.xtest.XTest;
import org.w3c.dom.{Node => XMLNode};
import xyz.cofe.odtexport.odt.style.Property;
import xyz.cofe.odtexport.odt.style.Styles;
/**
 * A set of tests for checking the properties of a "style".
 * @param styles Styles collection (evaluated lazily)
 * @param name Property name path: the first element names the property, the remaining elements name nested children
 */
class StyleProperty( styles: =>Styles, val name:String* ) {
private def getPropertyByName( name:String, props:List[Property] ):List[Property] = {
var res : List[Property] = List();
for( p <- props ){
if( p.name.equals(name) )res = p :: res;
}
res
}
private def getChildrenByName( name:String, props:List[Property] ):List[Property] = {
var res : List[Property] = List();
for( p <- props ){
for( pc <- p.children ){
if( pc.name.equals(name) )res = pc :: res;
}
}
res
}
private def printProps( props:List[Property] ):Unit = {
for( p <- props ){
println( p.toString );
}
}
private def getProperty( props:List[Property] ):List[Property] = {
if( name.length==0 ){
return null;
}
var current : List[Property] = props;
current = getPropertyByName( name(0), current );
for( i <- 1 until name.length ){
current = getChildrenByName( name(i), current );
}
current
}
private def getPropertyValue( props:List[Property] ) : String = {
val v = getProperty( props );
if( v!=null ){
if( v.length>0 ){
return v(0).value;
}
}
null
}
private def getPropertyValue( node: XMLNode ) : String = {
if( node==null )return null;
val styleName = styles.getStyleNameOf(node);
if( styleName != null ) {
val props = styles.getStyleProperties(styleName);
if( props != null ) {
return getPropertyValue( props );
}
}
null
}
    /**
     * Tests whether the value of the specified style property equals the given value.
     * @param value The value to look for
     * @return A test that succeeds when the property value matches the given value
     */
def == (value:String) : XTest = {
new XTest() {
override def test( node: XMLNode ):Boolean = {
val stylePropValue = getPropertyValue( node );
/*print("SP");
for( n <- name )print( "["+n+"]" );
println( "="+stylePropValue+" , need="+value );*/
return if( stylePropValue==null ){
value==null;
}else{
if( value==null )
false
else
stylePropValue.equals(value);
}
}
}
}
    /**
     * Checks that the specified property is present.
     * @return A test for the presence of the specified property
     */
def exists : XTest = {
new XTest() {
override def test( node: XMLNode ):Boolean = {
val styleName = styles.getStyleNameOf(node);
if( styleName == null )return false;
val props = styles.getStyleProperties(styleName);
if( props==null )return false;
val nprops = getProperty( props );
                return nprops != null && nprops.size > 0;
}
}
}
}
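// Illustrative usage sketch (not part of the original source); the property names are hypothetical ODT
// style keys and `styles` is assumed to be an in-scope Styles instance:
//   val isBold: XTest  = new StyleProperty(styles, "text-properties", "font-weight") == "bold"
//   val hasSize: XTest = new StyleProperty(styles, "text-properties", "font-size").exists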
|
gochaorg/odt-export
|
src/main/scala/xyz/cofe/odtexport/odt/xtest/StyleProperty.scala
|
Scala
|
mit
| 5,979 |
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.kayenta.judge
import java.util.Collections
import com.netflix.kayenta.judge.utils.{MapUtils, RandomUtils}
import org.apache.commons.math3.stat.StatUtils
import org.scalatest.FunSuite
import org.scalatest.Matchers._
class UtilsSuite extends FunSuite {
test("RandomUtils List of Random Samples (Zero Mean)"){
val seed = 123456789
RandomUtils.init(seed)
val randomSample: Array[Double] = RandomUtils.normal(mean = 0.0, stdev = 1.0, numSamples = 500)
val mean = StatUtils.mean(randomSample)
val variance = StatUtils.variance(randomSample)
assert(mean === (0.0 +- 0.2))
assert(variance === (1.0 +- 0.2))
assert(randomSample.length === 500)
}
test("RandomUtils List of Random Samples (Non-zero Mean)"){
val seed = 123456789
RandomUtils.init(seed)
val randomSample: Array[Double] = RandomUtils.normal(mean = 10.0, stdev = 3.0, numSamples = 1000)
val mean = StatUtils.mean(randomSample)
val stdev = math.sqrt(StatUtils.variance(randomSample))
assert(mean === (10.0 +- 0.2))
assert(stdev === (3.0 +- 0.2))
assert(randomSample.length === 1000)
}
test("RandomUtils Random Sample (Zero Variance)"){
val randomSample = RandomUtils.normal(mean = 0.0, stdev = 0.0, numSamples = 1)
assert(randomSample.head === 0.0)
}
test("MapUtils Get Path") {
val map = Map(
"foo" -> Map(
"bar" -> 42,
"baz" -> "abc"
),
"list" -> List(
Map("a" -> "1"),
Map("b" -> "2"),
Map("a" -> "3")
)
)
assert(MapUtils.get(map) === Some(map))
assert(MapUtils.get(42) === Some(42))
assert(MapUtils.get(map, "not_found") === None)
assert(MapUtils.get(map, "foo", "not_found") === None)
assert(MapUtils.get(map, "foo", "bar") === Some(42))
assert(MapUtils.get(map, "foo", "baz") === Some("abc"))
assert(MapUtils.get(map, "foo", "bar", "baz") === None)
assert(MapUtils.get(map, "list", "a") === Some(List("1", "3")))
assert(MapUtils.get(map, "list", "b") === Some(List("2")))
assert(MapUtils.get(map, "list", "c") === None)
}
test("MapUtils Get of Null") {
assert(MapUtils.get(null, "this", "and", "that") === None)
}
test("MapUtils Get of Java Map") {
val foo: java.util.Map[String, Object] = new java.util.HashMap()
foo.put("bar", new Integer(42))
foo.put("baz", "abc")
val list: java.util.List[Object] = new java.util.ArrayList()
list.add(Collections.singletonMap("a", "1"))
list.add(Collections.singletonMap("b", "2"))
list.add(Collections.singletonMap("a", "3"))
val map: java.util.Map[String, Object] = new java.util.HashMap()
map.put("foo", foo)
map.put("list", list)
assert(MapUtils.get(map) === Some(map))
assert(MapUtils.get(42) === Some(42))
assert(MapUtils.get(map, "not_found") === None)
assert(MapUtils.get(map, "foo", "not_found") === None)
assert(MapUtils.get(map, "foo", "bar") === Some(42))
assert(MapUtils.get(map, "foo", "baz") === Some("abc"))
assert(MapUtils.get(map, "foo", "bar", "baz") === None)
assert(MapUtils.get(map, "list", "a") === Some(List("1", "3")))
assert(MapUtils.get(map, "list", "b") === Some(List("2")))
assert(MapUtils.get(map, "list", "c") === None)
}
test("MapUtils get of a double works when it's an Integer") {
val foo: java.util.Map[String, Object] = new java.util.HashMap()
foo.put("bar", new Integer(42))
assert(MapUtils.getAsDoubleWithDefault(1.0, foo, "bar") === 42.0);
}
}
|
spinnaker/kayenta
|
kayenta-judge/src/test/scala/com/netflix/kayenta/judge/UtilsSuite.scala
|
Scala
|
apache-2.0
| 4,118 |
package foo.bar
import org.junit._
import org.junit.Assert._
class Basic
{
val foo = new Foo
@Test
def checkBind(): Unit =
{
try { assertTrue( foo.eval("3") == 3) }
		catch { case e: Throwable => e.printStackTrace(); throw e }
}
}
|
jamesward/xsbt
|
sbt/src/sbt-test/compiler-project/run-test/src/test/scala/Basic.scala
|
Scala
|
bsd-3-clause
| 226 |
package com.twitter.finagle.http
import com.twitter.conversions.time._
import org.jboss.netty.handler.codec.http.DefaultCookie
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import scala.collection.JavaConverters._
@RunWith(classOf[JUnitRunner])
class CookieTest extends FunSuite {
test("mutate underlying") {
val cookie = new Cookie("name", "value")
cookie.comment = "hello"
cookie.commentUrl = "hello.com"
cookie.domain = ".twitter.com"
cookie.maxAge = 100.seconds
cookie.path = "/1/statuses/show"
cookie.ports = Seq(1, 2, 3)
cookie.value = "value2"
cookie.version = 1
cookie.httpOnly = true
cookie.isDiscard = false
cookie.isSecure = true
assert(cookie.name == "name")
assert(cookie.comment == "hello")
assert(cookie.commentUrl == "hello.com")
assert(cookie.domain == ".twitter.com")
assert(cookie.maxAge == 100.seconds)
assert(cookie.path == "/1/statuses/show")
assert(cookie.ports == Set(1, 2, 3))
assert(cookie.value == "value2")
assert(cookie.version == 1)
assert(cookie.httpOnly == true)
assert(cookie.isDiscard == false)
assert(cookie.isSecure == true)
}
test("constructor sets correct params") {
val cookie = new Cookie(
name = "name",
value = "value",
domain = Some("domain"),
path = Some("path"),
maxAge = Some(99.seconds),
secure = true,
httpOnly = false
)
assert(cookie.name == "name")
assert(cookie.value == "value")
assert(cookie.domain == "domain")
assert(cookie.path == "path")
assert(cookie.maxAge == 99.seconds)
assert(cookie.secure == true)
assert(cookie.httpOnly == false)
}
test("equals: not equal if object is different") {
val foo = "hello"
val cookie = new Cookie("name", "value")
assert(!cookie.equals(foo))
}
test("equals: not equal if names are not equal") {
val c1 = new Cookie(name = "name1", "value")
val c2 = new Cookie(name = "name2", "value")
assert(!c1.equals(c2))
}
test("equals: Not equal if paths are not equal") {
// first path is null
val c1 = new Cookie("name", "value", path = None)
val c2 = new Cookie("name", "value", path = Some("path"))
assert(!c1.equals(c2))
// second path is null
val c3 = new Cookie("name", "value", path = Some("path"))
val c4 = new Cookie("name", "value", path = None)
assert(!c3.equals(c4))
// both paths exist
val c5 = new Cookie("name", "value", path = Some("path1"))
val c6 = new Cookie("name", "value", path = Some("path2"))
assert(!c5.equals(c6))
}
test("equals: Not equal if domains are not equal") {
// first domain is null
val c1 = new Cookie("name", "value", path = Some("path"), domain = None)
val c2 = new Cookie("name", "value", path = Some("path"), domain = Some("domain"))
assert(!c1.equals(c2))
// second domain is null
val c3 = new Cookie("name", "value", path = Some("path"), domain = Some("domain"))
val c4 = new Cookie("name", "value", path = Some("path"), domain = None)
assert(!c3.equals(c4))
// both domains exist
val c5 = new Cookie("name", "value", path = Some("path"), domain = Some("domain1"))
val c6 = new Cookie("name", "value", path = Some("path"), domain = Some("domain2"))
assert(!c5.equals(c6))
}
test("equals: equal if names/paths/domains are equal") {
// path and domain both null
val c1 = new Cookie("name", "value", path = None, domain = None)
val c2 = new Cookie("name", "value", path = None, domain = None)
assert(c1.equals(c2))
// domain null
val c3 = new Cookie("name", "value", path = Some("path"), domain = None)
val c4 = new Cookie("name", "value", path = Some("path"), domain = None)
assert(c3.equals(c4))
// path and domain non-null
val c5 = new Cookie("name", "value", path = Some("path"), domain = Some("domain"))
val c6 = new Cookie("name", "value", path = Some("path"), domain = Some("domain"))
assert(c5.equals(c6))
}
test("default are the same as DefaultCookie default") {
val cookie = new Cookie("name", "value")
val nettyCookie = new DefaultCookie("name", "value")
assert(cookie.name == nettyCookie.getName)
assert(cookie.value == nettyCookie.getValue)
assert(cookie.domain == nettyCookie.getDomain)
assert(cookie.path == nettyCookie.getPath)
assert(cookie.comment == nettyCookie.getComment)
assert(cookie.commentUrl == nettyCookie.getCommentUrl)
assert(cookie.discard == nettyCookie.isDiscard)
assert(cookie.ports == nettyCookie.getPorts.asScala.toSet)
assert(cookie.maxAge == nettyCookie.getMaxAge.seconds)
assert(cookie.version == nettyCookie.getVersion)
assert(cookie.secure == nettyCookie.isSecure)
assert(cookie.httpOnly == nettyCookie.isHttpOnly)
}
test("Throws exception if name is empty") {
intercept[IllegalArgumentException] {
new Cookie(" ", "value")
}
}
test("Throws exception if name starts with $") {
intercept[IllegalArgumentException] {
new Cookie("$dolladollabillz", "value")
}
}
test("Throws exception if name contains illegal char") {
    Set('\t', '\n', '\u000b', '\f', '\r', ' ', ',', ';', '=').foreach { c =>
intercept[IllegalArgumentException] {
new Cookie(s"hello${c}goodbye", "value")
}
}
}
test("name is trimmed") {
val cookie = new Cookie(" name ", "value")
assert(cookie.name == "name")
}
test("ports are validated") {
intercept[IllegalArgumentException] {
val cookie = new Cookie("name", "value")
cookie.ports = Seq(-1, 0, 5) // cannot be negative
}
val cookie = new Cookie("name", "value")
intercept[IllegalArgumentException] {
cookie.ports = Seq(-1, 0, 5) // cannot be negative
}
}
  private[this] val IllegalFieldChars = Set('\n', '\u000b', '\f', '\r', ';')
test("path trimmed and validated") {
val cookie = new Cookie("name", "value", path = Some(" /path"))
assert(cookie.path == "/path")
IllegalFieldChars.foreach { c =>
intercept[IllegalArgumentException] {
cookie.path = s"hello${c}goodbye"
}
intercept[IllegalArgumentException] {
new Cookie("name", "value", path = Some(s"hello${c}goodbye"))
}
}
}
test("domain trimmed and validated") {
val cookie = new Cookie("name", "value", domain = Some(" domain"))
assert(cookie.domain == "domain")
IllegalFieldChars.foreach { c =>
intercept[IllegalArgumentException] {
cookie.domain = s"hello${c}goodbye"
}
intercept[IllegalArgumentException] {
new Cookie("name", "value", domain = Some(s"hello${c}goodbye"))
}
}
}
test("comment trimmed and validated") {
val cookie = new Cookie("name", "value")
cookie.comment = " comment "
assert(cookie.comment == "comment")
IllegalFieldChars.foreach { c =>
intercept[IllegalArgumentException] {
cookie.path = s"hello${c}goodbye"
}
intercept[IllegalArgumentException] {
val cookie = new Cookie("name", "value")
cookie.comment = s"hello${c}goodbye"
}
}
}
test("commentUrl trimmed and validated") {
val cookie = new Cookie("name", "value")
cookie.commentUrl = " commentUrl "
assert(cookie.commentUrl == "commentUrl")
IllegalFieldChars.foreach { c =>
intercept[IllegalArgumentException] {
cookie.path = s"hello${c}goodbye"
}
intercept[IllegalArgumentException] {
val cookie = new Cookie("name", "value")
cookie.commentUrl = s"hello${c}goodbye"
}
}
}
test(
"methods that copy existing params and create a new Cookie with an additional configured param "
) {
val cookie = new Cookie("name", "value")
.domain(Some("domain"))
.maxAge(Some(99.seconds))
.httpOnly(true)
.secure(true)
assert(cookie.name == "name")
assert(cookie.value == "value")
assert(cookie.domain == "domain")
assert(cookie.maxAge == 99.seconds)
assert(cookie.httpOnly == true)
assert(cookie.secure == true)
}
}
|
mkhq/finagle
|
finagle-base-http/src/test/scala/com/twitter/finagle/http/CookieTest.scala
|
Scala
|
apache-2.0
| 8,196 |
package org.jetbrains.plugins.scala
package lang.lexer
import com.intellij.openapi.editor.ex.util.{LexerEditorHighlighter, SegmentArrayWithData}
import com.intellij.openapi.editor.impl.EditorImpl
import com.intellij.testFramework.fixtures.CodeInsightTestFixture
import org.jetbrains.plugins.scala.base.EditorActionTestBase
import org.junit.Assert.assertEquals
/**
* User: Dmitry.Naydanov
* Date: 29.07.14.
*/
class IncrementalLexerHighlightingTest extends EditorActionTestBase {
import CodeInsightTestFixture.CARET_MARKER
private def segments() = {
val editor = myFixture.getEditor match {
case impl: EditorImpl => impl
}
val highlighter = editor.getHighlighter match {
case impl: LexerEditorHighlighter => impl
}
highlighter.getSegments
}
private def doTest(text: String, typed: Char*): Unit = {
configureByText(text)
typed.foreach {
      case '\r' => performBackspaceAction()
      case '\n' => performEnterAction()
case char => performTypingAction(char)
}
val actualSegments = segments()
myFixture.configureByText(myFileType, myFixture.getFile.getText)
val expectedSegments = segments()
assertEquals(actualSegments.getSegmentCount, expectedSegments.getSegmentCount)
import IncrementalLexerHighlightingTest.asTuples
asTuples(actualSegments)
.zip(asTuples(expectedSegments))
.foreach {
case (actual, expected) => assertEquals(actual, expected)
}
}
def testSimple(): Unit = {
val text =
s"""
|object dummy {
| val a = 1
| val b = "ololo"$CARET_MARKER
| val c = s"ololo$$a"
|}
""".stripMargin
doTest(text, ';')
}
def testScl7330(): Unit = {
val text = "object ololo {\\n" + s"(escritorTexto$CARET_MARKER)\\n" +
"iniBloque(s\\"\\"\\"filename=\\"$fich\\"\\"\\"\\")\\n" + "}"
doTest(text, ',', ' ', '\\r', '\\r')
}
def testNestedStrings(): Unit = {
val text =
s"""
|object ololo {
| val x = s"aaa $${val y = s"ccc $${val z = s"eee $CARET_MARKER fff"} ddd"} bbb"
|}
""".stripMargin
doTest(text, '$', '$')
}
def testDiffNestedString(): Unit = {
val text =
s"""
|fooboo(
| s${"\\"\\"\\""}
| $${val yy = s"aaa $${
| val zz =
| s${"\\"\\"\\""}
| Boo!
| ${"\\"\\"\\""}.stripMargin
| } bbb"}
| ${"\\"\\"\\""}$CARET_MARKER
|)""".stripMargin.replace("\\r", "")
doTest(text, ',', ' ', '\\r', '\\r')
}
/**
* That relates straight to incremental highlighting - see SCL-8958 itself and comment to
* [[org.jetbrains.plugins.scala.lang.lexer.ScalaLexer#previousToken]]
*/
def testScl8958(): Unit = {
val before =
s"""
|class Test {
| val test1 = <div></div>
|}
|
|class Test2 {$CARET_MARKER}
""".stripMargin
val after =
s"""
|class Test {
| val test1 = <div></div>
|}
|
|class Test2 {
| $CARET_MARKER
|}
""".stripMargin
checkGeneratedTextAfterEnter(before, after)
}
def testInterpolatedString(): Unit = {
val text = "s\\"\\"\\"\\n ${if (true)" + CARET_MARKER + "}\\n\\n\\"\\"\\"\\n{}\\nval a = 1"
doTest(text, ' ')
}
def testBig(): Unit = {
val text =
s"""package es.fcc.bibl.bd
import javax.xml.parsers.DocumentBuilderFactory
/** Sincronizacion bidireccional del contenido de tablas con el servidor.
*
*
* Uso:
* - Establecer antes autentificacion si es necesario: {{{
* Authenticator.setDefault(new Authenticator() {
* override def getPasswordAuthentication() = new PasswordAuthentication("miusuario", "micontraseña")
* })
* }}}
* Pendiente:
* - Soporte de redirecciones
* Ver https://fccmadoc.atlassian.net/browse/RDS-4858 para especificaciones.
*
* @param servidor Servidor a donde enviar, sin dominio fccma.com
* de las tablas de las que se depende.
* @param mensaje Mensaje que aparecen en la notificacion de progreso.*/
class Sincronizador(servidor: String, ruta: String, soporte: String, tblsProcesar: Seq[TablaBase] = Seq(),
params: Map[String, String] = Map.empty, mensaje: String = "Sincronizar datos")(implicit ctx: Activity) {
protected val msj = ListBuffer[String]()
/** Subconjunto de [[tblsProcesar]] que van sincronizadas bidireccionalmente.*/
protected val tblsEnviar = tblsProcesar.collect { case t: TablaSincronizada[_] => t }
future {
val url = s"https://$$servidor.fccma.com/fccma/"
if (hayErrores)
notif.setSmallIcon(R.drawable.stat_notify_sync_error)
notif.setOngoing(false)
if (msj.size > 0) {
val detalles = new NotificationCompat.InboxStyle()
detalles.setBigContentTitle("Avisos:")
msj.foreach(detalles.addLine(_))
notif.setStyle(detalles)
}
notifica()
}
/** Fabrica el XML que va dentro de la peticion.
* Metodo con test.*/
protected[bd] def fabricaXml(os: OutputStream) {
val esc = new OutputStreamWriter(os, "UTF-8")
esc.write(s"Content-Type: multipart/mixed;boundary=$$frontera\\r\\n")
iniBloque()
val totalFilas = (for (t <- tblsEnviar) yield t.cuantos).sum
val gen = new GeneradorNombresFichero
notif.setContentText("Enviando cambios locales")
notif.setProgress(totalFilas, 0, false)
notifica()
var filas = 0
new EscritorXml(esc) {
ele("Sincronizacion") {
ele("soporte") {
ele("nombre") { txt(soporte) }
}
}
}
if (gen.contador > 0) {
notif.setContentTitle("Enviando binarios")
notif.setProgress(gen.contador, 0, false)
notifica()
val gen2 = new GeneradorNombresFichero
for {
tbl <- tblsEnviar
blob <- tbl.leeBlobs
} {
val fich = gen2.genera(blob)
      iniBloque(s${"\"\"\""}Content-Disposition:form-data;name="$$fich";filename="$$fich" $CARET_MARKER${"\"\"\""}, "Content-Transfer-Encoding:binary")
// No hace falta seguramente pero por si acaso
notif.setProgress(gen.contador, gen2.contador, false)
notifica()
}
}
// Cerrar multipart MIME
esc.write(s"\\r\\n--$$frontera--\\r\\n")
esc.close()
}
}"""
    doTest(text, '\r', ' ', ' ', '\r', '\r')
}
def testScl9396(): Unit = {
val text =
"""
|package azaza
|
|
|object Main {
| def fooboo() {
| val paymentType = 123
|
| if (true) {
| """ + CARET_MARKER +
""""Unsupported payment type: [$paymentType]" ; val a = 1
| }
| }
|}
""".stripMargin
doTest(text, 's')
}
}
object IncrementalLexerHighlightingTest {
private def asTuples(array: SegmentArrayWithData) =
for (i <- 0 until array.getSegmentCount)
yield (array.getSegmentStart(i), array.getSegmentEnd(i), array.getSegmentData(i))
}
|
loskutov/intellij-scala
|
test/org/jetbrains/plugins/scala/lang/lexer/IncrementalLexerHighlightingTest.scala
|
Scala
|
apache-2.0
| 7,016 |
package it.nerdammer.spark.hbase
import java.util
import it.nerdammer.spark.hbase.conversion.{SingleColumnFieldWriter, FieldWriter}
import scala.reflect.ClassTag
import scala.util.Random
trait SaltingProvider[T] extends Serializable{
def salting: Array[T]
def salt(rowKey: Array[Byte]): T
protected def verify(implicit writer: FieldWriter[T]): Unit = {
if(length==0)
throw new IllegalArgumentException("Salting cannot have length 0")
}
def length(implicit writer: FieldWriter[T]): Int = {
if(!writer.isInstanceOf[SingleColumnFieldWriter[T]]) {
throw new IllegalArgumentException("Salting array must be composed of primitive types")
}
val singleColumnFieldWriter = writer.asInstanceOf[SingleColumnFieldWriter[T]]
salting
.map(s => singleColumnFieldWriter.mapColumn(s))
.map(o => o.getOrElse(Array[Byte]()))
.map(a => a.size)
.foldLeft(None.asInstanceOf[Option[Int]])((size, saltSize) => {
if (size.nonEmpty && size.get != saltSize)
throw new IllegalArgumentException(s"You cannot use salts with different lengths: ${size.get} and $saltSize")
Some(saltSize)
})
.get
}
}
trait SaltingProviderFactory[T] extends Serializable {
def getSaltingProvider(salting: Iterable[T]): SaltingProvider[T]
}
class RandomSaltingProvider[T: ClassTag](val salting: Array[T])(implicit writer: FieldWriter[T]) extends SaltingProvider[T] {
verify(writer)
def this(saltingIterable: Iterable[T])(implicit writer: FieldWriter[T]) = this(saltingIterable.toArray)
def randomizer = new Random
override def salt(rowKey: Array[Byte]): T = salting(randomizer.nextInt(salting.size))
}
class HashSaltingProvider[T: ClassTag](val salting: Array[T])(implicit writer: FieldWriter[T]) extends SaltingProvider[T] {
verify(writer)
def this(saltingIterable: Iterable[T])(implicit writer: FieldWriter[T]) = this(saltingIterable.toArray)
def hash(rowKey: Array[Byte]) = util.Arrays.hashCode(rowKey)
override def salt(rowKey: Array[Byte]): T = salting((hash(rowKey) & 0x7fffffff) % salting.size)
}
trait SaltingProviderConversions {
implicit def defaultHaltingProviderFactory[T: ClassTag](implicit writer: FieldWriter[T]): SaltingProviderFactory[T] = new SaltingProviderFactory[T] {
def getSaltingProvider(salting: Iterable[T]): SaltingProvider[T] = new HashSaltingProvider[T](salting)
}
}
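// Illustrative sketch (not part of the original source), assuming an implicit FieldWriter[String] is
// in scope: a HashSaltingProvider maps each row key deterministically onto one of the configured
// salts, so writers and readers agree on the key prefix.
//   val provider = new HashSaltingProvider[String](Seq("a", "b", "c", "d"))
//   val salt = provider.salt(rowKeyBytes)  // same row key => same salt (rowKeyBytes is hypothetical)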
|
nerdammer/spark-hbase-connector
|
src/main/scala/it/nerdammer/spark/hbase/SaltingProvider.scala
|
Scala
|
apache-2.0
| 2,408 |
package scorex.app.api.http
import java.nio.charset.StandardCharsets
import play.api.libs.json.Json
import scorex.account.{Account, PublicKeyAccount}
import scorex.app.Controller
import scorex.crypto.{Base58, SigningFunctionsImpl}
import spray.routing.HttpService
import scala.util.{Failure, Success, Try}
trait AddressHttpService extends HttpService with CommonApiFunctions {
import Controller.wallet
lazy val adressesRouting =
pathPrefix("addresses") {
path("") {
get {
complete {
val addresses = wallet.privateKeyAccounts().map(_.address)
Json.arr(addresses).toString()
}
}
} ~ path("validate" / Segment) { case address =>
get {
complete {
val jsRes = Json.obj("address" -> address, "valid" -> Account.isValidAddress(address))
Json.stringify(jsRes)
}
}
} ~ path("seed" / Segment) { case address =>
get {
complete {
//CHECK IF WALLET EXISTS
val jsRes = withAccount(address) { account =>
wallet.exportAccountSeed(account.address) match {
case None => ApiError.json(ApiError.WalletSeedExportFailed)
case Some(seed) => Json.obj("address" -> address, "seed" -> Base58.encode(seed))
}
}
Json.stringify(jsRes)
}
}
} ~ path("new") {
get {
complete {
walletNotExists().getOrElse {
wallet.generateNewAccount() match {
case Some(pka) => Json.obj("address" -> pka.address)
case None => ApiError.json(ApiError.Unknown)
}
}.toString()
}
}
} ~ path("balance" / Segment / IntNumber) { case (address, confirmations) =>
//todo: confirmations parameter doesn't work atm
get {
complete {
val jsRes = balanceJson(address, confirmations)
Json.stringify(jsRes)
}
}
} ~ path("balance" / Segment) { case address =>
get {
complete {
val jsRes = balanceJson(address, 1)
Json.stringify(jsRes)
}
}
} ~ path("generatingbalance" / Segment) { case address =>
get {
complete {
val jsRes = if (!Account.isValidAddress(address)) {
ApiError.json(ApiError.InvalidAddress)
} else {
Json.obj(
"address" -> address,
"balance" -> Controller.blockchainStorage.generationBalance(address)
)
}
Json.stringify(jsRes)
}
}
} ~ path("verify" / Segment) { case address =>
post {
entity(as[String]) { jsText =>
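          // Expected JSON body: {"message": <Base58>, "signature": <Base58>, "publickey": <Base58>}.
          // The signature is verified against the decoded message with the given public key, and the
          // address derived from that key must equal the address in the URL.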
complete {
val jsRes = Try {
val js = Json.parse(jsText)
val msg = (js \\ "message").as[String]
val signature = (js \\ "signature").as[String]
val pubKey = (js \\ "publickey").as[String]
if (!Account.isValidAddress(address)) {
ApiError.json(ApiError.InvalidAddress)
} else {
//DECODE SIGNATURE
(Base58.decode(msg), Base58.decode(signature), Base58.decode(pubKey)) match {
case (Failure(_), _, _) => ApiError.json(ApiError.InvalidMessage)
case (_, Failure(_), _) => ApiError.json(ApiError.InvalidSignature)
case (_, _, Failure(_)) => ApiError.json(ApiError.InvalidPublicKey)
case (Success(msgBytes), Success(signatureBytes), Success(pubKeyBytes)) =>
val account = new PublicKeyAccount(pubKeyBytes)
val isValid = account.address == address &&
SigningFunctionsImpl.verify(signatureBytes, msgBytes, pubKeyBytes)
Json.obj("valid" -> isValid)
}
}
}.getOrElse(ApiError.json(ApiError.WrongJson))
Json.stringify(jsRes)
}
}
}
} ~ path("sign" / Segment) { case address =>
post {
entity(as[String]) { message =>
complete {
val jsRes = walletNotExists().getOrElse {
if (!Account.isValidAddress(address)) {
ApiError.json(ApiError.InvalidAddress)
} else {
wallet.privateKeyAccount(address) match {
case None => ApiError.json(ApiError.WalletAddressNotExists)
case Some(account) =>
Try(SigningFunctionsImpl.sign(account, message.getBytes(StandardCharsets.UTF_8))) match {
case Success(signature) =>
Json.obj("message" -> message,
"publickey" -> Base58.encode(account.publicKey),
"signature" -> Base58.encode(signature))
case Failure(t) => ApiError.json(t)
}
}
}
}
jsRes.toString()
}
}
}
} ~ path("address" / Segment) { case address => //todo: fix routing to that?
delete {
complete {
val jsRes = walletNotExists().getOrElse {
if (!Account.isValidAddress(address)) {
ApiError.json(ApiError.InvalidAddress)
} else {
val deleted = wallet.privateKeyAccount(address).exists(account =>
wallet.deleteAccount(account))
Json.obj("deleted" -> deleted)
}
}
jsRes.toString()
}
}
} /* todo: fix or remove ~ path("") {
post {
entity(as[String]) { seed =>
complete {
val jsRes = if (seed.isEmpty) {
walletNotExists().getOrElse {
wallet.generateNewAccount() match {
case Some(pka) => Json.obj("address" -> pka.address)
case None => ApiError.toJson(ApiError.ERROR_UNKNOWN)
}
}
} else {
walletNotExists().getOrElse {
//DECODE SEED
Try(Base58.decode(seed)).toOption.flatMap { seedBytes =>
if (seedBytes != null && seedBytes.size == 32) {
Some(Json.obj("address" -> wallet.importAccountSeed(seedBytes)))
} else None
}.getOrElse(ApiError.toJson(ApiError.ERROR_INVALID_SEED))
}
}
Json.stringify(jsRes)
}
}
}
} */
}
private def balanceJson(address: String, confirmations: Int) =
if (!Account.isValidAddress(address)) {
ApiError.json(ApiError.InvalidAddress)
} else {
Json.obj(
"address" -> address,
"confirmations" -> confirmations,
"balance" -> Controller.blockchainStorage.balance(address, confirmations)
)
}
}
|
pozharko/Scorex-Lagonaki
|
src/main/scala/scorex/app/api/http/AddressHttpService.scala
|
Scala
|
cc0-1.0
| 7,177 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import scala.collection.JavaConverters.mapAsJavaMapConverter
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.connector.catalog.SupportsNamespaces
import org.apache.spark.util.Utils
/**
* Physical plan node for creating a namespace.
*/
case class CreateNamespaceExec(
catalog: SupportsNamespaces,
namespace: Seq[String],
ifNotExists: Boolean,
private var properties: Map[String, String])
extends V2CommandExec {
override protected def run(): Seq[InternalRow] = {
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import org.apache.spark.sql.connector.catalog.SupportsNamespaces._
val ns = namespace.toArray
if (!catalog.namespaceExists(ns)) {
try {
val ownership =
Map(PROP_OWNER -> Utils.getCurrentUserName())
catalog.createNamespace(ns, (properties ++ ownership).asJava)
} catch {
case _: NamespaceAlreadyExistsException if ifNotExists =>
logWarning(s"Namespace ${namespace.quoted} was created concurrently. Ignoring.")
}
} else if (!ifNotExists) {
throw new NamespaceAlreadyExistsException(ns)
}
Seq.empty
}
override def output: Seq[Attribute] = Seq.empty
}
|
witgo/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/CreateNamespaceExec.scala
|
Scala
|
apache-2.0
| 2,229 |
import sbt._
import Keys._
import Scope.{ GlobalScope, ThisScope }
object LaunchProguard {
lazy val Proguard = config("proguard") hide;
lazy val configurationFile = settingKey[File]("Location of the generated proguard configuration file.")
lazy val proguard = taskKey[File]("Produces the final compacted jar that contains only the classes needed using proguard.")
lazy val proguardConfiguration = taskKey[File]("Creates the configuration file to use with proguard.")
lazy val options = taskKey[Seq[String]]("Proguard options.")
lazy val optimizePasses = settingKey[Int]("Number of optimization passes proguard performs.")
lazy val keepFullClasses = settingKey[Seq[String]]("Fully qualified names of classes that proguard should preserve the non-private API of.")
lazy val settings: Seq[Setting[_]] =
inScope(GlobalScope)(inConfig(Proguard)(globalSettings)) ++
inConfig(Proguard)(baseSettings) :+
(libraryDependencies += "net.sf.proguard" % "proguard-base" % "4.8" % Proguard.name)
/** Defaults */
def globalSettings = Seq(
optimizePasses := 2, // more don't seem to help much and it increases proguard runtime
keepFullClasses := Nil,
options := basicOptions
)
def baseSettings = Seq(
optimizeSetting,
    options ++= keepFullClasses.value map ("-keep public class " + _ + " {\n\tpublic protected * ;\n}"),
configurationFile := target.value / "proguard.pro",
proguardConfiguration := writeProguardConfiguration.value,
proguard := proguardTask.value
)
/** Options to set the number of optimization passes or to disable optimization altogether. */
def optimizeSetting = options ++= {
val passes = optimizePasses.value
if (passes <= 0)
Seq("-dontoptimize")
else
Seq(
"-optimizationpasses " + passes.toString,
// optimization is problematic without this option, possibly proguard can't handle certain scalac-generated bytecode
"-optimizations !code/allocation/variable"
)
}
def specific(launchSub: Reference): Seq[Setting[_]] = inConfig(Proguard)(Seq(
keepFullClasses ++= "xsbti.**" :: Nil,
artifactPath := target.value / ("sbt-launch-" + version.value + ".jar"),
options ++= dependencyOptions(launchSub).value,
options += "-injars " + mkpath(packageBin.value),
packageBin := (packageBin in (launchSub, Compile)).value,
options ++= mainClass.in(launchSub, Compile).value.toList map keepMain,
options += "-outjars " + mkpath(artifactPath.value),
fullClasspath := Classpaths.managedJars(configuration.value, classpathTypes.value, update.value)
))
def basicOptions =
Seq(
"-keep,allowoptimization,allowshrinking class * { *; }", // no obfuscation
"-keepattributes SourceFile,LineNumberTable", // preserve debugging information
"-dontnote",
"-dontwarn", // too many warnings generated for scalac-generated bytecode last time this was enabled
"-ignorewarnings")
/** Option to preserve the main entry point. */
private def keepMain(className: String) =
s"""-keep public class $className {
| public static void main(java.lang.String[]);
|}""".stripMargin
private def excludeIvyResources =
"META-INF/**" ::
"fr/**" ::
"**/antlib.xml" ::
"**/*.png" ::
"org/apache/ivy/core/settings/ivyconf*.xml" ::
"org/apache/ivy/core/settings/ivysettings-*.xml" ::
"org/apache/ivy/plugins/resolver/packager/*" ::
"**/ivy_vfs.xml" ::
"org/apache/ivy/plugins/report/ivy-report-*" ::
Nil
// libraryFilter and the Scala library-specific filtering in mapInJars can be removed for 2.11, since it is properly modularized
private def libraryFilter = "(!META-INF/**,!*.properties,!scala/util/parsing/*.class,**.class)"
private def generalFilter = "(!META-INF/**,!*.properties)"
def dependencyOptions(launchSub: Reference) = Def.task {
val cp = (dependencyClasspath in (launchSub, Compile)).value
val analysis = (compile in (launchSub, Compile)).value
mapJars(cp.files, analysis.relations.allBinaryDeps.toSeq, streams.value.log)
}
def mapJars(in: Seq[File], all: Seq[File], log: Logger): Seq[String] =
mapInJars(in, log) ++ mapLibraryJars(all filterNot in.toSet)
def writeProguardConfiguration = Def.task {
    val content = options.value.mkString("\n")
val conf = configurationFile.value
if (!conf.exists || IO.read(conf) != content) {
streams.value.log.info("Proguard configuration written to " + conf)
IO.write(conf, content)
}
conf
}
def mapLibraryJars(libraryJars: Seq[File]): Seq[String] = libraryJars.map(f => "-libraryjars " + mkpath(f))
def mapOutJar(outJar: File) = "-outjars " + mkpath(outJar)
  def mkpath(f: File): String = mkpath(f.getAbsolutePath, '\"')
def mkpath(path: String, delimiter: Char): String = delimiter + path + delimiter
def proguardTask = Def.task {
val inJar = packageBin.value
val outputJar = artifactPath.value
val configFile = proguardConfiguration.value
val f = FileFunction.cached(cacheDirectory.value / "proguard", FilesInfo.hash) { _ =>
runProguard(outputJar, configFile, fullClasspath.value.files, streams.value.log)
Set(outputJar)
}
f(Set(inJar, configFile)) // make the assumption that if the classpath changed, the outputJar would change
outputJar
}
def runProguard(outputJar: File, configFile: File, cp: Seq[File], log: Logger) {
IO.delete(outputJar)
    val fileString = mkpath(configFile.getAbsolutePath, '\'')
val exitValue = Process("java", List("-Xmx256M", "-cp", Path.makeString(cp), "proguard.ProGuard", "-include " + fileString)) ! log
if (exitValue != 0) error("Proguard failed with nonzero exit code (" + exitValue + ")")
}
def mapInJars(inJars: Seq[File], log: Logger): Seq[String] =
{
val (ivyJars, notIvy) = inJars partition isJarX("ivy")
val (libraryJar, remaining) = notIvy partition isJarX("scala-library")
val (compilerJar, otherJars) = remaining partition isJarX("scala-compiler")
log.debug("proguard configuration:")
log.debug("\\tIvy jar location: " + ivyJars.mkString(", "))
log.debug("\\tOther jars:\\n\\t" + otherJars.mkString("\\n\\t"))
((withJar(ivyJars.toSeq, "Ivy") + excludeString(excludeIvyResources)) ::
(withJar(libraryJar, "Scala library") + libraryFilter) ::
otherJars.map(jar => mkpath(jar) + generalFilter).toList) map { "-injars " + _ }
}
private def excludeString(s: List[String]) = s.map("!" + _).mkString("(", ",", ")")
private def withJar[T](files: Seq[File], name: String) = mkpath(files.headOption getOrElse error(name + " not present"))
private def isJarX(x: String)(file: File) =
{
val name = file.getName
name.startsWith(x) && name.endsWith(".jar")
}
}
|
xeno-by/old-scalameta-sbt
|
project/Proguard.scala
|
Scala
|
bsd-3-clause
| 6,816 |
package katas.scala.doors
import org.junit.Test
import org.scalatest.Matchers
class Doors8 extends Matchers {
@Test def `end state of doors`() {
walkDoors(amount = 1) should equal(Seq(true))
walkDoors(amount = 2) should equal(Seq(true, false))
walkDoors(amount = 3) should equal(Seq(true, false, false))
walkDoors(amount = 4) should equal(Seq(true, false, false, true))
walkDoors(amount = 5) should equal(Seq(true, false, false, true, false))
walkDoors(amount = 6) should equal(Seq(true, false, false, true, false, false))
walkDoors(amount = 7) should equal(Seq(true, false, false, true, false, false, false))
walkDoors(amount = 8) should equal(Seq(true, false, false, true, false, false, false, false))
}
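  // Door n is toggled once for every divisor of n, so it ends up open exactly when n has an odd
  // number of divisors, i.e. when n is a perfect square, which matches the expectations above.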
private def walkDoors(amount: Int): Seq[Boolean] = {
val doors = Array.fill(amount){ false }
1.to(amount).foreach{ stepSize =>
Range(stepSize - 1, amount, stepSize).foreach { i =>
doors(i) = !doors(i)
}
}
doors.toSeq
}
}
|
dkandalov/katas
|
scala/src/katas/scala/doors/Doors8.scala
|
Scala
|
unlicense
| 968 |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db;
import scouter.server.core.cache.TextCache
import scouter.server.db.text.TextTable
import scouter.util.HashUtil
import scouter.lang.TextTypes
object TextRD {
def getString(date: String, divs: String, hash: Int): String = {
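        // Lookup order: the in-memory TextCache first, then the permanent store for permanent text
        // types, then the per-date table; values read from the per-date table are cached afterwards.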
val out = TextCache.get(divs, hash);
if (out != null)
return out;
try {
if (TextPermWR.isA(divs)) {
return TextPermRD.getString(divs, hash);
}
val table = TextWR.open(date)
val b = table.get(divs, hash);
if (b == null)
return null;
val text = new String(b, "UTF-8");
TextCache.put(divs, hash, text);
return text;
} catch {
case e: Exception => e.printStackTrace()
}
return null;
}
}
|
yuyupapa/OpenSource
|
scouter.server/src/scouter/server/db/TextRD.scala
|
Scala
|
apache-2.0
| 1,421 |
/**
*
* Copyright 2014 Lukas Karas, Avast a.s. <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.avast.bigmap
import java.io.File
class TsvMap(sortedTextFile: File)(
implicit val keyColumns: Int = 1,
  implicit val columnDelimiter: Char = '\t'
) extends Map[Array[String], Array[String]] {
val rowFactory = new TsvRowFactory()
lazy val fileReader = new LargeFileReader(sortedTextFile)
lazy val bSearch = new BinarySearch[TsvRow](fileReader)(
rowFactory,
new TsvRowComparator()
)
override def +[B1 >: Array[String]](kv: (Array[String], B1)): Map[Array[String], B1] = sys.error("Extending map is not supported")
def value(row: TsvRow): Array[String] = row.columns(keyColumns, row.columnCount)
def key(row: TsvRow): Array[String] = row.columns(0, keyColumns)
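  // get() performs a binary search over the sorted file, so lookups need only O(log n) row reads
  // instead of scanning the whole file.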
override def get(key: Array[String]): Option[Array[String]] = bSearch
.search(rowFactory(key))
.map(value(_))
override def iterator: Iterator[(Array[String], Array[String])] = fileReader
.getLines()
.map[(Array[String], Array[String])](line => {
val row = rowFactory(line)
(key(row), value(row))
})
override def -(key: Array[String]): Map[Array[String], Array[String]] = sys.error("Removing from map is not supported")
}
object TsvMapTest {
  val columnDelimiter = '\t'
val keyColumns = 1
def main(args: Array[String]): Unit = {
val sortedFile = args(0)
val key = args.tail
val map = new TsvMap(new File(sortedFile))
val v = map.get(key)
println(key.mkString("(", ", ", ")") + " => " + v.map(arr => arr.mkString("(", ", ", ")")))
}
}
|
avast/BigMap
|
src/main/scala/com/avast/bigmap/TsvMap.scala
|
Scala
|
apache-2.0
| 2,132 |
package views.disposal_of_vehicle.priv
import composition.TestHarness
import helpers.disposal_of_vehicle.CookieFactoryForUISpecs
import org.openqa.selenium.WebDriver
import org.scalatest.selenium.WebBrowser
import WebBrowser.go
import WebBrowser.pageSource
import WebBrowser.pageTitle
import pages.disposal_of_vehicle.BeforeYouStartPage
import pages.disposal_of_vehicle.priv.DisposeSuccessForPrivateKeeperPage
import uk.gov.dvla.vehicles.presentation.common.testhelpers.{UiSpec, UiTag}
import views.disposal_of_vehicle.DisposeSuccess
class DisposeSuccessIntegrationSpec extends UiSpec with TestHarness {
"new disposal button" should {
"be present when the disposal is done by a private keeper" taggedAs UiTag in
new WebBrowserForSelenium {
go to BeforeYouStartPage
cacheSetup()
go to DisposeSuccessForPrivateKeeperPage
pageTitle should equal(DisposeSuccessForPrivateKeeperPage.title)
pageSource should include(s"""id="${DisposeSuccess.NewDisposalId}"""")
}
}
private def cacheSetup()(implicit webDriver: WebDriver) =
CookieFactoryForUISpecs.
setupTradeDetails().
businessChooseYourAddress().
enterAddressManually().
dealerDetails().
vehicleAndKeeperDetailsModel().
privateDisposeFormModel().
disposeTransactionId().
vehicleRegistrationNumber().
disposeFormTimestamp()
}
|
dvla/vehicles-online
|
test/views/disposal_of_vehicle/priv/DisposeSuccessIntegrationSpec.scala
|
Scala
|
mit
| 1,382 |
/*
* Copyright 2017-2020 Aleksey Fomkin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package korolev.internal
import korolev._
import korolev.effect.{Effect, Queue, Reporter, Scheduler, Stream}
import korolev.effect.syntax._
import korolev.state.{StateDeserializer, StateManager, StateSerializer}
import levsha.Document.Node
import levsha.{Id, StatefulRenderContext, XmlNs}
import levsha.events.EventId
import scala.collection.mutable
import Context._
import korolev.data.Bytes
import korolev.util.JsCode
import korolev.web.FormData
import scala.concurrent.ExecutionContext
/**
* Component state holder and effects performer
*
* Performing cycle:
*
* 1. prepare()
* 2. Optionally setState()
* 3. applyRenderContext()
* 4. dropObsoleteMisc()
*
* @tparam AS Type of top level state (application state)
* @tparam CS Type of component state
*/
final class ComponentInstance
[
F[_]: Effect,
AS: StateSerializer: StateDeserializer, M,
CS: StateSerializer: StateDeserializer, P, E
](
nodeId: Id,
sessionId: Qsid,
frontend: Frontend[F],
eventRegistry: EventRegistry[F],
stateManager: StateManager[F],
getRenderNum: () => Int,
val component: Component[F, CS, P, E],
stateQueue: Queue[F, (Id, Any)],
createMiscProxy: (StatefulRenderContext[Binding[F, AS, M]], (StatefulRenderContext[Binding[F, CS, E]], Binding[F, CS, E]) => Unit) => StatefulRenderContext[Binding[F, CS, E]],
scheduler: Scheduler[F],
reporter: Reporter
) { self =>
import ComponentInstance._
import reporter.Implicit
private val miscLock = new Object()
  private val markedDelays = mutable.Set.empty[Id] // Set of the delays which should survive
private val markedComponentInstances = mutable.Set.empty[Id]
private val delays = mutable.Map.empty[Id, DelayInstance[F, CS, E]]
private val elements = mutable.Map.empty[ElementId, Id]
private val events = mutable.Map.empty[EventId, Vector[Event[F, CS, E]]]
private val nestedComponents = mutable.Map.empty[Id, ComponentInstance[F, CS, E, _, _, _]]
  // Why do we use '() => F[Unit]'? Because we should
  // support scala.concurrent.Future, which has
  // strict semantics (it runs immediately).
private val immediatePendingEffects = Queue[F, () => F[Unit]]()
@volatile private var eventSubscription = Option.empty[E => _]
private[korolev] object browserAccess extends Access[F, CS, E] {
private def getId(elementId: ElementId): F[Id] = Effect[F].delay {
unsafeGetId(elementId)
}
private def unsafeGetId(elementId: ElementId): Id = {
// miscLock synchronization required
// because prop handler methods can be
// invoked during render.
miscLock.synchronized {
elements.get(elementId) match {
case None =>
elementId.name match {
case Some(name) => throw new Exception(s"No element matched for accessor $name")
case None => throw new Exception(s"No element matched for accessor")
}
case Some(id) => id
}
}
}
def imap[S2](read: PartialFunction[CS, S2], write: PartialFunction[(CS, S2), CS]): Access[F, S2, E] =
new MappedAccess[F, CS, S2, E](this, read, write)
def property(elementId: ElementId): PropertyHandler[F] = {
val idF = getId(elementId)
new PropertyHandler[F] {
def get(propName: String): F[String] = idF.flatMap { id =>
frontend.extractProperty(id, propName)
}
def set(propName: String, value: Any): F[Unit] = idF.flatMap { id =>
        // XmlNs argument is empty because it will be ignored
frontend.setProperty(id, propName, value)
}
}
}
def focus(element: ElementId): F[Unit] =
getId(element).flatMap { id =>
frontend.focus(id)
}
def publish(message: E): F[Unit] =
Effect[F].delay(eventSubscription.foreach(f => f(message)))
def state: F[CS] = {
val state = stateManager.read[CS](nodeId)
state.map(_.getOrElse(throw new RuntimeException("State is empty")))
}
def sessionId: F[Qsid] = Effect[F].delay(self.sessionId)
def transition(f: Transition[CS]): F[Unit] = applyTransition(f, sync = false)
def syncTransition(f: Transition[CS]): F[Unit] = applyTransition(f, sync = true)
def downloadFormData(element: ElementId): F[FormData] =
for {
id <- getId(element)
formData <- frontend.uploadForm(id)
} yield formData
def downloadFiles(id: ElementId): F[List[(FileHandler, Bytes)]] = {
downloadFilesAsStream(id).flatMap { streams =>
Effect[F].sequence {
streams.map { case (handler, data) =>
data
.fold(Bytes.empty)(_ ++ _)
.map(b => (handler, b))
}
}
}
}
def downloadFilesAsStream(id: ElementId): F[List[(FileHandler, Stream[F, Bytes])]] = {
listFiles(id).flatMap { handlers =>
Effect[F].sequence {
handlers.map { handler =>
downloadFileAsStream(handler).map(f => (handler, f))
}
}
}
}
/**
* Get selected file as a stream from input
*/
def downloadFileAsStream(handler: FileHandler): F[Stream[F, Bytes]] = {
for {
id <- getId(handler.elementId)
streams <- frontend.uploadFile(id, handler)
} yield streams
}
def listFiles(elementId: ElementId): F[List[FileHandler]] =
for {
id <- getId(elementId)
files <- frontend.listFiles(id)
} yield {
files.map { case (fileName, size) =>
FileHandler(fileName, size)(elementId)
}
}
def uploadFile(name: String,
stream: Stream[F, Bytes],
size: Option[Long],
mimeType: String): F[Unit] =
frontend.downloadFile(name, stream, size, mimeType)
def resetForm(elementId: ElementId): F[Unit] =
getId(elementId).flatMap { id =>
frontend.resetForm(id)
}
def evalJs(code: JsCode): F[String] =
frontend.evalJs(code.mkString(unsafeGetId))
def eventData: F[String] = frontend.extractEventData(getRenderNum())
def registerCallback(name: String)(f: String => F[Unit]): F[Unit] =
frontend.registerCustomCallback(name)(f)
}
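  // Illustrative sketch of how a component's render/event code typically uses the
  // `Access` implemented above (the element id `usernameInput`, `MyState` and
  // `MyEvent` are hypothetical names, not part of this file):
  //
  //   def onClick(access: Access[F, MyState, MyEvent]): F[Unit] =
  //     for {
  //       name <- access.property(usernameInput).get("value")
  //       _    <- access.transition(state => state.copy(userName = name))
  //       _    <- access.publish(MyEvent.NameChanged(name))
  //     } yield ()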
  /**
    * Subscribes to component instance events.
    * The callback will be invoked when `access.publish()` is called
    * in the component instance context.
    */
def setEventsSubscription(callback: E => _): Unit = {
eventSubscription = Some(callback)
}
def applyRenderContext(parameters: P,
rc: StatefulRenderContext[Binding[F, AS, M]],
snapshot: StateManager.Snapshot): Unit = miscLock.synchronized {
// Reset all event handlers delays and elements
prepare()
val state = snapshot[CS](nodeId).getOrElse(component.initialState)
val node =
try {
component.render(parameters, state)
} catch {
case e: MatchError =>
Node[Binding[F, CS, E]] { rc =>
reporter.error(s"Render is not defined for $state", e)
rc.openNode(XmlNs.html, "span")
rc.addTextNode("Render is not defined for the state")
rc.closeNode("span")
}
}
val proxy = createMiscProxy(rc, { (proxy, misc) =>
misc match {
case event: Event[F, CS, E] =>
val id = rc.currentContainerId
val eid = EventId(id, event.`type`, event.phase)
val es = events.getOrElseUpdate(eid, Vector.empty)
events.put(eid, es :+ event)
eventRegistry.registerEventType(event.`type`)
case element: ElementId =>
val id = rc.currentContainerId
elements.put(element, id)
()
case delay: Delay[F, CS, E] =>
val id = rc.currentContainerId
markedDelays += id
if (!delays.contains(id)) {
val delayInstance = new DelayInstance(delay, scheduler, reporter)
delays.put(id, delayInstance)
delayInstance.start(browserAccess)
}
case entry: ComponentEntry[F, CS, E, Any, Any, Any] =>
val id = rc.subsequentId
nestedComponents.get(id) match {
case Some(n: ComponentInstance[F, CS, E, Any, Any, Any]) if n.component.id == entry.component.id =>
// Use nested component instance
markedComponentInstances += id
n.setEventsSubscription((e: Any) => entry.eventHandler(browserAccess, e).runAsyncForget)
n.applyRenderContext(entry.parameters, proxy, snapshot)
case _ =>
val n = entry.createInstance(
id, sessionId, frontend, eventRegistry,
stateManager, getRenderNum, stateQueue,
scheduler, reporter
)
markedComponentInstances += id
nestedComponents.put(id, n)
n.unsafeInitialize()
n.setEventsSubscription((e: Any) => entry.eventHandler(browserAccess, e).runAsyncForget)
n.applyRenderContext(entry.parameters, proxy, snapshot)
}
}
})
node(proxy)
}
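  // A minimal sketch of the rendering cycle described in the class comment, as seen
  // from a hypothetical caller that owns the render context `rc`, the state snapshot
  // and the component parameters (the names below are assumptions for illustration):
  //
  //   instance.applyRenderContext(parameters, rc, snapshot) // prepare() is invoked internally
  //   instance.dropObsoleteMisc()                           // drop unmarked delays/components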
def applyTransition(transition: Transition[CS], sync: Boolean): F[Unit] = {
val effect = () =>
for {
state <- stateManager.read[CS](nodeId)
newState = transition(state.getOrElse(component.initialState))
_ <- stateManager.write(nodeId, newState)
_ <- stateQueue.enqueue(nodeId, newState)
} yield ()
if (sync) effect()
else immediatePendingEffects.enqueue(effect)
}
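  // Note on the two entry points above: `browserAccess.transition` calls this with
  // sync = false, so the effect is enqueued into `immediatePendingEffects` and executed
  // by the stream started in initialize()/unsafeInitialize(); `browserAccess.syncTransition`
  // calls it with sync = true, so the state update is performed as part of the returned effect.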
def applyEvent(eventId: EventId): Boolean = {
events.get(eventId) match {
case Some(events: Vector[Event[F, CS, E]]) =>
// A user defines the event effect, so we
// don't control the time of execution.
// We shouldn't block the application if
// the user's code waits for something
// for a long time.
events.forall { event =>
event.effect(browserAccess).runAsyncForget
!event.stopPropagation
}
case None =>
nestedComponents.values.forall { nested =>
nested.applyEvent(eventId)
}
}
}
  /**
    * Remove all delays and nested component instances
    * which were not marked while applying the render context.
    */
def dropObsoleteMisc(): Unit = miscLock.synchronized {
delays foreach {
case (id, delay) =>
if (!markedDelays.contains(id)) {
delays.remove(id)
delay.cancel()
}
}
nestedComponents foreach {
case (id, nested) =>
if (!markedComponentInstances.contains(id)) {
nestedComponents.remove(id)
nested
.destroy()
.after(stateManager.delete(id))
.runAsyncForget
}
else nested.dropObsoleteMisc()
}
}
  /**
    * Prepares the component instance for applying the render context.
    * Removes all temporary and obsolete misc.
    * All nested components will also be prepared.
    */
private def prepare(): Unit = {
markedComponentInstances.clear()
markedDelays.clear()
elements.clear()
events.clear()
// Remove only finished delays
delays foreach {
case (id, delay) =>
if (delay.isFinished)
delays.remove(id)
}
}
  /**
    * Close 'immediatePendingEffects' in this component and
    * all nested components.
    *
    * MUST be invoked after the connection is closed.
    */
def destroy(): F[Unit] =
for {
_ <- immediatePendingEffects.close()
_ <- nestedComponents
.values
.toList
.map(_.destroy())
.sequence
.unit
} yield ()
protected def unsafeInitialize(): Unit =
immediatePendingEffects.stream
.foreach(_.apply())
.runAsyncForget
// Execute effects sequentially
def initialize()(implicit ec: ExecutionContext): F[Effect.Fiber[F, Unit]] =
Effect[F].start(immediatePendingEffects.stream.foreach(_.apply()))
}
private object ComponentInstance {
import Context.Access
import Context.Delay
final class DelayInstance[F[_]: Effect, S, M](delay: Delay[F, S, M],
scheduler: Scheduler[F],
reporter: Reporter) {
@volatile private var handler = Option.empty[Scheduler.JobHandler[F, _]]
@volatile private var finished = false
def isFinished: Boolean = finished
def cancel(): Unit = {
handler.foreach(_.unsafeCancel())
}
def start(access: Access[F, S, M]): Unit = {
handler = Some {
scheduler.unsafeScheduleOnce(delay.duration) {
finished = true
delay.effect(access)
}
}
}
}
}
|
fomkin/korolev
|
modules/korolev/src/main/scala/korolev/internal/ComponentInstance.scala
|
Scala
|
apache-2.0
| 13,331 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.si.model.stream
import com.bwsw.sj.common.dal.model.service.ServiceDomain
import com.bwsw.sj.common.dal.model.stream._
import com.bwsw.sj.common.dal.repository.ConnectionRepository
import com.bwsw.sj.common.rest.utils.ValidationUtils.validateName
import com.bwsw.sj.common.utils.{MessageResourceUtils, StreamLiterals}
import scaldi.Injectable.inject
import scaldi.Injector
import scala.collection.mutable.ArrayBuffer
class SjStream(val streamType: String,
val name: String,
val service: String,
val tags: Array[String],
val force: Boolean,
val description: String,
val creationDate: String)
(implicit injector: Injector) {
protected val messageResourceUtils: MessageResourceUtils = inject[MessageResourceUtils]
import messageResourceUtils.createMessage
protected val connectionRepository: ConnectionRepository = inject[ConnectionRepository]
protected var serviceDomain: ServiceDomain = _
def to(): StreamDomain = ???
/**
* Validates stream
*
* @return empty array if stream is correct, validation errors otherwise
*/
def validate(): ArrayBuffer[String] = {
val errors = ArrayBuffer[String]()
errors ++= validateGeneralFields()
StreamLiterals.typeToServiceType.get(streamType) match {
case Some(serviceType) =>
val (serviceDomain, extractedErrors) = extractServiceByName(service, serviceType)
if (serviceDomain.isEmpty)
errors ++= extractedErrors
else {
this.serviceDomain = serviceDomain.get
if (errors.isEmpty)
errors ++= validateSpecificFields()
}
case None =>
}
errors
}
def validateSpecificFields(): ArrayBuffer[String] = ArrayBuffer[String]()
  /**
    * Creates the structure in storage that is used by the stream
    */
def create(): Unit = {}
  /**
    * Deletes the structure in storage that is used by the stream
    */
def delete(): Unit = {}
  /**
    * Validates the fields that are common to all types of stream
    *
    * @return empty array if the fields are correct, validation errors otherwise
    */
protected def validateGeneralFields(): ArrayBuffer[String] = {
val streamDAO = connectionRepository.getStreamRepository
val errors = new ArrayBuffer[String]()
// 'name' field
Option(name) match {
case Some("") | None =>
errors += createMessage("entity.error.attribute.required", "Name")
case Some(x) =>
errors ++= validateStreamName(name)
val streamObj = streamDAO.get(x)
if (streamObj.isDefined) {
errors += createMessage("entity.error.already.exists", "Stream", x)
}
}
// 'streamType' field
Option(streamType) match {
case Some("") | None =>
errors += createMessage("entity.error.attribute.required", "Type")
case Some(t) =>
if (!StreamLiterals.types.contains(t)) {
errors += createMessage("entity.error.unknown.type.must.one.of", t, "stream", StreamLiterals.types.mkString("[", ", ", "]"))
}
}
errors
}
protected def extractServiceByName(serviceName: String, serviceType: String): (Option[ServiceDomain], ArrayBuffer[String]) = {
val errors = new ArrayBuffer[String]()
var serviceDomain: Option[ServiceDomain] = None
Option(service) match {
case Some("") | None =>
errors += createMessage("entity.error.attribute.required", "Service")
case Some(x) =>
val serviceDAO = connectionRepository.getServiceRepository
val serviceObj = serviceDAO.get(x)
serviceObj match {
case None =>
errors += createMessage("entity.error.doesnot.exist", "Service", x)
case Some(someService) =>
if (someService.serviceType != serviceType) {
errors += createMessage("entity.error.must.one.type.other.given",
s"Service for '$streamType' stream",
serviceType,
someService.serviceType)
} else {
serviceDomain = Some(someService)
}
}
}
(serviceDomain, errors)
}
protected def validateStreamName(name: String): ArrayBuffer[String] = {
val errors = new ArrayBuffer[String]()
if (!validateName(name)) {
errors += createMessage("entity.error.incorrect.name", "Stream", name, "stream")
}
errors
}
}
class StreamCreator {
def from(streamDomain: StreamDomain)(implicit injector: Injector): SjStream = streamDomain.streamType match {
case StreamLiterals.`tstreamsType` =>
val tStreamStream = streamDomain.asInstanceOf[TStreamStreamDomain]
new TStreamStream(
tStreamStream.name,
tStreamStream.service.name,
tStreamStream.partitions,
tStreamStream.tags,
tStreamStream.force,
tStreamStream.streamType,
tStreamStream.description,
tStreamStream.creationDate.toString)
case StreamLiterals.`restType` =>
val restStream = streamDomain.asInstanceOf[RestStreamDomain]
new RestStream(
restStream.name,
restStream.service.name,
restStream.tags,
restStream.force,
restStream.streamType,
restStream.description,
restStream.creationDate.toString)
case StreamLiterals.`kafkaType` =>
val kafkaStream = streamDomain.asInstanceOf[KafkaStreamDomain]
new KafkaStream(
kafkaStream.name,
kafkaStream.service.name,
kafkaStream.partitions,
kafkaStream.replicationFactor,
kafkaStream.tags,
kafkaStream.force,
kafkaStream.streamType,
kafkaStream.description,
kafkaStream.creationDate.toString)
case StreamLiterals.`jdbcType` =>
val jdbcStream = streamDomain.asInstanceOf[JDBCStreamDomain]
new JDBCStream(
jdbcStream.name,
jdbcStream.service.name,
jdbcStream.primary,
jdbcStream.tags,
jdbcStream.force,
jdbcStream.streamType,
jdbcStream.description,
jdbcStream.creationDate.toString)
case StreamLiterals.`elasticsearchType` =>
val esStream = streamDomain.asInstanceOf[ESStreamDomain]
new ESStream(
esStream.name,
esStream.service.name,
esStream.tags,
esStream.force,
esStream.streamType,
esStream.description,
esStream.creationDate.toString)
}
}
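// Illustrative usage sketch (the `streamDomain` value below is a hypothetical
// instance loaded from the connection repository, not part of this file):
//
//   val stream: SjStream = new StreamCreator().from(streamDomain)
//   val errors = stream.validate()   // empty buffer when the stream is correct
//   if (errors.isEmpty) stream.create()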
|
bwsw/sj-platform
|
core/sj-common/src/main/scala/com/bwsw/sj/common/si/model/stream/SjStream.scala
|
Scala
|
apache-2.0
| 7,273 |
package sri.mobile.examples.router.components
import sri.core.ElementFactory._
import sri.core.ReactElement
import sri.mobile.examples.router.RouterExampleApp.{Fourth, Second}
import sri.mobile.examples.router.routes.ThirdModule
import sri.universal.components._
import sri.universal.router
import sri.universal.router.{UniversalRouterComponent, _}
import sri.universal.styles.UniversalStyleSheet
import scala.scalajs.js
import scala.scalajs.js.Dynamic.{literal => json}
import scala.scalajs.js.annotation.ScalaJSDefined
object HomeScreen {
@ScalaJSDefined
class Component extends UniversalRouterComponent[Unit, Unit] {
override def render(): ReactElement = View(style = styles.container)(
View(style = styles.row)(
getStaticBlock("Static Second Screen", Second),
getStaticBlock("Static Module Third Home Screen", ThirdModule.Home)
),
View(style = styles.row)(
getStaticBlock("Static Module Third Second Screen", ThirdModule.Second),
getDynamicBlock("Dynamic Fourth Screen")
)
)
def getStaticBlock(text: String, page: StaticPage) = {
TouchableHighlight(style = styles.block,
underlayColor = "grey",
key = text,
onPress = () => navigateTo(page))(
Text(style = styles.text)(text)
)
}
def getDynamicBlock(text: String) = {
TouchableHighlight(style = styles.block,
underlayColor = "grey",
key = text,
onPress = () => navigateTo(Fourth, Person("Sri"), "Sri"))(
Text(style = styles.text)(text)
)
}
def onTextClick() = {
navigateTo(Second)
}
}
val ctor = getTypedConstructor(js.constructorOf[Component], classOf[Component])
ctor.contextTypes = router.routerContextTypes
def apply() = createElementNoProps(ctor)
}
object styles extends UniversalStyleSheet {
val container = style(flexOne,
// alignItems.center,
justifyContent.center,
// backgroundColor := "purple"
backgroundColor := "#ED4721"
)
val row = style(
flex := 1,
flexDirection.row,
// flexWrap.wrap,
margin := 10)
val block = style(
flex := 1,
alignSelf.stretch,
margin := 10,
padding := 10,
borderRadius := 10,
shadowColor := "black",
shadowOpacity := 0.5,
shadowRadius := 2,
shadowOffset := json(height = 1, width = 0),
backgroundColor := "#343536",
justifyContent.center
)
val text = style(fontSize := 16,
overflow.hidden,
textAlign.center,
color := "grey",
fontWeight._500)
}
|
hamazy/sri
|
mobile-examples/src/main/scala/sri/mobile/examples/router/components/HomeScreen.scala
|
Scala
|
apache-2.0
| 2,567 |
/**
* Copyright (C) 2014 TU Berlin ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.peelframework.core.results.model
/** Model class for experiments.
*
* Captures information for an [[org.peelframework.core.beans.experiment.Experiment Experiment]] bean execution.
*
* @param name The value of the bean `name` property.
* @param suite The `id` value of the enclosing [[org.peelframework.core.beans.experiment.ExperimentSuite ExperimentSuite]] bean.
  * @param system The `id` value of the associated runner system bean.
*/
case class Experiment(
name: Symbol,
suite: Symbol,
system: Symbol
) {
val id = name.## * 31 + suite.##
}
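// For example, Experiment('wordcount, 'suite1, 'flink) always maps to the same `id`,
// since the id is derived only from the hash codes of `name` and `suite`
// (name.## * 31 + suite.##); the `system` field does not participate.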
/** [[Experiment]] companion and storage manager. */
object Experiment extends PersistedAPI[Experiment] {
import java.sql.Connection
import anorm.SqlParser._
import anorm._
override val tableName = "experiment"
override val rowParser = {
get[Int] ("id") ~
get[String] ("name") ~
get[String] ("suite") ~
get[String] ("system") map {
case id ~ name ~ suite ~ system => Experiment(Symbol(name), Symbol(suite), Symbol(system))
}
}
override def createTable()(implicit conn: Connection): Unit = if (!tableExists) {
SQL( s"""
CREATE TABLE experiment (
id INTEGER NOT NULL,
name VARCHAR(127) NOT NULL,
suite VARCHAR(63) NOT NULL,
system VARCHAR(63) NOT NULL,
PRIMARY KEY (id),
FOREIGN KEY (system) REFERENCES system(id) ON DELETE RESTRICT
)""").execute()
}
override def insert(x: Experiment)(implicit conn: Connection): Unit = {
SQL"""
INSERT INTO experiment(id, name, suite, system) VALUES(
${x.id},
${x.name.name},
${x.suite.name},
${x.system.name}
)
""".executeInsert()
}
override def insert(xs: Seq[Experiment])(implicit conn: Connection): Unit = if (xs.nonEmpty) {
BatchSql(
s"""
INSERT INTO experiment(id, name, suite, system) VALUES(
{id},
{name},
{suite},
{system}
)
""",
namedParametersFor(xs.head),
xs.tail.map(namedParametersFor): _*
).execute()
}
override def insertMissing(xs: Seq[Experiment])(implicit conn: Connection) = {
val current = selectAll().toSet // find current experiments
insert(xs.filterNot(current.contains)) // insert the ones which are not in the current list
}
override def update(x: Experiment)(implicit conn: Connection): Unit = {
SQL"""
UPDATE experiment SET
name = ${x.name.name},
suite = ${x.suite.name},
system = ${x.system.name}
WHERE
id = ${x.id}
""".executeUpdate()
}
override def delete(x: Experiment)(implicit conn: Connection): Unit = {
SQL"""
DELETE FROM experiment WHERE id = ${x.id}
""".execute()
}
def namedParametersFor(x: Experiment): Seq[NamedParameter] = Seq[NamedParameter](
'id -> x.id,
'name -> x.name.name,
'suite -> x.suite.name,
'system -> x.system.name
)
}
|
akunft/peel
|
peel-core/src/main/scala/org/peelframework/core/results/model/Experiment.scala
|
Scala
|
apache-2.0
| 3,646 |
package modules
import com.google.inject.name.Named
import com.google.inject.{ AbstractModule, Provides }
import com.mohiva.play.silhouette.api.actions.{ SecuredErrorHandler, UnsecuredErrorHandler }
import com.mohiva.play.silhouette.api.crypto._
import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository
import com.mohiva.play.silhouette.api.services._
import com.mohiva.play.silhouette.api.util._
import com.mohiva.play.silhouette.api.{ Environment, EventBus, Silhouette, SilhouetteProvider }
import com.mohiva.play.silhouette.crypto.{ JcaCookieSigner, JcaCookieSignerSettings, JcaCrypter, JcaCrypterSettings }
import com.mohiva.play.silhouette.impl.authenticators._
import com.mohiva.play.silhouette.impl.providers._
import com.mohiva.play.silhouette.impl.providers.oauth1._
import com.mohiva.play.silhouette.impl.providers.oauth1.secrets.{ CookieSecretProvider, CookieSecretSettings }
import com.mohiva.play.silhouette.impl.providers.oauth1.services.PlayOAuth1Service
import com.mohiva.play.silhouette.impl.providers.oauth2._
import com.mohiva.play.silhouette.impl.providers.oauth2.state.{ CookieStateProvider, CookieStateSettings, DummyStateProvider }
import com.mohiva.play.silhouette.impl.providers.openid.YahooProvider
import com.mohiva.play.silhouette.impl.providers.openid.services.PlayOpenIDService
import com.mohiva.play.silhouette.impl.services._
import com.mohiva.play.silhouette.impl.util._
import com.mohiva.play.silhouette.password.BCryptPasswordHasher
import com.mohiva.play.silhouette.persistence.daos.{ DelegableAuthInfoDAO, InMemoryAuthInfoDAO, MongoAuthInfoDAO }
import com.mohiva.play.silhouette.persistence.repositories.DelegableAuthInfoRepository
import models.SteamProfile
import models.daos._
import models.services.{ CustomSteamProvider, SteamUserMicroServiceImpl, UserMicroService, UserService }
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
import net.codingwell.scalaguice.ScalaModule
import play.api.Configuration
import play.api.libs.concurrent.Execution.Implicits._
import play.api.libs.json.Json
import play.api.libs.openid.OpenIdClient
import play.api.libs.ws.WSClient
import play.modules.reactivemongo.ReactiveMongoApi
import utils.auth.{ CustomSecuredErrorHandler, CustomUnsecuredErrorHandler, DefaultEnv }
/**
* The Guice module which wires all Silhouette dependencies.
*/
class SilhouetteModule extends AbstractModule with ScalaModule {
/**
* Configures the module.
*/
def configure() {
bind[Silhouette[DefaultEnv]].to[SilhouetteProvider[DefaultEnv]]
bind[UnsecuredErrorHandler].to[CustomUnsecuredErrorHandler]
bind[SecuredErrorHandler].to[CustomSecuredErrorHandler]
bind[UserMicroService[SteamProfile]].to[SteamUserMicroServiceImpl]
bind[CacheLayer].to[PlayCacheLayer]
bind[IDGenerator].toInstance(new SecureRandomIDGenerator())
bind[PasswordHasher].toInstance(new BCryptPasswordHasher)
bind[FingerprintGenerator].toInstance(new DefaultFingerprintGenerator(false))
bind[EventBus].toInstance(EventBus())
bind[Clock].toInstance(Clock())
// Replace this with the bindings to your concrete DAOs
bind[DelegableAuthInfoDAO[PasswordInfo]].toInstance(new InMemoryAuthInfoDAO[PasswordInfo])
bind[DelegableAuthInfoDAO[OAuth1Info]].toInstance(new InMemoryAuthInfoDAO[OAuth1Info])
bind[DelegableAuthInfoDAO[OAuth2Info]].toInstance(new InMemoryAuthInfoDAO[OAuth2Info])
// bind[DelegableAuthInfoDAO[OpenIDInfo]].toInstance(new InMemoryAuthInfoDAO[OpenIDInfo])
}
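  // A minimal sketch of swapping one of the in-memory DAOs above for a Mongo-backed one,
  // mirroring the OpenID DAO provider further below (illustrative only, not the binding
  // actually used by this module):
  //
  //   @Provides
  //   def provideOAuth2InfoDAO(reactiveMongoApi: ReactiveMongoApi, config: Configuration): DelegableAuthInfoDAO[OAuth2Info] = {
  //     implicit lazy val format = Json.format[OAuth2Info]
  //     new MongoAuthInfoDAO[OAuth2Info](reactiveMongoApi, config)
  //   }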
/**
* Provides the HTTP layer implementation.
*
* @param client Play's WS client.
* @return The HTTP layer implementation.
*/
@Provides
def provideHTTPLayer(client: WSClient): HTTPLayer = new PlayHTTPLayer(client)
/**
* Provides the Silhouette environment.
*
* @param userService The user service implementation.
* @param authenticatorService The authentication service implementation.
* @param eventBus The event bus instance.
* @return The Silhouette environment.
*/
@Provides
def provideEnvironment(
userService: UserService,
authenticatorService: AuthenticatorService[SessionAuthenticator],
eventBus: EventBus): Environment[DefaultEnv] = {
Environment[DefaultEnv](
userService,
authenticatorService,
Seq(),
eventBus
)
}
  /**
   * Provides the social provider registry.
   *
   * @param steamProvider The Steam provider implementation.
   * @return The social provider registry.
   */
@Provides
def provideSocialProviderRegistry(
steamProvider: CustomSteamProvider): SocialProviderRegistry = {
SocialProviderRegistry(Seq(
steamProvider
))
}
/**
* Provides the cookie signer for the OAuth1 token secret provider.
*
* @param configuration The Play configuration.
* @return The cookie signer for the OAuth1 token secret provider.
*/
@Provides @Named("oauth1-token-secret-cookie-signer")
def provideOAuth1TokenSecretCookieSigner(configuration: Configuration): CookieSigner = {
val config = configuration.underlying.as[JcaCookieSignerSettings]("silhouette.oauth1TokenSecretProvider.cookie.signer")
new JcaCookieSigner(config)
}
/**
* Provides the crypter for the OAuth1 token secret provider.
*
* @param configuration The Play configuration.
* @return The crypter for the OAuth1 token secret provider.
*/
@Provides @Named("oauth1-token-secret-crypter")
def provideOAuth1TokenSecretCrypter(configuration: Configuration): Crypter = {
val config = configuration.underlying.as[JcaCrypterSettings]("silhouette.oauth1TokenSecretProvider.crypter")
new JcaCrypter(config)
}
/**
* Provides the cookie signer for the OAuth2 state provider.
*
* @param configuration The Play configuration.
* @return The cookie signer for the OAuth2 state provider.
*/
@Provides @Named("oauth2-state-cookie-signer")
def provideOAuth2StageCookieSigner(configuration: Configuration): CookieSigner = {
val config = configuration.underlying.as[JcaCookieSignerSettings]("silhouette.oauth2StateProvider.cookie.signer")
new JcaCookieSigner(config)
}
/**
* Provides the cookie signer for the authenticator.
*
* @param configuration The Play configuration.
* @return The cookie signer for the authenticator.
*/
@Provides @Named("authenticator-cookie-signer")
def provideAuthenticatorCookieSigner(configuration: Configuration): CookieSigner = {
val config = configuration.underlying.as[JcaCookieSignerSettings]("silhouette.authenticator.cookie.signer")
new JcaCookieSigner(config)
}
/**
* Provides the crypter for the authenticator.
*
* @param configuration The Play configuration.
* @return The crypter for the authenticator.
*/
@Provides @Named("authenticator-crypter")
def provideAuthenticatorCrypter(configuration: Configuration): Crypter = {
val config = configuration.underlying.as[JcaCrypterSettings]("silhouette.authenticator.crypter")
new JcaCrypter(config)
}
/**
* Provides the auth info repository.
*
* @param passwordInfoDAO The implementation of the delegable password auth info DAO.
* @param oauth1InfoDAO The implementation of the delegable OAuth1 auth info DAO.
* @param oauth2InfoDAO The implementation of the delegable OAuth2 auth info DAO.
* @param openIDInfoDAO The implementation of the delegable OpenID auth info DAO.
* @return The auth info repository instance.
*/
@Provides
def provideAuthInfoRepository(
passwordInfoDAO: DelegableAuthInfoDAO[PasswordInfo],
oauth1InfoDAO: DelegableAuthInfoDAO[OAuth1Info],
oauth2InfoDAO: DelegableAuthInfoDAO[OAuth2Info],
openIDInfoDAO: DelegableAuthInfoDAO[OpenIDInfo]): AuthInfoRepository = {
new DelegableAuthInfoRepository(passwordInfoDAO, oauth1InfoDAO, oauth2InfoDAO, openIDInfoDAO)
}
/**
   * Provides the cookie-based authenticator service.
*
* @param cookieSigner The cookie signer implementation.
* @param crypter The crypter implementation.
* @param fingerprintGenerator The fingerprint generator implementation.
* @param idGenerator The ID generator implementation.
* @param configuration The Play configuration.
* @param clock The clock instance.
* @return The authenticator service.
*/
@Provides
def provideAuthenticatorService(
@Named("authenticator-cookie-signer") cookieSigner: CookieSigner,
@Named("authenticator-crypter") crypter: Crypter,
fingerprintGenerator: FingerprintGenerator,
idGenerator: IDGenerator,
configuration: Configuration,
clock: Clock): AuthenticatorService[CookieAuthenticator] = {
val config = configuration.underlying.as[CookieAuthenticatorSettings]("silhouette.authenticator")
val encoder = new CrypterAuthenticatorEncoder(crypter)
new CookieAuthenticatorService(config, None, cookieSigner, encoder, fingerprintGenerator, idGenerator, clock)
}
/**
   * Provides the session-based authenticator service.
*
* @param cookieSigner The cookie signer implementation.
* @param crypter The crypter implementation.
* @param fingerprintGenerator The fingerprint generator implementation.
* @param idGenerator The ID generator implementation.
* @param configuration The Play configuration.
* @param clock The clock instance.
* @return The authenticator service.
*/
@Provides
def provideAuthenticatorService2(
@Named("authenticator-cookie-signer") cookieSigner: CookieSigner,
@Named("authenticator-crypter") crypter: Crypter,
fingerprintGenerator: FingerprintGenerator,
idGenerator: IDGenerator,
configuration: Configuration,
clock: Clock): AuthenticatorService[SessionAuthenticator] = {
val config = configuration.underlying.as[SessionAuthenticatorSettings]("silhouette.authenticator")
val encoder = new CrypterAuthenticatorEncoder(crypter)
new SessionAuthenticatorService(config, fingerprintGenerator, encoder, clock)
}
/**
* Provides the avatar service.
*
* @param httpLayer The HTTP layer implementation.
* @return The avatar service implementation.
*/
@Provides
def provideAvatarService(httpLayer: HTTPLayer): AvatarService = new GravatarService(httpLayer)
/**
* Provides the OAuth1 token secret provider.
*
* @param cookieSigner The cookie signer implementation.
* @param crypter The crypter implementation.
* @param configuration The Play configuration.
* @param clock The clock instance.
* @return The OAuth1 token secret provider implementation.
*/
@Provides
def provideOAuth1TokenSecretProvider(
@Named("oauth1-token-secret-cookie-signer") cookieSigner: CookieSigner,
@Named("oauth1-token-secret-crypter") crypter: Crypter,
configuration: Configuration,
clock: Clock): OAuth1TokenSecretProvider = {
val settings = configuration.underlying.as[CookieSecretSettings]("silhouette.oauth1TokenSecretProvider")
new CookieSecretProvider(settings, cookieSigner, crypter, clock)
}
/**
* Provides the OAuth2 state provider.
*
* @param idGenerator The ID generator implementation.
* @param cookieSigner The cookie signer implementation.
* @param configuration The Play configuration.
* @param clock The clock instance.
* @return The OAuth2 state provider implementation.
*/
@Provides
def provideOAuth2StateProvider(
idGenerator: IDGenerator,
@Named("oauth2-state-cookie-signer") cookieSigner: CookieSigner,
configuration: Configuration, clock: Clock): OAuth2StateProvider = {
val settings = configuration.underlying.as[CookieStateSettings]("silhouette.oauth2StateProvider")
new CookieStateProvider(settings, idGenerator, cookieSigner, clock)
}
/**
* Provides the implementation of the delegable OpenID auth info DAO.
*
* @param reactiveMongoApi The ReactiveMongo API.
* @param config The Play configuration.
* @return The implementation of the delegable OpenID auth info DAO.
*/
@Provides
def provideOpenIDInfoDAO(reactiveMongoApi: ReactiveMongoApi, config: Configuration): DelegableAuthInfoDAO[OpenIDInfo] = {
implicit lazy val format = Json.format[OpenIDInfo]
new MongoAuthInfoDAO[OpenIDInfo](reactiveMongoApi, config)
}
/**
* Provides the password hasher registry.
*
* @param passwordHasher The default password hasher implementation.
* @return The password hasher registry.
*/
@Provides
def providePasswordHasherRegistry(passwordHasher: PasswordHasher): PasswordHasherRegistry = {
new PasswordHasherRegistry(passwordHasher)
}
/**
* Provides the credentials provider.
*
* @param authInfoRepository The auth info repository implementation.
* @param passwordHasherRegistry The password hasher registry.
* @return The credentials provider.
*/
@Provides
def provideCredentialsProvider(
authInfoRepository: AuthInfoRepository,
passwordHasherRegistry: PasswordHasherRegistry): CredentialsProvider = {
new CredentialsProvider(authInfoRepository, passwordHasherRegistry)
}
/**
* Provides the Facebook provider.
*
* @param httpLayer The HTTP layer implementation.
* @param stateProvider The OAuth2 state provider implementation.
* @param configuration The Play configuration.
* @return The Facebook provider.
*/
@Provides
def provideFacebookProvider(
httpLayer: HTTPLayer,
stateProvider: OAuth2StateProvider,
configuration: Configuration): FacebookProvider = {
new FacebookProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.facebook"))
}
/**
* Provides the Google provider.
*
* @param httpLayer The HTTP layer implementation.
* @param stateProvider The OAuth2 state provider implementation.
* @param configuration The Play configuration.
* @return The Google provider.
*/
@Provides
def provideGoogleProvider(
httpLayer: HTTPLayer,
stateProvider: OAuth2StateProvider,
configuration: Configuration): GoogleProvider = {
new GoogleProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.google"))
}
/**
* Provides the VK provider.
*
* @param httpLayer The HTTP layer implementation.
* @param stateProvider The OAuth2 state provider implementation.
* @param configuration The Play configuration.
* @return The VK provider.
*/
@Provides
def provideVKProvider(
httpLayer: HTTPLayer,
stateProvider: OAuth2StateProvider,
configuration: Configuration): VKProvider = {
new VKProvider(httpLayer, stateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.vk"))
}
/**
* Provides the Clef provider.
*
* @param httpLayer The HTTP layer implementation.
* @param configuration The Play configuration.
* @return The Clef provider.
*/
@Provides
def provideClefProvider(httpLayer: HTTPLayer, configuration: Configuration): ClefProvider = {
new ClefProvider(httpLayer, new DummyStateProvider, configuration.underlying.as[OAuth2Settings]("silhouette.clef"))
}
/**
* Provides the Twitter provider.
*
* @param httpLayer The HTTP layer implementation.
* @param tokenSecretProvider The token secret provider implementation.
* @param configuration The Play configuration.
* @return The Twitter provider.
*/
@Provides
def provideTwitterProvider(
httpLayer: HTTPLayer,
tokenSecretProvider: OAuth1TokenSecretProvider,
configuration: Configuration): TwitterProvider = {
val settings = configuration.underlying.as[OAuth1Settings]("silhouette.twitter")
new TwitterProvider(httpLayer, new PlayOAuth1Service(settings), tokenSecretProvider, settings)
}
/**
* Provides the Xing provider.
*
* @param httpLayer The HTTP layer implementation.
* @param tokenSecretProvider The token secret provider implementation.
* @param configuration The Play configuration.
* @return The Xing provider.
*/
@Provides
def provideXingProvider(
httpLayer: HTTPLayer,
tokenSecretProvider: OAuth1TokenSecretProvider,
configuration: Configuration): XingProvider = {
val settings = configuration.underlying.as[OAuth1Settings]("silhouette.xing")
new XingProvider(httpLayer, new PlayOAuth1Service(settings), tokenSecretProvider, settings)
}
/**
* Provides the Yahoo provider.
*
* @param cacheLayer The cache layer implementation.
* @param httpLayer The HTTP layer implementation.
* @param client The OpenID client implementation.
* @param configuration The Play configuration.
* @return The Yahoo provider.
*/
@Provides
def provideYahooProvider(
cacheLayer: CacheLayer,
httpLayer: HTTPLayer,
client: OpenIdClient,
configuration: Configuration): YahooProvider = {
val settings = configuration.underlying.as[OpenIDSettings]("silhouette.yahoo")
new YahooProvider(httpLayer, new PlayOpenIDService(client, settings), settings)
}
@Provides
def provideSteamProvider(
cacheLayer: CacheLayer,
httpLayer: HTTPLayer,
client: OpenIdClient,
configuration: Configuration,
steamUserDAO: SteamUserDAO): CustomSteamProvider = {
val settings = configuration.underlying.as[OpenIDSettings]("silhouette.steam")
new CustomSteamProvider(httpLayer, new PlayOpenIDService(client, settings), settings, steamUserDAO)
}
}
|
hnrklssn/game-check-match
|
app/modules/SilhouetteModule.scala
|
Scala
|
apache-2.0
| 17,664 |
package pl.combosolutions.backup
trait Executable[T <: Executable[T]] extends Serializable {
def run: Async[Result[T]]
def digest[U](implicit interpreter: Result[T]#Interpreter[U]): Async[U]
}
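// A minimal sketch of the F-bounded pattern used above (the command type and its
// fields are hypothetical, not part of this module):
//
//   case class ShellCommand(line: String) extends Executable[ShellCommand] {
//     def run: Async[Result[ShellCommand]] = ???
//     def digest[U](implicit interpreter: Result[ShellCommand]#Interpreter[U]): Async[U] = ???
//   }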
|
MateuszKubuszok/BackupDSL
|
modules/common/src/main/scala/pl/combosolutions/backup/Executable.scala
|
Scala
|
mit
| 200 |
package de.qualitune.ast
/**
* @author Max Leuthaeuser
* @since 27.02.12
*/
case class ASTElement()
|
max-leuthaeuser/CPSTextInterpreter
|
src/main/scala/de/qualitune/ast/ASTElement.scala
|
Scala
|
gpl-3.0
| 109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import scala.collection.mutable.Queue
import scala.collection.Map
import scala.reflect.ClassTag
import java.io.InputStream
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.Props
import akka.actor.SupervisorStrategy
import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.fs.Path
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.MetadataCleaner
import org.apache.spark.streaming.dstream._
import org.apache.spark.streaming.receivers._
import org.apache.spark.streaming.scheduler._
import org.apache.hadoop.conf.Configuration
/**
* Main entry point for Spark Streaming functionality. It provides methods used to create
* [[org.apache.spark.streaming.dstream.DStream]]s from various input sources. It can be either
* created by providing a Spark master URL and an appName, or from a org.apache.spark.SparkConf
* configuration (see core Spark documentation), or from an existing org.apache.spark.SparkContext.
* The associated SparkContext can be accessed using `context.sparkContext`. After
* creating and transforming DStreams, the streaming computation can be started and stopped
* using `context.start()` and `context.stop()`, respectively.
 * `context.awaitTermination()` allows the current thread to wait for the termination
* of the context by `stop()` or by an exception.
*/
class StreamingContext private[streaming] (
sc_ : SparkContext,
cp_ : Checkpoint,
batchDur_ : Duration
) extends Logging {
/**
* Create a StreamingContext using an existing SparkContext.
* @param sparkContext existing SparkContext
* @param batchDuration the time interval at which streaming data will be divided into batches
*/
def this(sparkContext: SparkContext, batchDuration: Duration) = {
this(sparkContext, null, batchDuration)
}
/**
* Create a StreamingContext by providing the configuration necessary for a new SparkContext.
* @param conf a org.apache.spark.SparkConf object specifying Spark parameters
* @param batchDuration the time interval at which streaming data will be divided into batches
*/
def this(conf: SparkConf, batchDuration: Duration) = {
this(StreamingContext.createNewSparkContext(conf), null, batchDuration)
}
/**
* Create a StreamingContext by providing the details necessary for creating a new SparkContext.
* @param master cluster URL to connect to (e.g. mesos://host:port, spark://host:port, local[4]).
* @param appName a name for your job, to display on the cluster web UI
* @param batchDuration the time interval at which streaming data will be divided into batches
*/
def this(
master: String,
appName: String,
batchDuration: Duration,
sparkHome: String = null,
jars: Seq[String] = Nil,
environment: Map[String, String] = Map()) = {
this(StreamingContext.createNewSparkContext(master, appName, sparkHome, jars, environment),
null, batchDuration)
}
/**
* Recreate a StreamingContext from a checkpoint file.
* @param path Path to the directory that was specified as the checkpoint directory
* @param hadoopConf Optional, configuration object if necessary for reading from
* HDFS compatible filesystems
*/
def this(path: String, hadoopConf: Configuration = new Configuration) =
this(null, CheckpointReader.read(path, new SparkConf(), hadoopConf).get, null)
if (sc_ == null && cp_ == null) {
throw new Exception("Spark Streaming cannot be initialized with " +
"both SparkContext and checkpoint as null")
}
private[streaming] val isCheckpointPresent = (cp_ != null)
private[streaming] val sc: SparkContext = {
if (isCheckpointPresent) {
new SparkContext(cp_.sparkConf)
} else {
sc_
}
}
if (MetadataCleaner.getDelaySeconds(sc.conf) < 0) {
throw new SparkException("Spark Streaming cannot be used without setting spark.cleaner.ttl; "
+ "set this property before creating a SparkContext (use SPARK_JAVA_OPTS for the shell)")
}
private[streaming] val conf = sc.conf
private[streaming] val env = SparkEnv.get
private[streaming] val graph: DStreamGraph = {
if (isCheckpointPresent) {
cp_.graph.setContext(this)
cp_.graph.restoreCheckpointData()
cp_.graph
} else {
assert(batchDur_ != null, "Batch duration for streaming context cannot be null")
val newGraph = new DStreamGraph()
newGraph.setBatchDuration(batchDur_)
newGraph
}
}
private val nextNetworkInputStreamId = new AtomicInteger(0)
private[streaming] var checkpointDir: String = {
if (isCheckpointPresent) {
sc.setCheckpointDir(cp_.checkpointDir)
cp_.checkpointDir
} else {
null
}
}
private[streaming] val checkpointDuration: Duration = {
if (isCheckpointPresent) cp_.checkpointDuration else graph.batchDuration
}
private[streaming] val scheduler = new JobScheduler(this)
private[streaming] val waiter = new ContextWaiter
/**
* Return the associated Spark context
*/
def sparkContext = sc
/**
   * Set each DStream in this context to remember the RDDs it generated in the last given duration.
   * DStreams remember RDDs only for a limited duration of time and release them for garbage
   * collection. This method allows the developer to specify how long to remember the RDDs
   * (if the developer wishes to query old data outside the DStream computation).
   * @param duration Minimum duration that each DStream should remember its RDDs
*/
def remember(duration: Duration) {
graph.remember(duration)
}
/**
* Set the context to periodically checkpoint the DStream operations for driver
* fault-tolerance.
   * @param directory HDFS-compatible directory where the checkpoint data will be reliably stored.
   *                  Note that this must be a fault-tolerant file system like HDFS.
*/
def checkpoint(directory: String) {
if (directory != null) {
val path = new Path(directory)
val fs = path.getFileSystem(sparkContext.hadoopConfiguration)
fs.mkdirs(path)
val fullPath = fs.getFileStatus(path).getPath().toString
sc.setCheckpointDir(fullPath)
checkpointDir = fullPath
} else {
checkpointDir = null
}
}
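  // Illustrative usage sketch (the checkpoint URL is a hypothetical example):
  //
  //   val ssc = new StreamingContext(conf, Seconds(1))
  //   ssc.checkpoint("hdfs://namenode:8020/user/spark/checkpoints")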
private[streaming] def initialCheckpoint: Checkpoint = {
if (isCheckpointPresent) cp_ else null
}
private[streaming] def getNewNetworkStreamId() = nextNetworkInputStreamId.getAndIncrement()
/**
* Create an input stream with any arbitrary user implemented network receiver.
* Find more details at: http://spark-project.org/docs/latest/streaming-custom-receivers.html
* @param receiver Custom implementation of NetworkReceiver
*/
def networkStream[T: ClassTag](
receiver: NetworkReceiver[T]): DStream[T] = {
new PluggableInputDStream[T](this, receiver)
}
/**
* Create an input stream with any arbitrary user implemented actor receiver.
* Find more details at: http://spark-project.org/docs/latest/streaming-custom-receivers.html
* @param props Props object defining creation of the actor
* @param name Name of the actor
* @param storageLevel RDD storage level. Defaults to memory-only.
*
   * @note An important point to note:
   *       since the actor may exist outside the Spark framework, it is the user's responsibility
   *       to ensure type safety, i.e. the parametrized type of the data received and of the
   *       actorStream should be the same.
*/
def actorStream[T: ClassTag](
props: Props,
name: String,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2,
supervisorStrategy: SupervisorStrategy = ReceiverSupervisorStrategy.defaultStrategy
): DStream[T] = {
networkStream(new ActorReceiver[T](props, name, storageLevel, supervisorStrategy))
}
/**
   * Create an input stream from a TCP source hostname:port. Data is received using
   * a TCP socket and the received bytes are interpreted as UTF8-encoded, `\\n`-delimited
   * lines.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param storageLevel Storage level to use for storing the received objects
* (default: StorageLevel.MEMORY_AND_DISK_SER_2)
*/
def socketTextStream(
hostname: String,
port: Int,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[String] = {
socketStream[String](hostname, port, SocketReceiver.bytesToLines, storageLevel)
}
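  // Illustrative usage sketch (host, port and the downstream operations are
  // hypothetical examples):
  //
  //   val lines = ssc.socketTextStream("localhost", 9999)
  //   val words = lines.flatMap(_.split(" "))
  //   words.countByValue().print()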
/**
   * Create an input stream from a TCP source hostname:port. Data is received using
   * a TCP socket and the received bytes are interpreted as objects using the given
   * converter.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param converter Function to convert the byte stream to objects
* @param storageLevel Storage level to use for storing the received objects
* @tparam T Type of the objects received (after converting bytes to objects)
*/
def socketStream[T: ClassTag](
hostname: String,
port: Int,
converter: (InputStream) => Iterator[T],
storageLevel: StorageLevel
): DStream[T] = {
new SocketInputDStream[T](this, hostname, port, converter, storageLevel)
}
/**
   * Create an input stream from a network source hostname:port, where data is received
* as serialized blocks (serialized using the Spark's serializer) that can be directly
* pushed into the block manager without deserializing them. This is the most efficient
* way to receive data.
* @param hostname Hostname to connect to for receiving data
* @param port Port to connect to for receiving data
* @param storageLevel Storage level to use for storing the received objects
* (default: StorageLevel.MEMORY_AND_DISK_SER_2)
* @tparam T Type of the objects in the received blocks
*/
def rawSocketStream[T: ClassTag](
hostname: String,
port: Int,
storageLevel: StorageLevel = StorageLevel.MEMORY_AND_DISK_SER_2
): DStream[T] = {
new RawInputDStream[T](this, hostname, port, storageLevel)
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them using the given key-value types and input format.
   * Files must be written to the monitored directory by "moving" them from another
   * location within the same file system. File names starting with . are ignored.
   * @param directory HDFS directory to monitor for new files
* @tparam K Key type for reading HDFS file
* @tparam V Value type for reading HDFS file
* @tparam F Input format for reading HDFS file
*/
def fileStream[
K: ClassTag,
V: ClassTag,
F <: NewInputFormat[K, V]: ClassTag
] (directory: String): DStream[(K, V)] = {
new FileInputDStream[K, V, F](this, directory)
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them using the given key-value types and input format.
   * Files must be written to the monitored directory by "moving" them from another
   * location within the same file system.
   * @param directory HDFS directory to monitor for new files
* @param filter Function to filter paths to process
* @param newFilesOnly Should process only new files and ignore existing files in the directory
* @tparam K Key type for reading HDFS file
* @tparam V Value type for reading HDFS file
* @tparam F Input format for reading HDFS file
*/
def fileStream[
K: ClassTag,
V: ClassTag,
F <: NewInputFormat[K, V]: ClassTag
] (directory: String, filter: Path => Boolean, newFilesOnly: Boolean): DStream[(K, V)] = {
new FileInputDStream[K, V, F](this, directory, filter, newFilesOnly)
}
/**
   * Create an input stream that monitors a Hadoop-compatible filesystem
   * for new files and reads them as text files (using key as LongWritable, value
   * as Text and input format as TextInputFormat). Files must be written to the
   * monitored directory by "moving" them from another location within the same
   * file system. File names starting with . are ignored.
   * @param directory HDFS directory to monitor for new files
*/
def textFileStream(directory: String): DStream[String] = {
fileStream[LongWritable, Text, TextInputFormat](directory).map(_._2.toString)
}
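  // Illustrative usage sketch (the directory is a hypothetical example):
  //
  //   val logs = ssc.textFileStream("hdfs://namenode:8020/incoming/logs")
  //   logs.filter(_.contains("ERROR")).print()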
/**
* Create an input stream from a queue of RDDs. In each batch,
* it will process either one or all of the RDDs returned by the queue.
* @param queue Queue of RDDs
* @param oneAtATime Whether only one RDD should be consumed from the queue in every interval
* @tparam T Type of objects in the RDD
*/
def queueStream[T: ClassTag](
queue: Queue[RDD[T]],
oneAtATime: Boolean = true
): DStream[T] = {
queueStream(queue, oneAtATime, sc.makeRDD(Seq[T](), 1))
}
/**
* Create an input stream from a queue of RDDs. In each batch,
* it will process either one or all of the RDDs returned by the queue.
* @param queue Queue of RDDs
* @param oneAtATime Whether only one RDD should be consumed from the queue in every interval
   * @param defaultRDD Default RDD returned by the DStream when the queue is empty.
   *                   Set to null if no RDD should be returned when the queue is empty
* @tparam T Type of objects in the RDD
*/
def queueStream[T: ClassTag](
queue: Queue[RDD[T]],
oneAtATime: Boolean,
defaultRDD: RDD[T]
): DStream[T] = {
new QueueInputDStream(this, queue, oneAtATime, defaultRDD)
}
/**
* Create a unified DStream from multiple DStreams of the same type and same slide duration.
*/
def union[T: ClassTag](streams: Seq[DStream[T]]): DStream[T] = {
new UnionDStream[T](streams.toArray)
}
/**
* Create a new DStream in which each RDD is generated by applying a function on RDDs of
* the DStreams.
*/
def transform[T: ClassTag](
dstreams: Seq[DStream[_]],
transformFunc: (Seq[RDD[_]], Time) => RDD[T]
): DStream[T] = {
new TransformedDStream[T](dstreams, sparkContext.clean(transformFunc))
}
/** Add a [[org.apache.spark.streaming.scheduler.StreamingListener]] object for
* receiving system events related to streaming.
*/
def addStreamingListener(streamingListener: StreamingListener) {
scheduler.listenerBus.addListener(streamingListener)
}
private def validate() {
assert(graph != null, "Graph is null")
graph.validate()
assert(
checkpointDir == null || checkpointDuration != null,
"Checkpoint directory has been set, but the graph checkpointing interval has " +
"not been set. Please use StreamingContext.checkpoint() to set the interval."
)
}
/**
* Start the execution of the streams.
*/
def start() = synchronized {
validate()
scheduler.start()
}
/**
   * Wait for the execution to stop. Any exceptions that occur during the execution
   * will be thrown in this thread.
*/
def awaitTermination() {
waiter.waitForStopOrError()
}
/**
   * Wait for the execution to stop. Any exceptions that occur during the execution
   * will be thrown in this thread.
* @param timeout time to wait in milliseconds
*/
def awaitTermination(timeout: Long) {
waiter.waitForStopOrError(timeout)
}
/**
* Stop the execution of the streams.
* @param stopSparkContext Stop the associated SparkContext or not
*/
def stop(stopSparkContext: Boolean = true) = synchronized {
scheduler.stop()
logInfo("StreamingContext stopped successfully")
waiter.notifyStop()
if (stopSparkContext) sc.stop()
}
}
/**
* StreamingContext object contains a number of utility functions related to the
* StreamingContext class.
*/
object StreamingContext extends Logging {
private[streaming] val DEFAULT_CLEANER_TTL = 3600
implicit def toPairDStreamFunctions[K: ClassTag, V: ClassTag](stream: DStream[(K,V)]) = {
new PairDStreamFunctions[K, V](stream)
}
/**
* Either recreate a StreamingContext from checkpoint data or create a new StreamingContext.
* If checkpoint data exists in the provided `checkpointPath`, then StreamingContext will be
* recreated from the checkpoint data. If the data does not exist, then the StreamingContext
   * will be created by calling the provided `creatingFunc`.
*
* @param checkpointPath Checkpoint directory used in an earlier StreamingContext program
* @param creatingFunc Function to create a new StreamingContext
* @param hadoopConf Optional Hadoop configuration if necessary for reading from the
* file system
* @param createOnError Optional, whether to create a new StreamingContext if there is an
* error in reading checkpoint data. By default, an exception will be
* thrown on error.
*/
def getOrCreate(
checkpointPath: String,
creatingFunc: () => StreamingContext,
hadoopConf: Configuration = new Configuration(),
createOnError: Boolean = false
): StreamingContext = {
val checkpointOption = try {
CheckpointReader.read(checkpointPath, new SparkConf(), hadoopConf)
} catch {
case e: Exception =>
if (createOnError) {
None
} else {
throw e
}
}
checkpointOption.map(new StreamingContext(null, _, null)).getOrElse(creatingFunc())
}
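  // Illustrative usage sketch (`createContext` is a hypothetical user function that
  // builds and configures a fresh StreamingContext, including its checkpoint directory):
  //
  //   val ssc = StreamingContext.getOrCreate("hdfs://namenode:8020/checkpoints", () => createContext())
  //   ssc.start()
  //   ssc.awaitTermination()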
/**
* Find the JAR from which a given class was loaded, to make it easy for users to pass
* their JARs to StreamingContext.
*/
def jarOfClass(cls: Class[_]) = SparkContext.jarOfClass(cls)
private[streaming] def createNewSparkContext(conf: SparkConf): SparkContext = {
// Set the default cleaner delay to an hour if not already set.
// This should be sufficient for even 1 second batch intervals.
if (MetadataCleaner.getDelaySeconds(conf) < 0) {
MetadataCleaner.setDelaySeconds(conf, DEFAULT_CLEANER_TTL)
}
val sc = new SparkContext(conf)
sc
}
private[streaming] def createNewSparkContext(
master: String,
appName: String,
sparkHome: String,
jars: Seq[String],
environment: Map[String, String]
): SparkContext = {
val conf = SparkContext.updatedConf(
new SparkConf(), master, appName, sparkHome, jars, environment)
createNewSparkContext(conf)
}
private[streaming] def rddToFileName[T](prefix: String, suffix: String, time: Time): String = {
if (prefix == null) {
time.milliseconds.toString
} else if (suffix == null || suffix.length ==0) {
prefix + "-" + time.milliseconds
} else {
prefix + "-" + time.milliseconds + "." + suffix
}
}
}
|
dotunolafunmiloye/spark
|
streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala
|
Scala
|
apache-2.0
| 19,947 |
package io.buoyant.linkerd
package protocol
import com.fasterxml.jackson.annotation.{JsonIgnore, JsonSubTypes, JsonTypeInfo}
import com.fasterxml.jackson.core.{JsonParser, TreeNode}
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer, JsonNode}
import com.twitter.conversions.storage._
import com.twitter.finagle.buoyant.{PathMatcher, ParamsMaybeWith}
import com.twitter.finagle.buoyant.linkerd.{DelayedRelease, Headers, HttpEngine, HttpTraceInitializer}
import com.twitter.finagle.client.{AddrMetadataExtraction, StackClient}
import com.twitter.finagle.filter.DtabStatsFilter
import com.twitter.finagle.http.filter.StatsFilter
import com.twitter.finagle.http.{Request, Response, param => hparam}
import com.twitter.finagle.liveness.FailureAccrualFactory
import com.twitter.finagle.service.Retries
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle.{Path, ServiceFactory, Stack, param => fparam}
import com.twitter.util.Future
import io.buoyant.linkerd.protocol.http._
import io.buoyant.router.{ClassifiedRetries, Http, RoutingFactory}
import io.buoyant.router.RoutingFactory.{IdentifiedRequest, RequestIdentification, UnidentifiedRequest}
import io.buoyant.router.http.AddForwardedHeader
import scala.collection.JavaConverters._
class HttpInitializer extends ProtocolInitializer.Simple {
val name = "http"
protected type Req = com.twitter.finagle.http.Request
protected type Rsp = com.twitter.finagle.http.Response
protected val defaultRouter = {
val pathStack = Http.router.pathStack
.prepend(Headers.Dst.PathFilter.module)
.replace(StackClient.Role.prepFactory, DelayedRelease.module)
.prepend(http.ErrorResponder.module)
val boundStack = Http.router.boundStack
.prepend(Headers.Dst.BoundFilter.module)
val clientStack = Http.router.clientStack
.prepend(http.AccessLogger.module)
.replace(HttpTraceInitializer.role, HttpTraceInitializer.clientModule)
.replace(Headers.Ctx.clientModule.role, Headers.Ctx.clientModule)
.insertAfter(DtabStatsFilter.role, HttpLoggerConfig.module)
.insertAfter(Retries.Role, http.StatusCodeStatsFilter.module)
.insertAfter(AddrMetadataExtraction.Role, RewriteHostHeader.module)
// ensure the client-stack framing filter is placed below the stats filter
// so that any malframed responses it fails are counted as errors
.insertAfter(FailureAccrualFactory.role, FramingFilter.clientModule)
Http.router
.withPathStack(pathStack)
.withBoundStack(boundStack)
.withClientStack(clientStack)
}
/**
* Apply the router's codec configuration parameters to a server.
*/
override protected def configureServer(router: Router, server: Server): Server =
super.configureServer(router, server)
.configured(router.params[hparam.MaxChunkSize])
.configured(router.params[hparam.MaxHeaderSize])
.configured(router.params[hparam.MaxInitialLineSize])
.configured(router.params[hparam.MaxRequestSize])
.configured(router.params[hparam.MaxResponseSize])
.configured(router.params[hparam.Streaming])
.configured(router.params[hparam.CompressionLevel])
protected val defaultServer = {
val stk = Http.server.stack
.replace(HttpTraceInitializer.role, HttpTraceInitializer.serverModule)
.replace(Headers.Ctx.serverModule.role, Headers.Ctx.serverModule)
.prepend(http.ErrorResponder.module)
.prepend(http.StatusCodeStatsFilter.module)
// ensure the server-stack framing filter is placed below the stats filter
// so that any malframed requests it fails are counted as errors
.insertAfter(StatsFilter.role, FramingFilter.serverModule)
.insertBefore(AddForwardedHeader.module.role, AddForwardedHeaderConfig.module)
Http.server.withStack(stk)
}
override def clearServerContext(stk: ServerStack): ServerStack = {
// Does NOT use the ClearContext module that forcibly clears the
// context. Instead, we just strip out headers on inbound requests.
stk.remove(HttpTraceInitializer.role)
.replace(Headers.Ctx.serverModule.role, Headers.Ctx.clearServerModule)
}
val configClass = classOf[HttpConfig]
override def defaultServerPort: Int = 4140
}
object HttpInitializer extends HttpInitializer
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.EXISTING_PROPERTY,
property = "kind",
visible = true,
defaultImpl = classOf[HttpDefaultClient]
)
@JsonSubTypes(Array(
new JsonSubTypes.Type(value = classOf[HttpDefaultClient], name = "io.l5d.global"),
new JsonSubTypes.Type(value = classOf[HttpStaticClient], name = "io.l5d.static")
))
abstract class HttpClient extends Client
class HttpDefaultClient extends HttpClient with DefaultClient with HttpClientConfig
class HttpStaticClient(val configs: Seq[HttpPrefixConfig]) extends HttpClient with StaticClient
class HttpPrefixConfig(prefix: PathMatcher) extends PrefixConfig(prefix) with HttpClientConfig
trait HttpClientConfig extends ClientConfig {
var engine: Option[HttpEngine] = None
@JsonIgnore
override def params(vars: Map[String, String]) = engine match {
case Some(engine) => engine.mk(super.params(vars))
case None => super.params(vars)
}
}
@JsonTypeInfo(
use = JsonTypeInfo.Id.NAME,
include = JsonTypeInfo.As.EXISTING_PROPERTY,
property = "kind",
visible = true,
defaultImpl = classOf[HttpDefaultSvc]
)
@JsonSubTypes(Array(
new JsonSubTypes.Type(value = classOf[HttpDefaultSvc], name = "io.l5d.global"),
new JsonSubTypes.Type(value = classOf[HttpStaticSvc], name = "io.l5d.static")
))
abstract class HttpSvc extends Svc
class HttpDefaultSvc extends HttpSvc with DefaultSvc with HttpSvcConfig
class HttpStaticSvc(val configs: Seq[HttpSvcPrefixConfig]) extends HttpSvc with StaticSvc
class HttpSvcPrefixConfig(prefix: PathMatcher) extends SvcPrefixConfig(prefix) with HttpSvcConfig
trait HttpSvcConfig extends SvcConfig {
@JsonIgnore
override def baseResponseClassifier = ClassifiedRetries.orElse(
ResponseClassifiers.NonRetryableServerFailures,
super.baseResponseClassifier
)
@JsonIgnore
override def responseClassifier =
super.responseClassifier.map { classifier =>
ResponseClassifiers.NonRetryableChunked(
ResponseClassifiers.HeaderRetryable(classifier)
)
}
}
case class HttpServerConfig(
engine: Option[HttpEngine],
addForwardedHeader: Option[AddForwardedHeaderConfig]
) extends ServerConfig {
@JsonIgnore
override def serverParams = {
val params = super.serverParams + AddForwardedHeaderConfig.Param(addForwardedHeader)
engine match {
case None => params
case Some(engine) => engine.mk(params)
}
}
}
// Cribbed from https://gist.github.com/Aivean/6bb90e3942f3bf966608
class HttpIdentifierConfigDeserializer extends JsonDeserializer[Option[Seq[HttpIdentifierConfig]]] {
override def deserialize(p: JsonParser, ctxt: DeserializationContext): Option[Seq[HttpIdentifierConfig]] = {
val codec = p.getCodec
codec.readTree[TreeNode](p) match {
case n: JsonNode if n.isArray =>
Some(n.asScala.toList.map(codec.treeToValue(_, classOf[HttpIdentifierConfig])))
case node => Some(Seq(codec.treeToValue(node, classOf[HttpIdentifierConfig])))
}
}
override def getNullValue(ctxt: DeserializationContext): Option[Seq[HttpIdentifierConfig]] = None
}
case class HttpConfig(
httpAccessLog: Option[String],
@JsonDeserialize(using = classOf[HttpIdentifierConfigDeserializer]) identifier: Option[Seq[HttpIdentifierConfig]],
loggers: Option[Seq[HttpLoggerConfig]],
maxChunkKB: Option[Int],
maxHeadersKB: Option[Int],
maxInitialLineKB: Option[Int],
maxRequestKB: Option[Int],
maxResponseKB: Option[Int],
streamingEnabled: Option[Boolean],
compressionLevel: Option[Int]
) extends RouterConfig {
var client: Option[HttpClient] = None
var servers: Seq[HttpServerConfig] = Nil
var service: Option[HttpSvc] = None
@JsonIgnore
override val protocol: ProtocolInitializer = HttpInitializer
@JsonIgnore
override val defaultResponseClassifier = ResponseClassifiers.NonRetryableChunked(
ResponseClassifiers.HeaderRetryable(
ClassifiedRetries.orElse(
ResponseClassifiers.NonRetryableServerFailures,
ClassifiedRetries.Default
)
)
)
@JsonIgnore
private[this] val loggerParam = loggers.map { configs =>
val loggerStack =
configs.foldRight[Stack[ServiceFactory[Request, Response]]](nilStack) { (config, next) =>
config.module.toStack(next)
}
HttpLoggerConfig.param.Logger(loggerStack)
}
@JsonIgnore
private[this] val combinedIdentifier = identifier.map { configs =>
Http.param.HttpIdentifier { (prefix, dtab) =>
RoutingFactory.Identifier.compose(configs.map(_.newIdentifier(prefix, dtab)))
}
}
@JsonIgnore
override def routerParams: Stack.Params = super.routerParams
.maybeWith(httpAccessLog.map(AccessLogger.param.File(_)))
.maybeWith(loggerParam)
.maybeWith(combinedIdentifier)
.maybeWith(maxChunkKB.map(kb => hparam.MaxChunkSize(kb.kilobytes)))
.maybeWith(maxHeadersKB.map(kb => hparam.MaxHeaderSize(kb.kilobytes)))
.maybeWith(maxInitialLineKB.map(kb => hparam.MaxInitialLineSize(kb.kilobytes)))
.maybeWith(maxRequestKB.map(kb => hparam.MaxRequestSize(kb.kilobytes)))
.maybeWith(maxResponseKB.map(kb => hparam.MaxResponseSize(kb.kilobytes)))
.maybeWith(streamingEnabled.map(hparam.Streaming(_)))
.maybeWith(compressionLevel.map(hparam.CompressionLevel(_)))
}
|
denverwilliams/linkerd
|
linkerd/protocol/http/src/main/scala/io/buoyant/linkerd/protocol/HttpConfig.scala
|
Scala
|
apache-2.0
| 9,629 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package kafka.api
import java.util.Properties
import junit.framework.Assert
import kafka.consumer.SimpleConsumer
import kafka.integration.KafkaServerTestHarness
import kafka.server.{KafkaServer, KafkaConfig}
import kafka.utils.TestUtils
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer._
import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback
import org.apache.kafka.common.MetricName
import org.apache.kafka.common.metrics.KafkaMetric
import org.junit.Assert._
import org.junit.{After, Before, Test}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
class QuotasTest extends KafkaServerTestHarness {
private val producerBufferSize = 300000
private val producerId1 = "QuotasTestProducer-1"
private val producerId2 = "QuotasTestProducer-2"
private val consumerId1 = "QuotasTestConsumer-1"
private val consumerId2 = "QuotasTestConsumer-2"
val numServers = 2
val overridingProps = new Properties()
// Low enough quota that a producer sending a small payload in a tight loop should get throttled
overridingProps.put(KafkaConfig.ProducerQuotaBytesPerSecondDefaultProp, "8000")
overridingProps.put(KafkaConfig.ConsumerQuotaBytesPerSecondDefaultProp, "2500")
// un-throttled
overridingProps.put(KafkaConfig.ProducerQuotaBytesPerSecondOverridesProp, producerId2 + "=" + Long.MaxValue)
overridingProps.put(KafkaConfig.ConsumerQuotaBytesPerSecondOverridesProp, consumerId2 + "=" + Long.MaxValue)
override def generateConfigs() = {
FixedPortTestUtils.createBrokerConfigs(numServers,
zkConnect,
enableControlledShutdown = false)
.map(KafkaConfig.fromProps(_, overridingProps))
}
var producers = mutable.Buffer[KafkaProducer[Array[Byte], Array[Byte]]]()
var consumers = mutable.Buffer[KafkaConsumer[Array[Byte], Array[Byte]]]()
var replicaConsumers = mutable.Buffer[SimpleConsumer]()
var leaderNode: KafkaServer = null
var followerNode: KafkaServer = null
private val topic1 = "topic-1"
@Before
override def setUp() {
super.setUp()
val producerProps = new Properties()
producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
producerProps.put(ProducerConfig.ACKS_CONFIG, "0")
producerProps.put(ProducerConfig.BLOCK_ON_BUFFER_FULL_CONFIG, "false")
producerProps.put(ProducerConfig.BUFFER_MEMORY_CONFIG, producerBufferSize.toString)
producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, producerId1)
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArraySerializer])
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArraySerializer])
producers += new KafkaProducer[Array[Byte], Array[Byte]](producerProps)
producerProps.put(ProducerConfig.CLIENT_ID_CONFIG, producerId2)
producers += new KafkaProducer[Array[Byte], Array[Byte]](producerProps)
val numPartitions = 1
val leaders = TestUtils.createTopic(zkClient, topic1, numPartitions, numServers, servers)
leaderNode = if (leaders(0).get == servers.head.config.brokerId) servers.head else servers(1)
followerNode = if (leaders(0).get != servers.head.config.brokerId) servers.head else servers(1)
assertTrue("Leader of all partitions of the topic should exist", leaders.values.forall(leader => leader.isDefined))
// Create consumers
val consumerProps = new Properties
consumerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokerList)
consumerProps.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "QuotasTest")
consumerProps.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
consumerProps.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.bootstrapUrl)
consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArrayDeserializer])
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
classOf[org.apache.kafka.common.serialization.ByteArrayDeserializer])
consumerProps.put(ConsumerConfig.PARTITION_ASSIGNMENT_STRATEGY_CONFIG, "range")
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, consumerId1)
consumers += new KafkaConsumer(consumerProps)
// Create replica consumers with the same clientId as the high level consumer. These requests should never be throttled
replicaConsumers += new SimpleConsumer("localhost", leaderNode.boundPort(), 1000000, 64*1024, consumerId1)
consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, consumerId2)
consumers += new KafkaConsumer(consumerProps)
replicaConsumers += new SimpleConsumer("localhost", leaderNode.boundPort(), 1000000, 64*1024, consumerId2)
}
@After
override def tearDown() {
producers.foreach( _.close )
consumers.foreach( _.close )
replicaConsumers.foreach( _.close )
super.tearDown()
}
@Test
def testThrottledProducerConsumer() {
val allMetrics: mutable.Map[MetricName, KafkaMetric] = leaderNode.metrics.metrics().asScala
val numRecords = 1000
produce(producers.head, numRecords)
val producerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.ProduceKey),
"Tracking throttle-time per client",
"client-id", producerId1)
Assert.assertTrue("Should have been throttled", allMetrics(producerMetricName).value() > 0)
// Consumer should read in a bursty manner and get throttled immediately
consume(consumers.head, numRecords)
    // The replica consumer should also not be throttled. Create a fetch request that will exceed the quota immediately
val request = new FetchRequestBuilder().addFetch(topic1, 0, 0, 1024*1024).replicaId(followerNode.config.brokerId).build()
replicaConsumers.head.fetch(request)
val consumerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.FetchKey),
"Tracking throttle-time per client",
"client-id", consumerId1)
Assert.assertTrue("Should have been throttled", allMetrics(consumerMetricName).value() > 0)
}
@Test
def testProducerConsumerOverrideUnthrottled() {
val allMetrics: mutable.Map[MetricName, KafkaMetric] = leaderNode.metrics.metrics().asScala
val numRecords = 1000
produce(producers(1), numRecords)
val producerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.ProduceKey),
"Tracking throttle-time per client",
"client-id", producerId2)
Assert.assertEquals("Should not have been throttled", Double.NaN, allMetrics(producerMetricName).value())
// The "client" consumer does not get throttled.
consume(consumers(1), numRecords)
    // The replica consumer should also not be throttled. Create a fetch request that will exceed the quota immediately
val request = new FetchRequestBuilder().addFetch(topic1, 0, 0, 1024*1024).replicaId(followerNode.config.brokerId).build()
replicaConsumers(1).fetch(request)
val consumerMetricName = new MetricName("throttle-time",
RequestKeys.nameForKey(RequestKeys.FetchKey),
"Tracking throttle-time per client",
"client-id", consumerId2)
Assert.assertEquals("Should not have been throttled", Double.NaN, allMetrics(consumerMetricName).value())
}
def produce(p: KafkaProducer[Array[Byte], Array[Byte]], count: Int): Int = {
var numBytesProduced = 0
for (i <- 0 to count) {
val payload = i.toString.getBytes
numBytesProduced += payload.length
p.send(new ProducerRecord[Array[Byte], Array[Byte]](topic1, null, null, payload),
new ErrorLoggingCallback(topic1, null, null, true)).get()
Thread.sleep(1)
}
numBytesProduced
}
def consume(consumer: KafkaConsumer[Array[Byte], Array[Byte]], numRecords: Int) {
consumer.subscribe(topic1)
var numConsumed = 0
while (numConsumed < numRecords) {
for (cr <- consumer.poll(100)) {
numConsumed += 1
}
}
}
}
|
Tony-Zhang03/kafka
|
core/src/test/scala/integration/kafka/api/QuotasTest.scala
|
Scala
|
apache-2.0
| 9,321 |
/*
* Copyright (c) 2014-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.iglu.schemaddl.redshift
/**
 * Class representing a comment block in a Ddl file.
 * Can be rendered into a file along with other Ddl-statements.
*
* @param lines sequence of lines
* @param prepend optional amount of spaces to prepend delimiter (--)
*/
case class CommentBlock(lines: Vector[String], prepend: Int = 0) extends Statement {
import CommentBlock._
override val separator = ""
  def toDdl = lines.map(l => "--" + emptyOrSpace(l)).mkString("\n")
}
object CommentBlock {
def apply(line: String, prepend: Int): CommentBlock =
CommentBlock(Vector(line), prepend)
/**
* Don't prepend empty strings with space
*/
private def emptyOrSpace(line: String): String =
if (line.nonEmpty) s" $line"
else ""
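  // Illustrative usage sketch: rendering a two-line block produces one "--"-prefixed line per
  // entry, joined by newlines, e.g. "-- first line\n-- second line".
  private[redshift] def renderedExample: String =
    CommentBlock(Vector("first line", "second line")).toDdl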
}
|
snowplow/schema-ddl
|
modules/core/src/main/scala/com.snowplowanalytics/iglu.schemaddl/redshift/CommentBlock.scala
|
Scala
|
apache-2.0
| 1,469 |
package deploymentzone.actor.unit
import deploymentzone.actor._
import org.scalatest.{WordSpecLike, Matchers}
import akka.testkit.{ImplicitSender, TestActorRef}
import com.typesafe.config.ConfigFactory
import akka.actor.ActorInitializationException
class StatsActorSpec
extends TestKit("stats-actor-unit-spec")
with WordSpecLike
with Matchers
with ImplicitSender {
"StatsActor" when {
"using configuration-only props" should {
"fall back to the default port when its not specified" in {
pending // upgrade to akka 2.3
val config = ConfigFactory.load("just-hostname.conf")
val subject = TestActorRef[StatsActor](StatsActor.props(config))
subject.underlyingActor.address.getPort should be(Defaults.STATSD_UDP_PORT)
}
"throw an exception when no hostname is specified" in {
pending // upgrade to akka 2.3
val config = ConfigFactory.empty()
val props = StatsActor.props(config)
system.actorOf(ExceptionSieve.props(props))
val subject = expectMsgClass(classOf[ActorInitializationException])
subject.getCause.getMessage should startWith("No configuration setting found for key 'deploymentzone")
}
"get initialized with the expected values when all values are specified" in {
pending // upgrade to akka 2.3
val config = ConfigFactory.load("stats-actor.conf")
val subject = TestActorRef[StatsActor](StatsActor.props(config))
subject.underlyingActor.address.getAddress.getHostAddress should be("127.0.0.1")
subject.underlyingActor.address.getPort should be(9999)
subject.underlyingActor.namespace should be("mango")
}
}
"using empty props" should {
"load the expected props" in {
pending // upgrade to akka 2.3
val subject = TestActorRef[StatsActor](StatsActor.props())
subject.underlyingActor.address.getAddress.getHostAddress should be("127.0.0.1")
subject.underlyingActor.address.getPort should be(32768)
subject.underlyingActor.namespace should be("")
}
}
"using another constructor" should {
"override the default settings with only the provided values" in {
pending // upgrade to akka 2.3
val subject = TestActorRef[StatsActor](StatsActor.props("127.0.0.1", "a-namespace"))
subject.underlyingActor.address.getAddress.getHostAddress should be("127.0.0.1")
subject.underlyingActor.address.getPort should be(32768)
subject.underlyingActor.namespace should be("a-namespace")
}
}
}
}
|
jamesmulcahy/akka-actor-statsd
|
src/test/scala/deploymentzone/actor/unit/StatsActorSpec.scala
|
Scala
|
mit
| 2,585 |
// Copyright (C) 2015, codejitsu.
package net.codejitsu.tasks
import net.codejitsu.tasks.dsl.Tasks._
import net.codejitsu.tasks.dsl._
/**
* Upload file to remote host(s).
*
* @param source source file to upload
* @param target destination hosts
* @param destinationPath path on destination hosts
 * @param usingSudo true, if the task has to be started with sudo
 * @param usingPar true, if parallel execution is required.
* @param user user
*/
final case class Upload[S <: Stage](target: Hosts,
source: String,
destinationPath: String,
usingSudo: Boolean = false,
usingPar: Boolean = false,
exec: String = "/usr/bin/rsync")(implicit user: User, stage: S, rights: S Allow Upload[S])
extends TaskM[Boolean] with UsingSudo[Upload[S]] with UsingParallelExecution[Upload[S]] {
private lazy val uploadProcs = target.hosts map {
case h: HostLike =>
val up: Process = "rsync" on Localhost ~> {
case Start => if (usingSudo) {
Sudo ~ Exec(exec, List("-avzhe", "ssh", source, s"${h.toString()}:$destinationPath"))
} else {
Exec(exec, List("-avzhe", "ssh", source, s"${h.toString()}:$destinationPath"))
}
}
up
}
private lazy val uploadTask: TaskM[Boolean] = if (usingPar) {
Processes(uploadProcs) !! Start
} else {
Processes(uploadProcs) ! Start
}
override def description: String = "upload file(s)"
override def run(verbose: VerbosityLevel = NoOutput, input: Option[TaskResult[_]] = None): TaskResult[Boolean] =
LoggedRun(
verbose,
usingSudo,
usingPar,
target,
s"$description '${source}' -> '${destinationPath}'",
uploadTask,
input
)(verbose)
override def sudo: Upload[S] = copy[S](usingSudo = true)
override def par: Upload[S] = copy[S](usingPar = true)
}
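// Usage sketch (illustrative; assumes Hosts, User and Stage values from the surrounding DSL are
// already in scope): Upload(targetHosts, "/tmp/app.jar", "/opt/app").sudo.par copies the file to
// every host in parallel via rsync over ssh, prefixing the command with sudo.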
|
codejitsu/tasks
|
tasks-dsl/src/main/scala/net/codejitsu/tasks/Upload.scala
|
Scala
|
apache-2.0
| 1,951 |
package com.v_standard.utils
import java.util.concurrent.atomic.AtomicInteger
/**
 * Counter class.
 */
class Counter(initVal: Int = 0) {
  /** Current count value. */
  private val count = new AtomicInteger(initVal)
  /**
   * Increment and return the value.
   *
   * @return the incremented value
   */
  def get(): Int = count.incrementAndGet()
}
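/**
 * Usage sketch: `get()` increments before returning, so a fresh Counter yields 1, 2, 3, ...
 * and `new Counter(10).get()` returns 11.
 */
object CounterExample {
  def firstThree(): Seq[Int] = {
    val c = new Counter()
    Seq(c.get(), c.get(), c.get()) // Seq(1, 2, 3)
  }
}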
|
VanishStandard/scalikejdbc-orm
|
src/main/scala/com/v_standard/utils/Counter.scala
|
Scala
|
bsd-3-clause
| 348 |
package fpinscala.answers.datastructures
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]
object Tree {
def size[A](t: Tree[A]): Int = t match {
case Leaf(_) => 1
case Branch(l,r) => 1 + size(l) + size(r)
}
/*
We're using the method `max` that exists on all `Int` values rather than an explicit `if` expression.
Note how similar the implementation is to `size`. We'll abstract out the common pattern in a later exercise.
*/
def maximum(t: Tree[Int]): Int = t match {
case Leaf(n) => n
case Branch(l,r) => maximum(l) max maximum(r)
}
/*
Again, note how similar the implementation is to `size` and `maximum`.
*/
def depth[A](t: Tree[A]): Int = t match {
case Leaf(_) => 0
case Branch(l,r) => 1 + (depth(l) max depth(r))
}
def map[A,B](t: Tree[A])(f: A => B): Tree[B] = t match {
case Leaf(a) => Leaf(f(a))
case Branch(l,r) => Branch(map(l)(f), map(r)(f))
}
/*
Like `foldRight` for lists, `fold` receives a "handler" for each of the data constructors of the type, and recursively
accumulates some value using these handlers. As with `foldRight`, `fold(t)(Leaf(_))(Branch(_,_)) == t`, and we can use
this function to implement just about any recursive function that would otherwise be defined by pattern matching.
*/
def fold[A,B](t: Tree[A])(f: A => B)(g: (B,B) => B): B = t match {
case Leaf(a) => f(a)
case Branch(l,r) => g(fold(l)(f)(g), fold(r)(f)(g))
}
def sizeViaFold[A](t: Tree[A]): Int =
fold(t)(a => 1)(1 + _ + _)
def maximumViaFold(t: Tree[Int]): Int =
fold(t)(a => a)(_ max _)
def depthViaFold[A](t: Tree[A]): Int =
fold(t)(a => 0)((d1,d2) => 1 + (d1 max d2))
/*
Note the type annotation required on the expression `Leaf(f(a))`. Without this annotation, we get an error like this:
type mismatch;
found : fpinscala.answers.datastructures.Branch[B]
required: fpinscala.answers.datastructures.Leaf[B]
fold(t)(a => Leaf(f(a)))(Branch(_,_))
^
This error is an unfortunate consequence of Scala using subtyping to encode algebraic data types. Without the
annotation, the result type of the fold gets inferred as `Leaf[B]` and it is then expected that the second argument
to `fold` will return `Leaf[B]`, which it doesn't (it returns `Branch[B]`). Really, we'd prefer Scala to
infer `Tree[B]` as the result type in both cases. When working with algebraic data types in Scala, it's somewhat
common to define helper functions that simply call the corresponding data constructors but give the less specific
result type:
def leaf[A](a: A): Tree[A] = Leaf(a)
def branch[A](l: Tree[A], r: Tree[A]): Tree[A] = Branch(l, r)
*/
def mapViaFold[A,B](t: Tree[A])(f: A => B): Tree[B] =
fold(t)(a => Leaf(f(a)): Tree[B])(Branch(_,_))
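  /*
  A small worked example tying these together: for the tree Branch(Leaf(1), Branch(Leaf(2), Leaf(3))),
  size is 5, maximum is 3, depth is 2, and each fold-based version agrees with its directly
  recursive counterpart.
  */
  def foldExample: Boolean = {
    val t: Tree[Int] = Branch(Leaf(1), Branch(Leaf(2), Leaf(3)))
    size(t) == 5 && sizeViaFold(t) == 5 &&
      maximum(t) == 3 && maximumViaFold(t) == 3 &&
      depth(t) == 2 && depthViaFold(t) == 2
  }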
}
|
peterbecich/fpinscala
|
answers/src/main/scala/fpinscala/datastructures/Tree.scala
|
Scala
|
mit
| 2,944 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.models.utils
import com.intel.analytics.bigdl.models.lenet.LeNet5
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.scalatest.{FlatSpec, Matchers}
class ModelBroadcastSpec extends FlatSpec with Matchers {
Logger.getLogger("org").setLevel(Level.WARN)
Logger.getLogger("akka").setLevel(Level.WARN)
val sc = new SparkContext("local[1]", "ModelBroadcast")
val model = LeNet5(10)
val modelBroadCast = ModelBroadcast[Float].broadcast(sc, model)
modelBroadCast.value().toString should be(model.toString)
modelBroadCast.value().parameters()._1 should be(model.parameters()._1)
sc.stop()
}
|
SeaOfOcean/BigDL
|
dl/src/test/scala/com/intel/analytics/bigdl/models/utils/ModelBroadcastSpec.scala
|
Scala
|
apache-2.0
| 1,467 |
//
// Logger.scala -- Scala object Logger
// Project OrcScala
//
// Created by jthywiss on Oct 15, 2018.
//
// Copyright (c) 2019 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.values.sites
/** Logger for the orc.values.sites package
*
* @author jthywiss
*/
object Logger extends orc.util.Logger("orc.values.sites")
|
orc-lang/orc
|
OrcScala/src/orc/values/sites/Logger.scala
|
Scala
|
bsd-3-clause
| 559 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.storage.common.jobs
import java.io.{DataInput, DataOutput}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.{Kryo, KryoSerializable}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce.Job
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.filter.factory.FastFilterFactory
import org.locationtech.geomesa.fs.storage.api.StorageMetadata.StorageFileAction.StorageFileAction
import org.locationtech.geomesa.fs.storage.api.StorageMetadata.{StorageFileAction, StorageFilePath}
import org.locationtech.geomesa.fs.storage.common.utils.StorageUtils.FileType
import org.locationtech.geomesa.fs.storage.common.utils.StorageUtils.FileType.FileType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
object StorageConfiguration {
object Counters {
val Group = "org.locationtech.geomesa.jobs.fs"
val Features = "features"
val Written = "written"
val Failed = "failed"
}
val PathKey = "geomesa.fs.path"
val PartitionsKey = "geomesa.fs.partitions"
val FileTypeKey = "geomesa.fs.output.file-type"
val SftNameKey = "geomesa.fs.sft.name"
val SftSpecKey = "geomesa.fs.sft.spec"
val FilterKey = "geomesa.fs.filter"
val TransformSpecKey = "geomesa.fs.transform.spec"
val TransformDefinitionKey = "geomesa.fs.transform.defs"
val PathActionKey = "geomesa.fs.path.action"
def setSft(conf: Configuration, sft: SimpleFeatureType): Unit = {
val name = Option(sft.getName.getNamespaceURI).map(ns => s"$ns:${sft.getTypeName}").getOrElse(sft.getTypeName)
conf.set(SftNameKey, name)
conf.set(SftSpecKey, SimpleFeatureTypes.encodeType(sft, includeUserData = true))
}
def getSft(conf: Configuration): SimpleFeatureType =
SimpleFeatureTypes.createType(conf.get(SftNameKey), conf.get(SftSpecKey))
def getSftName(conf: Configuration): String = conf.get(SftNameKey)
def getSftSpec(conf: Configuration): String = conf.get(SftSpecKey)
def setRootPath(conf: Configuration, path: Path): Unit = conf.set(PathKey, path.toString)
def getRootPath(conf: Configuration): Path = new Path(conf.get(PathKey))
def setPartitions(conf: Configuration, partitions: Array[String]): Unit =
conf.setStrings(PartitionsKey, partitions: _*)
def getPartitions(conf: Configuration): Array[String] = conf.getStrings(PartitionsKey)
def setFileType(conf: Configuration, fileType: FileType): Unit = conf.set(FileTypeKey, fileType.toString)
def getFileType(conf: Configuration): FileType = FileType.withName(conf.get(FileTypeKey))
def setFilter(conf: Configuration, filter: Filter): Unit = conf.set(FilterKey, ECQL.toCQL(filter))
def getFilter(conf: Configuration, sft: SimpleFeatureType): Option[Filter] =
Option(conf.get(FilterKey)).map(FastFilterFactory.toFilter(sft, _))
def setTransforms(conf: Configuration, transforms: (String, SimpleFeatureType)): Unit = {
val (tdefs, tsft) = transforms
conf.set(TransformDefinitionKey, tdefs)
conf.set(TransformSpecKey, SimpleFeatureTypes.encodeType(tsft, includeUserData = true))
}
def getTransforms(conf: Configuration): Option[(String, SimpleFeatureType)] = {
for { defs <- Option(conf.get(TransformDefinitionKey)); spec <- Option(conf.get(TransformSpecKey)) } yield {
(defs, SimpleFeatureTypes.createType("", spec))
}
}
def setPathActions(conf: Configuration, paths: Seq[StorageFilePath]): Unit = {
paths.foreach { case StorageFilePath(f, path) =>
conf.set(s"$PathActionKey.${path.getName}", s"${f.timestamp}:${f.action}")
}
}
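  // The per-path entry written above has the form "<timestamp>:<action>" under the key
  // "geomesa.fs.path.action.<file name>"; getPathAction splits it back apart below.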
def getPathAction(conf: Configuration, path: Path): (Long, StorageFileAction) = {
val Array(ts, action) = conf.get(s"$PathActionKey.${path.getName}").split(":")
(ts.toLong, StorageFileAction.withName(action))
}
/**
* Key used for merging feature updates.
*
* Implements hadoop writable for m/r, kryo serializable for spark, and comparable to sort in
* reverse chronological order
*/
class SimpleFeatureAction extends Writable with KryoSerializable with Comparable[SimpleFeatureAction] {
private var _id: String = _
private var _timestamp: Long = _
private var _action: StorageFileAction = _
def this(id: String, timestamp: Long, action: StorageFileAction) = {
this()
this._id = id
this._timestamp = timestamp
this._action = action
}
def id: String = _id
def timestamp: Long = _timestamp
def action: StorageFileAction = _action
override def compareTo(o: SimpleFeatureAction): Int = {
var res = _id.compareTo(o.id)
if (res == 0) {
res = _timestamp.compareTo(o.timestamp) * -1 // note: reverse chronological sort
if (res == 0) {
res = _action.compareTo(o.action)
}
}
res
}
override def write(out: DataOutput): Unit = {
out.writeUTF(_id)
out.writeLong(_timestamp)
out.writeUTF(_action.toString)
}
override def readFields(in: DataInput): Unit = {
_id = in.readUTF()
_timestamp = in.readLong()
_action = StorageFileAction.withName(in.readUTF())
}
override def write(kryo: Kryo, output: Output): Unit = {
output.writeString(_id)
output.writeLong(_timestamp)
output.writeString(_action.toString)
}
override def read(kryo: Kryo, input: Input): Unit = {
_id = input.readString()
_timestamp = input.readLong()
_action = StorageFileAction.withName(input.readString())
}
}
}
trait StorageConfiguration {
def configureOutput(sft: SimpleFeatureType, job: Job): Unit
}
|
elahrvivaz/geomesa
|
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-common/src/main/scala/org/locationtech/geomesa/fs/storage/common/jobs/StorageConfiguration.scala
|
Scala
|
apache-2.0
| 6,338 |
package controllers
import java.util.concurrent.CountDownLatch
import play.api.mvc._
import play.libs.Akka
import rros.{SocketListener, Response, RROSProtocol}
import rros.play.{PublisherSocketAdapter, ChannelManagementTableImpl, ReceiverSocketAdapter}
import scala.util.Random
/**
* Created by namnguyen on 3/12/15.
*/
object Sockets extends Controller{
implicit val remoteActorSystem = Akka.system()
val managementTable = new ChannelManagementTableImpl()
/**
* Create a bi-directional channel
* @param endpoint
* @return
*/
def socket(endpoint:String) = WebSocket.using[String] { request =>
val rrosAdapter = new ReceiverSocketAdapter(endpoint, managementTable)
rrosAdapter += new SocketListener {
override def onClose(): Unit = { println(s"$endpoint close")}
override def onFailure(exc: Exception): Unit = { println(s"$endpoint failure")}
      override def onReceived(message: String): Unit = { println(s"$endpoint received $message") }
override def onConnect(): Unit = { println("On Connect")}
}
val rros_protocol = RROSProtocol(rrosAdapter)
rros_protocol.onRequestReceived( Some { implicit rros_request =>
println(rros_request)
Thread.sleep(Random.nextInt(500))
Response("OK",Some("request ["+rros_request.uri+"]"))
} )
rrosAdapter.handle
}
/**
* Push from http request
* @param endpoint
* @param content
* @return
*/
def push(endpoint:String,verb:String,resource:String,content:String) = Action { request =>
val pushAdapter = new PublisherSocketAdapter(endpoint,managementTable)
val rros_protocol = RROSProtocol(pushAdapter)
val countDown = new CountDownLatch(1)
var out_response:rros.Response = null
var out_exception:Exception = null
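    // Bridge the asynchronous send callbacks to this synchronous action: whichever callback
    // fires first stores its result and releases the latch so the HTTP response can be built below.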
rros_protocol.send(
rros.Request(verb,resource,Some(content))
,onComplete = { response =>
out_response = response
countDown.countDown()
}
,onFailure = { exc =>
out_exception = exc
countDown.countDown()
}
)
countDown.await()
pushAdapter.close()
if (out_response!=null)
Ok(s"$out_response")
else
Ok(s"$out_exception")
}
}
|
namhnguyen/RROS
|
examples/play_socket_server/app/controllers/Sockets.scala
|
Scala
|
apache-2.0
| 2,198 |
package com.orendainx.trucking.storm.bolts
import java.util
import com.orendainx.trucking.commons.models.{EnrichedTruckAndTrafficData, TruckEventTypes, WindowedDriverStats}
import com.typesafe.scalalogging.Logger
import org.apache.storm.task.{OutputCollector, TopologyContext}
import org.apache.storm.topology.OutputFieldsDeclarer
import org.apache.storm.topology.base.BaseWindowedBolt
import org.apache.storm.tuple.{Fields, Values}
import org.apache.storm.windowing.TupleWindow
import scala.collection.JavaConverters._
/**
* Takes EnrichedTruckAndTrafficData and generates driver statistics. It emits WindowedDriverStats onto its stream.
*
* @author Edgar Orendain <[email protected]>
*/
class DataWindowingBolt extends BaseWindowedBolt {
private lazy val log = Logger(this.getClass)
private var outputCollector: OutputCollector = _
override def prepare(stormConf: util.Map[_, _], context: TopologyContext, collector: OutputCollector): Unit = {
outputCollector = collector
}
override def execute(inputWindow: TupleWindow): Unit = {
val driverStats = inputWindow.get().asScala
.map(_.getValueByField("data").asInstanceOf[EnrichedTruckAndTrafficData]) // List[Tuple] => List[EnrichedTruckAndTrafficData]
.groupBy(d => d.driverId) // List[EnrichedTruckAndTrafficData] => Map[driverId, List[EnrichedTruckAndTrafficData]]
.mapValues({ dataLst => // Map[driverId, List[EnrichedTruckAndTrafficData]] => Map[driverId, (tupleOfStats)]
val sums = dataLst
.map(e => (e.speed, e.foggy, e.rainy, e.windy, if (e.eventType == TruckEventTypes.Normal) 0 else 1))
.foldLeft((0, 0, 0, 0, 0))((s, v) => (s._1 + v._1, s._2 + v._2, s._3 + v._3, s._4 + v._4, s._5 + v._5))
(sums._1 / dataLst.size, sums._2, sums._3, sums._4, sums._5)
})
    /*
     * At this point, driverStats maps each driver id to a tuple of stats computed over the span
     * of the window:
     * - Average speed
     * - Total fog
     * - Total rain
     * - Total wind
     * - Total violations
     */
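    // Worked example (illustrative numbers): two events for one driver with speeds 60 and 80,
    // fog flags 1 and 0, no rain, wind flags 1 and 1, and one non-normal event type reduce to
    // (70, 1, 0, 2, 1): average speed 70, 1 foggy, 0 rainy, 2 windy, 1 violation.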
driverStats.foreach({case (id, s) => outputCollector.emit(new Values("WindowedDriverStats", WindowedDriverStats(id, s._1, s._2, s._3, s._4, s._5)))})
// Acknowledge all tuples processed. It is best practice to perform this after all processing has been completed.
inputWindow.get().asScala.foreach(outputCollector.ack)
}
override def declareOutputFields(declarer: OutputFieldsDeclarer): Unit = declarer.declare(new Fields("dataType", "data"))
}
|
orendain/trucking-iot
|
storm-topology/src/main/scala/com/orendainx/trucking/storm/bolts/DataWindowingBolt.scala
|
Scala
|
apache-2.0
| 2,526 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql.agg
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.OptimizerConfigOptions
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
import org.apache.flink.table.planner.utils.{AggregatePhaseStrategy, CountAggFunction, TableTestBase}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Test}
import java.sql.Timestamp
import java.util
import scala.collection.JavaConversions._
@RunWith(classOf[Parameterized])
class GroupWindowTest(aggStrategy: AggregatePhaseStrategy) extends TableTestBase {
private val util = batchTestUtil()
@Before
def before(): Unit = {
util.tableEnv.getConfig.getConfiguration.setString(
OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY, aggStrategy.toString)
util.addFunction("countFun", new CountAggFunction)
util.addTableSource[(Int, Timestamp, Int, Long)]("MyTable", 'a, 'b, 'c, 'd)
util.addTableSource[(Timestamp, Long, Int, String)]("MyTable1", 'ts, 'a, 'b, 'c)
util.addTableSource[(Int, Long, String, Int, Timestamp)]("MyTable2", 'a, 'b, 'c, 'd, 'ts)
util.tableEnv.executeSql(
s"""
|create table MyTable3 (
| a int,
| b bigint,
| c as proctime()
|) with (
| 'connector' = 'COLLECTION'
|)
|""".stripMargin)
}
@Test
def testHopWindowNoOffset(): Unit = {
val sqlQuery =
"SELECT SUM(a) AS sumA, COUNT(b) AS cntB FROM MyTable2 " +
"GROUP BY HOP(ts, INTERVAL '1' HOUR, INTERVAL '2' HOUR, TIME '10:00:00')"
expectedException.expect(classOf[TableException])
expectedException.expectMessage("HOP window with alignment is not supported yet.")
util.verifyExecPlan(sqlQuery)
}
@Test
def testSessionWindowNoOffset(): Unit = {
val sqlQuery =
"SELECT SUM(a) AS sumA, COUNT(b) AS cntB FROM MyTable2 " +
"GROUP BY SESSION(ts, INTERVAL '2' HOUR, TIME '10:00:00')"
expectedException.expect(classOf[TableException])
expectedException.expectMessage("SESSION window with alignment is not supported yet.")
util.verifyExecPlan(sqlQuery)
}
@Test
def testVariableWindowSize(): Unit = {
expectedException.expect(classOf[TableException])
expectedException.expectMessage("Only constant window descriptors are supported")
util.verifyExecPlan(
"SELECT COUNT(*) FROM MyTable2 GROUP BY TUMBLE(ts, b * INTERVAL '1' MINUTE)")
}
@Test
def testTumbleWindowWithInvalidUdAggArgs(): Unit = {
val weightedAvg = new WeightedAvgWithMerge
util.addFunction("weightedAvg", weightedAvg)
val sql = "SELECT weightedAvg(c, a) AS wAvg FROM MyTable2 " +
"GROUP BY TUMBLE(ts, INTERVAL '4' MINUTE)"
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("SQL validation failed. "
+ "Given parameters of function 'weightedAvg' do not match any signature.")
util.verifyExecPlan(sql)
}
@Test
def testWindowProctime(): Unit = {
val sqlQuery =
"SELECT TUMBLE_PROCTIME(ts, INTERVAL '4' MINUTE) FROM MyTable2 " +
"GROUP BY TUMBLE(ts, INTERVAL '4' MINUTE), c"
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"PROCTIME window property is not supported in batch queries.")
util.verifyExecPlan(sqlQuery)
}
@Test(expected = classOf[AssertionError])
def testWindowAggWithGroupSets(): Unit = {
    // TODO: support group sets
    // currently, the optimized plan is not correct, and an exception will be thrown in code-gen
val sql =
"""
|SELECT COUNT(*),
| TUMBLE_END(ts, INTERVAL '15' MINUTE) + INTERVAL '1' MINUTE
|FROM MyTable1
| GROUP BY rollup(TUMBLE(ts, INTERVAL '15' MINUTE), b)
""".stripMargin
util.verifyRelPlanNotExpected(sql, "TUMBLE(ts")
}
@Test
def testNoGroupingTumblingWindow(): Unit = {
val sqlQuery = "SELECT AVG(c), SUM(a) FROM MyTable GROUP BY TUMBLE(b, INTERVAL '3' SECOND)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testTumblingWindowSortAgg1(): Unit = {
val sqlQuery = "SELECT MAX(c) FROM MyTable1 GROUP BY a, TUMBLE(ts, INTERVAL '3' SECOND)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testTumblingWindowSortAgg2(): Unit = {
val sqlQuery = "SELECT AVG(c), countFun(a) FROM MyTable " +
"GROUP BY a, d, TUMBLE(b, INTERVAL '3' SECOND)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testTumblingWindowHashAgg1(): Unit = {
val sqlQuery = "SELECT COUNT(c) FROM MyTable1 GROUP BY a, TUMBLE(ts, INTERVAL '3' SECOND)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testTumblingWindowHashAgg2(): Unit = {
val sql = "SELECT AVG(c), COUNT(a) FROM MyTable GROUP BY a, d, TUMBLE(b, INTERVAL '3' SECOND)"
util.verifyExecPlan(sql)
}
@Test
def testNonPartitionedTumblingWindow(): Unit = {
val sqlQuery =
"SELECT SUM(a) AS sumA, COUNT(b) AS cntB FROM MyTable2 GROUP BY TUMBLE(ts, INTERVAL '2' HOUR)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testPartitionedTumblingWindow(): Unit = {
val sqlQuery =
"""
|SELECT TUMBLE_START(ts, INTERVAL '4' MINUTE),
| TUMBLE_END(ts, INTERVAL '4' MINUTE),
| TUMBLE_ROWTIME(ts, INTERVAL '4' MINUTE),
| c,
| SUM(a) AS sumA,
| MIN(b) AS minB
|FROM MyTable2
| GROUP BY TUMBLE(ts, INTERVAL '4' MINUTE), c
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testTumblingWindowWithUdAgg(): Unit = {
util.addFunction("weightedAvg", new WeightedAvgWithMerge)
val sql = "SELECT weightedAvg(b, a) AS wAvg FROM MyTable2 " +
"GROUP BY TUMBLE(ts, INTERVAL '4' MINUTE)"
util.verifyExecPlan(sql)
}
@Test
def testTumblingWindowWithProctime(): Unit = {
val sql = "select sum(a), max(b) from MyTable3 group by TUMBLE(c, INTERVAL '1' SECOND)"
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Window can not be defined over a proctime attribute column for batch mode")
util.verifyExecPlan(sql)
}
@Test
def testNoGroupingSlidingWindow(): Unit = {
val sqlQuery =
"""
|SELECT SUM(a),
| HOP_START(b, INTERVAL '3' SECOND, INTERVAL '3' SECOND),
| HOP_END(b, INTERVAL '3' SECOND, INTERVAL '3' SECOND)
|FROM MyTable
| GROUP BY HOP(b, INTERVAL '3' SECOND, INTERVAL '3' SECOND)
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowSortAgg1(): Unit = {
val sqlQuery = "SELECT MAX(c) FROM MyTable1 " +
"GROUP BY a, HOP(ts, INTERVAL '3' SECOND, INTERVAL '1' HOUR)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowSortAgg2(): Unit = {
val sqlQuery = "SELECT MAX(c) FROM MyTable1 " +
"GROUP BY b, HOP(ts, INTERVAL '0.111' SECOND(1,3), INTERVAL '1' SECOND)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowSortAgg3(): Unit = {
val sqlQuery = "SELECT countFun(c) FROM MyTable " +
" GROUP BY a, d, HOP(b, INTERVAL '3' SECOND, INTERVAL '1' HOUR)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowSortAggWithPaneOptimization(): Unit = {
val sqlQuery = "SELECT COUNT(c) FROM MyTable1 " +
"GROUP BY a, HOP(ts, INTERVAL '3' SECOND, INTERVAL '1' HOUR)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowHashAgg(): Unit = {
val sqlQuery = "SELECT count(c) FROM MyTable1 " +
"GROUP BY b, HOP(ts, INTERVAL '3' SECOND, INTERVAL '1' HOUR)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testNonPartitionedSlidingWindow(): Unit = {
val sqlQuery =
"SELECT SUM(a) AS sumA, COUNT(b) AS cntB " +
"FROM MyTable2 " +
"GROUP BY HOP(ts, INTERVAL '15' MINUTE, INTERVAL '90' MINUTE)"
util.verifyExecPlan(sqlQuery)
}
@Test
def testPartitionedSlidingWindow(): Unit = {
val sqlQuery =
"SELECT " +
" c, " +
" HOP_END(ts, INTERVAL '1' HOUR, INTERVAL '3' HOUR), " +
" HOP_START(ts, INTERVAL '1' HOUR, INTERVAL '3' HOUR), " +
" HOP_ROWTIME(ts, INTERVAL '1' HOUR, INTERVAL '3' HOUR), " +
" SUM(a) AS sumA, " +
" AVG(b) AS avgB " +
"FROM MyTable2 " +
"GROUP BY HOP(ts, INTERVAL '1' HOUR, INTERVAL '3' HOUR), d, c"
util.verifyExecPlan(sqlQuery)
}
@Test
def testSlidingWindowWithProctime(): Unit = {
val sql =
s"""
|select sum(a), max(b)
|from MyTable3
|group by HOP(c, INTERVAL '1' SECOND, INTERVAL '1' MINUTE)
|""".stripMargin
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Window can not be defined over a proctime attribute column for batch mode")
util.verifyExecPlan(sql)
}
@Test
// TODO session window is not supported now
def testNonPartitionedSessionWindow(): Unit = {
val sqlQuery = "SELECT COUNT(*) AS cnt FROM MyTable2 GROUP BY SESSION(ts, INTERVAL '30' MINUTE)"
expectedException.expect(classOf[TableException])
expectedException.expectMessage(
"Cannot generate a valid execution plan for the given query")
util.verifyExecPlan(sqlQuery)
}
@Test
// TODO session window is not supported now
def testPartitionedSessionWindow(): Unit = {
val sqlQuery =
"""
|SELECT c, d,
| SESSION_START(ts, INTERVAL '12' HOUR),
| SESSION_END(ts, INTERVAL '12' HOUR),
| SESSION_ROWTIME(ts, INTERVAL '12' HOUR),
| SUM(a) AS sumA,
| MIN(b) AS minB
|FROM MyTable2
| GROUP BY SESSION(ts, INTERVAL '12' HOUR), c, d
""".stripMargin
expectedException.expect(classOf[TableException])
expectedException.expectMessage(
"Cannot generate a valid execution plan for the given query")
util.verifyExecPlan(sqlQuery)
}
@Test
def testSessionWindowWithProctime(): Unit = {
val sql =
s"""
|select sum(a), max(b)
|from MyTable3
|group by SESSION(c, INTERVAL '1' MINUTE)
|""".stripMargin
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Window can not be defined over a proctime attribute column for batch mode")
util.verifyExecPlan(sql)
}
@Test
def testWindowEndOnly(): Unit = {
val sqlQuery =
"SELECT TUMBLE_END(ts, INTERVAL '4' MINUTE) FROM MyTable2 " +
"GROUP BY TUMBLE(ts, INTERVAL '4' MINUTE), c"
util.verifyExecPlan(sqlQuery)
}
@Test
def testExpressionOnWindowHavingFunction(): Unit = {
val sql =
"""
|SELECT COUNT(*),
| HOP_START(ts, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE)
|FROM MyTable2
| GROUP BY HOP(ts, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE)
| HAVING
| SUM(a) > 0 AND
| QUARTER(HOP_START(ts, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE)) = 1
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testDecomposableAggFunctions(): Unit = {
val sql =
"""
|SELECT VAR_POP(b),
| VAR_SAMP(b),
| STDDEV_POP(b),
| STDDEV_SAMP(b),
| TUMBLE_START(ts, INTERVAL '15' MINUTE),
| TUMBLE_END(ts, INTERVAL '15' MINUTE)
|FROM MyTable1
| GROUP BY TUMBLE(ts, INTERVAL '15' MINUTE)
""".stripMargin
util.verifyExecPlan(sql)
}
// TODO: fix the plan regression when FLINK-19668 is fixed.
@Test
def testReturnTypeInferenceForWindowAgg() = {
val sql =
"""
|SELECT
| SUM(correct) AS s,
| AVG(correct) AS a,
| TUMBLE_START(b, INTERVAL '15' MINUTE) AS wStart
|FROM (
| SELECT CASE a
| WHEN 1 THEN 1
| ELSE 99
| END AS correct, b
| FROM MyTable
|)
|GROUP BY TUMBLE(b, INTERVAL '15' MINUTE)
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testWindowAggregateWithDifferentWindows(): Unit = {
// This test ensures that the LogicalWindowAggregate node' digest contains the window specs.
// This allows the planner to make the distinction between similar aggregations using different
// windows (see FLINK-15577).
val sql =
"""
|WITH window_1h AS (
| SELECT 1
| FROM MyTable2
| GROUP BY HOP(`ts`, INTERVAL '1' HOUR, INTERVAL '1' HOUR)
|),
|
|window_2h AS (
| SELECT 1
| FROM MyTable2
| GROUP BY HOP(`ts`, INTERVAL '1' HOUR, INTERVAL '2' HOUR)
|)
|
|(SELECT * FROM window_1h)
|UNION ALL
|(SELECT * FROM window_2h)
|""".stripMargin
util.verifyExecPlan(sql)
}
}
object GroupWindowTest {
@Parameterized.Parameters(name = "aggStrategy={0}")
def parameters(): util.Collection[AggregatePhaseStrategy] = {
Seq[AggregatePhaseStrategy](
AggregatePhaseStrategy.AUTO,
AggregatePhaseStrategy.ONE_PHASE,
AggregatePhaseStrategy.TWO_PHASE
)
}
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/GroupWindowTest.scala
|
Scala
|
apache-2.0
| 14,021 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.types._
class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
test("concat") {
def testConcat(inputs: String*): Unit = {
val expected = if (inputs.contains(null)) null else inputs.mkString
checkEvaluation(Concat(inputs.map(Literal.create(_, StringType))), expected, EmptyRow)
}
testConcat()
testConcat(null)
testConcat("")
testConcat("ab")
testConcat("a", "b")
testConcat("a", "b", "C")
testConcat("a", null, "C")
testConcat("a", null, null)
testConcat(null, null, null)
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
testConcat("数据", null, "砖头")
// scalastyle:on
}
test("SPARK-22498: Concat should not generate codes beyond 64KB") {
val N = 5000
val strs = (1 to N).map(x => s"s$x")
checkEvaluation(Concat(strs.map(Literal.create(_, StringType))), strs.mkString, EmptyRow)
}
test("SPARK-22771 Check Concat.checkInputDataTypes results") {
assert(Concat(Seq.empty[Expression]).checkInputDataTypes().isSuccess)
assert(Concat(Literal.create("a") :: Literal.create("b") :: Nil)
.checkInputDataTypes().isSuccess)
assert(Concat(Literal.create("a".getBytes) :: Literal.create("b".getBytes) :: Nil)
.checkInputDataTypes().isSuccess)
assert(Concat(Literal.create(1) :: Literal.create(2) :: Nil)
.checkInputDataTypes().isFailure)
assert(Concat(Literal.create("a") :: Literal.create("b".getBytes) :: Nil)
.checkInputDataTypes().isFailure)
}
test("concat_ws") {
def testConcatWs(expected: String, sep: String, inputs: Any*): Unit = {
val inputExprs = inputs.map {
case s: Seq[_] => Literal.create(s, ArrayType(StringType))
case null => Literal.create(null, StringType)
case s: String => Literal.create(s, StringType)
}
val sepExpr = Literal.create(sep, StringType)
checkEvaluation(ConcatWs(sepExpr +: inputExprs), expected, EmptyRow)
}
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
testConcatWs(null, null)
testConcatWs(null, null, "a", "b")
testConcatWs("", "")
testConcatWs("ab", "哈哈", "ab")
testConcatWs("a哈哈b", "哈哈", "a", "b")
testConcatWs("a哈哈b", "哈哈", "a", null, "b")
testConcatWs("a哈哈b哈哈c", "哈哈", null, "a", null, "b", "c")
testConcatWs("ab", "哈哈", Seq("ab"))
testConcatWs("a哈哈b", "哈哈", Seq("a", "b"))
testConcatWs("a哈哈b哈哈c哈哈d", "哈哈", Seq("a", null, "b"), null, "c", Seq(null, "d"))
testConcatWs("a哈哈b哈哈c", "哈哈", Seq("a", null, "b"), null, "c", Seq.empty[String])
testConcatWs("a哈哈b哈哈c", "哈哈", Seq("a", null, "b"), null, "c", Seq[String](null))
// scalastyle:on
}
test("SPARK-22549: ConcatWs should not generate codes beyond 64KB") {
val N = 5000
val sepExpr = Literal.create("#", StringType)
val strings1 = (1 to N).map(x => s"s$x")
val inputsExpr1 = strings1.map(Literal.create(_, StringType))
checkEvaluation(ConcatWs(sepExpr +: inputsExpr1), strings1.mkString("#"), EmptyRow)
val strings2 = (1 to N).map(x => Seq(s"s$x"))
val inputsExpr2 = strings2.map(Literal.create(_, ArrayType(StringType)))
checkEvaluation(
ConcatWs(sepExpr +: inputsExpr2), strings2.map(s => s(0)).mkString("#"), EmptyRow)
}
test("elt") {
def testElt(result: String, n: java.lang.Integer, args: String*): Unit = {
checkEvaluation(
Elt(Literal.create(n, IntegerType) +: args.map(Literal.create(_, StringType))),
result)
}
testElt("hello", 1, "hello", "world")
testElt(null, 1, null, "world")
testElt(null, null, "hello", "world")
    // Invalid ranges
testElt(null, 3, "hello", "world")
testElt(null, 0, "hello", "world")
testElt(null, -1, "hello", "world")
// type checking
assert(Elt(Seq.empty).checkInputDataTypes().isFailure)
assert(Elt(Seq(Literal(1))).checkInputDataTypes().isFailure)
assert(Elt(Seq(Literal(1), Literal("A"))).checkInputDataTypes().isSuccess)
assert(Elt(Seq(Literal(1), Literal(2))).checkInputDataTypes().isFailure)
}
test("SPARK-22550: Elt should not generate codes beyond 64KB") {
val N = 10000
val strings = (1 to N).map(x => s"s$x")
val args = Literal.create(N, IntegerType) +: strings.map(Literal.create(_, StringType))
checkEvaluation(Elt(args), s"s$N")
}
test("StringComparison") {
val row = create_row("abc", null)
val c1 = 'a.string.at(0)
val c2 = 'a.string.at(1)
checkEvaluation(c1 contains "b", true, row)
checkEvaluation(c1 contains "x", false, row)
checkEvaluation(c2 contains "b", null, row)
checkEvaluation(c1 contains Literal.create(null, StringType), null, row)
checkEvaluation(c1 startsWith "a", true, row)
checkEvaluation(c1 startsWith "b", false, row)
checkEvaluation(c2 startsWith "a", null, row)
checkEvaluation(c1 startsWith Literal.create(null, StringType), null, row)
checkEvaluation(c1 endsWith "c", true, row)
checkEvaluation(c1 endsWith "b", false, row)
checkEvaluation(c2 endsWith "b", null, row)
checkEvaluation(c1 endsWith Literal.create(null, StringType), null, row)
}
test("Substring") {
val row = create_row("example", "example".toArray.map(_.toByte))
val s = 'a.string.at(0)
// substring from zero position with less-than-full length
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)), "ex", row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(2, IntegerType)), "ex", row)
// substring from zero position with full length
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(7, IntegerType)), "example", row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(7, IntegerType)), "example", row)
// substring from zero position with greater-than-full length
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(100, IntegerType)),
"example", row)
checkEvaluation(Substring(s, Literal.create(1, IntegerType), Literal.create(100, IntegerType)),
"example", row)
// substring from nonzero position with less-than-full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(2, IntegerType)),
"xa", row)
// substring from nonzero position with full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(6, IntegerType)),
"xample", row)
// substring from nonzero position with greater-than-full length
checkEvaluation(Substring(s, Literal.create(2, IntegerType), Literal.create(100, IntegerType)),
"xample", row)
// zero-length substring (within string bounds)
checkEvaluation(Substring(s, Literal.create(0, IntegerType), Literal.create(0, IntegerType)),
"", row)
// zero-length substring (beyond string bounds)
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
"", row)
// substring(null, _, _) -> null
checkEvaluation(Substring(s, Literal.create(100, IntegerType), Literal.create(4, IntegerType)),
null, create_row(null))
// substring(_, null, _) -> null
checkEvaluation(Substring(s, Literal.create(null, IntegerType), Literal.create(4, IntegerType)),
null, row)
// substring(_, _, null) -> null
checkEvaluation(
Substring(s, Literal.create(100, IntegerType), Literal.create(null, IntegerType)),
null,
row)
// 2-arg substring from zero position
checkEvaluation(
Substring(s, Literal.create(0, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"example",
row)
checkEvaluation(
Substring(s, Literal.create(1, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"example",
row)
// 2-arg substring from nonzero position
checkEvaluation(
Substring(s, Literal.create(2, IntegerType), Literal.create(Integer.MAX_VALUE, IntegerType)),
"xample",
row)
val s_notNull = 'a.string.notNull.at(0)
assert(Substring(s, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable)
assert(
Substring(s_notNull, Literal.create(0, IntegerType), Literal.create(2, IntegerType)).nullable
=== false)
assert(Substring(s_notNull,
Literal.create(null, IntegerType), Literal.create(2, IntegerType)).nullable)
assert(Substring(s_notNull,
Literal.create(0, IntegerType), Literal.create(null, IntegerType)).nullable)
checkEvaluation(s.substr(0, 2), "ex", row)
checkEvaluation(s.substr(0), "example", row)
checkEvaluation(s.substring(0, 2), "ex", row)
checkEvaluation(s.substring(0), "example", row)
val bytes = Array[Byte](1, 2, 3, 4)
checkEvaluation(Substring(bytes, 0, 2), Array[Byte](1, 2))
checkEvaluation(Substring(bytes, 1, 2), Array[Byte](1, 2))
checkEvaluation(Substring(bytes, 2, 2), Array[Byte](2, 3))
checkEvaluation(Substring(bytes, 3, 2), Array[Byte](3, 4))
checkEvaluation(Substring(bytes, 4, 2), Array[Byte](4))
checkEvaluation(Substring(bytes, 8, 2), Array.empty[Byte])
checkEvaluation(Substring(bytes, -1, 2), Array[Byte](4))
checkEvaluation(Substring(bytes, -2, 2), Array[Byte](3, 4))
checkEvaluation(Substring(bytes, -3, 2), Array[Byte](2, 3))
checkEvaluation(Substring(bytes, -4, 2), Array[Byte](1, 2))
checkEvaluation(Substring(bytes, -5, 2), Array[Byte](1))
checkEvaluation(Substring(bytes, -8, 2), Array.empty[Byte])
}
test("string substring_index function") {
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(3)), "www.apache.org")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(2)), "www.apache")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(1)), "www")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(0)), "")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(-3)), "www.apache.org")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(-2)), "apache.org")
checkEvaluation(
SubstringIndex(Literal("www.apache.org"), Literal("."), Literal(-1)), "org")
checkEvaluation(
SubstringIndex(Literal(""), Literal("."), Literal(-2)), "")
checkEvaluation(
SubstringIndex(Literal.create(null, StringType), Literal("."), Literal(-2)), null)
checkEvaluation(SubstringIndex(
Literal("www.apache.org"), Literal.create(null, StringType), Literal(-2)), null)
// non ascii chars
// scalastyle:off
checkEvaluation(
SubstringIndex(Literal("大千世界大千世界"), Literal( "千"), Literal(2)), "大千世界大")
// scalastyle:on
checkEvaluation(
SubstringIndex(Literal("www||apache||org"), Literal( "||"), Literal(2)), "www||apache")
}
test("ascii for string") {
val a = 'a.string.at(0)
checkEvaluation(Ascii(Literal("efg")), 101, create_row("abdef"))
checkEvaluation(Ascii(a), 97, create_row("abdef"))
checkEvaluation(Ascii(a), 0, create_row(""))
checkEvaluation(Ascii(a), null, create_row(null))
checkEvaluation(Ascii(Literal.create(null, StringType)), null, create_row("abdef"))
}
test("string for ascii") {
val a = 'a.long.at(0)
checkEvaluation(Chr(Literal(48L)), "0", create_row("abdef"))
checkEvaluation(Chr(a), "a", create_row(97L))
checkEvaluation(Chr(a), "a", create_row(97L + 256L))
checkEvaluation(Chr(a), "", create_row(-9L))
checkEvaluation(Chr(a), Character.MIN_VALUE.toString, create_row(0L))
checkEvaluation(Chr(a), Character.MIN_VALUE.toString, create_row(256L))
checkEvaluation(Chr(a), null, create_row(null))
checkEvaluation(Chr(a), 149.toChar.toString, create_row(149L))
checkEvaluation(Chr(Literal.create(null, LongType)), null, create_row("abdef"))
}
test("base64/unbase64 for string") {
val a = 'a.string.at(0)
val b = 'b.binary.at(0)
val bytes = Array[Byte](1, 2, 3, 4)
checkEvaluation(Base64(Literal(bytes)), "AQIDBA==", create_row("abdef"))
checkEvaluation(Base64(UnBase64(Literal("AQIDBA=="))), "AQIDBA==", create_row("abdef"))
checkEvaluation(Base64(UnBase64(Literal(""))), "", create_row("abdef"))
checkEvaluation(Base64(UnBase64(Literal.create(null, StringType))), null, create_row("abdef"))
checkEvaluation(Base64(UnBase64(a)), "AQIDBA==", create_row("AQIDBA=="))
checkEvaluation(Base64(b), "AQIDBA==", create_row(bytes))
checkEvaluation(Base64(b), "", create_row(Array.empty[Byte]))
checkEvaluation(Base64(b), null, create_row(null))
checkEvaluation(Base64(Literal.create(null, BinaryType)), null, create_row("abdef"))
checkEvaluation(UnBase64(a), null, create_row(null))
checkEvaluation(UnBase64(Literal.create(null, StringType)), null, create_row("abdef"))
}
test("encode/decode for string") {
val a = 'a.string.at(0)
val b = 'b.binary.at(0)
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
checkEvaluation(
Decode(Encode(Literal("大千世界"), Literal("UTF-16LE")), Literal("UTF-16LE")), "大千世界")
checkEvaluation(
Decode(Encode(a, Literal("utf-8")), Literal("utf-8")), "大千世界", create_row("大千世界"))
checkEvaluation(
Decode(Encode(a, Literal("utf-8")), Literal("utf-8")), "", create_row(""))
// scalastyle:on
checkEvaluation(Encode(a, Literal("utf-8")), null, create_row(null))
checkEvaluation(Encode(Literal.create(null, StringType), Literal("utf-8")), null)
checkEvaluation(Encode(a, Literal.create(null, StringType)), null, create_row(""))
checkEvaluation(Decode(b, Literal("utf-8")), null, create_row(null))
checkEvaluation(Decode(Literal.create(null, BinaryType), Literal("utf-8")), null)
checkEvaluation(Decode(b, Literal.create(null, StringType)), null, create_row(null))
}
test("initcap unit test") {
checkEvaluation(InitCap(Literal.create(null, StringType)), null)
checkEvaluation(InitCap(Literal("a b")), "A B")
checkEvaluation(InitCap(Literal(" a")), " A")
checkEvaluation(InitCap(Literal("the test")), "The Test")
checkEvaluation(InitCap(Literal("sParK")), "Spark")
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
checkEvaluation(InitCap(Literal("世界")), "世界")
// scalastyle:on
}
test("Levenshtein distance") {
checkEvaluation(Levenshtein(Literal.create(null, StringType), Literal("")), null)
checkEvaluation(Levenshtein(Literal(""), Literal.create(null, StringType)), null)
checkEvaluation(Levenshtein(Literal(""), Literal("")), 0)
checkEvaluation(Levenshtein(Literal("abc"), Literal("abc")), 0)
checkEvaluation(Levenshtein(Literal("kitten"), Literal("sitting")), 3)
checkEvaluation(Levenshtein(Literal("frog"), Literal("fog")), 1)
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
checkEvaluation(Levenshtein(Literal("千世"), Literal("fog")), 3)
checkEvaluation(Levenshtein(Literal("世界千世"), Literal("大a界b")), 4)
// scalastyle:on
}
test("soundex unit test") {
checkEvaluation(SoundEx(Literal("ZIN")), "Z500")
checkEvaluation(SoundEx(Literal("SU")), "S000")
checkEvaluation(SoundEx(Literal("")), "")
checkEvaluation(SoundEx(Literal.create(null, StringType)), null)
// scalastyle:off
// non ascii characters are not allowed in the code, so we disable the scalastyle here.
checkEvaluation(SoundEx(Literal("测试")), "测试")
checkEvaluation(SoundEx(Literal("Tschüss")), "T220")
// scalastyle:on
checkEvaluation(SoundEx(Literal("zZ")), "Z000", create_row("s8"))
checkEvaluation(SoundEx(Literal("RAGSSEEESSSVEEWE")), "R221")
checkEvaluation(SoundEx(Literal("Ashcraft")), "A261")
checkEvaluation(SoundEx(Literal("Aswcraft")), "A261")
checkEvaluation(SoundEx(Literal("Tymczak")), "T522")
checkEvaluation(SoundEx(Literal("Pfister")), "P236")
checkEvaluation(SoundEx(Literal("Miller")), "M460")
checkEvaluation(SoundEx(Literal("Peterson")), "P362")
checkEvaluation(SoundEx(Literal("Peters")), "P362")
checkEvaluation(SoundEx(Literal("Auerbach")), "A612")
checkEvaluation(SoundEx(Literal("Uhrbach")), "U612")
checkEvaluation(SoundEx(Literal("Moskowitz")), "M232")
checkEvaluation(SoundEx(Literal("Moskovitz")), "M213")
checkEvaluation(SoundEx(Literal("relyheewsgeessg")), "R422")
checkEvaluation(SoundEx(Literal("!!")), "!!")
}
test("replace") {
checkEvaluation(
StringReplace(Literal("replace"), Literal("pl"), Literal("123")), "re123ace")
checkEvaluation(StringReplace(Literal("replace"), Literal("pl"), Literal("")), "reace")
checkEvaluation(StringReplace(Literal("replace"), Literal(""), Literal("123")), "replace")
checkEvaluation(StringReplace(Literal.create(null, StringType),
Literal("pl"), Literal("123")), null)
checkEvaluation(StringReplace(Literal("replace"),
Literal.create(null, StringType), Literal("123")), null)
checkEvaluation(StringReplace(Literal("replace"),
Literal("pl"), Literal.create(null, StringType)), null)
// test for multiple replace
checkEvaluation(StringReplace(Literal("abcabc"), Literal("b"), Literal("12")), "a12ca12c")
checkEvaluation(StringReplace(Literal("abcdabcd"), Literal("bc"), Literal("")), "adad")
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringReplace(Literal("花花世界"), Literal("花世"), Literal("ab")), "花ab界")
// scalastyle:on
}
test("translate") {
checkEvaluation(
StringTranslate(Literal("translate"), Literal("rnlt"), Literal("123")), "1a2s3ae")
checkEvaluation(StringTranslate(Literal("translate"), Literal(""), Literal("123")), "translate")
checkEvaluation(StringTranslate(Literal("translate"), Literal("rnlt"), Literal("")), "asae")
// test for multiple mapping
checkEvaluation(StringTranslate(Literal("abcd"), Literal("aba"), Literal("123")), "12cd")
checkEvaluation(StringTranslate(Literal("abcd"), Literal("aba"), Literal("12")), "12cd")
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringTranslate(Literal("花花世界"), Literal("花界"), Literal("ab")), "aa世b")
// scalastyle:on
}
test("TRIM") {
val s = 'a.string.at(0)
checkEvaluation(StringTrim(Literal(" aa ")), "aa", create_row(" abdef "))
checkEvaluation(StringTrim("aa", "a"), "", create_row(" abdef "))
checkEvaluation(StringTrim(Literal(" aabbtrimccc"), "ab cd"), "trim", create_row("bdef"))
checkEvaluation(StringTrim(Literal("a<a >@>.,>"), "a.,@<>"), " ", create_row(" abdef "))
checkEvaluation(StringTrim(s), "abdef", create_row(" abdef "))
checkEvaluation(StringTrim(s, "abd"), "ef", create_row("abdefa"))
checkEvaluation(StringTrim(s, "a"), "bdef", create_row("aaabdefaaaa"))
checkEvaluation(StringTrim(s, "SLSQ"), "park", create_row("SSparkSQLS"))
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringTrim(s), "花花世界", create_row(" 花花世界 "))
checkEvaluation(StringTrim(s, "花世界"), "", create_row("花花世界花花"))
checkEvaluation(StringTrim(s, "花 "), "世界", create_row(" 花花世界花花"))
checkEvaluation(StringTrim(s, "花 "), "世界", create_row(" 花 花 世界 花 花 "))
checkEvaluation(StringTrim(s, "a花世"), "界", create_row("aa花花世界花花aa"))
checkEvaluation(StringTrim(s, "a@#( )"), "花花世界花花", create_row("aa()花花世界花花@ #"))
checkEvaluation(StringTrim(Literal("花trim"), "花 "), "trim", create_row(" abdef "))
// scalastyle:on
checkEvaluation(StringTrim(Literal("a"), Literal.create(null, StringType)), null)
checkEvaluation(StringTrim(Literal.create(null, StringType), Literal("a")), null)
}
test("LTRIM") {
val s = 'a.string.at(0)
checkEvaluation(StringTrimLeft(Literal(" aa ")), "aa ", create_row(" abdef "))
checkEvaluation(StringTrimLeft(Literal("aa"), "a"), "", create_row(" abdef "))
checkEvaluation(StringTrimLeft(Literal("aa "), "a "), "", create_row(" abdef "))
checkEvaluation(StringTrimLeft(Literal("aabbcaaaa"), "ab"), "caaaa", create_row(" abdef "))
checkEvaluation(StringTrimLeft(s), "abdef ", create_row(" abdef "))
checkEvaluation(StringTrimLeft(s, "a"), "bdefa", create_row("abdefa"))
checkEvaluation(StringTrimLeft(s, "a "), "bdefaaaa", create_row(" aaabdefaaaa"))
checkEvaluation(StringTrimLeft(s, "Spk"), "arkSQLS", create_row("SSparkSQLS"))
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringTrimLeft(s), "花花世界 ", create_row(" 花花世界 "))
checkEvaluation(StringTrimLeft(s, "花"), "世界花花", create_row("花花世界花花"))
checkEvaluation(StringTrimLeft(s, "花 世"), "界花花", create_row(" 花花世界花花"))
checkEvaluation(StringTrimLeft(s, "花"), "a花花世界花花 ", create_row("a花花世界花花 "))
checkEvaluation(StringTrimLeft(s, "a花界"), "世界花花aa", create_row("aa花花世界花花aa"))
checkEvaluation(StringTrimLeft(s, "a世界"), "花花世界花花", create_row("花花世界花花"))
// scalastyle:on
checkEvaluation(StringTrimLeft(Literal.create(null, StringType), Literal("a")), null)
checkEvaluation(StringTrimLeft(Literal("a"), Literal.create(null, StringType)), null)
}
test("RTRIM") {
val s = 'a.string.at(0)
checkEvaluation(StringTrimRight(Literal(" aa ")), " aa", create_row(" abdef "))
checkEvaluation(StringTrimRight(Literal("a"), "a"), "", create_row(" abdef "))
checkEvaluation(StringTrimRight(Literal("ab"), "ab"), "", create_row(" abdef "))
checkEvaluation(StringTrimRight(Literal("aabbaaaa %"), "a %"), "aabb", create_row("def"))
checkEvaluation(StringTrimRight(s), " abdef", create_row(" abdef "))
checkEvaluation(StringTrimRight(s, "a"), "abdef", create_row("abdefa"))
checkEvaluation(StringTrimRight(s, "abf de"), "", create_row(" aaabdefaaaa"))
checkEvaluation(StringTrimRight(s, "S*&"), "SSparkSQL", create_row("SSparkSQLS*"))
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringTrimRight(Literal("a"), "花"), "a", create_row(" abdef "))
checkEvaluation(StringTrimRight(Literal("花"), "a"), "花", create_row(" abdef "))
checkEvaluation(StringTrimRight(Literal("花花世界"), "界花世"), "", create_row(" abdef "))
checkEvaluation(StringTrimRight(s), " 花花世界", create_row(" 花花世界 "))
checkEvaluation(StringTrimRight(s, "花a#"), "花花世界", create_row("花花世界花花###aa花"))
checkEvaluation(StringTrimRight(s, "花"), "", create_row("花花花花"))
checkEvaluation(StringTrimRight(s, "花 界b@"), " 花花世", create_row(" 花花世 b界@花花 "))
// scalastyle:on
checkEvaluation(StringTrimRight(Literal("a"), Literal.create(null, StringType)), null)
checkEvaluation(StringTrimRight(Literal.create(null, StringType), Literal("a")), null)
}
test("FORMAT") {
checkEvaluation(FormatString(Literal("aa%d%s"), Literal(123), Literal("a")), "aa123a")
checkEvaluation(FormatString(Literal("aa")), "aa", create_row(null))
checkEvaluation(FormatString(Literal("aa%d%s"), Literal(123), Literal("a")), "aa123a")
checkEvaluation(FormatString(Literal("aa%d%s"), 12, "cc"), "aa12cc")
checkEvaluation(FormatString(Literal.create(null, StringType), 12, "cc"), null)
checkEvaluation(
FormatString(Literal("aa%d%s"), Literal.create(null, IntegerType), "cc"), "aanullcc")
checkEvaluation(
FormatString(Literal("aa%d%s"), 12, Literal.create(null, StringType)), "aa12null")
}
test("SPARK-22603: FormatString should not generate codes beyond 64KB") {
val N = 4500
val args = (1 to N).map(i => Literal.create(i.toString, StringType))
val format = "%s" * N
val expected = (1 to N).map(i => i.toString).mkString
checkEvaluation(FormatString(Literal(format) +: args: _*), expected)
}
test("INSTR") {
val s1 = 'a.string.at(0)
val s2 = 'b.string.at(1)
val s3 = 'c.string.at(2)
val row1 = create_row("aaads", "aa", "zz")
checkEvaluation(StringInstr(Literal("aaads"), Literal("aa")), 1, row1)
checkEvaluation(StringInstr(Literal("aaads"), Literal("de")), 0, row1)
checkEvaluation(StringInstr(Literal.create(null, StringType), Literal("de")), null, row1)
checkEvaluation(StringInstr(Literal("aaads"), Literal.create(null, StringType)), null, row1)
checkEvaluation(StringInstr(s1, s2), 1, row1)
checkEvaluation(StringInstr(s1, s3), 0, row1)
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(StringInstr(s1, s2), 3, create_row("花花世界", "世界"))
checkEvaluation(StringInstr(s1, s2), 1, create_row("花花世界", "花"))
checkEvaluation(StringInstr(s1, s2), 0, create_row("花花世界", "小"))
// scalastyle:on
}
test("LOCATE") {
val s1 = 'a.string.at(0)
val s2 = 'b.string.at(1)
val s3 = 'c.string.at(2)
val s4 = 'd.int.at(3)
val row1 = create_row("aaads", "aa", "zz", 2)
val row2 = create_row(null, "aa", "zz", 1)
val row3 = create_row("aaads", null, "zz", 1)
val row4 = create_row(null, null, null, 1)
checkEvaluation(new StringLocate(Literal("aa"), Literal("aaads")), 1, row1)
checkEvaluation(StringLocate(Literal("aa"), Literal("aaads"), Literal(0)), 0, row1)
checkEvaluation(StringLocate(Literal("aa"), Literal("aaads"), Literal(1)), 1, row1)
checkEvaluation(StringLocate(Literal("aa"), Literal("aaads"), Literal(2)), 2, row1)
checkEvaluation(StringLocate(Literal("aa"), Literal("aaads"), Literal(3)), 0, row1)
checkEvaluation(new StringLocate(Literal("de"), Literal("aaads")), 0, row1)
checkEvaluation(StringLocate(Literal("de"), Literal("aaads"), 2), 0, row1)
checkEvaluation(new StringLocate(s2, s1), 1, row1)
checkEvaluation(StringLocate(s2, s1, s4), 2, row1)
checkEvaluation(new StringLocate(s3, s1), 0, row1)
checkEvaluation(StringLocate(s3, s1, Literal.create(null, IntegerType)), 0, row1)
checkEvaluation(new StringLocate(s2, s1), null, row2)
checkEvaluation(new StringLocate(s2, s1), null, row3)
checkEvaluation(new StringLocate(s2, s1, Literal.create(null, IntegerType)), 0, row4)
}
test("LPAD/RPAD") {
val s1 = 'a.string.at(0)
val s2 = 'b.int.at(1)
val s3 = 'c.string.at(2)
val row1 = create_row("hi", 5, "??")
val row2 = create_row("hi", 1, "?")
val row3 = create_row(null, 1, "?")
val row4 = create_row("hi", null, "?")
val row5 = create_row("hi", 1, null)
checkEvaluation(StringLPad(Literal("hi"), Literal(5), Literal("??")), "???hi", row1)
checkEvaluation(StringLPad(Literal("hi"), Literal(1), Literal("??")), "h", row1)
checkEvaluation(StringLPad(s1, s2, s3), "???hi", row1)
checkEvaluation(StringLPad(s1, s2, s3), "h", row2)
checkEvaluation(StringLPad(s1, s2, s3), null, row3)
checkEvaluation(StringLPad(s1, s2, s3), null, row4)
checkEvaluation(StringLPad(s1, s2, s3), null, row5)
checkEvaluation(StringRPad(Literal("hi"), Literal(5), Literal("??")), "hi???", row1)
checkEvaluation(StringRPad(Literal("hi"), Literal(1), Literal("??")), "h", row1)
checkEvaluation(StringRPad(s1, s2, s3), "hi???", row1)
checkEvaluation(StringRPad(s1, s2, s3), "h", row2)
checkEvaluation(StringRPad(s1, s2, s3), null, row3)
checkEvaluation(StringRPad(s1, s2, s3), null, row4)
checkEvaluation(StringRPad(s1, s2, s3), null, row5)
}
test("REPEAT") {
val s1 = 'a.string.at(0)
val s2 = 'b.int.at(1)
val row1 = create_row("hi", 2)
val row2 = create_row(null, 1)
checkEvaluation(StringRepeat(Literal("hi"), Literal(2)), "hihi", row1)
checkEvaluation(StringRepeat(Literal("hi"), Literal(-1)), "", row1)
checkEvaluation(StringRepeat(s1, s2), "hihi", row1)
checkEvaluation(StringRepeat(s1, s2), null, row2)
}
test("REVERSE") {
val s = 'a.string.at(0)
val row1 = create_row("abccc")
checkEvaluation(Reverse(Literal("abccc")), "cccba", row1)
checkEvaluation(Reverse(s), "cccba", row1)
checkEvaluation(Reverse(Literal.create(null, StringType)), null, row1)
}
test("SPACE") {
val s1 = 'b.int.at(0)
val row1 = create_row(2)
val row2 = create_row(null)
checkEvaluation(StringSpace(Literal(2)), " ", row1)
checkEvaluation(StringSpace(Literal(-1)), "", row1)
checkEvaluation(StringSpace(Literal(0)), "", row1)
checkEvaluation(StringSpace(s1), " ", row1)
checkEvaluation(StringSpace(s1), null, row2)
}
test("length for string / binary") {
val a = 'a.string.at(0)
val b = 'b.binary.at(0)
val bytes = Array[Byte](1, 2, 3, 1, 2)
val string = "abdef"
// scalastyle:off
// non ascii characters are not allowed in the source code, so we disable the scalastyle.
checkEvaluation(Length(Literal("a花花c")), 4, create_row(string))
checkEvaluation(OctetLength(Literal("a花花c")), 8, create_row(string))
checkEvaluation(BitLength(Literal("a花花c")), 8 * 8, create_row(string))
// scalastyle:on
checkEvaluation(Length(Literal(bytes)), 5, create_row(Array.empty[Byte]))
checkEvaluation(OctetLength(Literal(bytes)), 5, create_row(Array.empty[Byte]))
checkEvaluation(BitLength(Literal(bytes)), 5 * 8, create_row(Array.empty[Byte]))
checkEvaluation(Length(a), 5, create_row(string))
checkEvaluation(OctetLength(a), 5, create_row(string))
checkEvaluation(BitLength(a), 5 * 8, create_row(string))
checkEvaluation(Length(b), 5, create_row(bytes))
checkEvaluation(OctetLength(b), 5, create_row(bytes))
checkEvaluation(BitLength(b), 5 * 8, create_row(bytes))
checkEvaluation(Length(a), 0, create_row(""))
checkEvaluation(OctetLength(a), 0, create_row(""))
checkEvaluation(BitLength(a), 0, create_row(""))
checkEvaluation(Length(b), 0, create_row(Array.empty[Byte]))
checkEvaluation(OctetLength(b), 0, create_row(Array.empty[Byte]))
checkEvaluation(BitLength(b), 0, create_row(Array.empty[Byte]))
checkEvaluation(Length(a), null, create_row(null))
checkEvaluation(OctetLength(a), null, create_row(null))
checkEvaluation(BitLength(a), null, create_row(null))
checkEvaluation(Length(b), null, create_row(null))
checkEvaluation(OctetLength(b), null, create_row(null))
checkEvaluation(BitLength(b), null, create_row(null))
checkEvaluation(Length(Literal.create(null, StringType)), null, create_row(string))
checkEvaluation(OctetLength(Literal.create(null, StringType)), null, create_row(string))
checkEvaluation(BitLength(Literal.create(null, StringType)), null, create_row(string))
checkEvaluation(Length(Literal.create(null, BinaryType)), null, create_row(bytes))
checkEvaluation(OctetLength(Literal.create(null, BinaryType)), null, create_row(bytes))
checkEvaluation(BitLength(Literal.create(null, BinaryType)), null, create_row(bytes))
}
test("format_number / FormatNumber") {
checkEvaluation(FormatNumber(Literal(4.asInstanceOf[Byte]), Literal(3)), "4.000")
checkEvaluation(FormatNumber(Literal(4.asInstanceOf[Short]), Literal(3)), "4.000")
checkEvaluation(FormatNumber(Literal(4.0f), Literal(3)), "4.000")
checkEvaluation(FormatNumber(Literal(4), Literal(3)), "4.000")
checkEvaluation(FormatNumber(Literal(12831273.23481d), Literal(3)), "12,831,273.235")
checkEvaluation(FormatNumber(Literal(12831273.83421d), Literal(0)), "12,831,274")
checkEvaluation(FormatNumber(Literal(123123324123L), Literal(3)), "123,123,324,123.000")
checkEvaluation(FormatNumber(Literal(123123324123L), Literal(-1)), null)
checkEvaluation(
FormatNumber(
Literal(Decimal(123123324123L) * Decimal(123123.21234d)), Literal(4)),
"15,159,339,180,002,773.2778")
checkEvaluation(FormatNumber(Literal.create(null, IntegerType), Literal(3)), null)
assert(FormatNumber(Literal.create(null, NullType), Literal(3)).resolved === false)
checkEvaluation(FormatNumber(Literal(12332.123456), Literal("##############.###")), "12332.123")
checkEvaluation(FormatNumber(Literal(12332.123456), Literal("##.###")), "12332.123")
checkEvaluation(FormatNumber(Literal(4.asInstanceOf[Byte]), Literal("##.####")), "4")
checkEvaluation(FormatNumber(Literal(4.asInstanceOf[Short]), Literal("##.####")), "4")
checkEvaluation(FormatNumber(Literal(4.0f), Literal("##.###")), "4")
checkEvaluation(FormatNumber(Literal(4), Literal("##.###")), "4")
checkEvaluation(FormatNumber(Literal(12831273.23481d),
Literal("###,###,###,###,###.###")), "12,831,273.235")
checkEvaluation(FormatNumber(Literal(12831273.83421d), Literal("")), "12,831,274")
checkEvaluation(FormatNumber(Literal(123123324123L), Literal("###,###,###,###,###.###")),
"123,123,324,123")
checkEvaluation(
FormatNumber(Literal(Decimal(123123324123L) * Decimal(123123.21234d)),
Literal("###,###,###,###,###.####")), "15,159,339,180,002,773.2778")
checkEvaluation(FormatNumber(Literal.create(null, IntegerType), Literal("##.###")), null)
assert(FormatNumber(Literal.create(null, NullType), Literal("##.###")).resolved === false)
checkEvaluation(FormatNumber(Literal(12332.123456), Literal("#,###,###,###,###,###,##0")),
"12,332")
checkEvaluation(FormatNumber(
Literal.create(null, IntegerType), Literal.create(null, StringType)), null)
checkEvaluation(FormatNumber(
Literal.create(null, IntegerType), Literal.create(null, IntegerType)), null)
}
test("find in set") {
checkEvaluation(
FindInSet(Literal.create(null, StringType), Literal.create(null, StringType)), null)
checkEvaluation(FindInSet(Literal("ab"), Literal.create(null, StringType)), null)
checkEvaluation(FindInSet(Literal.create(null, StringType), Literal("abc,b,ab,c,def")), null)
checkEvaluation(FindInSet(Literal("ab"), Literal("abc,b,ab,c,def")), 3)
checkEvaluation(FindInSet(Literal("abf"), Literal("abc,b,ab,c,def")), 0)
checkEvaluation(FindInSet(Literal("ab,"), Literal("abc,b,ab,c,def")), 0)
}
test("ParseUrl") {
def checkParseUrl(expected: String, urlStr: String, partToExtract: String): Unit = {
checkEvaluation(ParseUrl(Seq(urlStr, partToExtract)), expected)
}
def checkParseUrlWithKey(
expected: String,
urlStr: String,
partToExtract: String,
key: String): Unit = {
checkEvaluation(ParseUrl(Seq(urlStr, partToExtract, key)), expected)
}
checkParseUrl("spark.apache.org", "http://spark.apache.org/path?query=1", "HOST")
checkParseUrl("/path", "http://spark.apache.org/path?query=1", "PATH")
checkParseUrl("query=1", "http://spark.apache.org/path?query=1", "QUERY")
checkParseUrl("Ref", "http://spark.apache.org/path?query=1#Ref", "REF")
checkParseUrl("http", "http://spark.apache.org/path?query=1", "PROTOCOL")
checkParseUrl("/path?query=1", "http://spark.apache.org/path?query=1", "FILE")
checkParseUrl("spark.apache.org:8080", "http://spark.apache.org:8080/path?query=1", "AUTHORITY")
checkParseUrl("userinfo", "http://[email protected]/path?query=1", "USERINFO")
checkParseUrlWithKey("1", "http://spark.apache.org/path?query=1", "QUERY", "query")
// Null checking
checkParseUrl(null, null, "HOST")
checkParseUrl(null, "http://spark.apache.org/path?query=1", null)
checkParseUrl(null, null, null)
checkParseUrl(null, "test", "HOST")
checkParseUrl(null, "http://spark.apache.org/path?query=1", "NO")
checkParseUrl(null, "http://spark.apache.org/path?query=1", "USERINFO")
checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1", "HOST", "query")
checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1", "QUERY", "quer")
checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1", "QUERY", null)
checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1", "QUERY", "")
// exceptional cases
intercept[java.util.regex.PatternSyntaxException] {
evaluateWithoutCodegen(ParseUrl(Seq(Literal("http://spark.apache.org/path?"),
Literal("QUERY"), Literal("???"))))
}
// arguments checking
assert(ParseUrl(Seq(Literal("1"))).checkInputDataTypes().isFailure)
assert(ParseUrl(Seq(Literal("1"), Literal("2"), Literal("3"), Literal("4")))
.checkInputDataTypes().isFailure)
assert(ParseUrl(Seq(Literal("1"), Literal(2))).checkInputDataTypes().isFailure)
assert(ParseUrl(Seq(Literal(1), Literal("2"))).checkInputDataTypes().isFailure)
assert(ParseUrl(Seq(Literal("1"), Literal("2"), Literal(3))).checkInputDataTypes().isFailure)
}
test("Sentences") {
val nullString = Literal.create(null, StringType)
checkEvaluation(Sentences(nullString, nullString, nullString), null)
checkEvaluation(Sentences(nullString, nullString), null)
checkEvaluation(Sentences(nullString), null)
checkEvaluation(Sentences("", nullString, nullString), Seq.empty)
checkEvaluation(Sentences("", nullString), Seq.empty)
checkEvaluation(Sentences(""), Seq.empty)
val answer = Seq(
Seq("Hi", "there"),
Seq("The", "price", "was"),
Seq("But", "not", "now"))
checkEvaluation(Sentences("Hi there! The price was $1,234.56.... But, not now."), answer)
checkEvaluation(Sentences("Hi there! The price was $1,234.56.... But, not now.", "en"), answer)
checkEvaluation(Sentences("Hi there! The price was $1,234.56.... But, not now.", "en", "US"),
answer)
checkEvaluation(Sentences("Hi there! The price was $1,234.56.... But, not now.", "XXX", "YYY"),
answer)
}
}
|
icexelloss/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
|
Scala
|
apache-2.0
| 39,491 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.typed
import com.twitter.scalding.Execution
object ValuePipe extends java.io.Serializable {
implicit def toTypedPipe[V](v: ValuePipe[V]): TypedPipe[V] = v.toTypedPipe
def fold[T, U, V](l: ValuePipe[T], r: ValuePipe[U])(f: (T, U) => V): ValuePipe[V] =
l.leftCross(r).collect { case (t, Some(u)) => f(t, u) }
def apply[T](t: T): ValuePipe[T] = LiteralValue(t)
def empty: ValuePipe[Nothing] = EmptyValue
}
/**
 * ValuePipe is a special case of a TypedPipe holding just an optional single element. It is like a
 * distributed Option type. It allows scalar-based operations on pipes, such as normalization.
*/
sealed trait ValuePipe[+T] extends java.io.Serializable {
def leftCross[U](that: ValuePipe[U]): ValuePipe[(T, Option[U])] = that match {
case EmptyValue => map((_, None))
case LiteralValue(v2) => map((_, Some(v2)))
// We don't know if a computed value is empty or not. We need to run the MR job:
case _ => ComputedValue(toTypedPipe.leftCross(that))
}
def collect[U](fn: PartialFunction[T, U]): ValuePipe[U] =
filter(fn.isDefinedAt(_)).map(fn(_))
def map[U](fn: T => U): ValuePipe[U]
def filter(fn: T => Boolean): ValuePipe[T]
/**
   * Identical to toOptionExecution.map(_.get). The result will be an exception if there is no value.
   * The name follows the convention of appending Execution, so in the REPL it is removed.
*/
def getExecution: Execution[T] = toOptionExecution.flatMap {
case Some(t) => Execution.from(t)
// same exception as scala.None.get
// https://github.com/scala/scala/blob/2.12.x/src/library/scala/Option.scala#L347
case None => Execution.failed(new java.util.NoSuchElementException("None.get"))
}
/**
   * Like the above, but with a lazy parameter that is evaluated only if the value pipe is empty. The
   * name follows the convention of appending Execution, so in the REPL it is removed.
*/
def getOrElseExecution[U >: T](t: => U): Execution[U] = toOptionExecution.map(_.getOrElse(t))
def toTypedPipe: TypedPipe[T]
/**
   * Convert this value to an Option. It is an error if this is somehow neither empty nor a single
   * value. The name follows the convention of appending Execution, so in the REPL it is removed.
*/
def toOptionExecution: Execution[Option[T]] =
toTypedPipe.toIterableExecution.map { it =>
it.iterator.take(2).toList match {
case Nil => None
case h :: Nil => Some(h)
case items => sys.error("More than 1 item in an ValuePipe: " + items.toString)
}
}
def debug: ValuePipe[T]
}
case object EmptyValue extends ValuePipe[Nothing] {
override def leftCross[U](that: ValuePipe[U]) = this
override def map[U](fn: Nothing => U): ValuePipe[U] = this
override def filter(fn: Nothing => Boolean) = this
override def toTypedPipe: TypedPipe[Nothing] = TypedPipe.empty
override def toOptionExecution = Execution.from(None)
def debug: ValuePipe[Nothing] = {
println("EmptyValue")
this
}
}
final case class LiteralValue[T](value: T) extends ValuePipe[T] {
override def map[U](fn: T => U) = LiteralValue(fn(value))
override def filter(fn: T => Boolean) = if (fn(value)) this else EmptyValue
override def toTypedPipe = TypedPipe.from(Iterable(value))
override def toOptionExecution = Execution.from(Some(value))
def debug: ValuePipe[T] = map { v =>
println("LiteralValue(" + v.toString + ")")
v
}
}
final case class ComputedValue[T](override val toTypedPipe: TypedPipe[T]) extends ValuePipe[T] {
override def map[U](fn: T => U) = ComputedValue(toTypedPipe.map(fn))
override def filter(fn: T => Boolean) = ComputedValue(toTypedPipe.filter(fn))
def debug: ValuePipe[T] = map { value =>
println("ComputedValue(" + value.toString + ")")
value
}
}
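// Illustrative sketch, not part of the original file: scaling every element of a pipe by a scalar
// that lives in a ValuePipe. Only the API defined above is used (TypedPipe.leftCross over a
// ValuePipe, plus map); the object and method names are hypothetical.
object ValuePipeUsageSketch {
  def scaleBy(nums: TypedPipe[Double], factor: ValuePipe[Double]): TypedPipe[Double] =
    nums.leftCross(factor).map {
      case (x, Some(f)) => x * f // scalar available: scale the element
      case (x, None) => x // empty ValuePipe: pass the element through unchanged
    }
}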
|
twitter/scalding
|
scalding-core/src/main/scala/com/twitter/scalding/typed/ValuePipe.scala
|
Scala
|
apache-2.0
| 4,402 |
package org.automanlang.core.info
import java.util.{Date, UUID}
import org.automanlang.core.info.QuestionType.QuestionType
import org.automanlang.core.scheduler.Task
import scala.slick.driver.SQLiteDriver.simple.MappedColumnType
case class QuestionInfo(computation_id: UUID,
name: String,
question_text: String,
question_desc: String,
question_type: QuestionType,
start_time: Date,
confidence_level: Double,
tasks: List[Task],
total_answers_needed: Int,
total_budget: BigDecimal,
budget_used: BigDecimal,
dont_reject: Boolean,
epochs: List[EpochInfo])
object QuestionType extends Enumeration {
type QuestionType = Value
val CheckboxQuestion = Value("CheckboxQuestion")
val CheckboxDistributionQuestion = Value("CheckboxDistributionQuestion")
val MultiEstimationQuestion = Value("MultiEstimationQuestion")
val EstimationQuestion = Value("EstimationQuestion")
val FreeTextQuestion = Value("FreeTextQuestion")
val FreeTextDistributionQuestion = Value("FreeTextDistributionQuestion")
val RadioButtonQuestion = Value("RadioButtonQuestion")
val RadioButtonDistributionQuestion = Value("RadioButtonDistributionQuestion")
val Survey = Value("Survey")
val VariantQuestion = Value("VariantQuestion")
}
|
dbarowy/AutoMan
|
libautoman/src/main/scala/org/automanlang/core/info/QuestionInfo.scala
|
Scala
|
gpl-2.0
| 1,505 |
package varys.framework.master
import akka.actor._
import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent}
import akka.routing._
import java.text.SimpleDateFormat
import java.util.Date
import java.util.concurrent.atomic._
import java.util.concurrent.ConcurrentHashMap
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.collection.JavaConversions._
import scala.concurrent.{Future, ExecutionContext}
import varys.framework._
import varys.framework.master.scheduler._
import varys.framework.master.ui.MasterWebUI
import varys.{Logging, Utils, VarysException}
import varys.util.{AkkaUtils, SlaveToBpsMap}
private[varys] class Master(
systemName:String,
actorName: String,
host: String,
port: Int,
webUiPort: Int)
extends Logging {
val NUM_MASTER_INSTANCES = System.getProperty("varys.master.numInstances", "1").toInt
val DATE_FORMAT = new SimpleDateFormat("yyyyMMddHHmmss") // For coflow IDs
val SLAVE_TIMEOUT = System.getProperty("varys.slave.timeout", "60").toLong * 1000
val CONSIDER_DEADLINE = System.getProperty("varys.master.considerDeadline", "false").toBoolean
val idToSlave = new ConcurrentHashMap[String, SlaveInfo]()
val actorToSlave = new ConcurrentHashMap[ActorRef, SlaveInfo]
val addressToSlave = new ConcurrentHashMap[Address, SlaveInfo]
val hostToSlave = new ConcurrentHashMap[String, SlaveInfo]
val idToRxBps = new SlaveToBpsMap
val idToTxBps = new SlaveToBpsMap
var nextCoflowNumber = new AtomicInteger()
val idToCoflow = new ConcurrentHashMap[String, CoflowInfo]()
val completedCoflows = new ArrayBuffer[CoflowInfo]
var nextClientNumber = new AtomicInteger()
val idToClient = new ConcurrentHashMap[String, ClientInfo]()
val actorToClient = new ConcurrentHashMap[ActorRef, ClientInfo]
val addressToClient = new ConcurrentHashMap[Address, ClientInfo]
val webUiStarted = new AtomicBoolean(false)
// ExecutionContext for Futures
implicit val futureExecContext = ExecutionContext.fromExecutor(Utils.newDaemonCachedThreadPool())
private def now() = System.currentTimeMillis
// Create the scheduler object
val schedulerClass = System.getProperty(
"varys.master.scheduler",
"varys.framework.master.scheduler.SEBFScheduler")
val coflowScheduler = Class.forName(schedulerClass).newInstance.asInstanceOf[CoflowScheduler]
def start(): (ActorSystem, Int) = {
val (actorSystem, boundPort) = AkkaUtils.createActorSystem(systemName, host, port)
val actor = actorSystem.actorOf(
Props(new MasterActor(host, boundPort, webUiPort)).withRouter(
RoundRobinRouter(nrOfInstances = NUM_MASTER_INSTANCES)),
name = actorName)
(actorSystem, boundPort)
}
private[varys] class MasterActor(
ip: String,
port: Int,
webUiPort: Int)
extends Actor with Logging {
val webUi = new MasterWebUI(self, webUiPort)
val masterPublicAddress = {
val envVar = System.getenv("VARYS_PUBLIC_DNS")
if (envVar != null) envVar else ip
}
override def preStart() {
logInfo("Starting Varys master at varys://" + ip + ":" + port)
// Listen for remote client disconnection events, since they don't go through Akka's watch()
context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
if (!webUiStarted.getAndSet(true)) {
webUi.start()
}
// context.system.scheduler.schedule(0 millis, SLAVE_TIMEOUT millis, self, CheckForSlaveTimeOut)
}
override def postStop() {
webUi.stop()
}
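    // Message protocol summary: slaves register and send heartbeats; clients register themselves,
    // register coflows, add flows, and look up peers via GetFlow/GetFlows; ScheduleRequest re-runs
    // the coflow scheduler, whose per-flow rates are pushed back to destination clients as
    // UpdatedRates.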
override def receive = {
case RegisterSlave(id, host, slavePort, slave_webUiPort, slave_commPort, publicAddress) => {
val currentSender = sender
logInfo("Registering slave %s:%d".format(host, slavePort))
if (idToSlave.containsKey(id)) {
currentSender ! RegisterSlaveFailed("Duplicate slave ID")
} else {
addSlave(
id,
host,
slavePort,
slave_webUiPort,
slave_commPort,
publicAddress,
currentSender)
// Wait for webUi to bind. Needed when NUM_MASTER_INSTANCES > 1.
while (webUi.boundPort == None) {
Thread.sleep(100)
}
// context.watch doesn't work with remote actors but helps for testing
// context.watch(currentSender)
currentSender ! RegisteredSlave("http://" + masterPublicAddress + ":" + webUi.boundPort.get)
}
}
case RegisterClient(clientName, host, commPort) => {
val currentSender = sender
val st = now
logTrace("Registering client %s@%s:%d".format(clientName, host, commPort))
if (hostToSlave.containsKey(host)) {
val client = addClient(clientName, host, commPort, currentSender)
// context.watch doesn't work with remote actors but helps for testing
// context.watch(currentSender)
val slave = hostToSlave(host)
currentSender ! RegisteredClient(
client.id,
slave.id,
"varys://" + slave.host + ":" + slave.port)
logInfo("Registered client " + clientName + " with ID " + client.id + " in " +
(now - st) + " milliseconds")
} else {
currentSender ! RegisterClientFailed("No Varys slave at " + host)
}
}
case RegisterCoflow(clientId, description) => {
val currentSender = sender
val st = now
logTrace("Registering coflow " + description.name)
if (CONSIDER_DEADLINE && description.deadlineMillis == 0) {
currentSender ! RegisterCoflowFailed("Must specify a valid deadline")
} else {
val client = idToClient.get(clientId)
if (client == null) {
currentSender ! RegisterCoflowFailed("Invalid clientId " + clientId)
} else {
val coflow = addCoflow(client, description, currentSender)
// context.watch doesn't work with remote actors but helps for testing
// context.watch(currentSender)
currentSender ! RegisteredCoflow(coflow.id)
logInfo("Registered coflow " + description.name + " with ID " + coflow.id + " in " +
(now - st) + " milliseconds")
}
}
}
case UnregisterCoflow(coflowId) => {
removeCoflow(idToCoflow.get(coflowId))
sender ! true
}
case Heartbeat(slaveId, newRxBps, newTxBps) => {
val slaveInfo = idToSlave.get(slaveId)
if (slaveInfo != null) {
slaveInfo.updateNetworkStats(newRxBps, newTxBps)
slaveInfo.lastHeartbeat = System.currentTimeMillis()
idToRxBps.updateNetworkStats(slaveId, newRxBps)
idToTxBps.updateNetworkStats(slaveId, newTxBps)
} else {
logWarning("Got heartbeat from unregistered slave " + slaveId)
}
}
case Terminated(actor) => {
// The disconnected actor could've been a slave or a client; remove accordingly.
// Coflow termination is handled explicitly through UnregisterCoflow or when its client dies
if (actorToSlave.containsKey(actor))
removeSlave(actorToSlave.get(actor))
if (actorToClient.containsKey(actor))
removeClient(actorToClient.get(actor))
}
case e: DisassociatedEvent => {
// The disconnected actor could've been a slave or a client; remove accordingly.
// Coflow termination is handled explicitly through UnregisterCoflow or when its client dies
if (addressToSlave.containsKey(e.remoteAddress))
removeSlave(addressToSlave.get(e.remoteAddress))
if (addressToClient.containsKey(e.remoteAddress))
removeClient(addressToClient.get(e.remoteAddress))
}
case RequestMasterState => {
sender ! MasterState(
ip,
port,
idToSlave.values.toSeq.toArray,
idToCoflow.values.toSeq.toArray,
completedCoflows.toArray,
idToClient.values.toSeq.toArray)
}
case CheckForSlaveTimeOut => {
timeOutDeadSlaves()
}
case RequestWebUIPort => {
sender ! WebUIPortResponse(webUi.boundPort.getOrElse(-1))
}
case RequestBestRxMachines(howMany, bytes) => {
sender ! BestRxMachines(idToRxBps.getTopN(
howMany, bytes).toArray.map(x => idToSlave.get(x).host))
}
case RequestBestTxMachines(howMany, bytes) => {
sender ! BestTxMachines(idToTxBps.getTopN(
howMany, bytes).toArray.map(x => idToSlave.get(x).host))
}
case AddFlows(flowDescs, coflowId, dataType) => {
val currentSender = sender
// coflowId will always be valid
val coflow = idToCoflow.get(coflowId)
assert(coflow != null)
val st = now
flowDescs.foreach { coflow.addFlow }
logDebug("Added " + flowDescs.size + " flows to " + coflow + " in " + (now - st) +
" milliseconds")
currentSender ! true
}
case AddFlow(flowDesc) => {
val currentSender = sender
// coflowId will always be valid
val coflow = idToCoflow.get(flowDesc.coflowId)
assert(coflow != null)
val st = now
coflow.addFlow(flowDesc)
logDebug("Added flow to " + coflow + " in " + (now - st) + " milliseconds")
currentSender ! true
}
case GetFlow(flowId, coflowId, clientId, slaveId, _) => {
logTrace("Received GetFlow(" + flowId + ", " + coflowId + ", " + slaveId + ", " + sender +
")")
val currentSender = sender
Future { handleGetFlow(flowId, coflowId, clientId, slaveId, currentSender) }
}
case GetFlows(flowIds, coflowId, clientId, slaveId, _) => {
logTrace("Received GetFlows(" + flowIds + ", " + coflowId + ", " + slaveId + ", " + sender +
")")
val currentSender = sender
Future { handleGetFlows(flowIds, coflowId, clientId, slaveId, currentSender) }
}
case FlowProgress(flowId, coflowId, bytesSinceLastUpdate, isCompleted) => {
// coflowId will always be valid
val coflow = idToCoflow.get(coflowId)
assert(coflow != null)
val st = now
coflow.updateFlow(flowId, bytesSinceLastUpdate, isCompleted)
logTrace("Received FlowProgress for flow " + flowId + " of " + coflow + " in " +
(now - st) + " milliseconds")
}
case DeleteFlow(flowId, coflowId) => {
// TODO: Actually do something; e.g., remove destination?
// self ! ScheduleRequest
// sender ! true
}
case ScheduleRequest => {
schedule()
}
}
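    // Registers the calling client as the destination of the requested flows and replies with
    // their descriptors; when the coflow becomes ready to be scheduled, a ScheduleRequest is fired.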
def handleGetFlows(
flowIds: Array[String],
coflowId: String,
clientId: String,
slaveId: String,
actor: ActorRef) {
logTrace("handleGetFlows(" + flowIds + ", " + coflowId + ", " + slaveId + ", " + actor + ")")
val client = idToClient.get(clientId)
assert(client != null)
val coflow = idToCoflow.get(coflowId)
assert(coflow != null)
// assert(coflow.contains(flowId))
var canSchedule = false
coflow.getFlowInfos(flowIds) match {
case Some(flowInfos) => {
val st = now
canSchedule = coflow.addDestinations(flowIds, client)
// TODO: Always returning the default source.
// Consider selecting based on traffic etc.
actor ! Some(GotFlowDescs(flowInfos.map(_.desc)))
logInfo("Added " + flowIds.size + " destinations to " + coflow + ". " +
coflow.numFlowsToRegister + " flows remain to register; in " + (now - st) +
" milliseconds")
}
case None => {
// logWarning("Couldn't find flow " + flowId + " of coflow " + coflowId)
actor ! None
}
}
if (canSchedule) {
logInfo("Coflow " + coflowId + " ready to be scheduled")
self ! ScheduleRequest
}
}
def handleGetFlow(
flowId: String,
coflowId: String,
clientId: String,
slaveId: String,
actor: ActorRef) {
logTrace("handleGetFlow(" + flowId + ", " + coflowId + ", " + slaveId + ", " + actor + ")")
val client = idToClient.get(clientId)
assert(client != null)
val coflow = idToCoflow.get(coflowId)
assert(coflow != null)
// assert(coflow.contains(flowId))
var canSchedule = false
coflow.getFlowInfo(flowId) match {
case Some(flowInfo) => {
val st = now
canSchedule = coflow.addDestination(flowId, client)
// TODO: Always returning the default source.
        // Consider selecting based on traffic etc.
actor ! Some(GotFlowDesc(flowInfo.desc))
logInfo("Added destination to " + coflow + ". " + coflow.numFlowsToRegister +
" flows remain to register; in " + (now - st) + " milliseconds")
}
case None => {
// logWarning("Couldn't find flow " + flowId + " of coflow " + coflowId)
actor ! None
}
}
if (canSchedule) {
logInfo("Coflow " + coflowId + " ready to be scheduled")
self ! ScheduleRequest
}
}
def addSlave(
id: String,
host: String,
port: Int,
webUiPort: Int,
commPort: Int,
publicAddress: String,
actor: ActorRef): SlaveInfo = {
// There may be one or more refs to dead slaves on this same node with
// different IDs; remove them.
idToSlave.values.filter(
w => (w.host == host) && (w.state == SlaveState.DEAD)).foreach(idToSlave.values.remove(_))
val slave = new SlaveInfo(id, host, port, actor, webUiPort, commPort, publicAddress)
idToSlave.put(slave.id, slave)
actorToSlave(actor) = slave
addressToSlave(actor.path.address) = slave
hostToSlave(slave.host) = slave
slave
}
def removeSlave(slave: SlaveInfo) {
slave.setState(SlaveState.DEAD)
logError("Removing " + slave)
// Do not remove from idToSlave so that we remember DEAD slaves
actorToSlave -= slave.actor
addressToSlave -= slave.actor.path.address
hostToSlave -= slave.host
}
def addClient(clientName: String, host: String, commPort: Int, actor: ActorRef): ClientInfo = {
val date = new Date(now)
val client = new ClientInfo(now, newClientId(date), host, commPort, date, actor)
idToClient.put(client.id, client)
actorToClient(actor) = client
addressToClient(actor.path.address) = client
client
}
def removeClient(client: ClientInfo) {
if (client != null && idToClient.containsValue(client)) {
logTrace("Removing " + client)
idToClient.remove(client.id)
actorToClient -= client.actor
addressToClient -= client.actor.path.address
client.markFinished()
// Remove child coflows as well
client.coflows.foreach(removeCoflow)
}
}
def addCoflow(client: ClientInfo, desc: CoflowDescription, actor: ActorRef): CoflowInfo = {
val now = System.currentTimeMillis()
val date = new Date(now)
val coflow = new CoflowInfo(now, newCoflowId(date), desc, client, date, actor)
idToCoflow.put(coflow.id, coflow)
// Update its parent client
client.addCoflow(coflow)
coflow
}
// TODO: Let all involved clients know so that they can free up local resources
def removeCoflow(coflow: CoflowInfo) {
removeCoflow(coflow, CoflowState.FINISHED, true)
}
def removeCoflow(coflow: CoflowInfo, endState: CoflowState.Value, reschedule: Boolean) {
if (coflow != null && idToCoflow.containsValue(coflow)) {
idToCoflow.remove(coflow.id)
completedCoflows += coflow // Remember it in our history
coflow.markFinished(endState)
logInfo("Removing " + coflow)
if (reschedule) {
self ! ScheduleRequest
}
}
}
/**
* Schedule ongoing coflows and flows.
* Returns a Boolean indicating whether it ran or not
*/
def schedule(): Boolean = synchronized {
var st = now
// Schedule coflows
val activeCoflows = idToCoflow.values.toBuffer.asInstanceOf[ArrayBuffer[CoflowInfo]].filter(
x => x.remainingSizeInBytes > 0 &&
(x.curState == CoflowState.READY || x.curState == CoflowState.RUNNING))
val activeSlaves = idToSlave.values.toBuffer.asInstanceOf[ArrayBuffer[SlaveInfo]]
val schedulerOutput = coflowScheduler.schedule(SchedulerInput(activeCoflows, activeSlaves))
val step12Dur = now - st
st = now
// Communicate the schedule to clients
val activeFlows = schedulerOutput.scheduledCoflows.flatMap(_.getFlows)
logInfo("START_NEW_SCHEDULE: " + activeFlows.size + " flows in " +
schedulerOutput.scheduledCoflows.size + " coflows")
for (cf <- schedulerOutput.scheduledCoflows) {
val (timeStamp, totalBps) = cf.currentAllocation
logInfo(cf + " ==> " + (totalBps / 1048576.0) + " Mbps @ " + timeStamp)
}
activeFlows.groupBy(_.destClient).foreach { tuple =>
val client = tuple._1
val flows = tuple._2
val rateMap = flows.map(t => (t.desc.dataId, t.currentBps)).toMap
client.actor ! UpdatedRates(rateMap)
}
val step3Dur = now - st
logInfo("END_NEW_SCHEDULE in " + (step12Dur + step12Dur + step3Dur) + " = (" + step12Dur +
"+" + step12Dur + "+" + step3Dur + ") milliseconds")
// Remove rejected coflows
for (cf <- schedulerOutput.markedForRejection) {
val rejectMessage = "Cannot meet the specified deadline of " + cf.desc.deadlineMillis +
" milliseconds"
cf.parentClient.actor ! RejectedCoflow(cf.id, rejectMessage)
cf.getFlows.groupBy(_.destClient).foreach { tuple =>
val client = tuple._1
client.actor ! RejectedCoflow(cf.id, rejectMessage)
}
removeCoflow(cf, CoflowState.REJECTED, false)
}
true
}
/**
* Generate a new coflow ID given a coflow's submission date
*/
def newCoflowId(submitDate: Date): String = {
"COFLOW-%06d".format(nextCoflowNumber.getAndIncrement())
}
/**
* Generate a new client ID given a client's connection date
*/
def newClientId(submitDate: Date): String = {
"CLIENT-%06d".format(nextClientNumber.getAndIncrement())
}
/**
* Check for, and remove, any timed-out slaves
*/
def timeOutDeadSlaves() {
// Copy the slaves into an array so we don't modify the hashset while iterating through it
val expirationTime = System.currentTimeMillis() - SLAVE_TIMEOUT
val toRemove = idToSlave.values.filter(_.lastHeartbeat < expirationTime).toArray
for (slave <- toRemove) {
logWarning("Removing slave %s because we got no heartbeat in %d seconds".format(
slave.id, SLAVE_TIMEOUT))
removeSlave(slave)
}
}
}
}
private[varys] object Master {
private val systemName = "varysMaster"
private val actorName = "Master"
private val varysUrlRegex = "varys://([^:]+):([0-9]+)".r
def main(argStrings: Array[String]) {
val args = new MasterArguments(argStrings)
val masterObj = new Master(systemName, actorName, args.ip, args.port, args.webUiPort)
val (actorSystem, _) = masterObj.start()
actorSystem.awaitTermination()
}
/**
   * Returns an `akka.tcp://...` URL for the Master actor given a varysUrl `varys://host:port`.
*/
def toAkkaUrl(varysUrl: String): String = {
varysUrl match {
case varysUrlRegex(host, port) =>
"akka.tcp://%s@%s:%s/user/%s".format(systemName, host, port, actorName)
case _ =>
throw new VarysException("Invalid master URL: " + varysUrl)
}
}
}
|
mosharaf/varys
|
core/src/main/scala/varys/framework/master/Master.scala
|
Scala
|
apache-2.0
| 20,123 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import org.apache.commons.math3.linear._
import org.apache.spark.sql.SparkSession
/**
* Alternating least squares matrix factorization.
*
* This is an example implementation for learning how to use Spark. For more conventional use,
* please refer to org.apache.spark.ml.recommendation.ALS.
*/
object SparkALS {
// Parameters set through command line arguments
var M = 0 // Number of movies
var U = 0 // Number of users
var F = 0 // Number of features
var ITERATIONS = 0
val LAMBDA = 0.01 // Regularization coefficient
def generateR(): RealMatrix = {
val mh = randomMatrix(M, F)
val uh = randomMatrix(U, F)
mh.multiply(uh.transpose())
}
def rmse(targetR: RealMatrix, ms: Array[RealVector], us: Array[RealVector]): Double = {
val r = new Array2DRowRealMatrix(M, U)
for (i <- 0 until M; j <- 0 until U) {
r.setEntry(i, j, ms(i).dotProduct(us(j)))
}
val diffs = r.subtract(targetR)
var sumSqs = 0.0
for (i <- 0 until M; j <- 0 until U) {
val diff = diffs.getEntry(i, j)
sumSqs += diff * diff
}
math.sqrt(sumSqs / (M.toDouble * U.toDouble))
}
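  // Solves the regularised least-squares update for one movie (or user) factor: minimise
  // sum_j (R(i, j) - m_i . u_j)^2 + LAMBDA * U * ||m_i||^2, whose normal equations are
  // (X^T X + LAMBDA * U * I) m_i = X^T y, with X the matrix of user factors and y row i of R.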
def update(i: Int, m: RealVector, us: Array[RealVector], R: RealMatrix) : RealVector = {
val U = us.length
val F = us(0).getDimension
var XtX: RealMatrix = new Array2DRowRealMatrix(F, F)
var Xty: RealVector = new ArrayRealVector(F)
// For each user that rated the movie
for (j <- 0 until U) {
val u = us(j)
// Add u * u^t to XtX
XtX = XtX.add(u.outerProduct(u))
// Add u * rating to Xty
Xty = Xty.add(u.mapMultiply(R.getEntry(i, j)))
}
// Add regularization coefs to diagonal terms
for (d <- 0 until F) {
XtX.addToEntry(d, d, LAMBDA * U)
}
// Solve it with Cholesky
new CholeskyDecomposition(XtX).getSolver.solve(Xty)
}
def showWarning() {
System.err.println(
"""WARN: This is a naive implementation of ALS and is given as an example!
|Please use org.apache.spark.ml.recommendation.ALS
|for more conventional use.
""".stripMargin)
}
def main(args: Array[String]) {
var slices = 0
val options = (0 to 4).map(i => if (i < args.length) Some(args(i)) else None)
options.toArray match {
case Array(m, u, f, iters, slices_) =>
M = m.getOrElse("100").toInt
U = u.getOrElse("500").toInt
F = f.getOrElse("10").toInt
ITERATIONS = iters.getOrElse("5").toInt
slices = slices_.getOrElse("2").toInt
case _ =>
System.err.println("Usage: SparkALS [M] [U] [F] [iters] [partitions]")
System.exit(1)
}
showWarning()
println(s"Running with M=$M, U=$U, F=$F, iters=$ITERATIONS")
val spark = SparkSession
.builder
.appName("SparkALS")
.getOrCreate()
val sc = spark.sparkContext
val R = generateR()
// Initialize m and u randomly
var ms = Array.fill(M)(randomVector(F))
var us = Array.fill(U)(randomVector(F))
// Iteratively update movies then users
val Rc = sc.broadcast(R)
var msb = sc.broadcast(ms)
var usb = sc.broadcast(us)
for (iter <- 1 to ITERATIONS) {
println(s"Iteration $iter:")
ms = sc.parallelize(0 until M, slices)
.map(i => update(i, msb.value(i), usb.value, Rc.value))
.collect()
msb = sc.broadcast(ms) // Re-broadcast ms because it was updated
us = sc.parallelize(0 until U, slices)
.map(i => update(i, usb.value(i), msb.value, Rc.value.transpose()))
.collect()
usb = sc.broadcast(us) // Re-broadcast us because it was updated
println("RMSE = " + rmse(R, ms, us))
println()
}
spark.stop()
}
private def randomVector(n: Int): RealVector =
new ArrayRealVector(Array.fill(n)(math.random))
private def randomMatrix(rows: Int, cols: Int): RealMatrix =
new Array2DRowRealMatrix(Array.fill(rows, cols)(math.random))
}
// scalastyle:on println
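// Example invocation (illustrative; the examples jar path is a placeholder):
//   spark-submit --class org.apache.spark.examples.SparkALS <spark-examples.jar> 100 500 10 5 2
// i.e. M=100 movies, U=500 users, F=10 features, 5 iterations, 2 partitions.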
|
akopich/spark
|
examples/src/main/scala/org/apache/spark/examples/SparkALS.scala
|
Scala
|
apache-2.0
| 4,848 |
package lexer
sealed trait Tree
case class Leaf(value: String) extends Tree
case class Branch(operation:Char, left: Tree, right: Tree) extends Tree
object Auxiliary {
def removeWhite(string: String):String =
string filter ((x:Char) => x != ' ')
def separateByTokens(string: String, tokens: List[Char]): List[String] = {
val f = ((y: List[String], x: Char) => y flatMap ((z:String) => z.split(x).toList))
val tokens_and_brackets = tokens ::: List('(', ')')
return tokens_and_brackets.foldLeft(List(string))(f) filter ((x:String) => x!="")
}
def findBracketToken(string: String, tokens: List[Char]): List[(Char, Int)] = {
var counter = 0;
val total = tokens.length
def traverse(x: Char): Option[(Char, Int)] = {
if(x=='('){
counter += 1
return None
}
else if(x==')'){
counter -= 1
return None
}
else if(tokens.contains(x)){
return Some((x, counter))
}
else
return None
}
val temp = string.toList map traverse
val valid = temp filter ((x:Option[(Char, Int)]) => x.isDefined)
valid map ((x: Option[(Char, Int)]) => x getOrElse ('x', 0))
}
def makeForest(separated: List[String]): List[Tree] = {
separated map ((x:String) => Leaf(x))
}
def printTree(tree: Tree): String = {
tree match {
case Leaf(x) => x
case Branch(x, y, z) => printTree(y) + " " + printTree(z) + " " + x.toString
}
}
}
object Lexer {
val Tokens = List('+', '-', '*', '/')
val Precedence = Map(
('+', 2),
('-', 1),
('/', 3),
('*', 4)
)
def contract(forest: List[Tree], token_pos: List[(Char, Int)]):(List[Tree], List[(Char, Int)]) = {
val indexOp = token_pos indexOf (token_pos maxBy((x:(Char, Int)) => Precedence(x._1)+x._2*Tokens.length))
val new_branch = Branch(token_pos(indexOp)._1, forest(indexOp), forest(indexOp+1))
val new_token_pos = token_pos.take(indexOp) ::: token_pos.drop(indexOp+1)
val new_forest = forest.take(indexOp) ::: List(new_branch) ::: forest.drop(indexOp+2)
return (new_forest, new_token_pos)
}
def contract_till_end(forest: List[Tree], token_pos: List[(Char, Int)]):(List[Tree], List[(Char, Int)]) = {
if(token_pos.isEmpty)
return (forest, List[(Char, Int)]())
else {
val result = contract(forest, token_pos)
return contract_till_end(result._1, result._2)
}
}
def generate_tree(string: String): Tree = {
val forest = Auxiliary.makeForest(Auxiliary.separateByTokens(string, Tokens))
val token_pos = Auxiliary.findBracketToken(string, Tokens)
return contract_till_end(forest, token_pos)._1(0)
}
}
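// Hedged usage sketch, not part of the original file: build a tree for a small
// arithmetic expression and print it with Auxiliary.printTree (postfix order).
object LexerExample {
  def main(args: Array[String]): Unit = {
    val tree = Lexer.generate_tree("1+2*(3-4)")
    // The bracketed subtraction binds tightest, then '*', then '+',
    // so this prints "1 2 3 4 - * +"
    println(Auxiliary.printTree(tree))
  }
}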
|
Bolt64/my_code
|
scala/lexer.scala
|
Scala
|
mit
| 2,760 |
package com.sungevity.cmaes
import breeze.linalg._
import breeze.linalg.eigSym.EigSym
import breeze.numerics._
import com.sungevity.cmaes
/**
* A minimalistic implementation of Covariance Matrix Adaptation Evolution Strategy. For algorithm description please
* visit [[https://en.wikipedia.org/wiki/CMA-ES]]. This class ideally should not be used directly. Please use
* [[com.sungevity.cmaes.CMAESDriver]] class.
*/
class CMAEvolutionStrategy private [cmaes] (val iteration: Int,
lambda: Int,
n: Int,
ps: DenseVector[Double],
pc: DenseVector[Double],
b: DenseMatrix[Double],
c: DenseMatrix[Double],
d: DenseVector[Double],
sigma: Double,
xMean: DenseVector[Double]
) {
private val mu = math.floor(lambda/2).toInt
private val (weights, mueff): (DenseVector[Double], Double) = {
val w = DenseVector.fill(mu)(math.log(mu + 1.0)) - DenseVector((0 until mu).map(v => math.log(v + 1.0)).toArray)
val weights: DenseVector[Double] = w / sum(w)
(weights, (sum(weights) * sum(weights)) / sum(weights :* weights))
}
private val cs = (mueff+2) / (n+mueff+3)
private val cc = 4.0 / (n + 4.0)
private val c1 = 2 / (math.pow(n + 1.3, 2) + mueff)
private val cmu = min(1 - c1, 2 * (mueff - 2 + 1 / mueff) / (math.pow(n+2, 2) + mueff))
private val chiN = math.sqrt(n) * (1.0 - 1.0 / (4.0 * n) + 1.0 / (21.0 * n * n))
private val damps = 1.0 + 2.0*math.max(0.0, math.sqrt((mueff-1.0)/(n + 1.0))-1.0) + cs
/**
* Generate a new population of solutions.
*
* @return a new generation of solutions.
*/
def samplePopulation(): DenseMatrix[Double] = {
val g = breeze.stats.distributions.Gaussian(0,1)
val s = (0 until lambda) map {
_ =>
xMean + sigma * b * (d :* g.samplesVector(n))
}
val distribution = DenseMatrix(new DenseVector(s.toArray).valuesIterator.map(_.valuesIterator.toArray).toSeq: _*)
distribution
}
/**
* Update search distribution.
*
* @param population current population.
* @param fitness fitness of current population.
* @return a copy of CMAEvolutionStrategy with updated state.
*/
def updateDistribution(population: DenseMatrix[Double], fitness: DenseVector[Double]): CMAEvolutionStrategy = {
val arfitness = argsort(fitness)
val selected = DenseVector((0 until mu).map{
idx => population(arfitness(idx), ::).inner
} toArray)
val newXMean = DenseVector.zeros[Double](n).mapPairs {
case(idx, _) =>
sum(selected.map(_(idx)) :* weights)
}
val invsqrtC = b * diag(d.:^(-1.0)) * b.t
val psN: DenseVector[Double] = (1.0-cs)*ps + sqrt(cs*(2.0-cs)*mueff) * invsqrtC * (newXMean - xMean) / sigma
val hsig = if (norm(psN) / math.sqrt(1.0 - pow(1.0-cs, 2.0*(iteration+1))) / chiN < 1.4 + 2.0/(n + 1.0)) 1.0 else 0.0
val pcN: DenseVector[Double] = (1.0-cc)*pc + hsig * sqrt(cc*(2.0-cc)*mueff) * (newXMean - xMean) / sigma
val artmp: DenseVector[DenseVector[Double]] = selected.map {
s => (s - xMean) :/ sigma
}
val artmpm = DenseMatrix(artmp.valuesIterator.map(_.valuesIterator.toArray).toSeq: _*).t
val base = (1.0-c1-cmu) * c
val plusRankOne = c1 * (pcN * pcN.t + (1.0-hsig) * cc*(2.0-cc) * c)
val rankMu = cmu * artmpm * diag(weights) * artmpm.t
val nC = base + plusRankOne + rankMu
val sigmaN = sigma * math.exp((cs/damps)*((norm(psN)/chiN) - 1.0))
val psxps = sum(psN :* psN)
val sigmaNN = sigma * math.exp(((math.sqrt(psxps) / chiN) - 1.0) * cs / damps)
val EigSym(nD, nB) = eigSym(nC)
new CMAEvolutionStrategy(
iteration+1,
lambda,
n,
psN,
pcN,
nB,
nC,
sqrt(nD),
sigmaN,
newXMean
)
}
}
object CMAEvolutionStrategy {
/**
   * Instantiates a CMAEvolutionStrategy from an initial solution with the given initial distribution.
*
* @param lambda population size.
* @param initialX initial solution.
* @param initialStd initial standard deviation of first population.
* @return an instance of [[cmaes.CMAEvolutionStrategy]]
*/
def apply(lambda: Int, initialX: DenseVector[Double], initialStd: DenseVector[Double]): CMAEvolutionStrategy = {
new CMAEvolutionStrategy(
1,
lambda,
initialX.length,
DenseVector.zeros[Double](initialX.length),
DenseVector.zeros[Double](initialX.length),
DenseMatrix.eye[Double](initialX.length),
diag(initialStd),
initialStd,
1.0,
initialX)
}
  }
}
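// Hedged usage sketch, not part of the original file: a bare ask/tell loop that
// minimises the sphere function f(x) = sum(x_i^2). The intended entry point is
// com.sungevity.cmaes.CMAESDriver; this only shows how samplePopulation and
// updateDistribution fit together. Population size, step size and iteration count
// are arbitrary choices.
object CMAEvolutionStrategyExample {
  def main(args: Array[String]): Unit = {
    val dim = 5
    var es = CMAEvolutionStrategy(
      lambda = 16,
      initialX = DenseVector.fill(dim)(1.0),
      initialStd = DenseVector.fill(dim)(0.3))
    for (_ <- 1 to 200) {
      val population = es.samplePopulation() // lambda x dim matrix, one candidate per row
      val fitness = DenseVector.tabulate(population.rows) { i =>
        val row = population(i, ::).t
        row dot row // sphere function value of candidate i
      }
      es = es.updateDistribution(population, fitness)
    }
  }
}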
|
VolodymyrOrlov/cma-es-scala
|
src/main/scala/com/sungevity/cmaes/CMAEvolutionStrategy.scala
|
Scala
|
apache-2.0
| 4,965 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.ops.{RsqrtGrad, SqrtGrad}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import com.intel.analytics.bigdl.utils.tf.loaders.Utils.getType
import org.tensorflow.framework.{DataType, NodeDef}
import scala.reflect.ClassTag
class SqrtGrad extends TensorflowOpsLoader {
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
val t = getType(nodeDef.getAttrMap, "T")
if (t == DataType.DT_FLOAT) {
SqrtGrad[T, Float]()
} else if (t == DataType.DT_DOUBLE) {
SqrtGrad[T, Double]()
} else {
      throw new UnsupportedOperationException(s"Loading SqrtGrad is not supported when type is $t")
}
}
}
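// Hedged note, not part of the original file: TensorFlow's SqrtGrad takes the forward
// *output* y = sqrt(x) plus the incoming gradient dz and returns dx = dz * 0.5 / y.
// A scalar sanity check of that identity, assuming that convention:
//   val y  = math.sqrt(4.0)   // 2.0
//   val dz = 1.0
//   val dx = dz * 0.5 / y     // 0.25, which is d/dx sqrt(x) at x = 4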
|
jenniew/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/SqrtGrad.scala
|
Scala
|
apache-2.0
| 1,566 |
package scala.slick.jdbc.meta
import scala.slick.jdbc.{ResultSetInvoker, JdbcType}
/**
* A wrapper for a row in the ResultSet returned by DatabaseMetaData.getUDTs().
*/
case class MUDT(
typeName: MQName, className: String, sqlType: Int, remarks: String, baseType: Option[Short]) {
def sqlTypeName = JdbcType.typeNames.get(sqlType)
def getAttributes(attributeNamePattern: String = "%") =
MAttribute.getAttributes(typeName, attributeNamePattern)
}
object MUDT {
def getUDTs(typeNamePattern: MQName, types: Option[Seq[Int]] = None) = ResultSetInvoker[MUDT](
_.metaData.getUDTs(typeNamePattern.catalog_?, typeNamePattern.schema_?,
      typeNamePattern.name, types.map(_.toArray).getOrElse(null))) { r =>
MUDT(MQName.from(r), r.<<, r.<<, r.<<, r.<<)
}
}
|
boldradius/slick
|
src/main/scala/scala/slick/jdbc/meta/MUDT.scala
|
Scala
|
bsd-2-clause
| 800 |
package org.aguo.civsim.controller
import org.aguo.civsim.model.World
import org.aguo.civsim.model.domain._
import org.aguo.civsim.view._
object ExamineController {
def handleInput(input: String, world: World): World = {
val domain = world.domain
val population = domain.population
val territory = domain.territory
val jobs = population.jobAllocations.keySet
val buildings = territory.buildings
input match {
case Building(building) if buildings.contains(building) => ExamineBuildingScreen(building).render(world)
case Job(job) if jobs.contains(job) => ExamineJobScreen(job).render(world)
case _ => UnknownScreen.render(world)
}
}
}
|
aguo777/civ-sim
|
src/main/scala/org/aguo/civsim/controller/ExamineController.scala
|
Scala
|
mit
| 685 |
package redis.api.sortedsets
import akka.util.ByteString
import redis._
import redis.api.{Aggregate, Limit, SUM, ZaddOption}
import redis.protocol.RedisReply
case class Zadd[K, V](key: K, options: Seq[ZaddOption], scoreMembers: Seq[(Double, V)])
(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V])
extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZADD", keyAsString +: (options.map(_.serialize) ++
scoreMembers.foldLeft(Seq.empty[ByteString])({
case (acc, e) => ByteString(e._1.toString) +: convert.serialize(e._2) +: acc
})))
}
case class Zcard[K](key: K)(implicit keySeria: ByteStringSerializer[K]) extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZCARD", Seq(keyAsString))
}
case class Zcount[K](key: K, min: Limit = Limit(Double.NegativeInfinity), max: Limit = Limit(Double.PositiveInfinity))
(implicit keySeria: ByteStringSerializer[K])
extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZCOUNT", Seq(keyAsString, min.toByteString, max.toByteString))
}
case class Zincrby[K, V](key: K, increment: Double, member: V)(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V])
extends SimpleClusterKey[K] with RedisCommandBulkDouble {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZINCRBY", Seq(keyAsString, ByteString(increment.toString), convert.serialize(member)))
}
private[redis] object Zstore {
def buildArgs[KD, K, KK](destination: KD, key: K, keys: Seq[KK], aggregate: Aggregate = SUM)
(implicit keyDestSeria: ByteStringSerializer[KD], keySeria: ByteStringSerializer[K], keysSeria: ByteStringSerializer[KK]): Seq[ByteString] = {
(keyDestSeria.serialize(destination)
+: ByteString((1 + keys.size).toString)
+: keySeria.serialize(key)
+: keys.map(keysSeria.serialize)) ++ Seq(ByteString("AGGREGATE"), ByteString(aggregate.toString))
}
}
case class Zinterstore[KD: ByteStringSerializer, K: ByteStringSerializer, KK: ByteStringSerializer](destination: KD, key: K, keys: Seq[KK], aggregate: Aggregate = SUM)
extends RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZINTERSTORE", Zstore.buildArgs(destination, key, keys, aggregate))
}
private[redis] object ZstoreWeighted {
def buildArgs[KD, K](destination: KD, keys: Map[K, Double], aggregate: Aggregate = SUM)
(implicit keyDestSeria: ByteStringSerializer[KD], keySeria: ByteStringSerializer[K]): Seq[ByteString] = {
(keyDestSeria.serialize(destination) +: ByteString(keys.size.toString) +: keys.keys.map(keySeria.serialize).toSeq
) ++ (ByteString("WEIGHTS") +: keys.values.map(v => ByteString(v.toString)).toSeq
) ++ Seq(ByteString("AGGREGATE"), ByteString(aggregate.toString))
}
}
case class ZinterstoreWeighted[KD: ByteStringSerializer, K: ByteStringSerializer](destination: KD, keys: Map[K, Double], aggregate: Aggregate = SUM)
extends RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZINTERSTORE", ZstoreWeighted.buildArgs(destination, keys, aggregate))
}
case class Zrange[K, R](key: K, start: Long, stop: Long)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val encodedRequest: ByteString = encode("ZRANGE", Seq(keyAsString, ByteString(start.toString), ByteString(stop.toString)))
val isMasterOnly = false
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class ZrangeWithscores[K, R](key: K, start: Long, stop: Long)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteStringDouble[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZRANGE",
Seq(keyAsString, ByteString(start.toString), ByteString(stop.toString), ByteString("WITHSCORES")))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
private[redis] object Zrangebyscore {
def buildArgs[K](key: K, min: Limit, max: Limit, withscores: Boolean, limit: Option[(Long, Long)])
(implicit keySeria: ByteStringSerializer[K]): Seq[ByteString] = {
val builder = Seq.newBuilder[ByteString]
builder ++= Seq(keySeria.serialize(key), min.toByteString, max.toByteString)
if (withscores) {
builder += ByteString("WITHSCORES")
}
limit.foreach(l => {
builder ++= Seq(ByteString("LIMIT"), ByteString(l._1.toString), ByteString(l._2.toString))
})
builder.result()
}
}
private[redis] object Zrevrangebyscore {
def buildArgs[K](key: K, min: Limit, max: Limit, withscores: Boolean, limit: Option[(Long, Long)])
(implicit keySeria: ByteStringSerializer[K]): Seq[ByteString] = {
/*
* Find the actual min/max and reverse them in order to support backwards compatibility and legacy clients.
* See discussion in [[https://github.com/etaty/rediscala/issues/98 Github Issue]].
*/
val (_min, _max) = if(min.value < max.value) min -> max else max -> min
Zrangebyscore.buildArgs(key, _max, _min, withscores, limit)
}
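  // Hedged illustration, not part of the original file: whichever way round a legacy
  // caller passes the limits (min = Limit(1), max = Limit(10) or the reverse), the swap
  // above always emits "ZREVRANGEBYSCORE someKey 10 1", matching the command's
  // max-before-min argument order.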
}
private[redis] object Zrangebylex {
def buildArgs(key: ByteString, min: String, max: String, limit: Option[(Long, Long)]): Seq[ByteString] = {
val builder = Seq.newBuilder[ByteString]
builder ++= Seq(key, ByteString(min), ByteString(max))
limit.foreach(l => {
builder ++= Seq(ByteString("LIMIT"), ByteString(l._1.toString), ByteString(l._2.toString))
})
builder.result()
}
}
case class Zrangebyscore[K: ByteStringSerializer, R](key: K, min: Limit, max: Limit, limit: Option[(Long, Long)] = None)(implicit deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZRANGEBYSCORE", Zrangebyscore.buildArgs(key, min, max, withscores = false, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class ZrangebyscoreWithscores[K: ByteStringSerializer, R](key: K, min: Limit, max: Limit, limit: Option[(Long, Long)] = None)(implicit deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteStringDouble[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZRANGEBYSCORE", Zrangebyscore.buildArgs(key, min, max, withscores = true, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class Zrank[K, V](key: K, member: V)(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V]) extends SimpleClusterKey[K] with RedisCommandRedisReplyOptionLong {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZRANK", Seq(keyAsString, convert.serialize(member)))
}
case class Zrem[K, V](key: K, members: Seq[V])(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V]) extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZREM", keyAsString +: members.map(v => convert.serialize(v)))
}
case class Zremrangebylex[K](key: K, min: String, max: String)(implicit keySeria: ByteStringSerializer[K]) extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZREMRANGEBYLEX", Seq(keyAsString, ByteString(min), ByteString(max)))
}
case class Zremrangebyrank[K](key: K, start: Long, stop: Long)(implicit keySeria: ByteStringSerializer[K]) extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZREMRANGEBYRANK", Seq(keyAsString, ByteString(start.toString), ByteString(stop.toString)))
}
case class Zremrangebyscore[K](key: K, min: Limit, max: Limit)(implicit keySeria: ByteStringSerializer[K]) extends SimpleClusterKey[K] with RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZREMRANGEBYSCORE", Seq(keyAsString, min.toByteString, max.toByteString))
}
case class Zrevrange[K, R](key: K, start: Long, stop: Long)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R]) extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANGE", Seq(keyAsString, ByteString(start.toString), ByteString(stop.toString)))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class ZrevrangeWithscores[K, R](key: K, start: Long, stop: Long)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R]) extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteStringDouble[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANGE", Seq(keyAsString, ByteString(start.toString), ByteString(stop.toString), ByteString("WITHSCORES")))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class Zrevrangebyscore[K: ByteStringSerializer, R](key: K, min: Limit, max: Limit, limit: Option[(Long, Long)] = None)(implicit deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANGEBYSCORE", Zrevrangebyscore.buildArgs(key, min, max, withscores = false, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class ZrevrangebyscoreWithscores[K: ByteStringSerializer, R](key: K, min: Limit, max: Limit, limit: Option[(Long, Long)] = None)(implicit deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteStringDouble[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANGEBYSCORE", Zrevrangebyscore.buildArgs(key, min, max, withscores = true, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class Zrevrank[K, V](key: K, member: V)(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V]) extends SimpleClusterKey[K] with RedisCommandRedisReplyOptionLong {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANK", Seq(keyAsString, convert.serialize(member)))
}
case class Zscore[K, V](key: K, member: V)(implicit keySeria: ByteStringSerializer[K], convert: ByteStringSerializer[V]) extends SimpleClusterKey[K] with RedisCommandBulkOptionDouble {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZSCORE", Seq(keyAsString, convert.serialize(member)))
}
case class Zunionstore[KD: ByteStringSerializer, K: ByteStringSerializer, KK: ByteStringSerializer]
(destination: KD, key: K, keys: Seq[KK], aggregate: Aggregate = SUM)
extends RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZUNIONSTORE", Zstore.buildArgs(destination, key, keys, aggregate))
}
case class ZunionstoreWeighted[KD: ByteStringSerializer, K: ByteStringSerializer](destination: KD, keys: Map[K, Double], aggregate: Aggregate = SUM)
extends RedisCommandIntegerLong {
val isMasterOnly = true
val encodedRequest: ByteString = encode("ZUNIONSTORE", ZstoreWeighted.buildArgs(destination, keys, aggregate))
}
case class Zrangebylex[K, R](key: K, min: String, max: String, limit: Option[(Long, Long)] = None)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZRANGEBYLEX", Zrangebylex.buildArgs(keyAsString, min, max, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class Zrevrangebylex[K, R](key: K, max: String, min: String, limit: Option[(Long, Long)] = None)(implicit keySeria: ByteStringSerializer[K], deserializerR: ByteStringDeserializer[R])
extends SimpleClusterKey[K] with RedisCommandMultiBulkSeqByteString[R] {
val isMasterOnly = false
val encodedRequest: ByteString = encode("ZREVRANGEBYLEX", Zrangebylex.buildArgs(keyAsString, max, min, limit))
val deserializer: ByteStringDeserializer[R] = deserializerR
}
case class Zscan[K, C, R](key: K, cursor: C, count: Option[Int], matchGlob: Option[String])(implicit redisKey: ByteStringSerializer[K], redisCursor: ByteStringSerializer[C], deserializerR: ByteStringDeserializer[R], scoreDeserializer: ByteStringDeserializer[Double])
extends SimpleClusterKey[K] with RedisCommandMultiBulkCursor[Seq[(Double, R)]] with ByteStringDeserializerDefault {
val isMasterOnly: Boolean = false
val encodedRequest: ByteString = encode("ZSCAN", withOptionalParams(Seq(keyAsString, redisCursor.serialize(cursor))))
val empty: Seq[(Double, R)] = Seq.empty
def decodeResponses(responses: Seq[RedisReply]) =
responses.grouped(2).map { xs =>
val data = xs.head
val score = scoreDeserializer.deserialize(xs(1).toByteString)
score -> deserializerR.deserialize(data.toByteString)
}.toSeq
}
|
etaty/rediscala
|
src/main/scala/redis/api/SortedSets.scala
|
Scala
|
apache-2.0
| 13,348 |
package sublimeSpam.ui
import scala.swing._
import Swing._
import java.awt.Color
import sublimeSpam._
import ui.MainMenu._
class ResultPanel(trackPath: String, trackDesc: String, game: Game, previousBestOption: Option[(String, (Int, String, String))]) extends BorderPanel {
minimumSize = new Dimension(windowWidth, windowHeight)
preferredSize = new Dimension(windowWidth, windowHeight)
maximumSize = new Dimension(windowWidth, windowHeight)
this.background = new Color(35, 20, 53)
add(new FlowPanel {
minimumSize = new Dimension(windowWidth, 60)
preferredSize = new Dimension(windowWidth, 60)
maximumSize = new Dimension(windowWidth, 60)
opaque = false
contents += new Button("Accept") {
action = new Action(text) {
def apply() = {
updateContents(mainFrame.mainPanel)
}
}
}
}, BorderPanel.Position.North)
add (new BoxPanel(Orientation.Vertical) {
minimumSize = new Dimension(windowWidth, windowHeight - 100)
preferredSize = new Dimension(windowWidth, windowHeight - 100)
maximumSize = new Dimension(windowWidth, windowHeight - 100)
border = EmptyBorder(20)
opaque = false
contents += new FlowPanel { //Score
opaque = false
contents += new Label {
val formattedScoreString = Data.formatLargeNumber(game.score)
text = formattedScoreString
font = new java.awt.Font("High tower text", 0, 72)
foreground = Color.YELLOW
}
}
contents += new FlowPanel { //Percentage
opaque = false
contents += new Label {
val formattedScoreString = Data.formatLargeNumber(game.score)
text = Math.round(game.hits.toDouble * 100 / Math.max(1, game.entitiesLeft + game.passed)) + "%"
font = new java.awt.Font("High tower text", 0, 42)
foreground = Color.YELLOW
}
}
contents += VStrut(40)
contents += new FlowPanel { //Trackname
opaque = false
contents += new Label {
text = "<html><center>" + trackDesc.split("-")(0).trim //+ "<br>" + trackDesc.split("-")(1).trim
font = new java.awt.Font("High tower text", 0, 36)
foreground = Color.CYAN
}
}
contents += new FlowPanel { //Artist
opaque = false
contents += new Label {
text = "<html><center>" + trackDesc.split("-")(1).trim
font = new java.awt.Font("High tower text", 0, 24)
foreground = Color.CYAN
}
}
contents += VStrut(40)
if (previousBestOption.isDefined) {
val newRecord = game.score > previousBestOption.get._2._1
if (newRecord) {
contents += new FlowPanel { //New record notif
opaque = false
contents += new Label {
text = "NEW RECORD!"
font = new java.awt.Font("High tower text", 0, 28)
foreground = Color.GREEN
}
}
            contents += new FlowPanel { //Previous best label
opaque = false
contents += new Label {
text = "Previous best:"
font = new java.awt.Font("High tower text", 0, 24)
foreground = Color.WHITE
}
}
} else {
            contents += new FlowPanel { //Current record label
opaque = false
contents += new Label {
text = "Current record:"
font = new java.awt.Font("High tower text", 0, 24)
foreground = Color.WHITE
}
}
}
          contents += new FlowPanel { //Record details (score, holder, date)
opaque = false
contents += new Label {
text = Data.formatLargeNumber(previousBestOption.get._2._1) + " by " + previousBestOption.get._2._2 + " on " + previousBestOption.get._2._3
font = new java.awt.Font("High tower text", 0, 16)
foreground = Color.WHITE
}
}
}
contents += new FlowPanel { //Track id
opaque = false
contents += new Label {
text = "Track id: " + trackPath
font = new java.awt.Font("High tower text", 0, 16)
foreground = Color.GRAY
}
}
}, BorderPanel.Position.Center)
}
|
Berthur/SublimeSpam
|
src/sublimeSpam/ui/ResultPanel.scala
|
Scala
|
gpl-3.0
| 4,148 |
package rewriting
import benchmarks.MolecularDynamics
import ir._
import ir.ast._
import lift.arithmetic.SizeVar
import opencl.executor.{Execute, Executor}
import opencl.ir._
import opencl.ir.pattern.ReduceSeq
import org.junit.Assert._
import org.junit.{AfterClass, BeforeClass, Test}
import rewriting.macrorules.MacroRules
import rewriting.rules.{CopyRules, OpenCLRules, Rules, SimplificationRules}
object TestRewriteMD {
@BeforeClass
def before(): Unit =
Executor.loadAndInit()
@AfterClass
def after(): Unit =
Executor.shutdown()
}
class TestRewriteMD {
@Test
def shoc(): Unit = {
val mdCompute = UserFun("updateF",
Array("f", "ipos", "jpos", "cutsq", "lj1", "lj2"),
"""|{
| // Calculate distance
| float delx = ipos.x - jpos.x;
| float dely = ipos.y - jpos.y;
| float delz = ipos.z - jpos.z;
| float r2inv = delx*delx + dely*dely + delz*delz;
| // If distance is less than cutoff, calculate force
| if (r2inv < cutsq) {
| r2inv = 1.0f/r2inv;
| float r6inv = r2inv * r2inv * r2inv;
| float forceC = r2inv*r6inv*(lj1*r6inv - lj2);
| f.x += delx * forceC;
| f.y += dely * forceC;
| f.z += delz * forceC;
| }
| return f;
|}
""".stripMargin,
Seq(Float4, Float4, Float4, Float, Float, Float),
Float4)
val N = SizeVar("N") // number of particles
val M = SizeVar("M") // number of neighbors
val f = fun(
ArrayTypeWSWC(Float4, N),
ArrayTypeWSWC(ArrayTypeWSWC(Int, N), M),
Float,
Float,
Float,
(particles, neighbourIds, cutsq, lj1, lj2) =>
Zip(particles, Transpose() $ neighbourIds) :>>
        Map( \(p =>
Filter(particles, p._1) :>>
// TODO: Separate into Map and Reduce?
            ReduceSeq(\((force, n) =>
mdCompute(force, p._0, n, cutsq, lj1, lj2)
), Value(0.0f, Float4))
) )
)
val f1 = Rewrite.applyRuleAtId(f, 0, Rules.splitJoin(128))
val f2 = Rewrite.applyRuleAtId(f1, 7, CopyRules.addIdForMapParam)
val f3 = Rewrite.applyRuleAtId(f2, 10, CopyRules.implementOneLevelOfId)
val f4 = Rewrite.applyRuleAtId(f3, 1, OpenCLRules.mapWrg)
val f5 = Rewrite.applyRuleAtId(f4, 7, OpenCLRules.mapLcl)
val f6 = Rewrite.applyRuleAtId(f5, 14, OpenCLRules.privateMemory)
val f7 = Rewrite.applyRuleAtId(f6, 17, CopyRules.implementOneLevelOfId)
val f8 = Rewrite.applyRuleAtId(f7, 11, SimplificationRules.dropId)
val f9 = Rewrite.applyRuleAtId(f8, 18, CopyRules.addIdAfterReduce)
val f10 = Rewrite.applyRuleAtId(f9, 18, OpenCLRules.globalMemory)
val f11 = Rewrite.applyRuleAtId(f10, 35, CopyRules.implementIdAsDeepCopy)
val l0 = Rewrite.applyRuleUntilCannot(f11, MacroRules.userFunCompositionToPrivate)
val inputSize = 1024
val maxNeighbours = 128
val particles = Array.fill(inputSize, 4)(util.Random.nextFloat() * 20.0f)
val particlesTuple = particles.map { case Array(a, b, c, d) => (a, b, c, d) }
val neighbours = MolecularDynamics.buildNeighbourList(particlesTuple, maxNeighbours).transpose
val cutsq = 16.0f
val lj1 = 1.5f
val lj2 = 2.0f
val gold = MolecularDynamics.mdScala(particlesTuple, neighbours, cutsq, lj1, lj2)
.map(_.productIterator).reduce(_ ++ _).asInstanceOf[Iterator[Float]].toArray
val (output, _) =
Execute(inputSize)[Array[Float]](l0, particles, neighbours, cutsq, lj1, lj2)
assertEquals(gold.length, output.length)
(output, gold).zipped.map((x, y) => {
var diff = (x - y) / x
if (x == 0.0f)
diff = 0.0f
math.sqrt(diff * diff).toFloat
}).zipWithIndex.foreach(x => assertEquals("Error at pos " + x._2, 0.0f, x._1, 0.1f))
}
}
|
lift-project/lift
|
src/test/rewriting/TestRewriteMD.scala
|
Scala
|
mit
| 3,852 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import scala.annotation.tailrec
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.internal.SQLConf
/**
* Encapsulates star-schema detection logic.
*/
case class StarSchemaDetection(conf: SQLConf) extends PredicateHelper {
/**
* Star schema consists of one or more fact tables referencing a number of dimension
* tables. In general, star-schema joins are detected using the following conditions:
* 1. Informational RI constraints (reliable detection)
* + Dimension contains a primary key that is being joined to the fact table.
* + Fact table contains foreign keys referencing multiple dimension tables.
* 2. Cardinality based heuristics
* + Usually, the table with the highest cardinality is the fact table.
* + Table being joined with the most number of tables is the fact table.
*
* To detect star joins, the algorithm uses a combination of the above two conditions.
* The fact table is chosen based on the cardinality heuristics, and the dimension
* tables are chosen based on the RI constraints. A star join will consist of the largest
* fact table joined with the dimension tables on their primary keys. To detect that a
* column is a primary key, the algorithm uses table and column statistics.
*
* The algorithm currently returns only the star join with the largest fact table.
* Choosing the largest fact table on the driving arm to avoid large inners is in
* general a good heuristic. This restriction will be lifted to observe multiple
* star joins.
*
* The highlights of the algorithm are the following:
*
* Given a set of joined tables/plans, the algorithm first verifies if they are eligible
* for star join detection. An eligible plan is a base table access with valid statistics.
* A base table access represents Project or Filter operators above a LeafNode. Conservatively,
* the algorithm only considers base table access as part of a star join since they provide
* reliable statistics. This restriction can be lifted with the CBO enablement by default.
*
* If some of the plans are not base table access, or statistics are not available, the algorithm
* returns an empty star join plan since, in the absence of statistics, it cannot make
* good planning decisions. Otherwise, the algorithm finds the table with the largest cardinality
* (number of rows), which is assumed to be a fact table.
*
* Next, it computes the set of dimension tables for the current fact table. A dimension table
* is assumed to be in a RI relationship with a fact table. To infer column uniqueness,
* the algorithm compares the number of distinct values with the total number of rows in the
* table. If their relative difference is within certain limits (i.e. ndvMaxError * 2, adjusted
* based on 1TB TPC-DS data), the column is assumed to be unique.
*/
def findStarJoins(
input: Seq[LogicalPlan],
conditions: Seq[Expression]): Seq[LogicalPlan] = {
val emptyStarJoinPlan = Seq.empty[LogicalPlan]
if (input.size < 2) {
emptyStarJoinPlan
} else {
// Find if the input plans are eligible for star join detection.
// An eligible plan is a base table access with valid statistics.
val foundEligibleJoin = input.forall {
case PhysicalOperation(_, _, t: LeafNode) if t.stats.rowCount.isDefined => true
case _ => false
}
if (!foundEligibleJoin) {
// Some plans don't have stats or are complex plans. Conservatively,
// return an empty star join. This restriction can be lifted
// once statistics are propagated in the plan.
emptyStarJoinPlan
} else {
// Find the fact table using cardinality based heuristics i.e.
// the table with the largest number of rows.
val sortedFactTables = input.map { plan =>
TableAccessCardinality(plan, getTableAccessCardinality(plan))
}.collect { case t @ TableAccessCardinality(_, Some(_)) =>
t
}.sortBy(_.size)(implicitly[Ordering[Option[BigInt]]].reverse)
sortedFactTables match {
case Nil =>
emptyStarJoinPlan
case table1 :: table2 :: _
if table2.size.get.toDouble > conf.starSchemaFTRatio * table1.size.get.toDouble =>
// If the top largest tables have comparable number of rows, return an empty star plan.
// This restriction will be lifted when the algorithm is generalized
// to return multiple star plans.
emptyStarJoinPlan
case TableAccessCardinality(factTable, _) :: rest =>
// Find the fact table joins.
val allFactJoins = rest.collect { case TableAccessCardinality(plan, _)
if findJoinConditions(factTable, plan, conditions).nonEmpty =>
plan
}
// Find the corresponding join conditions.
val allFactJoinCond = allFactJoins.flatMap { plan =>
val joinCond = findJoinConditions(factTable, plan, conditions)
joinCond
}
// Verify if the join columns have valid statistics.
// Allow any relational comparison between the tables. Later
// we will heuristically choose a subset of equi-join
// tables.
val areStatsAvailable = allFactJoins.forall { dimTable =>
allFactJoinCond.exists {
case BinaryComparison(lhs: AttributeReference, rhs: AttributeReference) =>
val dimCol = if (dimTable.outputSet.contains(lhs)) lhs else rhs
val factCol = if (factTable.outputSet.contains(lhs)) lhs else rhs
hasStatistics(dimCol, dimTable) && hasStatistics(factCol, factTable)
case _ => false
}
}
if (!areStatsAvailable) {
emptyStarJoinPlan
} else {
// Find the subset of dimension tables. A dimension table is assumed to be in a
// RI relationship with the fact table. Only consider equi-joins
// between a fact and a dimension table to avoid expanding joins.
val eligibleDimPlans = allFactJoins.filter { dimTable =>
allFactJoinCond.exists {
case cond @ Equality(lhs: AttributeReference, rhs: AttributeReference) =>
val dimCol = if (dimTable.outputSet.contains(lhs)) lhs else rhs
isUnique(dimCol, dimTable)
case _ => false
}
}
if (eligibleDimPlans.isEmpty || eligibleDimPlans.size < 2) {
// An eligible star join was not found since the join is not
// an RI join, or the star join is an expanding join.
// Also, a star would involve more than one dimension table.
emptyStarJoinPlan
} else {
factTable +: eligibleDimPlans
}
}
}
}
}
}
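  // Hedged illustration, not part of the original file: for a classic retail schema
  //   fact_sales (10M rows) JOIN dim_date  (10K rows) ON fact_sales.date_id  = dim_date.id
  //                         JOIN dim_store (1K rows)  ON fact_sales.store_id = dim_store.id
  // fact_sales is selected as the fact table by cardinality, and dim_date/dim_store qualify
  // as dimensions because the join keys on their side look unique (PK-like) according to the
  // column statistics, so findStarJoins would return Seq(fact_sales, dim_date, dim_store).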
/**
* Determines if a column referenced by a base table access is a primary key.
* A column is a PK if it is not nullable and has unique values.
* To determine if a column has unique values in the absence of informational
* RI constraints, the number of distinct values is compared to the total
* number of rows in the table. If their relative difference
* is within the expected limits (i.e. 2 * spark.sql.statistics.ndv.maxError based
* on TPC-DS data results), the column is assumed to have unique values.
*/
private def isUnique(
column: Attribute,
plan: LogicalPlan): Boolean = plan match {
case PhysicalOperation(_, _, t: LeafNode) =>
val leafCol = findLeafNodeCol(column, plan)
leafCol match {
case Some(col) if t.outputSet.contains(col) =>
val stats = t.stats
stats.rowCount match {
case Some(rowCount) if rowCount >= 0 =>
if (stats.attributeStats.nonEmpty && stats.attributeStats.contains(col)) {
val colStats = stats.attributeStats.get(col)
if (colStats.get.nullCount > 0) {
false
} else {
val distinctCount = colStats.get.distinctCount
val relDiff = math.abs((distinctCount.toDouble / rowCount.toDouble) - 1.0d)
// ndvMaxErr adjusted based on TPCDS 1TB data results
relDiff <= conf.ndvMaxError * 2
}
} else {
false
}
case None => false
}
case None => false
}
case _ => false
}
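  // Hedged worked example, not part of the original file: with rowCount = 1,000,000 and
  // distinctCount = 980,000, relDiff = |0.98 - 1.0| = 0.02. Assuming the default
  // spark.sql.statistics.ndv.maxError of roughly 0.05, the threshold is 0.1, so the
  // column is treated as unique (PK-like) for star-join detection.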
/**
* Given a column over a base table access, it returns
* the leaf node column from which the input column is derived.
*/
@tailrec
private def findLeafNodeCol(
column: Attribute,
plan: LogicalPlan): Option[Attribute] = plan match {
case pl @ PhysicalOperation(_, _, _: LeafNode) =>
pl match {
case t: LeafNode if t.outputSet.contains(column) =>
Option(column)
case p: Project if p.outputSet.exists(_.semanticEquals(column)) =>
val col = p.outputSet.find(_.semanticEquals(column)).get
findLeafNodeCol(col, p.child)
case f: Filter =>
findLeafNodeCol(column, f.child)
case _ => None
}
case _ => None
}
/**
* Checks if a column has statistics.
* The column is assumed to be over a base table access.
*/
private def hasStatistics(
column: Attribute,
plan: LogicalPlan): Boolean = plan match {
case PhysicalOperation(_, _, t: LeafNode) =>
val leafCol = findLeafNodeCol(column, plan)
leafCol match {
case Some(col) if t.outputSet.contains(col) =>
val stats = t.stats
stats.attributeStats.nonEmpty && stats.attributeStats.contains(col)
case None => false
}
case _ => false
}
/**
* Returns the join predicates between two input plans. It only
* considers basic comparison operators.
*/
@inline
private def findJoinConditions(
plan1: LogicalPlan,
plan2: LogicalPlan,
conditions: Seq[Expression]): Seq[Expression] = {
val refs = plan1.outputSet ++ plan2.outputSet
conditions.filter {
case BinaryComparison(_, _) => true
case _ => false
}.filterNot(canEvaluate(_, plan1))
.filterNot(canEvaluate(_, plan2))
.filter(_.references.subsetOf(refs))
}
/**
* Checks if a star join is a selective join. A star join is assumed
* to be selective if there are local predicates on the dimension
* tables.
*/
private def isSelectiveStarJoin(
dimTables: Seq[LogicalPlan],
conditions: Seq[Expression]): Boolean = dimTables.exists {
case plan @ PhysicalOperation(_, p, _: LeafNode) =>
// Checks if any condition applies to the dimension tables.
// Exclude the IsNotNull predicates until predicate selectivity is available.
// In most cases, this predicate is artificially introduced by the Optimizer
// to enforce nullability constraints.
val localPredicates = conditions.filterNot(_.isInstanceOf[IsNotNull])
.exists(canEvaluate(_, plan))
// Checks if there are any predicates pushed down to the base table access.
val pushedDownPredicates = p.nonEmpty && !p.forall(_.isInstanceOf[IsNotNull])
localPredicates || pushedDownPredicates
case _ => false
}
/**
* Helper case class to hold (plan, rowCount) pairs.
*/
private case class TableAccessCardinality(plan: LogicalPlan, size: Option[BigInt])
/**
* Returns the cardinality of a base table access. A base table access represents
* a LeafNode, or Project or Filter operators above a LeafNode.
*/
private def getTableAccessCardinality(
input: LogicalPlan): Option[BigInt] = input match {
case PhysicalOperation(_, cond, t: LeafNode) if t.stats.rowCount.isDefined =>
if (conf.cboEnabled && input.stats.rowCount.isDefined) {
Option(input.stats.rowCount.get)
} else {
Option(t.stats.rowCount.get)
}
case _ => None
}
/**
* Reorders a star join based on heuristics. It is called from ReorderJoin if CBO is disabled.
* 1) Finds the star join with the largest fact table.
* 2) Places the fact table the driving arm of the left-deep tree.
* This plan avoids large table access on the inner, and thus favor hash joins.
* 3) Applies the most selective dimensions early in the plan to reduce the amount of
* data flow.
*/
def reorderStarJoins(
input: Seq[(LogicalPlan, InnerLike)],
conditions: Seq[Expression]): Seq[(LogicalPlan, InnerLike)] = {
assert(input.size >= 2)
val emptyStarJoinPlan = Seq.empty[(LogicalPlan, InnerLike)]
// Find the eligible star plans. Currently, it only returns
// the star join with the largest fact table.
val eligibleJoins = input.collect{ case (plan, Inner) => plan }
val starPlan = findStarJoins(eligibleJoins, conditions)
if (starPlan.isEmpty) {
emptyStarJoinPlan
} else {
val (factTable, dimTables) = (starPlan.head, starPlan.tail)
// Only consider selective joins. This case is detected by observing local predicates
// on the dimension tables. In a star schema relationship, the join between the fact and the
// dimension table is a FK-PK join. Heuristically, a selective dimension may reduce
// the result of a join.
if (isSelectiveStarJoin(dimTables, conditions)) {
val reorderDimTables = dimTables.map { plan =>
TableAccessCardinality(plan, getTableAccessCardinality(plan))
}.sortBy(_.size).map {
case TableAccessCardinality(p1, _) => p1
}
val reorderStarPlan = factTable +: reorderDimTables
reorderStarPlan.map(plan => (plan, Inner))
} else {
emptyStarJoinPlan
}
}
}
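  // Hedged illustration, not part of the original file: continuing the retail example above,
  // if local predicates make the join selective, the reordered sequence puts the fact table
  // on the driving arm and then the dimensions in ascending size order, e.g.
  //   Seq((fact_sales, Inner), (dim_store, Inner), (dim_date, Inner))
  // so the smaller (heuristically more selective) dimensions are applied first.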
}
|
poffuomo/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/StarSchemaDetection.scala
|
Scala
|
apache-2.0
| 15,013 |
package tutorial.webapp
import scala.scalajs.js.JSApp
object TutorialApp extends JSApp {
case class Interval(length: Int, max: Int) {
def add(value: Int) = Interval(length + 1, math.max(max, value))
}
private val emptyInterval = Interval(0, Int.MinValue)
private def calcNextState(intervals: List[Interval], optValue: Option[Int]) = {
val currentInterval = intervals.head
(currentInterval, optValue) match {
case (`emptyInterval`, None) => intervals
case (_, None) => emptyInterval :: intervals
case (_, Some(value)) => currentInterval.add(value) :: intervals.tail
}
}
def calcIntervals(values: List[Option[Int]]): List[Interval] = {
val initState = List(emptyInterval)
val intervals = values.foldLeft(initState)(calcNextState)
if (intervals.head == emptyInterval) intervals.tail.reverse
else intervals.reverse
}
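  // Hedged worked example, not part of the original file: every run of consecutive Some
  // values becomes one Interval(length, max); None values only separate runs, e.g.
  //   calcIntervals(List(Some(1), None, Some(2), Some(5), Some(3), None))
  //     == List(Interval(1, 1), Interval(3, 5))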
def main(): Unit = {
val X = List(None, None, Some(-2), Some(-4), None,
Some(-1), Some(3), Some(1), None, None,
Some(2), Some(-4), Some(5), None)
val intervalsLengthAndMax = calcIntervals(X)
println(s"Result 2: ($intervalsLengthAndMax)")
}
}
|
enpassant/scalajs
|
src/main/scala/tutorial/webapp/TutorialApp.scala
|
Scala
|
apache-2.0
| 1,153 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigtable
import java.nio.charset.Charset
import com.google.bigtable.admin.v2._
import com.google.bigtable.admin.v2.ModifyColumnFamiliesRequest.Modification
import com.google.cloud.bigtable.config.BigtableOptions
import com.google.cloud.bigtable.grpc._
import com.google.protobuf.{ByteString, Duration => ProtoDuration}
import org.joda.time.Duration
import org.slf4j.{Logger, LoggerFactory}
import scala.jdk.CollectionConverters._
import scala.util.Try
/** Bigtable Table Admin API helper commands. */
object TableAdmin {
sealed trait CreateDisposition
object CreateDisposition {
case object Never extends CreateDisposition
case object CreateIfNeeded extends CreateDisposition
val default = CreateIfNeeded
}
private val log: Logger = LoggerFactory.getLogger(TableAdmin.getClass)
private def adminClient[A](
bigtableOptions: BigtableOptions
)(f: BigtableTableAdminClient => A): Try[A] = {
val channel =
ChannelPoolCreator.createPool(bigtableOptions)
val executorService =
BigtableSessionSharedThreadPools.getInstance().getRetryExecutor
val client = new BigtableTableAdminGrpcClient(channel, executorService, bigtableOptions)
val result = Try(f(client))
channel.shutdownNow()
result
}
/**
* Retrieves a set of tables from the given instancePath.
*
* @param client Client for calling Bigtable.
* @param instancePath String of the form "projects/$project/instances/$instance".
   * @return Set of fully-qualified table names (paths) that currently exist in the instance.
*/
private def fetchTables(client: BigtableTableAdminClient, instancePath: String): Set[String] =
client
.listTables(
ListTablesRequest
.newBuilder()
.setParent(instancePath)
.build()
)
.getTablesList
.asScala
.map(_.getName)
.toSet
/**
* Ensure that tables and column families exist.
* Checks for existence of tables or creates them if they do not exist. Also checks for
* existence of column families within each table and creates them if they do not exist.
*
* @param tablesAndColumnFamilies A map of tables and column families. Keys are table names.
* Values are a list of column family names.
*/
def ensureTables(
bigtableOptions: BigtableOptions,
tablesAndColumnFamilies: Map[String, Iterable[String]],
createDisposition: CreateDisposition = CreateDisposition.default
): Unit = {
val tcf = tablesAndColumnFamilies.iterator.map { case (k, l) =>
k -> l.map(_ -> None)
}.toMap
ensureTablesImpl(bigtableOptions, tcf, createDisposition).get
}
/**
* Ensure that tables and column families exist.
* Checks for existence of tables or creates them if they do not exist. Also checks for
* existence of column families within each table and creates them if they do not exist.
*
* @param tablesAndColumnFamilies A map of tables and column families.
* Keys are table names. Values are a
* list of column family names along with
* the desired cell expiration. Cell
* expiration is the duration before which
* garbage collection of a cell may occur.
* Note: minimum granularity is one second.
*/
def ensureTablesWithExpiration(
bigtableOptions: BigtableOptions,
tablesAndColumnFamilies: Map[String, Iterable[(String, Option[Duration])]],
createDisposition: CreateDisposition = CreateDisposition.default
): Unit = {
// Convert Duration to GcRule
val x = tablesAndColumnFamilies.iterator.map { case (k, v) =>
k -> v.map { case (columnFamily, duration) =>
(columnFamily, duration.map(gcRuleFromDuration))
}
}.toMap
ensureTablesImpl(bigtableOptions, x, createDisposition).get
}
/**
* Ensure that tables and column families exist.
* Checks for existence of tables or creates them if they do not exist. Also checks for
* existence of column families within each table and creates them if they do not exist.
*
* @param tablesAndColumnFamilies A map of tables and column families. Keys are table names.
* Values are a list of column family names along with the desired
* GcRule.
*/
def ensureTablesWithGcRules(
bigtableOptions: BigtableOptions,
tablesAndColumnFamilies: Map[String, Iterable[(String, Option[GcRule])]],
createDisposition: CreateDisposition = CreateDisposition.default
): Unit =
ensureTablesImpl(bigtableOptions, tablesAndColumnFamilies, createDisposition).get
/**
* Ensure that tables and column families exist.
* Checks for existence of tables or creates them if they do not exist. Also checks for
* existence of column families within each table and creates them if they do not exist.
*
* @param tablesAndColumnFamilies A map of tables and column families. Keys are table names.
* Values are a list of column family names.
*/
private def ensureTablesImpl(
bigtableOptions: BigtableOptions,
tablesAndColumnFamilies: Map[String, Iterable[(String, Option[GcRule])]],
createDisposition: CreateDisposition
): Try[Unit] = {
val project = bigtableOptions.getProjectId
val instance = bigtableOptions.getInstanceId
val instancePath = s"projects/$project/instances/$instance"
log.info("Ensuring tables and column families exist in instance {}", instance)
adminClient(bigtableOptions) { client =>
val existingTables = fetchTables(client, instancePath)
tablesAndColumnFamilies.foreach { case (table, columnFamilies) =>
val tablePath = s"$instancePath/tables/$table"
val exists = existingTables.contains(tablePath)
createDisposition match {
case _ if exists =>
log.info("Table {} exists", table)
case CreateDisposition.CreateIfNeeded =>
log.info("Creating table {}", table)
client.createTable(
CreateTableRequest
.newBuilder()
.setParent(instancePath)
.setTableId(table)
.build()
)
case CreateDisposition.Never =>
throw new IllegalStateException(s"Table $table does not exist")
}
ensureColumnFamilies(client, tablePath, columnFamilies, createDisposition)
}
}
}
/**
* Ensure that column families exist.
* Checks for existence of column families and creates them if they don't exist.
*
* @param tablePath A full table path that the bigtable API expects, in the form of
* `projects/projectId/instances/instanceId/tables/tableId`
* @param columnFamilies A list of column family names.
*/
private def ensureColumnFamilies(
client: BigtableTableAdminClient,
tablePath: String,
columnFamilies: Iterable[(String, Option[GcRule])],
createDisposition: CreateDisposition
): Unit =
createDisposition match {
case CreateDisposition.CreateIfNeeded =>
val tableInfo =
client.getTable(GetTableRequest.newBuilder().setName(tablePath).build)
val cfList = columnFamilies
.map { case (n, gcRule) =>
val cf = tableInfo
.getColumnFamiliesOrDefault(n, ColumnFamily.newBuilder().build())
.toBuilder
.setGcRule(gcRule.getOrElse(GcRule.getDefaultInstance))
.build()
(n, cf)
}
val modifications =
cfList.map { case (n, cf) =>
val mod = Modification.newBuilder().setId(n)
if (tableInfo.containsColumnFamilies(n)) {
mod.setUpdate(cf)
} else {
mod.setCreate(cf)
}
mod.build()
}
log.info(
"Modifying or updating {} column families for table {}",
modifications.size,
tablePath
)
if (modifications.nonEmpty) {
client.modifyColumnFamily(
ModifyColumnFamiliesRequest
.newBuilder()
.setName(tablePath)
.addAllModifications(modifications.asJava)
.build
)
}
()
case CreateDisposition.Never =>
()
}
private def gcRuleFromDuration(duration: Duration): GcRule = {
val protoDuration = ProtoDuration.newBuilder.setSeconds(duration.getStandardSeconds)
GcRule.newBuilder.setMaxAge(protoDuration).build
}
/**
* Permanently deletes a row range from the specified table that match a particular prefix.
*
* @param table table name
* @param rowPrefix row key prefix
*/
def dropRowRange(bigtableOptions: BigtableOptions, table: String, rowPrefix: String): Try[Unit] =
adminClient(bigtableOptions) { client =>
val project = bigtableOptions.getProjectId
val instance = bigtableOptions.getInstanceId
val instancePath = s"projects/$project/instances/$instance"
val tablePath = s"$instancePath/tables/$table"
dropRowRange(tablePath, rowPrefix, client)
}
/**
* Permanently deletes a row range from the specified table that match a particular prefix.
*
* @param tablePath A full table path that the bigtable API expects, in the form of
* `projects/projectId/instances/instanceId/tables/tableId`
* @param rowPrefix row key prefix
*/
private def dropRowRange(
tablePath: String,
rowPrefix: String,
client: BigtableTableAdminClient
): Unit = {
val request = DropRowRangeRequest
.newBuilder()
.setName(tablePath)
.setRowKeyPrefix(ByteString.copyFrom(rowPrefix, Charset.forName("UTF-8")))
.build()
client.dropRowRange(request)
}
}
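// Hedged usage sketch, not part of the original file: given already-configured
// BigtableOptions, ensure one table exists with a single column family whose cells
// expire after 30 days. The table and column-family names are placeholders.
object TableAdminExample {
  def ensureMyTable(opts: BigtableOptions): Unit =
    TableAdmin.ensureTablesWithExpiration(
      opts,
      Map("my-table" -> List("counts" -> Some(Duration.standardDays(30))))
    )
}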
|
regadas/scio
|
scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigtable/TableAdmin.scala
|
Scala
|
apache-2.0
| 10,506 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite        **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.testsuite.javalib.lang
import org.scalajs.testsuite.utils.ExpectExceptions
import scala.reflect.{classTag, ClassTag}
import scala.scalajs.js
import org.scalajs.jasminetest.JasmineTest
object StringBufferTest extends JasmineTest with ExpectExceptions {
describe("java.lang.StringBuffer") {
def newBuf: java.lang.StringBuffer =
new java.lang.StringBuffer
def initBuf(str: String): java.lang.StringBuffer =
new java.lang.StringBuffer(str)
it("should respond to `append`") {
expect(newBuf.append("asdf").toString).toEqual("asdf")
expect(newBuf.append(null: AnyRef).toString).toEqual("null")
expect(newBuf.append(null: String).toString).toEqual("null")
expect(newBuf.append(null: CharSequence,0,2).toString).toEqual("nu")
expect(newBuf.append(js.undefined).toString).toEqual("undefined")
expect(newBuf.append(true).toString).toEqual("true")
expect(newBuf.append('a').toString).toEqual("a")
expect(newBuf.append(Array('a','b','c','d')).toString).toEqual("abcd")
expect(newBuf.append(Array('a','b','c','d'), 1, 2).toString).toEqual("bc")
expect(newBuf.append(4.toByte).toString).toEqual("4")
expect(newBuf.append(304.toShort).toString).toEqual("304")
expect(newBuf.append(100000).toString).toEqual("100000")
expect(newBuf.append(2.5f).toString).toEqual("2.5")
expect(newBuf.append(3.5).toString).toEqual("3.5")
}
it("should respond to `insert`") {
expect(newBuf.insert(0, "asdf").toString).toEqual("asdf")
expect(newBuf.insert(0, null: AnyRef).toString).toEqual("null")
expect(newBuf.insert(0, null: String).toString).toEqual("null")
expect(newBuf.insert(0, null: CharSequence,0,2).toString).toEqual("nu")
expect(newBuf.insert(0, js.undefined).toString).toEqual("undefined")
expect(newBuf.insert(0, true).toString).toEqual("true")
expect(newBuf.insert(0, 'a').toString).toEqual("a")
expect(newBuf.insert(0, Array('a','b','c','d')).toString).toEqual("abcd")
expect(newBuf.insert(0, Array('a','b','c','d'), 1, 2).toString).toEqual("bc")
expect(newBuf.insert(0, 4.toByte).toString).toEqual("4")
expect(newBuf.insert(0, 304.toShort).toString).toEqual("304")
expect(newBuf.insert(0, 100000).toString).toEqual("100000")
expect(newBuf.insert(0, 2.5f).toString).toEqual("2.5")
expect(newBuf.insert(0, 3.5).toString).toEqual("3.5")
expect(initBuf("adef").insert(1, "bc")).toEqual("abcdef")
expect(initBuf("abcd").insert(4, "ef")).toEqual("abcdef")
expect(initBuf("adef").insert(1, Array('b','c'))).toEqual("abcdef")
expect(initBuf("adef").insert(1, initBuf("bc"))).toEqual("abcdef")
expect(initBuf("abef").insert(2, Array('a','b','c','d','e'), 2, 2)).toEqual("abcdef")
expect(initBuf("abef").insert(2, initBuf("abcde"), 2, 4)).toEqual("abcdef")
expectThrows[StringIndexOutOfBoundsException](initBuf("abcd").insert(5, "whatever"))
expectThrows[StringIndexOutOfBoundsException](initBuf("abcd").insert(-1, "whatever"))
}
it("should respond to `deleteCharAt`") {
expect(initBuf("0123").deleteCharAt(1).toString).toEqual("023")
expect(initBuf("0123").deleteCharAt(0).toString).toEqual("123")
expect(initBuf("0123").deleteCharAt(3).toString).toEqual("012")
expectThrows[StringIndexOutOfBoundsException](initBuf("0123").deleteCharAt(-1))
expectThrows[StringIndexOutOfBoundsException](initBuf("0123").deleteCharAt(4))
}
it("should respond to `replace`") {
expect(initBuf("0123").replace(1,3,"bc").toString).toEqual("0bc3")
expect(initBuf("0123").replace(0,4,"abcd").toString).toEqual("abcd")
expect(initBuf("0123").replace(0,10,"abcd").toString).toEqual("abcd")
expect(initBuf("0123").replace(3,10,"defg").toString).toEqual("012defg")
expect(initBuf("0123").replace(0,1,"xxxx").toString).toEqual("xxxx123")
expect(initBuf("0123").replace(1,1,"xxxx").toString).toEqual("0xxxx123")
expectThrows[StringIndexOutOfBoundsException](initBuf("0123").replace(-1,3,"x"))
expectThrows[StringIndexOutOfBoundsException](initBuf("0123").replace(4,5,"x"))
}
it("should respond to `setCharAt`") {
val buf = newBuf
buf.append("foobar")
buf.setCharAt(2, 'x')
expect(buf.toString).toEqual("foxbar")
buf.setCharAt(5, 'h')
expect(buf.toString).toEqual("foxbah")
expect(() => buf.setCharAt(-1, 'h')).toThrow
expect(() => buf.setCharAt(6, 'h')).toThrow
}
it("should respond to `ensureCapacity`") {
// test that ensureCapacity is linking
expectNoException(newBuf.ensureCapacity(10))
}
it("should properly setLength") {
val buf = newBuf
buf.append("foobar")
expect(() => buf.setLength(-3)).toThrow
expect({ buf.setLength(3); buf.toString }).toEqual("foo")
      expect({ buf.setLength(6); buf.toString }).toEqual("foo\u0000\u0000\u0000")
}
it("should respond to `appendCodePoint`") {
val buf = newBuf
buf.appendCodePoint(0x61)
expect(buf.toString).toEqual("a")
buf.appendCodePoint(0x10000)
      expect(buf.toString).toEqual("a\uD800\uDC00")
      buf.append("fixture")
      buf.appendCodePoint(0x00010FFFF)
      expect(buf.toString).toEqual("a\uD800\uDC00fixture\uDBFF\uDFFF")
}
}
describe("java.lang.StringBuilder") {
def newBuilder: java.lang.StringBuilder =
new java.lang.StringBuilder
def initBuilder(str: String): java.lang.StringBuilder =
new java.lang.StringBuilder(str)
it("should respond to `append`") {
expect(newBuilder.append("asdf").toString).toEqual("asdf")
expect(newBuilder.append(null: AnyRef).toString).toEqual("null")
expect(newBuilder.append(null: String).toString).toEqual("null")
expect(newBuilder.append(null: CharSequence,0,2).toString).toEqual("nu")
expect(newBuilder.append(js.undefined).toString).toEqual("undefined")
expect(newBuilder.append(true).toString).toEqual("true")
expect(newBuilder.append('a').toString).toEqual("a")
expect(newBuilder.append(Array('a','b','c','d')).toString).toEqual("abcd")
expect(newBuilder.append(Array('a','b','c','d'), 1, 2).toString).toEqual("bc")
expect(newBuilder.append(4.toByte).toString).toEqual("4")
expect(newBuilder.append(304.toShort).toString).toEqual("304")
expect(newBuilder.append(100000).toString).toEqual("100000")
expect(newBuilder.append(2.5f).toString).toEqual("2.5")
expect(newBuilder.append(3.5).toString).toEqual("3.5")
}
it("should respond to `insert`") {
expect(newBuilder.insert(0, "asdf").toString).toEqual("asdf")
expect(newBuilder.insert(0, null: AnyRef).toString).toEqual("null")
expect(newBuilder.insert(0, null: String).toString).toEqual("null")
expect(newBuilder.insert(0, null: CharSequence,0,2).toString).toEqual("nu")
expect(newBuilder.insert(0, js.undefined).toString).toEqual("undefined")
expect(newBuilder.insert(0, true).toString).toEqual("true")
expect(newBuilder.insert(0, 'a').toString).toEqual("a")
expect(newBuilder.insert(0, Array('a','b','c','d')).toString).toEqual("abcd")
expect(newBuilder.insert(0, Array('a','b','c','d'), 1, 2).toString).toEqual("bc")
expect(newBuilder.insert(0, 4.toByte).toString).toEqual("4")
expect(newBuilder.insert(0, 304.toShort).toString).toEqual("304")
expect(newBuilder.insert(0, 100000).toString).toEqual("100000")
expect(newBuilder.insert(0, 2.5f).toString).toEqual("2.5")
expect(newBuilder.insert(0, 3.5).toString).toEqual("3.5")
expect(initBuilder("adef").insert(1, "bc")).toEqual("abcdef")
expect(initBuilder("abcd").insert(4, "ef")).toEqual("abcdef")
expect(initBuilder("adef").insert(1, Array('b','c'))).toEqual("abcdef")
expect(initBuilder("adef").insert(1, initBuilder("bc"))).toEqual("abcdef")
expect(initBuilder("abef").insert(2, Array('a','b','c','d','e'), 2, 2)).toEqual("abcdef")
expect(initBuilder("abef").insert(2, initBuilder("abcde"), 2, 4)).toEqual("abcdef")
expectThrows[StringIndexOutOfBoundsException](initBuilder("abcd").insert(5, "whatever"))
expectThrows[StringIndexOutOfBoundsException](initBuilder("abcd").insert(-1, "whatever"))
}
it("should allow string interpolation to survive `null` and `undefined`") {
expect(s"${null}").toEqual("null")
expect(s"${js.undefined}").toEqual("undefined")
}
it("should respond to `deleteCharAt`") {
expect(initBuilder("0123").deleteCharAt(1).toString).toEqual("023")
expect(initBuilder("0123").deleteCharAt(0).toString).toEqual("123")
expect(initBuilder("0123").deleteCharAt(3).toString).toEqual("012")
expectThrows[StringIndexOutOfBoundsException](initBuilder("0123").deleteCharAt(-1))
expectThrows[StringIndexOutOfBoundsException](initBuilder("0123").deleteCharAt(4))
}
it("should respond to `replace`") {
expect(initBuilder("0123").replace(1,3,"bc").toString).toEqual("0bc3")
expect(initBuilder("0123").replace(0,4,"abcd").toString).toEqual("abcd")
expect(initBuilder("0123").replace(0,10,"abcd").toString).toEqual("abcd")
expect(initBuilder("0123").replace(3,10,"defg").toString).toEqual("012defg")
expect(initBuilder("0123").replace(0,1,"xxxx").toString).toEqual("xxxx123")
expect(initBuilder("0123").replace(1,1,"xxxx").toString).toEqual("0xxxx123")
expectThrows[StringIndexOutOfBoundsException](initBuilder("0123").replace(-1,3,"x"))
expectThrows[StringIndexOutOfBoundsException](initBuilder("0123").replace(4,5,"x"))
}
it("should respond to `setCharAt`") {
val b = newBuilder
b.append("foobar")
b.setCharAt(2, 'x')
expect(b.toString).toEqual("foxbar")
b.setCharAt(5, 'h')
expect(b.toString).toEqual("foxbah")
expect(() => b.setCharAt(-1, 'h')).toThrow
expect(() => b.setCharAt(6, 'h')).toThrow
}
it("should respond to `ensureCapacity`") {
// test that ensureCapacity is linking
expectNoException(newBuilder.ensureCapacity(10))
}
it("should properly setLength") {
val b = newBuilder
b.append("foobar")
expect(() => b.setLength(-3)).toThrow
expect({ b.setLength(3); b.toString }).toEqual("foo")
      expect({ b.setLength(6); b.toString }).toEqual("foo\u0000\u0000\u0000")
}
it("should respond to `appendCodePoint`") {
val b = newBuilder
b.appendCodePoint(0x61)
expect(b.toString).toEqual("a")
b.appendCodePoint(0x10000)
      expect(b.toString).toEqual("a\uD800\uDC00")
      b.append("fixture")
      b.appendCodePoint(0x00010FFFF)
      expect(b.toString).toEqual("a\uD800\uDC00fixture\uDBFF\uDFFF")
}
}
}
|
andreaTP/scala-js
|
test-suite/src/test/scala/org/scalajs/testsuite/javalib/lang/StringBufferTest.scala
|
Scala
|
bsd-3-clause
| 11,358 |
/*
* Copyright (c) 2014, Brook 'redattack34' Heisler
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the ModularRayguns team nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.castlebravostudios.rayguns.api.items
import net.minecraft.util.ResourceLocation
trait RaygunChamber extends RaygunModule {
def registerShotHandlers() : Unit
def chargeTexture : ResourceLocation
}
|
Redattack34/ModularRayguns
|
src/main/scala/com/castlebravostudios/rayguns/api/items/RaygunChamber.scala
|
Scala
|
bsd-3-clause
| 1,812 |
package edu.gemini.ui.workspace.scala
import edu.gemini.ui.workspace.IShell
import java.util.logging.{Level, Logger}
import javax.swing.SwingUtilities
import java.io.File
object RichShell {
private val log = Logger.getLogger(classOf[RichShell[_]].getName)
}
// N.B. this isn't threadsafe. It expects to be on the UI dispatch thread.
class RichShell[A](shell:IShell) {
import RichShell._
// file holds the associated file, if any. This association is largely independent of the model, although if the
// model is set to None then so is the file. The idea is that undo should not affect the file association.
private var myFile:Option[File] = None
// myModel holds our current unstable state
private var myModel:Option[A] = None
// We have a list of listeners who are notified of model/pivot changes
private var listeners:List[() => Unit] = Nil
// If this is true, we're in the process of pushing state, which is not re-entrant
private var updating = false
  // If this is true, an old model was rolled into a new version.
  // Required to support the special case that the model was upgraded.
  // It may make more sense as a method on the model A.
private var wasRolled = false
// Undo and redo are stacks of state
private var undoStack:List[A] = Nil
private var redoStack:List[A] = Nil
// Our pivot indicates the last savepoint
private var pivot:Option[A] = None
// Some one-liner predicates
def isModified = model != pivot
def isClean = !isModified
def canUndo = undoStack.nonEmpty
def canRedo = redoStack.nonEmpty
def isRolled = wasRolled
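  // Typical lifecycle (illustrative sketch, not from the original source; `rs` is a RichShell instance):
  //   rs.init(Some(doc), Some(file), wasRolled = false)  // load a document
  //   rs.model_=(Some(editedDoc), undoable = true)       // edit: pushes the previous state onto the undo stack
  //   rs.checkpoint()                                    // save: moves the pivot, so isClean is true
  //   rs.undo()                                          // revert to the previous state; isModified is true again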
// Make a new state current, or clear out everything if the new state is None.
private def push(a:Option[A], undoable:Boolean) = ui {
undoStack = (a, model) match {
case (Some(_), Some(b)) if undoable => b :: undoStack
case (Some(_), Some(b)) => undoStack
case _ => Nil
}
redoStack = Nil
if (a.isEmpty) pivot = None
commit(a)
}
def undo() = ui {
assert(model.isDefined)
val (un, m, re) = roll(undoStack, model.get, redoStack)
undoStack = un
redoStack = re
commit(Some(m))
}
def redo() = ui {
assert(model.isDefined)
val (re, m, un) = roll(redoStack, model.get, undoStack)
undoStack = un
redoStack = re
commit(Some(m))
}
def checkpoint() = ui {
// We saved so we are not rolled anymore
wasRolled = false
pivot = model
commit(model)
}
private def commit(a:Option[A]) = ui {
myModel = a
notifyListeners()
shell.setModel(a.orNull)
}
private def notifyListeners() = ui {
synchronized {
//println("Notifying of state. [%d] %s [%d]".format(undoStack.length, myModel.getClass.getName, redoStack.length))
listeners.foreach(_())
}
}
def model = myModel
def model_=(newModel:Option[A], undoable:Boolean = true) = ui {
if (newModel != model) {
// This method is not re-entrant
if (updating)
log.log(Level.WARNING, "Concurrent model change.", new Exception)
      try {
        updating = true
        push(newModel, undoable)
      } finally {
        updating = false
      }
} else {
log.log(Level.WARNING, "Unchanged model discarded.", new Exception)
}
}
def file = myFile
def file_=(f:Option[File]) = ui {
(model, f) match {
case (None, None) => // nop
case (Some(_), Some(f)) =>
// This is the only valid case
myFile = Some(f)
notifyListeners()
case (Some(_), None) =>
log.log(Level.WARNING, "Discarding file detach.", new Exception)
case (None, _) =>
log.log(Level.WARNING, "Discarding file attach for empty model.", new Exception)
}
}
def init(model:Option[A], f:Option[File], wasRolled: Boolean) = ui {
this.wasRolled = wasRolled
undoStack = Nil
redoStack = Nil
myFile = f
pivot = model
commit(model)
}
def close() = shell.close()
def listen(f: => Unit) = synchronized {
f
listeners = (() => f) :: listeners
}
private def roll[A](src:List[A], a:A, dst:List[A]):(List[A], A, List[A]) = {
assert(src.nonEmpty)
(src.tail, src.head, a :: dst)
}
def peer = shell.getPeer
def context = shell.getContext
def advisor = shell.getAdvisor
private def ui[A](f: => A):A = {
require(SwingUtilities.isEventDispatchThread, "This method can only be called from the event dispatch thread.")
f
}
}
|
arturog8m/ocs
|
bundle/edu.gemini.ui.workspace/src/main/scala/edu/gemini/ui/workspace/scala/RichShell.scala
|
Scala
|
bsd-3-clause
| 4,455 |
/*
* Copyright 2017 Guy Van den Broeck <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai;
import org.scalatest.FlatSpec
import scala.language.existentials
import edu.ucla.cs.starai.logic._
import edu.ucla.cs.starai.sdd.manager.normalized.SDDManager
class CompilationSpec extends FlatSpec with SDDBehaviors {
behavior of "A balanced Vtree of 8 variables"
val vtree = VTree.balanced(8)
it should "have 8 variables" in {
assert(vtree.variables.size === 8)
}
val mgr = SDDManager(vtree)
behavior of "A buildFalse SDD"
it should behave like correctSize(mgr.False,7)
it should behave like correctModelCount(mgr.False,0)
behavior of "A buildTrue SDD"
it should behave like correctSize(mgr.True,7)
it should behave like correctModelCount(mgr.True,scala.math.pow(2,8).toInt)
val x1 = mgr.literal(1)
val x2 = mgr.literal(2)
val x3 = mgr.literal(3)
val x4 = mgr.literal(4)
val x5 = mgr.literal(5)
val x6 = mgr.literal(6)
val x7 = mgr.literal(7)
val x8 = mgr.literal(8)
behavior of "A clause"
val c1 = !x1 || x2 || !x5
it should behave like correctSize(c1,22)
it should behave like correctModelCount(c1,(BigInt(2).pow(8)-BigInt(2).pow(8-3)).toInt)
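  // Sanity check of the count above: a single 3-literal clause over 8 variables rules out exactly
  // the 2^(8-3) = 32 assignments that falsify all three literals, leaving 256 - 32 = 224 models.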
behavior of "Another clause"
val c2 = x1 || !x3 || x6
it should behave like correctSize(c2,24)
it should behave like correctModelCount(c2,(BigInt(2).pow(8)-BigInt(2).pow(8-3)).toInt)
val c3 = !x2 || x4 || x7
val c4 = x2 || x7 || !x8
val c5 = !x3 || !x4 || x6
val c6 = x4 || !x6 || x8
val c7 = x3 || x4 || !x8
val c8 = !x5 || x6 || !x7
val c9 = x6 || x7 || !x8
behavior of "A CNF"
val cnf = c1 && c2 && c3 && c4 && c5 && c6 && c7 && c8 && c9
it should behave like correctSize(cnf,122)
it should behave like correctModelCount(cnf,84)
behavior of "An inconsistent SDD"
val unsat = cnf && !c5
it should behave like correctSize(unsat,7)
it should behave like correctModelCount(unsat,0)
}
|
UCLA-StarAI/ScalaDD
|
src/test/scala/edu/ucla/cs/starai/CompilationSpec.scala
|
Scala
|
apache-2.0
| 2,526 |
package tests
private object Private {
def bar = ???
}
|
triplequote/intellij-scala
|
scala/scala-impl/testdata/rename3/privateMemberSamePackage/before/tests/Private.scala
|
Scala
|
apache-2.0
| 58 |
package no.ntnu.httpmock.servlet
import com.orbekk.logging.Logger
import javax.servlet.http.HttpServlet
import javax.servlet.http.HttpServletRequest
import javax.servlet.http.HttpServletResponse
import no.ntnu.httpmock.Mock
import no.ntnu.httpmock.MockProvider
import no.ntnu.httpmock.DummyMockResponse
class MockServlet(mockProvider: MockProvider,
var unexpectedCallHandler: HttpServlet)
extends HttpServlet with Logger {
override protected def doGet(request: HttpServletRequest,
response: HttpServletResponse) {
val requestString = request.getRequestURI()
mockProvider.getResponseFor(request) match {
case Some(mock) => serve(request, response, mock)
case None => unexpectedCall(request, response, requestString)
}
}
private def serve(request: HttpServletRequest, response: HttpServletResponse,
mock: Mock) {
logger.info("Serving mock: " + mock.descriptor)
mock.writeResponseTo(response)
}
private def unexpectedCall(request: HttpServletRequest,
response: HttpServletResponse, requestString: String) {
logger.info("Unexpected call: " + requestString)
response.sendError(HttpServletResponse.SC_FORBIDDEN, "Unexpected call")
unexpectedCallHandler.service(request, response)
}
}
|
orbekk/HttpMock
|
src/main/scala/no/ntnu/httpmock/servlet/MockServlet.scala
|
Scala
|
apache-2.0
| 1,264 |
package org.labrad.registry
import java.io.{ByteArrayOutputStream, File, FileInputStream, FileOutputStream}
import java.net.{URLDecoder, URLEncoder}
import java.nio.ByteOrder.BIG_ENDIAN
import java.nio.charset.StandardCharsets.UTF_8
import org.labrad.data._
import org.labrad.types._
import scala.annotation.tailrec
/**
* Base class for registry stores that use a separate file for each key.
*
* Concrete implementations must specify how to encode and decode key and
* directory names, and also how to encode and decode labrad data.
*/
abstract class FileStore(rootDir: File) extends RegistryStore {
type Dir = File
val root = rootDir.getAbsoluteFile
require(root.isDirectory, s"registry root is not a directory: $root")
def DIR_EXT = ".dir"
def KEY_EXT = ".key"
/**
* Encode and decode strings for use as filenames.
*/
def encode(segment: String): String
def decode(segment: String): String
/**
* Encode and decode data for storage in individual key files.
*/
def encodeData(data: Data): Array[Byte]
def decodeData(bytes: Array[Byte]): Data
/**
* Convert the given directory into a registry path.
*/
def pathTo(dir: File): Seq[String] = {
@tailrec
def fun(dir: File, rest: Seq[String]): Seq[String] = {
if (dir == root)
"" +: rest
else
fun(dir.getParentFile, decode(dir.getName.stripSuffix(DIR_EXT)) +: rest)
}
fun(dir, Nil)
}
def parent(dir: File): File = {
if (dir == root) dir else dir.getParentFile
}
def dir(dir: File): (Seq[String], Seq[String]) = {
val files = dir.listFiles()
val dirs = for (f <- files if f.isDirectory; n = f.getName if n.endsWith(DIR_EXT)) yield decode(n.stripSuffix(DIR_EXT))
val keys = for (f <- files if f.isFile; n = f.getName if n.endsWith(KEY_EXT)) yield decode(n.stripSuffix(KEY_EXT))
(dirs, keys)
}
def childImpl(parent: File, name: String, create: Boolean): (File, Boolean) = {
val dir = new File(parent, encode(name) + DIR_EXT)
val created = if (dir.exists) {
false
} else {
if (!create) sys.error(s"directory does not exist: $name")
dir.mkdir()
true
}
(dir, created)
}
def rmDirImpl(dir: File, name: String): Unit = {
val path = child(dir, name, create = false)
if (!path.exists) sys.error(s"directory does not exist: $name")
if (path.isFile) sys.error(s"found file instead of directory: $name")
if (path.list.nonEmpty) sys.error(s"cannot remove non-empty directory: $name")
val ok = path.delete()
if (!ok) sys.error(s"failed to remove directory: $name")
}
def getValue(dir: File, key: String, default: Option[(Boolean, Data)]): Data = {
val path = keyFile(dir, key)
if (path.exists) {
val bytes = readFile(path)
decodeData(bytes)
} else {
default match {
case None => sys.error(s"key does not exist: $key")
case Some((set, default)) =>
if (set) setValue(dir, key, default)
default
}
}
}
def setValueImpl(dir: File, key: String, value: Data): Unit = {
val path = keyFile(dir, key)
val bytes = encodeData(value)
writeFile(path, bytes)
}
def deleteImpl(dir: File, key: String): Unit = {
val path = keyFile(dir, key)
if (!path.exists) sys.error(s"key does not exist: $key")
if (path.isDirectory) sys.error(s"found directory instead of file: $key")
path.delete()
}
private def readFile(file: File): Array[Byte] = {
val is = new FileInputStream(file)
try {
val os = new ByteArrayOutputStream()
val buf = new Array[Byte](10000)
var done = false
while (!done) {
val read = is.read(buf)
if (read >= 0) os.write(buf, 0, read)
done = read < 0
}
os.toByteArray
} finally {
is.close
}
}
private def writeFile(file: File, contents: Array[Byte]) {
val os = new FileOutputStream(file)
try
os.write(contents)
finally
os.close
}
private def keyFile(dir: File, key: String) = new File(dir, encode(key) + KEY_EXT)
}
/**
* File store implementation that uses url encoding for file and key names
* and stores data in binary format.
*/
class BinaryFileStore(rootDir: File) extends FileStore(rootDir) {
implicit val byteOrder = BIG_ENDIAN
/**
* Encode arbitrary string in a format suitable for use as a filename.
*
* We use URLEncoder to encode special characters, which handles all the
* special characters prohibited by most OSs. We must also manually
* replace * by %2A as this is not replaced by the URLEncoder, but
* it is properly decoded by the URLDecoder, so no special handling
* is needed there.
*/
override def encode(segment: String): String = {
URLEncoder.encode(segment, UTF_8.name).replace("*", "%2A")
}
override def decode(segment: String): String = {
URLDecoder.decode(segment, UTF_8.name)
}
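  // Illustrative round trip (not part of the original source): encode("a b*c") yields
  // "a+b%2Ac" (URLEncoder maps the space to '+' and '*' is replaced manually), and
  // decode("a+b%2Ac") recovers "a b*c".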
/**
* Encode and decode data for storage in individual key files.
*/
override def encodeData(data: Data): Array[Byte] = {
Cluster(Str(data.t.toString), Bytes(data.toBytes)).toBytes
}
override def decodeData(bytes: Array[Byte]): Data = {
val (typ, data) = Data.fromBytes(Type("sy"), bytes).get[(String, Array[Byte])]
Data.fromBytes(Type(typ), data)
}
}
|
labrad/scalabrad
|
manager/src/main/scala/org/labrad/registry/FileStore.scala
|
Scala
|
mit
| 5,307 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package base
package patterns
import com.intellij.lang.ASTNode
import com.intellij.psi._
import com.intellij.psi.scope.PsiScopeProcessor
import org.jetbrains.plugins.scala.extensions.PsiTypeExt
import org.jetbrains.plugins.scala.lang.lexer._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScTypeParam
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, Nothing, ParameterizedType}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.types.{ScExistentialType, api, _}
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
class ScTypedPatternImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScTypedPattern {
override def accept(visitor: PsiElementVisitor) {
visitor match {
case visitor: ScalaElementVisitor => super.accept(visitor)
case _ => super.accept(visitor)
}
}
def nameId: PsiElement = findChildByType[PsiElement](TokenSets.ID_SET)
def isWildcard: Boolean = findChildByType[PsiElement](ScalaTokenTypes.tUNDER) != null
override def isIrrefutableFor(t: Option[ScType]): Boolean = {
t match {
case Some(t) => getType(TypingContext.empty) match {
case Success(tp, _) if t conforms tp => true
case _ => false
}
case _ => false
}
}
override def toString: String = "TypedPattern: " + name
override def getType(ctx: TypingContext): TypeResult[ScType] = {
implicit val typeSystem = this.typeSystem
typePattern match {
case Some(tp) =>
if (tp.typeElement == null) return Failure("No type element for type pattern", Some(this))
val typeElementType: TypeResult[ScType] =
tp.typeElement.getType(ctx).map {
case tp: ScExistentialType =>
val skolem = tp.quantified
skolem.extractClassType(getProject) match { //todo: type aliases?
case Some((clazz: ScTypeDefinition, subst)) =>
val typeParams = clazz.typeParameters
skolem match {
case ParameterizedType(des, typeArgs) if typeArgs.length == typeParams.length =>
ScParameterizedType(des, typeArgs.zip(typeParams).map {
case (arg: ScExistentialArgument, param: ScTypeParam) =>
val lowerBound =
if (arg.lower.equiv(Nothing)) subst subst param.lowerBound.getOrNothing
else arg.lower //todo: lub?
val upperBound =
if (arg.upper.equiv(Any)) subst subst param.upperBound.getOrAny
else arg.upper //todo: glb?
ScExistentialArgument(arg.name, arg.args, lowerBound, upperBound)
case (tp: ScType, param: ScTypeParam) => tp
}).unpackedType
case _ => tp
}
case Some((clazz: PsiClass, subst)) =>
val typeParams: Array[PsiTypeParameter] = clazz.getTypeParameters
skolem match {
case ParameterizedType(des, typeArgs) if typeArgs.length == typeParams.length =>
ScParameterizedType(des, typeArgs.zip(typeParams).map {
case (arg: ScExistentialArgument, param: PsiTypeParameter) =>
val lowerBound = arg.lower
val upperBound =
if (arg.upper.equiv(api.Any)) {
val listTypes: Array[PsiClassType] = param.getExtendsListTypes
if (listTypes.isEmpty) api.Any
else subst.subst(listTypes.toSeq.map(_.toScType(getProject, param.getResolveScope)).glb(checkWeak = true))
} else arg.upper //todo: glb?
ScExistentialArgument(arg.name, arg.args, lowerBound, upperBound)
case (tp: ScType, _) => tp
}).unpackedType
case _ => tp
}
case _ => tp
}
case tp: ScType => tp
}
expectedType match {
case Some(expectedType) =>
typeElementType.map {
case resType => expectedType.glb(resType, checkWeak = false)
}
case _ => typeElementType
}
case None => Failure("No type pattern", Some(this))
}
}
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState, lastParent: PsiElement,
place: PsiElement) = {
ScalaPsiUtil.processImportLastParent(processor, state, place, lastParent, getType(TypingContext.empty))
}
override def getOriginalElement: PsiElement = super[ScTypedPattern].getOriginalElement
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/impl/base/patterns/ScTypedPatternImpl.scala
|
Scala
|
apache-2.0
| 5,208 |
/**
* Copyright 2015 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.csv.akka.serviceregistry
import akka.actor._
import akka.persistence.{PersistentActor, RecoveryCompleted, SaveSnapshotSuccess, SnapshotOffer}
import com.comcast.csv.akka.serviceregistry.ServiceRegistryInternalProtocol.End
import com.comcast.csv.common.protocol.ServiceRegistryProtocol._
import scala.collection.mutable
/**
* Companion of ServiceRegistry.
*/
object ServiceRegistry {
def props = Props[ServiceRegistry]
val identity = "serviceRegistry"
}
/**
* Service registry for Actor implemented service endpoints.
*
* @author dbolene
*/
class ServiceRegistry extends PersistentActor with ActorLogging {
  // Set[subscriberOrPublisher]
val subscribersPublishers = scala.collection.mutable.Set.empty[ActorRef]
// Map[subscriber,Set[subscribedTo]]
val subscribers = scala.collection.mutable.HashMap.empty[ActorRef, mutable.HashSet[String]]
// Map[published,publisher]
val publishers = scala.collection.mutable.HashMap.empty[String, ActorRef]
log.info(s"=================== ServiceRegistry created ===================")
override val persistenceId: String = ServiceRegistry.identity
def recordSubscriberPublisher(subpub: AddSubscriberPublisher): Unit = {
subscribersPublishers += subpub.subscriberPublisher
}
def considerRememberParticipant(participant: ActorRef): Unit = {
if (!subscribersPublishers.contains(participant)) {
val add = AddSubscriberPublisher(participant)
persist(add)(recordSubscriberPublisher)
}
}
def unrecordSubscriberPublisher(subpub: RemoveSubscriberPublisher): Unit = {
subscribersPublishers -= subpub.subscriberPublisher
}
def considerForgetParticipant(participant: ActorRef): Unit = {
def isSubscriberPublisherStillInUse(subpub: ActorRef): Boolean = {
subscribers.contains(subpub) ||
publishers.exists { case (serviceName, endPoint) => endPoint == subpub }
}
if (subscribersPublishers.contains(participant) && !isSubscriberPublisherStillInUse(participant)) {
val remove = RemoveSubscriberPublisher(participant)
persist(remove)(unrecordSubscriberPublisher)
}
}
override def receiveRecover: Receive = {
case add: AddSubscriberPublisher =>
log.info(s"Received -> AddSubscriberPublisher: $add")
recordSubscriberPublisher(add)
case remove: RemoveSubscriberPublisher =>
log.info(s"Received -> RemoveSubscriberPublisher: $remove")
unrecordSubscriberPublisher(remove)
case SnapshotOffer(_, snapshot: SnapshotAfterRecover) =>
log.info(s"Received -> SnapshotOffer")
// do nothing
case RecoveryCompleted =>
log.info(s"Received -> RecoveryCompleted")
val registryHasRestarted = RegistryHasRestarted(self)
subscribersPublishers.foreach(sp => sp ! registryHasRestarted)
subscribersPublishers.clear()
saveSnapshot(SnapshotAfterRecover())
}
override def receiveCommand: Receive = {
case ps: PublishService =>
log.info(s"Received -> PublishService: $ps")
publishers += (ps.serviceName -> ps.serviceEndpoint)
subscribers.filter(p => p._2.contains(ps.serviceName))
.foreach(p => p._1 ! ServiceAvailable(ps.serviceName, ps.serviceEndpoint))
context.watch(ps.serviceEndpoint)
considerRememberParticipant(ps.serviceEndpoint)
case ups: UnPublishService =>
log.info(s"Received -> UnPublishService: $ups")
val serviceEndpoint = publishers.get(ups.serviceName)
publishers.remove(ups.serviceName)
subscribers.filter(p => p._2.contains(ups.serviceName))
.foreach(p => p._1 ! ServiceUnAvailable(ups.serviceName))
serviceEndpoint.foreach(ep => considerForgetParticipant(ep))
case ss: SubscribeToService =>
log.info(s"Received -> SubscribeToService: $ss")
subscribers += (sender() -> subscribers.get(sender())
.orElse(Some(new mutable.HashSet[String])).map(s => {
s + ss.serviceName
})
.getOrElse(new mutable.HashSet[String]))
publishers.filter(p => p._1 == ss.serviceName)
.foreach(p => sender() ! ServiceAvailable(ss.serviceName, p._2))
considerRememberParticipant(sender())
case us: UnSubscribeToService =>
log.info(s"Received -> UnSubscribeToService: $us")
subscribers += (sender() -> subscribers.get(sender())
.orElse(Some(new mutable.HashSet[String])).map(s => {
s - us.serviceName
})
.getOrElse(new mutable.HashSet[String]))
considerForgetParticipant(sender())
case terminated: Terminated =>
log.info(s"Received -> Terminated: $terminated")
publishers.find(p => p._2 == terminated.getActor).foreach(p2 => {
subscribers.filter(p3 => p3._2.contains(p2._1))
.foreach(p4 => p4._1 ! ServiceUnAvailable(p2._1))
})
case sss: SaveSnapshotSuccess =>
log.info(s"Received -> SaveSnapshotSuccess: $sss")
case End =>
log.info(s"Received -> End")
case msg =>
log.warning(s"Received unknown message: $msg")
}
}
/**
* Private ServiceRegistry messages.
*/
object ServiceRegistryInternalProtocol {
case object End
}
case class AddSubscriberPublisher(subscriberPublisher: ActorRef)
case class RemoveSubscriberPublisher(subscriberPublisher: ActorRef)
case class SnapshotAfterRecover()
|
GitOutATown/ActorServiceRegistry
|
serviceRegistry/src/main/scala/com/comcast/csv/akka/serviceregistry/ServiceRegistry.scala
|
Scala
|
apache-2.0
| 5,913 |
package com.github.mdr.mash.evaluator
import com.github.mdr.mash.parser.AbstractSyntax.{ LookupExpr, ThisExpr }
import com.github.mdr.mash.runtime._
object LookupEvaluator extends EvaluatorHelper {
def evaluateLookupExpr(lookupExpr: LookupExpr)(implicit context: EvaluationContext): MashValue = {
val LookupExpr(targetExpr, indexExpr, _) = lookupExpr
val target = Evaluator.evaluate(targetExpr)
val thisTarget = targetExpr.isInstanceOf[ThisExpr]
val index = Evaluator.evaluate(indexExpr)
index match {
case MashString(memberName, _) ⇒
MemberEvaluator.lookupByString(target, memberName, includePrivate = thisTarget, sourceLocation(indexExpr))
case n: MashNumber ⇒
val i = n.asInt.getOrElse(throw EvaluatorException("Unable to lookup, non-integer index: " + n, sourceLocation(lookupExpr)))
target match {
case xs: MashList ⇒
val index = if (i < 0) i + xs.size else i
if (index >= xs.size)
throw EvaluatorException("Index out of range " + n, sourceLocation(indexExpr))
xs(index)
case s: MashString ⇒
s.lookup(i)
case obj: MashObject ⇒
MemberEvaluator.lookup(target, index, includePrivate = thisTarget, sourceLocation(indexExpr))
case _ ⇒
throw EvaluatorException("Unable to lookup in target of type " + target.typeName, sourceLocation(lookupExpr))
}
case _ ⇒
target match {
case obj: MashObject ⇒
MemberEvaluator.lookup(target, index, includePrivate = thisTarget, sourceLocation(indexExpr))
case _ ⇒
throw EvaluatorException("Unable to lookup index of type " + index.typeName, sourceLocation(indexExpr))
}
}
}
}
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/evaluator/LookupEvaluator.scala
|
Scala
|
mit
| 1,862 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.api.auth.oauth2
/** A profile retrieved from an authentication provider cannot be parsed */
case class InvalidProfileException(profile: String, errors: Seq[String], cause: Throwable = null)
extends IllegalArgumentException(
s"Cannot parse user profile: ${errors.mkString(", ")}\\nprofile = $profile", cause)
|
telefonicaid/fiware-cosmos-platform
|
cosmos-api/app/es/tid/cosmos/api/auth/oauth2/InvalidProfileException.scala
|
Scala
|
apache-2.0
| 976 |
/**
* Copyright: Copyright (C) 2016, ATS Advanced Telematic Systems GmbH
* License: MPL-2.0
*/
package org.genivi.sota.data
import java.security.InvalidParameterException
import eu.timepit.refined.api.Refined
import org.scalacheck.{Arbitrary, Gen}
trait PackageIdGenerators {
/**
* For property based testing purposes, we need to explain how to
* randomly generate package ids.
*
* @see [[https://www.scalacheck.org/]]
*/
val genPackageIdName: Gen[PackageId.Name] =
Gen.nonEmptyContainerOf[List, Char](Gen.alphaNumChar).map(cs => Refined.unsafeApply(cs.mkString))
val genPackageIdVersion: Gen[PackageId.Version] =
Gen.listOfN(3, Gen.choose(0, 999)).map(_.mkString(".")).map(Refined.unsafeApply) // scalastyle:ignore magic.number
def genConflictFreePackageIdVersion(n: Int): Seq[PackageId.Version] = {
import GeneratorOps._
if(n < 2) throw new InvalidParameterException("n must be greater than or equal to 2")
var versions = Set(genPackageIdVersion.generate)
while(versions.size < n) {
val v = genPackageIdVersion.generate
if(!versions.contains(v)) {
versions += v
}
}
versions.toSeq
}
val genPackageId: Gen[PackageId] =
for {
name <- genPackageIdName
version <- genPackageIdVersion
} yield PackageId(name, version)
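  // Illustrative sample (values are random): genPackageId.sample might yield a PackageId with an
  // alphanumeric name such as "x7Rt2" and a three-part version such as "101.7.345".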
implicit lazy val arbPackageId: Arbitrary[PackageId] =
Arbitrary(genPackageId)
}
object PackageIdGenerators extends PackageIdGenerators
/**
* Generators for invalid data are kept in dedicated scopes
* to rule out their use as implicits (impersonating valid ones).
*/
trait InvalidPackageIdGenerators extends InvalidIdentGenerators {
val genInvalidPackageIdName: Gen[PackageId.Name] =
genInvalidIdent map Refined.unsafeApply
def getInvalidPackageIdName: PackageId.Name =
genInvalidPackageIdName.sample.getOrElse(getInvalidPackageIdName)
val genInvalidPackageIdVersion: Gen[PackageId.Version] =
Gen.identifier.map(s => s + ".0").map(Refined.unsafeApply)
def getInvalidPackageIdVersion: PackageId.Version =
genInvalidPackageIdVersion.sample.getOrElse(getInvalidPackageIdVersion)
val genInvalidPackageId: Gen[PackageId] =
for {
name <- genInvalidPackageIdName
version <- genInvalidPackageIdVersion
} yield PackageId(name, version)
def getInvalidPackageId: PackageId =
genInvalidPackageId.sample.getOrElse(getInvalidPackageId)
}
object InvalidPackageIdGenerators extends InvalidPackageIdGenerators
|
PDXostc/rvi_sota_server
|
common-test/src/main/scala/org/genivi/sota/data/PackageIdGenerators.scala
|
Scala
|
mpl-2.0
| 2,511 |
package web.protocols
import play.api.libs.json._
import play.api.libs.functional.syntax._ // Combinator syntax
sealed trait Error {
def code: Int
def message: JsValue
}
object Error {
case class BadRequest(message: JsValue) extends Error {
val code = 400
}
implicit val writes = new Writes[Error] {
def writes(error: Error): JsValue = Json.obj(
"code" -> error.code,
"message" -> error.message
)
}
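  // Example output (illustrative): Json.toJson(BadRequest(JsString("missing field")): Error)
  // serializes to {"code":400,"message":"missing field"}.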
}
|
OmniaGM/activator-spray-twitter
|
web/app/web/protocols/Error.scala
|
Scala
|
apache-2.0
| 441 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.geohash
import com.vividsolutions.jts.geom.Geometry
/**
* <p/>
* Iterates over the GeoHashes at a fixed resolution within a given rectangle. The difference
* between this iterator and the default iteration available through GeoHash is that this
* iterator considers only those GeoHashes known to be inside the bounding rectangle.
* <p/>
* The GeoHash is a combination of hashes of its two dimensions, latitude and longitude; the
* total precision of the GeoHash is defined as a fixed number of bits.
* <p/>
* Each bit represents an interval-halving decision. The first longitude bit, then, is interpreted as
 * follows: a 0 means that the target longitude is on the interval [0, 180); a 1 means it's on [180, 360].
* The first two bits together define a cell that is 90 degrees wide; the first three bits together define
* a cell that is 45 degrees wide, etc.
* <p/>
* The bit-strings from the two dimensions are interleaved (they alternate), beginning with longitude,
* so a GeoHash of 10110 (read Lon-Lat-Lon-Lat-Lon) consists of three bits of longitude (110) and two bits of latitude (01).
* The following is an example of how the GeoHashes (at 5-bits precision) progress:
* <p/>
* Longitude
* 000 001 010 011 100 101 110 111
* ----- ----- ----- ----- ----- ----- ----- -----
* 11 | 01010 01011 01110 01111 11010 11011 11110 11111
* 10 | 01000 01001 01100 01101 11000 11001 11100 11101
* 01 | 00010 00011 00110 00111 10010 10011 10110 10111
* 00 | 00000 00001 00100 00101 10000 10001 10100 10101
* <p/>
* Each cell in this example is 45 degrees wide and 45 degrees high (since longitude ranges over [0,360],
* and latitude only goes from [-90,90]).
* <p/>
* Note that the dimension-specific bit-strings proceed in order (longitude from 0 to 7; latitude from 0 to 3)
* along each axis. That allows us to work on these bit-strings as coordinate indexes, making it simple to
* iterate over the GeoHashes within a rectangle (and make some estimates about in-circle membership).
*/
object RectangleGeoHashIterator {
/**
* Offset, in degrees, by which the LL and UR corners are perturbed
* to make sure they don't fall on GeoHash boundaries (that may be
* shared between GeoHashes).
*/
val OFFSET_DEGREES = 1e-6
// alternate constructor
def apply(geometry: Geometry, precision: Int) = {
val env = geometry.getEnvelopeInternal
new RectangleGeoHashIterator(env.getMinY, env.getMinX, env.getMaxY, env.getMaxX, precision)
}
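  // Illustrative usage (assumes GeoHashIterator exposes the usual hasNext/next iterator
  // protocol, which is defined outside this file):
  //   val ghs = RectangleGeoHashIterator(geometry, 35)
  //   while (ghs.hasNext) { val gh = ghs.next(); /* index or query gh */ }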
}
import org.locationtech.geomesa.utils.geohash.RectangleGeoHashIterator._
class RectangleGeoHashIterator(latitudeLL: Double,
longitudeLL: Double,
latitudeUR: Double,
longitudeUR: Double,
precision: Int)
extends GeoHashIterator(latitudeLL + OFFSET_DEGREES,
longitudeLL + OFFSET_DEGREES,
latitudeUR - OFFSET_DEGREES,
longitudeUR - OFFSET_DEGREES,
precision) {
/**
* Internal method that figures out whether the iterator is finished, and if not, updates the
* current GeoHash and advances the counters.
* <p/>
* As a general scheme, we start in the lower-left corner, and iterate in a row-major way
* until we exceed the upper-right corner of the rectangle.
*
* @return whether the iteration is over
*/
  override protected def advance: Boolean = {
doesHaveNext = true
if (latPosition > latBitsUR) {
setCurrentGeoHash(null)
doesHaveNext = false
return false
}
setCurrentGeoHash(GeoHash.composeGeoHashFromBitIndicesAndPrec(latPosition, lonPosition, precision))
lonPosition +=1
if (lonPosition > lonBitsUR) {
latPosition += 1
lonPosition = lonBitsLL
}
true
}
}
|
nagavallia/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geohash/RectangleGeoHashIterator.scala
|
Scala
|
apache-2.0
| 4,468 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package top
import com.intellij.lang.PsiBuilder
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import scala.annotation.tailrec
/**
* @author Alexander Podkhalyuzin
* Date: 06.02.2008
*/
/*
QualId ::= id {. id}
*/
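// For example, "org.example.project" is parsed as nested REFERENCE elements,
// one per prefix: (org), (org.example), (org.example.project).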
object QualId extends ParsingRule {
override def parse(implicit builder: ScalaPsiBuilder): Boolean = {
parseNext(builder.mark())
true
}
@tailrec
private def parseNext(qualMarker: PsiBuilder.Marker)(implicit builder: ScalaPsiBuilder): Unit = {
    // parsing the identifier
builder.getTokenType match {
case ScalaTokenTypes.tIDENTIFIER =>
builder.advanceLexer() //Ate identifier
//Look for dot
builder.getTokenType match {
case ScalaTokenTypes.tDOT => {
val newMarker = qualMarker.precede
qualMarker.done(ScalaElementType.REFERENCE)
builder.advanceLexer() //Ate dot
//recursively parse qualified identifier
parseNext(newMarker)
}
case _ =>
//It's OK, let's close marker
qualMarker.done(ScalaElementType.REFERENCE)
}
case _ =>
builder error ScalaBundle.message("wrong.qual.identifier")
qualMarker.drop()
}
}
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/top/QualId.scala
|
Scala
|
apache-2.0
| 1,401 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.io.File
import org.apache.kafka.common.protocol.SecurityProtocol
class SslProducerSendTest extends BaseProducerSendTest {
override protected def securityProtocol = SecurityProtocol.SSL
override protected lazy val trustStoreFile = Some(File.createTempFile("truststore", ".jks"))
}
|
wangcy6/storm_app
|
frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/integration/kafka/api/SslProducerSendTest.scala
|
Scala
|
apache-2.0
| 1,135 |
package beam.agentsim.agents.choice.logit
import scala.util.Random
import org.scalatest.{Matchers, WordSpecLike}
class MultinomialLogitSpec extends WordSpecLike with Matchers {
"An MNL Model with standard data" must {
val utilityFunctions = Map(
"car" -> Map("intercept" -> UtilityFunctionOperation.Intercept(3.0)),
"walk" -> Map("intercept" -> UtilityFunctionOperation.Intercept(4.0))
)
val common = Map(
"cost" -> UtilityFunctionOperation.Multiplier(-0.01),
"time" -> UtilityFunctionOperation.Multiplier(-0.02)
)
val mnl = new MultinomialLogit(utilityFunctions, common)
val alts = Map(
"car" -> Map("cost" -> 30.0, "time" -> 50.0),
"walk" -> Map("cost" -> 0.0, "time" -> 40.0)
)
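    // Expected utilities, worked out from the coefficients above:
    //   car:  3.0 + (-0.01 * 30.0) + (-0.02 * 50.0) = 1.7
    //   walk: 4.0 + (-0.01 *  0.0) + (-0.02 * 40.0) = 3.2
    // which gives P(walk) = e^3.2 / (e^1.7 + e^3.2) ≈ 0.82 -- the ~81% referenced below.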
"should evaluate utility functions as expected" in {
mnl.getUtilityOfAlternative(alts.head._1, alts.head._2) match {
case None => fail()
case Some(util) =>
(util - 1.7).abs should be < 0.000000001
}
}
"should evaluate expected max utility as expected" in {
val util = mnl.getExpectedMaximumUtility(alts)
Math.abs(util.get - 3.401413) < 0.00001 should be(true)
}
"should sample higher probability alternatives more often" in {
// With these inputs, we expect "walk" ~81% of the time, which translates to an almost certainty that majority
// will be walk with 100 trials (p-val 3.00491e-12)
val sampleSize = 100
val rand = new Random()
val samples: Seq[String] = for {
_ <- 1 until sampleSize
result <- mnl.sampleAlternative(alts, rand)
} yield result.alternativeType
      samples.count(_ == "walk") should be > (sampleSize / 2) // 50% or more should be walk
}
}
"An MNL Model with arbitrary data" must {
// the alternatives as objects
sealed trait Mode
object Car extends Mode
object Walk extends Mode
sealed trait FunctionParam
object FixedUtility extends FunctionParam
object Cost extends FunctionParam
object Time extends FunctionParam
val utilityFunctions: Map[Mode, Map[FunctionParam, UtilityFunctionOperation]] = Map(
Car -> Map(FixedUtility -> UtilityFunctionOperation.Intercept(3.0)),
Walk -> Map(FixedUtility -> UtilityFunctionOperation.Intercept(4.0))
)
val common = Map(
Cost -> UtilityFunctionOperation.Multiplier(-0.01),
Time -> UtilityFunctionOperation.Multiplier(-0.02)
)
val mnl = new MultinomialLogit(utilityFunctions, common)
val alts: Map[Mode, Map[FunctionParam, Double]] = Map(
Car -> Map(Cost -> 30.0, Time -> 50.0),
Walk -> Map(Cost -> 0.0, Time -> 40.0)
)
"should evaluate utility functions as expected" in {
mnl.getUtilityOfAlternative(alts.head._1, alts.head._2) match {
case None => fail()
case Some(util) =>
(util - 1.7).abs should be < 0.000000001
}
}
"should evaluate expected max utility as expected" in {
val util = mnl.getExpectedMaximumUtility(alts)
Math.abs(util.get - 3.401413) < 0.00001 should be(true)
}
"should sample higher probability alternatives more often" in {
// With these inputs, we expect "walk" ~81% of the time, which translates to an almost certainty that majority
// will be walk with 100 trials (p-val 3.00491e-12)
val sampleSize = 100
val rand = new Random()
val samples: Seq[Mode] = for {
_ <- 1 until sampleSize
result <- mnl.sampleAlternative(alts, rand)
} yield result.alternativeType
      samples.count(_ == Walk) should be > (sampleSize / 2) // 50% or more should be walk
}
}
"an MNL sampling alternatives where at least one has utility which is positively infinite" should {
"select one of them" in new MultinomialLogitSpec.InfinitelyValuedAlternatives {
val random: Random = new Random(0)
val mnl: MultinomialLogit[String, String] = new MultinomialLogit[String, String](Map.empty, utilityFunctions)
mnl.sampleAlternative(alternatives, random) match {
case None => fail()
case Some(selected) =>
// these alternatives have a dangerous cost value
selected.alternativeType should (equal("B") or equal("D"))
// the alternatives that "blow up" to infinity have a dangerous cost
selected.utility should equal(dangerousCostValue)
// the dangerous cost value, when e is raised to them, should go to pos. infinity
math.pow(math.E, dangerousCostValue) should equal(Double.PositiveInfinity)
}
}
}
"an MNL with n alternatives where all have equal value" should {
"select one with 1/n probability" in new MultinomialLogitSpec.EquallyValuedAlternatives {
val random: Random = new Random(0)
val mnl: MultinomialLogit[String, String] = new MultinomialLogit[String, String](Map.empty, utilityFunctions)
mnl.sampleAlternative(alternatives, random) match {
case None => fail()
case Some(selected) =>
// there are four equal alternatives, so, they should each be given a 25% probability of selection
selected.realProbability should equal(0.25)
// the utility should be the same
selected.utility should equal(alternativesCost)
}
}
}
}
object MultinomialLogitSpec {
trait InfinitelyValuedAlternatives {
val dangerousCostValue = 1000.0
val utilityFunctions = Map(
"value" -> UtilityFunctionOperation.Multiplier(1.0)
)
// alternatives B and D should evaluate to e^1000 which is greater than Double.MaxValue => infinite
val alternatives: Map[String, Map[String, Double]] = Map(
"A" -> Map(
"value" -> -0.5
),
"B" -> Map(
"value" -> dangerousCostValue
),
"C" -> Map(
"value" -> 2.0
),
"D" -> Map(
"value" -> dangerousCostValue
)
)
}
trait EquallyValuedAlternatives {
val alternativesCost = -1
val utilityFunctions = Map(
"value" -> UtilityFunctionOperation.Multiplier(1.0)
)
val alternatives: Map[String, Map[String, Double]] = Map(
"A" -> Map(
"value" -> alternativesCost
),
"B" -> Map(
"value" -> alternativesCost
),
"C" -> Map(
"value" -> alternativesCost
),
"D" -> Map(
"value" -> alternativesCost
)
)
}
}
|
colinsheppard/beam
|
src/test/scala/beam/agentsim/agents/choice/logit/MultinomialLogitSpec.scala
|
Scala
|
gpl-3.0
| 6,450 |
package cloudcmd.common.adapters
// "s3://<aws id>@<bucket>?tier=2&tags=s3&secret=<aws secret>"
class S3Adapter extends IndexFilterAdapter(new DirectS3Adapter) {}
|
briangu/cloudcmd
|
common/src/main/scala/cloudcmd/common/adapters/S3Adapter.scala
|
Scala
|
apache-2.0
| 168 |
object Solution {
def sievePrimeGenerator(n: Int): (Array[Int], Array[Boolean]) = {
val nums = Array.fill(n + 1)(true)
nums(0) = false
nums(1) = false
val primes = for (i <- (2 to n).toIterator if nums(i)) yield {
var j = 2
while (i * j <= n) {
nums(i * j) = false
j += 1
}
i
}
(primes.toArray, nums)
}
def isPrime(x: Long,
primes: Array[Int],
primeTable: Array[Boolean]): Boolean = {
val size = primeTable.size - 1
val sizeSquare = 1L * size * size
require(sizeSquare >= x)
val pSize = primes.size
if (x < 0) false
else if (x <= size) primeTable(x.toInt)
else {
var j = 0
val sqrtX = math.sqrt(x).toLong
while (j < pSize && primes(j) <= sqrtX) {
if (x % primes(j) == 0) return false
j += 1
}
true
}
}
def getIUnderRatio(ratio: Double): Long = {
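    // Project Euler 58: walk the Ulam spiral one ring at a time. `sl` is the current side
    // length minus one, `p` the value at the ring's last corner, and `primeCount` the number
    // of primes seen on the diagonals so far (3, 5 and 7 on the first ring). Each new ring
    // adds four corners p + sl, p + 2*sl, p + 3*sl, p + 4*sl; the last is an odd square and
    // never prime, so only the first three are tested. `2 * sl + 1` is the diagonal count.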
var sl = 2L
var primeCount = 3L
var p = 9L
// 24e4 is a magical number, as I pre-compute the answer.
val (primes, primeTable) = sievePrimeGenerator(24e4.toInt)
while (primeCount >= ratio * (2 * sl + 1)) {
sl += 2
var i = 1
while (i <= 3) {
p += sl
// println(p)
if (isPrime(p, primes, primeTable)) primeCount += 1
i += 1
}
p += sl
}
sl + 1
}
def main(args: Array[String]) {
val n = readLine.toInt
val ratio = 1.0 * n / 100
val i = getIUnderRatio(ratio)
println(i)
}
}
|
advancedxy/hackerrank
|
project-euler/problem-58/SpiralPrimes.scala
|
Scala
|
mit
| 1,515 |
class Casey1[T](val a: T) {
def isEmpty: Boolean = false
def isEmpty(x: T): Boolean = ???
def get: T = a
def get(x: T): String = ???
}
object Casey1 { def unapply[T](a: Casey1[T]) = a }
object Test {
def main(args: Array[String]): Unit = {
val c @ Casey1(x) = new Casey1(0)
assert(x == c.get)
}
}
|
lampepfl/dotty
|
tests/pos/i1540b.scala
|
Scala
|
apache-2.0
| 318 |
package com.twitter.finagle.postgresql.machine
import com.twitter.finagle.postgresql.BackendMessage
import com.twitter.finagle.postgresql.BackendMessage.CommandTag
import com.twitter.finagle.postgresql.BackendMessage.DataRow
import com.twitter.finagle.postgresql.BackendMessage.RowDescription
import com.twitter.finagle.postgresql.FrontendMessage
import com.twitter.finagle.postgresql.PgSqlNoSuchTransition
import com.twitter.finagle.postgresql.PgSqlServerError
import com.twitter.finagle.postgresql.PropertiesSpec
import com.twitter.finagle.postgresql.Response
import com.twitter.finagle.postgresql.Response.{ConnectionParameters, QueryResponse, Row}
import com.twitter.finagle.postgresql.machine.StateMachine.Complete
import com.twitter.finagle.postgresql.machine.StateMachine.Respond
import com.twitter.finagle.postgresql.machine.StateMachine.Send
import com.twitter.finagle.postgresql.machine.StateMachine.Transition
import com.twitter.io.Reader
import com.twitter.util.Await
import com.twitter.util.Future
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try
import org.scalatest.Assertion
class SimpleQueryMachineSpec extends MachineSpec[Response] with PropertiesSpec {
def mkMachine(q: String): SimpleQueryMachine =
new SimpleQueryMachine(q, ConnectionParameters.empty)
val readyForQuery = BackendMessage.ReadyForQuery(BackendMessage.NoTx)
def checkQuery(q: String) =
checkResult("sends a query message") {
case Transition(_, Send(FrontendMessage.Query(str))) =>
str must be(q)
}
def checkCompletes =
checkResult("completes") {
case Complete(ready, response) =>
ready must be(readyForQuery)
response mustBe empty
}
type QueryResponseCheck = PartialFunction[Try[Response.QueryResponse], Assertion]
def checkSingleResponse(f: QueryResponseCheck) =
checkResult("captures one response") {
case Transition(_, Respond(value)) =>
value.asScala must beSuccessfulTry {
beLike[Response] {
case r @ Response.SimpleQueryResponse(_) =>
Await.result(r.next.liftToTry) must beLike(f)
}
}
}
def multiQuerySpec(
query: String,
first: (BackendMessage, QueryResponseCheck),
others: (BackendMessage, QueryResponseCheck)*
) = {
var sqr: Option[Response.SimpleQueryResponse] = None
val (msg, firstCheck) = first
val firstSteps = List(
receive(msg),
checkResult("responds") {
case Transition(_, Respond(value)) =>
value.asScala must beSuccessfulTry {
beLike[Response] {
case r: Response.SimpleQueryResponse =>
sqr = Some(r)
succeed
}
}
}
)
val steps = checkQuery(query) :: firstSteps ++
others.map { case (msg, _) => receive(msg) } ++
List(
receive(readyForQuery),
checkCompletes
)
machineSpec(mkMachine(query))(steps: _*)
sqr match {
case None => Future.value(fail() :: Nil)
case Some(s) =>
Reader
.toAsyncStream(s.responses)
.toSeq()
.map { actual =>
(actual zip (firstCheck :: others.map(_._2).toList))
.map {
case (a, check) =>
check(Return(a))
}
}
}
}
def singleQuerySpec(
query: String,
msg: BackendMessage
)(
f: PartialFunction[Try[Response.QueryResponse], Assertion]
) =
multiQuerySpec(query, msg -> f)
"SimpleQueryMachine" should {
"send the provided query string" in prop { query: String =>
machineSpec(mkMachine(query)) {
checkQuery(query)
}
}
"support empty queries" in {
singleQuerySpec("", BackendMessage.EmptyQueryResponse) {
case Return(value) => value must be(Response.Empty)
}
}
"support commands" in prop { (command: String, commandTag: CommandTag) =>
singleQuerySpec(command, BackendMessage.CommandComplete(commandTag)) {
case Return(value) => value must be(Response.Command(commandTag))
}
}
def resultSetSpec(
query: String,
rowDesc: RowDescription,
rows: List[DataRow]
)(
f: Seq[Row] => Assertion
) = {
var rowReader: Option[Response.ResultSet] = None
val prep = List(
checkQuery(query),
receive(rowDesc),
checkSingleResponse {
case Return(value) =>
value must beLike[QueryResponse] {
case rs @ Response.ResultSet(desc, _, _) =>
rowReader = Some(rs)
desc must be(rowDesc.rowFields)
}
}
)
val sendRows = rows.map(receive(_))
val post = List(
receive(
BackendMessage.CommandComplete(CommandTag.AffectedRows(CommandTag.Select, rows.size))),
receive(readyForQuery),
checkCompletes
)
oneMachineSpec(mkMachine(query))(prep ++ sendRows ++ post: _*)
rowReader mustBe defined
rowReader.get.toSeq.map(f)
rowReader = None
// NOTE: machineErrorSpec returns a Prop which we combine with another using &&
// It's kind of weird, but specs2 isn't really helping here.
machineErrorSpec(mkMachine(query))(prep ++ sendRows ++ post: _*)
// NOTE: the randomization of the error makes it possible that:
// * we read no rows at all
// * we read all rows (and the error isn't surfaced)
// * we read partial rows and then an exception
rowReader match {
case None => succeed
case Some(r) =>
rowReader = None // TODO: the statefulness of the test is pretty brittle
r.toSeq.liftToTry.map {
case Return(rows) =>
f(rows) // if we read all rows, then we should check that they're what we expect
case Throw(t) => t mustBe an[PgSqlServerError] // the error should surface here
}
}
}
"support empty result sets" in prop { rowDesc: RowDescription =>
resultSetSpec("select 1;", rowDesc, Nil) { rows =>
rows mustBe empty
}
}
"return rows in order" in prop { rs: TestResultSet =>
resultSetSpec("select 1;", rs.desc, rs.rows) { rows =>
rows must be(rs.rows.map(_.values))
}
}
"support multiline queries" in {
(command: String, firstTag: CommandTag, secondTag: CommandTag) =>
multiQuerySpec(
command,
BackendMessage.CommandComplete(firstTag) -> {
case Return(value) =>
value must be(Response.Command(firstTag))
},
BackendMessage.EmptyQueryResponse -> {
case Return(value) => value must be(Response.Empty)
},
BackendMessage.CommandComplete(secondTag) -> {
case Return(value) =>
value must be(Response.Command(secondTag))
}
)
}
"fail when no transition exist" in {
val machine = mkMachine("bogus")
an[PgSqlNoSuchTransition] shouldBe thrownBy {
machine.receive(machine.Sent, BackendMessage.PortalSuspended)
}
}
}
}
|
twitter/finagle
|
finagle-postgresql/src/test/scala/com/twitter/finagle/postgresql/machine/SimpleQueryMachineSpec.scala
|
Scala
|
apache-2.0
| 7,178 |
package com.signalcollect.dcop.evaluation.bestresponse
import com.signalcollect.StateForwarderEdge
import com.signalcollect.interfaces.AggregationOperation
import com.signalcollect.ExecutionConfiguration
import com.signalcollect.Graph
import com.signalcollect.dcop.evaluation.candidates.BestResponseVertexBuilder
import com.signalcollect.dcop.evaluation.candidates.BinaryConstraintGraphProvider
import com.signalcollect.GraphBuilder
import com.signalcollect.configuration.TerminationReason
import com.signalcollect.dcop.util.ProblemConstants
class BRExecutor(
  file: String,
  config: ExecutionConfiguration,
  isAdopt: Boolean,
  aggregation: AggregationOperation[Int],
  randomInit: Boolean,
  p: Double,
  graphSize: Int) {
val fileName = file
val numColors = ProblemConstants.numOfColors
val executionConfig = config
val isInputAdopt = isAdopt
val randomInitialState = randomInit
val probability = p
val aggregator = aggregation
var graph: Graph[Any, Any] = _
val algorithm = new BestResponseVertexBuilder(randomInitialState, probability)
val graphProvider: BinaryConstraintGraphProvider = new BinaryConstraintGraphProvider(graphSize, numColors, 2, loadFrom = file, isAdopt = isInputAdopt)
val graphBuilder = new GraphBuilder[Any, Any]()
var conflictsOverTime: Map[Int, Int] = Map()
  // Which edgeBuilder to use with this algorithm: every case currently falls back to a plain StateForwarderEdge.
val edgeBuilder = algorithm match {
case otherwise => (x: Int, y: Int) => new StateForwarderEdge(y)
}
graph = graphBuilder.build
graphProvider.populate(graph, algorithm, edgeBuilder)
graph.awaitIdle
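  // Runs the graph computation and returns the value produced by the supplied aggregator,
  // or -1 when no aggregator was given.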
def executeWithAggregation(): Int = {
if (aggregation != null) {
graph.execute(config)
graph.aggregate(aggregator)
} else {
-1
}
}
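  // Returns the number of signal steps the execution needed to converge,
  // or -1 if it terminated for any reason other than convergence.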
def executeForConvergenceSteps(): Long = {
val executionInfo = graph.execute(executionConfig)
if (executionInfo.executionStatistics.terminationReason == TerminationReason.Converged) {
executionInfo.executionStatistics.signalSteps
} else {
-1
}
}
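  // Returns the wall-clock computation time in milliseconds if the execution converged,
  // or -1 otherwise.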
def executeForConvergenceTime(): Long = {
val executionInfo = graph.execute(executionConfig)
if (executionInfo.executionStatistics.terminationReason == TerminationReason.Converged) {
executionInfo.executionStatistics.computationTime.toMillis
} else {
-1
}
}
}
|
gmazlami/dcop-maxsum
|
src/main/scala/com/signalcollect/dcop/evaluation/bestresponse/BRExecutor.scala
|
Scala
|
apache-2.0
| 2,305 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Enterprise Data Management Council
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.edmcouncil.rdf_toolkit
import org.edmcouncil.rdf_toolkit.io.DirectoryWalker
import java.io.{File, FileInputStream, FileOutputStream, PrintStream}
import java.util.regex.{Matcher, Pattern}
import scala.language.postfixOps
import scala.collection.JavaConverters._
import org.scalatest.{FlatSpec, Matchers}
import org.slf4j.LoggerFactory
class SesameRdfFormatterSpec extends FlatSpec with Matchers with SesameSortedWriterSpecSupport /*with OutputSuppressor*/ {
override val logger = LoggerFactory getLogger classOf[SesameRdfFormatterSpec]
val rootOutputDir1 = mkCleanDir(s"target/temp/${this.getClass.getName}")
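  // Redirects System.in/System.out to the given files, runs RdfFormatter over the standard
  // streams, compares the formatted output with the original input file, and restores the
  // original streams afterwards.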
def serializeStandardInputToStandardOutput(outputDir: File, inputFile: File, inputFormat: String, outputFormat: String, outputSuffix: String): Unit = {
val originalIn = System in
val originalOut = System out
try {
val outputFile = constructTargetFile(inputFile, resourceDir, outputDir, Some(outputSuffix))
System setIn (new FileInputStream(inputFile))
System setOut (new PrintStream(new FileOutputStream(outputFile)))
RdfFormatter run Array[String](
"-sfmt", inputFormat,
"-tfmt", outputFormat
)
assert(outputFile exists, s"file missing in outputDir: ${outputFile.getAbsolutePath}")
assert(compareFiles(inputFile, outputFile, "UTF-8"), s"file mismatch between inputFile and outputFile: ${inputFile.getName} | ${outputFile.getName}")
} catch {
case t: Throwable ⇒ throw t
} finally {
System setIn originalIn
System setOut originalOut
}
}
"A SesameRdfFormatter" should "be able to use the standard input and output" in {
val inputFile = new File("src/test/resources/other/topbraid-countries-ontology.ttl")
val outputDir = createTempDir(rootOutputDir1, "turtle")
serializeStandardInputToStandardOutput(outputDir, inputFile, "turtle", "turtle", ".ttl")
serializeStandardInputToStandardOutput(outputDir, inputFile, "turtle", "rdf-xml", ".rdf")
serializeStandardInputToStandardOutput(outputDir, inputFile, "turtle", "json-ld", ".jsonld")
}
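  // Runs RdfFormatter in directory mode over src/test/resources and verifies that a formatted
  // counterpart was produced for every source file matching the given extension.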
def processDirectory(format: String, fileExt: String): Unit = {
val sourceDirPath = "src/test/resources"
    val sourcePatternString = s"^(.*)\\.$fileExt$$"
val sourcePattern = Pattern.compile(sourcePatternString)
val targetPatternString = s"$$1.fmt.$fileExt"
RdfFormatter run Array[String](
"-sd", sourceDirPath,
"-sdp", sourcePatternString,
"-sfmt", format,
"-td", rootOutputDir1.getAbsolutePath,
"-tdp", targetPatternString,
"-tfmt", format
)
// Check the generated files
    val dw = new DirectoryWalker(new File(sourceDirPath), sourcePattern)
for (sourceResult ← dw.pathMatches.asScala) {
val sourceMatcher = sourcePattern.matcher(sourceResult.getRelativePath)
val targetRelativePath = sourceMatcher.replaceFirst(targetPatternString)
val targetFile = new File(rootOutputDir1, targetRelativePath)
assert(targetFile exists, s"target file not created: ${targetFile.getAbsolutePath}")
compareFiles(sourceResult getFile, targetFile, "UTF-8")
}
}
it should "be able to process the Turtle files in a directory" in {
processDirectory("turtle", "ttl")
}
it should "be able to process the RDF/XML files in a directory" in {
processDirectory("rdf-xml", "rdf")
}
}
|
edmcouncil/rdf-toolkit
|
src/test/scala/org/edmcouncil/rdf_toolkit/SesameRdfFormatterSpec.scala
|
Scala
|
mit
| 4,553 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.json
import java.io._
import java.nio.charset.{Charset, StandardCharsets, UnsupportedCharsetException}
import java.nio.file.Files
import java.sql.{Date, Timestamp}
import java.time.LocalDate
import java.util.Locale
import com.fasterxml.jackson.core.JsonFactory
import org.apache.hadoop.fs.{Path, PathFilter}
import org.apache.hadoop.io.SequenceFile.CompressionType
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.spark.{SparkConf, SparkException, TestUtils}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{functions => F, _}
import org.apache.spark.sql.catalyst.json._
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.ExternalRDD
import org.apache.spark.sql.execution.adaptive.AdaptiveTestUtils.assertExceptionMessage
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.sql.types.StructType.fromDDL
import org.apache.spark.sql.types.TestUDT.{MyDenseVector, MyDenseVectorUDT}
import org.apache.spark.util.Utils
class TestFileFilter extends PathFilter {
override def accept(path: Path): Boolean = path.getParent.getName != "p=2"
}
abstract class JsonSuite extends QueryTest with SharedSparkSession with TestJsonData {
import testImplicits._
test("Type promotion") {
def checkTypePromotion(expected: Any, actual: Any): Unit = {
assert(expected.getClass == actual.getClass,
s"Failed to promote ${actual.getClass} to ${expected.getClass}.")
assert(expected == actual,
s"Promoted value ${actual}(${actual.getClass}) does not equal the expected value " +
s"${expected}(${expected.getClass}).")
}
val factory = new JsonFactory()
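    // Round-trips a value through Jackson: writes it out as JSON, then parses it back with
    // JacksonParser using the requested Spark SQL data type.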
def enforceCorrectType(
value: Any,
dataType: DataType,
options: Map[String, String] = Map.empty): Any = {
val writer = new StringWriter()
Utils.tryWithResource(factory.createGenerator(writer)) { generator =>
generator.writeObject(value)
generator.flush()
}
val dummyOption = new JSONOptions(options, SQLConf.get.sessionLocalTimeZone)
val dummySchema = StructType(Seq.empty)
val parser = new JacksonParser(dummySchema, dummyOption, allowArrayAsStructs = true)
Utils.tryWithResource(factory.createParser(writer.toString)) { jsonParser =>
jsonParser.nextToken()
val converter = parser.makeConverter(dataType)
converter.apply(jsonParser)
}
}
val intNumber: Int = 2147483647
checkTypePromotion(intNumber, enforceCorrectType(intNumber, IntegerType))
checkTypePromotion(intNumber.toLong, enforceCorrectType(intNumber, LongType))
checkTypePromotion(intNumber.toDouble, enforceCorrectType(intNumber, DoubleType))
checkTypePromotion(
Decimal(intNumber), enforceCorrectType(intNumber, DecimalType.SYSTEM_DEFAULT))
val longNumber: Long = 9223372036854775807L
checkTypePromotion(longNumber, enforceCorrectType(longNumber, LongType))
checkTypePromotion(longNumber.toDouble, enforceCorrectType(longNumber, DoubleType))
checkTypePromotion(
Decimal(longNumber), enforceCorrectType(longNumber, DecimalType.SYSTEM_DEFAULT))
val doubleNumber: Double = 1.7976931348623157d
checkTypePromotion(doubleNumber.toDouble, enforceCorrectType(doubleNumber, DoubleType))
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber * 1000L)),
enforceCorrectType(intNumber, TimestampType))
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber.toLong * 1000L)),
enforceCorrectType(intNumber.toLong, TimestampType))
val strTime = "2014-09-30 12:34:56"
checkTypePromotion(
expected = DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf(strTime)),
enforceCorrectType(strTime, TimestampType,
Map("timestampFormat" -> "yyyy-MM-dd HH:mm:ss")))
val strDate = "2014-10-15"
checkTypePromotion(
DateTimeUtils.fromJavaDate(Date.valueOf(strDate)), enforceCorrectType(strDate, DateType))
val ISO8601Time1 = "1970-01-01T01:00:01.0Z"
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(3601000)),
enforceCorrectType(
ISO8601Time1,
TimestampType,
Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ss.SX")))
val ISO8601Time2 = "1970-01-01T02:00:01-01:00"
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(10801000)),
enforceCorrectType(
ISO8601Time2,
TimestampType,
Map("timestampFormat" -> "yyyy-MM-dd'T'HH:mm:ssXXX")))
val ISO8601Date = "1970-01-01"
checkTypePromotion(DateTimeUtils.microsToDays(32400000000L),
enforceCorrectType(ISO8601Date, DateType))
}
test("Get compatible type") {
def checkDataType(t1: DataType, t2: DataType, expected: DataType): Unit = {
var actual = JsonInferSchema.compatibleType(t1, t2)
assert(actual == expected,
s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
actual = JsonInferSchema.compatibleType(t2, t1)
assert(actual == expected,
s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
}
// NullType
checkDataType(NullType, BooleanType, BooleanType)
checkDataType(NullType, IntegerType, IntegerType)
checkDataType(NullType, LongType, LongType)
checkDataType(NullType, DoubleType, DoubleType)
checkDataType(NullType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(NullType, StringType, StringType)
checkDataType(NullType, ArrayType(IntegerType), ArrayType(IntegerType))
checkDataType(NullType, StructType(Nil), StructType(Nil))
checkDataType(NullType, NullType, NullType)
// BooleanType
checkDataType(BooleanType, BooleanType, BooleanType)
checkDataType(BooleanType, IntegerType, StringType)
checkDataType(BooleanType, LongType, StringType)
checkDataType(BooleanType, DoubleType, StringType)
checkDataType(BooleanType, DecimalType.SYSTEM_DEFAULT, StringType)
checkDataType(BooleanType, StringType, StringType)
checkDataType(BooleanType, ArrayType(IntegerType), StringType)
checkDataType(BooleanType, StructType(Nil), StringType)
// IntegerType
checkDataType(IntegerType, IntegerType, IntegerType)
checkDataType(IntegerType, LongType, LongType)
checkDataType(IntegerType, DoubleType, DoubleType)
checkDataType(IntegerType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(IntegerType, StringType, StringType)
checkDataType(IntegerType, ArrayType(IntegerType), StringType)
checkDataType(IntegerType, StructType(Nil), StringType)
// LongType
checkDataType(LongType, LongType, LongType)
checkDataType(LongType, DoubleType, DoubleType)
checkDataType(LongType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(LongType, StringType, StringType)
checkDataType(LongType, ArrayType(IntegerType), StringType)
checkDataType(LongType, StructType(Nil), StringType)
// DoubleType
checkDataType(DoubleType, DoubleType, DoubleType)
checkDataType(DoubleType, DecimalType.SYSTEM_DEFAULT, DoubleType)
checkDataType(DoubleType, StringType, StringType)
checkDataType(DoubleType, ArrayType(IntegerType), StringType)
checkDataType(DoubleType, StructType(Nil), StringType)
// DecimalType
checkDataType(DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT,
DecimalType.SYSTEM_DEFAULT)
checkDataType(DecimalType.SYSTEM_DEFAULT, StringType, StringType)
checkDataType(DecimalType.SYSTEM_DEFAULT, ArrayType(IntegerType), StringType)
checkDataType(DecimalType.SYSTEM_DEFAULT, StructType(Nil), StringType)
// StringType
checkDataType(StringType, StringType, StringType)
checkDataType(StringType, ArrayType(IntegerType), StringType)
checkDataType(StringType, StructType(Nil), StringType)
// ArrayType
checkDataType(ArrayType(IntegerType), ArrayType(IntegerType), ArrayType(IntegerType))
checkDataType(ArrayType(IntegerType), ArrayType(LongType), ArrayType(LongType))
checkDataType(ArrayType(IntegerType), ArrayType(StringType), ArrayType(StringType))
checkDataType(ArrayType(IntegerType), StructType(Nil), StringType)
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType, false), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType, false), ArrayType(IntegerType, false))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
// StructType
checkDataType(StructType(Nil), StructType(Nil), StructType(Nil))
checkDataType(
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(StructField("f1", IntegerType, true) :: Nil))
checkDataType(
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(Nil),
StructType(StructField("f1", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) ::
StructField("f2", IntegerType, true) :: Nil),
StructType(StructField("f1", LongType, true) :: Nil),
StructType(
StructField("f1", LongType, true) ::
StructField("f2", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) :: Nil),
StructType(
StructField("f2", IntegerType, true) :: Nil),
StructType(
StructField("f1", IntegerType, true) ::
StructField("f2", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) :: Nil),
DecimalType.SYSTEM_DEFAULT,
StringType)
}
test("Complex field and type inferring with null in sampling") {
val jsonDF = spark.read.json(jsonNullStruct)
val expectedSchema = StructType(
StructField("headers", StructType(
StructField("Charset", StringType, true) ::
StructField("Host", StringType, true) :: Nil)
, true) ::
StructField("ip", StringType, true) ::
StructField("nullstr", StringType, true):: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select nullstr, headers.Host from jsonTable"),
Seq(Row("", "1.abc.com"), Row("", null), Row("", null), Row(null, null))
)
}
test("Primitive field and type inferring") {
val jsonDF = spark.read.json(primitiveFieldAndType)
val expectedSchema = StructType(
StructField("bigInteger", DecimalType(20, 0), true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", LongType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Complex field and type inferring") {
val jsonDF = spark.read.json(complexFieldAndType1)
val expectedSchema = StructType(
StructField("arrayOfArray1", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfArray2", ArrayType(ArrayType(DoubleType, true), true), true) ::
StructField("arrayOfBigInteger", ArrayType(DecimalType(21, 0), true), true) ::
StructField("arrayOfBoolean", ArrayType(BooleanType, true), true) ::
StructField("arrayOfDouble", ArrayType(DoubleType, true), true) ::
StructField("arrayOfInteger", ArrayType(LongType, true), true) ::
StructField("arrayOfLong", ArrayType(LongType, true), true) ::
StructField("arrayOfNull", ArrayType(StringType, true), true) ::
StructField("arrayOfString", ArrayType(StringType, true), true) ::
StructField("arrayOfStruct", ArrayType(
StructType(
StructField("field1", BooleanType, true) ::
StructField("field2", StringType, true) ::
StructField("field3", StringType, true) :: Nil), true), true) ::
StructField("struct", StructType(
StructField("field1", BooleanType, true) ::
StructField("field2", DecimalType(20, 0), true) :: Nil), true) ::
StructField("structWithArrayFields", StructType(
StructField("field1", ArrayType(LongType, true), true) ::
StructField("field2", ArrayType(StringType, true), true) :: Nil), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from jsonTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
Row(new java.math.BigDecimal("922337203685477580700"),
new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
    // Access elements of an array inside a field of type ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
Row("str2", 2.1)
)
// Access elements of an array of structs.
checkAnswer(
sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
"from jsonTable"),
Row(
Row(true, "str1", null),
Row(false, null, null),
Row(null, null, null),
null)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from jsonTable"),
Row(
Row(true, new java.math.BigDecimal("92233720368547758070")),
true,
new java.math.BigDecimal("92233720368547758070")) :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
Row(5, null)
)
}
test("GetField operation on complex data type") {
val jsonDF = spark.read.json(complexFieldAndType1)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
Row(true, "str1")
)
// Getting all values of a specific field from an array of structs.
checkAnswer(
sql("select arrayOfStruct.field1, arrayOfStruct.field2 from jsonTable"),
Row(Seq(true, false, null), Seq("str1", null, null))
)
}
test("Type conflict in primitive field values") {
val jsonDF = spark.read.json(primitiveFieldValueTypeConflict)
val expectedSchema = StructType(
StructField("num_bool", StringType, true) ::
StructField("num_num_1", LongType, true) ::
StructField("num_num_2", DoubleType, true) ::
StructField("num_num_3", DoubleType, true) ::
StructField("num_str", StringType, true) ::
StructField("str_bool", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row("true", 11L, null, 1.1, "13.1", "str1") ::
Row("12", null, 21474836470.9, null, null, "true") ::
Row("false", 21474836470L, 92233720368547758070d, 100, "str1", "false") ::
Row(null, 21474836570L, 1.1, 21474836470L, "92233720368547758070", null) :: Nil
)
// Number and Boolean conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_bool - 10 from jsonTable where num_bool > 11"),
Row(2)
)
// Widening to LongType
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 11"),
Row(21474836370L) :: Row(21474836470L) :: Nil
)
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 10"),
Row(-89) :: Row(21474836370L) :: Row(21474836470L) :: Nil
)
// Widening to DecimalType
checkAnswer(
sql("select num_num_2 + 1.3 from jsonTable where num_num_2 > 1.1"),
Row(21474836472.2) ::
Row(92233720368547758071.3) :: Nil
)
// Widening to Double
checkAnswer(
sql("select num_num_3 + 1.2 from jsonTable where num_num_3 > 1.1"),
Row(101.2) :: Row(21474836471.2) :: Nil
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 14d"),
Row(92233720368547758071.2)
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str >= 92233720368547758060"),
Row(new java.math.BigDecimal("92233720368547758071.2").doubleValue)
)
// String and Boolean conflict: resolve the type as string.
checkAnswer(
sql("select * from jsonTable where str_bool = 'str1'"),
Row("true", 11L, null, 1.1, "13.1", "str1")
)
}
test("Type conflict in complex field values") {
val jsonDF = spark.read.json(complexFieldValueTypeConflict)
val expectedSchema = StructType(
StructField("array", ArrayType(LongType, true), true) ::
StructField("num_struct", StringType, true) ::
StructField("str_array", StringType, true) ::
StructField("struct", StructType(
StructField("field", StringType, true) :: Nil), true) ::
StructField("struct_array", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(Seq(), "11", "[1,2,3]", Row(null), "[]") ::
Row(null, """{"field":false}""", null, null, "{}") ::
Row(Seq(4, 5, 6), null, "str", Row(null), "[7,8,9]") ::
Row(Seq(7), "{}", """["str1","str2",33]""", Row("str"), """{"field":true}""") :: Nil
)
}
test("Type conflict in array elements") {
val jsonDF = spark.read.json(arrayElementTypeConflict)
val expectedSchema = StructType(
StructField("array1", ArrayType(StringType, true), true) ::
StructField("array2", ArrayType(StructType(
StructField("field", LongType, true) :: Nil), true), true) ::
StructField("array3", ArrayType(StringType, true), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
"""{"field":"str"}"""), Seq(Row(214748364700L), Row(1)), null) ::
Row(null, null, Seq("""{"field":"str"}""", """{"field":1}""")) ::
Row(null, null, Seq("1", "2", "3")) :: Nil
)
// Treat an element as a number.
checkAnswer(
sql("select array1[0] + 1 from jsonTable where array1 is not null"),
Row(2)
)
}
test("Handling missing fields") {
val jsonDF = spark.read.json(missingFields)
val expectedSchema = StructType(
StructField("a", BooleanType, true) ::
StructField("b", LongType, true) ::
StructField("c", ArrayType(LongType, true), true) ::
StructField("d", StructType(
StructField("field", BooleanType, true) :: Nil), true) ::
StructField("e", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
}
test("Loading a JSON dataset from a text file") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val jsonDF = spark.read.json(path)
val expectedSchema = StructType(
StructField("bigInteger", DecimalType(20, 0), true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", LongType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Loading a JSON dataset primitivesAsString returns schema with primitive types as strings") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val jsonDF = spark.read.option("primitivesAsString", "true").json(path)
val expectedSchema = StructType(
StructField("bigInteger", StringType, true) ::
StructField("boolean", StringType, true) ::
StructField("double", StringType, true) ::
StructField("integer", StringType, true) ::
StructField("long", StringType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row("92233720368547758070",
"true",
"1.7976931348623157",
"10",
"21474836470",
null,
"this is a simple string.")
)
}
test("Loading a JSON dataset primitivesAsString returns complex fields as strings") {
val jsonDF = spark.read.option("primitivesAsString", "true").json(complexFieldAndType1)
val expectedSchema = StructType(
StructField("arrayOfArray1", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfArray2", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfBigInteger", ArrayType(StringType, true), true) ::
StructField("arrayOfBoolean", ArrayType(StringType, true), true) ::
StructField("arrayOfDouble", ArrayType(StringType, true), true) ::
StructField("arrayOfInteger", ArrayType(StringType, true), true) ::
StructField("arrayOfLong", ArrayType(StringType, true), true) ::
StructField("arrayOfNull", ArrayType(StringType, true), true) ::
StructField("arrayOfString", ArrayType(StringType, true), true) ::
StructField("arrayOfStruct", ArrayType(
StructType(
StructField("field1", StringType, true) ::
StructField("field2", StringType, true) ::
StructField("field3", StringType, true) :: Nil), true), true) ::
StructField("struct", StructType(
StructField("field1", StringType, true) ::
StructField("field2", StringType, true) :: Nil), true) ::
StructField("structWithArrayFields", StructType(
StructField("field1", ArrayType(StringType, true), true) ::
StructField("field2", ArrayType(StringType, true), true) :: Nil), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from jsonTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
Row("922337203685477580700", "-922337203685477580800", null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("1.1", "2.1", "3.1"))
)
    // Access elements of an array inside a field of type ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
Row("str2", "2.1")
)
// Access elements of an array of structs.
checkAnswer(
sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
"from jsonTable"),
Row(
Row("true", "str1", null),
Row("false", null, null),
Row(null, null, null),
null)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from jsonTable"),
Row(
Row("true", "92233720368547758070"),
"true",
"92233720368547758070") :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
Row(Seq("4", "5", "6"), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
Row("5", null)
)
}
test("Loading a JSON dataset prefersDecimal returns schema with float types as BigDecimal") {
val jsonDF = spark.read.option("prefersDecimal", "true").json(primitiveFieldAndType)
val expectedSchema = StructType(
StructField("bigInteger", DecimalType(20, 0), true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DecimalType(17, 16), true) ::
StructField("integer", LongType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(BigDecimal("92233720368547758070"),
true,
BigDecimal("1.7976931348623157"),
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Find compatible types even if inferred DecimalType is not capable of other IntegralType") {
val mixedIntegerAndDoubleRecords = Seq(
"""{"a": 3, "b": 1.1}""",
s"""{"a": 3.1, "b": 0.${"0" * 38}1}""").toDS()
val jsonDF = spark.read
.option("prefersDecimal", "true")
.json(mixedIntegerAndDoubleRecords)
// The values in `a` field will be decimals as they fit in decimal. For `b` field,
// they will be doubles as `1.0E-39D` does not fit.
val expectedSchema = StructType(
StructField("a", DecimalType(21, 1), true) ::
StructField("b", DoubleType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
checkAnswer(
jsonDF,
Row(BigDecimal("3"), 1.1D) ::
Row(BigDecimal("3.1"), 1.0E-39D) :: Nil
)
}
test("Infer big integers correctly even when it does not fit in decimal") {
val jsonDF = spark.read
.json(bigIntegerRecords)
    // The value in the `a` field will be a double as it does not fit in decimal. The value in
    // the `b` field will be a decimal because `92233720368547758070` fits in DecimalType(20, 0).
val expectedSchema = StructType(
StructField("a", DoubleType, true) ::
StructField("b", DecimalType(20, 0), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
checkAnswer(jsonDF, Row(1.0E38D, BigDecimal("92233720368547758070")))
}
test("Infer floating-point values correctly even when it does not fit in decimal") {
val jsonDF = spark.read
.option("prefersDecimal", "true")
.json(floatingValueRecords)
// The value in `a` field will be a double as it does not fit in decimal. For `b` field,
// it will be a decimal as `0.01` by having a precision equal to the scale.
val expectedSchema = StructType(
StructField("a", DoubleType, true) ::
StructField("b", DecimalType(2, 2), true):: Nil)
assert(expectedSchema === jsonDF.schema)
checkAnswer(jsonDF, Row(1.0E-39D, BigDecimal("0.01")))
val mergedJsonDF = spark.read
.option("prefersDecimal", "true")
.json(floatingValueRecords.union(bigIntegerRecords))
val expectedMergedSchema = StructType(
StructField("a", DoubleType, true) ::
StructField("b", DecimalType(22, 2), true):: Nil)
assert(expectedMergedSchema === mergedJsonDF.schema)
checkAnswer(
mergedJsonDF,
Row(1.0E-39D, BigDecimal("0.01")) ::
Row(1.0E38D, BigDecimal("92233720368547758070")) :: Nil
)
}
test("Loading a JSON dataset from a text file with SQL") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.toURI.toString
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
sql(
s"""
|CREATE TEMPORARY VIEW jsonTableSQL
|USING org.apache.spark.sql.json
|OPTIONS (
| path '$path'
|)
""".stripMargin)
checkAnswer(
sql("select * from jsonTableSQL"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Applying schemas") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val schema = StructType(
StructField("bigInteger", DecimalType.SYSTEM_DEFAULT, true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", IntegerType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
val jsonDF1 = spark.read.schema(schema).json(path)
assert(schema === jsonDF1.schema)
jsonDF1.createOrReplaceTempView("jsonTable1")
checkAnswer(
sql("select * from jsonTable1"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
null,
"this is a simple string.")
)
val jsonDF2 = spark.read.schema(schema).json(primitiveFieldAndType)
assert(schema === jsonDF2.schema)
jsonDF2.createOrReplaceTempView("jsonTable2")
checkAnswer(
sql("select * from jsonTable2"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Applying schemas with MapType") {
val schemaWithSimpleMap = StructType(
StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
val jsonWithSimpleMap = spark.read.schema(schemaWithSimpleMap).json(mapType1)
jsonWithSimpleMap.createOrReplaceTempView("jsonWithSimpleMap")
checkAnswer(
sql("select `map` from jsonWithSimpleMap"),
Row(Map("a" -> 1)) ::
Row(Map("b" -> 2)) ::
Row(Map("c" -> 3)) ::
Row(Map("c" -> 1, "d" -> 4)) ::
Row(Map("e" -> null)) :: Nil
)
withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
checkAnswer(
sql("select `map`['c'] from jsonWithSimpleMap"),
Row(null) ::
Row(null) ::
Row(3) ::
Row(1) ::
Row(null) :: Nil
)
}
val innerStruct = StructType(
StructField("field1", ArrayType(IntegerType, true), true) ::
StructField("field2", IntegerType, true) :: Nil)
val schemaWithComplexMap = StructType(
StructField("map", MapType(StringType, innerStruct, true), false) :: Nil)
val jsonWithComplexMap = spark.read.schema(schemaWithComplexMap).json(mapType2)
jsonWithComplexMap.createOrReplaceTempView("jsonWithComplexMap")
checkAnswer(
sql("select `map` from jsonWithComplexMap"),
Row(Map("a" -> Row(Seq(1, 2, 3, null), null))) ::
Row(Map("b" -> Row(null, 2))) ::
Row(Map("c" -> Row(Seq(), 4))) ::
Row(Map("c" -> Row(null, 3), "d" -> Row(Seq(null), null))) ::
Row(Map("e" -> null)) ::
Row(Map("f" -> Row(null, null))) :: Nil
)
withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
checkAnswer(
sql("select `map`['a'].field1, `map`['c'].field2 from jsonWithComplexMap"),
Row(Seq(1, 2, 3, null), null) ::
Row(null, null) ::
Row(null, 4) ::
Row(null, 3) ::
Row(null, null) ::
Row(null, null) :: Nil
)
}
}
test("SPARK-2096 Correctly parse dot notations") {
val jsonDF = spark.read.json(complexFieldAndType2)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
Row(true, "str1")
)
checkAnswer(
sql(
"""
|select complexArrayOfStruct[0].field1[1].inner2[0], complexArrayOfStruct[1].field2[0][1]
|from jsonTable
""".stripMargin),
Row("str2", 6)
)
}
test("SPARK-3390 Complex arrays") {
val jsonDF = spark.read.json(complexFieldAndType2)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql(
"""
|select arrayOfArray1[0][0][0], arrayOfArray1[1][0][1], arrayOfArray1[1][1][0]
|from jsonTable
""".stripMargin),
Row(5, 7, 8)
)
checkAnswer(
sql(
"""
|select arrayOfArray2[0][0][0].inner1, arrayOfArray2[1][0],
|arrayOfArray2[1][1][1].inner2[0], arrayOfArray2[2][0][0].inner3[0][0].inner4
|from jsonTable
""".stripMargin),
Row("str1", Nil, "str4", 2)
)
}
test("SPARK-3308 Read top level JSON arrays") {
val jsonDF = spark.read.json(jsonArray)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql(
"""
|select a, b, c
|from jsonTable
""".stripMargin),
Row("str_a_1", null, null) ::
Row("str_a_2", null, null) ::
Row(null, "str_b_3", null) ::
Row("str_a_4", "str_b_4", "str_c_4") :: Nil
)
}
test("Corrupt records: FAILFAST mode") {
// `FAILFAST` mode should throw an exception for corrupt records.
val exceptionOne = intercept[SparkException] {
spark.read
.option("mode", "FAILFAST")
.json(corruptRecords)
}.getMessage
assert(exceptionOne.contains(
"Malformed records are detected in schema inference. Parse Mode: FAILFAST."))
val exceptionTwo = intercept[SparkException] {
spark.read
.option("mode", "FAILFAST")
.schema("a string")
.json(corruptRecords)
.collect()
}.getMessage
assert(exceptionTwo.contains(
"Malformed records are detected in record parsing. Parse Mode: FAILFAST."))
}
test("Corrupt records: DROPMALFORMED mode") {
val schemaOne = StructType(
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
val schemaTwo = StructType(
StructField("a", StringType, true) :: Nil)
// `DROPMALFORMED` mode should skip corrupt records
val jsonDFOne = spark.read
.option("mode", "DROPMALFORMED")
.json(corruptRecords)
checkAnswer(
jsonDFOne,
Row("str_a_4", "str_b_4", "str_c_4") :: Nil
)
assert(jsonDFOne.schema === schemaOne)
val jsonDFTwo = spark.read
.option("mode", "DROPMALFORMED")
.schema(schemaTwo)
.json(corruptRecords)
checkAnswer(
jsonDFTwo,
Row("str_a_4") :: Nil)
assert(jsonDFTwo.schema === schemaTwo)
}
test("SPARK-19641: Additional corrupt records: DROPMALFORMED mode") {
val schema = new StructType().add("dummy", StringType)
// `DROPMALFORMED` mode should skip corrupt records
val jsonDF = spark.read
.option("mode", "DROPMALFORMED")
.json(additionalCorruptRecords)
checkAnswer(
jsonDF,
Row("test"))
assert(jsonDF.schema === schema)
}
test("Corrupt records: PERMISSIVE mode, without designated column for malformed records") {
val schema = StructType(
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
val jsonDF = spark.read.schema(schema).json(corruptRecords)
checkAnswer(
jsonDF.select($"a", $"b", $"c"),
Seq(
// Corrupted records are replaced with null
Row(null, null, null),
Row(null, null, null),
Row(null, null, null),
Row("str_a_4", "str_b_4", "str_c_4"),
Row(null, null, null))
)
}
test("Corrupt records: PERMISSIVE mode, with designated column for malformed records") {
// Test if we can query corrupt records.
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
val jsonDF = spark.read.json(corruptRecords)
val schema = StructType(
StructField("_unparsed", StringType, true) ::
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
assert(schema === jsonDF.schema)
      // In HiveContext, backticks should be used to access columns starting with an underscore.
checkAnswer(
jsonDF.select($"a", $"b", $"c", $"_unparsed"),
Row(null, null, null, "{") ::
Row(null, null, null, """{"a":1, b:2}""") ::
Row(null, null, null, """{"a":{, b:3}""") ::
Row("str_a_4", "str_b_4", "str_c_4", null) ::
Row(null, null, null, "]") :: Nil
)
checkAnswer(
jsonDF.filter($"_unparsed".isNull).select($"a", $"b", $"c"),
Row("str_a_4", "str_b_4", "str_c_4")
)
checkAnswer(
jsonDF.filter($"_unparsed".isNotNull).select($"_unparsed"),
Row("{") ::
Row("""{"a":1, b:2}""") ::
Row("""{"a":{, b:3}""") ::
Row("]") :: Nil
)
}
}
test("SPARK-13953 Rename the corrupt record field via option") {
val jsonDF = spark.read
.option("columnNameOfCorruptRecord", "_malformed")
.json(corruptRecords)
val schema = StructType(
StructField("_malformed", StringType, true) ::
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
assert(schema === jsonDF.schema)
checkAnswer(
jsonDF.selectExpr("a", "b", "c", "_malformed"),
Row(null, null, null, "{") ::
Row(null, null, null, """{"a":1, b:2}""") ::
Row(null, null, null, """{"a":{, b:3}""") ::
Row("str_a_4", "str_b_4", "str_c_4", null) ::
Row(null, null, null, "]") :: Nil
)
}
test("SPARK-4068: nulls in arrays") {
val jsonDF = spark.read.json(nullsInArrays)
jsonDF.createOrReplaceTempView("jsonTable")
val schema = StructType(
StructField("field1",
ArrayType(ArrayType(ArrayType(ArrayType(StringType, true), true), true), true), true) ::
StructField("field2",
ArrayType(ArrayType(
StructType(StructField("Test", LongType, true) :: Nil), true), true), true) ::
StructField("field3",
ArrayType(ArrayType(
StructType(StructField("Test", StringType, true) :: Nil), true), true), true) ::
StructField("field4",
ArrayType(ArrayType(ArrayType(LongType, true), true), true), true) :: Nil)
assert(schema === jsonDF.schema)
checkAnswer(
sql(
"""
|SELECT field1, field2, field3, field4
|FROM jsonTable
""".stripMargin),
Row(Seq(Seq(null), Seq(Seq(Seq("Test")))), null, null, null) ::
Row(null, Seq(null, Seq(Row(1))), null, null) ::
Row(null, null, Seq(Seq(null), Seq(Row("2"))), null) ::
Row(null, null, null, Seq(Seq(null, Seq(1, 2, 3)))) :: Nil
)
}
test("SPARK-4228 DataFrame to JSON") {
val schema1 = StructType(
StructField("f1", IntegerType, false) ::
StructField("f2", StringType, false) ::
StructField("f3", BooleanType, false) ::
StructField("f4", ArrayType(StringType), nullable = true) ::
StructField("f5", IntegerType, true) :: Nil)
val rowRDD1 = unparsedStrings.map { r =>
val values = r.split(",").map(_.trim)
val v5 = try values(3).toInt catch {
case _: NumberFormatException => null
}
Row(values(0).toInt, values(1), values(2).toBoolean, r.split(",").toList, v5)
}
val df1 = spark.createDataFrame(rowRDD1, schema1)
df1.createOrReplaceTempView("applySchema1")
val df2 = df1.toDF
val result = df2.toJSON.collect()
// scalastyle:off
    assert(result(0) === "{\"f1\":1,\"f2\":\"A1\",\"f3\":true,\"f4\":[\"1\",\" A1\",\" true\",\" null\"]}")
    assert(result(3) === "{\"f1\":4,\"f2\":\"D4\",\"f3\":true,\"f4\":[\"4\",\" D4\",\" true\",\" 2147483644\"],\"f5\":2147483644}")
// scalastyle:on
val schema2 = StructType(
StructField("f1", StructType(
StructField("f11", IntegerType, false) ::
StructField("f12", BooleanType, false) :: Nil), false) ::
StructField("f2", MapType(StringType, IntegerType, true), false) :: Nil)
val rowRDD2 = unparsedStrings.map { r =>
val values = r.split(",").map(_.trim)
val v4 = try values(3).toInt catch {
case _: NumberFormatException => null
}
Row(Row(values(0).toInt, values(2).toBoolean), Map(values(1) -> v4))
}
val df3 = spark.createDataFrame(rowRDD2, schema2)
df3.createOrReplaceTempView("applySchema2")
val df4 = df3.toDF
val result2 = df4.toJSON.collect()
    assert(result2(1) === "{\"f1\":{\"f11\":2,\"f12\":false},\"f2\":{\"B2\":null}}")
    assert(result2(3) === "{\"f1\":{\"f11\":4,\"f12\":true},\"f2\":{\"D4\":2147483644}}")
val jsonDF = spark.read.json(primitiveFieldAndType)
val primTable = spark.read.json(jsonDF.toJSON)
primTable.createOrReplaceTempView("primitiveTable")
checkAnswer(
sql("select * from primitiveTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157,
10,
21474836470L,
"this is a simple string.")
)
val complexJsonDF = spark.read.json(complexFieldAndType1)
val compTable = spark.read.json(complexJsonDF.toJSON)
compTable.createOrReplaceTempView("complexTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from complexTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from complexTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] " +
" from complexTable"),
Row(new java.math.BigDecimal("922337203685477580700"),
new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from complexTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from complexTable"),
Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
    // Access elements of an array inside a field of type ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from complexTable"),
Row("str2", 2.1)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from complexTable"),
Row(
Row(true, new java.math.BigDecimal("92233720368547758070")),
true,
new java.math.BigDecimal("92233720368547758070")) :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from complexTable"),
Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] " +
"from complexTable"),
Row(5, null)
)
}
test("Dataset toJSON doesn't construct rdd") {
val containsRDD = spark.emptyDataFrame.toJSON.queryExecution.logical.find {
case ExternalRDD(_, _) => true
case _ => false
}
assert(containsRDD.isEmpty, "Expected logical plan of toJSON to not contain an RDD")
}
test("JSONRelation equality test") {
withTempPath(dir => {
val path = dir.getCanonicalFile.toURI.toString
sparkContext.parallelize(1 to 100)
.map(i => s"""{"a": 1, "b": "str$i"}""").saveAsTextFile(path)
val d1 = DataSource(
spark,
userSpecifiedSchema = None,
partitionColumns = Array.empty[String],
bucketSpec = None,
className = classOf[JsonFileFormat].getCanonicalName,
options = Map("path" -> path)).resolveRelation()
val d2 = DataSource(
spark,
userSpecifiedSchema = None,
partitionColumns = Array.empty[String],
bucketSpec = None,
className = classOf[JsonFileFormat].getCanonicalName,
options = Map("path" -> path)).resolveRelation()
assert(d1 === d2)
})
}
test("SPARK-6245 JsonInferSchema.infer on empty RDD") {
// This is really a test that it doesn't throw an exception
val options = new JSONOptions(Map.empty[String, String], "UTC")
val emptySchema = new JsonInferSchema(options).infer(
empty.rdd,
CreateJacksonParser.string)
assert(StructType(Seq()) === emptySchema)
}
test("SPARK-7565 MapType in JsonRDD") {
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
withTempDir { dir =>
val schemaWithSimpleMap = StructType(
StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
val df = spark.read.schema(schemaWithSimpleMap).json(mapType1)
val path = dir.getAbsolutePath
df.write.mode("overwrite").parquet(path)
// order of MapType is not defined
assert(spark.read.parquet(path).count() == 5)
val df2 = spark.read.json(corruptRecords)
df2.write.mode("overwrite").parquet(path)
checkAnswer(spark.read.parquet(path), df2.collect())
}
}
}
test("SPARK-8093 Erase empty structs") {
val options = new JSONOptions(Map.empty[String, String], "UTC")
val emptySchema = new JsonInferSchema(options).infer(
emptyRecords.rdd,
CreateJacksonParser.string)
assert(StructType(Seq()) === emptySchema)
}
test("JSON with Partition") {
def makePartition(rdd: RDD[String], parent: File, partName: String, partValue: Any): File = {
val p = new File(parent, s"$partName=${partValue.toString}")
rdd.saveAsTextFile(p.getCanonicalPath)
p
}
withTempPath(root => {
val d1 = new File(root, "d1=1")
// root/dt=1/col1=abc
val p1_col1 = makePartition(
sparkContext.parallelize(2 to 5).map(i => s"""{"a": 1, "b": "str$i"}"""),
d1,
"col1",
"abc")
// root/dt=1/col1=abd
val p2 = makePartition(
sparkContext.parallelize(6 to 10).map(i => s"""{"a": 1, "b": "str$i"}"""),
d1,
"col1",
"abd")
spark.read.json(root.getAbsolutePath).createOrReplaceTempView("test_myjson_with_part")
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abc'"), Row(4))
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abd'"), Row(5))
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1"), Row(9))
})
}
test("backward compatibility") {
    // This test makes sure our JSON support can read JSON data generated by previous versions
    // of Spark, both through the toJSON method and through the JSON data source.
// The data is generated by the following program.
// Here are a few notes:
// - Spark 1.5.0 cannot save timestamp data. So, we manually added timestamp field (col13)
// in the JSON object.
// - For Spark before 1.5.1, we do not generate UDTs. So, we manually added the UDT value to
// JSON objects generated by those Spark versions (col17).
// - If the type is NullType, we do not write data out.
// Create the schema.
val struct =
StructType(
StructField("f1", FloatType, true) ::
StructField("f2", ArrayType(BooleanType), true) :: Nil)
val dataTypes =
Seq(
StringType, BinaryType, NullType, BooleanType,
ByteType, ShortType, IntegerType, LongType,
FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
DateType, TimestampType,
ArrayType(IntegerType), MapType(StringType, LongType), struct,
new MyDenseVectorUDT())
val fields = dataTypes.zipWithIndex.map { case (dataType, index) =>
StructField(s"col$index", dataType, nullable = true)
}
val schema = StructType(fields)
val constantValues =
Seq(
"a string in binary".getBytes(StandardCharsets.UTF_8),
null,
true,
1.toByte,
2.toShort,
3,
Long.MaxValue,
0.25.toFloat,
0.75,
new java.math.BigDecimal(s"1234.23456"),
new java.math.BigDecimal(s"1.23456"),
java.sql.Date.valueOf("2015-01-01"),
java.sql.Timestamp.valueOf("2015-01-01 23:50:59.123"),
Seq(2, 3, 4),
Map("a string" -> 2000L),
Row(4.75.toFloat, Seq(false, true)),
new MyDenseVector(Array(0.25, 2.25, 4.25)))
val data =
Row.fromSeq(Seq("Spark " + spark.sparkContext.version) ++ constantValues) :: Nil
// Data generated by previous versions.
// scalastyle:off
val existingJSONData =
"""{"col0":"Spark 1.2.2","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"16436","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" :: Nil
// scalastyle:on
// Generate data for the current version.
val df = spark.createDataFrame(spark.sparkContext.parallelize(data, 1), schema)
withTempPath { path =>
df.write.format("json").mode("overwrite").save(path.getCanonicalPath)
      // df.toJSON will convert internal rows to external rows first and then generate
      // JSON objects, while df.write.format("json") will write internal rows directly.
val allJSON =
existingJSONData ++
df.toJSON.collect() ++
sparkContext.textFile(path.getCanonicalPath).collect()
Utils.deleteRecursively(path)
sparkContext.parallelize(allJSON, 1).saveAsTextFile(path.getCanonicalPath)
// Read data back with the schema specified.
val col0Values =
Seq(
"Spark 1.2.2",
"Spark 1.3.1",
"Spark 1.3.1",
"Spark 1.4.1",
"Spark 1.4.1",
"Spark 1.5.0",
"Spark 1.5.0",
"Spark " + spark.sparkContext.version,
"Spark " + spark.sparkContext.version)
val expectedResult = col0Values.map { v =>
Row.fromSeq(Seq(v) ++ constantValues)
}
checkAnswer(
spark.read.format("json").schema(schema).load(path.getCanonicalPath),
expectedResult
)
}
}
test("SPARK-11544 test pathfilter") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val df = spark.range(2)
df.write.json(path + "/p=1")
df.write.json(path + "/p=2")
assert(spark.read.json(path).count() === 4)
val extraOptions = Map(
"mapred.input.pathFilter.class" -> classOf[TestFileFilter].getName,
"mapreduce.input.pathFilter.class" -> classOf[TestFileFilter].getName
)
assert(spark.read.options(extraOptions).json(path).count() === 2)
}
}
test("SPARK-12057 additional corrupt records do not throw exceptions") {
// Test if we can query corrupt records.
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
withTempView("jsonTable") {
val schema = StructType(
StructField("_unparsed", StringType, true) ::
StructField("dummy", StringType, true) :: Nil)
{
// We need to make sure we can infer the schema.
val jsonDF = spark.read.json(additionalCorruptRecords)
assert(jsonDF.schema === schema)
}
{
val jsonDF = spark.read.schema(schema).json(additionalCorruptRecords)
jsonDF.createOrReplaceTempView("jsonTable")
          // In HiveContext, backticks should be used to access columns starting with an underscore.
checkAnswer(
sql(
"""
|SELECT dummy, _unparsed
|FROM jsonTable
""".stripMargin),
Row("test", null) ::
Row(null, """[1,2,3]""") ::
Row(null, """":"test", "a":1}""") ::
Row(null, """42""") ::
Row(null, """ ","ian":"test"}""") :: Nil
)
}
}
}
}
test("Parse JSON rows having an array type and a struct type in the same field.") {
withTempDir { dir =>
      // Make sure the output path does not exist before writing; withTempDir still cleans it up.
      dir.delete()
val path = dir.getCanonicalPath
arrayAndStructRecords.map(record => record.replaceAll("\\n", " ")).write.text(path)
val schema =
StructType(
StructField("a", StructType(
StructField("b", StringType) :: Nil
)) :: Nil)
val jsonDF = spark.read.schema(schema).json(path)
assert(jsonDF.count() == 2)
}
}
test("SPARK-12872 Support to specify the option for compression codec") {
withTempDir { dir =>
      // Make sure the output path does not exist before writing; withTempDir still cleans it up.
      dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val jsonDF = spark.read.json(path)
val jsonDir = new File(dir, "json").getCanonicalPath
jsonDF.coalesce(1).write
.format("json")
.option("compression", "gZiP")
.save(jsonDir)
val compressedFiles = new File(jsonDir).listFiles()
assert(compressedFiles.exists(_.getName.endsWith(".json.gz")))
val jsonCopy = spark.read
.format("json")
.load(jsonDir)
assert(jsonCopy.count == jsonDF.count)
val jsonCopySome = jsonCopy.selectExpr("string", "long", "boolean")
val jsonDFSome = jsonDF.selectExpr("string", "long", "boolean")
checkAnswer(jsonCopySome, jsonDFSome)
}
}
test("SPARK-13543 Write the output as uncompressed via option()") {
val extraOptions = Map[String, String](
"mapreduce.output.fileoutputformat.compress" -> "true",
"mapreduce.output.fileoutputformat.compress.type" -> CompressionType.BLOCK.toString,
"mapreduce.output.fileoutputformat.compress.codec" -> classOf[GzipCodec].getName,
"mapreduce.map.output.compress" -> "true",
"mapreduce.map.output.compress.codec" -> classOf[GzipCodec].getName
)
withTempDir { dir =>
      // Make sure the output path does not exist before writing; withTempDir still cleans it up.
      dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).write.text(path)
val jsonDF = spark.read.json(path)
val jsonDir = new File(dir, "json").getCanonicalPath
jsonDF.coalesce(1).write
.format("json")
.option("compression", "none")
.options(extraOptions)
.save(jsonDir)
val compressedFiles = new File(jsonDir).listFiles()
assert(compressedFiles.exists(!_.getName.endsWith(".json.gz")))
val jsonCopy = spark.read
.format("json")
.options(extraOptions)
.load(jsonDir)
assert(jsonCopy.count == jsonDF.count)
val jsonCopySome = jsonCopy.selectExpr("string", "long", "boolean")
val jsonDFSome = jsonDF.selectExpr("string", "long", "boolean")
checkAnswer(jsonCopySome, jsonDFSome)
}
}
test("Casting long as timestamp") {
withTempView("jsonTable") {
val schema = (new StructType).add("ts", TimestampType)
val jsonDF = spark.read.schema(schema).json(timestampAsLong)
jsonDF.createOrReplaceTempView("jsonTable")
checkAnswer(
sql("select ts from jsonTable"),
Row(java.sql.Timestamp.valueOf("2016-01-02 03:04:05"))
)
}
}
test("wide nested json table") {
val nested = (1 to 100).map { i =>
s"""
|"c$i": $i
""".stripMargin
}.mkString(", ")
val json = s"""
|{"a": [{$nested}], "b": [{$nested}]}
""".stripMargin
val df = spark.read.json(Seq(json).toDS())
assert(df.schema.size === 2)
df.collect()
}
test("Write dates correctly with dateFormat option") {
val customSchema = new StructType(Array(StructField("date", DateType, true)))
withTempDir { dir =>
// With dateFormat option.
val datesWithFormatPath = s"${dir.getCanonicalPath}/datesWithFormat.json"
val datesWithFormat = spark.read
.schema(customSchema)
.option("dateFormat", "dd/MM/yyyy HH:mm")
.json(datesRecords)
datesWithFormat.write
.format("json")
.option("dateFormat", "yyyy/MM/dd")
.save(datesWithFormatPath)
// This will load back the dates as string.
val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
val stringDatesWithFormat = spark.read
.schema(stringSchema)
.json(datesWithFormatPath)
val expectedStringDatesWithFormat = Seq(
Row("2015/08/26"),
Row("2014/10/27"),
Row("2016/01/28"))
checkAnswer(stringDatesWithFormat, expectedStringDatesWithFormat)
}
}
test("Write timestamps correctly with timestampFormat option") {
val customSchema = new StructType(Array(StructField("date", TimestampType, true)))
withTempDir { dir =>
      // With timestampFormat option.
val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
val timestampsWithFormat = spark.read
.schema(customSchema)
.option("timestampFormat", "dd/MM/yyyy HH:mm")
.json(datesRecords)
timestampsWithFormat.write
.format("json")
.option("timestampFormat", "yyyy/MM/dd HH:mm")
.save(timestampsWithFormatPath)
// This will load back the timestamps as string.
val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
val stringTimestampsWithFormat = spark.read
.schema(stringSchema)
.json(timestampsWithFormatPath)
val expectedStringDatesWithFormat = Seq(
Row("2015/08/26 18:00"),
Row("2014/10/27 18:30"),
Row("2016/01/28 20:00"))
checkAnswer(stringTimestampsWithFormat, expectedStringDatesWithFormat)
}
}
test("Write timestamps correctly with timestampFormat option and timeZone option") {
val customSchema = new StructType(Array(StructField("date", TimestampType, true)))
withTempDir { dir =>
      // With timestampFormat option and timeZone option.
val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
val timestampsWithFormat = spark.read
.schema(customSchema)
.option("timestampFormat", "dd/MM/yyyy HH:mm")
.json(datesRecords)
timestampsWithFormat.write
.format("json")
.option("timestampFormat", "yyyy/MM/dd HH:mm")
.option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
.save(timestampsWithFormatPath)
// This will load back the timestamps as string.
val stringSchema = StructType(StructField("date", StringType, true) :: Nil)
val stringTimestampsWithFormat = spark.read
.schema(stringSchema)
.json(timestampsWithFormatPath)
val expectedStringDatesWithFormat = Seq(
Row("2015/08/27 01:00"),
Row("2014/10/28 01:30"),
Row("2016/01/29 04:00"))
checkAnswer(stringTimestampsWithFormat, expectedStringDatesWithFormat)
val readBack = spark.read
.schema(customSchema)
.option("timestampFormat", "yyyy/MM/dd HH:mm")
.option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
.json(timestampsWithFormatPath)
checkAnswer(readBack, timestampsWithFormat)
}
}
test("SPARK-18433: Improve DataSource option keys to be more case-insensitive") {
val records = Seq("""{"a": 3, "b": 1.1}""", """{"a": 3.1, "b": 0.000001}""").toDS()
val schema = StructType(
StructField("a", DecimalType(21, 1), true) ::
StructField("b", DecimalType(7, 6), true) :: Nil)
val df1 = spark.read.option("prefersDecimal", "true").json(records)
assert(df1.schema == schema)
val df2 = spark.read.option("PREfersdecimaL", "true").json(records)
assert(df2.schema == schema)
}
test("SPARK-18352: Parse normal multi-line JSON files (compressed)") {
withTempPath { dir =>
val path = dir.getCanonicalPath
primitiveFieldAndType
.toDF("value")
.write
.option("compression", "GzIp")
.text(path)
assert(new File(path).listFiles().exists(_.getName.endsWith(".gz")))
val jsonDF = spark.read.option("multiLine", true).json(path)
val jsonDir = new File(dir, "json").getCanonicalPath
jsonDF.coalesce(1).write
.option("compression", "gZiP")
.json(jsonDir)
assert(new File(jsonDir).listFiles().exists(_.getName.endsWith(".json.gz")))
val originalData = spark.read.json(primitiveFieldAndType)
checkAnswer(jsonDF, originalData)
checkAnswer(spark.read.schema(originalData.schema).json(jsonDir), originalData)
}
}
test("SPARK-18352: Parse normal multi-line JSON files (uncompressed)") {
withTempPath { dir =>
val path = dir.getCanonicalPath
primitiveFieldAndType
.toDF("value")
.write
.text(path)
val jsonDF = spark.read.option("multiLine", true).json(path)
val jsonDir = new File(dir, "json").getCanonicalPath
jsonDF.coalesce(1).write.json(jsonDir)
val compressedFiles = new File(jsonDir).listFiles()
assert(compressedFiles.exists(_.getName.endsWith(".json")))
val originalData = spark.read.json(primitiveFieldAndType)
checkAnswer(jsonDF, originalData)
checkAnswer(spark.read.schema(originalData.schema).json(jsonDir), originalData)
}
}
test("SPARK-18352: Expect one JSON document per file") {
    // The JSON parser terminates as soon as it sees a matching END_OBJECT or END_ARRAY token.
    // This might not be the optimal behavior, but this test verifies that only the first value
    // is parsed and the rest are discarded.
    // Alternatively, the parser could continue parsing the following objects, which may further
    // reduce allocations by skipping the line reader entirely.
withTempPath { dir =>
val path = dir.getCanonicalPath
spark
.createDataFrame(Seq(Tuple1("{}{invalid}")))
.coalesce(1)
.write
.text(path)
val jsonDF = spark.read.option("multiLine", true).json(path)
// no corrupt record column should be created
assert(jsonDF.schema === StructType(Seq()))
// only the first object should be read
assert(jsonDF.count() === 1)
}
}
test("SPARK-18352: Handle multi-line corrupt documents (PERMISSIVE)") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val corruptRecordCount = additionalCorruptRecords.count().toInt
assert(corruptRecordCount === 5)
additionalCorruptRecords
.toDF("value")
// this is the minimum partition count that avoids hash collisions
.repartition(corruptRecordCount * 4, F.hash($"value"))
.write
.text(path)
val jsonDF = spark.read.option("multiLine", true).option("mode", "PERMISSIVE").json(path)
assert(jsonDF.count() === corruptRecordCount)
assert(jsonDF.schema === new StructType()
.add("_corrupt_record", StringType)
.add("dummy", StringType))
val counts = jsonDF
.join(
additionalCorruptRecords.toDF("value"),
          F.regexp_replace($"_corrupt_record", "(^\\s+|\\s+$)", "") === F.trim($"value"),
"outer")
.agg(
F.count($"dummy").as("valid"),
F.count($"_corrupt_record").as("corrupt"),
F.count("*").as("count"))
checkAnswer(counts, Row(1, 4, 6))
}
}
test("SPARK-19641: Handle multi-line corrupt documents (DROPMALFORMED)") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val corruptRecordCount = additionalCorruptRecords.count().toInt
assert(corruptRecordCount === 5)
additionalCorruptRecords
.toDF("value")
// this is the minimum partition count that avoids hash collisions
.repartition(corruptRecordCount * 4, F.hash($"value"))
.write
.text(path)
val jsonDF = spark.read.option("multiLine", true).option("mode", "DROPMALFORMED").json(path)
checkAnswer(jsonDF, Seq(Row("test")))
}
}
test("SPARK-18352: Handle multi-line corrupt documents (FAILFAST)") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val corruptRecordCount = additionalCorruptRecords.count().toInt
assert(corruptRecordCount === 5)
additionalCorruptRecords
.toDF("value")
// this is the minimum partition count that avoids hash collisions
.repartition(corruptRecordCount * 4, F.hash($"value"))
.write
.text(path)
val schema = new StructType().add("dummy", StringType)
// `FAILFAST` mode should throw an exception for corrupt records.
val exceptionOne = intercept[SparkException] {
spark.read
.option("multiLine", true)
.option("mode", "FAILFAST")
.json(path)
}
assert(exceptionOne.getMessage.contains("Malformed records are detected in schema " +
"inference. Parse Mode: FAILFAST."))
val exceptionTwo = intercept[SparkException] {
spark.read
.option("multiLine", true)
.option("mode", "FAILFAST")
.schema(schema)
.json(path)
.collect()
}
assert(exceptionTwo.getMessage.contains("Malformed records are detected in record " +
"parsing. Parse Mode: FAILFAST."))
}
}
test("Throw an exception if a `columnNameOfCorruptRecord` field violates requirements") {
val columnNameOfCorruptRecord = "_unparsed"
val schema = StructType(
StructField(columnNameOfCorruptRecord, IntegerType, true) ::
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
val errMsg = intercept[AnalysisException] {
spark.read
.option("mode", "Permissive")
.option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
.schema(schema)
.json(corruptRecords)
}.getMessage
assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
    // We use `PERMISSIVE` mode by default if an invalid mode string is given.
withTempPath { dir =>
val path = dir.getCanonicalPath
corruptRecords.toDF("value").write.text(path)
val errMsg = intercept[AnalysisException] {
spark.read
.option("mode", "permm")
.option("columnNameOfCorruptRecord", columnNameOfCorruptRecord)
.schema(schema)
.json(path)
.collect
}.getMessage
assert(errMsg.startsWith("The field for corrupt records must be string type and nullable"))
}
}
test("SPARK-18772: Parse special floats correctly") {
val jsons = Seq(
"""{"a": "NaN"}""",
"""{"a": "Infinity"}""",
"""{"a": "-Infinity"}""")
// positive cases
val checks: Seq[Double => Boolean] = Seq(
_.isNaN,
_.isPosInfinity,
_.isNegInfinity)
Seq(FloatType, DoubleType).foreach { dt =>
jsons.zip(checks).foreach { case (json, check) =>
val ds = spark.read
.schema(StructType(Seq(StructField("a", dt))))
.json(Seq(json).toDS())
.select($"a".cast(DoubleType)).as[Double]
assert(check(ds.first()))
}
}
// negative cases
Seq(FloatType, DoubleType).foreach { dt =>
val lowerCasedJsons = jsons.map(_.toLowerCase(Locale.ROOT))
      // The special floats are case-sensitive, so the cases below throw exceptions.
lowerCasedJsons.foreach { lowerCasedJson =>
val e = intercept[SparkException] {
spark.read
.option("mode", "FAILFAST")
.schema(StructType(Seq(StructField("a", dt))))
.json(Seq(lowerCasedJson).toDS())
.collect()
}
assert(e.getMessage.contains("Cannot parse"))
}
}
}
test("SPARK-21610: Corrupt records are not handled properly when creating a dataframe " +
"from a file") {
withTempPath { dir =>
val path = dir.getCanonicalPath
val data =
"""{"field": 1}
|{"field": 2}
|{"field": "3"}""".stripMargin
Seq(data).toDF().repartition(1).write.text(path)
val schema = new StructType().add("field", ByteType).add("_corrupt_record", StringType)
// negative cases
val msg = intercept[AnalysisException] {
spark.read.schema(schema).json(path).select("_corrupt_record").collect()
}.getMessage
assert(msg.contains("only include the internal corrupt record column"))
// workaround
val df = spark.read.schema(schema).json(path).cache()
assert(df.filter($"_corrupt_record".isNotNull).count() == 1)
assert(df.filter($"_corrupt_record".isNull).count() == 2)
checkAnswer(
df.select("_corrupt_record"),
        Row(null) :: Row(null) :: Row("{\"field\": \"3\"}") :: Nil
)
}
}
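  // Runs one test per separator: reads records delimited by `lineSep`, writes with the same
  // separator and checks the raw file content, then does a full write/read round trip.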
def testLineSeparator(lineSep: String): Unit = {
test(s"SPARK-21289: Support line separator - lineSep: '$lineSep'") {
// Read
val data =
s"""
| {"f":
|"a", "f0": 1}$lineSep{"f":
|
|"c", "f0": 2}$lineSep{"f": "d", "f0": 3}
""".stripMargin
val dataWithTrailingLineSep = s"$data$lineSep"
Seq(data, dataWithTrailingLineSep).foreach { lines =>
withTempPath { path =>
Files.write(path.toPath, lines.getBytes(StandardCharsets.UTF_8))
val df = spark.read.option("lineSep", lineSep).json(path.getAbsolutePath)
val expectedSchema =
StructType(StructField("f", StringType) :: StructField("f0", LongType) :: Nil)
checkAnswer(df, Seq(("a", 1), ("c", 2), ("d", 3)).toDF())
assert(df.schema === expectedSchema)
}
}
// Write
withTempPath { path =>
Seq("a", "b", "c").toDF("value").coalesce(1)
.write.option("lineSep", lineSep).json(path.getAbsolutePath)
val partFile = TestUtils.recursiveList(path).filter(f => f.getName.startsWith("part-")).head
val readBack = new String(Files.readAllBytes(partFile.toPath), StandardCharsets.UTF_8)
assert(
readBack === s"""{"value":"a"}$lineSep{"value":"b"}$lineSep{"value":"c"}$lineSep""")
}
// Roundtrip
withTempPath { path =>
val df = Seq("a", "b", "c").toDF()
df.write.option("lineSep", lineSep).json(path.getAbsolutePath)
val readBack = spark.read.option("lineSep", lineSep).json(path.getAbsolutePath)
checkAnswer(df, readBack)
}
}
}
// scalastyle:off nonascii
Seq("|", "^", "::", "!!!@3", 0x1E.toChar.toString, "아").foreach { lineSep =>
testLineSeparator(lineSep)
}
// scalastyle:on nonascii
  test("""SPARK-21289: Support line separator - default value \r, \r\n and \n""") {
    val data =
      "{\"f\": \"a\", \"f0\": 1}\r{\"f\": \"c\", \"f0\": 2}\r\n{\"f\": \"d\", \"f0\": 3}\n"
withTempPath { path =>
Files.write(path.toPath, data.getBytes(StandardCharsets.UTF_8))
val df = spark.read.json(path.getAbsolutePath)
val expectedSchema =
StructType(StructField("f", StringType) :: StructField("f0", LongType) :: Nil)
checkAnswer(df, Seq(("a", 1), ("c", 2), ("d", 3)).toDF())
assert(df.schema === expectedSchema)
}
}
test("SPARK-23849: schema inferring touches less data if samplingRatio < 1.0") {
    // Set default values for the DataSource parameters to make sure
    // that the whole test file is mapped to only one partition. This will guarantee
    // reliable sampling of the input file.
withSQLConf(
SQLConf.FILES_MAX_PARTITION_BYTES.key -> (128 * 1024 * 1024).toString,
SQLConf.FILES_OPEN_COST_IN_BYTES.key -> (4 * 1024 * 1024).toString
)(withTempPath { path =>
val ds = sampledTestData.coalesce(1)
ds.write.text(path.getAbsolutePath)
val readback = spark.read.option("samplingRatio", 0.1).json(path.getCanonicalPath)
assert(readback.schema == new StructType().add("f1", LongType))
})
}
test("SPARK-23849: usage of samplingRatio while parsing a dataset of strings") {
val ds = sampledTestData.coalesce(1)
val readback = spark.read.option("samplingRatio", 0.1).json(ds)
assert(readback.schema == new StructType().add("f1", LongType))
}
test("SPARK-23849: samplingRatio is out of the range (0, 1.0]") {
val ds = spark.range(0, 100, 1, 1).map(_.toString)
val errorMsg0 = intercept[IllegalArgumentException] {
spark.read.option("samplingRatio", -1).json(ds)
}.getMessage
assert(errorMsg0.contains("samplingRatio (-1.0) should be greater than 0"))
val errorMsg1 = intercept[IllegalArgumentException] {
spark.read.option("samplingRatio", 0).json(ds)
}.getMessage
assert(errorMsg1.contains("samplingRatio (0.0) should be greater than 0"))
val sampled = spark.read.option("samplingRatio", 1.0).json(ds)
assert(sampled.count() == ds.count())
}
test("SPARK-23723: json in UTF-16 with BOM") {
val fileName = "test-data/utf16WithBOM.json"
val schema = new StructType().add("firstName", StringType).add("lastName", StringType)
val jsonDF = spark.read.schema(schema)
.option("multiline", "true")
.option("encoding", "UTF-16")
.json(testFile(fileName))
checkAnswer(jsonDF, Seq(Row("Chris", "Baird"), Row("Doug", "Rood")))
}
test("SPARK-23723: multi-line json in UTF-32BE with BOM") {
val fileName = "test-data/utf32BEWithBOM.json"
val schema = new StructType().add("firstName", StringType).add("lastName", StringType)
val jsonDF = spark.read.schema(schema)
.option("multiline", "true")
.json(testFile(fileName))
checkAnswer(jsonDF, Seq(Row("Chris", "Baird")))
}
test("SPARK-23723: Use user's encoding in reading of multi-line json in UTF-16LE") {
val fileName = "test-data/utf16LE.json"
val schema = new StructType().add("firstName", StringType).add("lastName", StringType)
val jsonDF = spark.read.schema(schema)
.option("multiline", "true")
.options(Map("encoding" -> "UTF-16LE"))
.json(testFile(fileName))
checkAnswer(jsonDF, Seq(Row("Chris", "Baird")))
}
test("SPARK-23723: Unsupported encoding name") {
val invalidCharset = "UTF-128"
val exception = intercept[UnsupportedCharsetException] {
spark.read
        .options(Map("encoding" -> invalidCharset, "lineSep" -> "\n"))
.json(testFile("test-data/utf16LE.json"))
.count()
}
assert(exception.getMessage.contains(invalidCharset))
}
test("SPARK-23723: checking that the encoding option is case agnostic") {
val fileName = "test-data/utf16LE.json"
val schema = new StructType().add("firstName", StringType).add("lastName", StringType)
val jsonDF = spark.read.schema(schema)
.option("multiline", "true")
.options(Map("encoding" -> "uTf-16lE"))
.json(testFile(fileName))
checkAnswer(jsonDF, Seq(Row("Chris", "Baird")))
}
test("SPARK-23723: specified encoding is not matched to actual encoding") {
val fileName = "test-data/utf16LE.json"
val schema = new StructType().add("firstName", StringType).add("lastName", StringType)
val exception = intercept[SparkException] {
spark.read.schema(schema)
.option("mode", "FAILFAST")
.option("multiline", "true")
.options(Map("encoding" -> "UTF-16BE"))
.json(testFile(fileName))
.count()
}
assertExceptionMessage(exception, "Malformed records are detected in record parsing")
}
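  // Decodes every *.json part file under `pathToJsonFiles` with `expectedEncoding` and checks
  // that the concatenated, trimmed content equals `expectedContent`.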
def checkEncoding(expectedEncoding: String, pathToJsonFiles: String,
expectedContent: String): Unit = {
val jsonFiles = new File(pathToJsonFiles)
.listFiles()
.filter(_.isFile)
.filter(_.getName.endsWith("json"))
val actualContent = jsonFiles.map { file =>
new String(Files.readAllBytes(file.toPath), expectedEncoding)
}.mkString.trim
assert(actualContent == expectedContent)
}
test("SPARK-23723: save json in UTF-32BE") {
val encoding = "UTF-32BE"
withTempPath { path =>
val df = spark.createDataset(Seq(("Dog", 42)))
df.write
.options(Map("encoding" -> encoding))
.json(path.getCanonicalPath)
checkEncoding(
expectedEncoding = encoding,
pathToJsonFiles = path.getCanonicalPath,
expectedContent = """{"_1":"Dog","_2":42}""")
}
}
test("SPARK-23723: save json in default encoding - UTF-8") {
withTempPath { path =>
val df = spark.createDataset(Seq(("Dog", 42)))
df.write.json(path.getCanonicalPath)
checkEncoding(
expectedEncoding = "UTF-8",
pathToJsonFiles = path.getCanonicalPath,
expectedContent = """{"_1":"Dog","_2":42}""")
}
}
test("SPARK-23723: wrong output encoding") {
val encoding = "UTF-128"
val exception = intercept[SparkException] {
withTempPath { path =>
val df = spark.createDataset(Seq((0)))
df.write
.options(Map("encoding" -> encoding))
.json(path.getCanonicalPath)
}
}
val baos = new ByteArrayOutputStream()
val ps = new PrintStream(baos, true, StandardCharsets.UTF_8.name())
exception.printStackTrace(ps)
ps.flush()
assert(baos.toString.contains(
"java.nio.charset.UnsupportedCharsetException: UTF-128"))
}
test("SPARK-23723: read back json in UTF-16LE") {
    val options = Map("encoding" -> "UTF-16LE", "lineSep" -> "\n")
withTempPath { path =>
val ds = spark.createDataset(Seq(("a", 1), ("b", 2), ("c", 3))).repartition(2)
ds.write.options(options).json(path.getCanonicalPath)
val readBack = spark
.read
.options(options)
.json(path.getCanonicalPath)
checkAnswer(readBack.toDF(), ds.toDF())
}
}
test("SPARK-23723: write json in UTF-16/32 with multiline off") {
Seq("UTF-16", "UTF-32").foreach { encoding =>
withTempPath { path =>
val ds = spark.createDataset(Seq(("a", 1))).repartition(1)
ds.write
.option("encoding", encoding)
.option("multiline", false)
.json(path.getCanonicalPath)
val jsonFiles = path.listFiles().filter(_.getName.endsWith("json"))
jsonFiles.foreach { jsonFile =>
val readback = Files.readAllBytes(jsonFile.toPath)
          val expected = ("""{"_1":"a","_2":1}""" + "\n").getBytes(Charset.forName(encoding))
assert(readback === expected)
}
}
}
}
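  // Parameterized round trip for SPARK-23724: writes two records separated by `lineSep` in the
  // given `encoding`, then reads them back either with an explicit schema or via inference.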
def checkReadJson(lineSep: String, encoding: String, inferSchema: Boolean, id: Int): Unit = {
test(s"SPARK-23724: checks reading json in ${encoding} #${id}") {
val schema = new StructType().add("f1", StringType).add("f2", IntegerType)
withTempPath { path =>
val records = List(("a", 1), ("b", 2))
val data = records
.map(rec => s"""{"f1":"${rec._1}", "f2":${rec._2}}""".getBytes(encoding))
.reduce((a1, a2) => a1 ++ lineSep.getBytes(encoding) ++ a2)
val os = new FileOutputStream(path)
os.write(data)
os.close()
val reader = if (inferSchema) {
spark.read
} else {
spark.read.schema(schema)
}
val readBack = reader
.option("encoding", encoding)
.option("lineSep", lineSep)
.json(path.getCanonicalPath)
checkAnswer(readBack, records.map(rec => Row(rec._1, rec._2)))
}
}
}
// scalastyle:off nonascii
List(
(0, "|", "UTF-8", false),
(1, "^", "UTF-16BE", true),
(2, "::", "ISO-8859-1", true),
(3, "!!!@3", "UTF-32LE", false),
(4, 0x1E.toChar.toString, "UTF-8", true),
(5, "아", "UTF-32BE", false),
(6, "куку", "CP1251", true),
(7, "sep", "utf-8", false),
    (8, "\r\n", "UTF-16LE", false),
    (9, "\r\n", "utf-16be", true),
    (10, "\u000d\u000a", "UTF-32BE", false),
    (11, "\u000a\u000d", "UTF-8", true),
(12, "===", "US-ASCII", false),
(13, "$^+", "utf-32le", true)
).foreach {
case (testNum, sep, encoding, inferSchema) => checkReadJson(sep, encoding, inferSchema, testNum)
}
// scalastyle:on nonascii
  test("SPARK-23724: lineSep should be set if encoding is different from UTF-8") {
val encoding = "UTF-16LE"
val exception = intercept[IllegalArgumentException] {
spark.read
.options(Map("encoding" -> encoding))
.json(testFile("test-data/utf16LE.json"))
.count()
}
assert(exception.getMessage.contains(
s"""The lineSep option must be specified for the $encoding encoding"""))
}
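  // Input that starts with null bytes; used by the SPARK-23094 tests to check that such
  // records end up in the corrupt-record column under PERMISSIVE mode.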
  private val badJson = "\u0000\u0000\u0000A\u0001AAA"
test("SPARK-23094: permissively read JSON file with leading nulls when multiLine is enabled") {
withTempPath { tempDir =>
val path = tempDir.getAbsolutePath
Seq(badJson + """{"a":1}""").toDS().write.text(path)
      val expected = s"""${badJson}{"a":1}\n"""
val schema = new StructType().add("a", IntegerType).add("_corrupt_record", StringType)
val df = spark.read.format("json")
.option("mode", "PERMISSIVE")
.option("multiLine", true)
.option("encoding", "UTF-8")
.schema(schema).load(path)
checkAnswer(df, Row(null, expected))
}
}
test("SPARK-23094: permissively read JSON file with leading nulls when multiLine is disabled") {
withTempPath { tempDir =>
val path = tempDir.getAbsolutePath
Seq(badJson, """{"a":1}""").toDS().write.text(path)
val schema = new StructType().add("a", IntegerType).add("_corrupt_record", StringType)
val df = spark.read.format("json")
.option("mode", "PERMISSIVE")
.option("multiLine", false)
.option("encoding", "UTF-8")
.schema(schema).load(path)
checkAnswer(df, Seq(Row(1, null), Row(null, badJson)))
}
}
test("SPARK-23094: permissively parse a dataset contains JSON with leading nulls") {
checkAnswer(
spark.read.option("mode", "PERMISSIVE").option("encoding", "UTF-8").json(Seq(badJson).toDS()),
Row(badJson))
}
test("SPARK-23772 ignore column of all null values or empty array during schema inference") {
withTempPath { tempDir =>
val path = tempDir.getAbsolutePath
// primitive types
Seq(
"""{"a":null, "b":1, "c":3.0}""",
"""{"a":null, "b":null, "c":"string"}""",
"""{"a":null, "b":null, "c":null}""")
.toDS().write.text(path)
var df = spark.read.format("json")
.option("dropFieldIfAllNull", true)
.load(path)
var expectedSchema = new StructType()
.add("b", LongType).add("c", StringType)
assert(df.schema === expectedSchema)
checkAnswer(df, Row(1, "3.0") :: Row(null, "string") :: Row(null, null) :: Nil)
// arrays
Seq(
"""{"a":[2, 1], "b":[null, null], "c":null, "d":[[], [null]], "e":[[], null, [[]]]}""",
"""{"a":[null], "b":[null], "c":[], "d":[null, []], "e":null}""",
"""{"a":null, "b":null, "c":[], "d":null, "e":[null, [], null]}""")
.toDS().write.mode("overwrite").text(path)
df = spark.read.format("json")
.option("dropFieldIfAllNull", true)
.load(path)
expectedSchema = new StructType()
.add("a", ArrayType(LongType))
assert(df.schema === expectedSchema)
checkAnswer(df, Row(Array(2, 1)) :: Row(Array(null)) :: Row(null) :: Nil)
// structs
Seq(
"""{"a":{"a1": 1, "a2":"string"}, "b":{}}""",
"""{"a":{"a1": 2, "a2":null}, "b":{"b1":[null]}}""",
"""{"a":null, "b":null}""")
.toDS().write.mode("overwrite").text(path)
df = spark.read.format("json")
.option("dropFieldIfAllNull", true)
.load(path)
expectedSchema = new StructType()
.add("a", StructType(StructField("a1", LongType) :: StructField("a2", StringType)
:: Nil))
assert(df.schema === expectedSchema)
checkAnswer(df, Row(Row(1, "string")) :: Row(Row(2, null)) :: Row(null) :: Nil)
}
}
test("SPARK-24190: restrictions for JSONOptions in read") {
for (encoding <- Set("UTF-16", "UTF-32")) {
val exception = intercept[IllegalArgumentException] {
spark.read
.option("encoding", encoding)
.option("multiLine", false)
.json(testFile("test-data/utf16LE.json"))
.count()
}
assert(exception.getMessage.contains("encoding must not be included in the blacklist"))
}
}
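  // count() may be answered without fully converting every record, so malformed input is
  // checked explicitly to make sure the result matches what a full parse would report.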
test("count() for malformed input") {
def countForMalformedJSON(expected: Long, input: Seq[String]): Unit = {
val schema = new StructType().add("a", StringType)
val strings = spark.createDataset(input)
val df = spark.read.schema(schema).json(strings)
assert(df.count() == expected)
}
def checkCount(expected: Long): Unit = {
val validRec = """{"a":"b"}"""
val inputs = Seq(
Seq("{-}", validRec),
Seq(validRec, "?"),
Seq("}", validRec),
Seq(validRec, """{"a": [1, 2, 3]}"""),
Seq("""{"a": {"a": "b"}}""", validRec)
)
inputs.foreach { input =>
countForMalformedJSON(expected, input)
}
}
checkCount(2)
countForMalformedJSON(0, Seq(""))
}
test("SPARK-26745: count() for non-multiline input with empty lines") {
withTempPath { tempPath =>
val path = tempPath.getCanonicalPath
      Seq("""{ "a" : 1 }""", "", """ { "a" : 2 }""", " \t ")
.toDS()
.repartition(1)
.write
.text(path)
assert(spark.read.json(path).count() === 2)
}
}
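  // Helpers for the SPARK-25040 tests below: an empty string should fail for non-string types
  // in FAILFAST mode, while string and binary columns accept the empty value as-is.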
private def failedOnEmptyString(dataType: DataType): Unit = {
val df = spark.read.schema(s"a ${dataType.catalogString}")
.option("mode", "FAILFAST").json(Seq("""{"a":""}""").toDS)
val errMessage = intercept[SparkException] {
df.collect()
}.getMessage
assert(errMessage.contains(
s"Failed to parse an empty string for data type ${dataType.catalogString}"))
}
private def emptyString(dataType: DataType, expected: Any): Unit = {
val df = spark.read.schema(s"a ${dataType.catalogString}")
.option("mode", "FAILFAST").json(Seq("""{"a":""}""").toDS)
checkAnswer(df, Row(expected) :: Nil)
}
test("SPARK-25040: empty strings should be disallowed") {
failedOnEmptyString(BooleanType)
failedOnEmptyString(ByteType)
failedOnEmptyString(ShortType)
failedOnEmptyString(IntegerType)
failedOnEmptyString(LongType)
failedOnEmptyString(FloatType)
failedOnEmptyString(DoubleType)
failedOnEmptyString(DecimalType.SYSTEM_DEFAULT)
failedOnEmptyString(TimestampType)
failedOnEmptyString(DateType)
failedOnEmptyString(ArrayType(IntegerType))
failedOnEmptyString(MapType(StringType, IntegerType, true))
failedOnEmptyString(StructType(StructField("f1", IntegerType, true) :: Nil))
emptyString(StringType, "")
emptyString(BinaryType, "".getBytes(StandardCharsets.UTF_8))
}
test("SPARK-25040: allowing empty strings when legacy config is enabled") {
def emptyStringAsNull(dataType: DataType): Unit = {
val df = spark.read.schema(s"a ${dataType.catalogString}")
.option("mode", "FAILFAST").json(Seq("""{"a":""}""").toDS)
checkAnswer(df, Row(null) :: Nil)
}
// Legacy mode prior to Spark 3.0.0
withSQLConf(SQLConf.LEGACY_ALLOW_EMPTY_STRING_IN_JSON.key -> "true") {
emptyStringAsNull(BooleanType)
emptyStringAsNull(ByteType)
emptyStringAsNull(ShortType)
emptyStringAsNull(IntegerType)
emptyStringAsNull(LongType)
failedOnEmptyString(FloatType)
failedOnEmptyString(DoubleType)
failedOnEmptyString(TimestampType)
failedOnEmptyString(DateType)
emptyStringAsNull(DecimalType.SYSTEM_DEFAULT)
emptyStringAsNull(ArrayType(IntegerType))
emptyStringAsNull(MapType(StringType, IntegerType, true))
emptyStringAsNull(StructType(StructField("f1", IntegerType, true) :: Nil))
emptyString(StringType, "")
emptyString(BinaryType, "".getBytes(StandardCharsets.UTF_8))
}
}
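  // In PERMISSIVE mode, fields that parse successfully are kept even when the record as a
  // whole is malformed, and the raw text goes into the corrupt-record column.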
test("return partial result for bad records") {
val schema = "a double, b array<int>, c string, _corrupt_record string"
val badRecords = Seq(
"""{"a":"-","b":[0, 1, 2],"c":"abc"}""",
"""{"a":0.1,"b":{},"c":"def"}""").toDS()
val df = spark.read.schema(schema).json(badRecords)
checkAnswer(
df,
Row(null, Array(0, 1, 2), "abc", """{"a":"-","b":[0, 1, 2],"c":"abc"}""") ::
Row(0.1, null, "def", """{"a":0.1,"b":{},"c":"def"}""") :: Nil)
}
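  // Schema inference only picks TimestampType when every non-null sample parses as a
  // timestamp; any non-matching value makes the column fall back to StringType.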
test("inferring timestamp type") {
def schemaOf(jsons: String*): StructType = spark.read.json(jsons.toDS).schema
assert(schemaOf(
"""{"a":"2018-12-17T10:11:12.123-01:00"}""",
"""{"a":"2018-12-16T22:23:24.123-02:00"}""") === fromDDL("a timestamp"))
assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":1}""")
=== fromDDL("a string"))
assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":"123"}""")
=== fromDDL("a string"))
assert(schemaOf("""{"a":"2018-12-17T10:11:12.123-01:00"}""", """{"a":null}""")
=== fromDDL("a timestamp"))
assert(schemaOf("""{"a":null}""", """{"a":"2018-12-17T10:11:12.123-01:00"}""")
=== fromDDL("a timestamp"))
}
test("roundtrip for timestamp type inferring") {
val customSchema = new StructType().add("date", TimestampType)
withTempDir { dir =>
val timestampsWithFormatPath = s"${dir.getCanonicalPath}/timestampsWithFormat.json"
val timestampsWithFormat = spark.read
.option("timestampFormat", "dd/MM/yyyy HH:mm")
.json(datesRecords)
assert(timestampsWithFormat.schema === customSchema)
timestampsWithFormat.write
.format("json")
.option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
.option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
.save(timestampsWithFormatPath)
val readBack = spark.read
.option("timestampFormat", "yyyy-MM-dd HH:mm:ss")
.option(DateTimeUtils.TIMEZONE_OPTION, "UTC")
.json(timestampsWithFormatPath)
assert(readBack.schema === customSchema)
checkAnswer(readBack, timestampsWithFormat)
}
}
test("SPARK-30960: parse date/timestamp string with legacy format") {
val ds = Seq("{'t': '2020-1-12 3:23:34.12', 'd': '2020-1-12 T', 'd2': '12345'}").toDS()
val json = spark.read.schema("t timestamp, d date, d2 date").json(ds)
checkAnswer(json, Row(
Timestamp.valueOf("2020-1-12 3:23:34.12"),
Date.valueOf("2020-1-12"),
Date.valueOf(LocalDate.ofEpochDay(12345))))
}
test("exception mode for parsing date/timestamp string") {
val ds = Seq("{'t': '2020-01-27T20:06:11.847-0800'}").toDS()
val json = spark.read
.schema("t timestamp")
.option("timestampFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSz")
.json(ds)
withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "exception") {
val msg = intercept[SparkException] {
json.collect()
}.getCause.getMessage
assert(msg.contains("Fail to parse"))
}
withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "legacy") {
checkAnswer(json, Row(Timestamp.valueOf("2020-01-27 20:06:11.847")))
}
withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "corrected") {
checkAnswer(json, Row(null))
}
}
}
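// The suite above is re-run against the V1 file source, the V2 data source framework, and the
// legacy time parser policy so that all three code paths get the same coverage.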
class JsonV1Suite extends JsonSuite {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "json")
}
class JsonV2Suite extends JsonSuite {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "")
}
class JsonLegacyTimeParserSuite extends JsonSuite {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.LEGACY_TIME_PARSER_POLICY, "legacy")
}
|
kevinyu98/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
|
Scala
|
apache-2.0
| 98218 |