code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M)
---|---|---|---|---|---|
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.convert.json
import com.google.gson.{JsonArray, JsonElement}
import com.jayway.jsonpath.spi.json.GsonJsonProvider
import com.jayway.jsonpath.{Configuration, JsonPath}
import com.typesafe.config.Config
import com.vividsolutions.jts.geom._
import com.vividsolutions.jts.geom.impl.CoordinateArraySequence
import org.locationtech.geomesa.convert.Transformers.{EvaluationContext, Expr}
import org.locationtech.geomesa.convert._
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
import scala.util.Try
class JsonSimpleFeatureConverter(jsonConfig: Configuration,
val targetSFT: SimpleFeatureType,
val root: Option[JsonPath],
val inputFields: IndexedSeq[Field],
val idBuilder: Expr) extends ToSimpleFeatureConverter[String] {
import scala.collection.JavaConversions._
override def fromInputType(i: String): Seq[Array[Any]] =
Try { jsonConfig.jsonProvider.parse(i) }.map { json =>
root.map { r => extractFromRoot(json, r) }.getOrElse(Seq(Array[Any](json)))
}.getOrElse(Seq(Array()))
def extractFromRoot(json: AnyRef, r: JsonPath): Seq[Array[Any]] =
r.read[JsonArray](json, jsonConfig).map { o =>
Array[Any](o)
}.toSeq
}
class JsonSimpleFeatureConverterFactory extends SimpleFeatureConverterFactory[String] {
import scala.collection.JavaConversions._
private val jsonConfig =
Configuration.builder()
.jsonProvider(new GsonJsonProvider)
.options(com.jayway.jsonpath.Option.DEFAULT_PATH_LEAF_TO_NULL)
.build()
override def canProcess(conf: Config): Boolean = canProcessType(conf, "json")
override def buildConverter(targetSFT: SimpleFeatureType, conf: Config): SimpleFeatureConverter[String] = {
val root = if(conf.hasPath("feature-path")) Some(JsonPath.compile(conf.getString("feature-path"))) else None
val fields = buildFields(conf.getConfigList("fields"))
val idBuilder = buildIdBuilder(conf.getString("id-field"))
new JsonSimpleFeatureConverter(jsonConfig, targetSFT, root, fields, idBuilder)
}
override def buildFields(fields: Seq[Config]): IndexedSeq[Field] = {
fields.map { f =>
val name = f.getString("name")
val transform = if (f.hasPath("transform")) {
Transformers.parseTransform(f.getString("transform"))
} else {
null
}
if (f.hasPath("path")) {
// path can be absolute, or relative to the feature node
// it can also include xpath functions to manipulate the result
JsonField(name, JsonPath.compile(f.getString("path")), jsonConfig, transform, f.getString("json-type"))
} else {
SimpleField(name, transform)
}
}.toIndexedSeq
}
}
object JsonField {
def apply(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr, jsonType: String) = jsonType match {
case "string" => StringJsonField(name, expression, jsonConfig, transform)
case "double" => DoubleJsonField(name, expression, jsonConfig, transform)
case "int" | "integer" => IntJsonField(name, expression, jsonConfig, transform)
case "bool" | "boolean" => BooleanJsonField(name, expression, jsonConfig, transform)
case "long" => LongJsonField(name, expression, jsonConfig, transform)
case "geometry" => GeometryJsonField(name, expression, jsonConfig, transform)
}
}
trait BaseJsonField[T] extends Field {
def name: String
def expression: JsonPath
def jsonConfig: Configuration
def transform: Expr
private val mutableArray = Array.ofDim[Any](1)
def getAs(el: JsonElement): T
override def eval(args: Array[Any])(implicit ec: EvaluationContext): Any = {
mutableArray(0) = getAs(evaluateJsonPath(args))
if(transform == null) mutableArray(0)
else super.eval(mutableArray)
}
def evalWithTransform(args: Array[Any])(implicit ec: EvaluationContext): Any = {
mutableArray(0) = evaluateJsonPath(args)
super.eval(mutableArray)
}
def evaluateJsonPath(args: Array[Any]): JsonElement = expression.read[JsonElement](args(0), jsonConfig)
}
case class BooleanJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[java.lang.Boolean] {
override def getAs(el: JsonElement): java.lang.Boolean = if (el.isJsonNull) null else el.getAsBoolean
}
case class IntJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[java.lang.Integer] {
override def getAs(el: JsonElement): java.lang.Integer = if (el.isJsonNull) null else el.getAsInt
}
case class LongJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[java.lang.Long] {
override def getAs(el: JsonElement): java.lang.Long = if (el.isJsonNull) null else el.getAsBigInteger.longValue()
}
case class DoubleJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[java.lang.Double] {
override def getAs(el: JsonElement): java.lang.Double = if (el.isJsonNull) null else el.getAsDouble
}
case class StringJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[java.lang.String] {
override def getAs(el: JsonElement): String = if (el.isJsonNull) null else el.getAsString
}
case class GeometryJsonField(name: String, expression: JsonPath, jsonConfig: Configuration, transform: Expr)
extends BaseJsonField[Geometry] with GeoJsonParsing {
override def getAs(el: JsonElement): Geometry = parseGeometry(el)
}
trait GeoJsonParsing {
val geoFac = new GeometryFactory
def toPointCoords(el: JsonElement): Coordinate = {
val arr = el.getAsJsonArray.iterator.map(_.getAsDouble).toArray
new Coordinate(arr(0), arr(1))
}
def toCoordSeq(el: JsonElement): CoordinateSequence = {
val arr = el.getAsJsonArray.iterator.map(_.getAsJsonArray).map(toPointCoords).toArray
new CoordinateArraySequence(arr)
}
private val CoordsPath = "coordinates"
def parseGeometry(el: JsonElement): Geometry = {
if (el.isJsonObject) {
val geomType = el.getAsJsonObject.get("type").getAsString.toLowerCase
geomType match {
case "point" =>
geoFac.createPoint(toPointCoords(el.getAsJsonObject.get(CoordsPath)))
case "linestring" =>
geoFac.createLineString(toCoordSeq(el.getAsJsonObject.get(CoordsPath)))
case "polygon" =>
// Only simple polygons for now (one linear ring)
val coords = el.getAsJsonObject.get(CoordsPath).getAsJsonArray.iterator.map(toCoordSeq).toArray
geoFac.createPolygon(coords(0))
}
} else if (el.isJsonNull) {
null.asInstanceOf[Geometry]
} else if (el.isJsonPrimitive) {
WKTUtils.read(el.getAsString)
} else {
throw new IllegalArgumentException(s"Unknown geometry type: $el")
}
}
} | vpipkt/geomesa | geomesa-convert/geomesa-convert-json/src/main/scala/org/locationtech/geomesa/convert/json/JsonSimpleFeatureConverter.scala | Scala | apache-2.0 | 7,595 |
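// Illustrative usage sketch (not part of the original GeoMesa file above): feeding a tiny
// GeoJSON fragment through GeoJsonParsing. The object name and the sample JSON are made up;
// it assumes Gson and JTS are on the classpath, as in the converter above.
object GeoJsonParsingExample extends org.locationtech.geomesa.convert.json.GeoJsonParsing {
  def main(args: Array[String]): Unit = {
    val el = new com.google.gson.JsonParser()
      .parse("""{"type":"Point","coordinates":[10.0,20.0]}""")
    println(parseGeometry(el)) // expected WKT-style output: POINT (10 20)
  }
}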
object traverser1 {
import org.mentha.tools.archimate.model._
import org.mentha.tools.archimate.model.edges._
import org.mentha.tools.archimate.model.nodes.dsl._
import org.mentha.tools.archimate.model.nodes.dsl.Business._
import org.mentha.tools.archimate.model.nodes.dsl.Motivation._
import org.mentha.tools.archimate.model.nodes.dsl.Implementation._
import org.mentha.tools.archimate.model.nodes.dsl.Composition._
import org.mentha.tools.archimate.model.view.dsl._
import org.mentha.tools.archimate.model.view._
implicit val space = Size(40, 40)
implicit val model = new Model withId "vp-impl-n-mig"
implicit val view = model.add("v-main") { new View(ProjectViewPoint) }
val de = in(view) node { deliverable withName "Deliverable" }
val wp = in(view) node { workPackage withName "Work Package" } place(directions.Left, de)
val br = in(view) node { businessRole withName "Business Role" } place(directions.Left, wp)
val ba = in(view) node { businessActor withName "Business Actor" } place(directions.Down, br)
val lo = in(view) node { location withName "Location" } place(directions.Up, br)
val rq = in(view) node { requirement withName "Requirement/\nConstraint" } place(directions.Up, de)
val go = in(view) node { goal withName "Goal" } place(directions.Right, rq)
val pl = in(view) node { plateau withName "Plateau" } place(directions.Down, de)
val ga = in(view) node { gap withName "Gap" } place(directions.Down, pl)
in(view) edge { $(wp) `flows` "smth" `to` $(wp) } routeLoop(directions.Down, 1)
in(view) edge { $(wp) `triggers` $(wp) } routeLoop(directions.Down, 2)
in(view) edge { $(lo) `associated with` $(de) } flex (-1, -1, -1)
in(view) edge { $(lo) `associated with` $(wp) } flex (-1, -1)
in(view) edge { $(lo) `assigned to` $(br) }
in(view) edge { $(ba) `assigned to` $(br) }
in(view) edge { $(br) `assigned to` $(wp) }
in(view) edge { $(wp) `realizes` $(de) }
in(view) edge { $(de) `realizes` $(rq) }
in(view) edge { $(rq) `realizes` $(go) }
in(view) edge { $(de) `realizes` $(pl) }
in(view) edge { $(pl) `triggers` $(pl) } routeLoop (directions.Left, 1)
in(view) edge { $(pl) `associated with` $(ga) }
import org.mentha.archimate.model.traverse._
val traverser = new ModelTraversing(ModelTraversing.ChangesInfluence)(model)
traverser($(de))( new ModelVisitor {
override def visitVertex(vertex: Concept): Boolean = {
println(s"visit-node: ${vertex.meta.name}@${vertex.id}")
true
}
override def visitEdge(from: Concept, edge: EdgeConcept, to: Concept): Boolean = {
println(s"visit-edge: ${from.meta.name}@${from.id} --~${edge.meta.name}~--> ${to.meta.name}@${to.id} ")
true
}
})
def main(args: Array[String]): Unit = {
}
} | zhuj/mentha-web-archimate | junk/src/test/scala/traverser1.scala | Scala | mit | 2,773 |
import sbt._
import Keys._
object InfoSettings {
type Sett = Project.Setting[_]
def all = Seq[Sett](versioninfo)
val versioninfo = sourceGenerators in Compile <+= (sourceManaged in Compile, version, name) map { (d, v, n) =>
val file = d / "info.scala"
IO.write(file, """package com.nicta
package trackfunction
|object Info {
| val version = "%s"
| val name = "trackfunction"
|}
|""".stripMargin.format(v))
Seq(file)
}
}
}
| NICTA/trackfunction | project/InfoSettings.scala | Scala | bsd-3-clause | 574 |
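// Illustrative sketch (not from the original repository): how these settings might be wired
// into an sbt 0.13-era Build definition, matching the <+= syntax used above. The project id
// and base directory are assumptions.
import sbt._
import Keys._

object TrackFunctionBuild extends Build {
  lazy val root = Project("trackfunction", file("."))
    .settings(InfoSettings.all: _*)
}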
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.rdd.cl.tests
import java.util.LinkedList
import com.amd.aparapi.internal.writer.ScalaArrayParameter
import com.amd.aparapi.internal.model.Tuple2ClassModel
import com.amd.aparapi.internal.model.ClassModel
import com.amd.aparapi.internal.model.HardCodedClassModels
import com.amd.aparapi.internal.model.DenseVectorClassModel
import com.amd.aparapi.internal.model.ScalaArrayClassModel
import org.apache.spark.rdd.cl.AsyncCodeGenTest
import org.apache.spark.rdd.cl.CodeGenTest
import org.apache.spark.rdd.cl.CodeGenTests
import org.apache.spark.rdd.cl.CodeGenUtil
import org.apache.spark.rdd.cl.PrimitiveArrayInputBufferWrapperConfig
object AsyncPrimitiveArrayInputTest extends AsyncCodeGenTest[Double] {
def getExpectedException() : String = { return null }
def getExpectedKernel() : String = { getExpectedKernelHelper(getClass) }
def getExpectedNumInputs : Int = {
1
}
def init() : HardCodedClassModels = {
val models = new HardCodedClassModels()
val arrayModel = ScalaArrayClassModel.create("D")
models.addClassModelFor(classOf[Array[_]], arrayModel)
models
}
def complete(params : LinkedList[ScalaArrayParameter]) {
}
def getFunction() : Function0[Double] = {
var arr : Array[Double] = new Array[Double](3)
new Function0[Double] {
override def apply() : Double = {
var sum = 0.0
var i = 0
while (i < arr.length) {
sum += arr(i)
i += 1
}
sum
}
}
}
}
| agrippa/spark-swat | swat/src/test/scala/org/apache/spark/rdd/cl/tests/AsyncPrimitiveArrayInputTest.scala | Scala | bsd-3-clause | 3,015 |
package com.twitter.finagle.http.filter
import com.twitter.finagle.{Service, SimpleFilter}
import com.twitter.finagle.http.{MediaType, Method, Request, Response}
import com.twitter.util.Future
import com.twitter.io.Buf
/**
* JSONP (callback) filter
*
* Wrap JSON content in <callback>(<content>);
*
* See: http://en.wikipedia.org/wiki/JSONP
*/
class JsonpFilter[Req <: Request] extends SimpleFilter[Req, Response] {
def apply(request: Req, service: Service[Req, Response]): Future[Response] = {
getCallback(request) match {
case Some(callback) =>
addCallback(callback, request, service)
case None =>
service(request)
}
}
def addCallback(
callback: String,
request: Req,
service: Service[Req, Response]
): Future[Response] =
service(request).map { response =>
if (response.mediaType.contains(MediaType.Json)) {
response.content = Buf(
Seq(
JsonpFilter.Comment,
Buf.Utf8(callback),
JsonpFilter.LeftParen,
response.content,
JsonpFilter.RightParenSemicolon
)
)
response.mediaType = MediaType.Javascript
}
response
}
def getCallback(request: Request): Option[String] = {
// Ignore HEAD, though in practice this should be behind the HeadFilter
if (request.method != Method.Head)
request.params.get("callback").flatMap { callback =>
val sanitizedCallback = JsonpFilter.SanitizerRegex.replaceAllIn(callback, "")
if (!sanitizedCallback.isEmpty)
Some(sanitizedCallback)
else
None
} else
None
}
}
object JsonpFilter extends JsonpFilter[Request] {
// Sanitize to prevent cross domain policy attacks and such
private val SanitizerRegex = """[^\/\@\.\[\]\:\w\d]""".r
// Reuse left/right paren. The semicolon may not be strictly necessary, but
// some APIs include it.
private val LeftParen = Buf.Utf8("(")
private val RightParenSemicolon = Buf.Utf8(");")
// Prepended to address CVE-2014-4671
private val Comment = Buf.Utf8("/**/")
}
| mkhq/finagle | finagle-http/src/main/scala/com/twitter/finagle/http/filter/JsonpFilter.scala | Scala | apache-2.0 | 2,110 |
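// Illustrative sketch (not part of Finagle): composing JsonpFilter with a JSON-producing
// service. The echo service and object name are made up; a request carrying ?callback=cb
// should come back as /**/cb({"ok":true}); with a Javascript media type.
import com.twitter.finagle.Service
import com.twitter.finagle.http.{MediaType, Request, Response, Status}
import com.twitter.util.Future

object JsonpFilterExample {
  val jsonService: Service[Request, Response] = Service.mk { _: Request =>
    val rep = Response(Status.Ok)
    rep.contentString = """{"ok":true}"""
    rep.mediaType = MediaType.Json
    Future.value(rep)
  }

  // JSON responses passing through this service are wrapped whenever a callback is present.
  val jsonpService: Service[Request, Response] = JsonpFilter.andThen(jsonService)
}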
package packa
import org.junit.Test
class TestA {
@Test
def testMethod(): Unit = {}
}
class TestA1 {
@Test
def testMethod1(): Unit = {}
@Test
def testMethod2(): Unit = {}
}
| Kwestor/scala-ide | org.scala-ide.sdt.core.tests/test-workspace/test-finder/src/packa/TestA.scala | Scala | bsd-3-clause | 190 |
package com.szadowsz.gospel.core.engine.subgoal
import com.szadowsz.gospel.core.data.Term
import com.szadowsz.gospel.core.engine.subgoal.tree.{SubGoalLeaf, SubGoalTree}
/** Class to hold sub-goal clauses to evaluate
*
* Main Constructor takes an existing Sub-Goal Tree generally
* generated by a ClauseInfo object or static method.
*
* @param subGoals The current sub-goals of the demonstration
*/
private[engine] class SubGoalStore(subGoals: SubGoalTree) {
private var _goals : SubGoalTree = subGoals
private var _index : Int = 0
private var _curSGId : SubGoalId = null
/** Restores the Sub-Goal Store state
*
* Backtracks the state to the specified goal pointer so that
* we can continue our attempts to solve the current theory
*
* @param identifier The sub-goal state to backtrack to
*/
def backTo(identifier: SubGoalId): Option[Term] = {
popSubGoal(identifier.asInstanceOf[SubGoalId])
_index -= 1 // decrease the index so the term we fetch and return will be correct
fetch()
}
/** Pushes a new sub-goal list onto the stack
*
* @param subGoals The sub-goal list to evaluate
*/
def pushSubGoal(subGoals: SubGoalTree) {
_curSGId = new SubGoalId(_curSGId, _goals, _index)
_goals = subGoals
_index = 0
}
/** Sets an existing identifier as the head and pops all ids that are more recent
*
* @param identifier The sub-goal state to go to
*/
private def popSubGoal(identifier: SubGoalId) {
_goals = identifier.getRoot
_index = identifier.getIndex
_curSGId = identifier.getParent
}
/** Returns the next sub-goal to evaluate */
def fetch(): Option[Term] = {
if (haveSubGoals) {
val goal = _goals.getChild(_index)
_index += 1
goal match {
case g : SubGoalTree =>
pushSubGoal(g)
fetch()
case g : SubGoalLeaf =>
Option(g.getValue)
}
} else { // if we have no more sub-goals we go back up a level if able
if (_curSGId == null) {
None
} else {
popSubGoal(_curSGId)
fetch()
}
}
}
def getCurrentID: SubGoalId = _curSGId
/**
* Current execution index
*/
def getCurrentIndex: SubGoalId = new SubGoalId(_curSGId, _goals, _index)
def haveSubGoals: Boolean = _index < _goals.size
override def toString: String = "goals: " + _goals + " " + "index: " + _index
} | zakski/project-soisceal | scala-core/src/main/scala/com/szadowsz/gospel/core/engine/subgoal/SubGoalStore.scala | Scala | lgpl-3.0 | 2,424 |
package uk.gov.gds.ier.service
import com.google.inject.Inject
import uk.gov.gds.ier.model.Country
import uk.gov.gds.ier.transaction.ordinary.InprogressOrdinary
import uk.gov.gds.ier.validation.{DateValidator, CountryValidator}
import uk.gov.gds.ier.validation.constants.DateOfBirthConstants
class ScotlandService @Inject()(
val addressService: AddressService
) {
/*
* Given a pair of postcode and country...
* (THIS IS AN ALTERNATIVE isScot() where the calling class does not have access to the full currentState object)
* 1. If the postcode is not BLANK, is the postcode within a Scottish GSS_CODE? return TRUE/FALSE
* 2. Else is the country-of-residence selected Scotland? return TRUE/FALSE
*/
def isScotByPostcodeOrCountry(postcode: String, country: Country):Boolean = {
if (postcode.nonEmpty) {
addressService.isScotAddress(postcode)
} else {
CountryValidator.isScotland(Some(country))
}
}
/*
Given a current application...
1. If there's an address, is the postcode within a Scottish GSS_CODE? return TRUE/FALSE
2. Else is the country-of-residence selected Scotland? return TRUE/FALSE
3. Else return FALSE
*/
def isScot(currentState: InprogressOrdinary):Boolean = {
if(currentState.address.isDefined) {
currentState.address match {
case Some(partialAddress) => {
val postcode = partialAddress.postcode.trim.toUpperCase
addressService.isScotAddress(postcode)
}
case _ => {false}
}
}
else {
CountryValidator.isScotland(currentState.country)
}
}
/*
Given a current application...
Extract the DOB and pass into the function to determine if 14/15 yrs old
return TRUE/FALSE
ASSUMPTION : DOB exists when this function is called
*/
def isUnderageScot(currentState: InprogressOrdinary):Boolean = {
DateValidator.isValidYoungScottishVoter(currentState.dob.get.dob.get)
}
/*
Given a current application...
Call both isScot() && isUnderageScot() and return TRUE only if both criteria are present
Else return false
*/
def isYoungScot(currentState: InprogressOrdinary):Boolean = {
if(currentState.dob.get.noDob.isDefined) {
isScot(currentState)
}
else{
isScot(currentState) && isUnderageScot(currentState)
}
}
/*
Given a current application...
Check the address / country status.
If an actual DOB exists, then ignore this check entirely. An actual DOB always takes precedence
If SCO, then any non-SCO noDOB age range options selected need to be reset to force the citizen to reenter
If non-SCO, then any SCO noDOB age range options selected need to be reset to force the citizen to reenter
*/
def resetNoDOBRange(currentState: InprogressOrdinary): Boolean = {
if(currentState.dob.isDefined) {
if(currentState.dob.get.noDob.isDefined) {
val dateOfBirthRangeOption = currentState.dob.get.noDob.get.range.get
if(isScot(currentState)) {
//Wipe DOB object if any non-SCO noDOB age range is currently stored
dateOfBirthRangeOption match {
case DateOfBirthConstants.under18 | DateOfBirthConstants.is18to75 | DateOfBirthConstants.over75 => return true
case _ => return false
}
}
else {
//Wipe DOB object if any SCO noDOB age range is currently stored
dateOfBirthRangeOption match {
case DateOfBirthConstants.is14to15 | DateOfBirthConstants.is16to17 | DateOfBirthConstants.over18 => return true
case _ => return false
}
}
}
}
return false
}
}
| alphagov/ier-frontend | app/uk/gov/gds/ier/service/ScotlandService.scala | Scala | mit | 3,665 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{Calculated, CtBigDecimal, CtBoxIdentifier}
import uk.gov.hmrc.ct.ct600.v2.calculations.CorporationTaxAlreadyPaidCalculator
import uk.gov.hmrc.ct.ct600.v2.retriever.CT600BoxRetriever
case class B93(value: BigDecimal) extends CtBoxIdentifier("Corporation Tax Overpaid") with CtBigDecimal
object B93 extends CorporationTaxAlreadyPaidCalculator with Calculated[B93, CT600BoxRetriever]{
override def calculate(fieldValueRetriever: CT600BoxRetriever): B93 =
corporationTaxOverpaid(fieldValueRetriever.b86(),
fieldValueRetriever.b91())
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B93.scala | Scala | apache-2.0 | 1,232 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.contrib.scalaz.MonadError_
package object connector {
type ByteStore[F[_]] = Store[F, String, Array[Byte]]
type MonadResourceErr[F[_]] = MonadError_[F, ResourceError]
def MonadResourceErr[F[_]](implicit ev: MonadResourceErr[F])
: MonadResourceErr[F] = ev
}
| djspiewak/quasar | connector/src/main/scala/quasar/connector/package.scala | Scala | apache-2.0 | 924 |
package jp.ne.opt.chronoscala
import java.time._
import java.util.TimeZone
import org.scalacheck.Gen
import Gens.timeZoneIds
trait Gens {
def instantGen: Gen[Instant] = Gen.chooseNum(0L, Long.MaxValue).map(Instant.ofEpochMilli)
def localDateGen: Gen[LocalDate] = for {
year <- Gen.chooseNum(Year.MIN_VALUE, Year.MAX_VALUE)
month <- Gen.chooseNum(1, 12)
dayOfMonth <- Gen.chooseNum(1, Month.of(month).length(Year.isLeap(year)))
} yield LocalDate.of(year, month, dayOfMonth)
def localDateTimeGen: Gen[LocalDateTime] = for {
instant <- instantGen
zoneId <- Gen.oneOf(timeZoneIds)
} yield LocalDateTime.ofInstant(instant, zoneId)
def localTimeGen: Gen[LocalTime] = for {
hour <- Gen.chooseNum(0, 23)
minute <- Gen.chooseNum(0, 59)
second <- Gen.chooseNum(0, 59)
} yield LocalTime.of(hour, minute, second)
def zonedDateTimeGen: Gen[ZonedDateTime] = for {
instant <- instantGen
zoneId <- Gen.oneOf(timeZoneIds)
} yield ZonedDateTime.ofInstant(instant, zoneId)
def offsetDateTimeGen: Gen[OffsetDateTime] = for {
instant <- instantGen.map(_.atZone(ZoneOffset.UTC).toLocalDateTime)
offset <- Gen
.chooseNum(ZoneOffset.MIN.getTotalSeconds, ZoneOffset.MAX.getTotalSeconds)
.map(ZoneOffset.ofTotalSeconds)
} yield OffsetDateTime.of(instant, offset)
def durationGen: Gen[Duration] = for {
start <- instantGen
end <- instantGen
} yield Duration.between(start, end)
}
object Gens {
private val timeZoneIds = TimeZone.getAvailableIDs.map(TimeZone.getTimeZone(_).toZoneId).toSeq
}
| opt-tech/chronoscala | shared/src/test/scala/jp/ne/opt/chronoscala/Gens.scala | Scala | mit | 1,570 |
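// Illustrative sketch (not part of the library): drawing sample values from the generators
// above, e.g. for a quick REPL sanity check. GensSample is a made-up name.
object GensSample extends jp.ne.opt.chronoscala.Gens {
  def main(args: Array[String]): Unit = {
    println(instantGen.sample)       // e.g. Some(2003-07-12T11:08:56.301Z)
    println(zonedDateTimeGen.sample) // a random instant placed in a random available zone
    println(durationGen.sample)      // the Duration between two random instants
  }
}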
package org.flowpaint.filters
import org.flowpaint.property.{DataImpl, Data}
import org.flowpaint.util.{MathUtils, DataSample, PropertyRegister}
/**
* Removes segments with zero length, and merges their properties with subsequent points
*
* @author Hans Haggstrom
*/
class ZeroLengthSegmentFilter extends PathProcessor {
private var previousData = new DataImpl()
private var temp = new DataImpl()
private var oldX = 0f
private var oldY = 0f
private var oldX2 = 0f
private var oldY2 = 0f
override protected def onInit() = {
previousData.clear
temp.clear
oldX = 0f
oldY = 0f
oldX2 = 0f
oldY2 = 0f
}
protected def processPathPoint(pointData: Data) : List[Data] = {
var result : List[Data] = Nil
val smoothing = getFloatProperty("smoothing", pointData, 0.2f)
val FILTER_DISTANCE = getFloatProperty("filterDistance", pointData, 1.5f)
val smooth = if (firstPoint) 0f else smoothing
val newX = MathUtils.interpolate(smooth, pointData.getFloatProperty(PropertyRegister.PATH_X, oldX2), oldX2)
val newY = MathUtils.interpolate(smooth, pointData.getFloatProperty(PropertyRegister.PATH_Y, oldY2), oldY2)
/*
val newX = pointData.getProperty("x",0)
val newY = pointData.getProperty("y",0)
*/
pointData.setFloatProperty(PropertyRegister.PATH_X, newX)
pointData.setFloatProperty(PropertyRegister.PATH_Y, newY)
oldX2 = newX
oldY2 = newY
if (MathUtils.squaredDistance(oldX, oldY, newX, newY) <= FILTER_DISTANCE * FILTER_DISTANCE)
{
previousData.setValuesFrom(pointData)
// Discard (do not process) the point
}
else {
oldX = newX
oldY = newY
// Overwrite values with latest ones
previousData.setValuesFrom(pointData)
/*
temp.clear
temp.setValuesFrom(pointData)
*/
// Copy all values to the newest point, to also catch any old ones that were set for discarded points
// and not reset with the latest point data.
pointData.setValuesFrom(previousData)
/*
// Clear the old temp data, but retain the most recent
// previousData.clear
previousData.setValuesFrom( temp )
*/
// Process normally
result = List(pointData)
}
return result
}
} | zzorn/flowpaint | src/main/scala/org/flowpaint/filters/ZeroLengthSegmentFilter.scala | Scala | gpl-2.0 | 2,581 |
/*§
===========================================================================
GraphsJ - SDK
===========================================================================
Copyright (C) 2009-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.graphsj
/**
* OutputConsole implementation doing just nothing
*/
object NopOutputConsole extends OutputConsole {
override def write(value: Any): Unit = {}
override def writeln(value: Any): Unit = {}
override def writeln(): Unit = {}
override def writeHeader(header: String): Unit = {}
}
| giancosta86/GraphsJ-sdk | src/main/scala/info/gianlucacosta/graphsj/NopOutputConsole.scala | Scala | apache-2.0 | 1,251 |
package com.github.meln1k.reactive.telegrambot.api
import java.util.UUID
import com.github.meln1k.reactive.telegrambot.methods.ApiMethod
/**
* Class representing request to api
* @param id unique id of the request
* @param method request method
*/
case class ApiRequest(method: ApiMethod, id: UUID = UUID.randomUUID())
| meln1k/reactive-telegrambot | src/main/scala/com/github/meln1k/reactive/telegrambot/api/ApiRequest.scala | Scala | mit | 326 |
package antonkulaga.projects.stats
import java.io.FileInputStream
import ammonite.ops.{LsSeq, read, Path}
import org.w3.banana.io.{RDFXML, RDFReader}
import org.w3.banana.sesame.Sesame
import org.w3.banana.{RDFOps, RDF}
import org.w3.banana.util.tryInstances._
import scala.util._
import scalaz.{Monad, Comonad}
abstract class Store[Rdf <: RDF, M[+_] : Monad : Comonad, Sin, Sout](
syntax: String,
extension: String
)(implicit
val ops: RDFOps[Rdf],
val reader: RDFReader[Rdf, M, Sin]
) {
def read(fileName: String, base: String = ""): M[Rdf#Graph] = {
val stream = new FileInputStream( fileName )
//val lines: BufferedSource = scala.io.Source.fromFile("file.txt")
reader.read(stream,base)
}
}
class SesameReader extends Store[Sesame, Try, RDFXML, RDFXML]("RDF/XML", "rdf"){
}
| antonkulaga/personal | app/jvm/src/main/scala/antonkulaga/projects/stats/SesameReader.scala | Scala | mpl-2.0 | 1,214 |
package io.github.binaryfoo.gclog.output
import java.io.{PrintStream, PrintWriter}
import java.net.Socket
trait OutputSink {
def write(event: String)
def close()
}
object StdOutSink extends PrintStreamSink(Console.out)
case class PrintStreamSink(out: PrintStream) extends OutputSink {
override def write(event: String): Unit = out.println(event)
override def close(): Unit = out.flush()
}
case class SocketSink(host: String = "localhost", port: Int = 2003) extends OutputSink {
private val socket = new Socket(host, port)
private val out = new PrintWriter(socket.getOutputStream)
override def write(event: String): Unit = out.println(event)
override def close(): Unit = {
out.close()
socket.close()
}
}
object SocketSink {
def apply(destination: String): SocketSink = {
val Array(host, port) = destination.split(':')
SocketSink(host, port.toInt)
}
}
| binaryfoo/gclog-parser | src/main/scala/io/github/binaryfoo/gclog/output/OutputSink.scala | Scala | mit | 897 |
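// Illustrative sketch (not from the original repository): writing a Graphite-style line to
// stdout, or to a remote carbon listener via SocketSink. The metric line is made up.
import io.github.binaryfoo.gclog.output._

object OutputSinkExample {
  def main(args: Array[String]): Unit = {
    val sink: OutputSink = StdOutSink // or SocketSink("localhost:2003") for a remote listener
    sink.write("gc.pause.millis 42 1469179200")
    sink.close()
  }
}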
package org.apache.spark.sql.util
object __THIS_IS_THE_ROAD_TO_CLOWNTOWN__ArrowUtils {
// TODO:
// This is defeating encapsulation and should be avoided if possible.
// Sorry, I'm a clown.
val rootAllocator = org.apache.spark.sql.util.ArrowUtils.rootAllocator
val toArrowSchema = org.apache.spark.sql.util.ArrowUtils.toArrowSchema(_, _)
}
| rstudio/sparklyr | java/spark-3.0.0/clowntown.scala | Scala | apache-2.0 | 351 |
/*
ASIB - A Scala IRC Bot
Copyright (C) 2012 Iain Cambridge
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package asib.mocks.command
import asib.command.Quit
import asib.util.Message
class MockQuit extends Quit {
var lastMessage = new Message
override def handle(line: Message) = {
lastMessage = line
super.handle(line)
}
} | icambridge-old/asib | src/test/scala/asib/mocks/command/MockQuit.scala | Scala | gpl-3.0 | 952 |
package authentication
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import authentication.AuthenticatorSuite._
import authentication.entities._
import tests.TestWordSpec
import org.mockito.Mock
import org.scalatest.concurrent.Eventually
import rest.client.RestClient
import rest.client.entities.ExecutionResultCode
import scala.concurrent.{ExecutionContext, Future}
class AuthenticatorSuite extends TestKit(ActorSystem("authenticator-tests")) with TestWordSpec with ImplicitSender with Eventually {
implicit val ec: ExecutionContext = system.dispatcher
@Mock
private var mockRestClient: RestClient = _
private var authenticator: TestActorRef[Authenticator] = _
"on sign-in rest client signIn is called" in {
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignIn(USER, PASSWORD)
verify(mockRestClient, times(1)).signIn(eq(USER), eq(PASSWORD))
}
"on sign-in success sender receives Authenticated message" in {
when(mockRestClient.signIn(eq(USER), eq(PASSWORD))).thenReturn(Future.successful(AUTH_TOKEN))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignIn(USER, PASSWORD)
expectMsg(Authenticated)
}
"on sign-in success store contains received token" in {
when(mockRestClient.signIn(eq(USER), eq(PASSWORD))).thenReturn(Future.successful(AUTH_TOKEN))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignIn(USER, PASSWORD)
eventually(AuthTokenStore.getBearerToken.shouldEqual(TOKEN))
expectMsg(Authenticated)
}
"on sign-in failure sender receives AuthFailure" in {
val authError = new Exception("authentication failure")
when(mockRestClient.signIn(eq(USER), eq(PASSWORD))).thenReturn(Future.failed(authError))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignIn(USER, PASSWORD)
expectMsg(AuthFailure(authError))
}
"on sign-up rest client signUp is called" in {
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignUp(USER, PASSWORD)
verify(mockRestClient, times(1)).signUp(eq(USER), eq(PASSWORD))
}
"on sign-up success sender receives Authenticated message" in {
when(mockRestClient.signUp(eq(USER), eq(PASSWORD))).thenReturn(Future.successful(AUTH_TOKEN))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignUp(USER, PASSWORD)
expectMsg(Authenticated)
}
"on sign-up success store contains received token" in {
when(mockRestClient.signUp(eq(USER), eq(PASSWORD))).thenReturn(Future.successful(AUTH_TOKEN))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignUp(USER, PASSWORD)
eventually(AuthTokenStore.getBearerToken.shouldEqual(TOKEN))
expectMsg(Authenticated)
}
"on sign-up failure sender receives AuthFailure" in {
val authError = new Exception("authentication failure")
when(mockRestClient.signUp(eq(USER), eq(PASSWORD))).thenReturn(Future.failed(authError))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignUp(USER, PASSWORD)
expectMsg(AuthFailure(authError))
}
"on sign-out rest client signOut is called" in {
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignOut
verify(mockRestClient, times(1)).signOut()
}
"on sign-out success sender receives Disconnected message" in {
when(mockRestClient.signOut()).thenReturn(Future.successful(ExecutionResultCode.OK))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignOut
expectMsg(Disconnected)
}
"on sign-out success store is cleared" in {
when(mockRestClient.signOut()).thenReturn(Future.successful(ExecutionResultCode.OK))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignOut
assertThrows[RuntimeException] {
AuthTokenStore.getBearerToken
}
expectMsg(Disconnected)
}
"on sign-out failure sender receives AuthFailure" in {
val authError = new Exception("authentication failure")
when(mockRestClient.signOut()).thenReturn(Future.failed(authError))
authenticator = TestActorRef(Authenticator.props(mockRestClient))
authenticator ! SignOut
expectMsg(AuthFailure(authError))
}
}
object AuthenticatorSuite {
private val USER: String = "test-user"
private val PASSWORD: String = "test-user-p@sswo7d"
private val TOKEN: String = "1-2-3-x-y-z"
private val AUTH_TOKEN: AuthToken = BearerToken(TOKEN)
}
| lymr/fun-chat | fun-chat-client/src/test/scala/authentication/AuthenticatorSuite.scala | Scala | mit | 4,716 |
package my.game.pkg.actors
import my.game.pkg.utils.Implicits._
import my.game.pkg.utils.Utils._
import my.game.pkg.Settings
import my.game.pkg.base_actor.{ActorInView, AcceleratableActor}
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.scenes.scene2d.actions.Actions
import com.badlogic.gdx.scenes.scene2d.{Action, InputEvent, InputListener}
import com.badlogic.gdx.graphics.g2d.Animation
import com.badlogic.gdx.utils.{Array => GArray}
import com.badlogic.gdx.math.Vector2
import com.badlogic.gdx.graphics.g2d.TextureRegion
import com.badlogic.gdx.graphics.g2d.Batch
import com.badlogic.gdx.graphics.Texture
import com.badlogic.gdx.graphics.Pixmap.Format.RGB888
import com.badlogic.gdx.utils.Timer
import com.badlogic.gdx.math.Circle
import scala.language.implicitConversions._
import scala.collection.JavaConversions._
import scala.math
class ShipMovingAction(val acc:Float, rotation: AcceleratableActor => Float) extends Action {
override def act(delta: Float):Boolean = {
def proj(angle:Float, value:Float): (Float, Float) = {
(value * math.cos(angle), value * math.sin(angle))
}
val ship = actor match {
case ship: AcceleratableActor => ship
case _ => throw new ClassCastException
}
val old_velocity = ship.velocity
if(acc < 0 && math.abs(old_velocity.x) < 0.01 && math.abs(old_velocity.y) < 0.01) {
return true
}
val proj_a = proj(rotation(ship) * math.Pi / 180, acc)
val new_velocity = new Vector2(old_velocity.x + proj_a._1 * delta,
old_velocity.y + proj_a._2 * delta)
val s_x = old_velocity.x * delta + (proj_a._1 * delta * delta) / 2
val s_y = old_velocity.y * delta + (proj_a._2 * delta * delta) / 2
ship.velocity = new_velocity
ship.addAction(Actions.moveBy(-s_x, -s_y))
true
}
}
class BulletShot extends Action {
var delta_sum: Float = 0
var is_first_shot = true
val delta_accum = 7 // larger will slower
override def act(delta: Float) = {
delta_sum += delta
if(delta_sum > delta * delta_accum || is_first_shot) {
is_first_shot = false
delta_sum = 0
val ship = actor match {
case ship: Ship => ship
case _ => throw new ClassCastException
}
ship.makeBullet()
Settings.sounds("fire").play()
}
true
}
}
class Ship extends AcceleratableActor("ship.png") {
private val ANGLE: Float = 5
private val ACC: Float = 2.0f
private val frames: GArray[TextureRegion] = new GArray()
private var stateTime = 0f
frames.add(new TextureRegion(texture))
// empty frame
frames.add(new TextureRegion(new Texture(texture.getWidth(),
texture.getHeight(),
RGB888
)))
var is_immune = false
private val inv_animation = new Animation(5f, frames)
val actions = Array(
Actions.forever(Actions.rotateBy(ANGLE)),
Actions.forever(Actions.rotateBy(-ANGLE)),
Actions.forever(new ShipMovingAction(ACC, ship => ship.getRotation)),
Actions.forever(new BulletShot())
)
setImmunity(2)
def setImmunity(time: Float) = {
is_immune = true
Timer.schedule(is_immune = false, time)
}
addListener(new InputListener {
override def keyDown(event: InputEvent, keycode: Int):Boolean = {
// actions.map(_.reset())
movement(keycode,
space=addAction(actions(3)),
left=addAction(actions(0)),
right=addAction(actions(1)),
up=addAction(actions(2))
)
true
}
override def keyUp(event: InputEvent, keycode: Int):Boolean = {
def replace_and_delete_action(i:Int) = {
removeAction(actions(i))
actions(i) = i match {
case 0 => Actions.forever(Actions.rotateBy(ANGLE))
case 1 => Actions.forever(Actions.rotateBy(-ANGLE))
case 2 => Actions.forever(new ShipMovingAction(ACC, ship => ship.getRotation))
case 3 => Actions.forever(new BulletShot())
}
}
movement(keycode,
replace_and_delete_action(3),
replace_and_delete_action(0),
replace_and_delete_action(1),
replace_and_delete_action(2)
)
true
}
})
override def draw(batch:Batch, alpha:Float) = {
if(is_immune) {
stateTime += alpha
val frame = inv_animation.getKeyFrame(stateTime, true).getTexture
batch.draw(frame,this.getX(),getY(),this.getOriginX(),this.getOriginY(),this.getWidth(),
this.getHeight(),this.getScaleX(), this.getScaleY(),this.getRotation(),0,0,
texture.getWidth(),texture.getHeight(),false,false);
}
else {
super.draw(batch, alpha)
}
}
def movement(keycode:Int, space: => Unit, left: => Unit, right: => Unit, up: => Unit) = {
keycode match {
case 21 => left
case 22=> right
case 19 => up
case 62 => space
case _ =>
}
}
def makeBullet() {
val stage = getStage()
val velocity = new Vector2(-10, 0)
velocity.rotate(getRotation)
val bullet = new Bullet(velocity, (this.getX(), this.getY()))
stage.addActor(bullet)
}
def drawInvulnerable() = {
}
}
class Bullet(val velocity: Vector2, from: (Float, Float)) extends ActorInView("bullet.png") {
private val v1 = new Vector2(from._1, from._2)
private val v2 = new Vector2(0, 0)
private var overall:Float = 0
setPosition(v1.x, v1.y)
override def act(delta: Float) {
moveBy(velocity.x, velocity.y)
v2.x = getX()
v2.y = getY()
overall += velocity.len
if(overall > 200) {
remove()
return
}
super.act(delta)
}
}
| raymank26/asteroids-scala | common/src/main/scala/ship.scala | Scala | gpl-3.0 | 6,134 |
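// Illustrative sketch (not part of the game): the per-frame kinematics that ShipMovingAction
// applies, written standalone with made-up numbers (angle in degrees, ACC = 2, one frame at
// 60 fps). The original then moves the actor by (-sx, -sy) and stores the new velocity.
object ShipMovingMathExample {
  def main(args: Array[String]): Unit = {
    val (angleDeg, acc, delta) = (90.0, 2.0, 1.0 / 60.0)
    val (oldVx, oldVy) = (0.0, 0.0)
    val rad = angleDeg * math.Pi / 180
    val (ax, ay) = (acc * math.cos(rad), acc * math.sin(rad))
    val (newVx, newVy) = (oldVx + ax * delta, oldVy + ay * delta)
    val (sx, sy) = (oldVx * delta + ax * delta * delta / 2,
                    oldVy * delta + ay * delta * delta / 2)
    println(s"new velocity = ($newVx, $newVy), displacement = ($sx, $sy)")
  }
}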
package controllers
/**
* Created by sam on 17/12/16.
*/
import io.circe.generic.auto._
import io.circe.syntax._
import io.swagger.annotations._
import io.swagger.config.ConfigFactory
import models.User
import pdi.jwt._
import play.api._
import play.api.libs.circe.Circe
import play.api.mvc._
@Api(
value = "/api",
produces = "application/json",
consumes = "application/json"
// authorizations = Array(new ApiKeyAuthDefinition("api_key",In.HEADER))
)
@SwaggerDefinition(securityDefinition = new SecurityDefinition(apiKeyAuthDefintions = Array(new ApiKeyAuthDefinition(name= "api_key", key="api_key", in=ApiKeyAuthDefinition.ApiKeyLocation.HEADER))))
class CirceController extends Controller with Circe {
case class Bar(bar: Int)
case class Foo(foo: String, bar: Bar)
val bar = Bar(1)
val foo = Foo("foo", bar)
@ApiOperation(value = "get All Todos",
notes = "Returns List of all Todos",
response = classOf[Foo], httpMethod = "GET",authorizations = Array(new Authorization(value="api_key")))
def get = AuthenticatedAction { request =>
val foo = Foo(request.jwtSession.getAs[User]("user").getOrElse(new User("Anonymous")).name, bar)
Ok(foo.asJson)
}
/*parse json to case class
def post = Action(circe.json[Foo]) { implicit request =>
val isEqual = request.body == foo
Ok(isEqual.toString)
}
def postJson = Action(circe.json) { implicit request =>
val isEqual = request.body == foo.asJson
Ok(isEqual.toString)
}
def postTolerate = Action(circe.tolerantJson[Foo]) { implicit request =>
val isEqual = request.body == foo
Ok(isEqual.toString)
}
def postTolerateJson = Action(circe.tolerantJson) { implicit request =>
val isEqual = request.body == foo.asJson
Ok(isEqual.toString)
}*/
} | sammyrulez/minimal-play2 | app/controllers/CirceController.scala | Scala | mit | 1,777 |
import scala.deriving.Mirror
@main def Test =
val mirrorTop = summon[Mirror.SumOf[lib.Top]]
assert(mirrorTop eq lib.Top) // cached in companion - previous run, tasty dependency
assert(mirrorTop.ordinal(lib.Middle()) == 0)
| dotty-staging/dotty | tests/run/i14540-priorRun/Test_2.scala | Scala | apache-2.0 | 229 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bforms.connectors
import play.api.libs.json.{JsObject, JsValue, Json}
import play.api.mvc.Action
import uk.gov.hmrc.bforms.WSHttp
import uk.gov.hmrc.bforms.models.{ FormData, FormId, FormTypeId, VerificationResult, SaveResult }
import uk.gov.hmrc.play.config.ServicesConfig
import uk.gov.hmrc.play.http.{HeaderCarrier, HttpGet, HttpPost, HttpPut, HttpResponse}
import scala.concurrent.{ExecutionContext, Future}
trait BformsConnector {
def httpGet : HttpGet
def httpPost: HttpPost
def httpPut: HttpPut
def bformsUrl: String
def retrieveFormTemplate(formTypeId: FormTypeId, version: String)(implicit hc: HeaderCarrier, ec : ExecutionContext) : Future[Option[JsObject]] = {
httpGet.GET[Option[JsObject]](bformsUrl + s"/formtemplates/$formTypeId/$version")
}
def saveForm(formDetails : JsValue, registrationNumber: String)(implicit hc : HeaderCarrier, ec : ExecutionContext) : Future[VerificationResult] = {
httpPost.POST[JsValue, VerificationResult](bformsUrl + s"/saveForm/$registrationNumber", formDetails)
}
def retrieveForm(registrationNumber: String)(implicit hc: HeaderCarrier, ec : ExecutionContext) : Future[JsObject] = {
httpPost.POSTString[JsObject](bformsUrl + s"/retrieveForm/$registrationNumber", registrationNumber)
}
def submit(registrationNumber :String)(implicit hc: HeaderCarrier, ec : ExecutionContext) : Future[HttpResponse] ={
httpGet.GET[HttpResponse](bformsUrl+s"/submit/$registrationNumber")
}
def getById(formTypeId: FormTypeId, version: String, formId: FormId)(implicit hc : HeaderCarrier) : Future[FormData] = {
httpGet.GET[FormData](bformsUrl + s"/forms/$formTypeId/$version/$formId")
}
def save(formDetails: FormData, tolerant: Boolean)(implicit hc : HeaderCarrier) : Future[SaveResult] = {
httpPost.POST[FormData, SaveResult](bformsUrl + s"/forms?tolerant=$tolerant", formDetails)
}
def update(formId: FormId, formData: FormData, tolerant: Boolean)(implicit hc : HeaderCarrier) : Future[SaveResult] = {
httpPut.PUT[FormData, SaveResult](bformsUrl + s"/forms/$formId?tolerant=$tolerant", formData)
}
def sendSubmission(formTypeId: FormTypeId, formId: FormId)(implicit hc : HeaderCarrier) : Future[HttpResponse] = {
httpPost.POSTEmpty[HttpResponse](bformsUrl + s"/forms/$formTypeId/submission/$formId")
}
}
object BformsConnector extends BformsConnector with ServicesConfig {
lazy val httpGet = WSHttp
lazy val httpPost = WSHttp
lazy val httpPut = WSHttp
def bformsUrl: String = s"${baseUrl("bforms")}/bforms"
}
| VlachJosef/bforms-frontend | app/uk/gov/hmrc/bforms/connectors/BformsConnectors.scala | Scala | apache-2.0 | 3,162 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.utils
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.prop.Tables
import org.scalatest.{FeatureSpec, Ignore, Matchers}
class NamePartsSpec extends FeatureSpec with Matchers with Tables {
val muhammadFull = "محمد بن سعيد بن عبد العزيز الفلسطيني"
val muhammadFirst = "محمد"
val muhammadInit = "ما"
val abdullahFull = "عبد الله الثاني بن الحسين"
val abdullahFirst = "عبد الله"
val abdullahInit = "عا"
val amatFull = "امه العليم السوسوه"
val amatFirst = "امه العليم"
val amatInit = "اا"
val habibFull = "حبيبا لله الثاني بن الحسين"
val habibFirst = "حبيبا لله"
val habibInit = "حا"
val names = Table(
("name", "full name", "first name", "first with initial of last", "initials"),
("John Evans", "John Evans", "John", "John E", "JE"),
("John Anthony Evans", "John Anthony Evans", "John", "John E", "JE"),
("John", "John", "John", "John", "J"),
(" John", " John", "John", "John", "J"),
("Vincent de Gryuter", "Vincent de Gryuter", "Vincent", "Vincent G", "VG"),
("Vincent de gryuter", "Vincent de gryuter", "Vincent", "Vincent g", "VG"),
("L. L. Cool J", "L. L. Cool J", "L.", "L. J", "LJ"),
("The Amazing Kangaroo", "The Amazing Kangaroo", "The", "The K", "TK"),
("Andrea:) 900973", "Andrea:) 900973", "Andrea:)", "Andrea:)", "A"),
("377 [808]", "377 [808]", "377", "377", ""),
("1234", "1234", "1234", "1234", ""),
(muhammadFull, muhammadFull, muhammadFirst, muhammadFull, muhammadInit),
(abdullahFull, abdullahFull, abdullahFirst, abdullahFull, abdullahInit),
(amatFull, amatFull, amatFirst, amatFull, amatInit),
(habibFull, habibFull, habibFirst, habibFull, habibInit),
("ζζ·θ", "ζζ·θ", "ζζ·θ", "ζζ·θ", "ζζ·"),
("ζ", "ζ", "ζ", "ζ", "ζ"),
("εζ©ζ£ζ¨", "εζ©ζ£ζ¨", "εζ©ζ£ζ¨", "εζ©ζ£ζ¨", "εζ©"),
("ζζ·θ(shumeng)", "ζζ·θ(shumeng)", "ζζ·θ(shumeng)", "ζζ·θ(shumeng)", "ζζ·"),
("ζζ·θ (shumeng)", "ζζ·θ (shumeng)", "ζζ·θ", "ζζ·θ", "ζζ·")
)
feature("Names") {
scenario("Splitting up names into parts") {
forAll(names) { (raw: String, fullExp: String, firstExp: String, firstWithInitialExp: String, initialsExp: String) =>
NameParts.parseFrom(raw) match {
case NameParts(full, first, firstWithInitial, initials) =>
full shouldEqual fullExp
first shouldEqual firstExp
firstWithInitial shouldEqual firstWithInitialExp
initials shouldEqual initialsExp
case _ => fail(s"The name '$raw' should be parsable.")
}
}
}
scenario("initials for multipart name") {
NameParts.parseFrom("some other user") shouldEqual NameParts("some other user", "some", "some u", "SU")
}
}
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/utils/NamePartsSpec.scala | Scala | gpl-3.0 | 3,659 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.resolver
import java.net.InetAddress
import java.util.{ List => JList }
import io.netty.resolver.NameResolver
import io.netty.resolver.dns.DnsCache
import io.netty.util.concurrent.{ Future, Promise }
case class DelegatingNameResolver(resolver: ExtendedDnsNameResolver, cache: DnsCache)
extends NameResolver[InetAddress] {
override def resolve(inetHost: String): Future[InetAddress] =
resolve(inetHost, resolver.executor.newPromise[InetAddress])
override def resolve(inetHost: String, promise: Promise[InetAddress]): Future[InetAddress] = {
resolver.doResolve(inetHost, promise, cache)
promise
}
override def resolveAll(inetHost: String): Future[JList[InetAddress]] =
resolveAll(inetHost, resolver.executor.newPromise[JList[InetAddress]])
override def resolveAll(inetHost: String, promise: Promise[JList[InetAddress]]): Future[JList[InetAddress]] = {
resolver.doResolveAll(inetHost, promise, cache)
promise
}
override def close(): Unit = cache.clear()
}
| GabrielPlassard/gatling | gatling-http/src/main/scala/io/gatling/http/resolver/DelegatingNameResolver.scala | Scala | apache-2.0 | 1,645 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtInteger}
import uk.gov.hmrc.ct.ct600.v2.retriever.CT600BoxRetriever
case class B18(value: Int) extends CtBoxIdentifier(name = " Net chargeable gains") with CtInteger
object B18 extends Calculated[B18, CT600BoxRetriever] {
override def calculate(fieldValueRetriever: CT600BoxRetriever): B18 = {
// B18(fieldValueRetriever.retrieveB16() - fieldValueRetriever.retrieveB17())
???
}
}
| scottcutts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B18.scala | Scala | apache-2.0 | 1,090 |
package com.twitter.finagle.serverset2.client
import com.twitter.finagle.stats.{StatsReceiver, Stat}
import com.twitter.io.Buf
import com.twitter.util._
private[serverset2] trait StatsClient extends ZooKeeperClient {
sealed trait StatFilter {
val name: String
lazy val failure = stats.counter(s"${name}_failures")
lazy val success = stats.counter(s"${name}_successes")
def apply[T](result: Future[T]): Future[T] = {
Stat
.timeFuture(stats.stat(s"${name}_latency_ms"))(result)
.respond {
case Return(_) =>
success.incr()
case Throw(ke: KeeperException) =>
stats.counter(ke.name).incr()
case Throw(_) =>
failure.incr()
}
result
}
}
protected val EphemeralFilter = new StatFilter {
val name = "ephemeral"
}
protected val MultiFilter = new StatFilter {
val name = "multi"
}
protected val ReadFilter = new StatFilter {
val name = "read"
}
protected val WatchFilter = new StatFilter {
val name = "watch"
}
protected val WriteFilter = new StatFilter {
val name = "write"
}
protected val underlying: ZooKeeperClient
protected val stats: StatsReceiver
def addAuthInfo(scheme: String, auth: Buf): Future[Unit] = underlying.addAuthInfo(scheme, auth)
def close(deadline: Time): Future[Unit] = underlying.close()
def getEphemerals(): Future[Seq[String]] = EphemeralFilter(underlying.getEphemerals())
def sessionId: Long = underlying.sessionId
def sessionPasswd: Buf = underlying.sessionPasswd
def sessionTimeout: Duration = underlying.sessionTimeout
}
private[serverset2] trait StatsReader extends StatsClient with ZooKeeperReader {
protected val underlying: ZooKeeperReader
def exists(path: String): Future[Option[Data.Stat]] = ReadFilter(underlying.exists(path))
def existsWatch(path: String): Future[Watched[Option[Data.Stat]]] =
WatchFilter(underlying.existsWatch(path))
def getData(path: String): Future[Node.Data] = ReadFilter(underlying.getData(path))
def getDataWatch(path: String): Future[Watched[Node.Data]] =
WatchFilter(underlying.getDataWatch(path))
def getACL(path: String): Future[Node.ACL] = ReadFilter(underlying.getACL(path))
def getChildren(path: String): Future[Node.Children] = ReadFilter(underlying.getChildren(path))
def getChildrenWatch(path: String): Future[Watched[Node.Children]] =
WatchFilter(underlying.getChildrenWatch(path))
def sync(path: String): Future[Unit] = ReadFilter(underlying.sync(path))
}
private[serverset2] trait StatsWriter extends StatsClient with ZooKeeperWriter {
protected val underlying: ZooKeeperWriter
def create(
path: String,
data: Option[Buf],
acl: Seq[Data.ACL],
createMode: CreateMode
): Future[String] = createMode match {
case CreateMode.Ephemeral => EphemeralFilter(underlying.create(path, data, acl, createMode))
case CreateMode.EphemeralSequential =>
EphemeralFilter(underlying.create(path, data, acl, createMode))
case _ => WriteFilter(underlying.create(path, data, acl, createMode))
}
def delete(path: String, version: Option[Int]): Future[Unit] =
WriteFilter(underlying.delete(path, version))
def setACL(path: String, acl: Seq[Data.ACL], version: Option[Int]): Future[Data.Stat] =
WriteFilter(underlying.setACL(path, acl, version))
def setData(path: String, data: Option[Buf], version: Option[Int]): Future[Data.Stat] =
WriteFilter(underlying.setData(path, data, version))
}
private[serverset2] trait StatsMulti extends StatsClient with ZooKeeperMulti {
protected val underlying: ZooKeeperMulti
def multi(ops: Seq[Op]): Future[Seq[OpResult]] = MultiFilter(underlying.multi(ops))
}
private[serverset2] trait StatsRW extends ZooKeeperRW with StatsReader with StatsWriter {
protected val underlying: ZooKeeperRW
}
private[serverset2] trait StatsRWMulti
extends ZooKeeperRWMulti
with StatsReader
with StatsWriter
with StatsMulti {
protected val underlying: ZooKeeperRWMulti
}
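// Hypothetical usage sketch (not part of the original API; `rawClient` and `statsReceiver` are
// illustrative names supplied by the caller): one plausible way to wire these mix-ins around an
// existing client so that every call is counted and timed per filter name.
//
//   val instrumented: ZooKeeperRW = new StatsRW {
//     protected val underlying: ZooKeeperRW = rawClient
//     protected val stats: StatsReceiver = statsReceiver
//   }
//   // reads are now recorded under "read_successes"/"read_failures" plus a latency stat
//   instrumented.getData("/some/path")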
private[serverset2] trait EventStats {
import NodeEvent._
protected val stats: StatsReceiver
private[this] lazy val createdCounter = stats.counter(Created.name)
private[this] lazy val dataChangedCounter = stats.counter(DataChanged.name)
private[this] lazy val deletedCounter = stats.counter(Deleted.name)
private[this] lazy val childrenChangedCounter = stats.counter(ChildrenChanged.name)
private[this] lazy val dataWatchRemovedCounter = stats.counter(DataWatchRemoved.name)
private[this] lazy val childWatchRemovedCounter = stats.counter(ChildWatchRemoved.name)
protected def EventFilter(event: NodeEvent): NodeEvent = {
event match {
case Created => createdCounter.incr()
case DataChanged => dataChangedCounter.incr()
case Deleted => deletedCounter.incr()
case ChildrenChanged => childrenChangedCounter.incr()
case DataWatchRemoved => dataWatchRemovedCounter.incr()
case ChildWatchRemoved => childWatchRemovedCounter.incr()
}
event
}
}
object SessionStats {
def watcher(
underlying: Var[WatchState],
statsReceiver: StatsReceiver,
interval: Duration,
timer: Timer
): Var[WatchState] = {
import SessionState._
val unknownCounter = statsReceiver.counter(Unknown.name)
val authFailedCounter = statsReceiver.counter(AuthFailed.name)
val disconnectedCounter = statsReceiver.counter(Disconnected.name)
val expiredCounter = statsReceiver.counter(Expired.name)
val syncConnectedCounter = statsReceiver.counter(SyncConnected.name)
val noSyncConnectedCounter = statsReceiver.counter(NoSyncConnected.name)
val connectedReadOnlyCounter = statsReceiver.counter(ConnectedReadOnly.name)
val saslAuthenticatedCounter = statsReceiver.counter(SaslAuthenticated.name)
Var.async[WatchState](WatchState.Pending) { v =>
val stateTracker = new StateTracker(statsReceiver, interval, timer)
underlying.changes.respond { w: WatchState =>
w match {
case WatchState.SessionState(newState) =>
stateTracker.transition(newState)
newState match {
case Unknown => unknownCounter.incr()
case AuthFailed => authFailedCounter.incr()
case Disconnected => disconnectedCounter.incr()
case Expired => expiredCounter.incr()
case SyncConnected => syncConnectedCounter.incr()
case NoSyncConnected => noSyncConnectedCounter.incr()
case ConnectedReadOnly => connectedReadOnlyCounter.incr()
case SaslAuthenticated => saslAuthenticatedCounter.incr()
}
case _ => ()
}
v() = w
}
stateTracker
}
}
}
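// Hypothetical usage sketch (hedged, not in the original source): SessionStats.watcher wraps a
// session-state Var so that state transitions are counted as a side effect. A caller might use
// it roughly as below; `zkWatchState`, `statsReceiver` and `timer` are assumed to exist, and
// `5.seconds` relies on the usual twitter-util duration conversions.
//
//   val observed: Var[WatchState] =
//     SessionStats.watcher(zkWatchState, statsReceiver, 5.seconds, timer)
//   observed.changes.respond { state => /* react to the state change as before */ () }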
| luciferous/finagle | finagle-serversets/src/main/scala/com/twitter/finagle/serverset2/client/Stats.scala | Scala | apache-2.0 | 6,747 |
/*
* Copyright (c) 2015 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression
package context.invoker
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.context.JdiContext
import org.scalaide.debug.internal.expression.proxies.JdiProxy
import org.scalaide.debug.internal.expression.proxies.StringJdiProxy
import com.sun.jdi.Value
/**
* Custom handler for string concatenation (`obj + String` or `String + obj`).
*
* Those calls are replaced with `String.concat(a, b)`.
*/
class StringConcatenationMethod(proxy: JdiProxy, name: String, args: Seq[JdiProxy], context: JdiContext)
extends MethodInvoker {
private def stringify(proxy: JdiProxy) = StringJdiProxy(context, context.callToString(proxy.__autoboxed))
private def callConcatMethod(proxy: JdiProxy, arg: JdiProxy) =
context.tryInvokeUnboxed(proxy, None, "concat", Seq(stringify(arg)))
override def apply(): Option[Value] = (name, args) match {
case ("+" | "$plus", Seq(arg)) =>
(proxy.__type.name, arg.__type.name) match {
case (Java.String, _) => callConcatMethod(proxy, arg)
case (_, Java.String) => callConcatMethod(stringify(proxy), arg)
case _ => None
}
case _ => None
}
}
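// Hypothetical illustration (hedged, not from the original file): for a debugged expression such
// as `x + "suffix"`, this invoker would be constructed roughly as
//
//   new StringConcatenationMethod(proxyForX, "$plus", Seq(suffixProxy), context).apply()
//
// When either operand's type is java.lang.String it returns Some(value) obtained by calling
// `concat` on the stringified receiver; otherwise it returns None so other invokers can be tried.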
| stephenh/scala-ide | org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/context/invoker/StringAddMethod.scala | Scala | bsd-3-clause | 1,285 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen
import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.async.AsyncFunction
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.codegen.CodeGenUtils._
import org.apache.flink.table.codegen.GenerateUtils._
import org.apache.flink.table.codegen.Indenter.toISC
import org.apache.flink.table.dataformat.DataFormatConverters.DataFormatConverter
import org.apache.flink.table.dataformat.{BaseRow, DataFormatConverters, GenericRow, JoinedRow}
import org.apache.flink.table.functions.{AsyncTableFunction, TableFunction}
import org.apache.flink.table.generated.{GeneratedCollector, GeneratedFunction, GeneratedResultFuture}
import org.apache.flink.table.plan.util.LookupJoinUtil.{ConstantLookupKey, FieldRefLookupKey, LookupKey}
import org.apache.flink.table.runtime.collector.{TableFunctionCollector, TableFunctionResultFuture}
import org.apache.flink.table.runtime.join.lookup.DelegatingResultFuture
import org.apache.flink.table.types.LogicalTypeDataTypeConverter.fromLogicalTypeToDataType
import org.apache.flink.table.types.logical.{LogicalType, RowType}
import org.apache.flink.table.types.utils.TypeConversions
import org.apache.flink.types.Row
import org.apache.flink.util.Collector
import org.apache.calcite.rex.{RexNode, RexProgram}
import java.util
object LookupJoinCodeGenerator {
val ARRAY_LIST = className[util.ArrayList[_]]
/**
* Generates a lookup function ([[TableFunction]])
*/
def generateLookupFunction(
config: TableConfig,
typeFactory: FlinkTypeFactory,
inputType: LogicalType,
returnType: LogicalType,
tableReturnTypeInfo: TypeInformation[_],
lookupKeyInOrder: Array[Int],
// index field position -> lookup key
allLookupFields: Map[Int, LookupKey],
lookupFunction: TableFunction[_],
enableObjectReuse: Boolean)
: GeneratedFunction[FlatMapFunction[BaseRow, BaseRow]] = {
val ctx = CodeGeneratorContext(config)
val (prepareCode, parameters) = prepareParameters(
ctx,
typeFactory,
inputType,
lookupKeyInOrder,
allLookupFields,
tableReturnTypeInfo.isInstanceOf[RowTypeInfo],
enableObjectReuse)
val lookupFunctionTerm = ctx.addReusableFunction(lookupFunction)
val setCollectorCode = tableReturnTypeInfo match {
case rt: RowTypeInfo =>
val converterCollector = new RowToBaseRowCollector(rt)
val term = ctx.addReusableObject(converterCollector, "collector")
s"""
|$term.setCollector($DEFAULT_COLLECTOR_TERM);
|$lookupFunctionTerm.setCollector($term);
""".stripMargin
case _ =>
s"$lookupFunctionTerm.setCollector($DEFAULT_COLLECTOR_TERM);"
}
val body =
s"""
|$prepareCode
|$setCollectorCode
|$lookupFunctionTerm.eval($parameters);
""".stripMargin
FunctionCodeGenerator.generateFunction(
ctx,
"LookupFunction",
classOf[FlatMapFunction[BaseRow, BaseRow]],
body,
returnType,
inputType)
}
/**
   * Generates an async lookup function ([[AsyncTableFunction]])
*/
def generateAsyncLookupFunction(
config: TableConfig,
typeFactory: FlinkTypeFactory,
inputType: LogicalType,
returnType: LogicalType,
tableReturnTypeInfo: TypeInformation[_],
lookupKeyInOrder: Array[Int],
allLookupFields: Map[Int, LookupKey],
asyncLookupFunction: AsyncTableFunction[_])
: GeneratedFunction[AsyncFunction[BaseRow, AnyRef]] = {
val ctx = CodeGeneratorContext(config)
val (prepareCode, parameters) = prepareParameters(
ctx,
typeFactory,
inputType,
lookupKeyInOrder,
allLookupFields,
tableReturnTypeInfo.isInstanceOf[RowTypeInfo],
fieldCopy = true) // always copy input field because of async buffer
val lookupFunctionTerm = ctx.addReusableFunction(asyncLookupFunction)
val DELEGATE = className[DelegatingResultFuture[_]]
val body =
s"""
|$prepareCode
|$DELEGATE delegates = new $DELEGATE($DEFAULT_COLLECTOR_TERM);
|$lookupFunctionTerm.eval(delegates.getCompletableFuture(), $parameters);
""".stripMargin
FunctionCodeGenerator.generateFunction(
ctx,
"LookupFunction",
classOf[AsyncFunction[BaseRow, AnyRef]],
body,
returnType,
inputType)
}
/**
* Prepares parameters and returns (code, parameters)
*/
private def prepareParameters(
ctx: CodeGeneratorContext,
typeFactory: FlinkTypeFactory,
inputType: LogicalType,
lookupKeyInOrder: Array[Int],
allLookupFields: Map[Int, LookupKey],
isExternalArgs: Boolean,
fieldCopy: Boolean): (String, String) = {
val inputFieldExprs = for (i <- lookupKeyInOrder) yield {
allLookupFields.get(i) match {
case Some(ConstantLookupKey(dataType, literal)) =>
generateLiteral(ctx, dataType, literal.getValue3)
case Some(FieldRefLookupKey(index)) =>
generateInputAccess(
ctx,
inputType,
DEFAULT_INPUT1_TERM,
index,
nullableInput = false,
fieldCopy)
case None =>
throw new CodeGenException("This should never happen!")
}
}
val codeAndArg = inputFieldExprs
.map { e =>
val dataType = fromLogicalTypeToDataType(e.resultType)
val bType = if (isExternalArgs) {
boxedTypeTermForExternalType(dataType)
} else {
boxedTypeTermForType(e.resultType)
}
val assign = if (isExternalArgs) {
CodeGenUtils.genToExternal(ctx, dataType, e.resultTerm)
} else {
e.resultTerm
}
val newTerm = newName("arg")
val code =
s"""
|$bType $newTerm = null;
|if (!${e.nullTerm}) {
| $newTerm = $assign;
|}
""".stripMargin
(code, newTerm)
}
    (codeAndArg.map(_._1).mkString("\n"), codeAndArg.map(_._2).mkString(", "))
}
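  // Hypothetical illustration (assumed shape of the generated code; `arg$1`, `isNull$1` and
  // `field$1` are made-up names): for a single integer lookup key the per-argument snippet
  // produced above looks roughly like
  //
  //   java.lang.Integer arg$1 = null;
  //   if (!isNull$1) {
  //     arg$1 = field$1;
  //   }
  //
  // and the returned pair is (all such snippets joined by newlines, "arg$1, arg$2, ...").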
/**
* Generates collector for temporal join ([[Collector]])
*
   * Differs from CommonCorrelate.generateCollector, which has no real condition because of
   * FLINK-7865; here we must handle the outer join type when the real condition filters out results.
*/
def generateCollector(
ctx: CodeGeneratorContext,
inputType: RowType,
udtfTypeInfo: RowType,
resultType: RowType,
condition: Option[RexNode],
pojoFieldMapping: Option[Array[Int]],
retainHeader: Boolean = true): GeneratedCollector[TableFunctionCollector[BaseRow]] = {
val inputTerm = DEFAULT_INPUT1_TERM
val udtfInputTerm = DEFAULT_INPUT2_TERM
val exprGenerator = new ExprCodeGenerator(ctx, nullableInput = false)
.bindInput(udtfTypeInfo, inputTerm = udtfInputTerm, inputFieldMapping = pojoFieldMapping)
val udtfResultExpr = exprGenerator.generateConverterResultExpression(
udtfTypeInfo, classOf[GenericRow])
val joinedRowTerm = CodeGenUtils.newName("joinedRow")
ctx.addReusableOutputRecord(resultType, classOf[JoinedRow], joinedRowTerm)
val header = if (retainHeader) {
s"$joinedRowTerm.setHeader($inputTerm.getHeader());"
} else {
""
}
val body =
s"""
|${udtfResultExpr.code}
|$joinedRowTerm.replace($inputTerm, ${udtfResultExpr.resultTerm});
|$header
|outputResult($joinedRowTerm);
""".stripMargin
val collectorCode = if (condition.isEmpty) {
body
} else {
val filterGenerator = new ExprCodeGenerator(ctx, nullableInput = false)
.bindInput(inputType, inputTerm)
.bindSecondInput(udtfTypeInfo, udtfInputTerm, pojoFieldMapping)
val filterCondition = filterGenerator.generateExpression(condition.get)
s"""
|${filterCondition.code}
|if (${filterCondition.resultTerm}) {
| $body
|}
|""".stripMargin
}
generateTableFunctionCollectorForJoinTable(
ctx,
"JoinTableFuncCollector",
collectorCode,
inputType,
udtfTypeInfo,
inputTerm = inputTerm,
collectedTerm = udtfInputTerm)
}
/**
   * The only difference from CollectorCodeGenerator.generateTableFunctionCollector is that
   * the "super.collect" call is bound to collecting the joined row in the "body" code.
*/
private def generateTableFunctionCollectorForJoinTable(
ctx: CodeGeneratorContext,
name: String,
bodyCode: String,
inputType: RowType,
collectedType: RowType,
inputTerm: String = DEFAULT_INPUT1_TERM,
collectedTerm: String = DEFAULT_INPUT2_TERM)
: GeneratedCollector[TableFunctionCollector[BaseRow]] = {
val funcName = newName(name)
val input1TypeClass = boxedTypeTermForType(inputType)
val input2TypeClass = boxedTypeTermForType(collectedType)
val funcCode =
s"""
public class $funcName extends ${classOf[TableFunctionCollector[_]].getCanonicalName} {
${ctx.reuseMemberCode()}
public $funcName(Object[] references) throws Exception {
${ctx.reuseInitCode()}
}
@Override
public void open(${className[Configuration]} parameters) throws Exception {
${ctx.reuseOpenCode()}
}
@Override
public void collect(Object record) throws Exception {
$input1TypeClass $inputTerm = ($input1TypeClass) getInput();
$input2TypeClass $collectedTerm = ($input2TypeClass) record;
${ctx.reuseLocalVariableCode()}
${ctx.reuseInputUnboxingCode()}
$bodyCode
}
@Override
public void close() throws Exception {
${ctx.reuseCloseCode()}
}
}
""".stripMargin
new GeneratedCollector(funcName, funcCode, ctx.references.toArray)
}
/**
* Generates a [[TableFunctionResultFuture]] that can be passed to Java compiler.
*
* @param config The TableConfig
   * @param name Class name of the table function collector. Need not be unique but has to be a
   *             valid Java class identifier.
* @param leftInputType The type information of the element being collected
* @param collectedType The type information of the element collected by the collector
* @param condition The filter condition before collect elements
* @return instance of GeneratedCollector
*/
def generateTableAsyncCollector(
config: TableConfig,
name: String,
leftInputType: RowType,
collectedType: RowType,
condition: Option[RexNode])
: GeneratedResultFuture[TableFunctionResultFuture[BaseRow]] = {
val funcName = newName(name)
val input1TypeClass = boxedTypeTermForType(leftInputType)
val input2TypeClass = boxedTypeTermForType(collectedType)
val input1Term = DEFAULT_INPUT1_TERM
val input2Term = DEFAULT_INPUT2_TERM
val outTerm = "resultCollection"
val ctx = CodeGeneratorContext(config)
val body = if (condition.isEmpty) {
"getResultFuture().complete(records);"
} else {
val filterGenerator = new ExprCodeGenerator(ctx, nullableInput = false)
.bindInput(leftInputType, input1Term)
.bindSecondInput(collectedType, input2Term)
val filterCondition = filterGenerator.generateExpression(condition.get)
s"""
|if (records == null || records.size() == 0) {
| getResultFuture().complete(java.util.Collections.emptyList());
| return;
|}
|try {
| $input1TypeClass $input1Term = ($input1TypeClass) getInput();
| $ARRAY_LIST $outTerm = new $ARRAY_LIST();
| for (Object record : records) {
| $input2TypeClass $input2Term = ($input2TypeClass) record;
| ${ctx.reuseLocalVariableCode()}
| ${ctx.reuseInputUnboxingCode()}
| ${ctx.reusePerRecordCode()}
| ${filterCondition.code}
| if (${filterCondition.resultTerm}) {
| $outTerm.add(record);
| }
| }
| getResultFuture().complete($outTerm);
|} catch (Exception e) {
| getResultFuture().completeExceptionally(e);
|}
|""".stripMargin
}
val funcCode =
j"""
public class $funcName extends ${classOf[TableFunctionResultFuture[_]].getCanonicalName} {
${ctx.reuseMemberCode()}
public $funcName(Object[] references) throws Exception {
${ctx.reuseInitCode()}
}
@Override
public void open(${className[Configuration]} parameters) throws Exception {
${ctx.reuseOpenCode()}
}
@Override
public void complete(java.util.Collection records) throws Exception {
$body
}
public void close() throws Exception {
${ctx.reuseCloseCode()}
}
}
""".stripMargin
new GeneratedResultFuture(funcName, funcCode, ctx.references.toArray)
}
/**
   * Generates the calc flatmap function for a temporal join, which is used
   * to project/filter the dimension table results
*/
def generateCalcMapFunction(
config: TableConfig,
calcProgram: Option[RexProgram],
tableSourceRowType: RowType)
: GeneratedFunction[FlatMapFunction[BaseRow, BaseRow]] = {
val program = calcProgram.get
val condition = if (program.getCondition != null) {
Some(program.expandLocalRef(program.getCondition))
} else {
None
}
CalcCodeGenerator.generateFunction(
tableSourceRowType,
"TableCalcMapFunction",
FlinkTypeFactory.toLogicalRowType(program.getOutputRowType),
classOf[GenericRow],
program,
condition,
config)
}
// ----------------------------------------------------------------------------------------
// Utility Classes
// ----------------------------------------------------------------------------------------
class RowToBaseRowCollector(rowTypeInfo: RowTypeInfo)
extends TableFunctionCollector[Row] with Serializable {
private val converter = DataFormatConverters.getConverterForDataType(
TypeConversions.fromLegacyInfoToDataType(rowTypeInfo))
.asInstanceOf[DataFormatConverter[BaseRow, Row]]
override def collect(record: Row): Unit = {
val result = converter.toInternal(record)
outputResult(result)
}
}
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/codegen/LookupJoinCodeGenerator.scala | Scala | apache-2.0 | 15,550 |
package io.buoyant
/**
* The [[io.buoyant.router]] and [[com.twitter.finagle.buoyant]]
* packages provide a library for building RPC routers with Finagle.
*/
package object router
| denverwilliams/linkerd | router/core/src/main/scala/com/twitter/finagle/buoyant/package.scala | Scala | apache-2.0 | 184 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.io.{File, IOException}
import java.lang.{Integer => JInt}
import java.net.{InetAddress, InetSocketAddress}
import java.nio.charset.StandardCharsets
import java.util.{Collections, Map => JMap, Properties, UUID}
import java.util.concurrent.TimeUnit
import javax.security.auth.login.Configuration
import scala.collection.JavaConverters._
import scala.util.Random
import com.google.common.io.Files
import kafka.api.Request
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.server.checkpoints.OffsetCheckpointFile
import kafka.utils.ZkUtils
import org.apache.hadoop.minikdc.MiniKdc
import org.apache.hadoop.security.UserGroupInformation
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.admin.{AdminClient, CreatePartitionsOptions, ListConsumerGroupsResult, NewPartitions, NewTopic}
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.producer._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.config.SaslConfigs
import org.apache.kafka.common.header.Header
import org.apache.kafka.common.header.internals.RecordHeader
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.security.auth.SecurityProtocol.{PLAINTEXT, SASL_PLAINTEXT}
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.apache.zookeeper.server.{NIOServerCnxnFactory, ZooKeeperServer}
import org.apache.zookeeper.server.auth.SASLAuthenticationProvider
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.kafka010.KafkaTokenUtil
import org.apache.spark.util.{ShutdownHookManager, Utils}
/**
* This is a helper class for Kafka test suites. This has the functionality to set up
* and tear down local Kafka servers, and to push data using Kafka producers.
*
 * The reason to put the Kafka test utility class in src is so that Python-related Kafka APIs can be tested.
*/
class KafkaTestUtils(
withBrokerProps: Map[String, Object] = Map.empty,
secure: Boolean = false) extends Logging {
private val JAVA_AUTH_CONFIG = "java.security.auth.login.config"
private val localCanonicalHostName = InetAddress.getLoopbackAddress().getCanonicalHostName()
logInfo(s"Local host name is $localCanonicalHostName")
private var kdc: MiniKdc = _
// Zookeeper related configurations
private val zkHost = localCanonicalHostName
private var zkPort: Int = 0
private val zkConnectionTimeout = 60000
private val zkSessionTimeout = 10000
private var zookeeper: EmbeddedZookeeper = _
private var zkUtils: ZkUtils = _
// Kafka broker related configurations
private val brokerHost = localCanonicalHostName
private var brokerPort = 0
private var brokerConf: KafkaConfig = _
private val brokerServiceName = "kafka"
private val clientUser = s"client/$localCanonicalHostName"
private var clientKeytabFile: File = _
// Kafka broker server
private var server: KafkaServer = _
private var adminClient: AdminClient = _
// Kafka producer
private var producer: Producer[String, String] = _
// Flag to test whether the system is correctly started
private var kdcReady = false
private var zkReady = false
private var brokerReady = false
private var leakDetector: AnyRef = null
def zkAddress: String = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper address")
s"$zkHost:$zkPort"
}
def brokerAddress: String = {
assert(brokerReady, "Kafka not setup yet or already torn down, cannot get broker address")
s"$brokerHost:$brokerPort"
}
def zookeeperClient: ZkUtils = {
assert(zkReady, "Zookeeper not setup yet or already torn down, cannot get zookeeper client")
Option(zkUtils).getOrElse(
throw new IllegalStateException("Zookeeper client is not yet initialized"))
}
def clientPrincipal: String = {
assert(kdcReady, "KDC should be set up beforehand")
clientUser + "@" + kdc.getRealm()
}
def clientKeytab: String = {
assert(kdcReady, "KDC should be set up beforehand")
clientKeytabFile.getAbsolutePath()
}
private def setUpMiniKdc(): Unit = {
val kdcDir = Utils.createTempDir()
val kdcConf = MiniKdc.createConf()
kdc = new MiniKdc(kdcConf, kdcDir)
kdc.start()
kdcReady = true
}
private def createKeytabsAndJaasConfigFile(): String = {
assert(kdcReady, "KDC should be set up beforehand")
val baseDir = Utils.createTempDir()
val zkServerUser = s"zookeeper/$localCanonicalHostName"
val zkServerKeytabFile = new File(baseDir, "zookeeper.keytab")
kdc.createPrincipal(zkServerKeytabFile, zkServerUser)
logDebug(s"Created keytab file: ${zkServerKeytabFile.getAbsolutePath()}")
val zkClientUser = s"zkclient/$localCanonicalHostName"
val zkClientKeytabFile = new File(baseDir, "zkclient.keytab")
kdc.createPrincipal(zkClientKeytabFile, zkClientUser)
logDebug(s"Created keytab file: ${zkClientKeytabFile.getAbsolutePath()}")
val kafkaServerUser = s"kafka/$localCanonicalHostName"
val kafkaServerKeytabFile = new File(baseDir, "kafka.keytab")
kdc.createPrincipal(kafkaServerKeytabFile, kafkaServerUser)
logDebug(s"Created keytab file: ${kafkaServerKeytabFile.getAbsolutePath()}")
clientKeytabFile = new File(baseDir, "client.keytab")
kdc.createPrincipal(clientKeytabFile, clientUser)
logDebug(s"Created keytab file: ${clientKeytabFile.getAbsolutePath()}")
val file = new File(baseDir, "jaas.conf");
val realm = kdc.getRealm()
val content =
s"""
|Server {
| ${KafkaTokenUtil.getKrb5LoginModuleName} required
| useKeyTab=true
| storeKey=true
| useTicketCache=false
| keyTab="${zkServerKeytabFile.getAbsolutePath()}"
| principal="$zkServerUser@$realm";
|};
|
|Client {
| ${KafkaTokenUtil.getKrb5LoginModuleName} required
| useKeyTab=true
| storeKey=true
| useTicketCache=false
| keyTab="${zkClientKeytabFile.getAbsolutePath()}"
| principal="$zkClientUser@$realm";
|};
|
|KafkaServer {
| ${KafkaTokenUtil.getKrb5LoginModuleName} required
| serviceName="$brokerServiceName"
| useKeyTab=true
| storeKey=true
| keyTab="${kafkaServerKeytabFile.getAbsolutePath()}"
| principal="$kafkaServerUser@$realm";
|};
""".stripMargin.trim
Files.write(content, file, StandardCharsets.UTF_8)
logDebug(s"Created JAAS file: ${file.getPath}")
logDebug(s"JAAS file content: $content")
file.getAbsolutePath()
}
// Set up the Embedded Zookeeper server and get the proper Zookeeper port
private def setupEmbeddedZookeeper(): Unit = {
// Zookeeper server startup
zookeeper = new EmbeddedZookeeper(s"$zkHost:$zkPort")
// Get the actual zookeeper binding port
zkPort = zookeeper.actualPort
zkUtils = ZkUtils(s"$zkHost:$zkPort", zkSessionTimeout, zkConnectionTimeout, false)
zkReady = true
}
// Set up the Embedded Kafka server
private def setupEmbeddedKafkaServer(): Unit = {
assert(zkReady, "Zookeeper should be set up beforehand")
val protocolName = if (!secure) PLAINTEXT.name else SASL_PLAINTEXT.name
// Kafka broker startup
Utils.startServiceOnPort(brokerPort, port => {
brokerPort = port
brokerConf = new KafkaConfig(brokerConfiguration, doLog = false)
server = new KafkaServer(brokerConf)
server.startup()
brokerPort = server.boundPort(new ListenerName(protocolName))
(server, brokerPort)
}, new SparkConf(), "KafkaBroker")
adminClient = AdminClient.create(adminClientConfiguration)
brokerReady = true
}
/** setup the whole embedded servers, including Zookeeper and Kafka brokers */
def setup(): Unit = {
    // Set up a KafkaTestUtils leak detector so that we can see where the leaked KafkaTestUtils
    // is created.
val exception = new SparkException("It was created at: ")
leakDetector = ShutdownHookManager.addShutdownHook { () =>
logError("Found a leak KafkaTestUtils.", exception)
}
if (secure) {
setUpMiniKdc()
val jaasConfigFile = createKeytabsAndJaasConfigFile()
System.setProperty(JAVA_AUTH_CONFIG, jaasConfigFile)
Configuration.getConfiguration.refresh()
} else {
System.clearProperty(JAVA_AUTH_CONFIG)
}
setupEmbeddedZookeeper()
setupEmbeddedKafkaServer()
eventually(timeout(1.minute)) {
assert(zkUtils.getAllBrokersInCluster().nonEmpty, "Broker was not up in 60 seconds")
}
}
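  // Hypothetical test usage (not part of this utility; topic name and messages are illustrative):
  // a suite would typically drive the helper like this.
  //
  //   val testUtils = new KafkaTestUtils
  //   testUtils.setup()
  //   try {
  //     testUtils.createTopic("test-topic", partitions = 3)
  //     testUtils.sendMessages("test-topic", Array("a", "b", "c"))
  //   } finally {
  //     testUtils.teardown()
  //   }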
/** Teardown the whole servers, including Kafka broker and Zookeeper */
def teardown(): Unit = {
if (leakDetector != null) {
ShutdownHookManager.removeShutdownHook(leakDetector)
}
brokerReady = false
zkReady = false
if (producer != null) {
producer.close()
producer = null
}
if (adminClient != null) {
adminClient.close()
}
if (server != null) {
server.shutdown()
server.awaitShutdown()
server = null
}
// On Windows, `logDirs` is left open even after Kafka server above is completely shut down
// in some cases. It leads to test failures on Windows if the directory deletion failure
// throws an exception.
brokerConf.logDirs.foreach { f =>
try {
Utils.deleteRecursively(new File(f))
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
}
if (zkUtils != null) {
zkUtils.close()
zkUtils = null
}
if (zookeeper != null) {
zookeeper.shutdown()
zookeeper = null
}
System.clearProperty(JAVA_AUTH_CONFIG)
Configuration.getConfiguration.refresh()
if (kdc != null) {
kdc.stop()
}
UserGroupInformation.reset()
}
/** Create a Kafka topic and wait until it is propagated to the whole cluster */
def createTopic(topic: String, partitions: Int, overwrite: Boolean = false): Unit = {
var created = false
while (!created) {
try {
val newTopic = new NewTopic(topic, partitions, 1)
adminClient.createTopics(Collections.singleton(newTopic))
created = true
} catch {
// Workaround fact that TopicExistsException is in kafka.common in 0.10.0 and
// org.apache.kafka.common.errors in 0.10.1 (!)
case e: Exception if (e.getClass.getSimpleName == "TopicExistsException") && overwrite =>
deleteTopic(topic)
}
}
// wait until metadata is propagated
(0 until partitions).foreach { p =>
waitUntilMetadataIsPropagated(topic, p)
}
}
def getAllTopicsAndPartitionSize(): Seq[(String, Int)] = {
zkUtils.getPartitionsForTopics(zkUtils.getAllTopics()).mapValues(_.size).toSeq
}
/** Create a Kafka topic and wait until it is propagated to the whole cluster */
def createTopic(topic: String): Unit = {
createTopic(topic, 1)
}
/** Delete a Kafka topic and wait until it is propagated to the whole cluster */
def deleteTopic(topic: String): Unit = {
val partitions = zkUtils.getPartitionsForTopics(Seq(topic))(topic).size
adminClient.deleteTopics(Collections.singleton(topic))
verifyTopicDeletionWithRetries(zkUtils, topic, partitions, List(this.server))
}
/** Add new partitions to a Kafka topic */
def addPartitions(topic: String, partitions: Int): Unit = {
adminClient.createPartitions(
Map(topic -> NewPartitions.increaseTo(partitions)).asJava,
new CreatePartitionsOptions)
// wait until metadata is propagated
(0 until partitions).foreach { p =>
waitUntilMetadataIsPropagated(topic, p)
}
}
def sendMessages(topic: String, msgs: Array[String]): Seq[(String, RecordMetadata)] = {
sendMessages(topic, msgs, None)
}
def sendMessages(
topic: String,
msgs: Array[String],
part: Option[Int]): Seq[(String, RecordMetadata)] = {
val records = msgs.map { msg =>
val builder = new RecordBuilder(topic, msg)
part.foreach { p => builder.partition(p) }
builder.build()
}
sendMessages(records)
}
def sendMessage(msg: ProducerRecord[String, String]): Seq[(String, RecordMetadata)] = {
sendMessages(Array(msg))
}
def sendMessages(msgs: Seq[ProducerRecord[String, String]]): Seq[(String, RecordMetadata)] = {
producer = new KafkaProducer[String, String](producerConfiguration)
val offsets = try {
msgs.map { msg =>
val metadata = producer.send(msg).get(10, TimeUnit.SECONDS)
logInfo(s"\tSent ($msg) to partition ${metadata.partition}, offset ${metadata.offset}")
(msg.value(), metadata)
}
} finally {
if (producer != null) {
producer.close()
producer = null
}
}
offsets
}
def cleanupLogs(): Unit = {
server.logManager.cleanupLogs()
}
def getEarliestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
val kc = new KafkaConsumer[String, String](consumerConfiguration)
logInfo("Created consumer to get earliest offsets")
kc.subscribe(topics.asJavaCollection)
kc.poll(0)
val partitions = kc.assignment()
kc.pause(partitions)
kc.seekToBeginning(partitions)
val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
kc.close()
logInfo("Closed consumer to get earliest offsets")
offsets
}
def getLatestOffsets(topics: Set[String]): Map[TopicPartition, Long] = {
val kc = new KafkaConsumer[String, String](consumerConfiguration)
logInfo("Created consumer to get latest offsets")
kc.subscribe(topics.asJavaCollection)
kc.poll(0)
val partitions = kc.assignment()
kc.pause(partitions)
kc.seekToEnd(partitions)
val offsets = partitions.asScala.map(p => p -> kc.position(p)).toMap
kc.close()
logInfo("Closed consumer to get latest offsets")
offsets
}
def listConsumerGroups(): ListConsumerGroupsResult = {
adminClient.listConsumerGroups()
}
protected def brokerConfiguration: Properties = {
val props = new Properties()
props.put("broker.id", "0")
props.put("host.name", "127.0.0.1")
props.put("advertised.host.name", "127.0.0.1")
props.put("port", brokerPort.toString)
props.put("log.dir", Utils.createTempDir().getAbsolutePath)
props.put("zookeeper.connect", zkAddress)
props.put("zookeeper.connection.timeout.ms", "60000")
props.put("log.flush.interval.messages", "1")
props.put("replica.socket.timeout.ms", "1500")
props.put("delete.topic.enable", "true")
props.put("group.initial.rebalance.delay.ms", "10")
// Change the following settings as we have only 1 broker
props.put("offsets.topic.num.partitions", "1")
props.put("offsets.topic.replication.factor", "1")
props.put("transaction.state.log.replication.factor", "1")
props.put("transaction.state.log.min.isr", "1")
if (secure) {
props.put("listeners", "SASL_PLAINTEXT://127.0.0.1:0")
props.put("advertised.listeners", "SASL_PLAINTEXT://127.0.0.1:0")
props.put("inter.broker.listener.name", "SASL_PLAINTEXT")
props.put("delegation.token.master.key", UUID.randomUUID().toString)
props.put("sasl.enabled.mechanisms", "GSSAPI,SCRAM-SHA-512")
}
// Can not use properties.putAll(propsMap.asJava) in scala-2.12
// See https://github.com/scala/bug/issues/10418
withBrokerProps.foreach { case (k, v) => props.put(k, v) }
props
}
private def adminClientConfiguration: Properties = {
val props = new Properties()
props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, s"$brokerHost:$brokerPort")
setAuthenticationConfigIfNeeded(props)
props
}
private def producerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("value.serializer", classOf[StringSerializer].getName)
props.put("key.serializer", classOf[StringSerializer].getName)
// wait for all in-sync replicas to ack sends
props.put("acks", "all")
setAuthenticationConfigIfNeeded(props)
props
}
/** Call `f` with a `KafkaProducer` that has initialized transactions. */
def withTranscationalProducer(f: KafkaProducer[String, String] => Unit): Unit = {
val props = producerConfiguration
props.put("transactional.id", UUID.randomUUID().toString)
val producer = new KafkaProducer[String, String](props)
try {
producer.initTransactions()
f(producer)
} finally {
producer.close()
}
}
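  // Hypothetical usage (assumed, names illustrative): exercising a transactional write from a
  // test, relying on the method above to create, initialize and close the producer.
  //
  //   testUtils.withTranscationalProducer { producer =>
  //     producer.beginTransaction()
  //     producer.send(new ProducerRecord[String, String]("test-topic", "key", "value"))
  //     producer.commitTransaction()
  //   }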
private def consumerConfiguration: Properties = {
val props = new Properties()
props.put("bootstrap.servers", brokerAddress)
props.put("group.id", "group-KafkaTestUtils-" + Random.nextInt)
props.put("value.deserializer", classOf[StringDeserializer].getName)
props.put("key.deserializer", classOf[StringDeserializer].getName)
props.put("enable.auto.commit", "false")
setAuthenticationConfigIfNeeded(props)
props
}
private def setAuthenticationConfigIfNeeded(props: Properties): Unit = {
if (secure) {
val jaasParams = KafkaTokenUtil.getKeytabJaasParams(
clientKeytabFile.getAbsolutePath, clientPrincipal, brokerServiceName)
props.put(SaslConfigs.SASL_JAAS_CONFIG, jaasParams)
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SASL_PLAINTEXT.name)
}
}
  /** Verify topic is deleted in all places, e.g., brokers, zookeeper. */
private def verifyTopicDeletion(
topic: String,
numPartitions: Int,
servers: Seq[KafkaServer]): Unit = {
val topicAndPartitions = (0 until numPartitions).map(new TopicPartition(topic, _))
import ZkUtils._
// wait until admin path for delete topic is deleted, signaling completion of topic deletion
assert(
!zkUtils.pathExists(getDeleteTopicPath(topic)),
s"${getDeleteTopicPath(topic)} still exists")
assert(!zkUtils.pathExists(getTopicPath(topic)), s"${getTopicPath(topic)} still exists")
// ensure that the topic-partition has been deleted from all brokers' replica managers
assert(servers.forall(server => topicAndPartitions.forall(tp =>
server.replicaManager.getPartition(tp) == None)),
s"topic $topic still exists in the replica manager")
// ensure that logs from all replicas are deleted if delete topic is marked successful
assert(servers.forall(server => topicAndPartitions.forall(tp =>
server.getLogManager().getLog(tp).isEmpty)),
s"topic $topic still exists in log mananger")
// ensure that topic is removed from all cleaner offsets
assert(servers.forall(server => topicAndPartitions.forall { tp =>
val checkpoints = server.getLogManager().liveLogDirs.map { logDir =>
new OffsetCheckpointFile(new File(logDir, "cleaner-offset-checkpoint")).read()
}
checkpoints.forall(checkpointsPerLogDir => !checkpointsPerLogDir.contains(tp))
}), s"checkpoint for topic $topic still exists")
// ensure the topic is gone
assert(
!zkUtils.getAllTopics().contains(topic),
s"topic $topic still exists on zookeeper")
}
/** Verify topic is deleted. Retry to delete the topic if not. */
private def verifyTopicDeletionWithRetries(
zkUtils: ZkUtils,
topic: String,
numPartitions: Int,
servers: Seq[KafkaServer]) {
eventually(timeout(1.minute), interval(200.milliseconds)) {
try {
verifyTopicDeletion(topic, numPartitions, servers)
} catch {
case e: Throwable =>
// As pushing messages into Kafka updates Zookeeper asynchronously, there is a small
// chance that a topic will be recreated after deletion due to the asynchronous update.
// Hence, delete the topic and retry.
adminClient.deleteTopics(Collections.singleton(topic))
throw e
}
}
}
private def waitUntilMetadataIsPropagated(topic: String, partition: Int): Unit = {
def isPropagated = server.dataPlaneRequestProcessor.metadataCache
.getPartitionInfo(topic, partition) match {
case Some(partitionState) =>
zkUtils.getLeaderForPartition(topic, partition).isDefined &&
Request.isValidBrokerId(partitionState.basePartitionState.leader) &&
!partitionState.basePartitionState.replicas.isEmpty
case _ =>
false
}
eventually(timeout(1.minute)) {
assert(isPropagated, s"Partition [$topic, $partition] metadata not propagated after timeout")
}
}
/**
* Wait until the latest offset of the given `TopicPartition` is not less than `offset`.
*/
def waitUntilOffsetAppears(topicPartition: TopicPartition, offset: Long): Unit = {
eventually(timeout(1.minute)) {
val currentOffset = getLatestOffsets(Set(topicPartition.topic)).get(topicPartition)
assert(currentOffset.nonEmpty && currentOffset.get >= offset)
}
}
private class EmbeddedZookeeper(val zkConnect: String) {
private val ZOOKEEPER_AUTH_PROVIDER = "zookeeper.authProvider.1"
val snapshotDir = Utils.createTempDir()
val logDir = Utils.createTempDir()
if (secure) {
System.setProperty(ZOOKEEPER_AUTH_PROVIDER, classOf[SASLAuthenticationProvider].getName)
} else {
System.clearProperty(ZOOKEEPER_AUTH_PROVIDER)
}
val zookeeper = new ZooKeeperServer(snapshotDir, logDir, 500)
val (ip, port) = {
val splits = zkConnect.split(":")
(splits(0), splits(1).toInt)
}
val factory = new NIOServerCnxnFactory()
factory.configure(new InetSocketAddress(ip, port), 16)
factory.startup(zookeeper)
val actualPort = factory.getLocalPort
def shutdown() {
factory.shutdown()
// The directories are not closed even if the ZooKeeper server is shut down.
// Please see ZOOKEEPER-1844, which is fixed in 3.4.6+. It leads to test failures
// on Windows if the directory deletion failure throws an exception.
try {
Utils.deleteRecursively(snapshotDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
try {
Utils.deleteRecursively(logDir)
} catch {
case e: IOException if Utils.isWindows =>
logWarning(e.getMessage)
}
System.clearProperty(ZOOKEEPER_AUTH_PROVIDER)
}
}
}
| bdrillard/spark | external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaTestUtils.scala | Scala | apache-2.0 | 23,257 |
package org.phenoscape.owl.util
import better.files._
import scala.io.Source
object Conversions {
implicit class BetterFileOps(val self: File) extends AnyVal {
def toSource(encoding: String): Source = Source.fromFile(self.toJava, encoding)
}
}
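// Hypothetical example (hedged, not in the original file; "/tmp/data.txt" is a placeholder path):
// with the implicit class in scope, a better-files File can be read through scala.io.Source.
//
//   import better.files.File
//   import org.phenoscape.owl.util.Conversions._
//   val lines = File("/tmp/data.txt").toSource("UTF-8").getLines().toList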
| phenoscape/phenoscape-owl-tools | src/main/scala/org/phenoscape/owl/util/Conversions.scala | Scala | mit | 259 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.{lang => jl}
import java.util.Locale
import scala.collection.JavaConverters._
import org.apache.spark.annotation.Stable
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
/**
* Functionality for working with missing data in `DataFrame`s.
*
* @since 1.3.1
*/
@Stable
final class DataFrameNaFunctions private[sql](df: DataFrame) {
/**
* Returns a new `DataFrame` that drops rows containing any null or NaN values.
*
* @since 1.3.1
*/
def drop(): DataFrame = drop("any", df.columns)
/**
* Returns a new `DataFrame` that drops rows containing null or NaN values.
*
* If `how` is "any", then drop rows containing any null or NaN values.
* If `how` is "all", then drop rows only if every column is null or NaN for that row.
*
* @since 1.3.1
*/
def drop(how: String): DataFrame = drop(how, df.columns)
/**
* Returns a new `DataFrame` that drops rows containing any null or NaN values
* in the specified columns.
*
* @since 1.3.1
*/
def drop(cols: Array[String]): DataFrame = drop(cols.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that drops rows containing any null or NaN values
* in the specified columns.
*
* @since 1.3.1
*/
def drop(cols: Seq[String]): DataFrame = drop(cols.size, cols)
/**
* Returns a new `DataFrame` that drops rows containing null or NaN values
* in the specified columns.
*
* If `how` is "any", then drop rows containing any null or NaN values in the specified columns.
* If `how` is "all", then drop rows only if every specified column is null or NaN for that row.
*
* @since 1.3.1
*/
def drop(how: String, cols: Array[String]): DataFrame = drop(how, cols.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that drops rows containing null or NaN values
* in the specified columns.
*
* If `how` is "any", then drop rows containing any null or NaN values in the specified columns.
* If `how` is "all", then drop rows only if every specified column is null or NaN for that row.
*
* @since 1.3.1
*/
def drop(how: String, cols: Seq[String]): DataFrame = {
how.toLowerCase(Locale.ROOT) match {
case "any" => drop(cols.size, cols)
case "all" => drop(1, cols)
case _ => throw new IllegalArgumentException(s"how ($how) must be 'any' or 'all'")
}
}
/**
* Returns a new `DataFrame` that drops rows containing
* less than `minNonNulls` non-null and non-NaN values.
*
* @since 1.3.1
*/
def drop(minNonNulls: Int): DataFrame = drop(minNonNulls, df.columns)
/**
* Returns a new `DataFrame` that drops rows containing
* less than `minNonNulls` non-null and non-NaN values in the specified columns.
*
* @since 1.3.1
*/
def drop(minNonNulls: Int, cols: Array[String]): DataFrame = drop(minNonNulls, cols.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that drops rows containing less than
* `minNonNulls` non-null and non-NaN values in the specified columns.
*
* @since 1.3.1
*/
def drop(minNonNulls: Int, cols: Seq[String]): DataFrame = {
// Filtering condition:
// only keep the row if it has at least `minNonNulls` non-null and non-NaN values.
val predicate = AtLeastNNonNulls(minNonNulls, cols.map(name => df.resolve(name)))
df.filter(Column(predicate))
}
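  // Hedged illustration of the drop variants (mirrors the style of the fill/replace examples
  // further down; `df` is any DataFrame):
  //
  //   // Drop rows that are entirely null/NaN
  //   df.na.drop("all")
  //
  //   // Drop rows with any null/NaN in columns "age" or "height"
  //   df.na.drop("any", Seq("age", "height"))
  //
  //   // Keep only rows with at least 2 non-null, non-NaN values
  //   df.na.drop(2)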
/**
* Returns a new `DataFrame` that replaces null or NaN values in numeric columns with `value`.
*
* @since 2.2.0
*/
def fill(value: Long): DataFrame = fill(value, df.columns)
/**
* Returns a new `DataFrame` that replaces null or NaN values in numeric columns with `value`.
* @since 1.3.1
*/
def fill(value: Double): DataFrame = fill(value, df.columns)
/**
* Returns a new `DataFrame` that replaces null values in string columns with `value`.
*
* @since 1.3.1
*/
def fill(value: String): DataFrame = fill(value, df.columns)
/**
* Returns a new `DataFrame` that replaces null or NaN values in specified numeric columns.
* If a specified column is not a numeric column, it is ignored.
*
* @since 2.2.0
*/
def fill(value: Long, cols: Array[String]): DataFrame = fill(value, cols.toSeq)
/**
* Returns a new `DataFrame` that replaces null or NaN values in specified numeric columns.
* If a specified column is not a numeric column, it is ignored.
*
* @since 1.3.1
*/
def fill(value: Double, cols: Array[String]): DataFrame = fill(value, cols.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that replaces null or NaN values in specified
* numeric columns. If a specified column is not a numeric column, it is ignored.
*
* @since 2.2.0
*/
def fill(value: Long, cols: Seq[String]): DataFrame = fillValue(value, cols)
/**
* (Scala-specific) Returns a new `DataFrame` that replaces null or NaN values in specified
* numeric columns. If a specified column is not a numeric column, it is ignored.
*
* @since 1.3.1
*/
def fill(value: Double, cols: Seq[String]): DataFrame = fillValue(value, cols)
/**
* Returns a new `DataFrame` that replaces null values in specified string columns.
* If a specified column is not a string column, it is ignored.
*
* @since 1.3.1
*/
def fill(value: String, cols: Array[String]): DataFrame = fill(value, cols.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that replaces null values in
* specified string columns. If a specified column is not a string column, it is ignored.
*
* @since 1.3.1
*/
def fill(value: String, cols: Seq[String]): DataFrame = fillValue(value, cols)
/**
* Returns a new `DataFrame` that replaces null values in boolean columns with `value`.
*
* @since 2.3.0
*/
def fill(value: Boolean): DataFrame = fill(value, df.columns)
/**
* (Scala-specific) Returns a new `DataFrame` that replaces null values in specified
* boolean columns. If a specified column is not a boolean column, it is ignored.
*
* @since 2.3.0
*/
def fill(value: Boolean, cols: Seq[String]): DataFrame = fillValue(value, cols)
/**
* Returns a new `DataFrame` that replaces null values in specified boolean columns.
* If a specified column is not a boolean column, it is ignored.
*
* @since 2.3.0
*/
def fill(value: Boolean, cols: Array[String]): DataFrame = fill(value, cols.toSeq)
/**
* Returns a new `DataFrame` that replaces null values.
*
* The key of the map is the column name, and the value of the map is the replacement value.
* The value must be of the following type:
* `Integer`, `Long`, `Float`, `Double`, `String`, `Boolean`.
* Replacement values are cast to the column data type.
*
* For example, the following replaces null values in column "A" with string "unknown", and
* null values in column "B" with numeric value 1.0.
* {{{
* import com.google.common.collect.ImmutableMap;
* df.na.fill(ImmutableMap.of("A", "unknown", "B", 1.0));
* }}}
*
* @since 1.3.1
*/
def fill(valueMap: java.util.Map[String, Any]): DataFrame = fillMap(valueMap.asScala.toSeq)
/**
* (Scala-specific) Returns a new `DataFrame` that replaces null values.
*
* The key of the map is the column name, and the value of the map is the replacement value.
* The value must be of the following type: `Int`, `Long`, `Float`, `Double`, `String`, `Boolean`.
* Replacement values are cast to the column data type.
*
* For example, the following replaces null values in column "A" with string "unknown", and
* null values in column "B" with numeric value 1.0.
* {{{
* df.na.fill(Map(
* "A" -> "unknown",
* "B" -> 1.0
* ))
* }}}
*
* @since 1.3.1
*/
def fill(valueMap: Map[String, Any]): DataFrame = fillMap(valueMap.toSeq)
/**
* Replaces values matching keys in `replacement` map with the corresponding values.
*
* {{{
* import com.google.common.collect.ImmutableMap;
*
* // Replaces all occurrences of 1.0 with 2.0 in column "height".
* df.na.replace("height", ImmutableMap.of(1.0, 2.0));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "name".
* df.na.replace("name", ImmutableMap.of("UNKNOWN", "unnamed"));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in all string columns.
* df.na.replace("*", ImmutableMap.of("UNKNOWN", "unnamed"));
* }}}
*
* @param col name of the column to apply the value replacement. If `col` is "*",
* replacement is applied on all string, numeric or boolean columns.
* @param replacement value replacement map. Key and value of `replacement` map must have
* the same type, and can only be doubles, strings or booleans.
* The map value can have nulls.
*
* @since 1.3.1
*/
def replace[T](col: String, replacement: java.util.Map[T, T]): DataFrame = {
replace[T](col, replacement.asScala.toMap)
}
/**
* Replaces values matching keys in `replacement` map with the corresponding values.
*
* {{{
* import com.google.common.collect.ImmutableMap;
*
* // Replaces all occurrences of 1.0 with 2.0 in column "height" and "weight".
* df.na.replace(new String[] {"height", "weight"}, ImmutableMap.of(1.0, 2.0));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "firstname" and "lastname".
* df.na.replace(new String[] {"firstname", "lastname"}, ImmutableMap.of("UNKNOWN", "unnamed"));
* }}}
*
* @param cols list of columns to apply the value replacement. If `col` is "*",
* replacement is applied on all string, numeric or boolean columns.
* @param replacement value replacement map. Key and value of `replacement` map must have
* the same type, and can only be doubles, strings or booleans.
* The map value can have nulls.
*
* @since 1.3.1
*/
def replace[T](cols: Array[String], replacement: java.util.Map[T, T]): DataFrame = {
replace(cols.toSeq, replacement.asScala.toMap)
}
/**
* (Scala-specific) Replaces values matching keys in `replacement` map.
*
* {{{
* // Replaces all occurrences of 1.0 with 2.0 in column "height".
* df.na.replace("height", Map(1.0 -> 2.0));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "name".
* df.na.replace("name", Map("UNKNOWN" -> "unnamed"));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in all string columns.
* df.na.replace("*", Map("UNKNOWN" -> "unnamed"));
* }}}
*
* @param col name of the column to apply the value replacement. If `col` is "*",
* replacement is applied on all string, numeric or boolean columns.
* @param replacement value replacement map. Key and value of `replacement` map must have
* the same type, and can only be doubles, strings or booleans.
* The map value can have nulls.
*
* @since 1.3.1
*/
def replace[T](col: String, replacement: Map[T, T]): DataFrame = {
if (col == "*") {
replace0(df.columns, replacement)
} else {
replace0(Seq(col), replacement)
}
}
/**
* (Scala-specific) Replaces values matching keys in `replacement` map.
*
* {{{
* // Replaces all occurrences of 1.0 with 2.0 in column "height" and "weight".
* df.na.replace("height" :: "weight" :: Nil, Map(1.0 -> 2.0));
*
* // Replaces all occurrences of "UNKNOWN" with "unnamed" in column "firstname" and "lastname".
* df.na.replace("firstname" :: "lastname" :: Nil, Map("UNKNOWN" -> "unnamed"));
* }}}
*
* @param cols list of columns to apply the value replacement. If `col` is "*",
* replacement is applied on all string, numeric or boolean columns.
* @param replacement value replacement map. Key and value of `replacement` map must have
* the same type, and can only be doubles, strings or booleans.
* The map value can have nulls.
*
* @since 1.3.1
*/
def replace[T](cols: Seq[String], replacement: Map[T, T]): DataFrame = replace0(cols, replacement)
private def replace0[T](cols: Seq[String], replacement: Map[T, T]): DataFrame = {
if (replacement.isEmpty || cols.isEmpty) {
return df
}
// Convert the NumericType in replacement map to DoubleType,
// while leaving StringType, BooleanType and null untouched.
val replacementMap: Map[_, _] = replacement.map {
case (k, v: String) => (k, v)
case (k, v: Boolean) => (k, v)
case (k: String, null) => (k, null)
case (k: Boolean, null) => (k, null)
case (k, null) => (convertToDouble(k), null)
case (k, v) => (convertToDouble(k), convertToDouble(v))
}
// targetColumnType is either DoubleType, StringType or BooleanType,
// depending on the type of first key in replacement map.
// Only fields of targetColumnType will perform replacement.
val targetColumnType = replacement.head._1 match {
case _: jl.Double | _: jl.Float | _: jl.Integer | _: jl.Long => DoubleType
case _: jl.Boolean => BooleanType
case _: String => StringType
}
val columnEquals = df.sparkSession.sessionState.analyzer.resolver
val projections = df.schema.fields.map { f =>
val shouldReplace = cols.exists(colName => columnEquals(colName, f.name))
if (f.dataType.isInstanceOf[NumericType] && targetColumnType == DoubleType && shouldReplace) {
replaceCol(f, replacementMap)
} else if (f.dataType == targetColumnType && shouldReplace) {
replaceCol(f, replacementMap)
} else {
df.col(f.name)
}
}
df.select(projections : _*)
}
private def fillMap(values: Seq[(String, Any)]): DataFrame = {
// Error handling
values.foreach { case (colName, replaceValue) =>
// Check column name exists
df.resolve(colName)
// Check data type
replaceValue match {
case _: jl.Double | _: jl.Float | _: jl.Integer | _: jl.Long | _: jl.Boolean | _: String =>
// This is good
case _ => throw new IllegalArgumentException(
s"Unsupported value type ${replaceValue.getClass.getName} ($replaceValue).")
}
}
val columnEquals = df.sparkSession.sessionState.analyzer.resolver
val projections = df.schema.fields.map { f =>
values.find { case (k, _) => columnEquals(k, f.name) }.map { case (_, v) =>
v match {
case v: jl.Float => fillCol[Float](f, v)
case v: jl.Double => fillCol[Double](f, v)
case v: jl.Long => fillCol[Long](f, v)
case v: jl.Integer => fillCol[Integer](f, v)
case v: jl.Boolean => fillCol[Boolean](f, v.booleanValue())
case v: String => fillCol[String](f, v)
}
}.getOrElse(df.col(f.name))
}
df.select(projections : _*)
}
/**
* Returns a [[Column]] expression that replaces null value in `col` with `replacement`.
*/
private def fillCol[T](col: StructField, replacement: T): Column = {
val quotedColName = "`" + col.name + "`"
val colValue = col.dataType match {
case DoubleType | FloatType =>
nanvl(df.col(quotedColName), lit(null)) // nanvl only supports these types
case _ => df.col(quotedColName)
}
coalesce(colValue, lit(replacement).cast(col.dataType)).as(col.name)
}
/**
* Returns a [[Column]] expression that replaces value matching key in `replacementMap` with
* value in `replacementMap`, using [[CaseWhen]].
*
* TODO: This can be optimized to use broadcast join when replacementMap is large.
*/
private def replaceCol(col: StructField, replacementMap: Map[_, _]): Column = {
val keyExpr = df.col(col.name).expr
def buildExpr(v: Any) = Cast(Literal(v), keyExpr.dataType)
val branches = replacementMap.flatMap { case (source, target) =>
Seq(buildExpr(source), buildExpr(target))
}.toSeq
new Column(CaseKeyWhen(keyExpr, branches :+ keyExpr)).as(col.name)
}
private def convertToDouble(v: Any): Double = v match {
case v: Float => v.toDouble
case v: Double => v
case v: Long => v.toDouble
case v: Int => v.toDouble
case v => throw new IllegalArgumentException(
s"Unsupported value type ${v.getClass.getName} ($v).")
}
/**
* Returns a new `DataFrame` that replaces null or NaN values in specified
* numeric, string columns. If a specified column is not a numeric, string
* or boolean column it is ignored.
*/
private def fillValue[T](value: T, cols: Seq[String]): DataFrame = {
// the fill[T] which T is Long/Double,
// should apply on all the NumericType Column, for example:
// val input = Seq[(java.lang.Integer, java.lang.Double)]((null, 164.3)).toDF("a","b")
// input.na.fill(3.1)
// the result is (3,164.3), not (null, 164.3)
val targetType = value match {
case _: Double | _: Long => NumericType
case _: String => StringType
case _: Boolean => BooleanType
case _ => throw new IllegalArgumentException(
s"Unsupported value type ${value.getClass.getName} ($value).")
}
val columnEquals = df.sparkSession.sessionState.analyzer.resolver
val filledColumns = df.schema.fields.filter { f =>
val typeMatches = (targetType, f.dataType) match {
case (NumericType, dt) => dt.isInstanceOf[NumericType]
case (StringType, dt) => dt == StringType
case (BooleanType, dt) => dt == BooleanType
case _ =>
throw new IllegalArgumentException(s"$targetType is not matched at fillValue")
}
// Only fill if the column is part of the cols list.
typeMatches && cols.exists(col => columnEquals(f.name, col))
}
df.withColumns(filledColumns.map(_.name), filledColumns.map(fillCol[T](_, value)))
}
}
| caneGuy/spark | sql/core/src/main/scala/org/apache/spark/sql/DataFrameNaFunctions.scala | Scala | apache-2.0 | 18,845 |
/*
 * Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.api.mocks
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import es.tid.cosmos.common.ExecutableValidation
import es.tid.cosmos.platform.ial._
import es.tid.cosmos.platform.ial.MachineProfile._
import es.tid.cosmos.platform.ial.MachineProfile
import es.tid.cosmos.platform.ial.MachineState
/** Mocked IAL that simulates a number of machines being managed
*
* @constructor
* @param totalMachines Number of simulated machines per profile
*/
class MockInfrastructureProvider(
totalMachines: Map[MachineProfile, Int] = Map(G1Compute -> 100, HdfsMaster -> 1, HdfsSlave -> 4)
) extends InfrastructureProvider {
private class Machine(name: String, val profile: MachineProfile) {
val id = Id(name)
val hostname = s"$name.example.com"
var inUse: Boolean = false
val machineState = MachineState(id, name, profile, MachineStatus.Running, hostname, "0.0.0.0")
}
private val machines: Seq[Machine] = totalMachines.flatMap {
case (profile, total) => (1 to total).map(idx => new Machine(s"$profile-$idx", profile))
}.toSeq
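  // Under the machines lock: reserve `numberOfMachines` free machines of the requested profile,
  // mark them in use and fire their bootstrap actions; fails if the preconditions do not hold or
  // not enough machines are available.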
override def createMachines(
preConditions: ExecutableValidation,
profile: MachineProfile,
numberOfMachines: Int,
bootstrapAction: (MachineState) => Future[Unit]): Future[Seq[MachineState]] = Future {
machines.synchronized {
require(preConditions().isSuccess, "Preconditions failed")
val selectedMachines =
machines.filter(m => !m.inUse && m.profile == profile).take(numberOfMachines)
require(selectedMachines.size == numberOfMachines, "Not enough machines")
selectedMachines.foreach { machine =>
machine.inUse = true
bootstrapAction(machine.machineState)
}
machines.map(_.machineState)
}
}
override def assignedMachines(hostNames: Seq[String]): Future[Seq[MachineState]] = ???
override def releaseMachines(machinesToRelease: Seq[MachineState]): Future[Unit] = Future {
machines.synchronized {
machinesToRelease.foreach { m =>
machines.find(_.id == m.id).foreach(_.inUse = false)
}
}
}
override val rootPrivateSshKey: String = "ssh-rsa XXXXXX [email protected]"
override def availableMachineCount(profile: MachineProfile.Value): Future[Int] = Future {
machines.synchronized {
machines.count(m => !m.inUse && m.profile == profile)
}
}
override def machinePoolCount(profileFilter: MachineProfile => Boolean): Int =
machines.synchronized {
machines.count(m => profileFilter(m.profile))
}
}
object MockInfrastructureProvider {
trait Component extends InfrastructureProviderComponent {
override def infrastructureProvider: InfrastructureProvider = new MockInfrastructureProvider()
}
}
| telefonicaid/fiware-cosmos-platform | cosmos-api/it/scala/es/tid/cosmos/api/mocks/MockInfrastructureProvider.scala | Scala | apache-2.0 | 3,418 |
/**
* Copyright 2015 Thomson Reuters
*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.it
import java.io.ByteArrayInputStream
import cmwell.it.fixture.NSHashesAndPrefixes
import cmwell.util.concurrent.SimpleScheduler._
import com.typesafe.scalalogging.LazyLogging
import org.apache.jena.query.DatasetFactory
import org.apache.jena.riot.{Lang, RDFDataMgr}
import org.scalatest.{AsyncFunSpec, Matchers}
import play.api.libs.json.{Reads, _}
import scala.concurrent.duration._
import scala.io.Source
class QuadTests extends AsyncFunSpec with Matchers with Helpers with NSHashesAndPrefixes with LazyLogging {
val exampleOrg = cmw / "example.org"
val spiderman = exampleOrg / "comics" / "characters" / "spiderman"
val superman = exampleOrg / "comics" / "characters" / "superman"
val batman = exampleOrg / "comics" / "characters" / "batman"
def arr(v: JsValue) = JsArray(Seq(v))
def jsonlNoData(name: String) = {
Json.obj(
"type.sys" -> Json.arr(Json.obj("value" -> "ObjectInfoton")),
"path.sys" -> Json.arr(Json.obj("value" -> s"/example.org/comics/characters/$name")),
"dataCenter.sys" -> Json.arr(Json.obj("value" -> dcName)),
"parent.sys" -> Json.arr(Json.obj("value" -> "/example.org/comics/characters")))
}
val sEnemies = Json.obj("type" -> "ObjectInfoton",
"system" -> Json.obj("path" -> "/example.org/comics/characters/spiderman",
"parent" -> "/example.org/comics/characters",
"dataCenter" -> dcName),
"fields" -> Json.obj("enemyOf.rel" -> Json.arr(
"http://example.org/comics/characters/dr-octopus",
"http://example.org/comics/characters/green-goblin",
"http://example.org/comics/characters/venom")))
def bEnemies(enemies: JsObject*) = Json.obj(
"type.sys" -> arr(Json.obj("value" -> "ObjectInfoton")),
"path.sys" -> arr(Json.obj("value" -> "/example.org/comics/characters/batman")),
"parent.sys" -> arr(Json.obj("value" -> "/example.org/comics/characters")),
"dataCenter.sys" -> arr(Json.obj("value" -> dcName)),
"enemyOf.rel" -> enemies.seq
).transform(jsonlSorter).get
def supermanWithQuad(quad: String) = Json.obj(
"type.sys" -> Json.arr(Json.obj("value" -> "ObjectInfoton")),
"path.sys" -> Json.arr(Json.obj("value" -> "/example.org/comics/characters/superman")),
"dataCenter.sys" -> Json.arr(Json.obj("value" -> dcName)),
"parent.sys" -> Json.arr(Json.obj("value" -> "/example.org/comics/characters")),
"enemyOf.rel" -> Json.arr(
Json.obj(
"value" -> "http://example.org/comics/characters/general-zod",
"quad" -> quad,
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/lex-luthor",
"quad" -> quad,
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI")
),
"sameAs.owl" -> Json.arr(
Json.obj(
"value" -> "http://example.org/comics/characters/clark-kent",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI")
)
).transform(jsonlSorter andThen jsonlUuidDateIdEraser).get
val batmanExpected = bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/riddler",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
describe("n-quads data") {
//Assertions
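    // The vals below are futures chained with flatMap: each step waits for its prerequisites
    // before issuing its HTTP calls. The it(...) clauses at the end of this describe block only
    // register the already-built futures as test assertions.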
val ingestingNquads = {
val spiderman = Source.fromURL(this.getClass.getResource("/spiderman.nq")).mkString
val superman = Source.fromURL(this.getClass.getResource("/superman.nq")).mkString
val batman = Source.fromURL(this.getClass.getResource("/batman.nq")).mkString
val f1 = Http.post(_in, spiderman, Some("application/n-quads;charset=UTF-8"), List("format" -> "nquads"), tokenHeader)
val f2 = Http.post(_in, superman, Some("application/n-quads;charset=UTF-8"), List("format" -> "nquads"), tokenHeader)
val f3 = Http.post(_in, batman, Some("application/n-quads;charset=UTF-8"), List("format" -> "nquads"), tokenHeader)
for {
r1 <- f1
r2 <- f2
r3 <- f3
} yield {
Json.parse(r1.payload) should be(jsonSuccess)
Json.parse(r2.payload) should be(jsonSuccess)
Json.parse(r3.payload) should be(jsonSuccess)
}
}
val fSuperHeroes = ingestingNquads.flatMap(_ => schedule(indexingDuration)(()))
val failGlobalQuadReplace = fSuperHeroes.flatMap { _ =>
val data = """<> <cmwell://meta/sys#replaceGraph> <*> ."""
Http.post(_in, data, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {
res.status should be(400)
}
}
}
val failTooManyGraphReplaceStatements = fSuperHeroes.flatMap { _ =>
val stmtPrefix = "<> <cmwell://meta/sys#replaceGraph> <http://graph.number/"
val stmtSuffix = "> .\n"
val ntriplesRG = (1 to 21).mkString(stmtPrefix, stmtSuffix + stmtPrefix, stmtSuffix)
Http.post(_in, ntriplesRG, None, List("format" -> "ntriples"), tokenHeader).map { res =>
withClue(res) {
res.status should be(400)
}
}
}
val fSpiderman1 = fSuperHeroes.flatMap(_ => Http.get(spiderman, List("format" -> "json"))).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(fieldsSorter andThen uuidDateEraser)
.get shouldEqual sEnemies
}
}
val fSpiderman2 = fSuperHeroes.flatMap(_ => Http.get(spiderman, List("format" -> "nquads"))).map { result =>
val res = result.payload
val ds = DatasetFactory.createGeneral()
RDFDataMgr.read(ds, new ByteArrayInputStream(res), Lang.NQUADS)
ds.getNamedModel("http://example.org/graphs/spiderman").isEmpty should be(false)
}
val fSpiderman3 = {
val data = """<http://example.org/comics/characters/spiderman> <cmwell://meta/sys#markReplace> <*> <http://example.org/graphs/spiderman> ."""
for {
_ <- fSpiderman1
_ <- fSpiderman2
res <- Http.post(_in, data, None, List("format" -> "nquads"), tokenHeader)
} yield withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
}
val fSpiderman4 = fSpiderman3.flatMap(_ => scheduleFuture(indexingDuration) {
Http.get(spiderman, List("format" -> "nquads")).map { res =>
withClue(res) {
res.status should be(404)
}
}
})
val fSuperman1 = fSuperHeroes.flatMap(_ => Http.get(
exampleOrg,
List(
"op" -> "search",
"qp" -> "system.quad::http://example.org/graphs/superman",
"format" -> "jsonl",
"recursive" -> "")).map { res =>
val expected =
Json.obj(
"type" -> "SearchResults",
"total" -> 4,
"offset" -> 0,
"length" -> 4,
"infotons" -> Json.arr(
jsonlNoData("john-kent"),
jsonlNoData("clark-kent"),
jsonlNoData("martha-kent"),
jsonlNoData("superman")
)
).transform(jsonlSorter).get
withClue(res) {
Json
.parse(res.payload)
.transform((__ \ 'results).json.pick andThen
(__ \ 'fromDate).json.prune andThen
(__ \ 'toDate).json.prune andThen
jsonlInfotonArraySorterAndUuidDateIdEraser)
.get shouldEqual expected
}
})
val fSuperman2 = fSuperHeroes.flatMap(_ => Http.get(
exampleOrg,
List("format" -> "jsonl", "op" -> "search", "qp" -> "system.quad::superman", "recursive" -> "")).map { res =>
withClue(res) {
res.status should be(422)
}
})
val fSuperman3 = fSuperHeroes.flatMap(_ => Http.get(superman, List("format" -> "jsonl", "pretty" -> "")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual supermanWithQuad("http://example.org/graphs/superman")
}
})
val fSuperman4 = for {
_ <- fSuperman1
_ <- fSuperman2
_ <- fSuperman3
data = """<> <cmwell://meta/sys#graphAlias> "superman" <http://example.org/graphs/superman> ."""
res <- Http.post(_in, data, None, List("format" -> "nquads"), tokenHeader)
} yield withClue(res) {
jsonSuccessPruner(Json.parse(res.payload)) should be(jsonSuccess)
}
val fSuperman5 = fSuperman4.flatMap(_ => scheduleFuture(cacheEviction){
Http.get(superman, List("format" -> "jsonl", "pretty" -> "")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual supermanWithQuad("superman")
}
}
})
val fSuperman6 = fSuperman5.flatMap { _ =>
val expected = Json.obj(
"type" -> "SearchResults",
"total" -> 4,
"offset" -> 0,
"length" -> 4,
"infotons" -> Json.arr(
jsonlNoData("john-kent"),
jsonlNoData("clark-kent"),
jsonlNoData("martha-kent"),
jsonlNoData("superman"))).transform(jsonlSorter).get
Http.get(exampleOrg, List("op" -> "search", "qp" -> "system.quad::superman", "format" -> "jsonl", "recursive" -> "")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform((__ \ 'results).json.pick andThen
(__ \ 'fromDate).json.prune andThen
(__ \ 'toDate).json.prune andThen
jsonlInfotonArraySorterAndUuidDateIdEraser)
.get shouldEqual expected
}
}
}
val fSuperman7 = fSuperman6.flatMap{ _ =>
val data = """<> <cmwell://meta/sys#replaceGraph> <http://example.org/graphs/superman> ."""
Http.post(_in, data, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {
res.status should be(200)
}
}
}
val fSuperman8 = fSuperman7.flatMap(_ => scheduleFuture(indexingDuration) {
spinCheck(100.millis, true)(Http.get(superman, List("format" -> "jsonl")))(_.status).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual Json.obj(
"type.sys" -> Json.arr(Json.obj("value" -> "ObjectInfoton")),
"path.sys" -> Json.arr(Json.obj("value" -> "/example.org/comics/characters/superman")),
"dataCenter.sys" -> Json.arr(Json.obj("value" -> dcName)),
"parent.sys" -> Json.arr(Json.obj("value" -> "/example.org/comics/characters")),
"sameAs.owl" -> Json.arr(
Json.obj(
"value" -> "http://example.org/comics/characters/clark-kent",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI")
)
).transform(jsonlSorter andThen jsonlUuidDateIdEraser).get
}
}
})
val fBatman01 = fSuperHeroes.flatMap(_ => Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual batmanExpected
}
})
val fBatman02 = {
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markDelete> _:batmanDeletes <http://example.org/graphs/joker> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/riddler> <http://example.org/graphs/batman> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> .
""".stripMargin
fBatman01.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map{res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman03 = fBatman02.flatMap( _ => scheduleFuture(indexingDuration) {
Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual batmanExpected
}
}
})
val fBatman04 = {
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markDelete> _:batmanDeletes <http://example.org/graphs/batman> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/riddler> <http://example.org/graphs/batman> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> .
""".stripMargin
fBatman03.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman05 = fBatman04.flatMap(_ => scheduleFuture(indexingDuration){
Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman06 = {
// scalastyle:off
val quads =
"""
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/scarecrow> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markDelete> _:batmanDeletes <http://example.org/graphs/joker> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> .
""".stripMargin
// scalastyle:on
fBatman05.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map{res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman07 = fBatman06.flatMap(_ => scheduleFuture(indexingDuration){
Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/scarecrow",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman08 = {
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markDelete> _:batmanDeletes <http://example.org/graphs/joker> .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> <http://example.org/graphs/joker> .
""".stripMargin
fBatman07.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman09 = fBatman08.flatMap(_ => scheduleFuture(indexingDuration) {
spinCheck(100.millis, true)(Http.get(batman, List("format" -> "jsonl")))(_.status).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/scarecrow",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman10 = {
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markDelete> _:batmanDeletes .
|_:batmanDeletes <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> .
""".stripMargin
fBatman09.flatMap(_ => Http.post(_in, quads, None, List("format" -> "ntriples"), tokenHeader).map { res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman11 = fBatman10.flatMap(_ => scheduleFuture(indexingDuration) {
spinCheck(100.millis, true)(Http.get(batman, List("format" -> "jsonl")))(_.status).map { res =>
withClue(res){
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/scarecrow",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman12 = {
// scalastyle:off
val quads =
"""
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/riddler> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/joker> <http://example.org/graphs/joker> .
""".stripMargin
// scalastyle:on
fBatman11.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads", "replace-mode" -> "*"), tokenHeader).map{res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman13 = fBatman12.flatMap(_ => scheduleFuture(indexingDuration) {
Http.get(batman, List("format" -> "jsonl")).map{res =>
withClue(res){
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/riddler",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman14 = {
// scalastyle:off
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markReplace> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/enigma> <http://example.org/graphs/batman> .
""".stripMargin
// scalastyle:on
fBatman13.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map{res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman15 = fBatman14.flatMap(_ => scheduleFuture(indexingDuration) {
Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/enigma",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman16 = {
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markReplace> <http://purl.org/vocab/relationship/enemyOf> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/scarecrow> .
""".stripMargin
fBatman15.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map{res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman17 = fBatman16.flatMap(_ => scheduleFuture(indexingDuration) {
Http.get(batman, List("format" -> "jsonl")).map { res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/scarecrow",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/joker",
"quad" -> "http://example.org/graphs/joker",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/enigma",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"))
}
}
})
val fBatman18 = {
// scalastyle:off
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markReplace> <http://purl.org/vocab/relationship/enemyOf> <*> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/enemyOf> <http://example.org/comics/characters/ivy> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/cat-woman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/james-gordon> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/cat-woman> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/batman> .
|<http://example.org/comics/characters/james-gordon> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/batman> <http://example.org/graphs/batman> .
""".stripMargin
// scalastyle:on
fBatman17.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {
Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman19 = fBatman18.flatMap(_ => scheduleFuture(indexingDuration) {
val addColls = __.json.update(
Reads.JsObjectReads.map{
case JsObject(xs) => JsObject(
xs + ("collaboratesWith.rel" -> Json.arr(
Json.obj(
"value" -> "http://example.org/comics/characters/cat-woman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/james-gordon",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI",
"quad" -> "http://example.org/graphs/batman")))
)
}
)
Http.get(batman, List("format" -> "jsonl")).map{res =>
withClue(res) {
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual bEnemies(
Json.obj(
"value" -> "http://example.org/comics/characters/ivy",
"quad" -> "http://example.org/graphs/batman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI")
).transform(addColls andThen jsonlSorter).get
}
}
})
val fBatman20 = {
// scalastyle:off
val quads =
"""
|<http://example.org/comics/characters/batman> <cmwell://meta/sys#markReplace> <*> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/batman> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/robin> <http://example.org/graphs/batman> .
|<http://example.org/comics/characters/robin> <http://purl.org/vocab/relationship/collaboratesWith> <http://example.org/comics/characters/batman> <http://example.org/graphs/batman> .
""".stripMargin
// scalastyle:on
fBatman19.flatMap(_ => Http.post(_in, quads, None, List("format" -> "nquads"), tokenHeader).map { res =>
withClue(res) {Json.parse(res.payload) should be(jsonSuccess)
}
})
}
val fBatman21 = fBatman20.flatMap(_ => scheduleFuture(indexingDuration) {
Http.get(batman, List("format" -> "jsonl")).map{res =>
withClue(res){
Json
.parse(res.payload)
.transform(jsonlSorter andThen jsonlUuidDateIdEraser)
.get shouldEqual Json.obj(
"type.sys" -> arr(Json.obj("value" -> "ObjectInfoton")),
"path.sys" -> arr(Json.obj("value" -> "/example.org/comics/characters/batman")),
"parent.sys" -> arr(Json.obj("value" -> "/example.org/comics/characters")),
"dataCenter.sys" -> arr(Json.obj("value" -> dcName)),
"collaboratesWith.rel" -> Json.arr(
Json.obj(
"value" -> "http://example.org/comics/characters/cat-woman",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI"),
Json.obj(
"value" -> "http://example.org/comics/characters/robin",
"type" -> "http://www.w3.org/2001/XMLSchema#anyURI",
"quad" -> "http://example.org/graphs/batman"))
).transform(jsonlSorter).get
}
}
})
// scalastyle:off
it("should accept and process n-quads data")(ingestingNquads)
it("should not succeed deleting all quads globally using <*>")(failGlobalQuadReplace)
it("should fail to exceed the maximum allowed replaceGraph statements per request")(failTooManyGraphReplaceStatements)
it("should be retrievable in json format")(fSpiderman1)
it("should verify subgraph data is readable (nquads format)")(fSpiderman2)
it("should succeed deleting spiderman's quads")(fSpiderman3)
it("should not be able to get infoton after spiderman fields deletion")(fSpiderman4)
it("should be searchable with system.quad as qp")(fSuperman1)
it("should not be able to find superman's infotons by quad alias in search")(fSuperman2)
it("should retrieve superman as pretty jsonl without quad aliasing")(fSuperman3)
it("should register an alias to superman's graph")(fSuperman4)
it("should retrieve superman as pretty jsonl with quad aliasing")(fSuperman5)
it("should now (previously failed) be able to find superman's infotons by quad alias in search")(fSuperman6)
it("should succeed deleting all superman's quads globally")(fSuperman7)
it("should retrieve superman as jsonl without deleted quad asociated attributes")(fSuperman8)
it("should retrieve batman as jsonl")(fBatman01)
// MARK DELETE
describe("should make sure wrong quads supplied to _in will not modify the data") {
it("succeed posting the quads")(fBatman02)
it("verifying the data")(fBatman03)
}
describe("should make sure to delete only meta operation related quads and not values that belong to other quads") {
it("succeed posting the quads")(fBatman04)
it("verifying the data")(fBatman05)
}
describe("should ignore deletes for wrong graph but insert new unrelated data"){
it("succeed posting the quads")(fBatman06)
it("verifying the data")(fBatman07)
}
describe("should delete joker from joker graph") {
it("succeed posting the quads")(fBatman08)
it("verifying the data")(fBatman09)
}
describe("should delete joker from all graphs when no graph is supplied") {
it("succeed posting the quads")(fBatman10)
it("verifying the data")(fBatman11)
}
//MARK REPLACE
describe("should replace all with 'replace-mode' enabled") {
it("succeed posting the quads")(fBatman12)
it("verifying the data")(fBatman13)
}
describe("should replace all values in same quad when it is passed as a quad to 'markReplace' with the new values") {
it("succeed posting the quads")(fBatman14)
it("verifying the data")(fBatman15)
}
describe("should replace all default graph related when no quad is supplied") {
it("succeed posting the quads")(fBatman16)
it("verifying the data")(fBatman17)
}
describe("should respect the 'nuclear' option and replace all values for all quads when '*' is supplied as a quad") {
it("succeed posting the quads")(fBatman18)
it("verifying the data")(fBatman19)
}
describe("should respect the 'inverted nuclear' option and replace all fields associated with some quad when '*' is supplied as a predicate for markReplace") {
it("succeed posting the quads")(fBatman20)
it("verifying the data")(fBatman21)
}
// scalastyle:on
}
}
| hochgi/CM-Well | server/cmwell-it/src/it/scala/cmwell/it/QuadTests.scala | Scala | apache-2.0 | 33,142 |
package com.API
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import spray.can.Http
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
object Boot extends App {
// we need an ActorSystem to host our application in
implicit val system = ActorSystem("spray-actors")
// create and start our service actor
val service = system.actorOf(Props[MyServiceActor], "api")
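  // implicit timeout required by the ask (?) pattern used for the Http.Bind below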
implicit val timeout = Timeout(5.seconds)
// start a new HTTP server on port 8080 with our service actor as the handler
IO(Http) ? Http.Bind(service, interface = "0.0.0.0", port = 8080)
}
| code-ape/Clojure-vs-Scala-Web-API | scala-api/src/main/scala/com/API/boot.scala | Scala | gpl-2.0 | 612 |
package barbershop
import akka.actor.Actor
import akka.actor.ActorRef
import scala.util.Random
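/** Simulates a barber: cutting a customer's hair takes a random amount of time, after which the
  * customer receives a Haircut message and the shop (the sender) is notified with CuttingDoneMessage. */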
class Barber extends Actor {
val random = new Random
def receive = {
case customer: ActorRef =>
println("[b] Starting to cut hair on customer!")
Thread.sleep(random.nextInt(300))
customer ! Haircut
sender.tell(CuttingDoneMessage(customer), self)
}
} | saesh/BarberShopAkka | src/scala/main/barbershop/Barber.scala | Scala | mit | 397 |
package models
import java.util.{Date, UUID}
import scala.collection.JavaConversions._
import com.datastax.driver.core.querybuilder.QueryBuilder
import database.Cassandra
import constants.Table
import pubsub.{UnFollowMessage, FollowMessage, PubSubHelper}
import com.datastax.driver.core.Row
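/** Generic counter support backed by the shared Cassandra "counter" table: every increment or
  * decrement also mirrors the current value onto the owning row, so it can be read back without an
  * extra counter lookup. */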
trait Counter {
val counter_table: String = "counter"
def incr(orgTable: String, field: String, id: UUID) = {
val key = orgTable + "_" + field
Cassandra.session.execute(
QueryBuilder.update(counter_table)
.`with`(QueryBuilder.incr("value"))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
    // read back the updated counter value to mirror onto the source table and return
val row = Cassandra.session.execute(
QueryBuilder.select("value")
.from(counter_table)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val value = row.getLong("value")
Cassandra.session.execute(
QueryBuilder.update(orgTable)
.`with`(QueryBuilder.set(field, value))
.where(QueryBuilder.eq("id", id))
)
value
}
def incr(orgTable: String, field: String, id: UUID, step: java.lang.Long) = {
val key = orgTable + "_" + field
Cassandra.session.execute(
QueryBuilder.update(counter_table)
.`with`(QueryBuilder.incr("value", step))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
    // read back the updated counter value to mirror onto the source table and return
val row = Cassandra.session.execute(
QueryBuilder.select("value")
.from(counter_table)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val value = row.getLong("value")
Cassandra.session.execute(
QueryBuilder.update(orgTable)
.`with`(QueryBuilder.set(field, value))
.where(QueryBuilder.eq("id", id))
)
value
}
def decr(orgTable: String, field: String, id: UUID) = {
val key = orgTable + "_" + field
Cassandra.session.execute(
QueryBuilder.update(counter_table)
.`with`(QueryBuilder.decr("value"))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
    // read back the updated counter value to mirror onto the source table and return
val row = Cassandra.session.execute(
QueryBuilder.select("value")
.from(counter_table)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val value = row.getLong("value")
Cassandra.session.execute(
QueryBuilder.update(orgTable)
.`with`(QueryBuilder.set(field, value))
.where(QueryBuilder.eq("id", id))
)
value
}
def decr(orgTable: String, field: String, id: UUID, step: Long) = {
val key = orgTable + "_" + field
Cassandra.session.execute(
QueryBuilder.update(counter_table)
.`with`(QueryBuilder.decr("value", step))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
    // read back the updated counter value to mirror onto the source table and return
val row = Cassandra.session.execute(
QueryBuilder.select("value")
.from(counter_table)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val value = row.getLong("value")
Cassandra.session.execute(
QueryBuilder.update(orgTable)
.`with`(QueryBuilder.set(field, value))
.where(QueryBuilder.eq("id", id))
)
value
}
def getCounter(orgTable: String, field: String, id: UUID) = {
val key = orgTable + "_" + field
    // read the current counter value
    val row = Cassandra.session.execute(
      QueryBuilder.select("value")
.from(counter_table)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
row.getLong("value")
}
}
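// Hypothetical usage sketch for Counter (assumes a model object, e.g. `object Post`, mixes it in,
// and that `postId: UUID` identifies an existing row in a "post" table):
//   Post.incr("post", "vote_up", postId)       // bump the counter and mirror it onto the post row
//   Post.getCounter("post", "vote_up", postId) // read the counter value back
//
// The Follow trait below maintains follower edges per target and publishes follow/unfollow
// messages on the pub-sub topic so other components can react.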
trait Follow {
val followTable = "follow"
def followTarget(userId: UUID, target: String, target_id: UUID, about: String = "follow") = {
val updated = new Date()
val key = target + "_" + target_id.toString
// get current status of follow
val row = Cassandra.session.execute(
QueryBuilder.select().all()
.from(followTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("follower_id", userId))
).one()
if(row != null) false
else {
Cassandra.session.execute(
QueryBuilder.insertInto(followTable)
.value("key", key)
.value("follower_id", userId)
.value("updated", updated)
)
PubSubHelper.publish(constants.PubSub.TOPIC_FOLLOW, FollowMessage(userId, target, target_id, updated, about))
true
}
}
def unFollowTarget(userId: UUID, target: String, target_id: UUID) = {
val key = target + "_" + target_id.toString
// get current status of follow
val row = Cassandra.session.execute(
QueryBuilder.select().all()
.from(followTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("follower_id", userId))
).one()
if(row == null) false
else {
Cassandra.session.execute(
QueryBuilder.delete().from(followTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("follower_id", userId))
)
PubSubHelper.publish(constants.PubSub.TOPIC_FOLLOW, UnFollowMessage(userId, target, target_id))
true
}
}
def getFollowers(target: String, targetId: UUID) = {
val key = target + "_" + targetId.toString
Cassandra.session.execute(
QueryBuilder.select().all()
.from(followTable)
.where(QueryBuilder.eq("key", key))
).all().map(_.getUUID("follower_id"))
}
def getFollowTarget(userId: UUID) = {
val follows: scala.collection.mutable.Map[String, Set[UUID]] = scala.collection.mutable.Map()
Cassandra.session.execute(
QueryBuilder.select().all().from(Table.USER_FOLLOW)
.where(QueryBuilder.eq("user_id", userId))
).all().foreach { row =>
val target = row.getString("target")
if(!follows.isDefinedAt(target)) follows(target) = Set[UUID]()
follows(target) = follows(target) + row.getUUID("target_id")
}
follows
}
def getFollowTarget(userId: UUID, target: String) = {
Cassandra.session.execute(
QueryBuilder.select().all().from(Table.USER_FOLLOW)
.where(QueryBuilder.eq("user_id", userId))
.and(QueryBuilder.eq("target", target))
).all().map { row =>
row.getUUID("target_id")
}
}
def getFollowTarget(userIds: Set[UUID]) = {
val follows: scala.collection.mutable.Map[UUID, scala.collection.mutable.Map[String, Set[UUID]]] = scala.collection.mutable.Map()
Cassandra.session.execute(
QueryBuilder.select().all().from(Table.USER_FOLLOW)
.where(QueryBuilder.in("user_id", userIds.toSeq:_*))
).all().foreach { row =>
val userId = row.getUUID("user_id")
val target = row.getString("target")
if(!follows.isDefinedAt(userId)) follows(userId) = scala.collection.mutable.Map[String, Set[UUID]]()
if(!follows(userId).isDefinedAt(target)) follows(userId)(target) = Set[UUID]()
follows(userId)(target) = follows(userId)(target) + row.getUUID("target_id")
}
follows
}
}
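/** Keeps ids ordered by a Long value using two tables: `long_ordered` holds the value-ordered rows
  * that queries page over, while `long_ordered_track` remembers the last value written per id so
  * the stale ordered row can be located and deleted on update. */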
trait LongOrdered {
val longOrderedTable: String = "long_ordered"
val longOrderedTrackTable: String = "long_ordered_track"
def getLongIds(table: String, field: String, prefix: String = null, limit: Int, next: (Long, UUID) = null, desc: Boolean = true) = {
val key = if(prefix != null) table + "_" + field + "_" + prefix else table + "_" + field
val order = if(desc) QueryBuilder.desc("value") else QueryBuilder.asc("value")
var query = QueryBuilder.select().all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", key))
if(next != null) {
if(!desc) query = query.and(QueryBuilder.gt(List("value", "id"), List[AnyRef]( next._1: java.lang.Long, next._2)))
else query = query.and(QueryBuilder.lt(List("value", "id"), List[AnyRef]( next._1: java.lang.Long, next._2)))
}
Cassandra.session.execute(
query.limit(limit).orderBy(order)
)
.all().map(p => (p.getUUID("id"), (p.getDate("updated"), p.getString("action"), p.getString("f"), p.getUUID("fid"), p.getUUID("answer_id"))))
}
def longUpdate(table: String, field: String, id: UUID, value: java.lang.Long, prefixes: Set[String], updated: Date, action: String = null, from: String = null, fromId: UUID = null, answerId: UUID = null) = {
val keys = if(prefixes.size > 0) prefixes.map(table + "_" + field + "_" + _) else Set(table + "_" + field)
keys.foreach { key =>
var r_action = action
var r_from = from
var r_fromId = fromId
var r_answerId = answerId
      // look up the previously tracked value so the stale entry can be removed from the ordered table
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(longOrderedTrackTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val oldValue = if(longRow == null) 0L else longRow.getLong("value")
if(longRow != null) {
if(action == null) r_action = longRow.getString("action")
if(from == null) r_from = longRow.getString("f")
if(fromId == null) r_fromId = longRow.getUUID("fid")
if(answerId == null) r_answerId = longRow.getUUID("answer_id")
// delete old score
Cassandra.session.execute(
QueryBuilder.delete()
.all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("value", oldValue))
.and(QueryBuilder.eq("id", id))
)
}
      // record the new value for this id in the track table
Cassandra.session.execute(
QueryBuilder.update(longOrderedTrackTable)
.`with`(QueryBuilder.set("value", value))
.and(QueryBuilder.set("action", r_action))
.and(QueryBuilder.set("f", r_from))
.and(QueryBuilder.set("fid", r_fromId))
.and(QueryBuilder.set("answer_id", r_answerId))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
      // insert the new value into the ordered table
Cassandra.session.execute(
QueryBuilder.insertInto(longOrderedTable)
.value("key", key)
.value("value", value)
.value("id", id)
.value("updated", updated)
.value("action", r_action)
.value("f", r_from)
.value("fid", r_fromId)
.value("answer_id", r_answerId)
)
}
}
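  // Copies the ordered entries under `fromKey` to `toKey`: the first `first` rows are copied in a
  // synchronous batch, and any remaining rows are copied in a second, asynchronously executed batch.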
def longCopy(fromKey: String, toKey: String, first: Int = 50) = {
var r = 0
var batch = QueryBuilder.batch()
var lastRow: Row = null
Cassandra.session.execute(
QueryBuilder.select().all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.orderBy(QueryBuilder.desc("value"))
.limit(first)
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(longOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.and(QueryBuilder.eq("value", longRow.getLong("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(longOrderedTable)
.value("key", toKey)
.value("value", row.getLong("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(longOrderedTrackTable)
.value("value", row.getLong("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
lastRow = row
}
Cassandra.session.execute(batch)
if(r >= first) {
batch = QueryBuilder.batch()
Cassandra.session.execute(
QueryBuilder.select().all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.and(QueryBuilder.lt(List("value", "id"), List[AnyRef](lastRow.getLong("value"): java.lang.Long, lastRow.getUUID("id"))))
.orderBy(QueryBuilder.desc("value"))
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(longOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(longOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.and(QueryBuilder.eq("value", longRow.getLong("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(longOrderedTable)
.value("key", toKey)
.value("value", row.getLong("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(longOrderedTrackTable)
.value("value", row.getLong("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
}
Cassandra.session.executeAsync(batch)
}
}
}
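/** Date-valued analogue of LongOrdered: same ordered/track table pattern, keyed on a Date value. */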
trait DateOrdered {
val dateOrderedTable: String = "date_ordered"
val dateOrderedTrackTable: String = "date_ordered_track"
def getDateIds(table: String, field: String, prefix: String = null, limit: Int, next: (Long, UUID) = null, desc: Boolean = true) = {
val key = if(prefix != null) table + "_" + field + "_" + prefix else table + "_" + field
val order = if(desc) QueryBuilder.desc("value") else QueryBuilder.asc("value")
var query = QueryBuilder.select().all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", key))
if(next != null) {
if(!desc) query = query.and(QueryBuilder.gt(List("value", "id"), List[AnyRef]( next._1: java.lang.Long, next._2)))
else query = query.and(QueryBuilder.lt(List("value", "id"), List[AnyRef]( next._1: java.lang.Long, next._2)))
}
Cassandra.session.execute(
query.limit(limit).orderBy(order)
)
.all().map(p => (p.getUUID("id"), (p.getDate("updated"), p.getString("action"), p.getString("f"), p.getUUID("fid"), p.getUUID("answer_id"))))
}
def dateUpdate(table: String, field: String, id: UUID, value: Date, prefixes: Set[String], updated: Date, action: String = null, from: String = null, fromId: UUID = null, answerId: UUID = null) = {
val keys = if(prefixes.size > 0) prefixes.map(table + "_" + field + "_" + _) else Set(table + "_" + field)
keys.foreach { key =>
var r_action = action
var r_from = from
var r_fromId = fromId
var r_answerId = answerId
      // look up the previously tracked value so the stale entry can be removed from the ordered table
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(dateOrderedTrackTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val oldValue = if(longRow == null) null else longRow.getDate("value")
if(longRow != null) {
if(action == null) r_action = longRow.getString("action")
if(from == null) r_from = longRow.getString("f")
if(fromId == null) r_fromId = longRow.getUUID("fid")
if(answerId == null) r_answerId = longRow.getUUID("answer_id")
// delete old score
Cassandra.session.execute(
QueryBuilder.delete()
.all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("value", oldValue))
.and(QueryBuilder.eq("id", id))
)
}
      // record the new value for this id in the track table
Cassandra.session.execute(
QueryBuilder.update(dateOrderedTrackTable)
.`with`(QueryBuilder.set("value", value))
.and(QueryBuilder.set("action", r_action))
.and(QueryBuilder.set("f", r_from))
.and(QueryBuilder.set("fid", r_fromId))
.and(QueryBuilder.set("answer_id", r_answerId))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
      // insert the new value into the ordered table
Cassandra.session.execute(
QueryBuilder.insertInto(dateOrderedTable)
.value("key", key)
.value("value", value)
.value("id", id)
.value("updated", updated)
.value("action", r_action)
.value("f", r_from)
.value("fid", r_fromId)
.value("answer_id", r_answerId)
)
}
}
def dateCopy(fromKey: String, toKey: String, first: Int = 50) = {
var r = 0
var batch = QueryBuilder.batch()
var lastRow: Row = null
Cassandra.session.execute(
QueryBuilder.select().all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.orderBy(QueryBuilder.desc("value"))
.limit(first)
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(dateOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("value", longRow.getDate("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(dateOrderedTable)
.value("key", toKey)
.value("value", row.getDate("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(dateOrderedTrackTable)
.value("value", row.getDate("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
lastRow = row
}
Cassandra.session.execute(batch)
if(r >= first) {
batch = QueryBuilder.batch()
Cassandra.session.execute(
QueryBuilder.select().all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.and(QueryBuilder.lt(List("value", "id"), List[AnyRef](lastRow.getDate("value").getTime: java.lang.Long, lastRow.getUUID("id"))))
.orderBy(QueryBuilder.desc("value"))
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(dateOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(dateOrderedTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("value", longRow.getDate("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(dateOrderedTable)
.value("key", toKey)
.value("value", row.getDate("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(dateOrderedTrackTable)
.value("value", row.getDate("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
}
Cassandra.session.executeAsync(batch)
}
}
}
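/** Double-valued analogue of LongOrdered: same ordered/track table pattern, keyed on a Double value. */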
trait DoubleOrdered {
val doubleOrderedTable: String = "double_ordered"
val doubleOrderedTrackTable: String = "double_ordered_track"
def getDoubleIds(table: String, field: String, prefix: String = null, limit: Int, next: (Double, UUID) = null, desc: Boolean = true) = {
val key = if(prefix != null) table + "_" + field + "_" + prefix else table + "_" + field
val order = if(desc) QueryBuilder.desc("value") else QueryBuilder.asc("value")
var query = QueryBuilder.select().all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", key))
if(next != null) {
if(!desc) query = query.and(QueryBuilder.gt(List("value", "id"), List[AnyRef]( next._1: java.lang.Double, next._2)))
else query = query.and(QueryBuilder.lt(List("value", "id"), List[AnyRef]( next._1: java.lang.Double, next._2)))
}
Cassandra.session.execute(
query.limit(limit).orderBy(order)
)
.all().map(p => (p.getUUID("id"), (p.getDate("updated"), p.getString("action"), p.getString("f"), p.getUUID("fid"), p.getUUID("answer_id"))))
}
def doubleUpdate(table: String, field: String, id: UUID, value: Double, prefixes: Set[String], updated: Date, action: String = null, from: String = null, fromId: UUID = null, answerId: UUID = null) = {
val keys = if(prefixes.size > 0) prefixes.map(table + "_" + field + "_" + _) else Set(table + "_" + field)
keys.foreach { key =>
var r_action = action
var r_from = from
var r_fromId = fromId
var r_answerId = answerId
      // look up the previously tracked value so the stale entry can be removed from the ordered table
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(doubleOrderedTrackTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
).one()
val oldValue = if(longRow == null) 0D else longRow.getDouble("value")
if(longRow != null) {
if(action == null) r_action = longRow.getString("action")
if(from == null) r_from = longRow.getString("f")
if(fromId == null) r_fromId = longRow.getUUID("fid")
if(answerId == null) r_answerId = longRow.getUUID("answer_id")
// delete old score
Cassandra.session.execute(
QueryBuilder.delete()
.all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("value", oldValue))
.and(QueryBuilder.eq("id", id))
)
}
      // record the new value for this id in the track table
Cassandra.session.execute(
QueryBuilder.update(doubleOrderedTrackTable)
.`with`(QueryBuilder.set("value", value))
.and(QueryBuilder.set("action", r_action))
.and(QueryBuilder.set("f", r_from))
.and(QueryBuilder.set("fid", r_fromId))
.and(QueryBuilder.set("answer_id", r_answerId))
.where(QueryBuilder.eq("key", key))
.and(QueryBuilder.eq("id", id))
)
      // insert the new value into the ordered table
Cassandra.session.execute(
QueryBuilder.insertInto(doubleOrderedTable)
.value("key", key)
.value("value", value)
.value("id", id)
.value("updated", updated)
.value("action", r_action)
.value("f", r_from)
.value("fid", r_fromId)
.value("answer_id", r_answerId)
)
}
}
def doubleCopy(fromKey: String, toKey: String, first: Int = 50) = {
var r = 0
var batch = QueryBuilder.batch()
var lastRow: Row = null
Cassandra.session.execute(
QueryBuilder.select().all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.orderBy(QueryBuilder.desc("value"))
.limit(first)
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(doubleOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("value", longRow.getDouble("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(doubleOrderedTable)
.value("key", toKey)
.value("value", row.getDouble("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(doubleOrderedTrackTable)
.value("value", row.getDouble("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
lastRow = row
}
Cassandra.session.execute(batch)
if(r >= first) {
batch = QueryBuilder.batch()
Cassandra.session.execute(
QueryBuilder.select().all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", fromKey))
.and(QueryBuilder.lt(List("value", "id"), List[AnyRef](lastRow.getDouble("value"): java.lang.Double, lastRow.getUUID("id"))))
.orderBy(QueryBuilder.desc("value"))
).all().map { row =>
val longRow = Cassandra.session.execute(
QueryBuilder.select().all()
.from(doubleOrderedTrackTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("id", row.getUUID("id")))
).one()
if(longRow != null) {
// delete old score
batch.add(
QueryBuilder.delete()
.all()
.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", toKey))
.and(QueryBuilder.eq("value", longRow.getDouble("value")))
.and(QueryBuilder.eq("id", row.getUUID("id")))
)
}
// update longOrderedTable & longOrderedTrackTable for first 50 items
batch.add(
QueryBuilder.insertInto(doubleOrderedTable)
.value("key", toKey)
.value("value", row.getDouble("value"))
.value("id", row.getUUID("id"))
.value("updated", row.getDate("updated"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
)
batch.add(
QueryBuilder.insertInto(doubleOrderedTrackTable)
.value("value", row.getDouble("value"))
.value("action", row.getString("action"))
.value("f", row.getString("f"))
.value("fid", row.getUUID("fid"))
.value("answer_id", row.getUUID("answer_id"))
.value("key", toKey)
.value("id", row.getUUID("id"))
)
r = r + 1
}
Cassandra.session.executeAsync(batch)
}
}
def getDoubleTopRow(table: String, field: String, key: String) = {
val row = Cassandra.session.execute(
QueryBuilder.select().all.from(doubleOrderedTable)
.where(QueryBuilder.eq("key", table + "_" + field + "_" + key))
.orderBy(QueryBuilder.desc("value"))
.limit(1)
).one()
row
}
} | lequangdzung/quora-clone | api-app/app/models/social.scala | Scala | gpl-2.0 | 29,968 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.customop
import java.io.File
import java.net.URL
import org.apache.commons.io.FileUtils
import org.apache.mxnet.Context
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.slf4j.LoggerFactory
import scala.sys.process.Process
class CustomOpExampleSuite extends FunSuite with BeforeAndAfterAll {
private val logger = LoggerFactory.getLogger(classOf[CustomOpExampleSuite])
test("Example CI: Test Customop MNIST") {
// This test is CPU only
if (System.getenv().containsKey("SCALA_TEST_ON_GPU") &&
System.getenv("SCALA_TEST_ON_GPU").toInt == 1) {
logger.info("CPU test only, skipped...")
} else {
logger.info("Downloading mnist model")
val baseUrl = "https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci"
val tempDirPath = System.getProperty("java.io.tmpdir")
val modelDirPath = tempDirPath + File.separator + "mnist/"
val tmpFile = new File(tempDirPath + "/mnist/mnist.zip")
if (!tmpFile.exists()) {
FileUtils.copyURLToFile(new URL(baseUrl + "/mnist/mnist.zip"),
tmpFile)
}
// TODO: Need to confirm with Windows
Process("unzip " + tempDirPath + "/mnist/mnist.zip -d "
+ tempDirPath + "/mnist/") !
val context = Context.cpu()
val output = ExampleCustomOp.test(modelDirPath, context)
assert(output >= 0.95f)
}
}
test("Example CI: Test CustomopRtc MNIST") {
// This test is GPU only
    // TODO: RTC is deprecated, need to change to CUDA Module
val RTC_fixed = false
if (RTC_fixed) {
if (System.getenv().containsKey("SCALA_TEST_ON_GPU") &&
System.getenv("SCALA_TEST_ON_GPU").toInt == 1) {
logger.info("Downloading mnist model")
val baseUrl = "https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci"
val tempDirPath = System.getProperty("java.io.tmpdir")
val modelDirPath = tempDirPath + File.separator + "mnist/"
val tmpFile = new File(tempDirPath + "/mnist/mnist.zip")
if (!tmpFile.exists()) {
FileUtils.copyURLToFile(new URL(baseUrl + "/mnist/mnist.zip"),
tmpFile)
}
// TODO: Need to confirm with Windows
Process("unzip " + tempDirPath + "/mnist/mnist.zip -d "
+ tempDirPath + "/mnist/") !
val context = Context.gpu()
val output = ExampleCustomOpWithRtc.test(modelDirPath, context)
assert(output >= 0.95f)
} else {
logger.info("GPU test only, skipped...")
}
} else {
      logger.warn("RTC module is not up to date, please don't use this" +
        "\nCreate CudaModule for this")
}
}
}
| rahul003/mxnet | scala-package/examples/src/test/scala/org/apache/mxnetexamples/customop/CustomOpExampleSuite.scala | Scala | apache-2.0 | 3,469 |
package whitespace
import skinny.orm._, feature._
import scalikejdbc._
import org.joda.time._
case class PostTag(
id: Long,
tagId: Int,
postId: Int,
createdAt: DateTime
)
object PostTag extends SkinnyJoinTable[PostTag] {
override val connectionPoolName = 'ws
override val tableName = "posts_tags"
override val defaultAlias = createAlias("pt")
override def extract(rs: WrappedResultSet, rn: ResultName[PostTag]): PostTag = new PostTag(
id = rs.get(rn.id),
tagId = rs.get(rn.tagId),
postId = rs.get(rn.postId),
createdAt = rs.get(rn.createdAt)
)
}
| seratch/skinny-framework | factory-girl/src/test/scala/whitespace/PostTag.scala | Scala | mit | 607 |
/**
* Copyright (c) 2007-2011 Eric Torreborre <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software. Neither the name of specs nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.specs.samples
class isolatedExamples extends org.spex.Specification {
var x = 0
def inc() = {
x = x + 1
}
"a" should {
inc()
"b" in {
inc()
"b1" in {
inc()
x must_== 3
}
"b2" in {
inc()
x must_== 3
}
}
"c" in {
inc()
x must_== 2
}
}
"d" should {
inc()
"e" in {
inc()
"e1" in {
inc()
x must_== 3
}
"e2" in {
inc()
x must_== 3
}
}
"f" in {
inc()
x must_== 2
}
}
}
| yyuu/specs | src/test/scala/org/specs/samples/isolatedExamples.scala | Scala | mit | 1,983 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller, Matthew Saltz
* @version 1.3
* @date Wed May 13 14:58:25 EDT 2015
* @see LICENSE (MIT style license file).
*
* `MGraph` Dual Simulation Using Mutable Sets
*/
package scalation.graphalytics.mutable
import scala.collection.mutable.Map
import scala.collection.mutable.{Set => SET}
import scalation.graphalytics.mutable.{ExampleMGraphD => EX_GRAPH}
import scalation.util.time
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MDualSim` class provides a second implementation for Dual Graph Simulation.
* It differs from `DualSim` by not using inverse adjacency sets ('pa') in
* order to save space.
* @param g the data graph G(V, E, l)
* @param q the query graph Q(U, D, k)
*/
class MDualSim [TLabel] (g: MGraph [TLabel], q: MGraph [TLabel])
extends GraphMatcher (g, q)
{
private val DEBUG = true // debug flag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Apply the Dual Graph Simulation pattern matching algorithm to find the mappings
* from the query graph 'q' to the data graph 'g'. These are represented by a
* multi-valued function 'phi' that maps each query graph vertex 'u' to a
* set of data graph vertices '{v}'.
*/
def mappings (): Array [SET [Int]] = saltzDualSim (feasibleMates ())
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given the mappings 'phi' produced by the 'feasibleMates' method,
* eliminate mappings 'u -> v' when (1) v's children fail to match u's
* or (2) v's parents fail to match u's.
* @param phi array of mappings from a query vertex u to { graph vertices v }
*/
def saltzDualSim (phi: Array [SET [Int]]): Array [SET [Int]] =
{
var alter = true
while (alter) { // check for matching children/parents
alter = false
            for (u <- qRange; u_c <- q.ch(u)) {                    // for each u in q and its children u_c
if (DEBUG) { println (s"for u = $u, u_c = $u_c"); showMappings (phi) }
val newPhi = SET [Int] () // subset of phi(u_c) having a parent in phi(u)
val elab_u2u_c = q.elabel ((u, u_c)) // edge label in q for (u, u_c)
for (v <- phi(u)) { // for each v in g image of u
// val v_c = g.ch(v) // don't filter on edge labels
val v_c = g.ch(v).filter (elab_u2u_c == g.elabel (v, _)) // filter on edge labels
if (DEBUG) println (s"v = $v, v_c = $v_c, phi_u_c = " + phi(u_c))
val phiInt = v_c & phi(u_c) // children of v contained in phi(u_c)
if (phiInt.isEmpty) {
phi(u) -= v // remove vertex v from phi(u)
if (phi(u).isEmpty) return phi // no match for vertex u => no overall match
alter = true
} // if
// build newPhi to contain only those vertices in phi(u_c) which also have a parent in phi(u)
newPhi ++= phiInt
} // for
if (newPhi.isEmpty) return phi // empty newPhi => no match
if (newPhi.size < phi(u_c).size) alter = true // since newPhi is smaller than phi(u_c)
if (SELF_LOOPS && u_c == u) phi(u_c) &= newPhi else phi(u_c) = newPhi
} // for
} // while
phi
} // saltzDualSim
} // MDualSim class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MDualSimTest` object is used to test the `MDualSim` class.
* run-main scalation.graphalytics.mutable.MDualSimTest
*/
object MDualSimTest extends App
{
val g = new MGraph (Array (SET (), // ch(0)
SET (0, 2, 3, 4), // ch(1)
SET (0), // ch(2)
SET (4), // ch(3)
SET ()), // ch(4)
Array (11.0, 10.0, 11.0, 11.0, 11.0),
Map ((1, 0) -> -1.0,
(1, 2) -> -1.0,
(1, 3) -> -1.0,
(1, 4) -> -1.0,
(2, 0) -> -1.0,
                                (3, 4) -> -2.0))           // change from -1 to -2 to filter out vertices
val q = new MGraph (Array (SET (1, 2), // ch(0)
SET (), // ch(1)
SET (1)), // ch(2)
Array (10.0, 11.0, 11.0),
Map ((0, 1) -> -1.0,
(0, 2) -> -1.0,
(2, 1) -> -1.0))
g.printG ()
q.printG ()
val matcher = new MDualSim (g, q) // Graph Simulation Pattern Matcher
val phi = time { matcher.mappings () } // time the matcher
matcher.showMappings (phi) // display results
} // MDualSimTest object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MDualSimTest2` object is used to test the `MDualSim` class.
* run-main scalation.graphalytics.mutable.MDualSimTest2
*/
//object MDualSimTest2 extends App
//{
// val gSize = 1000 // size of the data graph
// val qSize = 10 // size of the query graph
// val nLabels = 100 // number of distinct labels
// val gAvDegree = 5 // average vertex out degree for data graph
// val qAvDegree = 2 // average vertex out degree for query graph
//
// q.printG ()
//
// val g = genRandomGraph (gSize, nLabels, gAvDegree, false, "g")
// val q = genBFSQuery (qSize, qAvDegree, g, false, "q")
//
// val matcher = new MDualSim (g, q) // Dual Graph Simulation Pattern Matcher
// val phi = time { matcher.mappings () } // time the matcher
// matcher.showMappings (phi) // display results
//
//} // MDualSimTest2 object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/graphalytics/mutable/MDualSim.scala | Scala | mit | 6,595 |
package com.socrata.soda.clients.datacoordinator
import com.rojoma.json.v3.ast._
import com.rojoma.json.v3.util.JsonUtil
import com.socrata.soda.server.id.RollupName
sealed abstract class RollupMutation extends DataCoordinatorInstruction {
override def toString = JsonUtil.renderJson(asJson)
}
case class CreateOrUpdateRollupInstruction(name: RollupName, soql: String) extends RollupMutation {
def asJson = JObject(Map(
"c" -> JString("create or update rollup"),
"name" -> JString(name.name),
"soql" -> JString(soql)
))
}
case class DropRollupInstruction(name: RollupName) extends RollupMutation {
def asJson = JObject(Map(
"c" -> JString("drop rollup"),
"name" -> JString(name.name)
))
}
| socrata-platform/soda-fountain | soda-fountain-lib/src/main/scala/com/socrata/soda/clients/datacoordinator/RollupMutation.scala | Scala | apache-2.0 | 735 |
package org.jetbrains.plugins.scala
package codeInsight.generation
import com.intellij.lang.LanguageCodeInsightActionHandler
import com.intellij.testFramework.fixtures.CodeInsightTestFixture
/**
* Nikolay.Tropin
* 8/23/13
*/
class GenerateEqualsTest extends ScalaGenerateTestBase {
import CodeInsightTestFixture.CARET_MARKER
override protected val handler: LanguageCodeInsightActionHandler =
new ScalaGenerateEqualsHandler
def testFindAllFields() {
val text = s"""class A (i: Int, val j: Int) {
| val x = 0$CARET_MARKER
| var y = 0
| private val z = 0
|}"""
val result = """class A (i: Int, val j: Int) {
| val x = 0
| var y = 0
| private val z = 0
|
| def canEqual(other: Any): Boolean = other.isInstanceOf[A]
|
| override def equals(other: Any): Boolean = other match {
| case that: A =>
| (that canEqual this) &&
| x == that.x &&
| y == that.y &&
| z == that.z &&
| j == that.j
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(x, z, j)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}"""
performTest(text, result)
}
def testInFinalClass() {
val text = s"""final class$CARET_MARKER A (i: Int, val j: Int) {
| private val z = 0
|}"""
val result = """final class A (i: Int, val j: Int) {
| private val z = 0
|
| override def equals(other: Any): Boolean = other match {
| case that: A =>
| z == that.z &&
| j == that.j
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(z, j)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}"""
performTest(text, result)
}
def testInAbstract() {
val text = s"""abstract class A (i: Int, val j: Int) extends Set[Int] {
| private val z = 0
|
|$CARET_MARKER}"""
val result = s"""abstract class A (i: Int, val j: Int) extends Set[Int] {
| private val z = 0
|
| override def canEqual(other: Any): Boolean = other.isInstanceOf[A]
|
| override def equals(other: Any): Boolean = other match {
| case that: A =>
| super.equals(that) &&
| (that canEqual this) &&
| z == that.z &&
| j == that.j
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(super.hashCode(), z, j)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}"""
performTest(text, result)
}
def testInInheritor() {
val text = s"""class A {
| val a = 0
|
| def canEqual(other: Any): Boolean = other.isInstanceOf[A]
|
| override def equals(other: Any): Boolean = other match {
| case that: A =>
| (that canEqual this) &&
| a == that.a
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(a)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}
|
|class B (i: Int, val j: Int) extends A {
| val z = 0$CARET_MARKER
|}"""
val result = """class A {
| val a = 0
|
| def canEqual(other: Any): Boolean = other.isInstanceOf[A]
|
| override def equals(other: Any): Boolean = other match {
| case that: A =>
| (that canEqual this) &&
| a == that.a
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(a)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}
|
|class B (i: Int, val j: Int) extends A {
| val z = 0
|
| override def canEqual(other: Any): Boolean = other.isInstanceOf[B]
|
| override def equals(other: Any): Boolean = other match {
| case that: B =>
| super.equals(that) &&
| (that canEqual this) &&
| z == that.z &&
| j == that.j
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(super.hashCode(), z, j)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}"""
performTest(text, result)
}
def testInheritsMethodsFromJavaLangObject() = {
val text = s"""class A {
| val a = 0
|}
|
|class B (i: Int, val j: Int) extends A {
| val z = 0$CARET_MARKER
|}"""
val result = """class A {
| val a = 0
|}
|
|class B (i: Int, val j: Int) extends A {
| val z = 0
|
| def canEqual(other: Any): Boolean = other.isInstanceOf[B]
|
| override def equals(other: Any): Boolean = other match {
| case that: B =>
| (that canEqual this) &&
| z == that.z &&
| j == that.j
| case _ => false
| }
|
| override def hashCode(): Int = {
| val state = Seq(z, j)
| state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
| }
|}"""
performTest(text, result)
}
}
| loskutov/intellij-scala | test/org/jetbrains/plugins/scala/codeInsight/generation/GenerateEqualsTest.scala | Scala | apache-2.0 | 7,223 |
package props
import org.scalatest.PropSpec
import org.scalatest.prop.GeneratorDrivenPropertyChecks
class UnionSpec extends PropSpec with GeneratorDrivenPropertyChecks {
property("set union is associative") {
forAll { (a: Set[String], b: Set[String], c: Set[String]) =>
assert( union(union(a,b),c) == union(a,union(b,c)) )
}
}
property("set union is commutative") {
forAll { (a: Set[String], b: Set[String]) =>
assert( union(a,b) == union(b,a) )
}
}
property("set union has neutral element: empty set") {
forAll { (a: Set[Int]) =>
assert( union(a,Set()) == a )
}
}
property("set union of equal sets is equal to the sets") {
forAll { (a: Set[Int]) =>
assert( union(a,a) == a )
}
}
property("sets are subsets of their union") {
forAll { (a: Set[String], b: Set[String]) =>
val u = union(a,b)
assert( a.subsetOf(u) && b.subsetOf(u) )
}
}
}
| jastice/proptests | src/test/scala/props/UnionSpec.scala | Scala | bsd-2-clause | 941 |
package com.mucahitbayar.shoppingcart.checkout
import com.mucahitbayar.shoppingcart.domain.{Discount, Product}
case class ShoppingCartItem(product: Product, qty: Int = 1) {
def total(): BigDecimal = product.price * qty
def total(discount: Discount): BigDecimal = {
if (discount.minQty <= qty) {
val extraQty = qty % discount.minQty
val quantity = qty - extraQty
val total = product.price * extraQty + discount.reducedPrice * quantity
total
}
else product.price * qty
}
def isSame(other: ShoppingCartItem): Boolean = this.product.isEqual(other.product)
def add(other: ShoppingCartItem): ShoppingCartItem = {
if (isSame(other)) {
ShoppingCartItem(this.product, this.qty + other.qty)
} else {
      throw new IllegalArgumentException("The products are not the same!")
}
}
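  // Worked example of total(discount); the Product/Discount constructor shapes below are
  // assumptions - only the fields actually used here (price, minQty, reducedPrice) come from
  // this code base:
  //
  //   val item = ShoppingCartItem(Product("A1", "Apple", BigDecimal(50)), qty = 5)
  //   val deal = Discount(minQty = 2, reducedPrice = BigDecimal(45))
  //   item.total(deal) // extraQty = 5 % 2 = 1, quantity = 4 => 50 * 1 + 45 * 4 = 230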
} | mucahitbayar/ScalaTDDShop | src/main/scala/com/mucahitbayar/shoppingcart/checkout/ShoppingCartItem.scala | Scala | mit | 841 |
/*
* Copyright 2012-2013 Eligotech BV.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eligosource.eventsourced.core
import akka.actor.Actor
/**
* Stackable modification for actors that need to receive a re-sequenced message stream.
 * `(sequence number, message)` tuples will be re-sequenced by this trait according to
 * `sequence number`, and the modified actor's `receive` method is called with the
 * re-sequenced `message`s. Messages of types other than `(Long, Any)` bypass the
 * re-sequencing algorithm.
*/
trait Sequencer extends Actor {
import scala.collection.mutable.Map
private val delayed = Map.empty[Long, Any]
private var delivered = 0L
abstract override def receive = {
case (seqnr: Long, msg) => {
resequence(seqnr, msg)
}
case msg => {
super.receive(msg)
}
}
@scala.annotation.tailrec
private def resequence(seqnr: Long, msg: Any) {
if (seqnr == delivered + 1) {
delivered = seqnr
super.receive(msg)
} else {
delayed += (seqnr -> msg)
}
val eo = delayed.remove(delivered + 1)
if (eo.isDefined) resequence(delivered + 1, eo.get)
}
}
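// Hypothetical usage sketch (actor and message names are assumptions): mix the trait in at
// instantiation time so it stacks on top of the concrete `receive`, then send tagged tuples.
//
//   class Printer extends Actor {
//     def receive = { case msg => println(msg) }
//   }
//   val ref = system.actorOf(Props(new Printer with Sequencer))
//   ref ! ((2L, "b")); ref ! ((1L, "a")) // Printer sees "a" first, then "b"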
| CoderPaulK/eventsourced | es-core/src/main/scala/org/eligosource/eventsourced/core/Sequencer.scala | Scala | apache-2.0 | 1,670 |
/*
* Copyright 2011 TomTom International BV
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tomtom.dps.mavenizer.dependency
import org.scalatest.{FeatureSpec, GivenWhenThen}
import org.scalatest.prop.PropertyChecks
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import xml.NodeSeq
@RunWith(classOf[JUnitRunner])
class NexusSpec extends FeatureSpec with GivenWhenThen with PropertyChecks with NexusComponent {
type ? = this.type
val MAX: Int = 64
val nexus = new NexusImpl(null)
import nexus.makeArtifact
feature("Missing data should result in no artifact being created by the Nexus.makeArtifact factory method") {
scenario("A missing component should result in no JarArtifact") {
given("an artifact xml")
when("one or more of artifactId, groupId, or version is missing")
then("the makeArtifact factory should return None")
val cases = for (subset <- powerSet(List("artifactId", "groupId", "version"))) yield {
for (elem <- subset) yield {
elem match {
case "artifactId" => <artifactId>
{elem}
</artifactId>
case "groupId" => <groupId>
{elem}
</groupId>
case "version" => <version>
{elem}
</version>
}
}
}
for (c <- cases) {
val xml = <artifact>
{NodeSeq.fromSeq(c)}
</artifact>
assert(None === makeArtifact(xml))
}
assert(None === makeArtifact(<artifact></artifact>))
}
}
feature("Artifact xml can occur in any order and the same JarArtifact will be created") {
scenario("For an artifact xml") {
when("the artifactId, groupId, and version may arrive in any order")
then("we can parse it correctly no matter the order")
for (ordering <- List("artifactId", "groupId", "version").permutations) yield {
val cases = for (elem <- ordering) yield {
elem match {
case "artifactId" => <artifactId>
{elem}
</artifactId>
case "groupId" => <groupId>
{elem}
</groupId>
case "version" => <version>
{elem}
</version>
}
}
for (c <- cases) {
assert(None != makeArtifact(<artifact>
{NodeSeq.fromSeq(c)}
</artifact>))
}
}
}
}
  /** Given Seq(1,2,3), generates every non-empty proper subset: Seq(1), Seq(2), Seq(3), Seq(1,2), Seq(1,3), Seq(2,3) (in no particular order). */
def powerSet[X](xs: Seq[X]): Seq[Seq[X]] = {
val xis = xs.zipWithIndex
    for (j <- 1 until ((1 << xs.length) - 1)) yield {
for ((x, i) <- xis if ((j & (1 << i)) != 0)) yield x
}
}
} | ebowman/mavenizer | src/test/scala/com/tomtom/dps/mavenizer/dependency/NexusSpec.scala | Scala | apache-2.0 | 3,246 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}
import scala.util.Random
@com.intel.analytics.bigdl.tags.Parallel
class LogSpec extends FlatSpec with Matchers {
"A Log" should "generate correct output" in {
val input = Tensor(Storage[Double](Array(1.0, 2, 3, 4, 5, 6)), 1, Array(2, 3))
val output = Tensor(Storage(Array(0.0, 0.6931471805599453, 1.0986122886681098,
1.3862943611198906, 1.6094379124341003, 1.791759469228055)), 1, Array(2, 3))
val log = new Log[Double]()
val logOutput = log.forward(input)
logOutput should equal (output)
}
"A Log" should "generate correct grad" in {
val input = Tensor(Storage[Double](Array(1.0, 2, 3, 4, 5, 6)), 1, Array(2, 3))
val gradOutput = Tensor(Storage(Array(0.1, 0.2, 0.3, 0.4, 0.5, 0.6)), 1, Array(2, 3))
val log = new Log[Double]()
val gradInput = log.backward(input, gradOutput)
gradInput should equal (Tensor(Storage(Array(0.1, 0.1, 0.1, 0.1, 0.1, 0.1)), 1, Array(2, 3)))
}
}
class LogSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val log = Log[Float]().setName("log")
val input = Tensor[Float](10).apply1(_ => Random.nextFloat())
runSerializationTest(log, input)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/LogSpec.scala | Scala | apache-2.0 | 2,009 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst
import java.sql.{Date, Timestamp}
import scala.language.implicitConversions
import org.apache.spark.api.java.function.FilterFunction
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.objects.Invoke
import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._
/**
* A collection of implicit conversions that create a DSL for constructing catalyst data structures.
*
* {{{
* scala> import org.apache.spark.sql.catalyst.dsl.expressions._
*
* // Standard operators are added to expressions.
* scala> import org.apache.spark.sql.catalyst.expressions.Literal
* scala> Literal(1) + Literal(1)
* res0: org.apache.spark.sql.catalyst.expressions.Add = (1 + 1)
*
* // There is a conversion from 'symbols to unresolved attributes.
* scala> 'a.attr
* res1: org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute = 'a
*
* // These unresolved attributes can be used to create more complicated expressions.
* scala> 'a === 'b
* res2: org.apache.spark.sql.catalyst.expressions.EqualTo = ('a = 'b)
*
* // SQL verbs can be used to construct logical query plans.
* scala> import org.apache.spark.sql.catalyst.plans.logical._
* scala> import org.apache.spark.sql.catalyst.dsl.plans._
* scala> LocalRelation('key.int, 'value.string).where('key === 1).select('value).analyze
* res3: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan =
* Project [value#3]
* Filter (key#2 = 1)
* LocalRelation [key#2,value#3], []
* }}}
*/
package object dsl {
trait ImplicitOperators {
def expr: Expression
def unary_- : Expression = UnaryMinus(expr)
def unary_! : Predicate = Not(expr)
def unary_~ : Expression = BitwiseNot(expr)
def + (other: Expression): Expression = Add(expr, other)
def - (other: Expression): Expression = Subtract(expr, other)
def * (other: Expression): Expression = Multiply(expr, other)
def / (other: Expression): Expression = Divide(expr, other)
def div (other: Expression): Expression = IntegralDivide(expr, other)
def % (other: Expression): Expression = Remainder(expr, other)
def & (other: Expression): Expression = BitwiseAnd(expr, other)
def | (other: Expression): Expression = BitwiseOr(expr, other)
def ^ (other: Expression): Expression = BitwiseXor(expr, other)
def && (other: Expression): Predicate = And(expr, other)
def || (other: Expression): Predicate = Or(expr, other)
def < (other: Expression): Predicate = LessThan(expr, other)
def <= (other: Expression): Predicate = LessThanOrEqual(expr, other)
def > (other: Expression): Predicate = GreaterThan(expr, other)
def >= (other: Expression): Predicate = GreaterThanOrEqual(expr, other)
def === (other: Expression): Predicate = EqualTo(expr, other)
def <=> (other: Expression): Predicate = EqualNullSafe(expr, other)
def =!= (other: Expression): Predicate = Not(EqualTo(expr, other))
def in(list: Expression*): Expression = list match {
case Seq(l: ListQuery) => expr match {
case c: CreateNamedStruct => InSubquery(c.valExprs, l)
case other => InSubquery(Seq(other), l)
}
case _ => In(expr, list)
}
def like(other: Expression): Expression = Like(expr, other)
def rlike(other: Expression): Expression = RLike(expr, other)
def contains(other: Expression): Expression = Contains(expr, other)
def startsWith(other: Expression): Expression = StartsWith(expr, other)
def endsWith(other: Expression): Expression = EndsWith(expr, other)
def substr(pos: Expression, len: Expression = Literal(Int.MaxValue)): Expression =
Substring(expr, pos, len)
def substring(pos: Expression, len: Expression = Literal(Int.MaxValue)): Expression =
Substring(expr, pos, len)
def isNull: Predicate = IsNull(expr)
def isNotNull: Predicate = IsNotNull(expr)
def getItem(ordinal: Expression): UnresolvedExtractValue = UnresolvedExtractValue(expr, ordinal)
def getField(fieldName: String): UnresolvedExtractValue =
UnresolvedExtractValue(expr, Literal(fieldName))
def cast(to: DataType): Expression = Cast(expr, to)
def asc: SortOrder = SortOrder(expr, Ascending)
def asc_nullsLast: SortOrder = SortOrder(expr, Ascending, NullsLast, Set.empty)
def desc: SortOrder = SortOrder(expr, Descending)
def desc_nullsFirst: SortOrder = SortOrder(expr, Descending, NullsFirst, Set.empty)
def as(alias: String): NamedExpression = Alias(expr, alias)()
def as(alias: Symbol): NamedExpression = Alias(expr, alias.name)()
}
trait ExpressionConversions {
implicit class DslExpression(e: Expression) extends ImplicitOperators {
def expr: Expression = e
}
implicit def booleanToLiteral(b: Boolean): Literal = Literal(b)
implicit def byteToLiteral(b: Byte): Literal = Literal(b)
implicit def shortToLiteral(s: Short): Literal = Literal(s)
implicit def intToLiteral(i: Int): Literal = Literal(i)
implicit def longToLiteral(l: Long): Literal = Literal(l)
implicit def floatToLiteral(f: Float): Literal = Literal(f)
implicit def doubleToLiteral(d: Double): Literal = Literal(d)
implicit def stringToLiteral(s: String): Literal = Literal.create(s, StringType)
implicit def dateToLiteral(d: Date): Literal = Literal(d)
implicit def bigDecimalToLiteral(d: BigDecimal): Literal = Literal(d.underlying())
implicit def bigDecimalToLiteral(d: java.math.BigDecimal): Literal = Literal(d)
implicit def decimalToLiteral(d: Decimal): Literal = Literal(d)
implicit def timestampToLiteral(t: Timestamp): Literal = Literal(t)
implicit def binaryToLiteral(a: Array[Byte]): Literal = Literal(a)
implicit def symbolToUnresolvedAttribute(s: Symbol): analysis.UnresolvedAttribute =
analysis.UnresolvedAttribute(s.name)
/** Converts $"col name" into an [[analysis.UnresolvedAttribute]]. */
implicit class StringToAttributeConversionHelper(val sc: StringContext) {
// Note that if we make ExpressionConversions an object rather than a trait, we can
// then make this a value class to avoid the small penalty of runtime instantiation.
def $(args: Any*): analysis.UnresolvedAttribute = {
analysis.UnresolvedAttribute(sc.s(args : _*))
}
}
def rand(e: Long): Expression = Rand(e)
def sum(e: Expression): Expression = Sum(e).toAggregateExpression()
def sumDistinct(e: Expression): Expression = Sum(e).toAggregateExpression(isDistinct = true)
def count(e: Expression): Expression = Count(e).toAggregateExpression()
def countDistinct(e: Expression*): Expression =
Count(e).toAggregateExpression(isDistinct = true)
def approxCountDistinct(e: Expression, rsd: Double = 0.05): Expression =
HyperLogLogPlusPlus(e, rsd).toAggregateExpression()
def avg(e: Expression): Expression = Average(e).toAggregateExpression()
def first(e: Expression): Expression = new First(e).toAggregateExpression()
def last(e: Expression): Expression = new Last(e).toAggregateExpression()
def min(e: Expression): Expression = Min(e).toAggregateExpression()
def minDistinct(e: Expression): Expression = Min(e).toAggregateExpression(isDistinct = true)
def max(e: Expression): Expression = Max(e).toAggregateExpression()
def maxDistinct(e: Expression): Expression = Max(e).toAggregateExpression(isDistinct = true)
def upper(e: Expression): Expression = Upper(e)
def lower(e: Expression): Expression = Lower(e)
def coalesce(args: Expression*): Expression = Coalesce(args)
def greatest(args: Expression*): Expression = Greatest(args)
def least(args: Expression*): Expression = Least(args)
def sqrt(e: Expression): Expression = Sqrt(e)
def abs(e: Expression): Expression = Abs(e)
def star(names: String*): Expression = names match {
case Seq() => UnresolvedStar(None)
case target => UnresolvedStar(Option(target))
}
def namedStruct(e: Expression*): Expression = CreateNamedStruct(e)
def callFunction[T, U](
func: T => U,
returnType: DataType,
argument: Expression): Expression = {
val function = Literal.create(func, ObjectType(classOf[T => U]))
Invoke(function, "apply", returnType, argument :: Nil)
}
def windowSpec(
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder],
frame: WindowFrame): WindowSpecDefinition =
WindowSpecDefinition(partitionSpec, orderSpec, frame)
def windowExpr(windowFunc: Expression, windowSpec: WindowSpecDefinition): WindowExpression =
WindowExpression(windowFunc, windowSpec)
implicit class DslSymbol(sym: Symbol) extends ImplicitAttribute { def s: String = sym.name }
// TODO more implicit class for literal?
implicit class DslString(val s: String) extends ImplicitOperators {
override def expr: Expression = Literal(s)
def attr: UnresolvedAttribute = analysis.UnresolvedAttribute(s)
}
abstract class ImplicitAttribute extends ImplicitOperators {
def s: String
def expr: UnresolvedAttribute = attr
def attr: UnresolvedAttribute = analysis.UnresolvedAttribute(s)
/** Creates a new AttributeReference of type boolean */
def boolean: AttributeReference = AttributeReference(s, BooleanType, nullable = true)()
/** Creates a new AttributeReference of type byte */
def byte: AttributeReference = AttributeReference(s, ByteType, nullable = true)()
/** Creates a new AttributeReference of type short */
def short: AttributeReference = AttributeReference(s, ShortType, nullable = true)()
/** Creates a new AttributeReference of type int */
def int: AttributeReference = AttributeReference(s, IntegerType, nullable = true)()
/** Creates a new AttributeReference of type long */
def long: AttributeReference = AttributeReference(s, LongType, nullable = true)()
/** Creates a new AttributeReference of type float */
def float: AttributeReference = AttributeReference(s, FloatType, nullable = true)()
/** Creates a new AttributeReference of type double */
def double: AttributeReference = AttributeReference(s, DoubleType, nullable = true)()
/** Creates a new AttributeReference of type string */
def string: AttributeReference = AttributeReference(s, StringType, nullable = true)()
/** Creates a new AttributeReference of type date */
def date: AttributeReference = AttributeReference(s, DateType, nullable = true)()
/** Creates a new AttributeReference of type decimal */
def decimal: AttributeReference =
AttributeReference(s, DecimalType.SYSTEM_DEFAULT, nullable = true)()
/** Creates a new AttributeReference of type decimal */
def decimal(precision: Int, scale: Int): AttributeReference =
AttributeReference(s, DecimalType(precision, scale), nullable = true)()
/** Creates a new AttributeReference of type timestamp */
def timestamp: AttributeReference = AttributeReference(s, TimestampType, nullable = true)()
/** Creates a new AttributeReference of type binary */
def binary: AttributeReference = AttributeReference(s, BinaryType, nullable = true)()
/** Creates a new AttributeReference of type array */
def array(dataType: DataType): AttributeReference =
AttributeReference(s, ArrayType(dataType), nullable = true)()
def array(arrayType: ArrayType): AttributeReference =
AttributeReference(s, arrayType)()
/** Creates a new AttributeReference of type map */
def map(keyType: DataType, valueType: DataType): AttributeReference =
map(MapType(keyType, valueType))
def map(mapType: MapType): AttributeReference =
AttributeReference(s, mapType, nullable = true)()
/** Creates a new AttributeReference of type struct */
def struct(structType: StructType): AttributeReference =
AttributeReference(s, structType, nullable = true)()
def struct(attrs: AttributeReference*): AttributeReference =
struct(StructType.fromAttributes(attrs))
/** Creates a new AttributeReference of object type */
def obj(cls: Class[_]): AttributeReference =
AttributeReference(s, ObjectType(cls), nullable = true)()
/** Create a function. */
def function(exprs: Expression*): UnresolvedFunction =
UnresolvedFunction(s, exprs, isDistinct = false)
def distinctFunction(exprs: Expression*): UnresolvedFunction =
UnresolvedFunction(s, exprs, isDistinct = true)
}
implicit class DslAttribute(a: AttributeReference) {
def notNull: AttributeReference = a.withNullability(false)
def canBeNull: AttributeReference = a.withNullability(true)
def at(ordinal: Int): BoundReference = BoundReference(ordinal, a.dataType, a.nullable)
}
}
object expressions extends ExpressionConversions // scalastyle:ignore
object plans { // scalastyle:ignore
def table(ref: String): LogicalPlan = UnresolvedRelation(TableIdentifier(ref))
def table(db: String, ref: String): LogicalPlan =
UnresolvedRelation(TableIdentifier(ref, Option(db)))
implicit class DslLogicalPlan(val logicalPlan: LogicalPlan) {
def select(exprs: Expression*): LogicalPlan = {
val namedExpressions = exprs.map {
case e: NamedExpression => e
case e => UnresolvedAlias(e)
}
Project(namedExpressions, logicalPlan)
}
def where(condition: Expression): LogicalPlan = Filter(condition, logicalPlan)
def filter[T : Encoder](func: T => Boolean): LogicalPlan = TypedFilter(func, logicalPlan)
def filter[T : Encoder](func: FilterFunction[T]): LogicalPlan = TypedFilter(func, logicalPlan)
def serialize[T : Encoder]: LogicalPlan = CatalystSerde.serialize[T](logicalPlan)
def deserialize[T : Encoder]: LogicalPlan = CatalystSerde.deserialize[T](logicalPlan)
def limit(limitExpr: Expression): LogicalPlan = Limit(limitExpr, logicalPlan)
def join(
otherPlan: LogicalPlan,
joinType: JoinType = Inner,
condition: Option[Expression] = None): LogicalPlan =
Join(logicalPlan, otherPlan, joinType, condition)
def cogroup[Key: Encoder, Left: Encoder, Right: Encoder, Result: Encoder](
otherPlan: LogicalPlan,
func: (Key, Iterator[Left], Iterator[Right]) => TraversableOnce[Result],
leftGroup: Seq[Attribute],
rightGroup: Seq[Attribute],
leftAttr: Seq[Attribute],
rightAttr: Seq[Attribute]
): LogicalPlan = {
CoGroup.apply[Key, Left, Right, Result](
func,
leftGroup,
rightGroup,
leftAttr,
rightAttr,
logicalPlan,
otherPlan)
}
def orderBy(sortExprs: SortOrder*): LogicalPlan = Sort(sortExprs, true, logicalPlan)
def sortBy(sortExprs: SortOrder*): LogicalPlan = Sort(sortExprs, false, logicalPlan)
def groupBy(groupingExprs: Expression*)(aggregateExprs: Expression*): LogicalPlan = {
val aliasedExprs = aggregateExprs.map {
case ne: NamedExpression => ne
case e => Alias(e, e.toString)()
}
Aggregate(groupingExprs, aliasedExprs, logicalPlan)
}
def window(
windowExpressions: Seq[NamedExpression],
partitionSpec: Seq[Expression],
orderSpec: Seq[SortOrder]): LogicalPlan =
Window(windowExpressions, partitionSpec, orderSpec, logicalPlan)
def subquery(alias: Symbol): LogicalPlan = SubqueryAlias(alias.name, logicalPlan)
def except(otherPlan: LogicalPlan, isAll: Boolean): LogicalPlan =
Except(logicalPlan, otherPlan, isAll)
def intersect(otherPlan: LogicalPlan, isAll: Boolean): LogicalPlan =
Intersect(logicalPlan, otherPlan, isAll)
def union(otherPlan: LogicalPlan): LogicalPlan = Union(logicalPlan, otherPlan)
def generate(
generator: Generator,
unrequiredChildIndex: Seq[Int] = Nil,
outer: Boolean = false,
alias: Option[String] = None,
outputNames: Seq[String] = Nil): LogicalPlan =
Generate(generator, unrequiredChildIndex, outer,
alias, outputNames.map(UnresolvedAttribute(_)), logicalPlan)
def insertInto(tableName: String, overwrite: Boolean = false): LogicalPlan =
InsertIntoTable(
analysis.UnresolvedRelation(TableIdentifier(tableName)),
Map.empty, logicalPlan, overwrite, ifPartitionNotExists = false)
def as(alias: String): LogicalPlan = SubqueryAlias(alias, logicalPlan)
def coalesce(num: Integer): LogicalPlan =
Repartition(num, shuffle = false, logicalPlan)
def repartition(num: Integer): LogicalPlan =
Repartition(num, shuffle = true, logicalPlan)
def distribute(exprs: Expression*)(n: Int): LogicalPlan =
RepartitionByExpression(exprs, logicalPlan, numPartitions = n)
def analyze: LogicalPlan =
EliminateSubqueryAliases(analysis.SimpleAnalyzer.execute(logicalPlan))
def hint(name: String, parameters: Any*): LogicalPlan =
UnresolvedHint(name, parameters, logicalPlan)
}
}
}
| guoxiaolongzte/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala | Scala | apache-2.0 | 18,312 |
import strawman.collection.Iterator
object Test {
def main(args: Array[String]): Unit = {
// FIXME: issue loading package object
// val it = new MyIterator
// println(it.hasNext)
// println(it.next())
println(true)
println(3)
}
}
//class MyIterator extends Iterator[Int] {
// override def hasNext: Boolean = true
// override def next(): Int = 3
//}
| som-snytt/dotty | tests/link/strawman/iterator-1.scala | Scala | apache-2.0 | 378 |
package geotrellis.test.multiband.accumulo
import geotrellis.config.Dataset
import geotrellis.raster.MultibandTile
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.test.AccumuloTest
import geotrellis.test.multiband.load.TemporalHadoopLoad
import geotrellis.util.SparkSupport
import org.apache.spark.SparkContext
abstract class TemporalHadoopIngestTest(dataset: Dataset) extends AccumuloTest[TemporalProjectedExtent, SpaceTimeKey, MultibandTile](dataset) with TemporalHadoopLoad
object TemporalHadoopIngestTest {
def apply(implicit dataset: Dataset, _sc: SparkContext) = new TemporalHadoopIngestTest(dataset) {
@transient implicit val sc = SparkSupport.configureTime(dataset)(_sc)
}
}
| geotrellis/geotrellis-integration-tests-tool | src/main/scala/geotrellis/test/multiband/accumulo/TemporalHadoopIngestTest.scala | Scala | apache-2.0 | 721 |
package cas.web.pages
import spray.routing._
import Directives._
import akka.actor.ActorRef
import akka.util.Timeout
import cas.analysis.estimation._
import cas.persistence.searching.{ElasticSearch, SearchEngine}
import cas.utils.{Files, Web}
import cas.web.dealers.vk.VkApiDealer
import cas.web.model.UsingDealerProtocol._
import cas.web.model._
import org.joda.time.{Duration, Period}
import spray.client.pipelining._
import akka.pattern.ask
import cas.analysis.subject.Subject
import cas.analysis.subject.components.{Attachments, Likability, Relevance}
import cas.math.Mathf._
import spray.json._
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
import cas.utils.StdImplicits._
import cas.utils.UtilAliases.ErrorMsg
import cas.web.dealers.DealersFactory
import scala.concurrent.Await
import scala.concurrent.duration._
import cas.web.pages.templates.Templates._
import scala.collection.mutable
// TODO: Rename to vk auth
object ControlPage {
import VkApiDealer._
import cas.web.interface.ImplicitRuntime._
import system.dispatcher
import cas.service.AServiceControl.{GetStatus, Start, Status, Stop}
implicit val timeout = Timeout(60.seconds)
val searcher = new ElasticSearch("http://localhost:9201", "content-ind", "posts")
var useAttachments = false
var useLikes = false
var useRelevance = false
var useCorrectness = false
/*new Period().plusSeconds(10) -> 1.0)*/
def apply(pagePath: String, serviceControl: ActorRef) = path(pagePath) {
get {
parameter("useAttachments".as[Boolean].?, "useLikes".as[Boolean].?, "useRelevance".as[Boolean].?,
"useCorrectness".as[Boolean].?, "isFormSent".as[Boolean].?) {
(attachmentsOpt, likesOpt, relevanceOpt, correctnessOpt, isFormSent) =>
val isTurnOf = attachmentsOpt.isEmpty && likesOpt.isEmpty && relevanceOpt.isEmpty &&
correctnessOpt.isEmpty && isFormSent.isEmpty
if (isTurnOf) {
onComplete((serviceControl ? GetStatus).mapTo[Status]) {
case Success(status) => complete(getHtml(status.status.toString, pagePath))
case Failure(NonFatal(ex)) => complete(getHtml(s"Application malformed: `${ex.getMessage}`", pagePath))
}
} else {
useAttachments = attachmentsOpt.isDefined && attachmentsOpt.get
useLikes = likesOpt.isDefined && likesOpt.get
useRelevance = relevanceOpt.isDefined && relevanceOpt.get
useCorrectness = correctnessOpt.isDefined && correctnessOpt.get
var attachEstimOpt: Option[AttachmentsEstimator] = None
val systems = new mutable.ListBuffer[ActualityEstimator]()
if (useAttachments) attachEstimOpt = Some(new AttachmentsEstimator(new AttachmentsConfigs))
if (useLikes) systems += new LoyaltyEstimator(LoyaltyConfigs(Map(
Duration.standardSeconds(5) -> 0.5,
Duration.standardMinutes(10) -> 0.2,
Duration.standardMinutes(15) -> 0.142857143,
Duration.standardMinutes(20) -> 0.1),
0.33))
if (useRelevance) systems += new InvRelevanceEstimator(InvRelevanceConfigs(searcher, 0.055, 0.33))
if (useCorrectness) systems += new CorrectnessEstimator(CorrectnessConfigs(weight = 0.33))
val estim = new TotalEstimator(systems.toList, attachEstimOpt)
val isAnyInitErrorsOpt = (for {
isSearcherInit <- Try(Await.result(searcher.initStorage, 10.seconds)).
toEither.left.map(e => s"Unable to init searcher: `${e.getMessage}`")
_ <- isSearcherInit
dealer <- tryCreateDealer(searcher).asEitherString
rawStatus = serviceControl ? Start(dealer, estim)
status <- Try(Await.result(rawStatus.mapTo[Status], timeout.duration)).
toEither.transformLeft(e => s"Internal service error: `${e.getMessage}`")
} yield status).left.toOption
onComplete((serviceControl ? GetStatus).mapTo[Status]) {
case Success(status) =>
complete(getHtml(status.status.toString, pagePath, isAnyInitErrorsOpt))
case Failure(NonFatal(ex)) =>
complete(getHtml(s"Application malformed: `${ex.getMessage}`", pagePath, isAnyInitErrorsOpt))
}
}
}
}
}
// TODO: Separate view and model
def getHtml(status: String, path: String, isInitErrorsOpt: Option[ErrorMsg] = None) = defaultTemplate { <span>
<h3 class="mb20">Status: { status }</h3>
<form action={path} method="get">
<input type="hidden" name="isFormSent" value="true" />
<div class="mb10">Components:</div>
<label><input type="checkbox" name="useAttachments" class="mb10" checked={useAttachments.toOption(xml.Text(""))} />
Use attachments</label> <br/>
<label><input type="checkbox" name="useLikes" class="mb10" checked={useLikes.toOption(xml.Text(""))} />
Use likes</label> <br/>
<label><input type="checkbox" name="useRelevance" class="mb10" checked={useRelevance.toOption(xml.Text(""))} />
Use relevance</label> <br/>
<label><input type="checkbox" name="useCorrectness" class="mb10" checked={useCorrectness.toOption(xml.Text(""))} />
Use correctness</label> <br/>
<input type="submit" value="Update state" class="mb10" /> <br/>
</form>
    <span>{ if (isInitErrorsOpt.isDefined) s"Some errors occurred: `${isInitErrorsOpt.get}`" }</span></span>
}
def tryCreateDealer(searcher: SearchEngine) = for {
file <- Files.readFile(Files.currentDealer)
configs <- Try(file.parseJson.convertTo[UsingDealer])
dealer <- DealersFactory.buildDealer(configs.id, searcher)
} yield dealer
} | kell18/CAS | src/main/scala/cas/web/pages/ControlPage.scala | Scala | gpl-2.0 | 5,707 |
package de.mineformers.core.client.ui.state
import java.lang.{Integer => JInt}
/**
* IntProperty
*
* @author PaleoCrafter
*/
class IntProperty(val name: String, val defaultValue: Int = 0, range: Range = null, val priority: Int = 0) extends Property[Int] {
val allowedValues = if (range != null) range.toSeq else null
override def nameFrom[A >: Int](value: A): String = value.toString
override def parse(input: String): Int = JInt.valueOf(input).intValue()
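  // Example (values are illustrative only): new IntProperty("rows", defaultValue = 1, range = 1 to 10)
  // yields allowedValues == Seq(1, ..., 10); parse("7") returns 7.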
} | MineFormers/MFCore | src/main/scala/de/mineformers/core/client/ui/state/IntProperty.scala | Scala | mit | 471 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.crdt.japi
import java.util.concurrent.CompletionStage
import akka.actor.{ ActorRef, ActorSystem }
import com.rbmhtechnology.eventuate.crdt.Counter
import java.lang.{ Integer => JInt, Long => JLong }
/**
* Java API of a replicated [[Counter]] CRDT service.
*
* @param serviceId Unique id of this service.
* @param log Event log.
* @param system Actor system.
* @tparam A Counter value type.
*/
class CounterService[A](val serviceId: String, val log: ActorRef, implicit val system: ActorSystem)(implicit val integral: Integral[A])
extends CRDTService[Counter[A], A, A] {
import CRDTConverter._
import system._
override protected val delegate = new com.rbmhtechnology.eventuate.crdt.CounterService[A](serviceId, log)
implicit protected def c: CRDTConverter[A, A] = CRDTConverter(identity[A])
/**
* Adds `delta` (which can also be negative) to the counter identified by `id` and returns the updated counter value.
*/
def update(id: String, delta: A): CompletionStage[A] =
delegate.update(id, delta).asJava
}
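// Hypothetical usage sketch (the service id, counter id and the `eventLog`/`system` references
// are assumptions made for the example only):
//
//   val counters = CounterService.ofInt("counter-service", eventLog, system)
//   val updated: CompletionStage[Integer] = counters.update("page-views", 1)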
object CounterService {
def ofInt(serviceId: String, log: ActorRef, system: ActorSystem): CounterService[JInt] =
new CounterService[Int](serviceId, log, system).asInstanceOf[CounterService[JInt]]
def ofLong(serviceId: String, log: ActorRef, system: ActorSystem): CounterService[JLong] =
new CounterService[Long](serviceId, log, system).asInstanceOf[CounterService[JLong]]
} | ianclegg/eventuate | eventuate-crdt/src/main/scala/com/rbmhtechnology/eventuate/crdt/japi/CounterService.scala | Scala | apache-2.0 | 2,151 |
package bytecode
import sai.bytecode.Method
import sai.bytecode.instruction.{ExitPoint, Instruction}
import vm.Frame
import scala.annotation.tailrec
class BasicBlock(val method: Method, val leader: Instruction) {
def lineRange = leader.lineNumber to lastInstruction.lineNumber
override def toString: String = StringContext("", " ", "").s(method.name, lineRange.toString())
def successors: List[BasicBlock] =
for (basicBlock <- method.controlFlowGraph if successorLeaders.contains(basicBlock.leader))
yield basicBlock
def predecessors: List[BasicBlock] =
for (basicBlock <- method.controlFlowGraph if basicBlock.successors.contains(this))
yield basicBlock
def interpret(inFrame: Frame): List[Frame] = {
instructions.foldLeft(inFrame :: Nil)((frames, i) => frames.flatMap(i.interpret))
}
private lazy val lastInstruction: Instruction = {
val leaders =
for (basicBlock <- method.controlFlowGraph)
yield basicBlock.leader
@tailrec
def findLast(instruction: Instruction): Instruction = instruction match {
case ep: ExitPoint => ep
case i if i.successors.exists(leaders.contains) => i
case i =>
assert(i.successors.lengthCompare(1) == 0)
findLast(i.successors.head)
}
findLast(leader)
}
def instructions: List[Instruction] = {
val last = lastInstruction
@tailrec
def collectUntilLast(i: Instruction, instructions: List[Instruction]): List[Instruction] = {
if (i == last) instructions :+ i
else collectUntilLast(i.next, instructions :+ i)
}
collectUntilLast(leader, Nil)
}
private def successorLeaders: List[Instruction] = lastInstruction.successors
}
object BasicBlock {
def unapply(arg: BasicBlock): Option[(Method, Instruction)] = Some(arg.method, arg.leader)
} | oliverhaase/sai | src/sai/bytecode/BasicBlock.scala | Scala | mit | 1,819 |
package org.jetbrains.plugins.scala
package codeInspection.parentheses
import com.intellij.codeInspection.LocalInspectionTool
import org.jetbrains.plugins.scala.codeInspection.ScalaLightInspectionFixtureTestAdapter
/**
* Nikolay.Tropin
* 4/29/13
*/
class UnnecessaryParenthesesInspectionTest extends ScalaLightInspectionFixtureTestAdapter{
val annotation = "Unnecessary parentheses"
val hintBeginning = "Remove unnecessary parentheses"
protected def classOfInspection: Class[_ <: LocalInspectionTool] = classOf[ScalaUnnecessaryParenthesesInspection]
def test_1(): Unit = {
val selected = START + "(1 + 1)" + END
check(selected)
val text = "(<caret>1 + 1)"
val result = "1 + 1"
val hint = hintBeginning + " (1 + 1)"
testFix(text, result, hint)
}
def test_2(): Unit = {
val text = "1 + (1 * 2)"
checkTextHasNoErrors(text)
}
def test_3(): Unit = {
val selected = s"""
|def f(n: Int): Int = n match {
| case even if $START(<caret>even % 2 == 0)$END => (even + 1)
| case odd => 1 + (odd * 3)
|}
"""
check(selected)
val text = """
|def f(n: Int): Int = n match {
| case even if (<caret>even % 2 == 0) => (even + 1)
| case odd => 1 + (odd * 3)
|}
"""
val result = """
|def f(n: Int): Int = n match {
| case even if even % 2 == 0 => (even + 1)
| case odd => 1 + (odd * 3)
|}
"""
val hint = hintBeginning + " (even % 2 == 0)"
testFix(text, result, hint)
}
def test_4(): Unit = {
val selected = s"""
|def f(n: Int): Int = n match {
| case even if (even % 2 == 0) => $START(even + 1<caret>)$END
| case odd => 1 + (odd * 3)
|}
"""
check(selected)
val text = """
|def f(n: Int): Int = n match {
| case even if (even % 2 == 0) => (even + 1<caret>)
| case odd => 1 + (odd * 3)
|}
"""
val result = """
|def f(n: Int): Int = n match {
| case even if (even % 2 == 0) => even + 1
| case odd => 1 + (odd * 3)
|}
"""
val hint = hintBeginning + " (even + 1)"
testFix(text, result, hint)
}
def test_5(): Unit = {
val text = "1 :: (2 :: Nil)"
checkTextHasNoErrors(text)
}
def test_6(): Unit = {
val selected = "val a = " + START + "((<caret>(1)))" + END
check(selected)
val text = "val a = ((<caret>(1)))"
val result = "val a = 1"
val hint = hintBeginning + " (((1)))"
testFix(text, result, hint)
}
def test_7(): Unit = {
val text = """def a(x: Any): Boolean = true
|List() count (a(_))"""
checkTextHasNoErrors(text, annotation, classOf[ScalaUnnecessaryParenthesesInspection])
}
def test_8(): Unit = {
val selected = "1 to " + START +"((1, 2))" + END
check(selected)
val text = "1 to ((1, 2))"
val result = "1 to (1, 2)"
val hint = hintBeginning + " ((1, 2))"
testFix(text, result, hint)
}
def test_9(): Unit = {
val text = """(List("a")
| :+ new String("b")
| :+ new String("c")
| :+ new String("d"))"""
checkTextHasNoErrors(text)
}
}
| LPTK/intellij-scala | test/org/jetbrains/plugins/scala/codeInspection/parentheses/UnnecessaryParenthesesInspectionTest.scala | Scala | apache-2.0 | 3,566 |
package com.verisign.hio.logging
import org.slf4j.{Logger, LoggerFactory}
trait LazyLogging {
protected lazy val logger: Logger = LoggerFactory.getLogger(getClass.getName)
}
trait StrictLogging {
protected val logger: Logger = LoggerFactory.getLogger(getClass.getName)
}
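// Hypothetical usage sketch (class name is an assumption): LazyLogging defers logger creation
// until first access, while StrictLogging creates it eagerly at construction time.
//
//   class TopicReader extends LazyLogging {
//     def read(topic: String): Unit = logger.info(s"reading from $topic")
//   }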
| verisign/hio | src/main/scala/com/verisign/hio/logging/Logging.scala | Scala | apache-2.0 | 282 |
package io.udash.web.commons.views
import com.avsystem.commons._
import com.avsystem.commons.misc.AbstractCase
import io.udash._
import io.udash.bootstrap.alert.UdashAlert
import io.udash.bootstrap.utils.BootstrapImplicits._
import io.udash.bootstrap.utils.BootstrapStyles
import io.udash.web.guide.markdown.{MarkdownPage, MarkdownPageRPC}
import io.udash.web.guide.styles.MarkdownStyles
import scala.util.{Failure, Success}
trait MarkdownPageState extends State {
def page: MarkdownPage
}
final case class MarkdownModel(
content: String = "",
error: String = ""
)
object MarkdownModel extends HasModelPropertyCreator[MarkdownModel] {
implicit val blank: Blank[MarkdownModel] = Blank.Simple(apply())
}
final case class MarkdownPageViewFactory[T <: MarkdownPageState]()(
rpc: MarkdownPageRPC
) extends AbstractCase with ViewFactory[T] {
override def create(): (MarkdownView, MarkdownPresenter[T]) = {
val model: ModelProperty[MarkdownModel] = ModelProperty.blank
(new MarkdownView(model), new MarkdownPresenter[T](model, rpc))
}
}
final class MarkdownPresenter[T <: MarkdownPageState](
model: ModelProperty[MarkdownModel],
rpc: MarkdownPageRPC
) extends Presenter[T] {
override def handleState(state: T): Unit = {
model.set(MarkdownModel.blank.value)
rpc.loadContent(state.page).onCompleteNow {
case Success(rawHtml) => model.subProp(_.content).set(rawHtml)
case Failure(exception) => model.subProp(_.error).set(exception.toString)
}
}
}
final class MarkdownView(model: ReadableModelProperty[MarkdownModel]) extends View {
import io.udash.css.CssView._
import scalatags.JsDom.all._
override val getTemplate: Modifier = ISeq(
produce(model.roSubProp(_.error)) { error =>
error.opt.filter(_.nonEmpty).map(e =>
div(cls := "bootstrap")(
h1("Oops! Something went wrong :("),
          p("An error occurred while rendering your page:"),
UdashAlert(alertStyle = BootstrapStyles.Color.Danger.toProperty)(e).render
).render
).toList
},
produce(model.roSubProp(_.content)) { content =>
content.opt.filter(_.nonEmpty).map(c =>
div(MarkdownStyles.markdownPage)(raw(c)).render
).toList
}
)
}
| UdashFramework/udash-guide | commons/src/main/scala/io/udash/web/commons/views/MarkdownView.scala | Scala | gpl-3.0 | 2,233 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.action.ws2
import io.gatling.commons.validation._
import io.gatling.core.action.{ Action, ExitableAction, RequestAction }
import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.session.{ Expression, Session }
import io.gatling.core.stats.StatsEngine
import io.gatling.core.util.NameGen
import io.gatling.http.action.async.ws.WsAction
import io.gatling.http.action.ws2.fsm.{ PerformInitialConnect, WsActor }
import io.gatling.http.protocol.HttpComponents
import akka.actor.ActorSystem
import org.asynchttpclient.Request
class WsConnect(
override val requestName: Expression[String],
wsName: String,
request: Expression[Request],
connectCheckSequences: List[WsCheckSequence],
onConnected: Option[Action],
httpComponents: HttpComponents,
system: ActorSystem,
val statsEngine: StatsEngine,
configuration: GatlingConfiguration,
val next: Action
) extends RequestAction with WsAction with ExitableAction with NameGen {
override val name = genName("wsOpen")
override def sendRequest(requestName: String, session: Session): Validation[Unit] =
fetchActor(wsName, session) match {
case _: Failure =>
for {
request <- request(session)
} yield {
logger.info(s"Opening websocket '$wsName': Scenario '${session.scenario}', UserId #${session.userId}")
val wsActor = system.actorOf(WsActor.props(
wsName,
request,
requestName,
connectCheckSequences,
onConnected,
statsEngine,
httpComponents.httpEngine,
httpComponents.httpProtocol,
configuration
), genName("wsActor"))
wsActor ! PerformInitialConnect(session, next)
}
case _ =>
Failure(s"Unable to create a new WebSocket with name $wsName: Already exists")
}
}
| wiacekm/gatling | gatling-http/src/main/scala/io/gatling/http/action/ws2/WsConnect.scala | Scala | apache-2.0 | 2,633 |
package com.github.j5ik2o.dddbase.example.repository
trait SpecSupport {
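  /** Compares two collections as multisets: they are considered equal when every element occurs the
    * same number of times in both, regardless of order. Illustrative (not part of the original code):
    * `sameAs(Seq(1, 2, 2), Seq(2, 1, 2))` yields true, while `sameAs(Seq(1, 2), Seq(2, 2))` yields false.
    */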
def sameAs[A](c: Traversable[A], d: Traversable[A]): Boolean = {
def counts(e: Traversable[A]) = e groupBy identity mapValues (_.size)
counts(c) == counts(d)
}
}
| j5ik2o/scala-ddd-base-functional | example/src/test/scala/com/github/j5ik2o/dddbase/example/repository/SpecSupport.scala | Scala | mit | 250 |
/*
* BooleanObjView.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.mellite.impl.objview
import de.sciss.desktop
import de.sciss.lucre.expr.graph.Ex
import de.sciss.lucre.synth.Txn
import de.sciss.lucre.{BooleanObj, Expr, Source, Txn => LTxn}
import de.sciss.mellite.impl.ObjViewCmdLineParser
import de.sciss.mellite.impl.ObjViewCmdLineParser.collectBool
import de.sciss.mellite.impl.objview.ObjViewImpl.raphaelIcon
import de.sciss.mellite.{ObjListView, ObjView, Shapes}
import de.sciss.proc.{Code, Confluent, Universe}
import org.rogach.scallop
import javax.swing.Icon
import scala.swing.CheckBox
object BooleanObjView extends ObjListView.Factory with ProgramSupport {
type E[T <: LTxn[T]] = BooleanObj[T]
val icon : Icon = raphaelIcon(Shapes.BooleanNumber)
val prefix : String = "Boolean"
def humanName : String = prefix
def category : String = ObjView.categPrimitives
type Elem = Boolean
def tpe : Expr.Type[Elem, E] = BooleanObj
val codeType: Code.TypeT[Unit, Ex[Elem]] = Code.Program.Boolean
override protected def scallopValueConverter: scallop.ValueConverter[Elem] =
scallop.singleArgConverter(collectBool)
def mkListView[T <: Txn[T]](obj: BooleanObj[T])(implicit tx: T): ObjListView[T] = {
val ex = obj
val value = ex.value
val isEditable = Expr.isVar(ex)
val isProgram = Expr.isProgram(ex)
val isViewable = isProgram || tx.isInstanceOf[Confluent.Txn]
new Impl[T](tx.newHandle(obj), value, isListCellEditable = isEditable, isProgram = isProgram,
isViewable = isViewable).init(obj)
}
override def initMakeDialog[T <: Txn[T]](window: Option[desktop.Window])
(implicit universe: Universe[T]): MakeResult[T] = {
val ggValue = new CheckBox()
val codeValue0 = "false"
showMakeDialog(ggValue = ggValue, codeValue0 = codeValue0, prefix = prefix,
window = window)(ggValue.selected)
}
override def initMakeCmdLine[T <: Txn[T]](args: List[String])(implicit universe: Universe[T]): MakeResult[T] = {
object p extends ObjViewCmdLineParser[Config[T]](this, args) {
val const: Opt[Boolean] = opt (descr = s"Make constant instead of variable")
val value: Opt[Either[Elem, codeType.Repr]] = trailArg(descr = s"Initial $prefix value (0, 1, false, true, F, T)")
}
p.parseFut {
prepareConfig[T](p.name(), p.value(), p.const())
}
}
final class Impl[T <: Txn[T]](val objH: Source[T, BooleanObj[T]],
var value: Boolean,
override val isListCellEditable: Boolean, val isProgram: Boolean,
val isViewable: Boolean)
extends BooleanObjView[T] with ListBase[T]
with ObjListViewImpl.BooleanExprLike[T]
with ObjListViewImpl.SimpleExpr[T, Elem, E] {
}
}
trait BooleanObjView[T <: LTxn[T]] extends ObjView[T] {
type Repr = BooleanObj[T]
} | Sciss/Mellite | app/src/main/scala/de/sciss/mellite/impl/objview/BooleanObjView.scala | Scala | agpl-3.0 | 3,220 |
package org.jetbrains.plugins.scala.lang.optimize
package generated
class OptimizeImportsSimpleTest extends OptimizeImportsTestBase {
  // This class was generated by a build script, please don't change it
override def folderPath: String = super.folderPath + "simple/"
protected override def rootPath(): String = folderPath
def testSorted = doTest
def testSortedInPackage = doTest
def testTwoExpressions = doTest
def testDeleteBraces = doTest
def testDontSaveNotResolved = doTest
def testImportChainUsed = doTest
def testLanguageFeatures = doTest
def testNewLines = doTest
def testOneImport = doTest
def testScalaDoc(): Unit = doTest()
def testSCL7275(): Unit = doTest()
def testSomeTrait = doTest
def testUnusedImportChain = doTest
def testUnusedSelector = doTest
def testUsedImport = doTest
} | triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/lang/optimize/generated/OptimizeImportsSimpleTest.scala | Scala | apache-2.0 | 845 |
package domain.database
import domain.Identifiable
trait ProviderTypes { self: DriverProvider =>
import self.driver.api._
trait IdColumn[I] {
def id: Rep[I]
}
trait IdentityColumn[Id] extends IdColumn[Id] { this: IdTable[_, Id] =>
import self.driver.api._
def id: Rep[Id] = column[Id]("id", O.AutoInc, O.PrimaryKey)
}
abstract class IdTable[M, I](tag: Tag, schemaName: Option[String], tableName: String)(implicit val colType: BaseColumnType[I])
extends Table[M](tag, schemaName, tableName) with IdentityColumn[I] {
def this(tag: Tag, tableName: String)(implicit mapping: BaseColumnType[I]) = this(tag, None, tableName)
}
type EntityTable[M <: Identifiable[M]] = IdTable[M, M#Id]
}
| pnosko/staging.vita.infinita.api | src/main/scala/domain/database/ProviderTypes.scala | Scala | cc0-1.0 | 723 |
/*
* Copyright (c) 2017 Lucas Satabin
*
* Licensed under the Apache License Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package toolxit
package util
/** A non-empty list data structure. */
sealed abstract class List1[+T] {
def head: T
def ::[U >: T](u: U): List1[U] =
More(u, this)
}
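/** Companion providing a variadic constructor. Illustrative (not part of the original code):
  * `List1(1, 2, 3)` builds `More(1, More(2, One(3)))`, so the result is never empty by construction.
  */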
object List1 {
def apply[T](fst: T, rest: T*): List1[T] =
rest match {
case Seq() => One(fst)
case Seq(h, t @ _*) => More(fst, apply(h, t: _*))
}
}
/** The base case with one single element. */
final case class One[T](head: T) extends List1[T]
/** More than one element. */
final case class More[T](head: T, tail: List1[T]) extends List1[T]
| satabin/toolxit-ng | core/src/main/scala/toolxit/util/List1.scala | Scala | apache-2.0 | 1,131 |
package it.dtk.feed
import akka.actor.{ Actor, ActorLogging, Props }
import akka.event.Logging
import akka.routing.{ DefaultResizer, RoundRobinPool }
import com.sclasen.akka.kafka.StreamFSM
import it.dtk.feed.Model._
import it.dtk.feed.logic.{ FeedUtil, HttpDownloader }
import it.dtk.kafka.FeedProducerKafka
import org.json4s._
import org.json4s.jackson.JsonMethods._
import net.ceedubs.ficus.Ficus._
import scala.util._
/**
* Created by fabiofumarola on 09/08/15.
*/
object FeedProcessor {
def props(kafkaProd: FeedProducerKafka, kafkaPageProd: FeedProducerKafka, ws: HttpDownloader) =
Props(classOf[FeedProcessor], kafkaProd, kafkaPageProd, ws)
def routerProps(kafkaProd: FeedProducerKafka, kafkaPageProd: FeedProducerKafka, ws: HttpDownloader,
nrWorkers: Int, lowerBound: Int = 2, upperBound: Int = 4) =
RoundRobinPool(nrWorkers, Some(DefaultResizer(lowerBound, upperBound))).props(props(kafkaProd, kafkaPageProd, ws))
}
class FeedProcessor(kafkaProd: FeedProducerKafka,
kafkaPageProd: FeedProducerKafka,
ws: HttpDownloader) extends Actor {
val log = Logging(context.system, this)
val config = context.system.settings.config
// implicit val ec = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(2))
import context.dispatcher
implicit val formats = org.json4s.DefaultFormats ++ org.json4s.ext.JodaTimeSerializers.all
override def receive = {
case json: String =>
sender ! StreamFSM.Processed
log.info(s"got message ${json.substring(0, 50)}")
parse(json).extractOpt[Feed] match {
case Some(feed) =>
log.info(s"parsed feed ${feed.uri}")
ws.download(feed.uri) onComplete {
case Success(response) =>
val contentType = response.header("Content-Type").getOrElse("")
log.info(s"downloaded with status ${response.statusText} page ${feed.uri}")
val html = response.body
val (processedFeed, pageData) = FeedUtil.processFeedEntry(feed, html, contentType)
kafkaProd.sendSync(processedFeed)
kafkaPageProd.sendSync(pageData)
log.info(s"saved processed feed with uri ${feed.uri} to kafka")
case Failure(ex) =>
log.error(ex, s"cannot process feed with url ${feed.uri}")
}
case None =>
log.error(s"cannot process feed message $json")
}
}
override def postStop(): Unit = {
}
}
| DataToKnowledge/wheretolive-feed | feed-processor/src/main/scala/it/dtk/feed/FeedProcessor.scala | Scala | apache-2.0 | 2,508 |
class Test {
inline def foo = 1
def test = -foo
}
| som-snytt/dotty | tests/pos/i3633.scala | Scala | apache-2.0 | 54 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.Locale
import scala.collection.JavaConverters._
import org.antlr.v4.runtime.{ParserRuleContext, Token}
import org.antlr.v4.runtime.tree.TerminalNode
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.parser._
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.execution.command._
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf, VariableSubstitution}
import org.apache.spark.sql.types.StructType
/**
* Concrete parser for Spark SQL statements.
*/
class SparkSqlParser(conf: SQLConf) extends AbstractSqlParser {
val astBuilder = new SparkSqlAstBuilder(conf)
private val substitutor = new VariableSubstitution(conf)
protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
super.parse(substitutor.substitute(command))(toResult)
}
}
/**
* Builder that converts an ANTLR ParseTree into a LogicalPlan/Expression/TableIdentifier.
*/
class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
import org.apache.spark.sql.catalyst.parser.ParserUtils._
/**
* Create a [[SetCommand]] logical plan.
*
   * Note that everything after the SET keyword is assumed to be part of the
   * key-value pair. The split between key and value is made by searching for the first `=`
* character in the raw string.
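   *
   * Example SQL (illustrative; the key name below is just a placeholder for any configuration key):
   * {{{
   *   SET spark.sql.shuffle.partitions=10;
   * }}}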
*/
override def visitSetConfiguration(ctx: SetConfigurationContext): LogicalPlan = withOrigin(ctx) {
// Construct the command.
val raw = remainder(ctx.SET.getSymbol)
val keyValueSeparatorIndex = raw.indexOf('=')
if (keyValueSeparatorIndex >= 0) {
val key = raw.substring(0, keyValueSeparatorIndex).trim
val value = raw.substring(keyValueSeparatorIndex + 1).trim
SetCommand(Some(key -> Option(value)))
} else if (raw.nonEmpty) {
SetCommand(Some(raw.trim -> None))
} else {
SetCommand(None)
}
}
/**
* Create a [[ResetCommand]] logical plan.
* Example SQL :
* {{{
* RESET;
* }}}
*/
override def visitResetConfiguration(
ctx: ResetConfigurationContext): LogicalPlan = withOrigin(ctx) {
ResetCommand
}
/**
   * Create an [[AnalyzeTableCommand]], an [[AnalyzePartitionCommand]],
   * or an [[AnalyzeColumnCommand]] command.
* Example SQL for analyzing a table or a set of partitions :
* {{{
* ANALYZE TABLE [db_name.]tablename [PARTITION (partcol1[=val1], partcol2[=val2], ...)]
* COMPUTE STATISTICS [NOSCAN];
* }}}
*
* Example SQL for analyzing columns :
* {{{
* ANALYZE TABLE [db_name.]tablename COMPUTE STATISTICS FOR COLUMNS column1, column2;
* }}}
*
* Example SQL for analyzing all columns of a table:
* {{{
* ANALYZE TABLE [db_name.]tablename COMPUTE STATISTICS FOR ALL COLUMNS;
* }}}
*/
override def visitAnalyze(ctx: AnalyzeContext): LogicalPlan = withOrigin(ctx) {
def checkPartitionSpec(): Unit = {
if (ctx.partitionSpec != null) {
logWarning("Partition specification is ignored when collecting column statistics: " +
ctx.partitionSpec.getText)
}
}
if (ctx.identifier != null &&
ctx.identifier.getText.toLowerCase(Locale.ROOT) != "noscan") {
throw new ParseException(s"Expected `NOSCAN` instead of `${ctx.identifier.getText}`", ctx)
}
val table = visitTableIdentifier(ctx.tableIdentifier)
if (ctx.ALL() != null) {
checkPartitionSpec()
AnalyzeColumnCommand(table, None, allColumns = true)
} else if (ctx.identifierSeq() == null) {
if (ctx.partitionSpec != null) {
AnalyzePartitionCommand(table, visitPartitionSpec(ctx.partitionSpec),
noscan = ctx.identifier != null)
} else {
AnalyzeTableCommand(table, noscan = ctx.identifier != null)
}
} else {
checkPartitionSpec()
AnalyzeColumnCommand(table,
Option(visitIdentifierSeq(ctx.identifierSeq())), allColumns = false)
}
}
/**
* Create a [[SetDatabaseCommand]] logical plan.
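   *
   * Example SQL (illustrative; `database_name` is a placeholder):
   * {{{
   *   USE database_name;
   * }}}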
*/
override def visitUse(ctx: UseContext): LogicalPlan = withOrigin(ctx) {
SetDatabaseCommand(ctx.db.getText)
}
/**
* Create a [[ShowTablesCommand]] logical plan.
* Example SQL :
* {{{
* SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
* }}}
*/
override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
ShowTablesCommand(
Option(ctx.db).map(_.getText),
Option(ctx.pattern).map(string),
isExtended = false,
partitionSpec = None)
}
/**
* Create a [[ShowTablesCommand]] logical plan.
* Example SQL :
* {{{
* SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards'
* [PARTITION(partition_spec)];
* }}}
*/
override def visitShowTable(ctx: ShowTableContext): LogicalPlan = withOrigin(ctx) {
val partitionSpec = Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)
ShowTablesCommand(
Option(ctx.db).map(_.getText),
Option(ctx.pattern).map(string),
isExtended = true,
partitionSpec = partitionSpec)
}
/**
* Create a [[ShowDatabasesCommand]] logical plan.
* Example SQL:
* {{{
* SHOW (DATABASES|SCHEMAS) [LIKE 'identifier_with_wildcards'];
* }}}
*/
override def visitShowDatabases(ctx: ShowDatabasesContext): LogicalPlan = withOrigin(ctx) {
ShowDatabasesCommand(Option(ctx.pattern).map(string))
}
/**
* A command for users to list the properties for a table. If propertyKey is specified, the value
* for the propertyKey is returned. If propertyKey is not specified, all the keys and their
* corresponding values are returned.
* The syntax of using this command in SQL is:
* {{{
* SHOW TBLPROPERTIES table_name[('propertyKey')];
* }}}
*/
override def visitShowTblProperties(
ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
ShowTablePropertiesCommand(
visitTableIdentifier(ctx.tableIdentifier),
Option(ctx.key).map(visitTablePropertyKey))
}
/**
* A command for users to list the column names for a table.
* This function creates a [[ShowColumnsCommand]] logical plan.
*
* The syntax of using this command in SQL is:
* {{{
* SHOW COLUMNS (FROM | IN) table_identifier [(FROM | IN) database];
* }}}
*/
override def visitShowColumns(ctx: ShowColumnsContext): LogicalPlan = withOrigin(ctx) {
ShowColumnsCommand(Option(ctx.db).map(_.getText), visitTableIdentifier(ctx.tableIdentifier))
}
/**
* A command for users to list the partition names of a table. If partition spec is specified,
* partitions that match the spec are returned. Otherwise an empty result set is returned.
*
* This function creates a [[ShowPartitionsCommand]] logical plan
*
* The syntax of using this command in SQL is:
* {{{
* SHOW PARTITIONS table_identifier [partition_spec];
* }}}
*/
override def visitShowPartitions(ctx: ShowPartitionsContext): LogicalPlan = withOrigin(ctx) {
val table = visitTableIdentifier(ctx.tableIdentifier)
val partitionKeys = Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)
ShowPartitionsCommand(table, partitionKeys)
}
/**
* Creates a [[ShowCreateTableCommand]]
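   *
   * Example SQL (illustrative; names are placeholders):
   * {{{
   *   SHOW CREATE TABLE [db_name.]table_name;
   * }}}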
*/
override def visitShowCreateTable(ctx: ShowCreateTableContext): LogicalPlan = withOrigin(ctx) {
val table = visitTableIdentifier(ctx.tableIdentifier())
ShowCreateTableCommand(table)
}
/**
* Create a [[RefreshTable]] logical plan.
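   *
   * Example SQL (illustrative; names are placeholders):
   * {{{
   *   REFRESH TABLE [db_name.]table_name;
   * }}}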
*/
override def visitRefreshTable(ctx: RefreshTableContext): LogicalPlan = withOrigin(ctx) {
RefreshTable(visitTableIdentifier(ctx.tableIdentifier))
}
/**
* Create a [[RefreshResource]] logical plan.
*/
override def visitRefreshResource(ctx: RefreshResourceContext): LogicalPlan = withOrigin(ctx) {
val path = if (ctx.STRING != null) string(ctx.STRING) else extractUnquotedResourcePath(ctx)
RefreshResource(path)
}
private def extractUnquotedResourcePath(ctx: RefreshResourceContext): String = withOrigin(ctx) {
val unquotedPath = remainder(ctx.REFRESH.getSymbol).trim
validate(
unquotedPath != null && !unquotedPath.isEmpty,
"Resource paths cannot be empty in REFRESH statements. Use / to match everything",
ctx)
val forbiddenSymbols = Seq(" ", "\\n", "\\r", "\\t")
validate(
!forbiddenSymbols.exists(unquotedPath.contains(_)),
"REFRESH statements cannot contain ' ', '\\\\n', '\\\\r', '\\\\t' inside unquoted resource paths",
ctx)
unquotedPath
}
/**
* Create a [[CacheTableCommand]] logical plan.
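   *
   * Example SQL (illustrative; names are placeholders, and a database prefix is rejected for
   * CACHE TABLE ... AS SELECT):
   * {{{
   *   CACHE [LAZY] TABLE table_name [OPTIONS (key1=val1, ...)] [[AS] select_statement];
   * }}}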
*/
override def visitCacheTable(ctx: CacheTableContext): LogicalPlan = withOrigin(ctx) {
val query = Option(ctx.query).map(plan)
val tableIdent = visitTableIdentifier(ctx.tableIdentifier)
if (query.isDefined && tableIdent.database.isDefined) {
val database = tableIdent.database.get
throw new ParseException(s"It is not allowed to add database prefix `$database` to " +
s"the table name in CACHE TABLE AS SELECT", ctx)
}
val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
CacheTableCommand(tableIdent, query, ctx.LAZY != null, options)
}
/**
* Create an [[UncacheTableCommand]] logical plan.
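   *
   * Example SQL (illustrative; names are placeholders):
   * {{{
   *   UNCACHE TABLE [IF EXISTS] table_name;
   * }}}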
*/
override def visitUncacheTable(ctx: UncacheTableContext): LogicalPlan = withOrigin(ctx) {
UncacheTableCommand(visitTableIdentifier(ctx.tableIdentifier), ctx.EXISTS != null)
}
/**
* Create a [[ClearCacheCommand]] logical plan.
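   *
   * Example SQL (illustrative):
   * {{{
   *   CLEAR CACHE;
   * }}}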
*/
override def visitClearCache(ctx: ClearCacheContext): LogicalPlan = withOrigin(ctx) {
ClearCacheCommand()
}
/**
* Create an [[ExplainCommand]] logical plan.
* The syntax of using this command in SQL is:
* {{{
* EXPLAIN (EXTENDED | CODEGEN) SELECT * FROM ...
* }}}
*/
override def visitExplain(ctx: ExplainContext): LogicalPlan = withOrigin(ctx) {
if (ctx.FORMATTED != null) {
operationNotAllowed("EXPLAIN FORMATTED", ctx)
}
if (ctx.LOGICAL != null) {
operationNotAllowed("EXPLAIN LOGICAL", ctx)
}
val statement = plan(ctx.statement)
if (statement == null) {
      null // This is enough since a ParseException will be raised later.
} else if (isExplainableStatement(statement)) {
ExplainCommand(
logicalPlan = statement,
extended = ctx.EXTENDED != null,
codegen = ctx.CODEGEN != null,
cost = ctx.COST != null)
} else {
ExplainCommand(OneRowRelation())
}
}
/**
* Determine if a plan should be explained at all.
*/
protected def isExplainableStatement(plan: LogicalPlan): Boolean = plan match {
case _: DescribeTableCommand => false
case _ => true
}
/**
   * Create a [[DescribeColumnCommand]] or [[DescribeTableCommand]] logical command.
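   *
   * Example SQL (illustrative; names are placeholders):
   * {{{
   *   DESCRIBE [EXTENDED | FORMATTED] [db_name.]table_name [PARTITION (partcol1=val1, ...)] [col_name];
   * }}}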
*/
override def visitDescribeTable(ctx: DescribeTableContext): LogicalPlan = withOrigin(ctx) {
val isExtended = ctx.EXTENDED != null || ctx.FORMATTED != null
if (ctx.describeColName != null) {
if (ctx.partitionSpec != null) {
throw new ParseException("DESC TABLE COLUMN for a specific partition is not supported", ctx)
} else {
DescribeColumnCommand(
visitTableIdentifier(ctx.tableIdentifier),
ctx.describeColName.nameParts.asScala.map(_.getText),
isExtended)
}
} else {
val partitionSpec = if (ctx.partitionSpec != null) {
// According to the syntax, visitPartitionSpec returns `Map[String, Option[String]]`.
visitPartitionSpec(ctx.partitionSpec).map {
case (key, Some(value)) => key -> value
case (key, _) =>
throw new ParseException(s"PARTITION specification is incomplete: `$key`", ctx)
}
} else {
Map.empty[String, String]
}
DescribeTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
partitionSpec,
isExtended)
}
}
/**
* Type to keep track of a table header: (identifier, isTemporary, ifNotExists, isExternal).
*/
type TableHeader = (TableIdentifier, Boolean, Boolean, Boolean)
/**
* Validate a create table statement and return the [[TableIdentifier]].
*/
override def visitCreateTableHeader(
ctx: CreateTableHeaderContext): TableHeader = withOrigin(ctx) {
val temporary = ctx.TEMPORARY != null
val ifNotExists = ctx.EXISTS != null
if (temporary && ifNotExists) {
operationNotAllowed("CREATE TEMPORARY TABLE ... IF NOT EXISTS", ctx)
}
(visitTableIdentifier(ctx.tableIdentifier), temporary, ifNotExists, ctx.EXTERNAL != null)
}
/**
* Create a table, returning a [[CreateTable]] logical plan.
*
* Expected format:
* {{{
* CREATE [TEMPORARY] TABLE [IF NOT EXISTS] [db_name.]table_name
* USING table_provider
* create_table_clauses
* [[AS] select_statement];
*
* create_table_clauses (order insensitive):
* [OPTIONS table_property_list]
* [PARTITIONED BY (col_name, col_name, ...)]
* [CLUSTERED BY (col_name, col_name, ...)
* [SORTED BY (col_name [ASC|DESC], ...)]
* INTO num_buckets BUCKETS
* ]
* [LOCATION path]
* [COMMENT table_comment]
* [TBLPROPERTIES (property_name=property_value, ...)]
* }}}
*/
override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = withOrigin(ctx) {
val (table, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)
if (external) {
operationNotAllowed("CREATE EXTERNAL TABLE ... USING", ctx)
}
checkDuplicateClauses(ctx.TBLPROPERTIES, "TBLPROPERTIES", ctx)
checkDuplicateClauses(ctx.OPTIONS, "OPTIONS", ctx)
checkDuplicateClauses(ctx.PARTITIONED, "PARTITIONED BY", ctx)
checkDuplicateClauses(ctx.COMMENT, "COMMENT", ctx)
checkDuplicateClauses(ctx.bucketSpec(), "CLUSTERED BY", ctx)
checkDuplicateClauses(ctx.locationSpec, "LOCATION", ctx)
val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
val provider = ctx.tableProvider.qualifiedName.getText
val schema = Option(ctx.colTypeList()).map(createSchema)
val partitionColumnNames =
Option(ctx.partitionColumnNames)
.map(visitIdentifierList(_).toArray)
.getOrElse(Array.empty[String])
val properties = Option(ctx.tableProps).map(visitPropertyKeyValues).getOrElse(Map.empty)
val bucketSpec = ctx.bucketSpec().asScala.headOption.map(visitBucketSpec)
val location = ctx.locationSpec.asScala.headOption.map(visitLocationSpec)
val storage = DataSource.buildStorageFormatFromOptions(options)
if (location.isDefined && storage.locationUri.isDefined) {
throw new ParseException(
"LOCATION and 'path' in OPTIONS are both used to indicate the custom table path, " +
"you can only specify one of them.", ctx)
}
val customLocation = storage.locationUri.orElse(location.map(CatalogUtils.stringToURI))
val tableType = if (customLocation.isDefined) {
CatalogTableType.EXTERNAL
} else {
CatalogTableType.MANAGED
}
val tableDesc = CatalogTable(
identifier = table,
tableType = tableType,
storage = storage.copy(locationUri = customLocation),
schema = schema.getOrElse(new StructType),
provider = Some(provider),
partitionColumnNames = partitionColumnNames,
bucketSpec = bucketSpec,
properties = properties,
comment = Option(ctx.comment).map(string))
// Determine the storage mode.
val mode = if (ifNotExists) SaveMode.Ignore else SaveMode.ErrorIfExists
if (ctx.query != null) {
// Get the backing query.
val query = plan(ctx.query)
if (temp) {
operationNotAllowed("CREATE TEMPORARY TABLE ... USING ... AS query", ctx)
}
// Don't allow explicit specification of schema for CTAS
if (schema.nonEmpty) {
operationNotAllowed(
"Schema may not be specified in a Create Table As Select (CTAS) statement",
ctx)
}
CreateTable(tableDesc, mode, Some(query))
} else {
if (temp) {
if (ifNotExists) {
operationNotAllowed("CREATE TEMPORARY TABLE IF NOT EXISTS", ctx)
}
logWarning(s"CREATE TEMPORARY TABLE ... USING ... is deprecated, please use " +
"CREATE TEMPORARY VIEW ... USING ... instead")
// Unlike CREATE TEMPORARY VIEW USING, CREATE TEMPORARY TABLE USING does not support
// IF NOT EXISTS. Users are not allowed to replace the existing temp table.
CreateTempViewUsing(table, schema, replace = false, global = false, provider, options)
} else {
CreateTable(tableDesc, mode, None)
}
}
}
/**
* Creates a [[CreateTempViewUsing]] logical plan.
*/
override def visitCreateTempViewUsing(
ctx: CreateTempViewUsingContext): LogicalPlan = withOrigin(ctx) {
CreateTempViewUsing(
tableIdent = visitTableIdentifier(ctx.tableIdentifier()),
userSpecifiedSchema = Option(ctx.colTypeList()).map(createSchema),
replace = ctx.REPLACE != null,
global = ctx.GLOBAL != null,
provider = ctx.tableProvider.qualifiedName.getText,
options = Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty))
}
/**
* Create a [[LoadDataCommand]] command.
*
* For example:
* {{{
* LOAD DATA [LOCAL] INPATH 'filepath' [OVERWRITE] INTO TABLE tablename
* [PARTITION (partcol1=val1, partcol2=val2 ...)]
* }}}
*/
override def visitLoadData(ctx: LoadDataContext): LogicalPlan = withOrigin(ctx) {
LoadDataCommand(
table = visitTableIdentifier(ctx.tableIdentifier),
path = string(ctx.path),
isLocal = ctx.LOCAL != null,
isOverwrite = ctx.OVERWRITE != null,
partition = Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)
)
}
/**
* Create a [[TruncateTableCommand]] command.
*
* For example:
* {{{
* TRUNCATE TABLE tablename [PARTITION (partcol1=val1, partcol2=val2 ...)]
* }}}
*/
override def visitTruncateTable(ctx: TruncateTableContext): LogicalPlan = withOrigin(ctx) {
TruncateTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
}
/**
   * Create an [[AlterTableRecoverPartitionsCommand]] command.
*
* For example:
* {{{
* MSCK REPAIR TABLE tablename
* }}}
*/
override def visitRepairTable(ctx: RepairTableContext): LogicalPlan = withOrigin(ctx) {
AlterTableRecoverPartitionsCommand(
visitTableIdentifier(ctx.tableIdentifier),
"MSCK REPAIR TABLE")
}
/**
* Convert a table property list into a key-value map.
* This should be called through [[visitPropertyKeyValues]] or [[visitPropertyKeys]].
*/
override def visitTablePropertyList(
ctx: TablePropertyListContext): Map[String, String] = withOrigin(ctx) {
val properties = ctx.tableProperty.asScala.map { property =>
val key = visitTablePropertyKey(property.key)
val value = visitTablePropertyValue(property.value)
key -> value
}
// Check for duplicate property names.
checkDuplicateKeys(properties, ctx)
properties.toMap
}
/**
* Parse a key-value map from a [[TablePropertyListContext]], assuming all values are specified.
*/
private def visitPropertyKeyValues(ctx: TablePropertyListContext): Map[String, String] = {
val props = visitTablePropertyList(ctx)
val badKeys = props.collect { case (key, null) => key }
if (badKeys.nonEmpty) {
operationNotAllowed(
s"Values must be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
}
props
}
/**
* Parse a list of keys from a [[TablePropertyListContext]], assuming no values are specified.
*/
private def visitPropertyKeys(ctx: TablePropertyListContext): Seq[String] = {
val props = visitTablePropertyList(ctx)
val badKeys = props.filter { case (_, v) => v != null }.keys
if (badKeys.nonEmpty) {
operationNotAllowed(
s"Values should not be specified for key(s): ${badKeys.mkString("[", ",", "]")}", ctx)
}
props.keys.toSeq
}
/**
   * A table property key can either be a String or a collection of dot-separated elements. This
   * function extracts the property key based on whether it is a string literal or a table property
* identifier.
*/
override def visitTablePropertyKey(key: TablePropertyKeyContext): String = {
if (key.STRING != null) {
string(key.STRING)
} else {
key.getText
}
}
/**
   * A table property value can be a String, Integer, Boolean or Decimal. This function extracts
   * the property value based on whether it is a string, integer, boolean or decimal literal.
*/
override def visitTablePropertyValue(value: TablePropertyValueContext): String = {
if (value == null) {
null
} else if (value.STRING != null) {
string(value.STRING)
} else if (value.booleanValue != null) {
value.getText.toLowerCase(Locale.ROOT)
} else {
value.getText
}
}
/**
* Create a [[CreateDatabaseCommand]] command.
*
* For example:
* {{{
* CREATE DATABASE [IF NOT EXISTS] database_name [COMMENT database_comment]
* [LOCATION path] [WITH DBPROPERTIES (key1=val1, key2=val2, ...)]
* }}}
*/
override def visitCreateDatabase(ctx: CreateDatabaseContext): LogicalPlan = withOrigin(ctx) {
CreateDatabaseCommand(
ctx.identifier.getText,
ctx.EXISTS != null,
Option(ctx.locationSpec).map(visitLocationSpec),
Option(ctx.comment).map(string),
Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty))
}
/**
* Create an [[AlterDatabasePropertiesCommand]] command.
*
* For example:
* {{{
* ALTER (DATABASE|SCHEMA) database SET DBPROPERTIES (property_name=property_value, ...);
* }}}
*/
override def visitSetDatabaseProperties(
ctx: SetDatabasePropertiesContext): LogicalPlan = withOrigin(ctx) {
AlterDatabasePropertiesCommand(
ctx.identifier.getText,
visitPropertyKeyValues(ctx.tablePropertyList))
}
/**
* Create a [[DropDatabaseCommand]] command.
*
* For example:
* {{{
* DROP (DATABASE|SCHEMA) [IF EXISTS] database [RESTRICT|CASCADE];
* }}}
*/
override def visitDropDatabase(ctx: DropDatabaseContext): LogicalPlan = withOrigin(ctx) {
DropDatabaseCommand(ctx.identifier.getText, ctx.EXISTS != null, ctx.CASCADE != null)
}
/**
* Create a [[DescribeDatabaseCommand]] command.
*
* For example:
* {{{
* DESCRIBE DATABASE [EXTENDED] database;
* }}}
*/
override def visitDescribeDatabase(ctx: DescribeDatabaseContext): LogicalPlan = withOrigin(ctx) {
DescribeDatabaseCommand(ctx.identifier.getText, ctx.EXTENDED != null)
}
/**
* Create a plan for a DESCRIBE FUNCTION command.
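   *
   * Example SQL (illustrative; `function_name` is a placeholder):
   * {{{
   *   DESCRIBE FUNCTION [EXTENDED] function_name;
   * }}}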
*/
override def visitDescribeFunction(ctx: DescribeFunctionContext): LogicalPlan = withOrigin(ctx) {
import ctx._
val functionName =
if (describeFuncName.STRING() != null) {
FunctionIdentifier(string(describeFuncName.STRING()), database = None)
} else if (describeFuncName.qualifiedName() != null) {
visitFunctionName(describeFuncName.qualifiedName)
} else {
FunctionIdentifier(describeFuncName.getText, database = None)
}
DescribeFunctionCommand(functionName, EXTENDED != null)
}
/**
* Create a plan for a SHOW FUNCTIONS command.
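   *
   * Example SQL (illustrative; the pattern is a placeholder):
   * {{{
   *   SHOW [USER | SYSTEM | ALL] FUNCTIONS [[LIKE] 'pattern'];
   * }}}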
*/
override def visitShowFunctions(ctx: ShowFunctionsContext): LogicalPlan = withOrigin(ctx) {
import ctx._
val (user, system) = Option(ctx.identifier).map(_.getText.toLowerCase(Locale.ROOT)) match {
case None | Some("all") => (true, true)
case Some("system") => (false, true)
case Some("user") => (true, false)
case Some(x) => throw new ParseException(s"SHOW $x FUNCTIONS not supported", ctx)
}
val (db, pat) = if (qualifiedName != null) {
val name = visitFunctionName(qualifiedName)
(name.database, Some(name.funcName))
} else if (pattern != null) {
(None, Some(string(pattern)))
} else {
(None, None)
}
ShowFunctionsCommand(db, pat, user, system)
}
/**
* Create a [[CreateFunctionCommand]] command.
*
* For example:
* {{{
* CREATE [OR REPLACE] [TEMPORARY] FUNCTION [IF NOT EXISTS] [db_name.]function_name
* AS class_name [USING JAR|FILE|ARCHIVE 'file_uri' [, JAR|FILE|ARCHIVE 'file_uri']];
* }}}
*/
override def visitCreateFunction(ctx: CreateFunctionContext): LogicalPlan = withOrigin(ctx) {
val resources = ctx.resource.asScala.map { resource =>
val resourceType = resource.identifier.getText.toLowerCase(Locale.ROOT)
resourceType match {
case "jar" | "file" | "archive" =>
FunctionResource(FunctionResourceType.fromString(resourceType), string(resource.STRING))
case other =>
operationNotAllowed(s"CREATE FUNCTION with resource type '$resourceType'", ctx)
}
}
// Extract database, name & alias.
val functionIdentifier = visitFunctionName(ctx.qualifiedName)
CreateFunctionCommand(
functionIdentifier.database,
functionIdentifier.funcName,
string(ctx.className),
resources,
ctx.TEMPORARY != null,
ctx.EXISTS != null,
ctx.REPLACE != null)
}
/**
* Create a [[DropFunctionCommand]] command.
*
* For example:
* {{{
* DROP [TEMPORARY] FUNCTION [IF EXISTS] function;
* }}}
*/
override def visitDropFunction(ctx: DropFunctionContext): LogicalPlan = withOrigin(ctx) {
val functionIdentifier = visitFunctionName(ctx.qualifiedName)
DropFunctionCommand(
functionIdentifier.database,
functionIdentifier.funcName,
ctx.EXISTS != null,
ctx.TEMPORARY != null)
}
/**
* Create a [[DropTableCommand]] command.
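   *
   * Example SQL (illustrative; names are placeholders):
   * {{{
   *   DROP TABLE [IF EXISTS] [db_name.]table_name [PURGE];
   *   DROP VIEW [IF EXISTS] [db_name.]view_name;
   * }}}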
*/
override def visitDropTable(ctx: DropTableContext): LogicalPlan = withOrigin(ctx) {
DropTableCommand(
visitTableIdentifier(ctx.tableIdentifier),
ctx.EXISTS != null,
ctx.VIEW != null,
ctx.PURGE != null)
}
/**
   * Create an [[AlterTableRenameCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table1 RENAME TO table2;
* ALTER VIEW view1 RENAME TO view2;
* }}}
*/
override def visitRenameTable(ctx: RenameTableContext): LogicalPlan = withOrigin(ctx) {
AlterTableRenameCommand(
visitTableIdentifier(ctx.from),
visitTableIdentifier(ctx.to),
ctx.VIEW != null)
}
/**
   * Create an [[AlterTableAddColumnsCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table1
* ADD COLUMNS (col_name data_type [COMMENT col_comment], ...);
* }}}
*/
override def visitAddTableColumns(ctx: AddTableColumnsContext): LogicalPlan = withOrigin(ctx) {
AlterTableAddColumnsCommand(
visitTableIdentifier(ctx.tableIdentifier),
visitColTypeList(ctx.columns)
)
}
/**
* Create an [[AlterTableSetPropertiesCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table SET TBLPROPERTIES ('comment' = new_comment);
* ALTER VIEW view SET TBLPROPERTIES ('comment' = new_comment);
* }}}
*/
override def visitSetTableProperties(
ctx: SetTablePropertiesContext): LogicalPlan = withOrigin(ctx) {
AlterTableSetPropertiesCommand(
visitTableIdentifier(ctx.tableIdentifier),
visitPropertyKeyValues(ctx.tablePropertyList),
ctx.VIEW != null)
}
/**
* Create an [[AlterTableUnsetPropertiesCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
* ALTER VIEW view UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
* }}}
*/
override def visitUnsetTableProperties(
ctx: UnsetTablePropertiesContext): LogicalPlan = withOrigin(ctx) {
AlterTableUnsetPropertiesCommand(
visitTableIdentifier(ctx.tableIdentifier),
visitPropertyKeys(ctx.tablePropertyList),
ctx.EXISTS != null,
ctx.VIEW != null)
}
/**
* Create an [[AlterTableSerDePropertiesCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table [PARTITION spec] SET SERDE serde_name [WITH SERDEPROPERTIES props];
* ALTER TABLE table [PARTITION spec] SET SERDEPROPERTIES serde_properties;
* }}}
*/
override def visitSetTableSerDe(ctx: SetTableSerDeContext): LogicalPlan = withOrigin(ctx) {
AlterTableSerDePropertiesCommand(
visitTableIdentifier(ctx.tableIdentifier),
Option(ctx.STRING).map(string),
Option(ctx.tablePropertyList).map(visitPropertyKeyValues),
// TODO a partition spec is allowed to have optional values. This is currently violated.
Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
}
/**
* Create an [[AlterTableAddPartitionCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table ADD [IF NOT EXISTS] PARTITION spec [LOCATION 'loc1']
* ALTER VIEW view ADD [IF NOT EXISTS] PARTITION spec
* }}}
*
* ALTER VIEW ... ADD PARTITION ... is not supported because the concept of partitioning
   * is associated with physical tables.
*/
override def visitAddTablePartition(
ctx: AddTablePartitionContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
operationNotAllowed("ALTER VIEW ... ADD PARTITION", ctx)
}
// Create partition spec to location mapping.
val specsAndLocs = if (ctx.partitionSpec.isEmpty) {
ctx.partitionSpecLocation.asScala.map {
splCtx =>
val spec = visitNonOptionalPartitionSpec(splCtx.partitionSpec)
val location = Option(splCtx.locationSpec).map(visitLocationSpec)
spec -> location
}
} else {
// Alter View: the location clauses are not allowed.
ctx.partitionSpec.asScala.map(visitNonOptionalPartitionSpec(_) -> None)
}
AlterTableAddPartitionCommand(
visitTableIdentifier(ctx.tableIdentifier),
specsAndLocs,
ctx.EXISTS != null)
}
/**
* Create an [[AlterTableRenamePartitionCommand]] command
*
* For example:
* {{{
* ALTER TABLE table PARTITION spec1 RENAME TO PARTITION spec2;
* }}}
*/
override def visitRenameTablePartition(
ctx: RenameTablePartitionContext): LogicalPlan = withOrigin(ctx) {
AlterTableRenamePartitionCommand(
visitTableIdentifier(ctx.tableIdentifier),
visitNonOptionalPartitionSpec(ctx.from),
visitNonOptionalPartitionSpec(ctx.to))
}
/**
* Create an [[AlterTableDropPartitionCommand]] command
*
* For example:
* {{{
* ALTER TABLE table DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] [PURGE];
* ALTER VIEW view DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...];
* }}}
*
* ALTER VIEW ... DROP PARTITION ... is not supported because the concept of partitioning
   * is associated with physical tables.
*/
override def visitDropTablePartitions(
ctx: DropTablePartitionsContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) {
operationNotAllowed("ALTER VIEW ... DROP PARTITION", ctx)
}
AlterTableDropPartitionCommand(
visitTableIdentifier(ctx.tableIdentifier),
ctx.partitionSpec.asScala.map(visitNonOptionalPartitionSpec),
ifExists = ctx.EXISTS != null,
purge = ctx.PURGE != null,
retainData = false)
}
/**
* Create an [[AlterTableRecoverPartitionsCommand]] command
*
* For example:
* {{{
* ALTER TABLE table RECOVER PARTITIONS;
* }}}
*/
override def visitRecoverPartitions(
ctx: RecoverPartitionsContext): LogicalPlan = withOrigin(ctx) {
AlterTableRecoverPartitionsCommand(visitTableIdentifier(ctx.tableIdentifier))
}
/**
* Create an [[AlterTableSetLocationCommand]] command
*
* For example:
* {{{
* ALTER TABLE table [PARTITION spec] SET LOCATION "loc";
* }}}
*/
override def visitSetTableLocation(ctx: SetTableLocationContext): LogicalPlan = withOrigin(ctx) {
AlterTableSetLocationCommand(
visitTableIdentifier(ctx.tableIdentifier),
Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec),
visitLocationSpec(ctx.locationSpec))
}
/**
   * Create an [[AlterTableChangeColumnCommand]] command.
*
* For example:
* {{{
* ALTER TABLE table [PARTITION partition_spec]
* CHANGE [COLUMN] column_old_name column_new_name column_dataType [COMMENT column_comment]
* [FIRST | AFTER column_name];
* }}}
*/
override def visitChangeColumn(ctx: ChangeColumnContext): LogicalPlan = withOrigin(ctx) {
if (ctx.partitionSpec != null) {
operationNotAllowed("ALTER TABLE table PARTITION partition_spec CHANGE COLUMN", ctx)
}
if (ctx.colPosition != null) {
operationNotAllowed(
"ALTER TABLE table [PARTITION partition_spec] CHANGE COLUMN ... FIRST | AFTER otherCol",
ctx)
}
AlterTableChangeColumnCommand(
tableName = visitTableIdentifier(ctx.tableIdentifier),
columnName = ctx.identifier.getText,
newColumn = visitColType(ctx.colType))
}
/**
* Create location string.
*/
override def visitLocationSpec(ctx: LocationSpecContext): String = withOrigin(ctx) {
string(ctx.STRING)
}
/**
* Create a [[BucketSpec]].
*/
override def visitBucketSpec(ctx: BucketSpecContext): BucketSpec = withOrigin(ctx) {
BucketSpec(
ctx.INTEGER_VALUE.getText.toInt,
visitIdentifierList(ctx.identifierList),
Option(ctx.orderedIdentifierList)
.toSeq
.flatMap(_.orderedIdentifier.asScala)
.map { orderedIdCtx =>
Option(orderedIdCtx.ordering).map(_.getText).foreach { dir =>
if (dir.toLowerCase(Locale.ROOT) != "asc") {
operationNotAllowed(s"Column ordering must be ASC, was '$dir'", ctx)
}
}
orderedIdCtx.identifier.getText
})
}
/**
* Convert a nested constants list into a sequence of string sequences.
*/
override def visitNestedConstantList(
ctx: NestedConstantListContext): Seq[Seq[String]] = withOrigin(ctx) {
ctx.constantList.asScala.map(visitConstantList)
}
/**
* Convert a constants list into a String sequence.
*/
override def visitConstantList(ctx: ConstantListContext): Seq[String] = withOrigin(ctx) {
ctx.constant.asScala.map(visitStringConstant)
}
/**
* Fail an unsupported Hive native command.
*/
override def visitFailNativeCommand(
ctx: FailNativeCommandContext): LogicalPlan = withOrigin(ctx) {
val keywords = if (ctx.unsupportedHiveNativeCommands != null) {
ctx.unsupportedHiveNativeCommands.children.asScala.collect {
case n: TerminalNode => n.getText
}.mkString(" ")
} else {
// SET ROLE is the exception to the rule, because we handle this before other SET commands.
"SET ROLE"
}
operationNotAllowed(keywords, ctx)
}
/**
   * Create an [[AddFileCommand]], [[AddJarCommand]], [[ListFilesCommand]] or [[ListJarsCommand]]
* command depending on the requested operation on resources.
* Expected format:
* {{{
* ADD (FILE[S] <filepath ...> | JAR[S] <jarpath ...>)
* LIST (FILE[S] [filepath ...] | JAR[S] [jarpath ...])
* }}}
*/
override def visitManageResource(ctx: ManageResourceContext): LogicalPlan = withOrigin(ctx) {
    val maybePaths = remainder(ctx.identifier).trim
ctx.op.getType match {
case SqlBaseParser.ADD =>
ctx.identifier.getText.toLowerCase(Locale.ROOT) match {
case "file" => AddFileCommand(mayebePaths)
case "jar" => AddJarCommand(mayebePaths)
case other => operationNotAllowed(s"ADD with resource type '$other'", ctx)
}
case SqlBaseParser.LIST =>
ctx.identifier.getText.toLowerCase(Locale.ROOT) match {
case "files" | "file" =>
            if (maybePaths.length > 0) {
              ListFilesCommand(maybePaths.split("\\\\s+"))
} else {
ListFilesCommand()
}
case "jars" | "jar" =>
            if (maybePaths.length > 0) {
              ListJarsCommand(maybePaths.split("\\\\s+"))
} else {
ListJarsCommand()
}
case other => operationNotAllowed(s"LIST with resource type '$other'", ctx)
}
case _ => operationNotAllowed(s"Other types of operation on resources", ctx)
}
}
/**
* Create a Hive serde table, returning a [[CreateTable]] logical plan.
*
* This is a legacy syntax for Hive compatibility, we recommend users to use the Spark SQL
* CREATE TABLE syntax to create Hive serde table, e.g. "CREATE TABLE ... USING hive ..."
*
* Note: several features are currently not supported - temporary tables, bucketing,
* skewed columns and storage handlers (STORED BY).
*
* Expected format:
* {{{
* CREATE [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name
* [(col1[:] data_type [COMMENT col_comment], ...)]
* create_table_clauses
* [AS select_statement];
*
* create_table_clauses (order insensitive):
* [COMMENT table_comment]
* [PARTITIONED BY (col2[:] data_type [COMMENT col_comment], ...)]
* [ROW FORMAT row_format]
* [STORED AS file_format]
* [LOCATION path]
* [TBLPROPERTIES (property_name=property_value, ...)]
* }}}
*/
override def visitCreateHiveTable(ctx: CreateHiveTableContext): LogicalPlan = withOrigin(ctx) {
val (name, temp, ifNotExists, external) = visitCreateTableHeader(ctx.createTableHeader)
// TODO: implement temporary tables
if (temp) {
throw new ParseException(
"CREATE TEMPORARY TABLE is not supported yet. " +
"Please use CREATE TEMPORARY VIEW as an alternative.", ctx)
}
if (ctx.skewSpec.size > 0) {
operationNotAllowed("CREATE TABLE ... SKEWED BY", ctx)
}
checkDuplicateClauses(ctx.TBLPROPERTIES, "TBLPROPERTIES", ctx)
checkDuplicateClauses(ctx.PARTITIONED, "PARTITIONED BY", ctx)
checkDuplicateClauses(ctx.COMMENT, "COMMENT", ctx)
checkDuplicateClauses(ctx.bucketSpec(), "CLUSTERED BY", ctx)
checkDuplicateClauses(ctx.createFileFormat, "STORED AS/BY", ctx)
checkDuplicateClauses(ctx.rowFormat, "ROW FORMAT", ctx)
checkDuplicateClauses(ctx.locationSpec, "LOCATION", ctx)
val dataCols = Option(ctx.columns).map(visitColTypeList).getOrElse(Nil)
val partitionCols = Option(ctx.partitionColumns).map(visitColTypeList).getOrElse(Nil)
val properties = Option(ctx.tableProps).map(visitPropertyKeyValues).getOrElse(Map.empty)
val selectQuery = Option(ctx.query).map(plan)
val bucketSpec = ctx.bucketSpec().asScala.headOption.map(visitBucketSpec)
// Note: Hive requires partition columns to be distinct from the schema, so we need
// to include the partition columns here explicitly
val schema = StructType(dataCols ++ partitionCols)
// Storage format
val defaultStorage = HiveSerDe.getDefaultStorage(conf)
validateRowFormatFileFormat(ctx.rowFormat.asScala, ctx.createFileFormat.asScala, ctx)
val fileStorage = ctx.createFileFormat.asScala.headOption.map(visitCreateFileFormat)
.getOrElse(CatalogStorageFormat.empty)
val rowStorage = ctx.rowFormat.asScala.headOption.map(visitRowFormat)
.getOrElse(CatalogStorageFormat.empty)
val location = ctx.locationSpec.asScala.headOption.map(visitLocationSpec)
// If we are creating an EXTERNAL table, then the LOCATION field is required
if (external && location.isEmpty) {
operationNotAllowed("CREATE EXTERNAL TABLE must be accompanied by LOCATION", ctx)
}
val locUri = location.map(CatalogUtils.stringToURI(_))
val storage = CatalogStorageFormat(
locationUri = locUri,
inputFormat = fileStorage.inputFormat.orElse(defaultStorage.inputFormat),
outputFormat = fileStorage.outputFormat.orElse(defaultStorage.outputFormat),
serde = rowStorage.serde.orElse(fileStorage.serde).orElse(defaultStorage.serde),
compressed = false,
properties = rowStorage.properties ++ fileStorage.properties)
// If location is defined, we'll assume this is an external table.
// Otherwise, we may accidentally delete existing data.
val tableType = if (external || location.isDefined) {
CatalogTableType.EXTERNAL
} else {
CatalogTableType.MANAGED
}
// TODO support the sql text - have a proper location for this!
val tableDesc = CatalogTable(
identifier = name,
tableType = tableType,
storage = storage,
schema = schema,
bucketSpec = bucketSpec,
provider = Some(DDLUtils.HIVE_PROVIDER),
partitionColumnNames = partitionCols.map(_.name),
properties = properties,
comment = Option(ctx.comment).map(string))
val mode = if (ifNotExists) SaveMode.Ignore else SaveMode.ErrorIfExists
selectQuery match {
case Some(q) =>
// Don't allow explicit specification of schema for CTAS.
if (dataCols.nonEmpty) {
operationNotAllowed(
"Schema may not be specified in a Create Table As Select (CTAS) statement",
ctx)
}
// When creating partitioned table with CTAS statement, we can't specify data type for the
// partition columns.
if (partitionCols.nonEmpty) {
val errorMessage = "Create Partitioned Table As Select cannot specify data type for " +
"the partition columns of the target table."
operationNotAllowed(errorMessage, ctx)
}
// Hive CTAS supports dynamic partition by specifying partition column names.
val partitionColumnNames =
Option(ctx.partitionColumnNames)
.map(visitIdentifierList(_).toArray)
.getOrElse(Array.empty[String])
val tableDescWithPartitionColNames =
tableDesc.copy(partitionColumnNames = partitionColumnNames)
val hasStorageProperties = (ctx.createFileFormat.size != 0) || (ctx.rowFormat.size != 0)
if (conf.convertCTAS && !hasStorageProperties) {
          // Here, both rowStorage.serdeProperties and fileStorage.serdeProperties
// are empty Maps.
val newTableDesc = tableDescWithPartitionColNames.copy(
storage = CatalogStorageFormat.empty.copy(locationUri = locUri),
provider = Some(conf.defaultDataSourceName))
CreateTable(newTableDesc, mode, Some(q))
} else {
CreateTable(tableDescWithPartitionColNames, mode, Some(q))
}
case None => CreateTable(tableDesc, mode, None)
}
}
/**
* Create a [[CreateTableLikeCommand]] command.
*
* For example:
* {{{
* CREATE TABLE [IF NOT EXISTS] [db_name.]table_name
* LIKE [other_db_name.]existing_table_name [locationSpec]
* }}}
*/
override def visitCreateTableLike(ctx: CreateTableLikeContext): LogicalPlan = withOrigin(ctx) {
val targetTable = visitTableIdentifier(ctx.target)
val sourceTable = visitTableIdentifier(ctx.source)
val location = Option(ctx.locationSpec).map(visitLocationSpec)
CreateTableLikeCommand(targetTable, sourceTable, location, ctx.EXISTS != null)
}
/**
* Create a [[CatalogStorageFormat]] for creating tables.
*
* Format: STORED AS ...
*/
override def visitCreateFileFormat(
ctx: CreateFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
(ctx.fileFormat, ctx.storageHandler) match {
// Expected format: INPUTFORMAT input_format OUTPUTFORMAT output_format
case (c: TableFileFormatContext, null) =>
visitTableFileFormat(c)
// Expected format: SEQUENCEFILE | TEXTFILE | RCFILE | ORC | PARQUET | AVRO
case (c: GenericFileFormatContext, null) =>
visitGenericFileFormat(c)
case (null, storageHandler) =>
operationNotAllowed("STORED BY", ctx)
case _ =>
throw new ParseException("Expected either STORED AS or STORED BY, not both", ctx)
}
}
/**
* Create a [[CatalogStorageFormat]].
*/
override def visitTableFileFormat(
ctx: TableFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
CatalogStorageFormat.empty.copy(
inputFormat = Option(string(ctx.inFmt)),
outputFormat = Option(string(ctx.outFmt)))
}
/**
* Resolve a [[HiveSerDe]] based on the name given and return it as a [[CatalogStorageFormat]].
*/
override def visitGenericFileFormat(
ctx: GenericFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
val source = ctx.identifier.getText
HiveSerDe.sourceToSerDe(source) match {
case Some(s) =>
CatalogStorageFormat.empty.copy(
inputFormat = s.inputFormat,
outputFormat = s.outputFormat,
serde = s.serde)
case None =>
operationNotAllowed(s"STORED AS with file format '$source'", ctx)
}
}
/**
* Create a [[CatalogStorageFormat]] used for creating tables.
*
* Example format:
* {{{
* SERDE serde_name [WITH SERDEPROPERTIES (k1=v1, k2=v2, ...)]
* }}}
*
* OR
*
* {{{
* DELIMITED [FIELDS TERMINATED BY char [ESCAPED BY char]]
* [COLLECTION ITEMS TERMINATED BY char]
* [MAP KEYS TERMINATED BY char]
* [LINES TERMINATED BY char]
* [NULL DEFINED AS char]
* }}}
*/
private def visitRowFormat(ctx: RowFormatContext): CatalogStorageFormat = withOrigin(ctx) {
ctx match {
case serde: RowFormatSerdeContext => visitRowFormatSerde(serde)
case delimited: RowFormatDelimitedContext => visitRowFormatDelimited(delimited)
}
}
/**
* Create SERDE row format name and properties pair.
*/
override def visitRowFormatSerde(
ctx: RowFormatSerdeContext): CatalogStorageFormat = withOrigin(ctx) {
import ctx._
CatalogStorageFormat.empty.copy(
serde = Option(string(name)),
properties = Option(tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty))
}
/**
* Create a delimited row format properties object.
*/
override def visitRowFormatDelimited(
ctx: RowFormatDelimitedContext): CatalogStorageFormat = withOrigin(ctx) {
// Collect the entries if any.
def entry(key: String, value: Token): Seq[(String, String)] = {
Option(value).toSeq.map(x => key -> string(x))
}
// TODO we need proper support for the NULL format.
val entries =
entry("field.delim", ctx.fieldsTerminatedBy) ++
entry("serialization.format", ctx.fieldsTerminatedBy) ++
entry("escape.delim", ctx.escapedBy) ++
// The following typo is inherited from Hive...
entry("colelction.delim", ctx.collectionItemsTerminatedBy) ++
entry("mapkey.delim", ctx.keysTerminatedBy) ++
Option(ctx.linesSeparatedBy).toSeq.map { token =>
val value = string(token)
validate(
value == "\\n",
s"LINES TERMINATED BY only supports newline '\\\\n' right now: $value",
ctx)
"line.delim" -> value
}
CatalogStorageFormat.empty.copy(properties = entries.toMap)
}
/**
* Throw a [[ParseException]] if the user specified incompatible SerDes through ROW FORMAT
* and STORED AS.
*
* The following are allowed. Anything else is not:
* ROW FORMAT SERDE ... STORED AS [SEQUENCEFILE | RCFILE | TEXTFILE]
* ROW FORMAT DELIMITED ... STORED AS TEXTFILE
* ROW FORMAT ... STORED AS INPUTFORMAT ... OUTPUTFORMAT ...
*/
private def validateRowFormatFileFormat(
rowFormatCtx: RowFormatContext,
createFileFormatCtx: CreateFileFormatContext,
parentCtx: ParserRuleContext): Unit = {
if (rowFormatCtx == null || createFileFormatCtx == null) {
return
}
(rowFormatCtx, createFileFormatCtx.fileFormat) match {
case (_, ffTable: TableFileFormatContext) => // OK
case (rfSerde: RowFormatSerdeContext, ffGeneric: GenericFileFormatContext) =>
ffGeneric.identifier.getText.toLowerCase(Locale.ROOT) match {
case ("sequencefile" | "textfile" | "rcfile") => // OK
case fmt =>
operationNotAllowed(
s"ROW FORMAT SERDE is incompatible with format '$fmt', which also specifies a serde",
parentCtx)
}
case (rfDelimited: RowFormatDelimitedContext, ffGeneric: GenericFileFormatContext) =>
ffGeneric.identifier.getText.toLowerCase(Locale.ROOT) match {
case "textfile" => // OK
case fmt => operationNotAllowed(
s"ROW FORMAT DELIMITED is only compatible with 'textfile', not '$fmt'", parentCtx)
}
case _ =>
// should never happen
def str(ctx: ParserRuleContext): String = {
(0 until ctx.getChildCount).map { i => ctx.getChild(i).getText }.mkString(" ")
}
operationNotAllowed(
s"Unexpected combination of ${str(rowFormatCtx)} and ${str(createFileFormatCtx)}",
parentCtx)
}
}
private def validateRowFormatFileFormat(
rowFormatCtx: Seq[RowFormatContext],
createFileFormatCtx: Seq[CreateFileFormatContext],
parentCtx: ParserRuleContext): Unit = {
if (rowFormatCtx.size == 1 && createFileFormatCtx.size == 1) {
validateRowFormatFileFormat(rowFormatCtx.head, createFileFormatCtx.head, parentCtx)
}
}
/**
* Create or replace a view. This creates a [[CreateViewCommand]] command.
*
* For example:
* {{{
* CREATE [OR REPLACE] [[GLOBAL] TEMPORARY] VIEW [IF NOT EXISTS] [db_name.]view_name
* [(column_name [COMMENT column_comment], ...) ]
* [COMMENT view_comment]
* [TBLPROPERTIES (property_name = property_value, ...)]
* AS SELECT ...;
* }}}
*/
override def visitCreateView(ctx: CreateViewContext): LogicalPlan = withOrigin(ctx) {
if (ctx.identifierList != null) {
operationNotAllowed("CREATE VIEW ... PARTITIONED ON", ctx)
} else {
// CREATE VIEW ... AS INSERT INTO is not allowed.
ctx.query.queryNoWith match {
case s: SingleInsertQueryContext if s.insertInto != null =>
operationNotAllowed("CREATE VIEW ... AS INSERT INTO", ctx)
case _: MultiInsertQueryContext =>
operationNotAllowed("CREATE VIEW ... AS FROM ... [INSERT INTO ...]+", ctx)
case _ => // OK
}
val userSpecifiedColumns = Option(ctx.identifierCommentList).toSeq.flatMap { icl =>
icl.identifierComment.asScala.map { ic =>
ic.identifier.getText -> Option(ic.STRING).map(string)
}
}
val viewType = if (ctx.TEMPORARY == null) {
PersistedView
} else if (ctx.GLOBAL != null) {
GlobalTempView
} else {
LocalTempView
}
CreateViewCommand(
name = visitTableIdentifier(ctx.tableIdentifier),
userSpecifiedColumns = userSpecifiedColumns,
comment = Option(ctx.STRING).map(string),
properties = Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty),
originalText = Option(source(ctx.query)),
child = plan(ctx.query),
allowExisting = ctx.EXISTS != null,
replace = ctx.REPLACE != null,
viewType = viewType)
}
}
/**
   * Alter the query of a view. This creates an [[AlterViewAsCommand]] command.
*
* For example:
* {{{
* ALTER VIEW [db_name.]view_name AS SELECT ...;
* }}}
*/
override def visitAlterViewQuery(ctx: AlterViewQueryContext): LogicalPlan = withOrigin(ctx) {
// ALTER VIEW ... AS INSERT INTO is not allowed.
ctx.query.queryNoWith match {
case s: SingleInsertQueryContext if s.insertInto != null =>
operationNotAllowed("ALTER VIEW ... AS INSERT INTO", ctx)
case _: MultiInsertQueryContext =>
operationNotAllowed("ALTER VIEW ... AS FROM ... [INSERT INTO ...]+", ctx)
case _ => // OK
}
AlterViewAsCommand(
name = visitTableIdentifier(ctx.tableIdentifier),
originalText = source(ctx.query),
query = plan(ctx.query))
}
/**
* Create a [[ScriptInputOutputSchema]].
*/
override protected def withScriptIOSchema(
ctx: QuerySpecificationContext,
inRowFormat: RowFormatContext,
recordWriter: Token,
outRowFormat: RowFormatContext,
recordReader: Token,
schemaLess: Boolean): ScriptInputOutputSchema = {
if (recordWriter != null || recordReader != null) {
      // Script transforms with user-specified record reader/writer classes are not supported.
      throw new ParseException(
        "Unsupported operation: User defined record reader/writer classes.", ctx)
}
    // Decode an input/output format.
type Format = (Seq[(String, String)], Option[String], Seq[(String, String)], Option[String])
def format(
fmt: RowFormatContext,
configKey: String,
defaultConfigValue: String): Format = fmt match {
case c: RowFormatDelimitedContext =>
// TODO we should use the visitRowFormatDelimited function here. However HiveScriptIOSchema
// expects a seq of pairs in which the old parsers' token names are used as keys.
// Transforming the result of visitRowFormatDelimited would be quite a bit messier than
// retrieving the key value pairs ourselves.
def entry(key: String, value: Token): Seq[(String, String)] = {
Option(value).map(t => key -> t.getText).toSeq
}
val entries = entry("TOK_TABLEROWFORMATFIELD", c.fieldsTerminatedBy) ++
entry("TOK_TABLEROWFORMATCOLLITEMS", c.collectionItemsTerminatedBy) ++
entry("TOK_TABLEROWFORMATMAPKEYS", c.keysTerminatedBy) ++
entry("TOK_TABLEROWFORMATLINES", c.linesSeparatedBy) ++
entry("TOK_TABLEROWFORMATNULL", c.nullDefinedAs)
(entries, None, Seq.empty, None)
case c: RowFormatSerdeContext =>
// Use a serde format.
val CatalogStorageFormat(None, None, None, Some(name), _, props) = visitRowFormatSerde(c)
// SPARK-10310: Special cases LazySimpleSerDe
val recordHandler = if (name == "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe") {
Option(conf.getConfString(configKey, defaultConfigValue))
} else {
None
}
(Seq.empty, Option(name), props.toSeq, recordHandler)
case null =>
// Use default (serde) format.
val name = conf.getConfString("hive.script.serde",
"org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe")
val props = Seq("field.delim" -> "\\t")
val recordHandler = Option(conf.getConfString(configKey, defaultConfigValue))
(Nil, Option(name), props, recordHandler)
}
val (inFormat, inSerdeClass, inSerdeProps, reader) =
format(
inRowFormat, "hive.script.recordreader", "org.apache.hadoop.hive.ql.exec.TextRecordReader")
val (outFormat, outSerdeClass, outSerdeProps, writer) =
format(
outRowFormat, "hive.script.recordwriter",
"org.apache.hadoop.hive.ql.exec.TextRecordWriter")
ScriptInputOutputSchema(
inFormat, outFormat,
inSerdeClass, outSerdeClass,
inSerdeProps, outSerdeProps,
reader, writer,
schemaLess)
}
/**
* Create a clause for DISTRIBUTE BY.
*/
override protected def withRepartitionByExpression(
ctx: QueryOrganizationContext,
expressions: Seq[Expression],
query: LogicalPlan): LogicalPlan = {
RepartitionByExpression(expressions, query, conf.numShufflePartitions)
}
/**
* Return the parameters for [[InsertIntoDir]] logical plan.
*
* Expected format:
* {{{
* INSERT OVERWRITE DIRECTORY
* [path]
* [OPTIONS table_property_list]
* select_statement;
* }}}
*/
override def visitInsertOverwriteDir(
ctx: InsertOverwriteDirContext): InsertDirParams = withOrigin(ctx) {
if (ctx.LOCAL != null) {
throw new ParseException(
"LOCAL is not supported in INSERT OVERWRITE DIRECTORY to data source", ctx)
}
val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
var storage = DataSource.buildStorageFormatFromOptions(options)
val path = Option(ctx.path).map(string).getOrElse("")
if (!(path.isEmpty ^ storage.locationUri.isEmpty)) {
      throw new ParseException(
        "Either the directory path or 'path' in OPTIONS should be specified, but not both", ctx)
}
if (!path.isEmpty) {
val customLocation = Some(CatalogUtils.stringToURI(path))
storage = storage.copy(locationUri = customLocation)
}
val provider = ctx.tableProvider.qualifiedName.getText
(false, storage, Some(provider))
}
/**
* Return the parameters for [[InsertIntoDir]] logical plan.
*
* Expected format:
* {{{
* INSERT OVERWRITE [LOCAL] DIRECTORY
* path
* [ROW FORMAT row_format]
* [STORED AS file_format]
* select_statement;
* }}}
*/
override def visitInsertOverwriteHiveDir(
ctx: InsertOverwriteHiveDirContext): InsertDirParams = withOrigin(ctx) {
validateRowFormatFileFormat(ctx.rowFormat, ctx.createFileFormat, ctx)
val rowStorage = Option(ctx.rowFormat).map(visitRowFormat)
.getOrElse(CatalogStorageFormat.empty)
val fileStorage = Option(ctx.createFileFormat).map(visitCreateFileFormat)
.getOrElse(CatalogStorageFormat.empty)
val path = string(ctx.path)
// The path field is required
if (path.isEmpty) {
operationNotAllowed("INSERT OVERWRITE DIRECTORY must be accompanied by path", ctx)
}
val defaultStorage = HiveSerDe.getDefaultStorage(conf)
val storage = CatalogStorageFormat(
locationUri = Some(CatalogUtils.stringToURI(path)),
inputFormat = fileStorage.inputFormat.orElse(defaultStorage.inputFormat),
outputFormat = fileStorage.outputFormat.orElse(defaultStorage.outputFormat),
serde = rowStorage.serde.orElse(fileStorage.serde).orElse(defaultStorage.serde),
compressed = false,
properties = rowStorage.properties ++ fileStorage.properties)
(ctx.LOCAL != null, storage, Some(DDLUtils.HIVE_PROVIDER))
}
}
| WindCanDie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | Scala | apache-2.0 | 59,254 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package synthesis.disambiguation
import datagen.GrammarDataGen
import synthesis.Solution
import evaluators.DefaultEvaluator
import purescala.Expressions._
import purescala.ExprOps
import purescala.Types._
import purescala.Common.Identifier
import purescala.Definitions.{FunDef, Program}
import purescala.DefOps
import grammars._
import solvers.ModelBuilder
import scala.collection.mutable.ListBuffer
import evaluators.AbstractEvaluator
import scala.annotation.tailrec
import leon.evaluators.EvaluationResults
import leon.purescala.Common
object QuestionBuilder {
/** Sort methods for questions. You can build your own */
trait QuestionSortingType {
def apply[T <: Expr](e: Question[T]): Int
}
object QuestionSortingType {
case object IncreasingInputSize extends QuestionSortingType {
def apply[T <: Expr](q: Question[T]) = q.inputs.map(i => ExprOps.count(e => 1)(i)).sum
}
case object DecreasingInputSize extends QuestionSortingType{
def apply[T <: Expr](q: Question[T]) = -IncreasingInputSize(q)
}
}
// Add more if needed.
/** Sort methods for question's answers. You can (and should) build your own. */
abstract class AlternativeSortingType[T <: Expr] extends Ordering[T] { self =>
/** Prioritizes this comparison operator against the second one. */
def &&(other: AlternativeSortingType[T]): AlternativeSortingType[T] = new AlternativeSortingType[T] {
def compare(e: T, f: T): Int = {
val ce = self.compare(e, f)
if(ce == 0) other.compare(e, f) else ce
}
}
}
object AlternativeSortingType {
/** Presents shortest alternatives first */
case class ShorterIsBetter[T <: Expr]()(implicit c: LeonContext) extends AlternativeSortingType[T] {
def compare(e: T, f: T) = e.asString.length - f.asString.length
}
/** Presents balanced alternatives first */
case class BalancedParenthesisIsBetter[T <: Expr]()(implicit c: LeonContext) extends AlternativeSortingType[T] {
def convert(e: T): Int = {
val s = e.asString
var openP, openB, openC = 0
for(c <- s) c match {
case '(' if openP >= 0 => openP += 1
case ')' => openP -= 1
case '{' if openB >= 0 => openB += 1
case '}' => openB -= 1
case '[' if openC >= 0 => openC += 1
case ']' => openC -= 1
case _ =>
}
Math.abs(openP) + Math.abs(openB) + Math.abs(openC)
}
def compare(e: T, f: T): Int = convert(e) - convert(f)
}
}
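  /** Hedged illustration, not part of the original source: a custom
    * [[AlternativeSortingType]] that prefers alternatives with fewer AST nodes.
    * It can be combined with the built-in orderings via `&&`. */
  case class FewerNodesIsBetter[T <: Expr]() extends AlternativeSortingType[T] {
    def compare(e: T, f: T): Int = ExprOps.count(_ => 1)(e) - ExprOps.count(_ => 1)(f)
  }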
/** Specific enumeration of strings, which can be used with the QuestionBuilder#setValueEnumerator method */
object SpecialStringValueGrammar extends SimpleExpressionGrammar {
def computeProductions(t: TypeTree)(implicit ctx: LeonContext): Seq[Prod] = t match {
case StringType =>
List(
terminal(StringLiteral("")),
terminal(StringLiteral("a")),
terminal(StringLiteral("\\"'\\n\\t")),
terminal(StringLiteral("Lara 2007"))
)
case _ => ValueGrammar.computeProductions(t)
}
}
/** Make all generic values uniquely identifiable among the final string (no value is a substring of another if possible)
* Duplicate generic values are not suitable for disambiguating questions since they remove an order. */
def makeGenericValuesUnique(a: Expr): Expr = {
//println("Going to make this value unique:" + a)
var genVals = Set[Expr with Terminal](StringLiteral(""))
def freshenValue(g: Expr with Terminal): Option[Expr with Terminal] = g match {
case g: GenericValue => Some(GenericValue(g.tp, g.id + 1))
case StringLiteral(s) =>
val newS = if(s == "") "a" else s
val i = s.lastIndexWhere { c => c < 'a' || c > 'z' }
val prefix = s.take(i+1)
val suffix = s.drop(i+1)
val res = if(suffix.forall { _ == 'z' }) {
prefix + "a" + ("a" * suffix.length)
} else {
val last = suffix.reverse.prefixLength { _ == 'z' }
val digit2increase = suffix(suffix.length - last - 1)
prefix + (digit2increase.toInt + 1).toChar + ("a" * last)
}
Some(StringLiteral(res))
case InfiniteIntegerLiteral(i) => Some(InfiniteIntegerLiteral(i+1))
case IntLiteral(i) => if(i == Integer.MAX_VALUE) None else Some(IntLiteral(i+1))
case CharLiteral(c) => if(c == Char.MaxValue) None else Some(CharLiteral((c+1).toChar))
case otherLiteral => None
}
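    // Hedged worked examples of freshenValue (derived by tracing the code above,
    // not taken from the original documentation). For a value that was already seen,
    // it proposes the following replacements:
    //   StringLiteral("")   becomes StringLiteral("a")
    //   StringLiteral("z")  becomes StringLiteral("aa")
    //   StringLiteral("az") becomes StringLiteral("ba")
    //   IntLiteral(3)       becomes IntLiteral(4)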
@tailrec @inline def freshValue(g: Expr with Terminal): Expr with Terminal = {
if(genVals contains g)
freshenValue(g) match {
case None => g
case Some(v) => freshValue(v)
}
else {
genVals += g
g
}
}
ExprOps.postMap{ e => e match {
case g:Expr with Terminal =>
Some(freshValue(g))
case _ => None
}}(a)
}
}
/**
* Builds a set of disambiguating questions for the problem
*
* {{{
* def f(input: input.getType): T =
* [element of r.solution]
* }}}
*
* @tparam T A subtype of Expr that will be the type used in the Question[T] results.
 * @param input The identifiers of the function's inputs. They must be typed, or the types should be set via setArgumentType
 * @param solutions A stream of candidate solutions; the head is treated as the current solution and the tail as alternatives
* @param filter A function filtering which outputs should be considered for comparison.
* It takes as input the sequence of outputs already considered for comparison, and the new output.
 * It should return Some(result) if the result can be shown, and None otherwise.
*
*/
class QuestionBuilder[T <: Expr](
input: Seq[Identifier],
solutions: Stream[Solution],
filter: (Seq[T], Expr) => Option[T],
originalFun: Option[FunDef] = None)(implicit c: LeonContext, p: Program) {
import QuestionBuilder._
private var _argTypes = input.map(_.getType)
private var _questionSorMethod: QuestionSortingType = QuestionSortingType.IncreasingInputSize
private var _alternativeSortMethod: AlternativeSortingType[T] = AlternativeSortingType.BalancedParenthesisIsBetter[T]() && AlternativeSortingType.ShorterIsBetter[T]()
private var solutionsToTake = 15
private var expressionsToTake = 15 // TODO: At least cover the program !
private var keepEmptyAlternativeQuestions: T => Boolean = Set()
private var value_enumerator: ExpressionGrammar = ValueGrammar
private var expressionsToTestFirst: Option[Stream[Seq[Expr]]] = None
/** Sets the way to sort questions during enumeration. Not used at this moment. See [[QuestionSortingType]] */
def setSortQuestionBy(questionSorMethod: QuestionSortingType) = { _questionSorMethod = questionSorMethod; this }
/** Sets the way to sort alternatives. See [[AlternativeSortingType]] */
def setSortAlternativesBy(alternativeSortMethod: AlternativeSortingType[T]) = { _alternativeSortMethod = alternativeSortMethod; this }
/** Sets the argument type. Not needed if the input identifier is already assigned a type. */
def setArgumentType(argTypes: List[TypeTree]) = { _argTypes = argTypes; this }
/** Sets the number of solutions to consider. Default is 15 */
def setSolutionsToTake(n: Int) = { solutionsToTake = n; this }
/** Sets the number of expressions to consider. Default is 15 */
def setExpressionsToTake(n: Int) = { expressionsToTake = n; this }
/** Sets if when there is no alternative, the question should be kept. */
def setKeepEmptyAlternativeQuestions(b: T => Boolean) = {keepEmptyAlternativeQuestions = b; this }
/** Sets the way to enumerate expressions */
def setValueEnumerator(v: ExpressionGrammar) = value_enumerator = v
/** Sets the expressions to test first */
def setExpressionsToTestFirst(s: Option[Stream[Seq[Expr]]]) = expressionsToTestFirst = s
private def run(s: Solution, elems: Seq[(Identifier, Expr)]): Option[Expr] = {
val newProgram = DefOps.addFunDefs(p, s.defs, p.definedFunctions.head)
s.ifOnFunDef(originalFun.getOrElse(new FunDef(Common.FreshIdentifier("useless"), Nil, Nil, UnitType))){
val e = new AbstractEvaluator(c, newProgram)
val model = new ModelBuilder
model ++= elems
val modelResult = model.result()
val evaluation = e.eval(s.term, modelResult)
for{x <- evaluation.result
res = x._1
simp = ExprOps.simplifyArithmetic(res)}
yield simp
}
}
/** Given an input, the current output, a list of alternative programs, compute a question if there is any. */
def computeQuestion(possibleInput: Seq[(Identifier, Expr)], currentOutput: T, alternatives: List[Solution]): Option[Question[T]] = {
augmentQuestion(possibleInput, currentOutput, Nil, alternatives)
}
/** Performs the same as computeQuestion but takes the previous outputs into account to produce an increasing question */
def augmentQuestion(possibleInput: Seq[(Identifier, Expr)], currentOutput: T, previousAlternativeOutputs: List[T], newAlternatives: List[Solution]): Option[Question[T]] = {
val alternative_outputs = (((ListBuffer[T](currentOutput) ++= previousAlternativeOutputs) /: newAlternatives) { (prev, alternative) =>
run(alternative, possibleInput) match {
case Some(alternative_output) if alternative_output != currentOutput =>
filter(prev, alternative_output) match {
case Some(alternative_output_filtered) =>
prev += alternative_output_filtered
case _ => prev
}
case _ => prev
}
}).drop(1 + previousAlternativeOutputs.length).toList.distinct
if(alternative_outputs.nonEmpty || keepEmptyAlternativeQuestions(currentOutput)) {
Some(Question(possibleInput.map(_._2), currentOutput, alternative_outputs.sortWith((e,f) => _alternativeSortMethod.compare(e, f) <= 0)))
} else {
None
}
}
def getExpressionsToTestFirst(): Option[Stream[Seq[(Identifier, Expr)]]] = expressionsToTestFirst map { inputs =>
val inputs_generics = inputs.map(y => y.map(x => makeGenericValuesUnique(x)))
inputs_generics.map(in => input zip in)
}
def getAllPossibleInputs(expressionsToTake: Int): Stream[Seq[(Identifier, Expr)]]= {
val datagen = new GrammarDataGen(new DefaultEvaluator(c, p), value_enumerator)
val enumerated_inputs = datagen.generateMapping(input, originalFun.map(f => f.precOrTrue).getOrElse(BooleanLiteral(true)), expressionsToTake, expressionsToTake)
.map(inputs =>
inputs.map(id_expr =>
(id_expr._1, makeGenericValuesUnique(id_expr._2)))).toStream
enumerated_inputs
}
def inputsToQuestions(inputs: Stream[Seq[(Identifier, Expr)]]): Stream[Question[T]] = {
val solution = solutions.head
val alternatives = solutions.drop(1).take(solutionsToTake).toList
for {
possibleInput <- inputs
currentOutputNonFiltered <- run(solution, possibleInput)
currentOutput <- filter(Seq(), currentOutputNonFiltered)
question <- computeQuestion(possibleInput, currentOutput, alternatives)
} yield question
}
def inputsToQuestionsByAlternativeFirst(inputs: Stream[Seq[(Identifier, Expr)]]): Stream[Question[T]] = {
val solution = solutions.head
val alternatives = solutions.drop(1).take(solutionsToTake)
var inputsToConsider = inputs
var stopOtherOutputs = false
var previousAlternativeOutputs = List[T]()
val res = for {
alternative <- alternatives
_ = (stopOtherOutputs = false)
possibleInput <- inputsToConsider
if !stopOtherOutputs
currentOutputNonFiltered <- run(solution, possibleInput)
currentOutput <- filter(previousAlternativeOutputs, currentOutputNonFiltered)
question <- augmentQuestion(possibleInput, currentOutput, previousAlternativeOutputs, List(alternative))
} yield {
inputsToConsider = Stream(possibleInput)
stopOtherOutputs = true
previousAlternativeOutputs = previousAlternativeOutputs ++ question.other_outputs
question
}
if(res.isEmpty) {
(for {
possibleInput <- inputs
currentOutputNonFiltered <- run(solution, possibleInput)
currentOutput <- filter(Seq(), currentOutputNonFiltered)
question <- computeQuestion(possibleInput, currentOutput, Nil)
} yield {
question
}).take(1)
} else res
}
/** Returns a list of input/output questions to ask to the user. */
def resultAsStream(): Stream[Question[T]] = {
if(solutions.isEmpty) return Stream.empty
val all_inputs = getExpressionsToTestFirst().getOrElse(Stream.Empty) #::: getAllPossibleInputs(expressionsToTake).take(expressionsToTake)
val res = inputsToQuestionsByAlternativeFirst(all_inputs)
return res
/*
getExpressionsToTestFirst() foreach { inputs_generics =>
val res = inputsToQuestions(inputs_generics)
if(res.nonEmpty) return res
}
val enumerated_inputs = getAllPossibleInputs(expressionsToTake).take(expressionsToTake)
val questions = inputsToQuestions(enumerated_inputs)
questions*/
}/*
def result(): List[Question[T]] = {
resultAsStream().toList.sortBy(_questionSorMethod(_))
}*/
}
| epfl-lara/leon | src/main/scala/leon/synthesis/disambiguation/QuestionBuilder.scala | Scala | gpl-3.0 | 13,290 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.controller
import org.apache.predictionio.core.BaseAlgorithm
/** A concrete implementation of [[LServing]] returning the average of all
 * algorithms' predictions, each of which is expected to be a Double.
*
* @group Serving
*/
class LAverageServing[Q] extends LServing[Q, Double] {
/** Returns the average of all algorithms' predictions. */
override def serve(query: Q, predictions: Seq[Double]): Double = {
predictions.sum / predictions.length
}
}
/** A concrete implementation of [[LServing]] returning the average of all
 * algorithms' predictions, each of which is expected to be a Double.
*
* @group Serving
*/
object LAverageServing {
/** Returns an instance of [[LAverageServing]]. */
def apply[Q](a: Class[_ <: BaseAlgorithm[_, _, Q, _]]): Class[LAverageServing[Q]] =
classOf[LAverageServing[Q]]
}
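/** Hedged usage sketch, not part of the original source: averaging three
  * algorithms' predictions for a single query. The query type `String` and the
  * prediction values below are made up for illustration only.
  */
private[controller] object LAverageServingExample {
  def example: Double = {
    val serving = new LAverageServing[String]
    // (1.0 + 2.0 + 3.0) / 3 == 2.0
    serving.serve("a query", Seq(1.0, 2.0, 3.0))
  }
}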
| PredictionIO/PredictionIO | core/src/main/scala/org/apache/predictionio/controller/LAverageServing.scala | Scala | apache-2.0 | 1,695 |
package com.mind_era.arima
import org.scalatest.FreeSpec
//import scalin.immutable.Vec
//import scalin._
import spire.algebra._
//import cats.kernel.Eq._
import spire.math._
import spire.implicits._
import scalin.mutable._
import scalin.mutable.dense._
class ArimaDiffTest extends FreeSpec {
"Methods tests" - {
"diff vector" in {
// implicit val eq = Eq.fromUniversalEquals[Number]
// implicit val field: Field[Int] = Field.
// > s <- c(3, 1, 4, 1, 5, 9)
val pi = Vec[Rational](3, 1, 4, 1, 5, 9)
// > diff(s, 1, 1)
// [1] -2 3 -3 4 4
assert (Vec[Rational](-2, 3, -3, 4, 4) === Arima.diff(pi, Natural.one, Natural.one))
// > diff(s, 1, 3)
// [1] -11 13 -7
assert (Vec[Rational](-11, 13, -7) === Arima.diff(pi, Natural.one, Natural(3)))
// > diff(s, 2, 1)
// [1] 1 0 1 8
assert (Vec[Rational](1, 0, 1, 8) === Arima.diff(pi, Natural(2), Natural.one))
}
"diff matrix" in {
// > m <- matrix(s, 2, 3)
// [,1] [,2] [,3]
// [1,] 3 4 5
// [2,] 1 1 9
val pi23 = Mat[Rational]((3: Rational, 4: Rational, 5: Rational), (1: Rational, 1: Rational, 9: Rational))
// > diff(m, 1, 1)
// [,1] [,2] [,3]
// [1,] -2 -3 4
assert (Vec[Rational](-2, -3, 4).toRowMat === Arima.diff(pi23, Natural.one, Natural.one))
// > diff(m, 1, 2)
// numeric(0)
assert (Mat.fillConstant(0, 3)(Rational.zero) === Arima.diff(pi23, Natural.one, Natural(2)))
// > diff(m, 2, 1)
// numeric(0)
assert (Mat.fillConstant(0, 3)(Rational.zero) === Arima.diff(pi23, Natural(2), Natural.one))
// 3.14159265359
// > mm <- matrix(c(3,1,4,1,5,9,2,6,5,3,5,9), 3, 4)
// [,1] [,2] [,3] [,4]
// [1,] 3 1 2 3
// [2,] 1 5 6 5
// [3,] 4 9 5 9
val row1: (Rational, Rational, Rational, Rational) = (3, 1, 2, 3)
val row2: (Rational, Rational, Rational, Rational) = (1, 5, 6, 5)
val row3: (Rational, Rational, Rational, Rational) = (4, 9, 5, 9)
val pi34 = Mat(row1, row2, row3)
// > diff(mm, 2, 1)
// [,1] [,2] [,3] [,4]
// [1,] 1 8 3 6
assert (Vec[Rational](1, 8, 3, 6).toRowMat === Arima.diff(pi34, Natural(2), Natural.one))
// > diff(mm, 1, 2)
// [,1] [,2] [,3] [,4]
// [1,] 5 0 -5 2
assert (Vec[Rational](5, 0, -5, 2).toRowMat === Arima.diff(pi34, Natural.one, Natural(2)))
// > diff(mm, 1, 1)
// [,1] [,2] [,3] [,4]
// [1,] -2 4 4 2
// [2,] 3 4 -1 4
val resRow1: (Rational, Rational, Rational, Rational) = (-2, 4, 4, 2)
val resRow2: (Rational, Rational, Rational, Rational) = (3, 4, -1, 4)
assert (Mat(resRow1, resRow2) === Arima.diff(pi34, Natural.one, Natural.one))
}
}
}
| aborg0/arima | src/test/scala/com/mind_era/arima/ArimaDiffTest.scala | Scala | agpl-3.0 | 2,899 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import fastparse.all._
object SignatureParser extends ClassParser {
protected val GenericClassSig: Parser[GenericClassName] =
P(GenericClassSigWithArgs | GenericClassSigWithoutArgs)
protected val GenericClassSigWithArgs: Parser[GenericClassName] =
P(ClassNameSig ~ "<" ~ GenericArgs ~ ">" ~ InnerClassSig.rep ~ ";")
.map((GenericClassName.apply _).tupled)
protected val GenericClassSigWithoutArgs: Parser[GenericClassName] =
P(ClassNameSig ~ InnerClassSig.rep ~ ";").map {
case (className, innerClass) =>
GenericClassName(className, Seq.empty, innerClass)
}
protected val InnerClassSig: Parser[InnerClassName] =
P(InnerClassSigWithArgs | InnerClassSigWithoutArgs)
protected val InnerClassSigWithArgs: Parser[InnerClassName] =
P("." ~ Name ~ "<" ~ GenericArgs ~ ">")
.map((InnerClassName.apply _).tupled)
protected val InnerClassSigWithoutArgs: Parser[InnerClassName] =
P("." ~ Name)
.map(InnerClassName(_, Seq.empty))
protected val PrimitiveClassSig: Parser[GenericClassName] =
P(PrimitiveClass)
.map(GenericClassName(_, Seq.empty))
private val GenericName: Parser[String] =
P(GenericNameCharPredicate.rep(1).!)
private val Generic: Parser[GenericClass] =
P((GenericWithParam | GenericSuper) ~ End)
private val GenericWithParam: Parser[GenericClass] =
P("<" ~ GenericSigParam.rep(1) ~ ">" ~ GenericClassSig.rep(1))
.map((GenericClass.apply _).tupled)
private val GenericSuper: Parser[GenericClass] =
P(GenericClassSig.rep(1))
.map(GenericClass(Seq.empty, _))
// class SomeClass[T <: SomeTrait] will have two : in signature
private val GenericSigParam: Parser[GenericParam] =
P(GenericName ~ ":" ~ (":".? ~ FieldTypeSignature).rep(1))
.map((GenericParam.apply _).tupled)
private val GenericArgs: Parser[Seq[GenericArg]] =
P((ExtendsObject | GenericArgWithSignature).rep(1))
val ExtendsObjectGenericArg =
GenericArg(None, GenericClassName(ClassName.fromFqn("java.lang.Object")))
private val ExtendsObject: Parser[GenericArg] =
P("*")
.map(_ => ExtendsObjectGenericArg)
private val GenericArgWithSignature: Parser[GenericArg] =
P((LowerBoundary | UpperBoundary).? ~ FieldTypeSignature)
.map((GenericArg.apply _).tupled)
private val LowerBoundary: Parser[BoundType] =
P("-")
.map(_ => LowerBound)
private val UpperBoundary: Parser[BoundType] =
P("+")
.map(_ => UpperBound)
private val FieldTypeSignature: Parser[GenericSignature] =
P(GenericClassSig | GenericArraySig | TypeVar)
private val GenericArraySig: Parser[GenericArray] =
P("[" ~ (PrimitiveClassSig | GenericClassSig | GenericArraySig | TypeVar))
.map(GenericArray)
private val TypeVar: Parser[GenericVar] =
P("T" ~ GenericNameCharPredicate.rep(1).! ~ ";").map(GenericVar)
def parseGeneric(desc: String): GenericClass =
Generic.parse(desc) match {
case Parsed.Success(sig, _) => sig
case f: Parsed.Failure =>
throw new Exception(s"Failed to parse generic: ${f.msg}")
}
protected val GenericNameCharPredicate = CharPred(c => ":;/ ".forall(_ != c))
override val PackageNamePredicate = CharPred(c => "<;/ ".forall(_ != c))
override val ClassNameCharPredicate = CharPred(c => "<;/ ".forall(_ != c))
}
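/** Hedged illustration, not part of the original source: parsing the generic
  * signature that javac would emit for a hypothetical `class Box<T>`; the exact
  * signature literal below is an assumption about that format.
  */
private object SignatureParserExample {
  def boxClass: GenericClass =
    SignatureParser.parseGeneric("<T:Ljava/lang/Object;>Ljava/lang/Object;")
}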
| ensime/ensime-server | core/src/main/scala/org/ensime/indexer/SignatureParser.scala | Scala | gpl-3.0 | 3,489 |
package bylt.core
import bylt.core.JsonProtocol._
import org.scalacheck.Prop._
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
import spray.json._
class JsonProtocolTest extends FunSuite with Checkers {
implicit override val generatorDrivenConfig =
PropertyCheckConfig (minSize = 0, maxSize = 40, workers = 4)
implicit val arbExpr = ArbitraryExpr.arb
test ("Expr to and from json are equivalent") {
check { (expr: Expr) =>
val jsonAst = expr.toJson
val expr2 = jsonAst.convertTo [Expr]
expr == expr2
}
}
implicit val arbType = ArbitraryType.arb
test ("Type to and from json are equivalent") {
check { (tpe: TypeExpr) =>
val jsonAst = tpe.toJson
val tpe2 = jsonAst.convertTo [TypeExpr]
tpe == tpe2
}
}
implicit val arbModule = ArbitraryModule.arb
test ("Module to and from json are equivalent") {
check { (module : Module) =>
val jsonAst = module.toJson
val module2 = jsonAst.convertTo [Module]
module == module2
}
}
}
| ozmi/bylt | src/test/scala/bylt/core/JsonProtocolTest.scala | Scala | mit | 1,150 |
package org.psesd.srx.shared.core
import org.psesd.srx.shared.core.exceptions.ArgumentNullOrEmptyOrWhitespaceException
import org.psesd.srx.shared.core.extensions.TypeExtensions._
/** Represents SRX service component.
*
* @version 1.0
* @since 1.0
* @author Stephen Pugmire (iTrellis, LLC)
**/
class SrxServiceComponent(val name: String, val version: String) {
if (name.isNullOrEmpty) {
throw new ArgumentNullOrEmptyOrWhitespaceException("name parameter")
}
if (version.isNullOrEmpty) {
throw new ArgumentNullOrEmptyOrWhitespaceException("version parameter")
}
}
| PSESD/srx-shared-core | src/main/scala/org/psesd/srx/shared/core/SrxServiceComponent.scala | Scala | mit | 591 |
package de.frosner.broccoli.models
import java.util.Date
import de.frosner.broccoli.models.JobStatus.JobStatus
import de.frosner.broccoli.models.ServiceStatus.ServiceStatus
import play.api.libs.json.Json
import JobStatusJson._
case class PeriodicRun(createdBy: String, status: JobStatus, utcSeconds: Long, jobName: String) extends Serializable
object PeriodicRun {
implicit val periodicRunWrites = Json.writes[PeriodicRun]
implicit val periodicRunReads = Json.reads[PeriodicRun]
} | FRosner/cluster-broccoli | server/src/main/scala/de/frosner/broccoli/models/PeriodicRun.scala | Scala | apache-2.0 | 491 |
package akka.rtcweb.protocol.dtls
import javax.crypto.{ Cipher, Mac }
import akka.rtcweb.protocol.dtls.handshake._
import scodec._
import scodec.codecs._
private[dtls] case object SecurityParameters
/**
* struct {
* ConnectionEnd entity;
* PRFAlgorithm prf_algorithm;
* BulkCipherAlgorithm bulk_cipher_algorithm;
* CipherType cipher_type;
* uint8 enc_key_length;
* uint8 block_length;
* uint8 fixed_iv_length;
* uint8 record_iv_length;
* MACAlgorithm mac_algorithm;
* uint8 mac_length;
* uint8 mac_key_length;
* CompressionMethod compression_algorithm;
* opaque master_secret[48];
* opaque client_random[32];
* opaque server_random[32];
* } SecurityParameters;
*/
private[dtls] case class SecurityParameters(
entity: ConnectionEnd,
prf_algorithm: PRFAlgorithm,
bulk_cipher_algorithm: BulkCipherAlgorithm,
cipher_type: CipherType,
enc_key_length: Int,
block_length: Int,
fixed_iv_length: Int,
record_iv_length: Int,
mac_algorithm: MACAlgorithm,
mac_length: Int,
mac_key_length: Int,
compression_algorithm: CompressionMethod)
private[dtls] sealed trait ConnectionEnd
private[dtls] object ConnectionEnd {
implicit val codec = "ConnectionEnd" | mappedEnum(uint8,
ServerConnectionEnd -> 0,
ClientConnectionEnd -> 1)
case object ServerConnectionEnd extends ConnectionEnd
case object ClientConnectionEnd extends ConnectionEnd
}
private[dtls] sealed trait PRFAlgorithm
private[dtls] object PRFAlgorithm {
implicit val codec = { "PRFAlgorithm" | mappedEnum(uint8, tls_prf_sha256 -> 0) }
case object tls_prf_sha256 extends PRFAlgorithm
}
private[dtls] sealed trait BulkCipherAlgorithm { def cipher(): Cipher }
private[dtls] object BulkCipherAlgorithm {
implicit val codec: Codec[BulkCipherAlgorithm] = "BulkCipherAlgorithm" | mappedEnum(uint8,
`null` -> 0,
rc4 -> 1,
`3des` -> 2,
aes -> 3)
case object `null` extends BulkCipherAlgorithm { def cipher() = ??? }
case object rc4 extends BulkCipherAlgorithm { def cipher() = ??? }
case object `3des` extends BulkCipherAlgorithm { def cipher() = Cipher.getInstance("DESede") }
case object aes extends BulkCipherAlgorithm { def cipher() = Cipher.getInstance("AES") }
}
/**
* see rfc3749
*/
private[dtls] sealed trait CompressionMethod
object CompressionMethod {
implicit val codec: Codec[CompressionMethod] = "CompressionMethod" | mappedEnum(uint8,
`null` -> 0,
DEFLATE -> 1)
case object `null` extends CompressionMethod //(0x00)
case object DEFLATE extends CompressionMethod //(0x01)
}
private[dtls] sealed trait MACAlgorithm {
def instance: Mac
}
private[dtls] object MACAlgorithm {
implicit val codec: Codec[MACAlgorithm] = "MACAlgorithm" | mappedEnum(uint8,
`null` -> 0,
hmac_md5 -> 1,
hmac_sha1 -> 2,
hmac_sha256 -> 3,
hmac_sha384 -> 4,
hmac_sha512 -> 5)
case object `null` extends MACAlgorithm { def instance = ??? }
case object hmac_md5 extends MACAlgorithm { def instance = Mac.getInstance("HmacMD5") }
case object hmac_sha1 extends MACAlgorithm { def instance = Mac.getInstance("HmacSHA1") }
case object hmac_sha256 extends MACAlgorithm { def instance = Mac.getInstance("HmacSHA256") }
case object hmac_sha384 extends MACAlgorithm { def instance = Mac.getInstance("HmacSHA384") }
case object hmac_sha512 extends MACAlgorithm { def instance = Mac.getInstance("HmacSHA512") }
}
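/** Hedged illustration, not part of the original source: obtaining JCA primitives
  * from the algorithm enumerations defined above.
  */
private[dtls] object SecurityParametersExample {
  // HmacSHA256 produces 32-byte MACs
  def hmacSha256MacLength: Int = MACAlgorithm.hmac_sha256.instance.getMacLength
  def aesCipher: Cipher = BulkCipherAlgorithm.aes.cipher()
}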
| danielwegener/akka-rtcweb | src/main/scala/akka/rtcweb/protocol/dtls/SecurityParameters.scala | Scala | apache-2.0 | 3,583 |
package com.jroliveira.boo.routes
import akka.http.scaladsl.server.{Directives, Route}
object Router extends Directives {
val routes: Route =
HomeRoute.routes ~
UsersRoute.routes ~
TogglesRoute.routes
}
| jroliveira/boo-api | src/main/scala/com/jroliveira/boo/routes/Router.scala | Scala | mit | 223 |
//package com.eharmony.aloha.models.ensemble.maxima
//
//import scala.collection.GenTraversableOnce
//import com.eharmony.aloha.score.basic.ModelOutput
//import ModelOutput.Implicits.modelOutputOrdering
//import com.eharmony.aloha.models.ensemble.tie.TieBreaker
//import com.eharmony.aloha.id.ModelIdentity
//import com.eharmony.aloha.models.ensemble.{Ensemble, EnsembleCombiner}
//import com.eharmony.aloha.score.proto.conversions.ScoreConverter
//
//// TODO: Fix variance issue on B
//case class Max[-A, B: Ordering: ScoreConverter](
// subModels: GenTraversableOnce[Model[A, B]],
// tieBreaker: TieBreaker[B],
// modelId: ModelIdentity
//) extends Ensemble[A, B, MaximaList[B], B] with BaseModel[A, B] {
// require(subModels.size > 0)
// val combiner = EnsembleCombiner(Zero.zero[B], new Maxima[B], new MaximaSelector(tieBreaker))
// protected[this] val impl = ScoreConverterW[B]
//}
| eHarmony/aloha | aloha-core/src/main/scala/com/eharmony/aloha/models/ensemble/maxima/Max.scala | Scala | mit | 917 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Sun Sep 11 22:43:04 EDT 2011
* @see LICENSE (MIT style license file).
*/
package scalation.minima
import scala.collection.mutable.ArrayBuffer
import scala.math.{abs, ceil, floor, max, round, sqrt}
import scala.util.control.Breaks.{breakable, break}
import scalation.calculus.Differential.FunctionV2S
import scalation.linalgebra.VectorD
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** This `IntegerNLP` class solves Integer Non-Linear Programming (INLP) and Mixed Integer
 * Non-Linear Programming (MINLP) problems recursively using an NLP algorithm,
 * e.g., `QuasiNewton`.  First, an NLP problem is solved.  If the optimal solution vector 'x' is
* entirely integer valued, the INLP is solved. If not, pick the first 'x_j'
* that is not integer valued. Define two new NLP problems which bound 'x_j'
* to the integer below and above, respectively. Branch by solving each of
* these NLP problems in turn. Prune by not exploring branches less optimal
* than the currently best integer solution. This technique is referred to
* as Branch and Bound. An exclusion set may be optionally provided for
* MINLP problems.
*
* Given an objective function 'f(x)' and a constraint function 'g(x)',
* find values for the solution/decision vector 'x' that minimize the
* objective function 'f(x)', while satisfying the constraint function, i.e.,
*
* minimize f(x)
* subject to g(x) <= 0, some x_i must integer-valued
*
* Make b_i negative to indicate a ">=" constraint
*
 * @param f    the objective function to be minimized
 * @param n    the dimension of the solution/decision vector 'x'
 * @param g    the constraint function to be satisfied, if any
 * @param excl the set of variables to be excluded from the integer requirement
*/
class IntegerNLP (f: FunctionV2S, n: Int, var g: FunctionV2S = null, excl: Set [Int] = Set ())
{
private val EPSILON = 1E-7 // number close to zero
private val SQRT_EPSILON = sqrt (EPSILON) // square root of EPSILON
// best integer solution so far
private var best: Tuple2 [VectorD, Double] = (null, Double.PositiveInfinity)
def g0 (x: VectorD): Double = 0.0
if (g == null) g = g0
val x_le = new VectorD (n); x_le.set (Double.NaN) // constraint x_j <= value
val x_ge = new VectorD (n); x_ge.set (Double.NaN) // constraint x_j >= value
println (">>>>>>>>>>>>>> root: dp = 0")
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add a new constraint to the current set of bounding constraints: x_j - bound <= 0
* or x_j - bound >= 0 (e.g., x_1 - 2 <= 0 or x_0 - 4 >= 0).
* @param j the index of variable x_j
* @param le whether it is a "less than or equal to" 'le' constraint
* @param bound the bounding value
*/
def addConstraint (j: Int, le: Boolean, bound: Double): Boolean =
{
println ("addConstraint: (" + j + ", " + le + ", " + bound + ")")
val low = x_le(j)
val hi = x_ge(j)
if (le) {
if (low.isNaN && hi.isNaN) x_le(j) = bound // add "<=" constraint
else if (bound >= hi) x_le(j) = bound // add "<=" constraint
else if (bound < hi) { x_le(j) = bound; x_ge(j) = -1 } // replace ">=" constraint
else if (bound < low) x_le(j) = bound // replace "<=" constraint
else return false
} else {
if (low.isNaN && hi.isNaN) x_ge(j) = bound // add ">=" constraint
else if (bound <= low) x_ge(j) = bound // add ">=" constraint
else if (bound > low) { x_ge(j) = bound; x_le(j) = -1 } // replace "<=" constraint
else if (bound > hi) x_ge(j) = bound // replace ">=" constraint
else return false
} // if
true
} // addConstraint
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add up all the violation of bounds constraints.
* @param x the current point
*/
def gBounds (x: VectorD): Double =
{
var sum = 0.0
for (j <- 0 until x.dim) { // loop over the variables x_j
if (! x_le(j).isNaN) { // check for x_j <= bound
println ("x_" + j + " - " + x_le(j) + " <= 0.0")
sum += max (0.0, x(j) - x_le(j))
} // if
if (! x_ge(j).isNaN) { // check for x_j >= bound
println (x_ge(j) + " - x_" + j + " <= 0.0")
sum += max (0.0, x_ge(j) - x(j))
} // if
} // for
sum
} // gBounds
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return j such that x_j has a fractional (non-integer) value, -1 otherwise.
* Make sure that j is not in the exclusion list.
* @param x the vector to check
*/
def fractionalVar (x: VectorD): Int =
{
for (j <- 0 until x.dim if ! (excl contains j) && abs (x(j) - round (x(j))) > SQRT_EPSILON) return j
-1
} // fractionalVar
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Solve the Mixed Integer Non-Linear Programming (MINLP) problem by using
     *  Branch and Bound and an NLP algorithm, e.g., `QuasiNewton`, recursively.
     *  @param x0  the starting point for the NLP solver
     *  @param dp  the current depth of recursion
*/
def solve (x0: VectorD, dp: Int)
{
val MAX_DEPTH = 2 // limit on depth of recursion FIX ??
def gg (x: VectorD): Double = g(x) + gBounds(x) // given + bounds constraints
val nlp = new QuasiNewton (f, gg) // set up a new NLP problem
val x = nlp.solve (x0) // solve the new NLP problem
val j = fractionalVar (x) // find j such that x_j is not an integer
var bound = 0.0
println ("IntegerNLP.solve: x = " + x + " f(x) = " + f(x) + ", j = " + j)
if (j != -1 && f(x) < best._2 && dp < MAX_DEPTH) { // x_j is not an integer => bound on both sides
println ("solve: add upper and lower bounds")
// add lower bound constraint: x_j <= floor (x(j))
bound = floor (x(j))
if (addConstraint (j, true, bound)) {
println (">>>>>>>>>>>>>> left branch: dp = " + (dp + 1))
println (">>>>>>>>>>>>>> add constraint x_" + j + " <= " + bound)
solve (x, dp + 1)
} // if
// add upper bound constraint: x_j >= -ceil (x(j)) where "-" => ">=" constraint
bound = ceil (x(j))
if (addConstraint (j, false, bound)) {
println (">>>>>>>>>>>>>> right branch: dp = " + (dp + 1))
println (">>>>>>>>>>>>>> add constraint x_" + j + " >= " + bound)
solve (x, dp + 1)
} // if
} // if
if (j == -1) {
println ("####################################################################")
println ("IntegerNLP.solve: found an INTEGER solution (x, f(x)) = " + (x, f(x)))
println ("####################################################################")
if (f(x) < best._2) best = (x, f(x)) // save the best result
} // if
} // solve
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the optimal (minimal) integer solution.
*/
def solution = best
} // IntegerNLP class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `IntegerNLPTest` object is used to test the `IntegerNLP` class.
 *  unconstrained (real) solution  x = (3.5, 5), f = 1
 *  integer solution               x = (3, 5) or (4, 5), f = 1.25
* @see Linear Programming and Network Flows, Example 6.14
*/
object IntegerNLPTest extends App
{
val x0 = new VectorD (2)
def f (x: VectorD): Double = (x(0) - 3.5) * (x(0) - 3.5) + (x(1) - 5) * (x(1) - 5) + 1.0
val inlp = new IntegerNLP (f, x0.dim)
inlp.solve (x0, 0)
println ("###############################################################")
println ("optimal solution = " + inlp.solution)
println ("###############################################################")
} // IntegerNLPTest object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/minima/IntegerNLP.scala | Scala | mit | 8,688 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.collection.mutable.ArrayBuilder
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.Since
import org.apache.spark.ml._
import org.apache.spark.ml.attribute.{Attribute, AttributeGroup, NominalAttribute}
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.stat.Summarizer
import org.apache.spark.ml.util._
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{StructField, StructType}
/**
* Params for [[VarianceThresholdSelector]] and [[VarianceThresholdSelectorModel]].
*/
private[feature] trait VarianceThresholdSelectorParams extends Params
with HasFeaturesCol with HasOutputCol {
/**
* Param for variance threshold. Features with a variance not greater than this threshold
* will be removed. The default value is 0.0.
*
* @group param
*/
@Since("3.1.0")
final val varianceThreshold = new DoubleParam(this, "varianceThreshold",
"Param for variance threshold. Features with a variance not greater than this threshold" +
" will be removed. The default value is 0.0.", ParamValidators.gtEq(0))
setDefault(varianceThreshold -> 0.0)
/** @group getParam */
@Since("3.1.0")
def getVarianceThreshold: Double = $(varianceThreshold)
}
/**
* Feature selector that removes all low-variance features. Features with a
* variance not greater than the threshold will be removed. The default is to keep
* all features with non-zero variance, i.e. remove the features that have the
* same value in all samples.
*/
@Since("3.1.0")
final class VarianceThresholdSelector @Since("3.1.0")(@Since("3.1.0") override val uid: String)
extends Estimator[VarianceThresholdSelectorModel] with VarianceThresholdSelectorParams
with DefaultParamsWritable {
@Since("3.1.0")
def this() = this(Identifiable.randomUID("VarianceThresholdSelector"))
/** @group setParam */
@Since("3.1.0")
def setVarianceThreshold(value: Double): this.type = set(varianceThreshold, value)
/** @group setParam */
@Since("3.1.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("3.1.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
@Since("3.1.0")
override def fit(dataset: Dataset[_]): VarianceThresholdSelectorModel = {
transformSchema(dataset.schema, logging = true)
val Row(maxs: Vector, mins: Vector, variances: Vector) = dataset
.select(Summarizer.metrics("max", "min", "variance").summary(col($(featuresCol)))
.as("summary"))
.select("summary.max", "summary.min", "summary.variance")
.first()
val numFeatures = maxs.size
val indices = Array.tabulate(numFeatures) { i =>
// Use peak-to-peak to avoid numeric precision issues for constant features
(i, if (maxs(i) == mins(i)) 0.0 else variances(i))
}.filter(_._2 > getVarianceThreshold).map(_._1)
copyValues(new VarianceThresholdSelectorModel(uid, indices.sorted)
.setParent(this))
}
@Since("3.1.0")
override def transformSchema(schema: StructType): StructType = {
SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
SchemaUtils.appendColumn(schema, $(outputCol), new VectorUDT)
}
@Since("3.1.0")
override def copy(extra: ParamMap): VarianceThresholdSelector = defaultCopy(extra)
}
@Since("3.1.0")
object VarianceThresholdSelector extends DefaultParamsReadable[VarianceThresholdSelector] {
@Since("3.1.0")
override def load(path: String): VarianceThresholdSelector = super.load(path)
}
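/** Hedged usage sketch, not part of the original source: keeps only the features
  * whose variance is greater than 1.0. The column names and data are made up for
  * illustration, and `spark` is assumed to be an active SparkSession.
  */
private[feature] object VarianceThresholdSelectorExample {
  def run(spark: SparkSession): DataFrame = {
    import spark.implicits._
    val df = Seq(
      (1, Vectors.dense(6.0, 7.0, 0.0)),
      (2, Vectors.dense(0.0, 9.0, 0.0)),
      (3, Vectors.dense(0.0, 9.0, 1.0))
    ).toDF("id", "features")
    new VarianceThresholdSelector()
      .setVarianceThreshold(1.0)
      .setFeaturesCol("features")
      .setOutputCol("selectedFeatures")
      .fit(df)        // computes per-feature variances
      .transform(df)  // drops the third feature (sample variance ~ 0.33 <= 1.0)
  }
}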
/**
* Model fitted by [[VarianceThresholdSelector]].
*/
@Since("3.1.0")
class VarianceThresholdSelectorModel private[ml](
@Since("3.1.0") override val uid: String,
@Since("3.1.0") val selectedFeatures: Array[Int])
extends Model[VarianceThresholdSelectorModel] with VarianceThresholdSelectorParams
with MLWritable {
if (selectedFeatures.length >= 2) {
require(selectedFeatures.sliding(2).forall(l => l(0) < l(1)),
"Index should be strictly increasing.")
}
/** @group setParam */
@Since("3.1.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("3.1.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
@Since("3.1.0")
override def transform(dataset: Dataset[_]): DataFrame = {
val outputSchema = transformSchema(dataset.schema, logging = true)
val newSize = selectedFeatures.length
val func = { vector: Vector =>
vector match {
case SparseVector(_, indices, values) =>
val (newIndices, newValues) = compressSparse(indices, values)
Vectors.sparse(newSize, newIndices, newValues)
case DenseVector(values) =>
Vectors.dense(selectedFeatures.map(values))
case other =>
throw new UnsupportedOperationException(
s"Only sparse and dense vectors are supported but got ${other.getClass}.")
}
}
val transformer = udf(func)
dataset.withColumn($(outputCol), transformer(col($(featuresCol))),
outputSchema($(outputCol)).metadata)
}
@Since("3.1.0")
override def transformSchema(schema: StructType): StructType = {
SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
val newField = prepOutputField(schema)
SchemaUtils.appendColumn(schema, newField)
}
/**
* Prepare the output column field, including per-feature metadata.
*/
private def prepOutputField(schema: StructType): StructField = {
val selector = selectedFeatures.toSet
val origAttrGroup = AttributeGroup.fromStructField(schema($(featuresCol)))
val featureAttributes: Array[Attribute] = if (origAttrGroup.attributes.nonEmpty) {
origAttrGroup.attributes.get.zipWithIndex.filter(x => selector.contains(x._2)).map(_._1)
} else {
Array.fill[Attribute](selector.size)(NominalAttribute.defaultAttr)
}
val newAttributeGroup = new AttributeGroup($(outputCol), featureAttributes)
newAttributeGroup.toStructField()
}
@Since("3.1.0")
override def copy(extra: ParamMap): VarianceThresholdSelectorModel = {
val copied = new VarianceThresholdSelectorModel(uid, selectedFeatures)
.setParent(parent)
copyValues(copied, extra)
}
@Since("3.1.0")
override def write: MLWriter =
new VarianceThresholdSelectorModel.VarianceThresholdSelectorWriter(this)
@Since("3.1.0")
override def toString: String = {
s"VarianceThresholdSelectorModel: uid=$uid, numSelectedFeatures=${selectedFeatures.length}"
}
private[spark] def compressSparse(
indices: Array[Int],
values: Array[Double]): (Array[Int], Array[Double]) = {
val newValues = new ArrayBuilder.ofDouble
val newIndices = new ArrayBuilder.ofInt
var i = 0
var j = 0
while (i < indices.length && j < selectedFeatures.length) {
val indicesIdx = indices(i)
val filterIndicesIdx = selectedFeatures(j)
if (indicesIdx == filterIndicesIdx) {
newIndices += j
newValues += values(i)
j += 1
i += 1
} else {
if (indicesIdx > filterIndicesIdx) {
j += 1
} else {
i += 1
}
}
}
// TODO: Sparse representation might be ineffective if (newSize ~= newValues.size)
(newIndices.result(), newValues.result())
}
}
@Since("3.1.0")
object VarianceThresholdSelectorModel extends MLReadable[VarianceThresholdSelectorModel] {
@Since("3.1.0")
override def read: MLReader[VarianceThresholdSelectorModel] =
new VarianceThresholdSelectorModelReader
@Since("3.1.0")
override def load(path: String): VarianceThresholdSelectorModel = super.load(path)
private[VarianceThresholdSelectorModel] class VarianceThresholdSelectorWriter(
instance: VarianceThresholdSelectorModel) extends MLWriter {
private case class Data(selectedFeatures: Seq[Int])
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val data = Data(instance.selectedFeatures.toSeq)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class VarianceThresholdSelectorModelReader extends
MLReader[VarianceThresholdSelectorModel] {
/** Checked against metadata when loading model */
private val className = classOf[VarianceThresholdSelectorModel].getName
override def load(path: String): VarianceThresholdSelectorModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.parquet(dataPath)
.select("selectedFeatures").head()
val selectedFeatures = data.getAs[Seq[Int]](0).toArray
val model = new VarianceThresholdSelectorModel(metadata.uid, selectedFeatures)
metadata.getAndSetParams(model)
model
}
}
}
| kevinyu98/spark | mllib/src/main/scala/org/apache/spark/ml/feature/VarianceThresholdSelector.scala | Scala | apache-2.0 | 9,866 |
package com.github.diegopacheco.sandbox.scripts.scala.java
class Person {
var name:String = ""
var age:Int = 0
def getName() = name
def setName(s:String) = name = s
def getAge() = age
def setAge(a:Int) = age = a
def show(a:java.lang.String) = println(a)
override def toString() = "Name " + name + " Age " + age
} | diegopacheco/scala-playground | scala-playground/src/com/github/diegopacheco/sandbox/scripts/scala/java/Person.scala | Scala | unlicense | 333 |
package intellij.haskell.spellchecker
import com.intellij.psi.PsiElement
import com.intellij.spellchecker.tokenizer.{SpellcheckingStrategy, Tokenizer}
import intellij.haskell.cabal.CabalLanguage
/**
* Provide spellchecker support for Cabal sources.
*/
class CabalSpellcheckingStrategy extends SpellcheckingStrategy {
override def isMyContext(element: PsiElement): Boolean = CabalLanguage.Instance.is(element.getLanguage)
}
| rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/spellchecker/CabalSpellcheckingStrategy.scala | Scala | apache-2.0 | 431 |
package lila.storm
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext
import lila.db.dsl._
import lila.memo.CacheApi
import lila.puzzle.PuzzleColls
/* The difficulty of storm should remain constant!
* Be very careful when adjusting the selector.
* Use the grafana average rating per slice chart.
*/
final class StormSelector(colls: PuzzleColls, cacheApi: CacheApi)(implicit ec: ExecutionContext) {
import StormBsonHandlers._
def apply: Fu[List[StormPuzzle]] = current.get {}
private val theme = lila.puzzle.PuzzleTheme.mix.key.value
private val tier = lila.puzzle.PuzzleTier.Good.key
private val maxDeviation = 85
/* for path boundaries:
* 800, 900, 1000, 1100, 1200, 1270, 1340, 1410, 1480, 1550, 1620,
* 1690, 1760, 1830, 1900, 2000, 2100, 2200, 2350, 2500, 2650, 2800
*/
private val ratingBuckets =
List(
1000 -> 7,
1150 -> 7,
1300 -> 8,
1450 -> 9,
1600 -> 10,
1750 -> 11,
1900 -> 13,
2050 -> 15,
2199 -> 17,
2349 -> 19,
2499 -> 21
)
private val poolSize = ratingBuckets.foldLeft(0) { case (acc, (_, nb)) =>
acc + nb
}
private val current = cacheApi.unit[List[StormPuzzle]] {
_.refreshAfterWrite(6 seconds)
.buildAsyncFuture { _ =>
colls
.path {
_.aggregateList(poolSize) { framework =>
import framework._
val fenColorRegex = $doc(
"$regexMatch" -> $doc(
"input" -> "$fen",
"regex" -> { if (scala.util.Random.nextBoolean()) " w " else " b " }
)
)
Facet(
ratingBuckets.map { case (rating, nbPuzzles) =>
rating.toString -> List(
Match(
$doc(
"min" $lte f"${theme}_${tier}_${rating}%04d",
"max" $gte f"${theme}_${tier}_${rating}%04d"
)
),
Sample(1),
Project($doc("_id" -> false, "ids" -> true)),
UnwindField("ids"),
// ensure we have enough after filtering deviation & color
Sample(nbPuzzles * 7),
PipelineOperator(
$doc(
"$lookup" -> $doc(
"from" -> colls.puzzle.name.value,
"as" -> "puzzle",
"let" -> $doc("id" -> "$ids"),
"pipeline" -> $arr(
$doc(
"$match" -> $doc(
"$expr" -> $doc(
"$and" -> $arr(
$doc("$eq" -> $arr("$_id", "$$id")),
$doc("$lte" -> $arr("$glicko.d", maxDeviation)),
fenColorRegex
)
)
)
),
$doc(
"$project" -> $doc(
"fen" -> true,
"line" -> true,
"rating" -> $doc("$toInt" -> "$glicko.r")
)
)
)
)
)
),
UnwindField("puzzle"),
Sample(nbPuzzles),
ReplaceRootField("puzzle")
)
}
) -> List(
Project($doc("all" -> $doc("$setUnion" -> ratingBuckets.map(r => s"$$${r._1}")))),
UnwindField("all"),
ReplaceRootField("all"),
Sort(Ascending("rating"))
)
}.map {
_.flatMap(StormPuzzleBSONReader.readOpt)
}
}
.mon(_.storm.selector.time)
.addEffect { puzzles =>
monitor(puzzles.toVector, poolSize)
}
}
}
private def monitor(puzzles: Vector[StormPuzzle], poolSize: Int): Unit = {
val nb = puzzles.size
lila.mon.storm.selector.count.record(nb)
if (nb < poolSize * 0.9)
logger.warn(s"Selector wanted $poolSize puzzles, only got $nb")
if (nb > 1) {
val rest = puzzles.toVector drop 1
lila.common.Maths.mean(rest.map(_.rating)) foreach { r =>
lila.mon.storm.selector.rating.record(r.toInt).unit
}
(0 to poolSize by 10) foreach { i =>
val slice = rest drop i take 10
lila.common.Maths.mean(slice.map(_.rating)) foreach { r =>
lila.mon.storm.selector.ratingSlice(i).record(r.toInt)
}
}
}
}
}
| luanlv/lila | modules/storm/src/main/StormSelector.scala | Scala | mit | 4,907 |
package com.arcusys.learn.liferay.update.version260.certificate
import com.arcusys.learn.liferay.update.version240.certificate.CertificateTableComponent
import com.arcusys.valamis.model.PeriodTypes
import com.arcusys.valamis.persistence.common.DbNameUtils._
import com.arcusys.valamis.persistence.common.SlickProfile
trait StatementGoalTableComponent extends CertificateTableComponent { self: SlickProfile =>
import driver.simple._
type StatementGoal = (Long, String, String, Int, PeriodTypes.Value)
class StatementGoalTable(tag: Tag) extends Table[StatementGoal](tag, tblName("CERT_STATEMENT_GOAL")) {
implicit val ValidPeriodTypeMapper = MappedColumnType.base[PeriodTypes.PeriodType, String](
s => s.toString,
s => PeriodTypes.withName(s)
)
def certificateId = column[Long]("CERTIFICATE_ID")
def verb = column[String]("VERB")
def obj = column[String]("OBJ")
def periodValue = column[Int]("PERIOD_VALUE")
def periodType = column[PeriodTypes.PeriodType]("PERIOD_TPE")
def * = (certificateId, verb, obj, periodValue, periodType)
def PK = primaryKey(pkName("CERT_STATEMENT_GOAL"), (certificateId, verb, obj))
def certificateFK = foreignKey(fkName("CERT_STATEMENT_GOAL_TO_CERT"), certificateId, certificates)(x => x.id, onDelete = ForeignKeyAction.Cascade)
}
val statementGoals = TableQuery[StatementGoalTable]
} | igor-borisov/valamis | learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/version260/certificate/StatementGoalTableComponent.scala | Scala | gpl-3.0 | 1,378 |
package io.youi.image.resize
import org.scalajs.dom.html
import scala.scalajs.js.|
import scala.concurrent.Future
object FastResizer extends ImageResizer {
override protected[resize] def resizeInternal(source: html.Image | html.Canvas, destination: html.Canvas): Future[html.Canvas] = {
destination.context.drawImage(source.asInstanceOf[html.Image], 0.0, 0.0, destination.width, destination.height)
Future.successful(destination)
}
}
| outr/youi | ui/js/src/main/scala/io/youi/image/resize/FastResizer.scala | Scala | mit | 394 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package elements
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
import org.jetbrains.plugins.scala.lang.psi.stubs.impl.ScVariableStubImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys.VARIABLE_NAME_KEY
/**
* User: Alexander Podkhalyuzin
* Date: 18.10.2008
*/
abstract class ScVariableElementType[V <: ScVariable](debugName: String)
extends ScValueOrVariableElementType[ScVariableStub, ScVariable](debugName) {
override protected val key = VARIABLE_NAME_KEY
override def deserialize(dataStream: StubInputStream, parentStub: StubElement[_ <: PsiElement]): ScVariableStub =
new ScVariableStubImpl(parentStub, this,
isDeclaration = dataStream.readBoolean,
namesRefs = dataStream.readNames,
typeTextRef = dataStream.readOptionName,
bodyTextRef = dataStream.readOptionName,
containerTextRef = dataStream.readOptionName,
isLocal = dataStream.readBoolean)
override def createStubImpl(variable: ScVariable, parentStub: StubElement[_ <: PsiElement]): ScVariableStub =
new ScVariableStubImpl(parentStub, this,
isDeclaration = isDeclaration(variable),
namesRefs = names(variable),
typeTextRef = typeText(variable),
bodyTextRef = bodyText(variable),
containerTextRef = containerText(variable),
isLocal = isLocal(variable))
} | loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/ScVariableElementType.scala | Scala | apache-2.0 | 1,499 |
object Test {
val x: T[Int] = ???
val y: Int = 1
val a: Int = T.f(x)
val b: T[Int] = T.g(y)
}
| som-snytt/dotty | tests/pos/toplevel-opaque/Test.scala | Scala | apache-2.0 | 102 |
package org.viz.lightning.types
class Settings(var map: Map[String, Any]) {
def this() = this(Map[String, Any]())
def append(style: Style): this.type = {
if (style.defined) {
style.validate
map += style.name -> style.contents
}
this
}
def append(styles: List[Style]): this.type = {
styles.foreach{ style =>
this.append(style)
}
this
}
def toMap: Map[String, Any] = {
map
}
} | lightning-viz/lightning-scala | src/main/scala/org/viz/lightning/types/Settings.scala | Scala | mit | 440 |
package tuner.gui.event
import scala.swing.Component
import scala.swing.event.ComponentEvent
case class ControlTableRowAdded(source:Component) extends ComponentEvent
| gabysbrain/tuner | src/main/scala/tuner/gui/event/ControlTableRowAdded.scala | Scala | mit | 169 |
package co.rc.tokenmanager.util
class TokenException( msg: String ) extends RuntimeException( msg )
| rodricifuentes1/token-manager | src/main/scala/co/rc/tokenmanager/util/TokenException.scala | Scala | mit | 101 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
import java.lang.{Integer => JInt, Short => JShort, Long => JLong, Float => JFloat, Double => JDouble, Boolean => JBool}
import java.util.{List => JList, Map => JMap}
import scala.annotation.implicitNotFound
/**
* Ring: Group + multiplication (see: http://en.wikipedia.org/wiki/Ring_%28mathematics%29)
* and the three elements it defines:
* - additive identity aka zero
* - addition
* - multiplication
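 *
 * A small illustrative sketch of the typeclass in use (results shown are those
 * produced by the IntRing instance defined below):
 * {{{
 * Ring.one[Int]                    // 1
 * Ring.times(3, 4)                 // 12
 * Ring.product(List(1, 2, 3, 4))   // 24
 * }}}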
*/
@implicitNotFound(msg = "Cannot find Ring type class for ${T}")
trait Ring[@specialized(Int,Long,Float,Double) T] extends Group[T] {
def one : T // Multiplicative identity
def times(l : T, r : T) : T
// Left product: (((a * b) * c) * d)
def product(iter : TraversableOnce[T]): T = Ring.product(iter)(this)
}
object IntRing extends Ring[Int] {
override def zero = 0
override def one = 1
override def negate(v : Int) = -v
override def plus(l : Int, r : Int) = l + r
override def minus(l : Int, r : Int) = l - r
override def times(l : Int, r : Int) = l * r
}
object ShortRing extends Ring[Short] {
override def zero = 0.toShort
override def one = 1.toShort
override def negate(v : Short) = (-v).toShort
override def plus(l : Short, r : Short) = (l + r).toShort
override def minus(l : Short, r : Short) = (l - r).toShort
override def times(l : Short, r : Short) = (l * r).toShort
}
object LongRing extends Ring[Long] {
override def zero = 0L
override def one = 1L
override def negate(v : Long) = -v
override def plus(l : Long, r : Long) = l + r
override def minus(l : Long, r : Long) = l - r
override def times(l : Long, r : Long) = l * r
}
object BigIntRing extends Ring[BigInt] {
override val zero = BigInt(0)
override val one = BigInt(1)
override def negate(v : BigInt) = -v
override def plus(l : BigInt, r : BigInt) = l + r
override def minus(l : BigInt, r : BigInt) = l - r
override def times(l : BigInt, r : BigInt) = l * r
}
object Ring extends GeneratedRingImplicits with ProductRings {
// This pattern is really useful for typeclasses
def one[T](implicit rng : Ring[T]) = rng.one
def times[T](l : T, r : T)(implicit rng : Ring[T]) = rng.times(l,r)
def asTimesMonoid[T](implicit ring: Ring[T]): Monoid[T] =
Monoid.from[T](ring.one)(ring.times _)
// Left product: (((a * b) * c) * d)
def product[T](iter : TraversableOnce[T])(implicit ring : Ring[T]) = {
// avoid touching one unless we need to (some items are pseudo-rings)
if(iter.isEmpty) ring.one
else iter.reduceLeft(ring.times _)
}
// If the ring doesn't have a one, or you want to distinguish empty cases:
def productOption[T](it: TraversableOnce[T])(implicit rng: Ring[T]): Option[T] =
it.reduceLeftOption(rng.times _)
implicit val boolRing : Ring[Boolean] = BooleanField
implicit val jboolRing : Ring[JBool] = JBoolField
implicit val intRing : Ring[Int] = IntRing
implicit val jintRing : Ring[JInt] = JIntRing
implicit val shortRing : Ring[Short] = ShortRing
implicit val jshortRing : Ring[JShort] = JShortRing
implicit val longRing : Ring[Long] = LongRing
implicit val bigIntRing : Ring[BigInt] = BigIntRing
implicit val jlongRing : Ring[JLong] = JLongRing
implicit val floatRing : Ring[Float] = FloatField
implicit val jfloatRing : Ring[JFloat] = JFloatField
implicit val doubleRing : Ring[Double] = DoubleField
implicit val jdoubleRing : Ring[JDouble] = JDoubleField
implicit def indexedSeqRing[T:Ring]: Ring[IndexedSeq[T]] = new IndexedSeqRing[T]
implicit def mapRing[K,V](implicit ring : Ring[V]) = new MapRing[K,V]()(ring)
}
| snoble/algebird | algebird-core/src/main/scala/com/twitter/algebird/Ring.scala | Scala | apache-2.0 | 4,126 |
/**
* Copyright 2015 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package schedoscope.example.osm.datahub
import org.schedoscope.dsl.Field._
import org.schedoscope.dsl.Parameter.p
import org.schedoscope.test.{SchedoscopeSpec, rows, test}
import schedoscope.example.osm.processed.Nodes
class ShopsTest extends SchedoscopeSpec {
val nodes = new Nodes(p("2014"), p("09")) with rows {
set(v(id, "122317"),
v(geohash, "t1y140djfcq0"),
v(tags, Map("name" -> "Netto",
"shop" -> "supermarket")))
set(v(id, "274850441"),
v(geohash, "t1y87ki9fcq0"),
v(tags, Map("name" -> "Schanzenbaeckerei",
"shop" -> "bakery")))
set(v(id, "279023080"),
v(geohash, "t1y77d8jfcq0"),
v(tags, Map("name" -> "Edeka Linow",
"shop" -> "supermarket")))
set(v(id, "279023080"),
v(geohash, "t1y77d8jfcq0"),
v(tags, Map("name" -> "Edeka Linow")))
}
"datahub.Shops" should "load correctly from processed.nodes" in {
new Shops() with test {
basedOn(nodes)
then()
numRows shouldBe 3
row(v(id) shouldBe "122317",
v(shopName) shouldBe "Netto",
v(shopType) shouldBe "supermarket",
v(area) shouldBe "t1y140d")
row(v(id) shouldBe "274850441",
v(shopName) shouldBe "Schanzenbaeckerei",
v(shopType) shouldBe "bakery",
v(area) shouldBe "t1y87ki")
}
}
}
| utzwestermann/schedoscope | schedoscope-tutorial/src/test/scala/schedoscope/example/osm/datahub/ShopsTest.scala | Scala | apache-2.0 | 1,941 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.graphite
import com.kenshoo.play.metrics.Metrics
import play.api.Play
@deprecated("Use DI", "-")
trait MicroserviceMetrics {
val metrics: Metrics = Play.current.injector.instanceOf[Metrics]
}
| hmrc/play-graphite | src/main/scala/uk/gov/hmrc/play/graphite/MicroserviceMetrics.scala | Scala | apache-2.0 | 826 |
package io.vamp.workflow_driver
import akka.actor.ActorRef
import akka.pattern.ask
import io.vamp.common.akka.IoC.actorFor
import io.vamp.container_driver.ContainerDriverActor.{ DeployWorkflow, GetWorkflow, UndeployWorkflow }
import io.vamp.container_driver.ContainerWorkflow
import io.vamp.model.artifact.{ DaemonSchedule, DefaultBreed, Instance, Workflow }
import io.vamp.persistence.PersistenceActor
import scala.concurrent.Future
trait DaemonWorkflowDriver extends WorkflowDriver {
protected def driverActor: ActorRef
  override def receive = super.receive orElse {
    case ContainerWorkflow(workflow, containers, health, _) ⇒
      workflow.breed match {
        case breed: DefaultBreed ⇒
          if (workflow.health != health) actorFor[PersistenceActor] ! PersistenceActor.UpdateWorkflowHealth(workflow, health)
          val instances = containers.map(_.instances.map { instance ⇒
            val ports = breed.ports.map(_.name) zip instance.ports
            Instance(instance.name, instance.host, ports.toMap, instance.deployed)
          }).getOrElse(Nil)
          if (workflow.instances != instances) actorFor[PersistenceActor] ! PersistenceActor.UpdateWorkflowInstances(workflow, instances)
        case _ ⇒
      }
    case _ ⇒
  }
  protected override def request(workflows: List[Workflow]): Unit = workflows.foreach(request)
  protected def request: PartialFunction[Workflow, Unit] = {
    case workflow if workflow.schedule == DaemonSchedule ⇒ driverActor ! GetWorkflow(workflow, self)
    case _ ⇒
  }
  protected override def schedule(data: Any): PartialFunction[Workflow, Future[Any]] = {
    case workflow if workflow.schedule == DaemonSchedule ⇒ enrich(workflow, data).flatMap { enriched ⇒ driverActor ? DeployWorkflow(enriched, update = workflow.instances.nonEmpty) }
  }
  protected override def unschedule(): PartialFunction[Workflow, Future[Any]] = {
    case workflow if workflow.schedule == DaemonSchedule && workflow.instances.nonEmpty ⇒ driverActor ? UndeployWorkflow(workflow)
  }
}
| dragoslav/vamp | workflow_driver/src/main/scala/io/vamp/workflow_driver/DaemonWorkflowDriver.scala | Scala | apache-2.0 | 2,050 |
/*
* Copyright (C) 2016 Christopher Batey and Dogan Narinc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scassandra.server.e2e.query
import com.datastax.driver.core.{DataType, TypeTokens}
import com.google.common.reflect.TypeToken
import dispatch.Defaults._
import dispatch._
import org.scassandra.codec.datatype.{DataType => DType}
import org.scassandra.server.AbstractIntegrationTest
import org.scassandra.server.priming.json.Success
import org.scassandra.server.priming.query.When
import scala.collection.JavaConverters._
class MapPriming extends AbstractIntegrationTest {
before {
val svc = url("http://localhost:8043/prime-query-single").DELETE
val response = Http(svc OK as.String)
response()
}
test("Test a map of strings") {
val map = Map("one" -> "valueOne", "two" -> "valueTwo", "three" -> "valueThree")
val whenQuery = "Test prime with cql map"
val rows: List[Map[String, Any]] = List(Map("field" -> map))
val mapOfVarcharToVarchar = DType.Map(DType.Varchar, DType.Varchar)
val columnTypes = Map("field" -> mapOfVarcharToVarchar)
prime(When(query = Some(whenQuery)), rows, Success, columnTypes)
val result = session.execute(whenQuery)
val results = result.all()
results.size() should equal(1)
results.get(0).getColumnDefinitions.getType("field") should equal(DataType.map(DataType.varchar(), DataType.varchar()))
val c: Class[_] = Class.forName("java.lang.String")
val expectedMap = map.asJava
results.get(0).getMap("field", c, c) should equal(expectedMap)
}
test("Test a map of string key, list<varchar> value") {
val map = Map("one" -> List("valueOne", "valueOne1"), "two" -> List("valueTwo"), "three" -> List("valueThree"))
val whenQuery = "Test prime with cql map"
val rows: List[Map[String, Any]] = List(Map("field" -> map))
val mapOfVarcharToListVarchar = DType.Map(DType.Varchar, DType.List(DType.Varchar))
val columnTypes = Map("field" -> mapOfVarcharToListVarchar)
prime(When(query = Some(whenQuery)), rows, Success, columnTypes)
val result = session.execute(whenQuery)
val results = result.all()
results.size() should equal(1)
results.get(0).getColumnDefinitions.getType("field") should equal(DataType.map(DataType.varchar(), DataType.list(DataType.varchar())))
val expectedMap = map.mapValues(_.asJava).asJava
val stringToken = TypeToken.of(Class.forName("java.lang.String"))
results.get(0).getMap("field", stringToken, TypeTokens.listOf(stringToken)) should equal(expectedMap)
}
}
| mikefero/cpp-driver | gtests/src/integration/scassandra/server/server/src/test/scala/org/scassandra/server/e2e/query/MapPriming.scala | Scala | apache-2.0 | 3,071 |
/*
This file is part of Static Web Gallery (SWG).
SWG is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SWG is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with SWG. If not, see <http://www.gnu.org/licenses/>.
*/
package eu.lateral.swg.db
import org.squeryl.KeyedEntity
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.Schema
import org.squeryl.annotations.Column
import org.squeryl.dsl.OneToMany
class Menu(
@Column("id") val id: Long,
@Column("project_id") val projectId: Long,
@Column("menu_number") val menuNumber: Int,
@Column("menu_level") val menuLevel: Int,
@Column("article_number") val articleNumber: Option[Int],
@Column("gallery_number") val galleryNumber: Option[Int]) extends KeyedEntity[Long] {
def this() = this(0, 0, 0, 0, None, None)
def article = {
inTransaction {
if (articleNumber.isDefined) {
from(SWGSchema.articles)(a => where((a.projectId === projectId).and(a.articleNumber === articleNumber)) select (a)).headOption
} else None
}
}
def gallery = {
inTransaction {
if (galleryNumber.isDefined) {
from(SWGSchema.galleries)(a => where((a.projectId === projectId).and(a.galleryNumber === galleryNumber)) select (a)).headOption
} else None
}
}
}
| orest-d/swg | swg/src/main/scala/eu/lateral/swg/db/Menu.scala | Scala | gpl-3.0 | 1,707 |
package com.twitter.concurrent
import java.util.concurrent.atomic.AtomicReference
import scala.annotation.tailrec
import com.twitter.util.{Future, Promise, Return}
object SpoolSource {
private object DefaultInterruptHandler extends PartialFunction[Any, Nothing] {
def isDefinedAt(x: Any) = false
def apply(x: Any) = throw new MatchError(x)
}
}
/**
* A SpoolSource is a simple object for creating and populating a Spool-chain. apply()
* returns a Future[Spool] that is populated by calls to offer(). This class is thread-safe.
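 *
 * A minimal usage sketch (names are illustrative only):
 * {{{
 * val source = new SpoolSource[Int]()
 * val spool: Future[Spool[Int]] = source()   // sees every value offered from now on
 * source.offer(1)
 * source.offer(2)
 * source.close()                              // terminates the generated Spool
 * }}}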
* @param interruptHandler attached to every Promise in the produced Spool.
*/
class SpoolSource[A](interruptHandler: PartialFunction[Throwable, Unit]) {
def this() = this(SpoolSource.DefaultInterruptHandler)
private val closedp = new Promise[Unit]
// a reference to the current outstanding promise for the next Future[Spool[A]] result
private val promiseRef = new AtomicReference[Promise[Spool[A]]]
// when the SpoolSource is closed, promiseRef will be permanently set to emptyPromise,
// which always returns an empty spool.
private val emptyPromise = new Promise(Return(Spool.empty[A]))
// set the first promise to be fulfilled by the first call to offer()
promiseRef.set({
val p = new Promise[Spool[A]]
p.setInterruptHandler(interruptHandler)
p
})
/**
* Gets the current outstanding Future for the next Spool value. The returned Spool
* will see all future values passed to offer(), up until close() is called.
* Previous values passed to offer() will not be seen in the Spool.
*/
def apply(): Future[Spool[A]] = promiseRef.get
/**
* Puts a value into the spool. Unless this SpoolSource has been closed, the current
* Future[Spool[A]] value will be fulfilled with a Spool that contains the
* provided value. If the SpoolSource has been closed, then this value is ignored.
* If multiple threads call `offer` simultaneously, the operation is thread-safe but
* the resulting order of values in the spool is non-deterministic.
*/
final def offer(value: A) {
import Spool.*::
val nextPromise = new Promise[Spool[A]]
nextPromise.setInterruptHandler(interruptHandler)
updatingTailCall(nextPromise) { currentPromise =>
currentPromise.setValue(value *:: nextPromise)
}
}
/**
* Puts a value into the spool and closes this SpoolSource. Unless
* this SpoolSource has been closed, the current Future[Spool[A]]
* value will be fulfilled with `value *:: Future.value(Spool.empty[A])`.
* If the SpoolSource has been closed, then this value is ignored.
* If multiple threads call offer simultaneously, the operation is
* thread-safe but the resulting order of values in the spool is
* non-deterministic.
*/
final def offerAndClose(value: A) {
import Spool.*::
updatingTailCall(emptyPromise) { currentPromise =>
currentPromise.setValue(value *:: Future.value(Spool.empty[A]))
closedp.setDone()
}
}
/**
* Closes this SpoolSource, which also terminates the generated Spool. This method
* is idempotent.
*/
final def close() {
updatingTailCall(emptyPromise) { currentPromise =>
currentPromise.setValue(Spool.empty[A])
closedp.setDone()
}
}
/**
* Fulfilled when this SpoolSource has been closed or an exception
* is raised.
*/
val closed: Future[Unit] = closedp
/**
 * Raises the given exception on this SpoolSource, which also terminates the generated Spool. This method
* is idempotent.
*/
final def raise(e: Throwable) {
updatingTailCall(emptyPromise) { currentPromise =>
currentPromise.setException(e)
closedp.setException(e)
}
}
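  // Atomically swaps `newPromise` into promiseRef and hands the previous promise to `f`,
  // retrying on CAS failure. Once the source is closed (promiseRef is emptyPromise) this
  // becomes a no-op, so offers after close/raise are ignored.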
@tailrec
private[this] def updatingTailCall(newPromise: Promise[Spool[A]])(f: Promise[Spool[A]] => Unit) {
val currentPromise = promiseRef.get
// if the current promise is emptyPromise, then this source has already been closed
if (currentPromise ne emptyPromise) {
if (promiseRef.compareAndSet(currentPromise, newPromise)) {
f(currentPromise)
} else {
// try again
updatingTailCall(newPromise)(f)
}
}
}
}
| travisbrown/util | util-core/src/main/scala/com/twitter/concurrent/SpoolSource.scala | Scala | apache-2.0 | 4,176 |
package com.arcusys.learn.facades
import java.io.{ File, InputStream }
import com.arcusys.learn.liferay.LiferayClasses._
import com.arcusys.learn.liferay.model.Activity
import com.arcusys.valamis.gradebook.model.PackageGrade
import com.arcusys.valamis.lesson.tincan.model.ManifestActivity
import com.arcusys.valamis.lrs.api.StatementApi
import com.arcusys.valamis.model.PeriodTypes
import PeriodTypes.PeriodType
import com.arcusys.learn.models.response.CollectionResponse
import com.arcusys.learn.models.valamispackage.{ PackageResponse, PackageSortBy, PackageUploadModel, PlayerPackageResponse }
import com.arcusys.valamis.lesson.model.{ BaseManifest, LessonType }
import LessonType.LessonType
import org.joda.time.DateTime
trait PackageFacadeContract {
def getPackageGrade(valamisUserId: Int, packageId: Long): Option[PackageGrade]
private[facades] def getPackageType(packageId: Int): LessonType
private[facades] def updatePackageGrade(valamisUserId: Int, packageId: Int, grade: String, comment: String)
def getManifestActivities(packageId: Long): Seq[ManifestActivity]
def getPackagesCount(courseId: Int): Int
def getCompletedPackagesCount(courseId: Int, userId: Int): Int
def getPackage(packageId: Long): BaseManifest
def getPackagesByCourse(courseId: Int): Seq[BaseManifest]
def getTincanPackagesByCourse(courseId: Int, onlyVisible: Boolean = false): Seq[BaseManifest]
def exportAllPackages(courseID: Int): InputStream
def exportPackages(packagesIds: Seq[Long]): InputStream
def exportPackagesForMobile(packagesIds: Seq[Long]): InputStream
def importPackages(file: File, courseID: Int, userId: Long): Unit
def getForPlayerConfig(playerID: String, companyID: Long, groupId: Long, user: LUser): Seq[PackageResponse]
def getAllPackages(packageType: Option[String], courseID: Option[Int], scope: String, filter: String, tagId: Option[Long],
isSortDirectionAsc: Boolean, skip: Int, count: Int, page: Int,
companyID: Long, user: LUser): CollectionResponse[PackageResponse]
def getForPlayer(statementApi: StatementApi, companyID: Long, courseID: Int, pageID: String, filter: String, tagId: Option[Long],
playerID: String, user: LUser, isSortDirectionAsc: Boolean, sortBy: PackageSortBy.PackageSortBy,
page: Int, count: Int): CollectionResponse[PlayerPackageResponse]
def getByScopeType(courseID: Int, scope: String, pageID: Option[String], playerID: Option[String],
companyID: Long, courseIds: List[Int], user: LUser): Seq[PackageResponse]
def updatePackage(packageId: Long, tags: Seq[String], passingLimit: Int, rerunInterval: Int, rerunIntervalType: PeriodType,
beginDate: Option[DateTime], endDate: Option[DateTime], scope: String, visibility: Boolean, isDefault: Boolean,
companyId: Long, courseId: Int, title: String, description: String, packageType: String, pageID: Option[String],
playerID: Option[String], user: LUser): PackageResponse
def updatePackageLogo(packageId: Long, packageType: String, packageLogo: Option[String])
def uploadPackages(packages: Seq[PackageUploadModel], scope: String, courseId: Int, pageID: Option[String], playerID: Option[String])
def updatePackageScopeVisibility(id: Long, scope: String, courseID: Int, visibility: Boolean, isDefault: Boolean, pageID: Option[String], playerID: Option[String], user: LUser): Unit
def addPackageToPlayer(playerID: String, packageID: Long)
def updatePlayerScope(scope: String, playerID: String)
def removePackage(packageId: Long, packageType: String): String
def removePackages(packageIds: Seq[Long]): String
}
| ViLPy/Valamis | learn-portlet/src/main/scala/com/arcusys/learn/facades/PackageFacadeContract.scala | Scala | lgpl-3.0 | 3,588 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.table.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.{TableTestBase, TableTestUtil}
import org.junit.Test
import java.sql.Timestamp
class TemporalTableJoinValidationTest extends TableTestBase {
val util: TableTestUtil = streamTestUtil()
val orders: Table = util.addDataStream[(Long, String, Timestamp)](
"Orders", 'o_amount, 'o_currency, 'o_rowtime.rowtime)
val ordersProctime: Table = util.addDataStream[(Long, String)](
"OrdersProctime", 'o_amount, 'o_currency, 'o_rowtime.proctime)
val ordersWithoutTimeAttribute: Table = util.addDataStream[(Long, String, Timestamp)](
"OrdersWithoutTimeAttribute", 'o_amount, 'o_currency, 'o_rowtime)
val ratesHistory: Table = util.addDataStream[(String, Int, Timestamp)](
"RatesHistory", 'currency, 'rate, 'rowtime.rowtime)
val ratesHistoryWithoutTimeAttribute: Table = util.addDataStream[(String, Int, Timestamp)](
"ratesHistoryWithoutTimeAttribute", 'currency, 'rate, 'rowtime)
@Test
def testInvalidFieldReference(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [foobar]")
ratesHistory.createTemporalTableFunction('rowtime, 'foobar)
}
@Test
def testInvalidStringFieldReference(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [foobar]")
ratesHistory.createTemporalTableFunction($"rowtime", $"foobar")
}
@Test
def testNonTimeIndicatorOnRightSide(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIMESTAMP(3)] used to create TemporalTableFunction")
val rates = ratesHistoryWithoutTimeAttribute.createTemporalTableFunction('rowtime, 'currency)
val result = orders
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select($"o_amount" * $"rate").as("rate")
util.verifyExplain(result)
}
@Test
def testNonTimeIndicatorOnLeftSide(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIMESTAMP(3)] passed as the argument to TemporalTableFunction")
val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
val result = ordersWithoutTimeAttribute
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select($"o_amount" * $"rate").as("rate")
util.verifyExplain(result)
}
@Test
def testMixedTimeIndicators(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIME ATTRIBUTE(PROCTIME)] passed as the argument " +
"to TemporalTableFunction")
val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
val result = ordersProctime
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select($"o_amount" * $"rate").as("rate")
util.verifyExplain(result)
}
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TemporalTableJoinValidationTest.scala | Scala | apache-2.0 | 3,981 |
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.redshift
import java.io.{DataOutputStream, File, FileOutputStream}
import scala.language.implicitConversions
import com.databricks.spark.redshift.RedshiftInputFormat._
import com.google.common.io.Files
import org.apache.hadoop.conf.Configuration
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.apache.spark.SparkContext
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SQLContext}
class RedshiftInputFormatSuite extends FunSuite with BeforeAndAfterAll {
import RedshiftInputFormatSuite._
private var sc: SparkContext = _
override def beforeAll(): Unit = {
super.beforeAll()
sc = new SparkContext("local", this.getClass.getName)
}
override def afterAll(): Unit = {
sc.stop()
super.afterAll()
}
private def writeToFile(contents: String, file: File): Unit = {
val bytes = contents.getBytes
val out = new DataOutputStream(new FileOutputStream(file))
out.write(bytes, 0, bytes.length)
out.close()
}
private def escape(records: Set[Seq[String]], delimiter: Char): String = {
require(delimiter != '\\' && delimiter != '\n')
records.map { r =>
r.map { f =>
f.replace("\\", "\\\\")
.replace("\n", "\\\n")
.replace(delimiter, "\\" + delimiter)
}.mkString(delimiter)
}.mkString("", "\n", "\n")
}
private final val KEY_BLOCK_SIZE = "fs.local.block.size"
private final val TAB = '\t'
private val records = Set(
Seq("a\n", DEFAULT_DELIMITER + "b\\"),
Seq("c", TAB + "d"),
Seq("\ne", "\\\\f"))
private def withTempDir(func: File => Unit): Unit = {
val dir = Files.createTempDir()
dir.deleteOnExit()
func(dir)
}
test("default delimiter") {
withTempDir { dir =>
val escaped = escape(records, DEFAULT_DELIMITER)
writeToFile(escaped, new File(dir, "part-00000"))
val conf = new Configuration
conf.setLong(KEY_BLOCK_SIZE, 4)
val rdd = sc.newAPIHadoopFile(dir.toString, classOf[RedshiftInputFormat],
classOf[java.lang.Long], classOf[Array[String]], conf)
// TODO: Check this assertion - fails on Travis only, no idea what, or what it's for
// assert(rdd.partitions.size > records.size) // so there exist at least one empty partition
println("############" + rdd.values.map(_.toSeq).glom().map(_.toSeq).collect().toSeq)
val actual = rdd.values.map(_.toSeq).collect()
assert(actual.size === records.size)
assert(actual.toSet === records)
}
}
test("customized delimiter") {
withTempDir { dir =>
val escaped = escape(records, TAB)
writeToFile(escaped, new File(dir, "part-00000"))
val conf = new Configuration
conf.setLong(KEY_BLOCK_SIZE, 4)
conf.set(KEY_DELIMITER, TAB)
val rdd = sc.newAPIHadoopFile(dir.toString, classOf[RedshiftInputFormat],
classOf[java.lang.Long], classOf[Array[String]], conf)
// TODO: Check this assertion - fails on Travis only, no idea what, or what it's for
// assert(rdd.partitions.size > records.size) // so there exist at least one empty partitions
val actual = rdd.values.map(_.toSeq).collect()
assert(actual.size === records.size)
assert(actual.toSet === records)
}
}
test("schema parser") {
withTempDir { dir =>
val testRecords = Set(
Seq("a\n", "TX", 1, 1.0, 1000L, 200000000000L),
Seq("b", "CA", 2, 2.0, 2000L, 1231412314L))
val escaped = escape(testRecords.map(_.map(_.toString)), DEFAULT_DELIMITER)
writeToFile(escaped, new File(dir, "part-00000"))
val conf = new Configuration
conf.setLong(KEY_BLOCK_SIZE, 4)
val sqlContext = new SQLContext(sc)
val srdd = sqlContext.redshiftFile(
dir.toString,
"name varchar(10) state text id integer score float big_score numeric(4, 0) some_long bigint")
val expectedSchema = StructType(Seq(
StructField("name", StringType, nullable = true),
StructField("state", StringType, nullable = true),
StructField("id", IntegerType, nullable = true),
StructField("score", DoubleType, nullable = true),
StructField("big_score", LongType, nullable = true),
StructField("some_long", LongType, nullable = true)))
assert(srdd.schema === expectedSchema)
val parsed = srdd.map {
case Row(name: String, state: String, id: Int, score: Double,
bigScore: Long, someLong: Long) =>
Seq(name, state, id, score, bigScore, someLong)
}.collect().toSet
assert(parsed === testRecords)
}
}
}
object RedshiftInputFormatSuite {
implicit def charToString(c: Char): String = c.toString
}
| methodmill/spark-redshift | src/test/scala/com/databricks/spark/redshift/RedshiftInputFormatSuite.scala | Scala | apache-2.0 | 5,304 |