code (string, length 5–1M) | repo_name (string, length 5–109) | path (string, length 6–208) | language (1 class) | license (15 classes) | size (int64, 5–1M)
---|---|---|---|---|---|
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.ems.app.util
import java.{util => ju}
import com.google.gson.Gson
import org.beangle.commons.lang.Strings
import scala.collection.mutable
import scala.jdk.javaapi.CollectionConverters.asScala
object JSON {
private val gson = new Gson()
def parseValue[T](json: String, clazz: Class[T]): T = {
gson.fromJson(json, clazz)
}
def parseObj(json: String): collection.Map[String, Any] = {
if (Strings.isEmpty(json)) {
Map.empty
} else {
val map = gson.fromJson(json, classOf[java.util.Map[_, _]])
convert(map).asInstanceOf[collection.Map[String, Any]]
}
}
def parseSeq(json: String): collection.Seq[Any] = {
if (Strings.isEmpty(json)) {
List.empty
} else {
val list = gson.fromJson(json, classOf[java.util.List[_]])
convert(list).asInstanceOf[collection.Seq[Any]]
}
}
def convert(value: Any): Any = {
value match {
case b: ju.Map[_, _] =>
val iter = b.entrySet().iterator
val result = new mutable.HashMap[Any, Any]
while (iter.hasNext) {
val one = iter.next
result.put(one.getKey, convert(one.getValue))
}
result
case l: ju.Collection[_] => asScala(l).map(convert)
case null => null
case _ =>
if (value.getClass.isArray) {
value.asInstanceOf[Array[_]].map(convert).toList
} else {
value
}
}
}
}
| beangle/ems | app/src/main/scala/org/beangle/ems/app/util/JSON.scala | Scala | lgpl-3.0 | 2,154 |
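A short usage sketch for the JSON helper above. The payloads are made up; note that Gson surfaces JSON numbers as java.lang.Double, which convert leaves untouched.
object JSONSketch {
  def main(args: Array[String]): Unit = {
    // parseObj turns the Gson java.util.Map into a Scala Map recursively
    val obj = JSON.parseObj("""{"name":"x","tags":["a","b"]}""")
    println(obj("name"))   // x
    println(obj("tags"))   // a Scala collection ("a", "b") after convert
    // parseSeq does the same for top-level arrays; numbers arrive as java.lang.Double
    val seq = JSON.parseSeq("""[1, 2, 3]""")
    println(seq.map(_.asInstanceOf[Double].toInt).sum)  // 6
  }
}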
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.api.controllers.common
import scalaz._
import play.api.data.validation.ValidationError
import play.api.libs.json._
import play.api.mvc.{Controller, Request}
/** JSON consuming controller. */
trait JsonController { this: Controller =>
def validJsonBody[Payload: Reads](request: Request[JsValue]): ActionValidation[Payload] = {
import Scalaz._
Json.fromJson[Payload](request.body).fold(
invalid = errors => formatErrors(errors).failure,
valid = payload => payload.success
)
}
private def formatErrors(errorsByPath: Seq[(JsPath, Seq[ValidationError])]) = {
val formattedErrors = errorsByPath.map {
case (path, errors) => (path.toString(), errors.map(_.message))
}
BadRequest(Json.toJson(Map(formattedErrors: _*)))
}
}
| telefonicaid/fiware-cosmos-platform | cosmos-api/app/es/tid/cosmos/api/controllers/common/JsonController.scala | Scala | apache-2.0 | 1,433 |
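A minimal sketch of the trait in use from a Play controller. PersonPayload, its Reads, and the create action are made-up names, and ActionValidation is assumed (from the trait's usage) to be a scalaz Validation of result-or-payload; the failure side already carries the BadRequest built by formatErrors, so the fold just returns it.
import play.api.libs.json.{Json, Reads}
import play.api.mvc.{Action, Controller}

object PersonsResource extends Controller with JsonController {
  case class PersonPayload(name: String)
  implicit val payloadReads: Reads[PersonPayload] = Json.reads[PersonPayload]

  def create = Action(parse.json) { request =>
    validJsonBody[PersonPayload](request).fold(
      badRequest => badRequest,                 // failure side is already a BadRequest result
      payload => Ok(s"created ${payload.name}")
    )
  }
}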
package io.youi.http.content
import java.io.File
import io.youi.net.ContentType
import io.youi.stream.IO
case class FileContent(file: File, contentType: ContentType, lastModifiedOverride: Option[Long] = None) extends Content {
assert(file.isFile, s"Cannot send back ${file.getAbsolutePath} as it is a directory or does not exist!")
override def length: Long = file.length()
override def withContentType(contentType: ContentType): Content = copy(contentType = contentType)
override def withLastModified(lastModified: Long): Content = copy(lastModifiedOverride = Some(lastModified))
override def lastModified: Long = lastModifiedOverride.getOrElse(file.lastModified())
override def toString: String = s"FileContent(file: ${file.getAbsolutePath}, contentType: $contentType)"
override def asString: String = IO.stream(file, new StringBuilder).toString
}
| outr/youi | core/jvm/src/main/scala/io/youi/http/content/FileContent.scala | Scala | mit | 871 |
/*
* Original implementation (C) 2009-2011 Debasish Ghosh
* Adapted and extended in 2011 by Mathias Doenitz
* Adapted and extended in 2016 by Eugene Yokota
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sjsonnew
/**
* Provides all the predefined JsonFormats.
*/
trait BasicJsonProtocol
extends PrimitiveFormats
with StandardFormats
with TupleFormats
with CollectionFormats
with AdditionalFormats
with UnionFormats
with FlatUnionFormats
with IsoFormats
with JavaPrimitiveFormats
with JavaExtraFormats
with CalendarFormats
with ImplicitHashWriters
with CaseClassFormats
with ThrowableFormats
object BasicJsonProtocol extends BasicJsonProtocol
| eed3si9n/sjson-new | core/src/main/scala/sjsonnew/BasicJsonProtocol.scala | Scala | apache-2.0 | 1,281 |
package br.edu.ifrn.potigol.editor
import org.antlr.v4.runtime.tree.TerminalNode
import org.antlr.v4.runtime.tree.ParseTreeListener
import org.antlr.v4.runtime.Token
import org.antlr.v4.runtime.tree.ErrorNode
import org.antlr.v4.runtime.ParserRuleContext
import br.edu.ifrn.potigol.parser.potigolBaseListener
class HighLight extends ParseTreeListener {
var tokens = List[Token]()
override def visitErrorNode(node: ErrorNode) {}
override def exitEveryRule(ctx: ParserRuleContext) {}
override def enterEveryRule(ctx: ParserRuleContext) {}
override def visitTerminal(node: TerminalNode) {
tokens ::= node.getSymbol()
}
def getSaida() = tokens
}
| potigol/EditorPotigol | src/main/scala/br/edu/ifrn/potigol/editor/HighLight.scala | Scala | gpl-2.0 | 686 |
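A hedged usage sketch: walking a parse tree with the listener above to collect tokens. The potigolLexer/potigolParser class names and the prog entry rule are assumptions about the generated grammar, and CharStreams.fromString assumes ANTLR 4.7+; note the tokens come back in reverse encounter order because visitTerminal prepends with ::=.
import org.antlr.v4.runtime.{CharStreams, CommonTokenStream}
import org.antlr.v4.runtime.tree.ParseTreeWalker
import br.edu.ifrn.potigol.parser.{potigolLexer, potigolParser}

object HighLightSketch {
  def tokensOf(source: String) = {
    val lexer = new potigolLexer(CharStreams.fromString(source))
    val parser = new potigolParser(new CommonTokenStream(lexer))
    val highlight = new HighLight
    ParseTreeWalker.DEFAULT.walk(highlight, parser.prog())  // "prog" rule name is assumed
    highlight.getSaida()  // tokens, most recently visited first
  }
}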
package org.ergoplatform.validation
/** Interface implemented by objects capable of checking soft-fork conditions. */
trait SoftForkChecker {
/** Check soft-fork condition.
* @param vs ValidationSettings actualized from blockchain extension sections
* @param ruleId id of the rule which raised ValidationException
* @param status status of the rule in the blockchain (agreed upon via voting)
* @param args arguments of the Validation rule with which the rule raised the exception
* @return true if `args` and `status` can be interpreted as valid soft-fork condition.
*/
def isSoftFork(vs: SigmaValidationSettings, ruleId: Short, status: RuleStatus, args: Seq[Any]): Boolean = false
}
/** Checks that the failed validation rule has ReplacedRule status in block extensions section.
* This means the rule given by `ruleId` is not used in newer versions of the protocol.
* Instead it has been replaced by the new rule given by ReplacedRule status.
*/
trait SoftForkWhenReplaced extends SoftForkChecker {
override def isSoftFork(vs: SigmaValidationSettings,
ruleId: Short,
status: RuleStatus,
args: Seq[Any]): Boolean = (status, args) match {
case (ReplacedRule(_), _) => true
case _ => false
}
}
/** Checks that the unknown `code` is however present in the ChangedRule new value
* stored in block extensions section. This is interpreted as soft-fork condition,
* i.e. the unknown `code` is not arbitrary, but explicitly added to the blockchain
* configuration and implemented in newer versions of the protocol.
*/
trait SoftForkWhenCodeAdded extends SoftForkChecker {
override def isSoftFork(vs: SigmaValidationSettings,
ruleId: Short,
status: RuleStatus,
args: Seq[Any]): Boolean = (status, args) match {
case (ChangedRule(newValue), Seq(code: Byte)) => newValue.contains(code)
case _ => false
}
}
| ScorexFoundation/sigmastate-interpreter | sigmastate/src/main/scala/org/ergoplatform/validation/SoftForkChecker.scala | Scala | mit | 1,921 |
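Both traits above override the same method, so mixing the two together would keep only the later override rather than OR-ing the conditions. A checker accepting either condition has to spell the match out; an illustrative sketch (same package, reusing the statuses the traits above match on):
/** Illustrative only: treats either a replaced rule or an explicitly added code
  * as a valid soft-fork condition. */
trait SoftForkWhenReplacedOrCodeAdded extends SoftForkChecker {
  override def isSoftFork(vs: SigmaValidationSettings,
                          ruleId: Short,
                          status: RuleStatus,
                          args: Seq[Any]): Boolean = (status, args) match {
    case (ReplacedRule(_), _) => true
    case (ChangedRule(newValue), Seq(code: Byte)) => newValue.contains(code)
    case _ => false
  }
}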
package com.aughma.dataflow
trait InputPort extends Port {
def Retrieve: String
}
| fahdrafi/Aughma-Dataflow-Service | src/com/aughma/dataflow/InputPort.scala | Scala | apache-2.0 | 84 |
package models
import anorm._
import anorm.SqlParser._
import play.api.db._
import play.api.Play.current
case class Student(
id: Pk[Long] = NotAssigned,
name: String,
department: Long
)
case class Department(
id: Pk[Long] = NotAssigned,
name: String,
imageSrc: String,
code: String
)
object Student {
val simple = {
get[Pk[Long]]("student.id") ~
get[String]("student.name") ~
get[Long]("student.department") map {
case id ~ name ~ department => Student(id, name, department)
}
}
val withDepartment = Student.simple ~ Department.simple map {
case student ~ department => (student, department)
}
def findById(id: Long): Option[Student] = {
DB.withConnection(implicit connection =>
SQL("select * from student where id = {id}")
.on('id -> id)
.as(Student.simple.singleOpt)
)
}
def list() = {
DB.withConnection {
implicit connection =>
SQL(
"""
select * from student
order by student.name
""").as(Student.simple *)
}
}
def listWithDepartment: Seq[(Student, Department)] = {
DB.withConnection {
implicit connection =>
SQL(
"""
select * from student
left join department on student.department = department.id
""").as(Student.withDepartment *)
}
}
def update(id: Long, student: Student) = {
DB.withConnection {
implicit connection =>
SQL(
"""
update student
set name = {name}, department = {department}
where id = {id}
""")
.on(
'id -> id,
'name -> student.name,
'department -> student.department
).executeUpdate()
}
}
def insert(student: Student) = {
DB.withConnection {
implicit connection =>
SQL(
"""
insert into student (name, department)
values ({name}, {department})
""")
.on(
'name -> student.name,
'department -> student.department
).executeUpdate() != 0
}
}
def delete(id: Long) = {
DB.withConnection {
implicit connection =>
SQL("delete from student where id = {id}").on('id -> id).executeUpdate() != 0
}
}
}
object Department {
val simple = get[Pk[Long]]("department.id") ~
get[String]("department.name") ~
get[String]("department.imageSrc") ~
get[String]("department.code") map {
case id ~ name ~ imageSrc ~ code => Department(id, name, imageSrc, code)
}
def findById(id: Long): Option[Department] = {
DB.withConnection(implicit connection =>
SQL("select * from department where id = {id}")
.on('id -> id)
.as(Department.simple.singleOpt)
)
}
def list() = {
DB.withConnection {
implicit connection =>
SQL(
"""
select * from department
order by name
""").as(Department.simple *)
}
}
def options() = {
DB.withConnection {
implicit connection =>
SQL(
"""
select * from department
order by name
""")
.as(Department.simple *)
.map(c => c.id.toString -> c.name)
}
}
def update(id: Long, department: Department) = {
DB.withConnection {
implicit connection =>
try {
SQL(
"""
update department
set name = {name}, imageSrc = {imageSrc}, code = {code}
where id = {id}
""")
.on(
'id -> id,
'name -> department.name,
'imageSrc -> department.imageSrc,
'code -> department.code
).executeUpdate() != 0
}
catch {
case e: Throwable => false
}
}
}
def insert(department: Department) = {
DB.withConnection {
implicit connection =>
try {
SQL(
"""
insert into department (name, imageSrc, code)
values ({name}, {imageSrc}, {code})
""")
.
on(
'name -> department.name,
'imageSrc -> department.imageSrc,
'code -> department.code
).executeUpdate() != 0
}
catch {
case e: Throwable => false
}
}
}
def delete(id: Long) = {
DB.withConnection {
implicit connection =>
try {
SQL("delete from student where department = {id}").on('id -> id).executeUpdate()
SQL("delete from department where id = {id}").on('id -> id).executeUpdate()
true
}
catch {
case e: Throwable => false
}
}
}
}
| sayon/simple-rest-app | app/models/Models.scala | Scala | mit | 6,467 |
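A brief usage sketch of the anorm parsers and helpers above from application code. Names, values, and the department id are made up; anorm's Pk/NotAssigned defaulting means ids are assigned by the database.
object ModelsSketch {
  def demo(): Unit = {
    // insert returns true when the statement affected at least one row
    Department.insert(Department(name = "Computer Science", imageSrc = "/img/cs.png", code = "CS"))
    Student.insert(Student(name = "Ada", department = 1L)) // department id 1 is made up
    // withDepartment combines both row parsers, so each row yields a (Student, Department) pair
    Student.listWithDepartment.foreach { case (student, dept) =>
      println(s"${student.name} -> ${dept.name}")
    }
  }
}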
package org.jetbrains.plugins.scala.lang.psi.impl.statements
import com.intellij.lang.ASTNode
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi._
import com.intellij.psi.scope._
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.tree.IElementType
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.stubs.ScFunctionStub
import org.jetbrains.plugins.scala.lang.psi.types.{Any, ScType}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypeResult, TypingContext}
/**
* @author Jason Zaugg
*/
class ScMacroDefinitionImpl private (stub: StubElement[ScFunction], nodeType: IElementType, node: ASTNode)
extends ScFunctionImpl(stub, nodeType, node) with ScMacroDefinition {
def this(node: ASTNode) = {this(null, null, node)}
def this(stub: ScFunctionStub) = {this(stub, ScalaElementTypes.MACRO_DEFINITION, null)}
override def processDeclarations(processor: PsiScopeProcessor,
state: ResolveState,
lastParent: PsiElement,
place: PsiElement): Boolean = {
//process function's parameters for dependent method types, and process type parameters
if (!super[ScFunctionImpl].processDeclarations(processor, state, lastParent, place)) return false
//do not process parameters for default parameters, only for function body
//processing parameters for default parameters in ScParameters
val parameterIncludingSynthetic: Seq[ScParameter] = effectiveParameterClauses.flatMap(_.parameters)
if (getStub == null) {
body match {
case Some(x)
if lastParent != null &&
(!needCheckProcessingDeclarationsForBody ||
x.startOffsetInParent == lastParent.startOffsetInParent) =>
for (p <- parameterIncludingSynthetic) {
ProgressManager.checkCanceled()
if (!processor.execute(p, state)) return false
}
case _ =>
}
} else {
if (lastParent != null && lastParent.getContext != lastParent.getParent) {
for (p <- parameterIncludingSynthetic) {
ProgressManager.checkCanceled()
if (!processor.execute(p, state)) return false
}
}
}
true
}
protected def needCheckProcessingDeclarationsForBody = true
override def toString: String = "ScMacroDefinition: " + name
def returnTypeInner: TypeResult[ScType] = returnTypeElement match {
case None => Success(doGetType(), Some(this)) // TODO look up type from the macro impl.
case Some(rte: ScTypeElement) => rte.getType(TypingContext.empty)
}
def body: Option[ScExpression] = {
val stub = getStub
if (stub != null) stub.asInstanceOf[ScFunctionStub].getBodyExpression else findChild(classOf[ScExpression])
}
override def hasAssign: Boolean = true
override def accept(visitor: ScalaElementVisitor) {
visitor.visitMacroDefinition(this)
}
override def getType(ctx: TypingContext): TypeResult[ScType] = {
super.getType(ctx)
}
def doGetType() = {
name match {
case "doMacro" =>
ScalaPsiElementFactory.createTypeElementFromText("(Int, String)", getManager).getType().get
case _ => Any
}
}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case s: ScalaElementVisitor => s.visitMacroDefinition(this)
case _ => super.accept(visitor)
}
}
}
| JetBrains/intellij-scala-historical | src/org/jetbrains/plugins/scala/lang/psi/impl/statements/ScMacroDefinitionImpl.scala | Scala | apache-2.0 | 3,953 |
package teststep
package util
package csv
import org.scalatest.FreeSpec
class CsvParserSpec extends FreeSpec {
"Given a CsvParser is created with MapOutput" - {
val csvParser = new CsvParser with MapOutput { val delimiter = "," }
"When parse is passed a empty String Iterator" - {
"Then return a None" in {
assertResult(None) {
csvParser.parse(Iterator[String]())
}
}
}
"When parse is passed a non-empty String Iterator" - {
"And the Srings don't contain a word before the 1st delimiter" - {
"Then return a None" in {
assertResult(None) {
csvParser.parse(Iterator(",foo", " , bar"))
}
}
}
"And some Strings contain values before the 1st delimiter" - {
"Then return a Some with a Map" - {
"And the words separated by the delimiter will be key and value" in {
assertResult(Some(Map("foo" -> "foo", "bar" -> "bar"))) {
csvParser.parse(Iterator("foo,foo", " bar , bar "))
}
}
"And lines without words before the delimiter are left out" in {
assertResult(Some(Map("foo" -> "foo", "bar" -> "bar"))) {
csvParser.parse(Iterator("foo,foo", ",foobar", "bar,bar"))
}
}
}
}
}
}
}
| ssfc/test_step | src/test/scala/teststep/util/csv/CsvParserSpec.scala | Scala | apache-2.0 | 1,342 |
package techex.cases
import org.http4s.dsl._
import org.joda.time.Instant
import techex._
import techex.data._
import techex.domain._
import scalaz.Scalaz._
import scalaz._
import scalaz.stream.async.mutable.Topic
object startSession {
def restApi(topic: Topic[InputMessage]): WebHandler = {
case req@POST -> Root / "sessions" / "start" / sessionId => {
for {
exists <- Storage.run(State.gets(sch => sch.schedule.get(ScId(sessionId)).isDefined))
result <- if(exists) topic.publishOne(StartEntry(ScId(sessionId),Instant.now())) *> Ok() else NotFound()
} yield result
}
/* To avoid CORS preflight issues, the same action is also exposed via GET */
case req@GET -> Root / "sessions" / "start" / sessionId => {
for {
exists <- Storage.run(State.gets(sch => sch.schedule.get(ScId(sessionId)).isDefined))
result <- if(exists) topic.publishOne(StartEntry(ScId(sessionId),Instant.now())) *> Ok() else NotFound()
} yield result
}
}
}
| kantega/tech-ex-2015 | backend/src/main/scala/techex/cases/startSession.scala | Scala | mit | 958 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
import org.junit.{Assert, Test}
import kafka.utils.MockTime
import org.apache.kafka.clients.{ClientRequest, ClientResponse, NetworkClient, RequestCompletionHandler}
import org.apache.kafka.common.Node
import org.apache.kafka.common.protocol.ApiKeys
import org.apache.kafka.common.requests.AbstractRequest
import org.apache.kafka.common.utils.Utils
import org.easymock.EasyMock
import scala.collection.mutable
class InterBrokerSendThreadTest {
private val time = new MockTime()
private val networkClient = EasyMock.createMock(classOf[NetworkClient])
private val completionHandler = new StubCompletionHandler
@Test
def shouldNotSendAnythingWhenNoRequests(): Unit = {
val sendThread = new InterBrokerSendThread("name", networkClient, time) {
override def generateRequests() = mutable.Iterable.empty
}
// poll is always called but there should be no further invocations on NetworkClient
EasyMock.expect(networkClient.poll(EasyMock.anyLong(), EasyMock.anyLong()))
.andReturn(Utils.mkList())
EasyMock.replay(networkClient)
sendThread.doWork()
EasyMock.verify(networkClient)
}
@Test
def shouldCreateClientRequestAndSendWhenNodeIsReady(): Unit = {
val request = new StubRequestBuilder()
val node = new Node(1, "", 8080)
val handler = RequestAndCompletionHandler(node, request, completionHandler)
val sendThread = new InterBrokerSendThread("name", networkClient, time) {
override def generateRequests() = List[RequestAndCompletionHandler](handler)
}
val clientRequest = new ClientRequest("dest", request, 0, "1", 0, true, handler.handler)
EasyMock.expect(networkClient.newClientRequest(EasyMock.eq("1"),
EasyMock.same(handler.request),
EasyMock.anyLong(),
EasyMock.eq(true),
EasyMock.same(handler.handler)))
.andReturn(clientRequest)
EasyMock.expect(networkClient.ready(node, time.milliseconds()))
.andReturn(true)
EasyMock.expect(networkClient.send(clientRequest, time.milliseconds()))
EasyMock.expect(networkClient.poll(EasyMock.anyLong(), EasyMock.anyLong()))
.andReturn(Utils.mkList())
EasyMock.replay(networkClient)
sendThread.doWork()
EasyMock.verify(networkClient)
}
@Test
def shouldCallCompletionHandlerWithDisconnectedResponseWhenNodeNotReady(): Unit = {
val request = new StubRequestBuilder
val node = new Node(1, "", 8080)
val requestAndCompletionHandler = RequestAndCompletionHandler(node, request, completionHandler)
val sendThread = new InterBrokerSendThread("name", networkClient, time) {
override def generateRequests() = List[RequestAndCompletionHandler](requestAndCompletionHandler)
}
val clientRequest = new ClientRequest("dest", request, 0, "1", 0, true, requestAndCompletionHandler.handler)
EasyMock.expect(networkClient.newClientRequest(EasyMock.eq("1"),
EasyMock.same(requestAndCompletionHandler.request),
EasyMock.anyLong(),
EasyMock.eq(true),
EasyMock.same(requestAndCompletionHandler.handler)))
.andReturn(clientRequest)
EasyMock.expect(networkClient.ready(node, time.milliseconds()))
.andReturn(false)
EasyMock.expect(networkClient.connectionDelay(EasyMock.anyObject(), EasyMock.anyLong()))
.andReturn(0)
EasyMock.expect(networkClient.poll(EasyMock.anyLong(), EasyMock.anyLong()))
.andReturn(Utils.mkList())
EasyMock.replay(networkClient)
sendThread.doWork()
EasyMock.verify(networkClient)
Assert.assertTrue(completionHandler.response.wasDisconnected())
}
private class StubRequestBuilder extends AbstractRequest.Builder(ApiKeys.END_TXN) {
override def build(version: Short): Nothing = ???
}
private class StubCompletionHandler extends RequestCompletionHandler {
var response: ClientResponse = _
override def onComplete(response: ClientResponse): Unit = {
this.response = response
}
}
}
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/kafka/common/InterBrokerSendThreadTest.scala | Scala | apache-2.0 | 4,752 |
/*
* MOIS: VarCalc Process Helper Trait
* Copyright (C) 2014 University of Edinburgh School of Informatics
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.ac.ed.inf.mois
import scala.collection.mutable
trait VarCalc extends Process {
type Func = () => Unit
private val funcs = mutable.ArrayBuffer.empty[Func]
protected class Calc[T](val v: Var[T]) {
def := (e: => T): Unit = {
funcs += (() => v := e)
}
}
@inline final def calc[T](v: Var[T]) = new Calc(v)
protected class CalcVars extends StepHandler {
def init(t: Double, proc: Process) {
handleStep(t, proc)
}
def handleStep(t: Double, proc: Process) {
for (f <- funcs) {
f()
}
}
}
addStepHandler(new CalcVars)
}
| edinburgh-rbm/mois | src/main/scala/uk/ac/ed/inf/mois/VarCalc.scala | Scala | gpl-3.0 | 1,375 |
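VarCalc registers each `calc(v) := expr` assignment as a thunk and replays all of them at every step through the CalcVars handler. A self-contained sketch of that deferred-update pattern, using simplified stand-ins for the mois Var and step-handler types (not the real mois API):
object VarCalcSketch {
  // simplified stand-in for the mois Var
  final class Var[T](var value: T) { def :=(e: T): Unit = value = e }

  private val funcs = scala.collection.mutable.ArrayBuffer.empty[() => Unit]
  class Calc[T](val v: Var[T]) { def :=(e: => T): Unit = funcs += (() => v := e) }
  def calc[T](v: Var[T]) = new Calc(v)

  def main(args: Array[String]): Unit = {
    val x = new Var(2.0)
    val y = new Var(0.0)
    calc(y) := 2 * x.value + 1   // registered as a thunk, not evaluated yet
    funcs.foreach(_())           // what CalcVars.handleStep does on each step
    println(y.value)             // 5.0
  }
}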
/**
* Digi-Lib-Mesh - distributed mesh library for Digi components
*
* Copyright (c) 2012-2013 Alexey Aksenov [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.digi.lib.mesh
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Buffer
import scala.collection.mutable.Publisher
import scala.collection.mutable.Subscriber
import scala.collection.mutable.SynchronizedBuffer
import org.digimead.digi.lib.DependencyInjection
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.log.api.Loggable
import org.digimead.digi.lib.mesh.Mesh.mesh2implementation
import org.digimead.digi.lib.mesh.hexapod.Hexapod
import com.escalatesoft.subcut.inject.BindingModule
import com.escalatesoft.subcut.inject.Injectable
import language.implicitConversions
/**
* Class Peer is the default implementation for the Peer singleton.
* It provides a global pool of peers. Peer is a subset of Mesh that contains only reachable Hexapods.
*/
class Peer(implicit val bindingModule: BindingModule) extends Injectable with Peer.Interface {
protected val pool = new ArrayBuffer[Hexapod] with SynchronizedBuffer[Hexapod]
/**
* Add hexapod to peer pool. Register hexapod in Mesh if necessary
*/
@log
def add(hexapod: Hexapod): Boolean = {
log.debug("add %s to peer pool".format(hexapod))
if (pool.contains(hexapod)) {
log.error("hexapod %s already registered".format(hexapod))
return false
}
if (Mesh(hexapod.uuid).isEmpty)
if (!Mesh.register(hexapod)) {
log.error("unable registers %s in mesh".format(hexapod))
return false
}
pool += hexapod
log.___glance("PUBLISH")
publish(Peer.Event.Add(hexapod))
true
}
/**
* Remove hexapod from peer pool.
*/
@log
def remove(hexapod: Hexapod): Boolean = {
log.debug("remove %s to peer pool".format(hexapod))
if (!pool.contains(hexapod)) {
log.error("hexapod %s not registered".format(hexapod))
return false
}
pool -= hexapod
publish(Peer.Event.Remove(hexapod))
true
}
/**
*
* @log
* def get(transport: Option[Class[_ <: Endpoint]], direction: Endpoint.Direction*): Seq[Hexapod] = {
* val message = "search best peer" + (if (transport.nonEmpty || direction.nonEmpty) " for " else "")
* val messageTransport = transport.map("transport " + _.getName()).getOrElse("")
* val messageDirrection = (if (transport.nonEmpty) " and " else "") +
* (if (direction.nonEmpty) "direction %s".format(direction.mkString(" or ")) else "")
* log.debug(message + messageTransport + messageDirrection)
* var result = pool.toSeq
* transport.foreach(transport => result = result.filter(_.getEndpoints.exists(ep => {
* transport.isAssignableFrom(ep.getClass()) && (direction.isEmpty || direction.contains(ep.direction))
* })))
* result.take(5)
* }
*
*/
/**
* Clear peer pool
*/
@log
def clear() = pool.foreach(remove)
override def toString = "default peer pool implementation"
}
/**
* Singleton Peer contains the global registry of discovered Hexapods
*/
object Peer extends Loggable {
//assert(org.digimead.digi.lib.mesh.isReady, "Mesh not ready, please build it first")
type Pub = Publisher[Event]
type Sub = Subscriber[Event, Pub]
implicit def peer2implementation(p: Peer.type): Interface = p.inner
def inner() = DI.implementation
trait Interface extends Peer.Pub with Loggable {
protected val pool: Buffer[Hexapod]
def apply(): Iterable[Hexapod] = Peer.inner.pool.toIterable
/** add Hexapod to peer pool */
def add(hexapod: Hexapod): Boolean
/** remove Hexapod from peer pool */
def remove(hexapod: Hexapod): Boolean
/** get best hexapod */
//def get(transport: Option[Class[_ <: Endpoint]], direction: Endpoint.Direction*): Seq[Hexapod]
/** clear peer pool */
def clear()
override protected def publish(event: Peer.Event) = try {
super.publish(event)
} catch {
case e: Throwable =>
log.error(e.getMessage(), e)
}
}
sealed trait Event
object Event {
case class Add(hexapod: Hexapod) extends Event
case class Remove(hexapod: Hexapod) extends Event
}
/**
* Dependency injection routines
*/
private object DI extends DependencyInjection.PersistentInjectable {
/** Peer implementation */
lazy val implementation = inject[Interface]
}
}
| ezh/digi-lib-mesh | src/main/scala/org/digimead/digi/lib/mesh/Peer.scala | Scala | apache-2.0 | 4,919 |
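Since Peer.Interface extends a mutable Publisher of Peer.Event, callers can observe pool changes with a standard Subscriber. A hedged sketch (the watcher body is illustrative; Peer.inner exposes the injected implementation):
object PeerEventsSketch {
  def watch(): Unit = {
    val watcher = new Peer.Sub {
      def notify(pub: Peer.Pub, event: Peer.Event): Unit = event match {
        case Peer.Event.Add(hexapod)    => println(s"peer added: $hexapod")
        case Peer.Event.Remove(hexapod) => println(s"peer removed: $hexapod")
      }
    }
    Peer.inner.subscribe(watcher)  // subsequent add/remove calls will publish to the watcher
  }
}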
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import Suite.formatterForSuiteAborted
import Suite.formatterForSuiteCompleted
import Suite.formatterForSuiteStarting
import org.scalatest.funspec.AnyFunSpec
/*
java -Dorg.scalatest.BigSuite.size=5 -Dorg.scalatest.SuiteCompletedStatusReporter.max=100 -classpath scalatest-1.0-CLICKDEMO.jar:/usr/artima/scala/lib/scala-library.jar org.scalatest.tools.Runner -c4 -p "scalatest-1.0-CLICKDEMO-tests.jar" -oNCXEHLO -r org.scalatest.SuiteCompletedStatusReporter -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite
BigSuite.size determines how many suites will be in each BigSuite tree. I haven't taken time to figure out the function, but it looks like this:
size => number of suites in the tree
1 => 2
2 => 5
3 => 16
4 => 65
5 => 326
6 => 1957
7 => 13700
(each BigSuite of size n nests n BigSuites of size n - 1, so the count satisfies f(n) = n * f(n - 1) + 1 with f(0) = 1)
Each -s org.scalatest.BigSuite will create one BigSuite instance using the size specified by the property.
By saying -r org.scalatest.SuiteCompletedStatusReporter, you get a custom reporter that prints out a duration note to the standard output
for every <configurable number> of SuiteCompleted events it receives. It defaults to 10, and can be set via the
-Dorg.scalatest.SuiteCompletedStatusReporter.max=100 setting.
So the knobs we can turn are:
-cN N is the number of threads in the thread pool
-Dorg.scalatest.BigSuite.size=M, M determines the number of suites in the tree via some mysterious function
-s org.scalatest.BigSuite..., repeating this gets you more instances of these trees sized by M
-Dorg.scalatest.SuiteCompletedStatusReporter.max=X, where X is the number of SuiteCompleted events between duration notes
*/
protected[scalatest] class BigSuite(nestedSuiteCount: Option[Int], propMap: Map[String, String]) extends AnyFunSpec { thisSuite =>
override def nestedSuites: collection.immutable.IndexedSeq[Suite] = {
def makeList(remaining: Int, soFar: List[Suite], nestedCount: Int): List[Suite] = {
if (remaining == 0) soFar
else makeList(remaining - 1, (new BigSuite(Some(nestedCount - 1), propMap) :: soFar), nestedCount)
}
val nsList = nestedSuiteCount match {
case None =>
val sizeString = propMap.getOrElse("org.scalatest.BigSuite.size", "0")
val size =
try {
sizeString.toInt
}
catch {
case e: NumberFormatException => 0
}
makeList(size, Nil, size)
case Some(n) =>
if (n == 0) List()
else {
makeList(n, Nil, n)
}
}
Vector.empty ++ nsList
}
it("test number 1") {
val someFailures = propMap.getOrElse("org.scalatest.BigSuite.someFailures", "")
nestedSuiteCount match {
case Some(0) if someFailures == "true" => assert(1 + 1 === 3)
case _ => assert(1 + 1 === 2)
}
}
it("test number 2") {
assert(1 + 1 === 2)
}
it("test number 3") {
assert(1 + 1 === 2)
}
it("test number 4") {
assert(1 + 1 === 2)
}
it("test number 5") {
assert(1 + 1 === 2)
}
it("test number 6") {
assert(1 + 1 === 2)
}
it("test number 7") {
assert(1 + 1 === 2)
}
it("test number 8") {
assert(1 + 1 === 2)
}
it("test number 9") {
assert(1 + 1 === 2)
}
it("test number 10") {
assert(1 + 1 === 2)
}
it("test number 11") {
assert(1 + 1 === 2)
}
it("test number 12") {
assert(1 + 1 === 2)
}
it("test number 13") {
assert(1 + 1 === 2)
}
it("test number 14") {
assert(1 + 1 === 2)
}
it("test number 15") {
assert(1 + 1 === 2)
}
it("test number 16") {
assert(1 + 1 === 2)
}
it("test number 17") {
assert(1 + 1 === 2)
}
it("test number 18") {
assert(1 + 1 === 2)
}
it("test number 19") {
assert(1 + 1 === 2)
}
it("test number 20") {
assert(1 + 1 === 2)
}
it("test number 21") {
assert(1 + 1 === 2)
}
it("test number 22") {
assert(1 + 1 === 2)
}
it("test number 23") {
assert(1 + 1 === 2)
}
it("test number 24") {
assert(1 + 1 === 2)
}
it("test number 25") {
assert(1 + 1 === 2)
}
it("test number 26") {
assert(1 + 1 === 2)
}
it("test number 27") {
assert(1 + 1 === 2)
}
it("test number 28") {
assert(1 + 1 === 2)
}
it("test number 29") {
assert(1 + 1 === 2)
}
it("test number 30") {
assert(1 + 1 === 2)
}
it("test number 31") {
assert(1 + 1 === 2)
}
it("test number 32") {
assert(1 + 1 === 2)
}
it("test number 33") {
assert(1 + 1 === 2)
}
it("test number 34") {
assert(1 + 1 === 2)
}
it("test number 35") {
assert(1 + 1 === 2)
}
it("test number 36") {
assert(1 + 1 === 2)
}
it("test number 37") {
assert(1 + 1 === 2)
}
it("test number 38") {
assert(1 + 1 === 2)
}
it("test number 39") {
assert(1 + 1 === 2)
}
it("test number 40") {
assert(1 + 1 === 2)
}
it("test number 41") {
assert(1 + 1 === 2)
}
it("test number 42") {
assert(1 + 1 === 2)
}
it("test number 43") {
assert(1 + 1 === 2)
}
it("test number 44") {
assert(1 + 1 === 2)
}
it("test number 45") {
assert(1 + 1 === 2)
}
it("test number 46") {
assert(1 + 1 === 2)
}
it("test number 47") {
assert(1 + 1 === 2)
}
it("test number 48") {
assert(1 + 1 === 2)
}
it("test number 49") {
assert(1 + 1 === 2)
}
it("test number 50") {
assert(1 + 1 === 2)
}
it("test number 51") {
assert(1 + 1 === 2)
}
it("test number 52") {
assert(1 + 1 === 2)
}
it("test number 53") {
assert(1 + 1 === 2)
}
it("test number 54") {
assert(1 + 1 === 2)
}
it("test number 55") {
assert(1 + 1 === 2)
}
it("test number 56") {
assert(1 + 1 === 2)
}
it("test number 57") {
assert(1 + 1 === 2)
}
it("test number 58") {
assert(1 + 1 === 2)
}
it("test number 59") {
assert(1 + 1 === 2)
}
it("test number 60") {
assert(1 + 1 === 2)
}
it("test number 61") {
assert(1 + 1 === 2)
}
it("test number 62") {
assert(1 + 1 === 2)
}
it("test number 63") {
assert(1 + 1 === 2)
}
it("test number 64") {
assert(1 + 1 === 2)
}
it("test number 65") {
assert(1 + 1 === 2)
}
it("test number 66") {
assert(1 + 1 === 2)
}
it("test number 67") {
assert(1 + 1 === 2)
}
it("test number 68") {
assert(1 + 1 === 2)
}
it("test number 69") {
assert(1 + 1 === 2)
}
it("test number 70") {
assert(1 + 1 === 2)
}
it("test number 71") {
assert(1 + 1 === 2)
}
it("test number 72") {
assert(1 + 1 === 2)
}
it("test number 73") {
assert(1 + 1 === 2)
}
it("test number 74") {
assert(1 + 1 === 2)
}
it("test number 75") {
assert(1 + 1 === 2)
}
it("test number 76") {
assert(1 + 1 === 2)
}
it("test number 77") {
assert(1 + 1 === 2)
}
it("test number 78") {
assert(1 + 1 === 2)
}
it("test number 79") {
assert(1 + 1 === 2)
}
it("test number 80") {
assert(1 + 1 === 2)
}
it("test number 81") {
assert(1 + 1 === 2)
}
it("test number 82") {
assert(1 + 1 === 2)
}
it("test number 83") {
assert(1 + 1 === 2)
}
it("test number 84") {
assert(1 + 1 === 2)
}
it("test number 85") {
assert(1 + 1 === 2)
}
it("test number 86") {
assert(1 + 1 === 2)
}
it("test number 87") {
assert(1 + 1 === 2)
}
it("test number 88") {
assert(1 + 1 === 2)
}
it("test number 89") {
assert(1 + 1 === 2)
}
it("test number 90") {
assert(1 + 1 === 2)
}
it("test number 91") {
assert(1 + 1 === 2)
}
it("test number 92") {
assert(1 + 1 === 2)
}
it("test number 93") {
assert(1 + 1 === 2)
}
it("test number 94") {
assert(1 + 1 === 2)
}
it("test number 95") {
assert(1 + 1 === 2)
}
it("test number 96") {
assert(1 + 1 === 2)
}
it("test number 97") {
assert(1 + 1 === 2)
}
it("test number 98") {
assert(1 + 1 === 2)
}
it("test number 99") {
assert(1 + 1 === 2)
}
it("test number 100") {
assert(1 + 1 === 2)
}
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/BigSuite.scala | Scala | apache-2.0 | 8,783 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.conf
import java.io.InputStream
import com.typesafe.scalalogging.LazyLogging
import scala.util.control.NonFatal
import scala.util.{Failure, Try}
import scala.xml.XML
object ConfigLoader extends LazyLogging {
val GEOMESA_CONFIG_FILE_PROP = "geomesa.config.file"
val GEOMESA_CONFIG_FILE_NAME = "geomesa-site.xml"
private val EmbeddedConfigFile = "org/locationtech/geomesa/geomesa-site.xml.template"
lazy val Config: Map[String, (String, Boolean)] = {
val file = Option(System.getProperty(GEOMESA_CONFIG_FILE_PROP)).getOrElse(GEOMESA_CONFIG_FILE_NAME)
// load defaults first then overwrite with user values (if any)
loadConfig(EmbeddedConfigFile) ++ loadConfig(file)
}
def loadConfig(path: String): Map[String, (String, Boolean)] = {
val input = getClass.getClassLoader.getResourceAsStream(path)
val config: Map[String, (String, Boolean)] =
if (input == null) {
Map.empty
} else {
try {
logger.debug(s"Loading config: $path")
loadConfig(input, path)
} catch {
case NonFatal(e) =>
logger.warn(s"Error reading config file at: $path", e)
Map.empty
}
}
logger.trace(s"Loaded ${config.mkString(",")}")
config
}
def loadConfig(input: InputStream, path: String): Map[String, (String, Boolean)] = {
val xml = XML.load(input)
val properties = xml \\ "configuration" \\ "property"
properties.flatMap { prop =>
// Use try here so if we fail on a property the rest can still load
val pair = Try {
val key = (prop \ "name").text
val value = (prop \ "value").text
// don't overwrite properties, this gives commandline params preference
val isFinal: Boolean = (prop \ "final").text.toBoolean
key -> (value, isFinal)
}
pair match {
case Failure(e) => logger.warn(s"Unable to load property from: $path\n$prop", e)
case _ => // no-op
}
pair.toOption.filter { case (_, (v, _)) => v != null && v.nonEmpty }
}.toMap
}
}
| ronq/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/conf/ConfigLoader.scala | Scala | apache-2.0 | 2,575 |
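The parser above expects Hadoop-style configuration/property entries with name, value, and final children. A small sketch feeding one inline entry through the InputStream overload (property name and value are made up):
object ConfigLoaderSketch {
  def main(args: Array[String]): Unit = {
    val xml =
      """<configuration>
        |  <property>
        |    <name>geomesa.example.timeout</name>
        |    <value>30</value>
        |    <final>false</final>
        |  </property>
        |</configuration>""".stripMargin
    val in = new java.io.ByteArrayInputStream(xml.getBytes("UTF-8"))
    // yields Map("geomesa.example.timeout" -> ("30", false))
    println(ConfigLoader.loadConfig(in, "inline-sketch"))
  }
}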
package pl.touk.nussknacker.engine.management.sample.transformer
import org.apache.flink.api.common.state.ValueStateDescriptor
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.flink.util.Collector
import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.CustomNodeError
import pl.touk.nussknacker.engine.api.context.{OutputVar, ValidationContext}
import pl.touk.nussknacker.engine.api.context.transformation._
import pl.touk.nussknacker.engine.api.definition._
import pl.touk.nussknacker.engine.api.{Context, CustomStreamTransformer, LazyParameter, ValueWithContext}
import pl.touk.nussknacker.engine.flink.api.process.{FlinkCustomJoinTransformation, FlinkCustomNodeContext, FlinkLazyParameterFunctionHelper, OneParamLazyParameterFunction}
import pl.touk.nussknacker.engine.api.NodeId
/*
This is basically a left outer join - we join the events stream (left side of the join) with an additional data stream (e.g. users - right side of the join).
The implementation is simplistic - it doesn't wait for the additional data stream to initialize, etc. It's mainly meant to
show how JoinGenericNodeTransformation works
*/
object EnrichWithAdditionalDataTransformer extends CustomStreamTransformer with JoinGenericNodeTransformation[AnyRef] {
private val roleParameter = "role"
private val additionalDataValueParameter = "additional data value"
private val keyParameter = "key"
private val roleValues = List("Events", "Additional data")
override def canHaveManyInputs: Boolean = true
override type State = Nothing
override def contextTransformation(contexts: Map[String, ValidationContext],
dependencies: List[NodeDependencyValue])(implicit nodeId: NodeId): EnrichWithAdditionalDataTransformer.NodeTransformationDefinition = {
case TransformationStep(Nil, _) => NextParameters(List(
Parameter[String](roleParameter).copy(branchParam = true, editor = Some(FixedValuesParameterEditor(roleValues.map(role => FixedExpressionValue(s"'$role'", role))))),
Parameter[String](keyParameter).copy(branchParam = true, isLazyParameter = true)))
case TransformationStep((`roleParameter`, DefinedEagerBranchParameter(byBranch: Map[String, String]@unchecked, _)) :: (`keyParameter`, _) ::Nil, _) =>
val error = if (byBranch.values.toList.sorted != roleValues.sorted) List(CustomNodeError(s"Has to be exactly one Event and Additional data, got: ${byBranch.values.mkString(", ")}",
Some(roleParameter))) else Nil
NextParameters(
List(Parameter[Any](additionalDataValueParameter).copy(additionalVariables = right(byBranch).map(contexts).getOrElse(ValidationContext()).localVariables.mapValues(AdditionalVariableProvidedInRuntime(_)), isLazyParameter = true)), error
)
case TransformationStep((`roleParameter`, FailedToDefineParameter) :: (`keyParameter`, _) ::Nil, _) =>
FinalResults(ValidationContext())
case TransformationStep((`roleParameter`, DefinedEagerBranchParameter(byBranch: Map[String, String]@unchecked, _)) :: (`keyParameter`, _) :: (`additionalDataValueParameter`, rightValue: DefinedSingleParameter) ::Nil, _) =>
val outName = OutputVariableNameDependency.extract(dependencies)
val leftCtx = left(byBranch).map(contexts).getOrElse(ValidationContext())
FinalResults.forValidation(leftCtx)(_.withVariable(OutputVar.customNode(outName), rightValue.returnType))
}
private def left(byBranch: Map[String, String]): Option[String] = byBranch.find(_._2 == "Events").map(_._1)
private def right(byBranch: Map[String, String]): Option[String] = byBranch.find(_._2 == "Additional data").map(_._1)
override def implementation(params: Map[String, Any], dependencies: List[NodeDependencyValue], finalState: Option[State]): AnyRef = {
val role = params(roleParameter).asInstanceOf[Map[String, String]]
val leftName = left(role)
val rightName = right(role)
val key = params(keyParameter).asInstanceOf[Map[String, LazyParameter[String]]]
new FlinkCustomJoinTransformation {
override def transform(inputs: Map[String, DataStream[Context]], context: FlinkCustomNodeContext): DataStream[ValueWithContext[AnyRef]] = {
val leftSide = inputs(leftName.get)
val rightSide = inputs(rightName.get)
leftSide
.flatMap(context.lazyParameterHelper.lazyMapFunction(key(leftName.get)))
.connect(rightSide.flatMap(context.lazyParameterHelper.lazyMapFunction(key(rightName.get))))
.keyBy(_.value, _.value)
.process(new EnrichWithAdditionalDataFunction(params(additionalDataValueParameter).asInstanceOf[LazyParameter[AnyRef]], context.lazyParameterHelper))
}
}
}
override def nodeDependencies: List[NodeDependency] = List(OutputVariableNameDependency)
}
class EnrichWithAdditionalDataFunction(val parameter: LazyParameter[AnyRef], val lazyParameterHelper: FlinkLazyParameterFunctionHelper)
extends KeyedCoProcessFunction[String, ValueWithContext[String], ValueWithContext[String], ValueWithContext[AnyRef]]
with OneParamLazyParameterFunction[AnyRef]{
private lazy val state = getRuntimeContext.getState[AnyRef](new ValueStateDescriptor[AnyRef]("right", classOf[AnyRef]))
override def processElement1(value: ValueWithContext[String], ctx: KeyedCoProcessFunction[String, ValueWithContext[String],
ValueWithContext[String], ValueWithContext[AnyRef]]#Context, out: Collector[ValueWithContext[AnyRef]]): Unit = {
val currentValue = state.value()
out.collect(ValueWithContext(currentValue, value.context))
}
override def processElement2(value: ValueWithContext[String], ctx: KeyedCoProcessFunction[String, ValueWithContext[String],
ValueWithContext[String], ValueWithContext[AnyRef]]#Context, out: Collector[ValueWithContext[AnyRef]]): Unit = {
val currentValue = evaluateParameter(value.context)
state.update(currentValue)
}
}
| TouK/nussknacker | engine/flink/management/dev-model/src/main/scala/pl/touk/nussknacker/engine/management/sample/transformer/EnrichWithAdditionalDataTransformer.scala | Scala | apache-2.0 | 6,081 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.corretto.rdd
import org.apache.spark.SparkContext._
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.util.PhredUtils
import org.bdgenomics.corretto.models.ProbabilisticSequence
import org.bdgenomics.formats.avro.AlignmentRecord
import org.bdgenomics.utils.statistics.mixtures.{
PoissonMixtureModel
}
import scala.annotation.tailrec
import scala.collection.immutable.StringOps
import scala.math.{ abs, exp, log => mathLog, max, min, Pi, pow, sqrt }
private[corretto] object ErrorCorrection extends Logging {
val ec = new ErrorCorrection
/**
* For an RDD of read data, performs read error correction.
*
* @param rdd The RDD of reads to correct.
* @param qmerLength The length of the q-mers to create. The default value is 20.
* @return Returns a corrected RDD of reads.
*/
def apply(rdd: RDD[AlignmentRecord],
kmerLength: Int = 20,
maxIterations: Int = 10,
fixingThreshold: Int = 20,
missingKmerProbability: Double = 0.05,
ploidy: Int = 2): RDD[AlignmentRecord] = {
// generate qmer counts
val qmerCounts: RDD[(String, Int)] = rdd.adamCountKmers(kmerLength)
.map(p => (p._1, p._2.toInt))
.cache()
val counts = qmerCounts.map(kv => kv._2)
.cache()
// run em to fit distributions
val distributions = PoissonMixtureModel.train(counts,
ploidy + 1,
maxIterations)
val errorDistribution = distributions.head
val trustedDistributions = distributions.drop(1)
// determine if kmers are trusted
val qmerLikelihoods: RDD[(String, Double, Double)] = qmerCounts.map(p => {
val (qmer, count) = p
// apply distributions
(qmer,
errorDistribution.probabilityOf(count),
trustedDistributions.map(_.probabilityOf(count)).sum)
})
// unpersist counts
qmerCounts.unpersist()
// filter and collect trusted kmers
val trustedKmers = qmerLikelihoods.filter(t => t._2 <= t._3 &&
// normally, I wouldn't create a "new StringOps", but there is a collision between implicits
new StringOps(t._1).find(c => c == 'n' || c == 'N').isEmpty)
.map(t => (t._1, t._3 / (t._2 + t._3)))
.collectAsMap
.toMap
// fix reads
val fixedReads = fixReads(rdd,
trustedKmers,
kmerLength,
PhredUtils.successProbabilityToPhred(fixingThreshold),
missingKmerProbability)
fixedReads
}
/**
* Uses a probabilistic model for base transitions to fix errors in reads.
*
* @param rdd An RDD of reads to correct.
* @param kmerTrie A prefix trie populated with the probability of a kmer being
* a "correct" kmer.
* @param kmerLength The length of k to use when cutting q-mers.
* @param fixingThreshold The minimum probability to require for allowing a base to be fixed.
* @param missingProbability The assumed (upper bound) probability for a k-mer that is not
* resident in the prefix trie.
* @return Returns an RDD of fixed reads.
*/
private[corretto] def fixReads(rdd: RDD[AlignmentRecord],
kmerTrie: Map[String, Double],
kmerLength: Int,
fixingThreshold: Double,
missingProbability: Double): RDD[AlignmentRecord] = {
// cut reads into k-mers and get probabilities
val cutReads = rdd.map(ec.cutRead(_, kmerTrie, kmerLength, missingProbability))
.cache()
// correct the reads
val fixPhred = (PhredUtils.successProbabilityToPhred(fixingThreshold) + 33).toChar
val correctedReads = cutReads.map(ec.correctRead(_,
fixingThreshold,
fixPhred,
kmerTrie,
kmerLength,
missingProbability))
// unpersist cut reads
cutReads.unpersist()
correctedReads
}
}
/**
* This case class is used as a key for tracking transition probabilities of bases.
*/
private[corretto] case class ErrorCovariate(base: Char, cycle: Int, phred: Char)
private[corretto] class ErrorCorrection extends Serializable with Logging {
private def intToBase(i: Int): String = i match {
case 0 => "A"
case 1 => "C"
case 2 => "G"
case _ => "T"
}
private def baseToInt(c: Char): Int = c match {
case 'A' | 'a' => 0
case 'C' | 'c' => 1
case 'G' | 'g' => 2
case 'T' | 't' => 3
case _ => 4
}
/**
* Cuts a single read into q-mers.
*
* @param read Read to cut.
* @param kmerLength The length of the qmer to cut.
* @return Returns an iterator containing q-mer/weight mappings.
*/
def readToQmers(read: AlignmentRecord,
kmerLength: Int = 20): Iterator[(String, Double)] = {
// get read bases and quality scores
val bases = read.getSequence.toSeq
val scores = read.getQual.toString.toCharArray.map(q => {
PhredUtils.phredToSuccessProbability(q.toInt - 33)
})
// zip and put into sliding windows to get qmers
bases.zip(scores)
.sliding(kmerLength)
.map(w => {
// bases are first in tuple
val b = w.map(_._1)
// quals are second
val q = w.map(_._2)
// reduce bases into string, reduce quality scores
(b.map(_.toString).reduce(_ + _), q.reduce(_ * _))
})
}
/**
* Cuts a read into q-mers and then generates base likelihoods.
*
* @param read Read to cut.
* @param trie A trie containing the probability that a q-mer is "true".
* @param kmerLength The length of k to use when cutting q-mers.
* @param missingKmerProbability The upper bound probability to assign to a
* k-mer not found in the trie.
* @return Returns a probabilistic sequence and the original read.
*/
def cutRead(read: AlignmentRecord,
trie: Map[String, Double],
kmerLength: Int,
missingKmerProbability: Double): (ProbabilisticSequence, AlignmentRecord) = {
// cut sequence into k-mers
val readSequence = read.getSequence.toString
// call to cut string
(cutString(readSequence, trie, kmerLength, missingKmerProbability), read)
}
/**
* Cuts a string into q-mers and then generates base likelihoods. Helper function.
*
* @see cutRead
* @see correctRead
*
* @param readSequence String to cut.
* @param trie A trie containing the probability that a q-mer is "true".
* @param kmerLength The length of k to use when cutting q-mers.
* @param missingKmerProbability The upper bound probability to assign to a
* k-mer not found in the trie.
* @return Returns a probabilistic sequence and the original read.
*/
private[corretto] def cutString(readSequence: String,
trie: Map[String, Double],
kmerLength: Int,
missingKmerProbability: Double,
start: Int = 0,
end: Int = Int.MaxValue): ProbabilisticSequence = {
// cut sequence into kmers
val kmers = readSequence.sliding(kmerLength).toArray
// loop over sequence and get probabilities
val readLength = readSequence.length
val kmersLength = kmers.length
val readProbabilities = new Array[Array[Double]](readLength)
(0 until readLength).foreach(i => {
readProbabilities(i) = Array(1.0, 1.0, 1.0, 1.0)
// we only do a probability update if requested
if (i >= start && i <= end) {
val startIdx = if (i == 0) {
0
} else {
max(i - kmerLength + 1, 0)
}
val endIdx = min(i, kmersLength - 1)
(startIdx to endIdx).foreach(j => {
val kmer = kmers(j)
val kIdx = i - j
(0 to 3).foreach(b => {
val testKmer = kmer.take(kIdx) + intToBase(b) + kmer.drop(kIdx + 1)
readProbabilities(i)(b) *= trie.getOrElse(testKmer, missingKmerProbability)
})
})
}
})
// build probabilistic sequence and return
ProbabilisticSequence(readProbabilities)
}
/**
* Performs the read correction step, after transition probabilities have been
* estimated. This is done via a coordinate descent process, where the read is
* considered to have a different coordinate at each position. We continue as
* long as the probability of the read is increasing, and only touch each base
* once.
*
* @param read Tuple of probabilistic sequence and original read.
* @param fixingThreshold The threshold to use for correcting a read.
* @param fixingThresholdAsPhred The phred score corresponding to the fixing threshold.
* @param compensation The covariate prior probability table.
* @param kmerTrie Trie containing k-mer "trueness" probabilities.
* @param kmerLength Length k to use when cutting k-mers.
* @param missingKmerProbability The upper bound probability to assign to any k-mer
* not in the trie.
* @return Returns a fixed read.
*/
private[corretto] def correctRead(read: (ProbabilisticSequence, AlignmentRecord),
fixingThreshold: Double,
fixingThresholdAsPhred: Char,
kmerTrie: Map[String, Double],
kmerLength: Int,
missingKmerProbability: Double): AlignmentRecord = {
@tailrec def tryFix(checkablePositions: Array[((Array[Double], Char), Int)],
pSeq: ProbabilisticSequence,
seq: String,
pRead: Double,
substitutedPositions: Set[Int]): ProbabilisticSequence = {
// if there are no further positions we can check, then we are done
if (checkablePositions.isEmpty) {
pSeq
} else {
// check the position with the highest probability of a change
val toCheck = checkablePositions.minBy(vk => {
val ((probabilities, originalBase), _) = vk
probabilities(baseToInt(originalBase))
})
// unpack this position
val ((probabilities, originalBase), position) = toCheck
// we want to try the highest probability base
val maxIdx = probabilities.indexOf(probabilities.max)
// if this base is greater than the substitution probability,
// we try the change
if (probabilities(maxIdx) < fixingThreshold) {
// if our current top probability doesn't meet the fixing threshold, we can return
pSeq
} else {
// build the new sequence
val newBase = intToBase(maxIdx)
val testSequence = seq.take(position) + newBase + seq.drop(position + 1)
// chop up the new read
val testPSeq = cutString(testSequence, kmerTrie, kmerLength, missingKmerProbability, position - kmerLength, position + kmerLength)
// use the last probabilistic sequence as a prior for the new probablistic sequence
// then apply a softmax in place
(0 until testPSeq.sequence.length).foreach(i => {
(0 to 3).foreach(j => {
testPSeq.sequence(i)(j) *= pSeq.sequence(i)(j)
})
})
testPSeq.softMax()
// is this an improvement?
val testPRead = readProbability(testPSeq, testSequence)
val (newCPos, newPSeq, newSeq, newPRead, newSub) = if (testPRead > pRead) {
// recompute the positions to check
val newPositionsToCheck = checkablePositions.flatMap(vk => {
val ((_, base), idx) = vk
// if this site is the site we just evaluated, we can filter it out
if (idx == position) {
None
} else {
// collect probabilities for this position from the current probabilistic read
val newProbabilities = testPSeq.sequence(idx)
// what is the max probability at this site?
val maxProbability = newProbabilities.max
// if the current base is the max probability base, we can filter,
// else return updated probabilities
if (maxProbability == newProbabilities(baseToInt(base))) {
None
} else {
Some(((newProbabilities, base), idx))
}
}
})
// emit new artifacts
(newPositionsToCheck, testPSeq, testSequence, testPRead, substitutedPositions + maxIdx)
} else {
// if we don't have an improvement in probability, we don't make a
// change, just filter out the position we just checked
(checkablePositions.filter(vk => vk._2 != position), pSeq, seq, pRead, substitutedPositions)
}
// recurse, and try the next fix candidate
tryFix(newCPos, newPSeq, newSeq, newPRead, newSub)
}
}
}
def readProbability(pSeq: ProbabilisticSequence,
read: String): Double = {
pSeq.sequence
.zip(read)
.map(p => {
val (pArray, base) = p
pArray(baseToInt(base))
}).reduce(_ * _)
}
// unpack probabilities and read sequence
val (pSeq, oldRead) = read
val sequence = oldRead.getSequence.toString
val phred = oldRead.getQual.toString
val pRead = readProbability(pSeq, sequence)
// filter out bases that are already over the fixing threshold
val positions = pSeq.sequence.zip(sequence)
.zipWithIndex
.filter(vk => {
val ((probabilities, base), _) = vk
probabilities(baseToInt(base)) < fixingThreshold
})
// try to fix the read
val newPSeq = tryFix(positions,
pSeq,
sequence,
pRead,
Set[Int]())
val (newSequence, newQuals) = newPSeq.toSequence
// finalize the fix and return
finalizeFix(oldRead, newSequence, newQuals, fixingThresholdAsPhred)
}
/**
* Finalizes a fixed read by trimming low quality bases off of the end of
* the read, and converting quals to phred.
*
* @param oldRead Initial read with all structural information.
* @param newSequence The new, error corrected sequence.
* @param newQuals The integer phred quality of all bases in the read.
* @param fixPhred The phred score limit for accepting a base.
* @return Returns a fixed read.
*/
def finalizeFix(oldRead: AlignmentRecord,
newSequence: String,
newQuals: Array[Int],
fixPhred: Char): AlignmentRecord = {
val phredAsInt = fixPhred.toInt - 33
def dropLength(array: Array[Int],
reverse: Boolean): Int = {
val (startIdx, increment) = if (reverse) {
(array.length - 1, -1)
} else {
(0, 1)
}
@tailrec def dropTest(idx: Int,
dropCount: Int): Int = {
        if (dropCount >= array.length || array(idx) >= phredAsInt) {
dropCount
} else {
dropTest(idx + increment, dropCount + 1)
}
}
dropTest(startIdx, 0)
}
// trim ends off of read, if they are below the fixing threshold
val trimStart = dropLength(newQuals, false)
val trimEnd = dropLength(newQuals, true)
// rebuild read with trimmed ends
AlignmentRecord.newBuilder(oldRead)
.setSequence(newSequence.drop(trimStart).dropRight(trimEnd))
      .setQual(newQuals.drop(trimStart).dropRight(trimEnd).map(q => (q + 33).toChar).mkString)
.setBasesTrimmedFromStart(trimStart)
.setBasesTrimmedFromEnd(trimEnd)
.build()
}
}
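// Hypothetical, self-contained sketch (not part of the original file) of the
// read-probability idea used by correctRead above: a read's probability is the
// product of the probabilities assigned to its called base at each position.
object ReadProbabilitySketch {
  private def baseToInt(base: Char): Int = base match {
    case 'A' => 0
    case 'C' => 1
    case 'G' => 2
    case _   => 3
  }

  // perBase(i)(b) is the probability of base b at position i of the read
  def readProbability(perBase: Array[Array[Double]], read: String): Double =
    perBase.zip(read).map { case (p, base) => p(baseToInt(base)) }.product

  // e.g. readProbability(Array(Array(0.9, 0.03, 0.03, 0.04)), "A") == 0.9
}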
|
fnothaft/corretto
|
src/main/scala/org/bdgenomics/corretto/rdd/ErrorCorrection.scala
|
Scala
|
apache-2.0
| 16,706 |
package beam.sim.vehiclesharing
import beam.router.BeamSkimmer
import beam.sim.BeamServices
import beam.sim.config.BeamConfig
import org.matsim.api.core.v01.Id
object RepositionAlgorithms {
def lookup(
config: BeamConfig.Beam.Agentsim.Agents.Vehicles.SharedFleets$Elm.Reposition
): RepositionAlgorithmType = {
config.name match {
case "min-availability-undersupply-algorithm" =>
AvailabilityBasedRepositioningType(config)
case _ =>
throw new RuntimeException("Unknown reposition algorithm type")
}
}
}
trait RepositionAlgorithmType {
def getInstance(
managerId: Id[VehicleManager],
beamServices: BeamServices,
beamSkimmer: BeamSkimmer
): RepositionAlgorithm
def getRepositionTimeBin: Int
def getStatTimeBin: Int
}
case class AvailabilityBasedRepositioningType(
params: BeamConfig.Beam.Agentsim.Agents.Vehicles.SharedFleets$Elm.Reposition
) extends RepositionAlgorithmType {
override def getInstance(
managerId: Id[VehicleManager],
beamServices: BeamServices,
beamSkimmer: BeamSkimmer
): RepositionAlgorithm = {
AvailabilityBasedRepositioning(
params.repositionTimeBin,
params.statTimeBin,
params.min_availability_undersupply_algorithm.get.matchLimit,
managerId,
beamServices,
beamSkimmer
)
}
def getRepositionTimeBin: Int = params.repositionTimeBin
def getStatTimeBin: Int = params.statTimeBin
}
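// Hypothetical usage sketch (not part of the original file): resolving an
// algorithm type from config and instantiating it; `config`, `managerId`,
// `services`, and `skimmer` are assumed to come from the surrounding BEAM wiring.
//
// val algorithmType = RepositionAlgorithms.lookup(config)
// val algorithm = algorithmType.getInstance(managerId, services, skimmer)
// val repositionBin = algorithmType.getRepositionTimeBin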
|
colinsheppard/beam
|
src/main/scala/beam/sim/vehiclesharing/RepositionAlgorithms.scala
|
Scala
|
gpl-3.0
| 1,436 |
package wakfutcp.protocol.messages.server
import wakfutcp.protocol.protobuf.aptitude.SheetSet
import wakfutcp.protocol.protobuf.buildSheet.ProtoBuildSheetSet
import wakfutcp.protocol.raw.character._
import wakfutcp.protocol.{Codec, ServerMessage}
final case class CharacterInformationMessage(
reservedIds: Array[Long],
info: ForLocalCharacterInformation,
builds: ProtoBuildSheetSet,
sheets: SheetSet
) extends ServerMessage {
override val id = 4098
}
object CharacterInformationMessage {
import cats.syntax.apply._
import wakfutcp.protocol.Codec._
implicit val codec: Codec[CharacterInformationMessage] =
(array(short, long),
block(int, Codec[ForLocalCharacterInformation]),
block(int, protobuf(ProtoBuildSheetSet)),
block(int, protobuf(SheetSet)))
.imapN(apply)(Function.unlift(unapply))
}
|
OpenWakfu/wakfutcp
|
protocol/src/main/scala/wakfutcp/protocol/messages/server/CharacterInformationMessage.scala
|
Scala
|
mit
| 839 |
package example
class MethodUsages {
val m = new Methods[Int]
m.m1
m.m2()
m.m3(0)
m.m4(0)(0)
m.m5("")
m.m5(0)
m.m6(0)
m.m6(new m.List[Int])
m.m6(Nil)
m.m7(m, new m.List[Int])
m.`m8().`()
m.m9(null)
m.m10(null)
m.m11(Predef)
m.m11(Example)
m.m12a(null)
m.m12b(null)
m.m13(0)
m.m15(0)
m.m16(0)
m.m16(0)
m.m17.m()
m.m17(1)
m.m17("")
m.m18.m()
m.m18(1)
m.m18("")
m.m19(1,2)(3)
m.m20(1)
m.m20("")
m.m20.m()
}
|
dotty-staging/dotty
|
tests/semanticdb/expect/MethodUsages.scala
|
Scala
|
apache-2.0
| 469 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.Resolver
import org.apache.spark.sql.catalyst.expressions.{Ascending, Descending, Expression, NamedExpression, NullOrdering, NullsFirst, NullsLast, SortDirection, SortOrder}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, RepartitionByExpression, Sort}
import org.apache.spark.sql.connector.distributions.{ClusteredDistribution, OrderedDistribution, UnspecifiedDistribution}
import org.apache.spark.sql.connector.expressions.{Expression => V2Expression, FieldReference, IdentityTransform, NullOrdering => V2NullOrdering, SortDirection => V2SortDirection, SortValue}
import org.apache.spark.sql.connector.write.{RequiresDistributionAndOrdering, Write}
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf
object DistributionAndOrderingUtils {
def prepareQuery(write: Write, query: LogicalPlan, conf: SQLConf): LogicalPlan = write match {
case write: RequiresDistributionAndOrdering =>
val resolver = conf.resolver
val numPartitions = write.requiredNumPartitions()
val distribution = write.requiredDistribution match {
case d: OrderedDistribution => d.ordering.map(e => toCatalyst(e, query, resolver))
case d: ClusteredDistribution => d.clustering.map(e => toCatalyst(e, query, resolver))
case _: UnspecifiedDistribution => Array.empty[Expression]
}
val queryWithDistribution = if (distribution.nonEmpty) {
val finalNumPartitions = if (numPartitions > 0) {
numPartitions
} else {
conf.numShufflePartitions
}
// the conversion to catalyst expressions above produces SortOrder expressions
// for OrderedDistribution and generic expressions for ClusteredDistribution
// this allows RepartitionByExpression to pick either range or hash partitioning
RepartitionByExpression(distribution, query, finalNumPartitions)
} else if (numPartitions > 0) {
throw QueryCompilationErrors.numberOfPartitionsNotAllowedWithUnspecifiedDistributionError()
} else {
query
}
val ordering = write.requiredOrdering.toSeq
.map(e => toCatalyst(e, query, resolver))
.asInstanceOf[Seq[SortOrder]]
val queryWithDistributionAndOrdering = if (ordering.nonEmpty) {
Sort(ordering, global = false, queryWithDistribution)
} else {
queryWithDistribution
}
queryWithDistributionAndOrdering
case _ =>
query
}
private def toCatalyst(
expr: V2Expression,
query: LogicalPlan,
resolver: Resolver): Expression = {
// we cannot perform the resolution in the analyzer since we need to optimize expressions
// in nodes like OverwriteByExpression before constructing a logical write
def resolve(ref: FieldReference): NamedExpression = {
query.resolve(ref.parts, resolver) match {
case Some(attr) => attr
case None => throw new AnalysisException(s"Cannot resolve '$ref' using ${query.output}")
}
}
expr match {
case SortValue(child, direction, nullOrdering) =>
val catalystChild = toCatalyst(child, query, resolver)
SortOrder(catalystChild, toCatalyst(direction), toCatalyst(nullOrdering), Seq.empty)
case IdentityTransform(ref) =>
resolve(ref)
case ref: FieldReference =>
resolve(ref)
case _ =>
throw new AnalysisException(s"$expr is not currently supported")
}
}
private def toCatalyst(direction: V2SortDirection): SortDirection = direction match {
case V2SortDirection.ASCENDING => Ascending
case V2SortDirection.DESCENDING => Descending
}
private def toCatalyst(nullOrdering: V2NullOrdering): NullOrdering = nullOrdering match {
case V2NullOrdering.NULLS_FIRST => NullsFirst
case V2NullOrdering.NULLS_LAST => NullsLast
}
}
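// Hypothetical sketch (not part of the original file): a connector Write opts
// in to this utility by also implementing RequiresDistributionAndOrdering.
// Exact builder helpers vary across Spark versions, so this is indicative only.
//
// class MyWrite extends Write with RequiresDistributionAndOrdering {
//   override def requiredDistribution() =
//     Distributions.clustered(Array(FieldReference("bucket")))
//   override def requiredOrdering() =
//     Array(SortValue(FieldReference("ts"), V2SortDirection.ASCENDING,
//       V2NullOrdering.NULLS_FIRST))
//   override def requiredNumPartitions(): Int = 0 // let Spark choose
// }
//
// prepareQuery then wraps the plan in RepartitionByExpression (hash or range,
// depending on the distribution kind) and a non-global Sort, as needed.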
|
maropu/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DistributionAndOrderingUtils.scala
|
Scala
|
apache-2.0
| 4,810 |
package com.julianpeeters.avro.annotations
package provider
import org.apache.avro.file.DataFileReader
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.Schema
import org.apache.avro.Schema.Parser
import org.apache.avro.Schema.Type._
import scala.collection.JavaConverters._
object FileParser {
def getSchema(infile: java.io.File): Schema = {
val schema = infile.getName.split("\\\\.").last match {
case "avro" =>
val gdr = new GenericDatumReader[GenericRecord]
val dfr = new DataFileReader(infile, gdr)
dfr.getSchema
case "avsc" =>
new Parser().parse(infile)
case _ => throw new Exception("Invalid file ending. Must be .avsc for plain text json files and .avro for binary files.")
}
schema.getType match {
case UNION => {
      val maybeSchema = schema.getTypes.asScala.toList.collectFirst({ case x if x.getType == RECORD => x })
if (maybeSchema.isDefined) maybeSchema.get
else sys.error("no record type found in the union from " + infile)
}
case RECORD => schema
case _ => sys.error("The Schema in the datafile is neither a record nor a union of a record type, nothing to map to case class.")
}
}
}
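// Hypothetical usage sketch (not part of the original file): both plain-text
// (.avsc) and binary (.avro) inputs resolve to a record Schema; the file name
// below is an assumption.
//
// val schema = FileParser.getSchema(new java.io.File("user.avsc"))
// schema.getType // => RECORD
// schema.getName // => the record's name, e.g. "User"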
|
rvvincelli/avro-scala-macro-annotations
|
macros/src/main/scala/avro/scala/macro/annotations/provider/FileParser.scala
|
Scala
|
apache-2.0
| 1,252 |
package controllers
import util._
import play.api.libs.json._
import play.api.mvc._
object ApiResponse extends ApiResponse {
import Results._
import OutputType.contentTypeWithCharset
def formatJsonMessage(status: String, data: JsValue): JsObject = {
JsObject(Seq(
"status" -> JsString(status),
"data" -> data
))
}
def formatJsonMessage(status: Results.Status, data: JsValue): JsObject = {
formatJsonMessage(statusToString(status), data)
}
def isJsonErrorMessage(js: JsValue): Boolean = {
(js \\ "status").asOpt[String].map(e => e.contains("error")).getOrElse(false)
}
def formatJsonError(msg: String, ex: Option[Throwable]): JsObject = {
val message = Seq("message" -> JsString(msg))
val optional = ex match {
case None => Nil
case Some(e) =>
val seq = formatException(e).map(s => s._1 -> JsString(s._2))
Seq("details" -> JsObject(seq))
}
formatJsonMessage("error", JsObject(message ++ optional))
}
def bashError(msg: String, status: Results.Status = Results.BadRequest, ex: Option[Throwable]) = {
val exSeq = ex.map { e => formatException(e) }.getOrElse(Seq())
val exMsg = exSeq.map { case(k,v) =>
"""DATA_EXCEPTION_%s='%s';""".format(k.toUpperCase, v)
}.mkString("\\n")
val output =
"""STATUS="error";
DATA_MESSAGE='%s';
%s
""".format(msg, exMsg)
status(output).as(contentTypeWithCharset(BashOutput()))
}
def statusToString(status: Results.Status): String = {
status.header.status match {
case 200 => "success:ok"
case 201 => "success:created"
case 202 => "success:accepted"
case ok if ok >= 200 && ok < 300 => "success:other"
case 400 => "client_error:bad request"
case 401 => "client_error:unauthorized"
case 403 => "client_error:forbidden"
case 404 => "client_error:not found"
case 405 => "client_error:method not allowed"
case 406 => "client_error:not acceptable"
case 409 => "client_error:conflict"
case userErr if userErr >= 400 && userErr < 500 => "client_error:unknown"
case 500 => "server_error:internal server error"
case 501 => "server_error:not implemented"
case srvErr if srvErr >= 500 => "server_error:unknown"
case n => "unknown:%d".format(n)
}
}
def jsonError(msg: String, status: Results.Status = Results.BadRequest, ex: Option[Throwable]) = {
val output: JsValue = formatJsonMessage(status, formatJsonError(msg, ex))
status(output).as(contentTypeWithCharset(JsonOutput()))
}
def textError(msg: String, status: Results.Status = Results.BadRequest, ex: Option[Throwable]) = {
val exSeq = ex.map { e => formatException(e) }.getOrElse(Seq())
val exMsg = exSeq.map { case(k,v) => """
Exception %s %s""".format(k, v.replace("\\n","\\n\\t\\t"))
}.mkString("\\n")
val output =
"""Status Error
Details
-----------------------------------------------------------------
Message %s
%s
""".format(msg, exMsg)
status(output).as(contentTypeWithCharset(TextOutput()))
}
private def formatException(ex: Throwable): Seq[(String,String)] = {
Seq("classOf" -> ex.getClass.toString,
"message" -> ex.getMessage,
"stackTrace" -> ex.getStackTrace.map { _.toString }.mkString("\\n"))
}
}
trait ApiResponse extends Controller {
protected val defaultOutputType = JsonOutput()
import OutputType.contentTypeWithCharset
def formatResponseData(response: ResponseData)(implicit req: Request[AnyContent]) = {
getOutputType(req) match {
case o: TextOutput =>
response.status(formatTextResponse(response.data) + "\\n").as(contentTypeWithCharset(o)).withHeaders(response.headers:_*)
case o: BashOutput =>
response.status(formatBashResponse(response.data) + "\\n").as(contentTypeWithCharset(o)).withHeaders(response.headers:_*)
case o: JsonOutput =>
val rewritten = ApiResponse.isJsonErrorMessage(response.data) match {
case true => response.data
case false => ApiResponse.formatJsonMessage(response.status, response.data)
}
response.status(Json.stringify(rewritten)).as(contentTypeWithCharset(o)).withHeaders(response.headers:_*)
case o: HtmlOutput =>
val e = new Exception("Unhandled view")
e.printStackTrace()
throw e
}
}
protected def formatBashResponse(jsobject: JsValue, prefix: String = ""): String = {
def formatBasic(jsvalue: JsValue): String = {
jsvalue match {
case JsNull => ""
case JsUndefined(error) => "\\"%s\\"".format(error)
case JsBoolean(value) => value match {
case true => "true"
case false => "false"
}
case JsNumber(number) => number.toString
case JsString(s) => "\\"%s\\"".format(s)
case _ => throw new IllegalArgumentException("Unsupported JS type")
}
}
def formatList(jsvalue: Seq[JsValue], listPrefix: String = ""): String = {
val isObj = jsvalue.find { item => item.isInstanceOf[JsObject] }.map { _ => true }.getOrElse(false)
val isNonPrim = jsvalue.find { item =>
item.isInstanceOf[JsObject] || item.isInstanceOf[JsArray]
}.map { _ => true }.getOrElse(false)
if (isObj) {
jsvalue.zipWithIndex.map { case(item,id) =>
item match {
case o: JsObject => formatBashResponse(o, listPrefix + id.toString + "_") + "\\n"
case b => formatBasic(b)
}
}.mkString("")
} else if (!isNonPrim) {
listPrefix + "=" + jsvalue.map { i => formatBasic(i) }.mkString(",") + ";"
} else {
throw new Exception("Invalid JS specified")
}
}
// formats a key to an acceptable POSIX environment variable name
def formatPosixKey(key: String): String = if (!key.isEmpty) {
val posixHeadRegex = """^[^a-zA-Z_]""".r
val posixTailRegex = """[^a-zA-Z0-9_]""".r
key.head.toString match {
case posixHeadRegex() => formatPosixKey("_" + key)
case _ => posixTailRegex.replaceAllIn(key,"_").toUpperCase
}
} else {
throw new Exception("Cannot convert an empty key into a POSIX environment variable name")
}
// FIXME
require(jsobject.isInstanceOf[JsObject], "Required a JsObject")
jsobject.asInstanceOf[JsObject].value.map { case(k, v) =>
v match {
case m: JsObject => formatBashResponse(m, "%s_".format(prefix + k))
case JsArray(list) => formatList(list, "%s_".format(prefix + k))
case o => "%s=%s;".format(formatPosixKey(prefix + k), formatBasic(o))
}
}.mkString("\\n")
}
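  // Illustrative trace (not part of the original file) of formatBashResponse:
  // Json.obj("a" -> Json.obj("b" -> 1, "c" -> "x")) renders as
  //   A_B=1;
  //   A_C="x";
  // nested keys are joined with '_' and upper-cased into POSIX-safe names by
  // formatPosixKey.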
protected def formatTextResponse(jsobject: JsValue, depth: Int = 0): String = {
def formatBasic(jsvalue: JsValue): String = {
jsvalue match {
case JsNull => "null"
case JsUndefined(error) => error
case JsBoolean(value) => value.toString
case JsNumber(number) => number.toString
case JsString(s) => s
case _ => throw new IllegalArgumentException("Unsupported JS type")
}
}
def formatList(jsvalue: Seq[JsValue]): String = {
jsvalue.map { item =>
item match {
case JsArray(list) => formatList(list)
case o: JsObject => "\\n" + formatTextResponse(o, depth + 1)
case b => formatBasic(b)
}
}.mkString(",")
}
val prefix = if (depth > 0) { "\\t" * depth } else { "" }
// FIXME
require(jsobject.isInstanceOf[JsObject], "Required a JsObject")
jsobject.asInstanceOf[JsObject].value.map { case(k, v) =>
prefix + k + "\\t" + (v match {
case m: JsObject => "\\n" + formatTextResponse(m, depth + 1)
case JsArray(list) => formatList(list)
        case o => formatBasic(o)
})
}.mkString("\\n")
}
protected def getOutputType(request: Request[AnyContent]): OutputType = {
OutputType(request) match {
case Some(ot) => ot
case None => defaultOutputType
}
}
}
|
Shopify/collins
|
app/controllers/ApiResponse.scala
|
Scala
|
apache-2.0
| 7,980 |
package org.jetbrains.plugins.scala
package lang.refactoring.changeSignature
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.codeStyle.JavaCodeStyleManager
import com.intellij.refactoring.changeSignature.JavaParameterInfo
import com.intellij.refactoring.util.CanonicalTypes
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.ElementScope
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.{FunctionType, JavaArrayType, Nothing}
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.refactoring._
import scala.beans.{BeanProperty, BooleanBeanProperty}
/**
* Nikolay.Tropin
* 2014-08-10
*/
class ScalaParameterInfo(@BeanProperty var name: String,
@BeanProperty val oldIndex: Int,
var scType: ScType,
val project: Project,
var isRepeatedParameter: Boolean,
var isByName: Boolean,
@BeanProperty var defaultValue: String = "",
var keywordsAndAnnotations: String = "",
val isIntroducedParameter: Boolean = false)
extends JavaParameterInfo {
def this(p: ScParameter) {
this(p.name, p.index, p.`type`().getOrAny, p.getProject, p.isRepeatedParameter, p.isCallByNameParameter,
keywordsAndAnnotations = ScalaParameterInfo.keywordsAndAnnotations(p))
}
var defaultForJava = defaultValue
@BooleanBeanProperty
var useAnySingleVariable: Boolean = false
val wasArrayType: Boolean = scType match {
case JavaArrayType(_) => true
case _ => false
}
val isVarargType = false //overriders in java of method with repeated parameters are not varargs
protected def psiType: PsiType = {
if (scType == null) return null
implicit val elementScope = ElementScope(project)
val resultType = if (isByName) {
val functionType = FunctionType(scType, Seq())
functionType
}
else if (isRepeatedParameter) {
val seqType = ScalaPsiManager.instance(project).getCachedClass(elementScope.scope, "scala.collection.Seq")
.map(ScalaType.designator(_))
.getOrElse(Nothing)
ScParameterizedType(seqType, Seq(scType))
}
else scType
resultType.toPsiType
}
override def createType(context: PsiElement, manager: PsiManager): PsiType = psiType
override def getValue(expr: PsiCallExpression): PsiExpression = {
if (defaultForJava.isEmpty) return null
val defaultText =
if (defaultForJava.contains("$default$")) {
val qual = expr match {
case mc: PsiMethodCallExpression =>
mc.getMethodExpression.getQualifierExpression match {
case _: PsiSuperExpression => ""
case null => ""
case q => q.getText + "."
}
case _ => ""
}
qual + defaultForJava
} else defaultForJava
val expression = JavaPsiFacade.getElementFactory(project).createExpressionFromText(defaultText, expr)
JavaCodeStyleManager.getInstance(project).shortenClassReferences(expression).asInstanceOf[PsiExpression]
}
override def getTypeWrapper: CanonicalTypes.Type = {
if (scType != null) CanonicalTypes.createTypeWrapper(psiType) else null
}
override def getTypeText: String =
if (scType != null) getTypeWrapper.getTypeText else null
def typeText(implicit context: TypePresentationContext): String = {
val baseText = Option(scType).fold("")(_.codeText)
if (isRepeatedParameter) baseText + "*"
else if (isByName) " => " + baseText
else baseText
}
}
object ScalaParameterInfo {
def apply(p: ScParameter) = new ScalaParameterInfo(p)
def apply(project: Project) = new ScalaParameterInfo("", -1, null, project, false, false)
def keywordsAndAnnotations(p: ScParameter): String = {
val nameId = p.nameId
val elems = p.children.takeWhile(_ != nameId)
elems.map(_.getText).mkString
}
def allForMethod(methodLike: ScMethodLike): Seq[Seq[ScalaParameterInfo]] = {
def infos(clause: ScParameterClause): Seq[ScalaParameterInfo] = clause.parameters.map(new ScalaParameterInfo(_))
methodLike.parameterList.clauses.map(infos)
}
}
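// Illustrative renderings (not part of the original file) of typeText:
//   repeated Int parameter -> "Int*"
//   by-name Int parameter  -> " => Int"
//   plain Int parameter    -> "Int"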
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/refactoring/changeSignature/ScalaParameterInfo.scala
|
Scala
|
apache-2.0
| 4,599 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.connector.cassandra.statements
import com.stratio.crossdata.connector.cassandra.DefaultSource.CassandraDataSourceKeyspaceReplicationStringProperty
case class CreateKeyspaceStatement(options: Map[String, String]) {
override def toString(): String = {
val cqlCommand = StringBuilder.newBuilder
cqlCommand.append(s"CREATE KEYSPACE $keyspace WITH REPLICATION = $replication")
cqlCommand.toString()
}
lazy val keyspace: String = {
options.get("keyspace").get
}
lazy val replication: String = {
    require(options.contains(CassandraDataSourceKeyspaceReplicationStringProperty),
      s"$CassandraDataSourceKeyspaceReplicationStringProperty is required when using the CREATE EXTERNAL TABLE command")
    options(CassandraDataSourceKeyspaceReplicationStringProperty)
}
}
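// Hypothetical usage sketch (not part of the original file); the replication
// value below is standard Cassandra SimpleStrategy syntax, and the option keys
// follow the requirements encoded above.
//
// val stmt = CreateKeyspaceStatement(Map(
//   "keyspace" -> "ks",
//   CassandraDataSourceKeyspaceReplicationStringProperty ->
//     "{'class': 'SimpleStrategy', 'replication_factor': 1}"))
// stmt.toString
// // => CREATE KEYSPACE ks WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1}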
|
darroyocazorla/crossdata
|
cassandra/src/main/scala/com/stratio/crossdata/connector/cassandra/statements/CreateKeyspaceStatement.scala
|
Scala
|
apache-2.0
| 1,451 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2017-2019
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.util.monitor
import akka.actor.{DeadLetter, PoisonPill, Props}
import akka.testkit.TestProbe
import io.techcode.streamy.StreamyTestSystem
// Simulate supervision restart
private[this] class Impl extends DeadLetterMonitor {
override def receive: Receive = {
case _ => throw new IllegalStateException()
}
}
/**
* Dead letter monitoring spec.
*/
class DeadLetterMonitorSpec extends StreamyTestSystem {
"Dead letter monitoring" can {
"be started and stopped" in {
val deadLetterMonitor = system.actorOf(Props[DeadLetterMonitor])
val probe = TestProbe()
probe watch deadLetterMonitor
deadLetterMonitor ! PoisonPill
probe.expectTerminated(deadLetterMonitor)
}
"handle correctly dead letter" in {
val deadLetterMonitor = system.actorOf(Props[DeadLetterMonitor])
system.eventStream.subscribe(deadLetterMonitor, classOf[DeadLetter])
system.eventStream.publish(DeadLetter("Test", deadLetterMonitor, deadLetterMonitor))
}
"handle correctly restart" in {
val deadLetterMonitor = system.actorOf(Props[Impl])
deadLetterMonitor ! "fatal"
}
"not receive message by default" in {
val deadLetterMonitor = system.actorOf(Props[DeadLetterMonitor])
deadLetterMonitor ! "test"
}
}
}
|
amannocci/streamy
|
core/src/test/scala/io/techcode/streamy/util/monitor/DeadLetterMonitorSpec.scala
|
Scala
|
mit
| 2,472 |
/*
* SkolemSymbols.scala
*/
package at.logic.gapt.language.hol
import at.logic.gapt.expr._
import at.logic.gapt.utils.ds.streams.Definitions._
trait TSkolemSymbol
object TypeSynonyms {
type SkolemSymbol = SymbolA with TSkolemSymbol
}
import at.logic.gapt.language.hol.TypeSynonyms._
/* The idea of SkolemSymbolFactory is to provide
a singleton for access to the (global) Skolem symbols.
SkolemSymbolFactory provides
(1) single Skolem symbols, and
(2) streams of Skolem symbols.
Every Skolem symbol is only returned once in both
cases.
This is realized by keeping a stream s of Skolem
symbols internally, and upon request returning a stream
consisting of the even indices of s, while keeping
the odd indices of s.
*/
object SkolemSymbolFactory {
private def skolem_symbol_stream_from( n: Int ): Stream[SkolemSymbol] =
Stream.cons( new StringSymbol( "s_{" + n + "}" ) with TSkolemSymbol, skolem_symbol_stream_from( n + 1 ) )
private var skolem_symbol_stream = skolem_symbol_stream_from( 0 )
// This method resets the internal state of the factory.
// WARNING: uniqueness of Skolem Symbols is now not guaranteed anymore
// (since Skolem Symbols returned before the reset call may now
// be returned again)
//
// Hence, this function should only be used for testing.
def reset = { skolem_symbol_stream = skolem_symbol_stream_from( 0 ) }
def getSkolemSymbols = {
val stream = even( skolem_symbol_stream )
skolem_symbol_stream = odd( skolem_symbol_stream )
stream
}
def getSkolemSymbol = {
val sym = skolem_symbol_stream.head
skolem_symbol_stream = skolem_symbol_stream.tail
sym
}
}
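// Self-contained sketch (not part of the original file) of the even/odd split
// used above: handing out the even indices while keeping the odd ones is what
// guarantees that a returned stream and later requests never share symbols.
object EvenOddSplitSketch {
  def even[A](s: Stream[A]): Stream[A] = Stream.cons(s.head, even(s.tail.tail))
  def odd[A](s: Stream[A]): Stream[A] = even(s.tail)

  val naturals: Stream[Int] = Stream.from(0)
  // even(naturals) starts 0, 2, 4, ...; odd(naturals) starts 1, 3, 5, ...
}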
|
gisellemnr/gapt
|
src/main/scala/at/logic/gapt/language/hol/skolemSymbols.scala
|
Scala
|
gpl-3.0
| 1,702 |
package se.chimps.bitziness.core.generic.serializers
import scala.reflect.ClassTag
trait ObjectSerializer {
import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
protected def system:ActorSystem
lazy protected val extension = SerializationExtension(system)
def serialize(instance: AnyRef):Array[Byte] = extension.serialize(instance).get // let it crash.
def deserialize[T](bytes:Array[Byte])(implicit evidence:ClassTag[T]):T = extension.serializerFor(evidence.runtimeClass).fromBinary(bytes).asInstanceOf[T]
}
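// Hypothetical round-trip (not part of the original file), assuming an
// ActorSystem named `sys` is in scope:
//
// val serializer = new ObjectSerializer { protected def system = sys }
// val bytes = serializer.serialize("hello")
// serializer.deserialize[String](bytes) // => "hello"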
|
Meduzz/Bitziness
|
src/main/scala/se/chimps/bitziness/core/generic/serializers/ObjectSerializer.scala
|
Scala
|
apache-2.0
| 545 |
package com.partup
import akka.actor.{Actor, ActorLogging, Props}
import com.partup.utils.IsoDate
import iot.jcypher.database.IDBAccess
import iot.jcypher.query.JcQuery
import iot.jcypher.query.api.IClause
import spray.httpx.SprayJsonSupport
import spray.json.DefaultJsonProtocol
import scala.collection.JavaConversions._
import scala.collection.immutable.List
case object GetNeo4jStatus
/**
* Status of Neo4j actor
*
* @param completed amount of successful updates since start
* @param failed amount of failures since start
* @param unexpected amount of unexpected events since start
* @param errors timestamps of last errors
* @param started timestamp of start
*/
case class Neo4jStatus(completed: Long, failed: Long, unexpected: Long, errors: List[String], started: String)
object Neo4jStatus extends DefaultJsonProtocol with SprayJsonSupport {
implicit val neo4jStatusJson = jsonFormat5(Neo4jStatus.apply)
}
object UpdateNeo4jActor {
  /** Creates Props for this actor so that constructor arguments are passed
   * safely, regardless of the context in which it is created.
*
* @see http://doc.akka.io/docs/akka/current/scala/actors.html
*/
def props(conn: IDBAccess): Props = Props(new UpdateNeo4jActor(conn))
}
/**
* Updates the graph database based on the events received from Meteor
*/
class UpdateNeo4jActor(conn: IDBAccess) extends Actor with ActorLogging {
var status = Neo4jStatus(0, 0, 0, Nil, IsoDate.now)
override def receive = {
//NODES
//Users
case e: UsersInsertedEvent =>
val clauses = Neo4jClauseCreator.processUsersInsertedEvent(e)
sendToNeo4j(clauses)
case e: UsersEvent =>
val clauses = Neo4jClauseCreator.processUsersEvent(e)
sendToNeo4j(clauses)
case e: UsersSettingsEvent =>
val clauses = Neo4jClauseCreator.processUsersSettingsEvent(e)
sendToNeo4j(clauses)
//Networks
case e: TribesEvent =>
val clauses = Neo4jClauseCreator.processTribesEvent(e)
sendToNeo4j(clauses)
// case e: TribesRemovedEvent =>
// val clauses = Neo4jClauseCreator.processTribesRemovedEvent(e)
// sendToNeo4j(clauses)
//Teams
case e: PartupsEvent =>
val clauses = Neo4jClauseCreator.processPartupsEvent(e)
sendToNeo4j(clauses)
case e: PartupsUnarchivedEvent =>
val clauses = Neo4jClauseCreator.processPartupsUnarchivedEvent(e)
sendToNeo4j(clauses)
//EDGES
//Location
case e: NetworksLocationChangedEvent =>
val clauses = Neo4jClauseCreator.processNetworksLocationChangedEvent(e)
sendToNeo4j(clauses)
case e: PartupsLocationChangedEvent =>
val clauses = Neo4jClauseCreator.processPartupsLocationChangedEvent(e)
sendToNeo4j(clauses)
//Members
case e: MembersRemovedEvent =>
val clauses = Neo4jClauseCreator.processMembersRemovedEvent(e)
sendToNeo4j(clauses)
//Supporters
case e: SupportersRemovedEvent =>
val clauses = Neo4jClauseCreator.processSupportersRemovedEvent(e)
sendToNeo4j(clauses)
case GetNeo4jStatus =>
sender() ! status
case unexpected =>
status = status.copy(unexpected = status.unexpected + 1)
log.warning(s"Unexpected event for UpdateNeo4jActor: $unexpected")
}
private def sendToNeo4j(clauses: Array[IClause]): Unit = {
val query = new JcQuery()
query.setClauses(clauses)
query.setExtractParams(false)
val result = conn.execute(query)
status = if (result.hasErrors) {
log.error("Failed to update Neo4J..")
if (result.getGeneralErrors.nonEmpty) {
log.error("General errors:")
result.getGeneralErrors.foreach(x => log.error(x.toString))
}
if (result.getDBErrors.nonEmpty) {
log.error("Database errors:")
result.getDBErrors.foreach(x => log.error(x.toString))
}
status.copy(failed = status.failed + 1, errors = IsoDate.now :: status.errors.take(10))
} else {
status.copy(completed = status.completed + 1)
}
}
@throws[Exception](classOf[Exception])
override def postStop(): Unit = {
log.debug("Actor stopping!")
}
override def preRestart(reason: Throwable, message: Option[Any]) {
log.error(reason, "Restarting due to [{}] when processing [{}]", reason.getMessage, message.getOrElse(""))
}
}
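// Hypothetical wiring sketch (not part of the original file); `dbAccess` is an
// assumed IDBAccess instance and an implicit akka.util.Timeout is in scope for
// the ask (?) below.
//
// val updater = system.actorOf(UpdateNeo4jActor.props(dbAccess))
// (updater ? GetNeo4jStatus).mapTo[Neo4jStatus] // current success/failure counters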
|
part-up/api
|
src/main/scala/com/partup/UpdateNeo4jActor.scala
|
Scala
|
agpl-3.0
| 4,276 |
package ls
import scala.util.control.NonFatal
object Git {
object GhRepo {
    val GHRemote = """^git@github.com[:](\S+)/(\S+)[.]git$""".r
def unapply(line: String) = line.split("""\s+""") match {
case Array(_, GHRemote(user, repo), _) => Some(user, repo)
case _ => None
}
}
val CurrentBranch = """^([*]\S+)(.*)$""".r
lazy val cli =
sys.props.get("os.name")
.filter(_.toLowerCase.contains("windows"))
.map(_ => "git.exe")
.getOrElse("git")
def branch: Option[String] =
try {
sbt.Process("%s branch" format Git.cli)
.lines_!(ProcessLogging.silent).collectFirst {
case CurrentBranch(_, br) => br
}
} catch {
case NonFatal(_) => None
}
def ghRepo: Option[(String, String)] =
try {
sbt.Process("%s remote -v" format Git.cli)
.lines_!(ProcessLogging.silent).collectFirst {
case GhRepo(user, repo) => (user, repo)
}
} catch {
case NonFatal(_) => None
}
}
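// Illustrative trace (not part of the original file): a typical `git remote -v`
// line matches GhRepo and yields the (user, repo) pair.
//
// Git.GhRepo.unapply("origin git@github.com:softprops/ls.git (fetch)")
// // => Some(("softprops", "ls"))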
|
softprops/ls
|
plugin/src/main/scala/git.scala
|
Scala
|
mit
| 1,013 |
package org.jetbrains.plugins.scala.debugger
import java.io.File
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl
import com.intellij.openapi.projectRoots.{JavaSdk, Sdk}
import com.intellij.openapi.util.registry.Registry
import org.jetbrains.plugins.scala.compiler.ScalaCompileServerSettings
import org.jetbrains.plugins.scala.extensions._
/**
* @author Nikolay.Tropin
*/
object DebuggerTestUtil {
val jdk8Name = "JDK 1.8"
def findJdk8(): Sdk = {
val jdkTable = JavaAwareProjectJdkTableImpl.getInstanceEx
Option(jdkTable.findJdk(jdk8Name)).getOrElse {
val path = discoverJRE18().getOrElse(throw new RuntimeException("Could not find jdk8 installation, " +
"please define a valid JDK_18_x64 or JDK_18, " +
s"current - ${sys.env("JDK_18_x64")} or ${sys.env("JDK_18")}"))
val jdk = JavaSdk.getInstance.createJdk(jdk8Name, path)
inWriteAction {
jdkTable.addJdk(jdk)
}
jdk
}
}
def enableCompileServer(enable: Boolean): Unit = {
val compileServerSettings = ScalaCompileServerSettings.getInstance()
compileServerSettings.COMPILE_SERVER_ENABLED = enable
compileServerSettings.COMPILE_SERVER_SHUTDOWN_IDLE = true
compileServerSettings.COMPILE_SERVER_SHUTDOWN_DELAY = 30
ApplicationManager.getApplication.saveSettings()
}
def forceJdk8ForBuildProcess(): Unit = {
val jdk8 = findJdk8()
if (jdk8.getHomeDirectory == null) {
throw new RuntimeException(s"Failed to set up JDK, got: ${jdk8.toString}")
}
val jdkHome = jdk8.getHomeDirectory.getParent.getCanonicalPath
Registry.get("compiler.process.jdk").setValue(jdkHome)
}
val candidates = Seq(
"/usr/lib/jvm", // linux style
"C:\\\\Program Files\\\\Java\\\\", // windows style
"C:\\\\Program Files (x86)\\\\Java\\\\", // windows 32bit style
"/Library/Java/JavaVirtualMachines" // mac style
)
def discoverJRE18(): Option[String] = discoverJre(candidates, "8")
def discoverJRE16(): Option[String] = discoverJre(candidates, "6")
def discoverJDK18(): Option[String] = discoverJRE18().map(new File(_).getParent)
def discoverJDK16(): Option[String] = discoverJRE16().map(new File(_).getParent)
def discoverJre(paths: Seq[String], versionMajor: String): Option[String] = {
import java.io._
def isJDK(f: File) = f.listFiles().exists { b =>
b.getName == "bin" && b.listFiles().exists(x => x.getName == "javac.exe" || x.getName == "javac")
}
def inJvm(path: String, suffix: String) = {
val postfix = if (path.startsWith("/Library")) "/Contents/Home" else "" // mac workaround
Option(new File(path))
.filter(_.exists())
.flatMap(_.listFiles()
.sortBy(_.getName) // TODO somehow sort by release number to get the newest actually
.reverse
.find(f => f.getName.contains(suffix) && isJDK(new File(f, postfix)))
.map(new File(_, s"$postfix/jre").getAbsolutePath)
)
}
def currentJava() = {
sys.props.get("java.version") match {
case Some(v) if v.startsWith(s"1.$versionMajor") =>
sys.props.get("java.home") match {
case Some(path) if isJDK(new File(path).getParentFile) =>
Some(path)
case _ => None
}
case _ => None
}
}
val versionStrings = Seq(s"1.$versionMajor", s"-$versionMajor")
val priorityPaths = Seq(
currentJava(),
Option(sys.env.getOrElse(s"JDK_1${versionMajor}_x64",
sys.env.getOrElse(s"JDK_1$versionMajor", null))
).map(_+"/jre") // teamcity style
)
if (priorityPaths.exists(_.isDefined)) {
priorityPaths.flatten.headOption
} else {
val fullSearchPaths = paths flatMap { p => versionStrings.map((p, _)) }
for ((path, ver) <- fullSearchPaths) {
inJvm(path, ver) match {
case x@Some(p) => return x
case _ => None
}
}
None
}
}
}
|
triplequote/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/debugger/DebuggerTestUtil.scala
|
Scala
|
apache-2.0
| 4,118 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
*/
package org.openapitools.server.model
case class DiskSpaceMonitorDescriptorDiskSpace(
`class`: Option[String],
timestamp: Option[Int],
path: Option[String],
size: Option[Int]
)
|
cliffano/swaggy-jenkins
|
clients/scalatra/generated/src/main/scala/org/openapitools/server/model/DiskSpaceMonitorDescriptorDiskSpace.scala
|
Scala
|
mit
| 504 |
package composition
import _root_.webserviceclients.fakes.DateServiceConstants.DayValid
import _root_.webserviceclients.fakes.DateServiceConstants.MonthValid
import _root_.webserviceclients.fakes.DateServiceConstants.YearValid
import com.tzavellas.sse.guice.ScalaModule
import org.joda.time.DateTime
import org.joda.time.Instant
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar
import uk.gov.dvla.vehicles.presentation.common.services.DateService
import uk.gov.dvla.vehicles.presentation.common.views.models.DayMonthYear
final class TestDateService extends ScalaModule with MockitoSugar {
val stub = {
val dateTimeISOChronology: String = new DateTime(
YearValid.toInt,
MonthValid.toInt,
DayValid.toInt,
0,
0).toString
val today = DayMonthYear(
DayValid.toInt,
MonthValid.toInt,
YearValid.toInt
)
val dateTime = new DateTime(
YearValid.toInt,
MonthValid.toInt,
DayValid.toInt,
0,
0)
val now: Instant = dateTime.toInstant
val dateService = mock[DateService]
when(dateService.dateTimeISOChronology).thenReturn(dateTimeISOChronology)
when(dateService.today).thenReturn(today)
when(dateService.now).thenReturn(now)
dateService
}
def configure() = bind[DateService].toInstance(stub)
}
|
dvla/vrm-retention-online
|
test/composition/TestDateService.scala
|
Scala
|
mit
| 1,328 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package io.swagger.client.model
import play.api.libs.json._
case class DiskSpaceMonitorDescriptorDiskSpace (
`class`: Option[String],
timestamp: Option[Int],
`path`: Option[String],
size: Option[Int]
)
object DiskSpaceMonitorDescriptorDiskSpace {
implicit val format: Format[DiskSpaceMonitorDescriptorDiskSpace] = Json.format
}
|
cliffano/swaggy-jenkins
|
clients/scala-lagom-server/generated/src/main/scala/io/swagger/client/model/DiskSpaceMonitorDescriptorDiskSpace.scala
|
Scala
|
mit
| 751 |
//
// GraftGroup.scala -- Scala class GraftGroup
// Project OrcScala
//
// Created by dkitchin on Aug 12, 2011.
//
// Copyright (c) 2017 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.run.core
/** A GraftGroup is the group associated with expression g in val x = g # f.
*
* We use early initialization here because the group can be killed as soon as the
* constructor for Subgroup runs. So normal initialization could be too late and result
* in crashes in the kill call.
*
* @author dkitchin, amp
*/
class GraftGroup(parent: Group) extends {
var state: GraftGroupState = GraftGroupState.ValueUnknown
private var _future: Future = new LocalFuture(parent.runtime)
} with Subgroup(parent) {
override def toString = super.toString + s"(state=${state}, ${_future})"
/** Get a binding connecting to this graft group.
*
* This should only be called before any member of this group can run. This
* is because if this group has bound the future by publication then this
* method will not work. So binding should only be called shortly after
* construction before anything has been scheduled (staging is OK as long
* as the stage has not been flushed by returning to the scheduler).
*
* This will usually return a BoundReadable, however if the group is silent
* (due to halting or being killed) this will return a BoundStop.
*/
def binding = synchronized {
if (_future ne null)
BoundReadable(_future)
else if (state == GraftGroupState.ValueSilent)
BoundStop
else
throw new AssertionError(s"Requesting binding for bound graft group. This should not be possible. This must be a threading issue. $this")
}
// Publishing is idempotent
override def publish(t: Token, v: Option[AnyRef]) = synchronized {
state match {
case GraftGroupState.ValueUnknown => {
state = GraftGroupState.ValuePublished
// There should be no situations in which v is None. Just let it crash if it's not.
_future.bind(v.get)
// Clear the reference to the future so that it can be collected even if this group still exists.
_future = null
}
case _ => {}
}
t.halt()
}
override def onHalt() = synchronized {
state match {
case GraftGroupState.ValueUnknown => {
state = GraftGroupState.ValueSilent
parent.remove(this)
_future.stop()
_future = null
}
case GraftGroupState.ValuePublished => {
parent.remove(this)
}
case _ => {}
}
}
def onDiscorporate() = synchronized {
state match {
case GraftGroupState.ValueUnknown => {
parent.discorporate(this)
_future = null
}
case GraftGroupState.ValuePublished => {
parent.discorporate(this)
}
case _ => {}
}
}
// This is not needed for Graft itself. However it doesn't hurt anything and it is needed for
// object field futures to halt when the object is killed.
override def kill() = {
synchronized {
state match {
case GraftGroupState.ValueUnknown => {
state = GraftGroupState.ValueSilent
_future.stop()
_future = null
}
case _ => {}
}
}
super.kill()
}
}
/** Possible states of a GraftGroup */
abstract sealed class GraftGroupState()
object GraftGroupState {
case object ValueUnknown extends GraftGroupState()
case object ValuePublished extends GraftGroupState()
case object ValueSilent extends GraftGroupState()
}
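// State-transition summary (not part of the original file):
//   ValueUnknown --publish--> ValuePublished (the future is bound to the value)
//   ValueUnknown --halt or kill--> ValueSilent (the future is stopped)
// ValuePublished and ValueSilent are terminal; repeated events are no-ops.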
|
orc-lang/orc
|
OrcScala/src/orc/run/core/GraftGroup.scala
|
Scala
|
bsd-3-clause
| 3,769 |
package com.twitter.finagle.service
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.service.RetryPolicy._
import com.twitter.finagle.{
Backoff,
ChannelClosedException,
Failure,
FailureFlags,
TimeoutException,
WriteException
}
import com.twitter.util._
import org.scalatest.funspec.AnyFunSpec
class RetryPolicyTest extends AnyFunSpec {
def getBackoffs(
policy: RetryPolicy[Try[Nothing]],
exceptions: Stream[Exception]
): Backoff = {
exceptions match {
case Stream.Empty => Backoff.empty
case e #:: tail =>
policy(Throw(e)) match {
case None => Backoff.empty
case Some((backoff, p2)) => Backoff.fromStream(backoff #:: getBackoffs(p2, tail).toStream)
}
}
}
describe("RetryPolicy") {
val NoExceptions: PartialFunction[Try[Nothing], Boolean] = {
case _ => false
}
val timeoutExc = new TimeoutException {
protected val timeout = 0.seconds
protected val explanation = "!"
}
it("should WriteExceptionsOnly") {
val weo = WriteExceptionsOnly orElse NoExceptions
assert(!weo(Throw(new Exception)))
assert(weo(Throw(WriteException(new Exception))))
assert(!weo(Throw(Failure(new Exception, FailureFlags.Interrupted))))
// it's important that this failure isn't retried, despite being "retryable".
// interrupted futures should never be retried.
assert(!weo(Throw(Failure(new Exception, FailureFlags.Interrupted | FailureFlags.Retryable))))
assert(weo(Throw(Failure(new Exception, FailureFlags.Retryable))))
assert(!weo(Throw(Failure(new Exception, FailureFlags.Rejected | FailureFlags.NonRetryable))))
assert(!weo(Throw(timeoutExc)))
}
it("should TimeoutAndWriteExceptionsOnly") {
val taweo = TimeoutAndWriteExceptionsOnly orElse NoExceptions
assert(!taweo(Throw(new Exception)))
assert(taweo(Throw(WriteException(new Exception))))
assert(!taweo(Throw(Failure(new Exception, FailureFlags.Interrupted))))
assert(taweo(Throw(Failure(timeoutExc, FailureFlags.Interrupted))))
assert(taweo(Throw(timeoutExc)))
assert(taweo(Throw(new com.twitter.util.TimeoutException(""))))
}
it("RetryableWriteException matches retryable exception") {
val retryable = Seq(Failure.rejected("test"), WriteException(new Exception))
val nonRetryable =
Seq(
Failure("test", FailureFlags.Interrupted),
new Exception,
new ChannelClosedException,
Failure("boo", FailureFlags.NonRetryable)
)
retryable.foreach {
case RetryPolicy.RetryableWriteException(_) =>
case _ => fail("should match RetryableWriteException")
}
nonRetryable.foreach {
case RetryPolicy.RetryableWriteException(_) =>
fail("should not match RetryableWriteException")
case _ =>
}
}
}
case class IException(i: Int) extends Exception
val iExceptionsOnly: PartialFunction[Try[Nothing], Boolean] = {
case Throw(IException(_)) => true
}
val iGreaterThan1: Try[Nothing] => Boolean = {
case Throw(IException(i)) if i > 1 => true
case _ => false
}
describe("RetryPolicy.filter/filterEach") {
val backoffs = Backoff.linear(10.milliseconds, 10.milliseconds).take(3)
val policy = RetryPolicy.backoff(backoffs)(iExceptionsOnly).filter(iGreaterThan1)
it("returns None if filter rejects") {
val actual = getBackoffs(policy, Stream(IException(0), IException(1)))
assert(actual == Backoff.empty)
}
it("returns underlying result if filter accepts first") {
val actual = getBackoffs(policy, Stream(IException(2), IException(0)))
verifyBackoff(actual, backoffs.take(2))
}
}
describe("RetryPolicy.filterEach") {
val backoffs = Backoff.linear(10.milliseconds, 10.milliseconds).take(3)
val policy = RetryPolicy.backoff(backoffs)(iExceptionsOnly).filterEach(iGreaterThan1)
it("returns None if filterEach rejects") {
val actual = getBackoffs(policy, Stream(IException(0), IException(1)))
assert(actual == Backoff.empty)
}
it("returns underlying result if filterEach accepts") {
val actual = getBackoffs(policy, Stream(IException(2), IException(2), IException(0)))
verifyBackoff(actual, backoffs.take(2))
}
}
describe("RetryPolicy.limit") {
var currentMaxRetries: Int = 0
val maxBackoffs = Backoff.const(10.milliseconds).take(3)
val policy =
RetryPolicy
.backoff(maxBackoffs)(RetryPolicy.ChannelClosedExceptionsOnly)
.limit(currentMaxRetries)
it("limits retries dynamically") {
for (i <- 0 until 5) {
currentMaxRetries = i
val backoffs = getBackoffs(policy, Stream.fill(3)(new ChannelClosedException()))
        verifyBackoff(backoffs, maxBackoffs.take(i.min(3)))
}
}
}
describe("RetryPolicy.combine") {
val channelClosedBackoff = 10.milliseconds
val writeExceptionBackoff = 0.milliseconds
val combinedPolicy =
RetryPolicy.combine(
RetryPolicy.backoff(Backoff.const(Duration.Zero).take(2))(RetryPolicy.WriteExceptionsOnly),
RetryPolicy
.backoff(Backoff.const(channelClosedBackoff).take(3))(
RetryPolicy.ChannelClosedExceptionsOnly)
)
it("return None for unmatched exception") {
val backoffs = getBackoffs(combinedPolicy, Stream(new UnsupportedOperationException))
assert(backoffs == Backoff.empty)
}
it("mimicks first policy") {
val backoffs = getBackoffs(combinedPolicy, Stream.fill(4)(WriteException(new Exception)))
verifyBackoff(backoffs, Backoff.const(writeExceptionBackoff).take(2))
}
it("mimicks second policy") {
val backoffs = getBackoffs(combinedPolicy, Stream.fill(4)(new ChannelClosedException()))
verifyBackoff(backoffs, Backoff.const(channelClosedBackoff).take(3))
}
it("interleaves backoffs") {
val exceptions = Stream(
new ChannelClosedException(),
WriteException(new Exception),
WriteException(new Exception),
new ChannelClosedException(),
WriteException(new Exception)
)
val backoffs = getBackoffs(combinedPolicy, exceptions)
val expectedBackoffs = Backoff
.const(channelClosedBackoff).take(1)
.concat(Backoff.const(writeExceptionBackoff).take(2))
.concat(Backoff.const(channelClosedBackoff).take(1))
verifyBackoff(backoffs, expectedBackoffs)
}
}
describe("RetryPolicy.namedPF") {
it("uses the name parameter as the toString method") {
val f = RetryPolicy.namedPF[Int]("foo") { case _ => false }
assert(f.toString == "foo")
}
it("preserves the behavior of the underlying partial function") {
val f: PartialFunction[Int, Boolean] = { case i if i >= 0 => true }
val f1 = RetryPolicy.namedPF("foo")(f)
assert(f.isDefinedAt(1) == f1.isDefinedAt(1))
assert(f.isDefinedAt(-1) == f1.isDefinedAt(-1))
}
it("preserves toString information when composition with .orElse") {
val f1 = RetryPolicy.namedPF[Int]("foo") { case i if i >= 0 => false }
val f2 = RetryPolicy.namedPF[Int]("bar") { case _ => true }
val composed = f1.orElse(f2)
assert(composed.toString == "foo.orElse(bar)")
}
}
describe("RetryPolicy.Never") {
val never = RetryPolicy.Never.asInstanceOf[RetryPolicy[Try[Int]]]
it("should not retry") {
assert(None == never(Return(1)))
assert(None == never(Throw(new RuntimeException)))
}
}
describe("RetryPolicy.none") {
val nah = RetryPolicy.none
it("should not retry") {
assert(None == nah((1, Return(1))))
assert(None == nah((1, Throw(new RuntimeException))))
}
}
private def verifyBackoff(b1: Backoff, b2: Backoff): Unit = {
if (!b1.isExhausted && !b2.isExhausted) {
assert(b1.duration == b2.duration)
verifyBackoff(b1.next, b2.next)
}
}
}
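// Hypothetical sketch (not part of the original file): building a policy like
// those exercised above, with three constant 10 ms backoffs and retries only
// on write exceptions.
//
// val policy = RetryPolicy.backoff(
//   Backoff.const(10.milliseconds).take(3))(RetryPolicy.WriteExceptionsOnly)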
|
twitter/finagle
|
finagle-core/src/test/scala/com/twitter/finagle/service/RetryPolicyTest.scala
|
Scala
|
apache-2.0
| 7,995 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity
import scala.util.Try
import spray.json.JsValue
import spray.json.RootJsonFormat
import spray.json.deserializationError
import spray.json.DefaultJsonProtocol
/**
* Abstract type for limits on triggers and actions. This may
* expand to include global limits as well (for example limits
* that require global knowledge).
*/
protected[entity] abstract class Limits {
protected[entity] def toJson: JsValue
override def toString = toJson.compactPrint
}
/**
* Limits on a specific action. Includes the following properties
* {
* timeout: maximum duration in msecs an action is allowed to consume in [100 msecs, 5 minutes],
* memory: maximum memory in megabytes an action is allowed to consume within system limit, default [128 MB, 512 MB],
* logs: maximum logs line in megabytes an action is allowed to generate [10 MB],
* concurrency: maximum number of concurrently processed activations per container [1, 200]
* }
*
* @param timeout the duration in milliseconds, assured to be non-null because it is a value
* @param memory the memory limit in megabytes, assured to be non-null because it is a value
* @param logs the limit for logs written by the container and stored in the activation record, assured to be non-null because it is a value
* @param concurrency the limit on concurrently processed activations per container, assured to be non-null because it is a value
*/
protected[core] case class ActionLimits(timeout: TimeLimit = TimeLimit(),
memory: MemoryLimit = MemoryLimit(),
logs: LogLimit = LogLimit(),
concurrency: ConcurrencyLimit = ConcurrencyLimit())
extends Limits {
override protected[entity] def toJson = ActionLimits.serdes.write(this)
}
/**
* Limits on a specific trigger. None yet.
*/
protected[core] case class TriggerLimits protected[core] () extends Limits {
override protected[entity] def toJson: JsValue = TriggerLimits.serdes.write(this)
}
protected[core] object ActionLimits extends ArgNormalizer[ActionLimits] with DefaultJsonProtocol {
override protected[core] implicit val serdes = new RootJsonFormat[ActionLimits] {
val helper = jsonFormat4(ActionLimits.apply)
def read(value: JsValue) = {
val obj = Try {
value.asJsObject.convertTo[Map[String, JsValue]]
} getOrElse deserializationError("no valid json object passed")
val time = TimeLimit.serdes.read(obj.get("timeout") getOrElse deserializationError("'timeout' is missing"))
val memory = MemoryLimit.serdes.read(obj.get("memory") getOrElse deserializationError("'memory' is missing"))
val logs = obj.get("logs") map { LogLimit.serdes.read(_) } getOrElse LogLimit()
val concurrency = obj.get("concurrency") map { ConcurrencyLimit.serdes.read(_) } getOrElse ConcurrencyLimit()
ActionLimits(time, memory, logs, concurrency)
}
def write(a: ActionLimits) = helper.write(a)
}
}
protected[core] object TriggerLimits extends ArgNormalizer[TriggerLimits] with DefaultJsonProtocol {
override protected[core] implicit val serdes = jsonFormat0(TriggerLimits.apply _)
}
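// Illustrative round-trip (not part of the original file), assuming TimeLimit
// and MemoryLimit parse plain numbers (msecs and MB respectively), as their
// descriptions above suggest; logs and concurrency fall back to the defaults.
//
// import spray.json._
// """{"timeout": 60000, "memory": 256}""".parseJson.convertTo[ActionLimits]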
|
cbickel/openwhisk
|
common/scala/src/main/scala/org/apache/openwhisk/core/entity/Limits.scala
|
Scala
|
apache-2.0
| 4,022 |
package com.twitter.finagle.http
import com.twitter.conversions.DurationOps._
import com.twitter.conversions.StorageUnitOps._
import com.twitter.finagle.http.{Status => HttpStatus}
import com.twitter.finagle.service.ConstantService
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.{Http => FinagleHttp, _}
import com.twitter.io._
import com.twitter.util._
import java.net.{InetSocketAddress, SocketAddress}
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import org.scalatest.funsuite.AnyFunSuite
abstract class AbstractStreamingTest extends AnyFunSuite {
protected def configureClient(
client: FinagleHttp.Client,
singletonPool: Boolean
): FinagleHttp.Client
protected def configureServer(server: FinagleHttp.Server): FinagleHttp.Server
import StreamingTest._
// Enumerated Failure Cases
// ------------------------
//
// Caused by network failure:
//
// 1. Client: request stream fails on write
// 2. Client: response stream fails on read
// 3. Server: request stream fails on read
// 4. Server: response stream fails on write
//
// Application initiated failure:
//
// 5. Client: fails request writer
// 6. Client: discards response reader
// 7. Server: fails response writer
// 8. Server: discards request reader
// We call write repeatedly for `streamChunks` to *be sure* to notice
// transport failure.
def writeLots(writer: Writer[Buf], buf: Buf): Future[Unit] =
writer.write(buf) before writeLots(writer, buf)
class ClientCtx(singletonPool: Boolean = false) {
@volatile var shouldFail = true
val failure = new Promise[Unit]
val server = startServer(echo)
val client = connectWithModifier(server.boundAddress, singletonPool = singletonPool) {
transport =>
if (shouldFail) failure.ensure { await(transport.close()) }
transport
}
val buf = Buf.Utf8(".")
val req = get("/")
val res = await(client(req))
// Demonstrate normal operations by testing for a single echo'd chunk.
await(req.writer.write(buf))
assert(await(res.reader.read()) == Some(buf))
// This request should queue in the service pool.
shouldFail = false
val req2 = get("abc")
val res2 = client(req2)
// Assert previously queued request is now processed, and not interrupted
// midstream.
def assertSecondRequestOk(): Unit = {
try {
await(res2.liftToTry) match {
case Return(rsp) =>
await(req2.writer.close())
await(BufReader.readAll(rsp.reader))
case Throw(e) =>
fail(s"second request failed: $e")
}
} finally {
await(Closable.all(server, client).close())
}
}
}
test("client: request stream fails on write")(new ClientCtx {
// Simulate network failure by closing the transport.
failure.setDone()
intercept[ReaderDiscardedException] { await(writeLots(req.writer, buf)) }
// We call read for the collating function to notice transport failure.
intercept[ChannelException] { await(res.reader.read()) }
assertSecondRequestOk()
})
test("client: response stream fails on read")(new ClientCtx(singletonPool = true) {
assert(res2.poll == None)
// Reader should be suspended in a reading state.
val f = res.reader.read()
assert(f.poll == None)
// Simulate network failure by closing the transport.
failure.setDone()
// Assert reading state suspension is interrupted by transport closure.
intercept[ChannelException] { await(f) }
intercept[ReaderDiscardedException] { await(writeLots(req.writer, buf)) }
assertSecondRequestOk()
})
test("client: server disconnect on pending response should fail request") {
val reqReceived = Promise[Unit]()
val server = startServer(Service.mk { _ =>
reqReceived.setDone()
Future.never
})
val client = connect(server.boundAddress)
val resF = client(get("/"))
await(reqReceived.before(server.close()))
intercept[ChannelException] { await(resF) }
await(client.close())
}
test("client: client closes transport after server disconnects") {
val clientClosed = new Promise[Unit]
val service = Service.mk[Request, Response] { _ => Future.value(Response()) }
val server = startServer(service)
val client = connectWithModifier(server.boundAddress) { transport =>
clientClosed.become(transport.onClose.unit)
transport
}
val res = await(client(get("/")))
assert(await(res.reader.read()) == None)
await(server.close())
await(clientClosed)
}
test("client: fail request writer")(new ClientCtx(singletonPool = true) {
assert(res2.poll.isEmpty)
req.writer.fail(new Exception)
assertSecondRequestOk()
})
test("client: discard respond reader")(new ClientCtx(singletonPool = true) {
assert(res2.poll.isEmpty)
res.reader.discard()
assertSecondRequestOk()
})
test("server: request stream fails read") {
val buf = Buf.Utf8(".")
val n = new AtomicInteger(0)
val readp = new Promise[Unit]
val writer = new Pipe[Buf]()
val req2Complete = Promise[Unit]()
val service = new Service[Request, Response] {
def apply(req: Request) = n.getAndIncrement() match {
case 0 =>
req.reader.read().unit.proxyTo(readp)
Future.value(ok(writer))
case _ =>
val writer = new Pipe[Buf]()
req2Complete.become(writer.write(buf).before(writer.close()))
Future.value(ok(writer))
}
}
val server = startServer(service)
val client1 = connect(server.boundAddress, "client1")
val client2 = connect(server.boundAddress, "client2")
val req1 = get("/")
val req2 = get("abc")
val f1 = client1(req1)
// note: while the server is configured with a max concurrency of 1,
// the requests flow through the transport before that. this means
// that these requests must be sequenced.
val f2 = f1.flatMap { _ => client2(req2) }
val res = await(f1)
await(req2Complete.before(server.close()))
intercept[ChannelClosedException] { await(readp) }
intercept[ReaderDiscardedException] { await(writeLots(writer, buf)) }
intercept[ChannelException] { await(res.reader.read()) }
intercept[ReaderDiscardedException] { await(writeLots(req1.writer, buf)) }
val res2 = await(f2)
await(BufReader.readAll(res2.reader))
await(Closable.all(server, client1, client2).close())
}
test("server: response stream fails write") {
val buf = Buf.Utf8(".")
val n = new AtomicInteger(0)
val readp = new Promise[Unit]
val writer = new Pipe[Buf]()
val writep = new Promise[Unit]
val req2Complete = Promise[Unit]()
writeLots(writer, buf).proxyTo(writep)
val service = new Service[Request, Response] {
def apply(req: Request) = n.getAndIncrement() match {
case 0 =>
writep.ensure { req.reader.read().unit.proxyTo(readp) }
Future.value(ok(writer))
case _ =>
val writer = new Pipe[Buf]()
req2Complete.become(writer.write(buf).before(writer.close()))
Future.value(ok(writer))
}
}
val server = startServer(service)
val client1 = connect(server.boundAddress, "client1")
val client2 = connect(server.boundAddress, "client2")
val req1 = get("/")
val req2 = get("abc")
val f1 = client1(req1)
// note: while the server is configured with a max concurrency of 1,
// the requests flow through the transport before that. this means
// that these requests must be sequenced.
val f2 = f1.flatMap { _ => client2(req2) }
val res = await(f1)
// Cut the server connections.
await(req2Complete.before(server.close()))
intercept[ReaderDiscardedException] { await(writep) }
intercept[ChannelClosedException] { await(readp) }
intercept[ChannelException] {
val start = Time.now
while ((Time.now - start) < 30.seconds) await(res.reader.read())
}
intercept[ReaderDiscardedException] { await(writeLots(req1.writer, buf)) }
val res2 = await(f2)
await(BufReader.readAll(res2.reader))
await(Closable.all(client1, client2).close())
}
test("server: fail response writer") {
val buf = Buf.Utf8(".")
val n = new AtomicInteger(0)
val failure = new Promise[Unit]
val service = new Service[Request, Response] {
def apply(req: Request) = n.getAndIncrement() match {
case 0 =>
val writer = new Pipe[Buf]()
failure.ensure { writer.fail(new Exception) }
Future.value(ok(writer))
case _ =>
val writer = new Pipe[Buf]()
failure.ensure { writer.write(buf) ensure writer.close() }
Future.value(ok(writer))
}
}
val server = startServer(service)
val client1 = connect(server.boundAddress, "client1")
val client2 = connect(server.boundAddress, "client2")
val req1 = get("/")
val req2 = get("abc")
val f1 = client1(req1)
val f2 = f1.flatMap { _ => client2(req2) }
val res = await(f1)
failure.setDone()
intercept[ChannelException] { await(res.reader.read()) }
intercept[ReaderDiscardedException] { await(writeLots(req1.writer, buf)) }
val res2 = await(f2)
await(BufReader.readAll(res2.reader))
await(Closable.all(server, client1, client2).close())
}
test("server: fail request reader") {
val buf = Buf.Utf8(".")
val n = new AtomicInteger(0)
val failure = new Promise[Unit]
val service = new Service[Request, Response] {
def apply(req: Request) = n.getAndIncrement() match {
case 0 =>
val writer = new Pipe[Buf]()
failure.ensure {
req.reader.discard()
writer.write(buf).ensure { writer.close() }
}
Future.value(ok(writer))
case _ =>
val writer = new Pipe[Buf]()
failure.ensure { writer.write(buf).ensure { writer.close() } }
Future.value(ok(writer))
}
}
val server = startServer(service)
val client1 = connect(server.boundAddress, "client1")
val client2 = connect(server.boundAddress, "client2")
val req1 = get("/")
val req2 = get("abc")
val f1 = client1(req1)
val f2 = f1.flatMap { _ => client2(req2) }
val res = await(f1)
failure.setDone()
intercept[ChannelException] { await(res.reader.read()) }
intercept[ReaderDiscardedException] { await(writeLots(req1.writer, buf)) }
val res2 = await(f2)
await(BufReader.readAll(res2.reader))
await(Closable.all(server, client1, client2).close())
}
test("server: empty buf doesn't close response stream") {
val service = const(Seq(Buf.Utf8("hello"), Buf.Empty, Buf.Utf8("world")))
val server = startServer(service)
val client = connect(server.boundAddress, "client")
val body = await(client(get("/")).flatMap(res => BufReader.readAll(res.reader)))
assert(body == Buf.Utf8("helloworld"))
await(Closable.all(server, client).close())
}
test("client: empty buf doesn't close request stream") {
val server = startServer(echo)
val client = connect(server.boundAddress, "client")
val req = get("/")
val res = await(client(req))
await(for {
_ <- req.writer.write(Buf.Utf8("hello"))
_ <- req.writer.write(Buf.Empty)
_ <- req.writer.write(Buf.Utf8("world"))
_ <- req.writer.close()
} yield ())
val body = await(BufReader.readAll(res.reader))
assert(body == Buf.Utf8("helloworld"))
await(Closable.all(server, client).close())
}
test("end-to-end: server gets content for chunked request made to client with content length") {
val svc = Service.mk[Request, Response] { req =>
assert(req.contentString == "hello")
Future.value(Response(req))
}
val server = startServer(svc)
val writer = new Pipe[Buf]()
val req = Request(Version.Http11, Method.Post, "/foo", writer)
req.headerMap.put("Content-Length", "5")
req.setChunked(true)
val client = connect(server.boundAddress, "client")
val res = client(req)
await(writer.write(Buf.Utf8("hello")))
writer.close()
await(res)
await(Closable.all(server, client).close())
}
test("end-to-end: client may process multiple streaming requests simultaneously") {
val service = Service.mk[Request, Response] { req =>
val writable = new Pipe[Buf]() // never gets closed
Future.value(Response(req.version, HttpStatus.Ok, writable))
}
val server = startServer(service)
val addr = server.boundAddress
val client = connect(addr)
try {
val req0 = Request("/0")
val rep0 = await(client(req0))
assert(rep0.status == HttpStatus.Ok)
assert(rep0.isChunked)
val req1 = Request("/1")
val rep1 = await(client(req1))
assert(rep1.status == HttpStatus.Ok)
assert(rep1.isChunked)
} finally {
await(Closable.all(client, server).close())
}
}
test("server: inbound stream (reader) propagates closures initiated remotely") {
val termination = new Promise[StreamTermination]
val service = Service.mk[Request, Response] { req =>
termination.become(req.reader.onClose)
Future.never // never responds
}
val server = startServer(service)
val addr = server.boundAddress
val client = connect(addr)
try {
val req = Request("/")
req.setChunked(true)
client(req)
req.writer.fail(new Exception())
assert(await(termination.liftToTry).isThrow)
} finally {
await(Closable.all(client, server).close())
}
}
test("server: outbound stream (writer) propagates closures initiated remotely") {
val termination = new Promise[StreamTermination]
val service = Service.mk[Request, Response] { _ =>
val rep = Response()
rep.setChunked(true)
termination.become(rep.writer.onClose)
Future.value(rep)
}
val server = startServer(service)
val addr = server.boundAddress
val client = connect(addr)
try {
val rep = await(client(Request("/")))
rep.reader.discard()
assert(!await(termination).isFullyRead)
} finally {
await(Closable.all(client, server).close())
}
}
test("client: inbound stream (reader) propagates closures initiated remotely") {
val stream = new Promise[Writer[Buf]]
val service = Service.mk[Request, Response] { req =>
val rep = Response()
rep.setChunked(true)
stream.setValue(rep.writer)
Future.value(rep)
}
val server = startServer(service)
val addr = server.boundAddress
val client = connect(addr)
try {
val rep = await(client(Request("/")))
await(stream).fail(new Exception())
assert(await(rep.reader.onClose.liftToTry).isThrow)
} finally {
await(Closable.all(client, server).close())
}
}
test("client: outbound stream (writer) propagates closures initiated remotely") {
val service = Service.mk[Request, Response] { req =>
req.reader.discard()
Future.never
}
val server = startServer(service)
val addr = server.boundAddress
val client = connect(addr)
try {
val req = Request("/")
req.setChunked(true)
client(req)
assert(!await(req.writer.onClose).isFullyRead)
} finally {
await(Closable.all(client, server).close())
}
}
def startServer(service: Service[Request, Response]): ListeningServer = {
configureServer(FinagleHttp.server)
.withStreaming(0.bytes) // no aggregation
.withLabel("server")
.serve(new InetSocketAddress(0), service)
}
def connect(
addr: SocketAddress,
name: String = "client",
singletonPool: Boolean = false
): Service[Request, Response] = connectWithModifier(addr, name, singletonPool)(identity)
def connectWithModifier(
addr: SocketAddress,
name: String = "client",
singletonPool: Boolean = false
)(
mod: Modifier
): Service[Request, Response] = {
configureClient(FinagleHttp.client, singletonPool)
.withStreaming(0.bytes) // no aggregation
.configured(ClientEndpointer.TransportModifier(mod))
.newService(Name.bound(Address(addr.asInstanceOf[InetSocketAddress])), name)
}
def closingOnceTransport(closed: Future[Unit]): Modifier = {
val setFail = new AtomicBoolean(false)
val mod: Modifier = { transport: Transport[Any, Any] =>
if (!setFail.getAndSet(true)) closed.ensure {
await(transport.close())
}
transport
}
mod
}
}
object StreamingTest {
type Modifier = Transport[Any, Any] => Transport[Any, Any]
def await[A](f: Future[A]): A = Await.result(f, 30.seconds)
val echo = new Service[Request, Response] {
def apply(req: Request): Future[Response] = Future.value(ok(req.reader))
}
def const(bufs: Seq[Buf]): Service[Request, Response] =
new Service[Request, Response] {
private def drain(writer: Writer[Buf], bs: Seq[Buf]): Future[Unit] = bs match {
case Nil => Future.Done
case head +: tail => writer.write(head).before(drain(writer, tail))
}
def apply(req: Request): Future[Response] = {
val writer = new Pipe[Buf]()
drain(writer, bufs).before(writer.close)
Future.value(ok(writer))
}
}
val neverRespond = new ConstantService[Request, Response](Future.never)
def get(uri: String): Request = {
val req = Request(uri)
req.setChunked(true)
req
}
def ok(readerIn: Reader[Buf]): Response = {
val res = Response(Version.Http11, HttpStatus.Ok, readerIn)
res.headerMap.set("Connection", "close")
res
}
}
|
twitter/finagle
|
finagle-http/src/test/scala/com/twitter/finagle/http/AbstractStreamingTest.scala
|
Scala
|
apache-2.0
| 17,713 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest, MLTestingUtils}
import org.apache.spark.sql.Row
class MaxAbsScalerSuite extends MLTest with DefaultReadWriteTest {
import testImplicits._
test("MaxAbsScaler fit basic case") {
val data = Array(
Vectors.dense(1, 0, 100),
Vectors.dense(2, 0, 0),
Vectors.sparse(3, Array(0, 2), Array(-2, -100)),
Vectors.sparse(3, Array(0), Array(-1.5)))
val expected: Array[Vector] = Array(
Vectors.dense(0.5, 0, 1),
Vectors.dense(1, 0, 0),
Vectors.sparse(3, Array(0, 2), Array(-1, -1)),
Vectors.sparse(3, Array(0), Array(-0.75)))
val df = data.zip(expected).toSeq.toDF("features", "expected")
val scaler = new MaxAbsScaler()
.setInputCol("features")
.setOutputCol("scaled")
val model = scaler.fit(df)
testTransformer[(Vector, Vector)](df, model, "expected", "scaled") {
case Row(expectedVec: Vector, actualVec: Vector) =>
assert(expectedVec === actualVec,
s"MaxAbsScaler error: Expected $expectedVec but computed $actualVec")
}
MLTestingUtils.checkCopyAndUids(scaler, model)
}
test("MaxAbsScaler read/write") {
val t = new MaxAbsScaler()
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
testDefaultReadWrite(t)
}
test("MaxAbsScalerModel read/write") {
val instance = new MaxAbsScalerModel(
"myMaxAbsScalerModel", Vectors.dense(1.0, 10.0))
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
val newInstance = testDefaultReadWrite(instance)
assert(newInstance.maxAbs === instance.maxAbs)
}
}
|
WindCanDie/spark
|
mllib/src/test/scala/org/apache/spark/ml/feature/MaxAbsScalerSuite.scala
|
Scala
|
apache-2.0
| 2,528 |
package org.cloudfun.util
import _root_.java.util.logging.{Logger, Level}
/**
 * A mixin that provides various logging methods that delegate to a generic logging method.
 *
 * Utilizes Scala's by-name parameter syntax so that message arguments are only evaluated if the message will actually be logged.
 */
// TODO: Use own level names for severe and fine, or official, or both?
// TODO: Does this mean that we instead create a lot of instances of abstract classes that are passed to the log methods?
trait LogMethods {
def loggingPath = getClass().getName
lazy val logger : Logger = Logger.getLogger( loggingPath )
def log(level: Level, message: => String, exception: => Throwable) {
if (logger != null && logger.isLoggable(level) ) logger.log( level, message, exception )
}
final def logError( message : => String ) { log( Level.SEVERE, message ) }
//def severe( message : => String ) = log( Level.SEVERE, message )
final def logWarning( message : => String ) { log( Level.WARNING, message ) }
final def logInfo( message : => String ) { log( Level.INFO, message ) }
final def logDebug( message : => String ) { log( Level.FINE, message ) }
//def fine( message : => String ) = log( Level.FINE, message )
final def logTrace( message : => String ) { log( Level.FINER, message ) }
//def finer( message : => String )= log( Level.FINER, message )
final def logError( message : => String, exception : => Throwable ) { log( Level.SEVERE, message, exception ) }
//def severe( message : => String, exception : => Throwable ) = log( Level.SEVERE, message, exception )
final def logWarning( message : => String, exception : => Throwable ) { log( Level.WARNING, message, exception ) }
final def logInfo( message : => String, exception : => Throwable ) { log( Level.INFO, message, exception ) }
final def logDebug( message : => String, exception : => Throwable ) { log( Level.FINE, message, exception ) }
//def fine( message : => String, exception : => Throwable ) = log( Level.FINE, message, exception )
final def logTrace( message : => String , exception : => Throwable ) { log( Level.FINER, message, exception ) }
//def finer( message : => String , exception : => Throwable )= log( Level.FINER, message, exception )
final def log( level : Level, message : => String ) : Unit = { log( level, message, null ) }
}
|
zzorn/cloudfun
|
src/main/scala/org/cloudfun/util/LogMethods.scala
|
Scala
|
lgpl-3.0
| 2,327 |
package chapter26
/**
 * Chapter 26: Extractors
 *
 * By now you have probably become comfortable with the concise way pattern matching
 * lets you decompose and analyze data. Chapter 26 explains how to generalize that
 * concept further.
 *
 * So far, constructor patterns have been tied to case classes. For example, Some(x)
 * was a legal pattern only because Some is a case class. Sometimes you would like to
 * use such patterns without creating a case class. Extractors provide a way to do that.
 *
 * 26.1 Example: extracting email addresses
 *
 * Given a string, we want to decide whether it is an email address and, if so, access
 * its user and domain parts. The traditional way to solve this typically uses three
 * helper functions.
 */
class c26_i01 extends App {
/*
// Helper functions
def isEMail(s: String): Boolean
def domain(s: String): String
def user(s: String): String
val s = "str"
if (isEMail(s)) println(user(s) + "AT" + domain(s))
else println("not an email address")
*/
/*
 * This works, but it is clumsy. If several tests had to be combined, the program
 * would get even more complicated. For example, what if you wanted to find out
 * whether two successive strings in a list are email addresses of the same user?
 * => We already saw in Chapter 15 that pattern matching is the ideal tool for this.
 *
 * EMail(user, domain) is a pattern that matches any string containing an @ sign.
 *
 * s match {
 *   case EMail(user, domain) => println(user + " AT " + domain)
 *   case _ => println("not an email address")
 * }
 *
 * The more complex case of two consecutive addresses of the same user:
 * ss match {
 *   case EMail(u1, d1) :: EMail(u2, d2) :: _ if (u1 == u2) => ...
 * }
 *
 * This code is far more readable than anything written with the three helper
 * functions. The problem, however, is that String is not a case class: strings do
 * not have a representation conforming to EMail(user, domain). This is where
 * Scala's extractors come in. A pattern need not follow the internal
 * representation of a type.
 */
}
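// A minimal sketch of the EMail extractor described in the comments above (an
// assumption for illustration; the book develops it later in the chapter): an
// object whose `unapply` method makes EMail(user, domain) usable as a pattern
// even though String is not a case class.
object EMail {
  // Optional injection: assemble an address from its parts.
  def apply(user: String, domain: String): String = user + "@" + domain

  // Extraction: called by pattern matching; succeeds only for strings of the
  // form user@domain containing exactly one '@'.
  def unapply(str: String): Option[(String, String)] = str.split("@") match {
    case Array(user, domain) => Some((user, domain))
    case _                   => None
  }
}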
|
seraekim/srkim-lang-scala
|
src/main/java/chapter26/c26_i01.scala
|
Scala
|
bsd-3-clause
| 2,420 |
package io.eels.component.parquet
import com.sksamuel.exts.Logging
import com.sksamuel.exts.io.Using
import io.eels.component.parquet.util.ParquetIterator
import io.eels.schema.StructType
import io.eels.{CloseableIterator, Part, Predicate, Row}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.parquet.hadoop.ParquetFileReader
import com.sksamuel.exts.OptionImplicits._
class ParquetPart(path: Path,
predicate: Option[Predicate],
projection: Seq[String])
(implicit conf: Configuration) extends Part with Logging with Using {
lazy val projectionSchema = {
if (projection.isEmpty)
None
else {
val messageType = ParquetFileReader.open(conf, path).getFileMetaData.getSchema
val structType = ParquetSchemaFns.fromParquetMessageType(messageType)
val projected = StructType(structType.fields.filter(field => projection.contains(field.name)))
ParquetSchemaFns.toParquetMessageType(projected).some
}
}
override def iterator(): CloseableIterator[Seq[Row]] = new CloseableIterator[Seq[Row]] {
val reader = ParquetReaderFn(path, predicate, projectionSchema)
override def close(): Unit = {
super.close()
reader.close()
}
override val iterator: Iterator[Seq[Row]] = ParquetIterator(reader).grouped(100).withPartial(true)
}
}
|
stheppi/eel
|
eel-components/src/main/scala/io/eels/component/parquet/ParquetPart.scala
|
Scala
|
apache-2.0
| 1,394 |
package com.thangiee.lolhangouts.ui.core
import android.view.ViewGroup
import android.view.ViewGroup.LayoutParams._
import com.google.android.gms.ads.{AdRequest, AdSize, AdView}
trait Ads extends TActivity {
private lazy val adView = new AdView(ctx)
def adUnitId: String
def adLayout: ViewGroup
def setupAds(): Unit = {
adView.setAdSize(AdSize.SMART_BANNER)
adView.setAdUnitId(adUnitId)
val params = new ViewGroup.LayoutParams(WRAP_CONTENT, WRAP_CONTENT)
adLayout.addView(adView, params)
val adRequest = new AdRequest.Builder()
.addTestDevice(AdRequest.DEVICE_ID_EMULATOR)
.build()
adView.loadAd(adRequest)
}
override def onResume(): Unit = {
super.onResume()
if (adView != null) adView.resume()
}
override def onPause(): Unit = {
if (adView != null) adView.pause()
super.onPause()
}
override def onDestroy(): Unit = {
if (adView != null) adView.destroy()
super.onDestroy()
}
}
|
Thangiee/LoL-Hangouts
|
src/com/thangiee/lolhangouts/ui/core/Ads.scala
|
Scala
|
apache-2.0
| 971 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.{PrintWriter, File, DataInput, DataOutput}
import java.util.{ArrayList, Arrays, Properties}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.udf.UDAFPercentile
import org.apache.hadoop.hive.ql.udf.generic._
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.{AbstractSerDe, SerDeStats}
import org.apache.hadoop.io.Writable
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.util.Utils
case class Fields(f1: Int, f2: Int, f3: Int, f4: Int, f5: Int)
// Case classes for the custom UDF's.
case class IntegerCaseClass(i: Int)
case class ListListIntCaseClass(lli: Seq[(Int, Int, Int)])
case class StringCaseClass(s: String)
case class ListStringCaseClass(l: Seq[String])
/**
* A test suite for Hive custom UDFs.
*/
class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
import hiveContext.{udf, sql}
import hiveContext.implicits._
test("spark sql udf test that returns a struct") {
udf.register("getStruct", (_: Int) => Fields(1, 2, 3, 4, 5))
assert(sql(
"""
|SELECT getStruct(1).f1,
| getStruct(1).f2,
| getStruct(1).f3,
| getStruct(1).f4,
| getStruct(1).f5 FROM src LIMIT 1
""".stripMargin).head() === Row(1, 2, 3, 4, 5))
}
test("SPARK-4785 When called with arguments referring column fields, PMOD throws NPE") {
checkAnswer(
sql("SELECT PMOD(CAST(key as INT), 10) FROM src LIMIT 1"),
Row(8)
)
}
test("hive struct udf") {
sql(
"""
|CREATE EXTERNAL TABLE hiveUDFTestTable (
| pair STRUCT<id: INT, value: INT>
|)
|PARTITIONED BY (partition STRING)
|ROW FORMAT SERDE '%s'
|STORED AS SEQUENCEFILE
""".
stripMargin.format(classOf[PairSerDe].getName))
val location = Utils.getSparkClassLoader.getResource("data/files/testUDF").getFile
sql(s"""
ALTER TABLE hiveUDFTestTable
ADD IF NOT EXISTS PARTITION(partition='testUDF')
LOCATION '$location'""")
sql(s"CREATE TEMPORARY FUNCTION testUDF AS '${classOf[PairUDF].getName}'")
sql("SELECT testUDF(pair) FROM hiveUDFTestTable")
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDF")
}
test("Max/Min on named_struct") {
checkAnswer(sql(
"""
|SELECT max(named_struct(
| "key", key,
| "value", value)).value FROM src
""".stripMargin), Seq(Row("val_498")))
checkAnswer(sql(
"""
|SELECT min(named_struct(
| "key", key,
| "value", value)).value FROM src
""".stripMargin), Seq(Row("val_0")))
// nested struct cases
checkAnswer(sql(
"""
|SELECT max(named_struct(
| "key", named_struct(
"key", key,
"value", value),
| "value", value)).value FROM src
""".stripMargin), Seq(Row("val_498")))
checkAnswer(sql(
"""
|SELECT min(named_struct(
| "key", named_struct(
"key", key,
"value", value),
| "value", value)).value FROM src
""".stripMargin), Seq(Row("val_0")))
}
test("SPARK-6409 UDAF Average test") {
sql(s"CREATE TEMPORARY FUNCTION test_avg AS '${classOf[GenericUDAFAverage].getName}'")
checkAnswer(
sql("SELECT test_avg(1), test_avg(substr(value,5)) FROM src"),
Seq(Row(1.0, 260.182)))
sql("DROP TEMPORARY FUNCTION IF EXISTS test_avg")
hiveContext.reset()
}
test("SPARK-2693 udaf aggregates test") {
checkAnswer(sql("SELECT percentile(key, 1) FROM src LIMIT 1"),
sql("SELECT max(key) FROM src").collect().toSeq)
checkAnswer(sql("SELECT percentile(key, array(1, 1)) FROM src LIMIT 1"),
sql("SELECT array(max(key), max(key)) FROM src").collect().toSeq)
}
test("Generic UDAF aggregates") {
checkAnswer(sql("SELECT ceiling(percentile_approx(key, 0.99999)) FROM src LIMIT 1"),
sql("SELECT max(key) FROM src LIMIT 1").collect().toSeq)
checkAnswer(sql("SELECT percentile_approx(100.0, array(0.9, 0.9)) FROM src LIMIT 1"),
sql("SELECT array(100, 100) FROM src LIMIT 1").collect().toSeq)
}
test("UDFIntegerToString") {
val testData = hiveContext.sparkContext.parallelize(
IntegerCaseClass(1) :: IntegerCaseClass(2) :: Nil).toDF()
testData.registerTempTable("integerTable")
val udfName = classOf[UDFIntegerToString].getName
sql(s"CREATE TEMPORARY FUNCTION testUDFIntegerToString AS '$udfName'")
checkAnswer(
sql("SELECT testUDFIntegerToString(i) FROM integerTable"),
Seq(Row("1"), Row("2")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFIntegerToString")
hiveContext.reset()
}
test("UDFToListString") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
testData.registerTempTable("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToListString AS '${classOf[UDFToListString].getName}'")
val errMsg = intercept[AnalysisException] {
sql("SELECT testUDFToListString(s) FROM inputTable")
}
assert(errMsg.getMessage contains "List type in java is unsupported because " +
"JVM type erasure makes spark fail to catch a component type in List<>;")
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToListString")
hiveContext.reset()
}
test("UDFToListInt") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
testData.registerTempTable("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToListInt AS '${classOf[UDFToListInt].getName}'")
val errMsg = intercept[AnalysisException] {
sql("SELECT testUDFToListInt(s) FROM inputTable")
}
assert(errMsg.getMessage contains "List type in java is unsupported because " +
"JVM type erasure makes spark fail to catch a component type in List<>;")
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToListInt")
hiveContext.reset()
}
test("UDFToStringIntMap") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
testData.registerTempTable("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToStringIntMap " +
s"AS '${classOf[UDFToStringIntMap].getName}'")
val errMsg = intercept[AnalysisException] {
sql("SELECT testUDFToStringIntMap(s) FROM inputTable")
}
assert(errMsg.getMessage contains "Map type in java is unsupported because " +
"JVM type erasure makes spark fail to catch key and value types in Map<>;")
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToStringIntMap")
hiveContext.reset()
}
test("UDFToIntIntMap") {
val testData = hiveContext.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
testData.registerTempTable("inputTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFToIntIntMap " +
s"AS '${classOf[UDFToIntIntMap].getName}'")
val errMsg = intercept[AnalysisException] {
sql("SELECT testUDFToIntIntMap(s) FROM inputTable")
}
assert(errMsg.getMessage contains "Map type in java is unsupported because " +
"JVM type erasure makes spark fail to catch key and value types in Map<>;")
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToIntIntMap")
hiveContext.reset()
}
test("UDFListListInt") {
val testData = hiveContext.sparkContext.parallelize(
ListListIntCaseClass(Nil) ::
ListListIntCaseClass(Seq((1, 2, 3))) ::
ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) :: Nil).toDF()
testData.registerTempTable("listListIntTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFListListInt AS '${classOf[UDFListListInt].getName}'")
checkAnswer(
sql("SELECT testUDFListListInt(lli) FROM listListIntTable"),
Seq(Row(0), Row(2), Row(13)))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListListInt")
hiveContext.reset()
}
test("UDFListString") {
val testData = hiveContext.sparkContext.parallelize(
ListStringCaseClass(Seq("a", "b", "c")) ::
ListStringCaseClass(Seq("d", "e")) :: Nil).toDF()
testData.registerTempTable("listStringTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFListString AS '${classOf[UDFListString].getName}'")
checkAnswer(
sql("SELECT testUDFListString(l) FROM listStringTable"),
Seq(Row("a,b,c"), Row("d,e")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListString")
hiveContext.reset()
}
test("UDFStringString") {
val testData = hiveContext.sparkContext.parallelize(
StringCaseClass("world") :: StringCaseClass("goodbye") :: Nil).toDF()
testData.registerTempTable("stringTable")
sql(s"CREATE TEMPORARY FUNCTION testStringStringUDF AS '${classOf[UDFStringString].getName}'")
checkAnswer(
sql("SELECT testStringStringUDF(\\"hello\\", s) FROM stringTable"),
Seq(Row("hello world"), Row("hello goodbye")))
checkAnswer(
sql("SELECT testStringStringUDF(\\"\\", testStringStringUDF(\\"hello\\", s)) FROM stringTable"),
Seq(Row(" hello world"), Row(" hello goodbye")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testStringStringUDF")
hiveContext.reset()
}
test("UDFTwoListList") {
val testData = hiveContext.sparkContext.parallelize(
ListListIntCaseClass(Nil) ::
ListListIntCaseClass(Seq((1, 2, 3))) ::
ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) ::
Nil).toDF()
testData.registerTempTable("TwoListTable")
sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
checkAnswer(
sql("SELECT testUDFTwoListList(lli, lli) FROM TwoListTable"),
Seq(Row("0, 0"), Row("2, 2"), Row("13, 13")))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFTwoListList")
hiveContext.reset()
}
test("Hive UDFs with insufficient number of input arguments should trigger an analysis error") {
Seq((1, 2)).toDF("a", "b").registerTempTable("testUDF")
{
// HiveSimpleUDF
sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
val message = intercept[AnalysisException] {
sql("SELECT testUDFTwoListList() FROM testUDF")
}.getMessage
assert(message.contains("No handler for Hive udf"))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFTwoListList")
}
{
// HiveGenericUDF
sql(s"CREATE TEMPORARY FUNCTION testUDFAnd AS '${classOf[GenericUDFOPAnd].getName}'")
val message = intercept[AnalysisException] {
sql("SELECT testUDFAnd() FROM testUDF")
}.getMessage
assert(message.contains("No handler for Hive udf"))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFAnd")
}
{
// Hive UDAF
sql(s"CREATE TEMPORARY FUNCTION testUDAFPercentile AS '${classOf[UDAFPercentile].getName}'")
val message = intercept[AnalysisException] {
sql("SELECT testUDAFPercentile(a) FROM testUDF GROUP BY b")
}.getMessage
assert(message.contains("No handler for Hive udf"))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDAFPercentile")
}
{
// AbstractGenericUDAFResolver
sql(s"CREATE TEMPORARY FUNCTION testUDAFAverage AS '${classOf[GenericUDAFAverage].getName}'")
val message = intercept[AnalysisException] {
sql("SELECT testUDAFAverage() FROM testUDF GROUP BY b")
}.getMessage
assert(message.contains("No handler for Hive udf"))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDAFAverage")
}
{
// Hive UDTF
sql(s"CREATE TEMPORARY FUNCTION testUDTFExplode AS '${classOf[GenericUDTFExplode].getName}'")
val message = intercept[AnalysisException] {
sql("SELECT testUDTFExplode() FROM testUDF")
}.getMessage
assert(message.contains("No handler for Hive udf"))
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDTFExplode")
}
sqlContext.dropTempTable("testUDF")
}
test("Hive UDF in group by") {
withTempTable("tab1") {
Seq(Tuple1(1451400761)).toDF("test_date").registerTempTable("tab1")
sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS '${classOf[GenericUDFToDate].getName}'")
val count = sql("select testUDFToDate(cast(test_date as timestamp))" +
" from tab1 group by testUDFToDate(cast(test_date as timestamp))").count()
sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToDate")
assert(count == 1)
}
}
test("SPARK-11522 select input_file_name from non-parquet table"){
withTempDir { tempDir =>
// EXTERNAL OpenCSVSerde table pointing to LOCATION
val file1 = new File(tempDir + "/data1")
val writer1 = new PrintWriter(file1)
writer1.write("1,2")
writer1.close()
val file2 = new File(tempDir + "/data2")
val writer2 = new PrintWriter(file2)
writer2.write("1,2")
writer2.close()
sql(
s"""CREATE EXTERNAL TABLE csv_table(page_id INT, impressions INT)
ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
WITH SERDEPROPERTIES (
\\"separatorChar\\" = \\",\\",
\\"quoteChar\\" = \\"\\\\\\"\\",
\\"escapeChar\\" = \\"\\\\\\\\\\")
LOCATION '$tempDir'
""")
val answer1 =
sql("SELECT input_file_name() FROM csv_table").head().getString(0)
assert(answer1.contains("data1") || answer1.contains("data2"))
val count1 = sql("SELECT input_file_name() FROM csv_table").distinct().count()
assert(count1 == 2)
sql("DROP TABLE csv_table")
// EXTERNAL pointing to LOCATION
sql(
s"""CREATE EXTERNAL TABLE external_t5 (c1 int, c2 int)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '$tempDir'
""")
val answer2 =
sql("SELECT input_file_name() as file FROM external_t5").head().getString(0)
assert(answer2.contains("data1") || answer2.contains("data2"))
val count2 = sql("SELECT input_file_name() as file FROM external_t5").distinct().count
assert(count2 == 2)
sql("DROP TABLE external_t5")
}
withTempDir { tempDir =>
// External parquet pointing to LOCATION
val parquetLocation = tempDir + "/external_parquet"
sql("SELECT 1, 2").write.parquet(parquetLocation)
sql(
s"""CREATE EXTERNAL TABLE external_parquet(c1 int, c2 int)
STORED AS PARQUET
LOCATION '$parquetLocation'
""")
val answer3 =
sql("SELECT input_file_name() as file FROM external_parquet").head().getString(0)
assert(answer3.contains("external_parquet"))
val count3 = sql("SELECT input_file_name() as file FROM external_parquet").distinct().count
assert(count3 == 1)
sql("DROP TABLE external_parquet")
}
// Non-External parquet pointing to /tmp/...
sql("CREATE TABLE parquet_tmp(c1 int, c2 int) " +
" STORED AS parquet " +
" AS SELECT 1, 2")
val answer4 =
sql("SELECT input_file_name() as file FROM parquet_tmp").head().getString(0)
assert(answer4.contains("parquet_tmp"))
val count4 = sql("SELECT input_file_name() as file FROM parquet_tmp").distinct().count
assert(count4 == 1)
sql("DROP TABLE parquet_tmp")
}
}
class TestPair(x: Int, y: Int) extends Writable with Serializable {
def this() = this(0, 0)
var entry: (Int, Int) = (x, y)
override def write(output: DataOutput): Unit = {
output.writeInt(entry._1)
output.writeInt(entry._2)
}
override def readFields(input: DataInput): Unit = {
val x = input.readInt()
val y = input.readInt()
entry = (x, y)
}
}
class PairSerDe extends AbstractSerDe {
override def initialize(p1: Configuration, p2: Properties): Unit = {}
override def getObjectInspector: ObjectInspector = {
ObjectInspectorFactory
.getStandardStructObjectInspector(
Arrays.asList("pair"),
Arrays.asList(ObjectInspectorFactory.getStandardStructObjectInspector(
Arrays.asList("id", "value"),
Arrays.asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.javaIntObjectInspector))
))
}
override def getSerializedClass: Class[_ <: Writable] = classOf[TestPair]
override def getSerDeStats: SerDeStats = null
override def serialize(p1: scala.Any, p2: ObjectInspector): Writable = null
override def deserialize(value: Writable): AnyRef = {
val pair = value.asInstanceOf[TestPair]
val row = new ArrayList[ArrayList[AnyRef]]
row.add(new ArrayList[AnyRef](2))
row.get(0).add(Integer.valueOf(pair.entry._1))
row.get(0).add(Integer.valueOf(pair.entry._2))
row
}
}
class PairUDF extends GenericUDF {
override def initialize(p1: Array[ObjectInspector]): ObjectInspector =
ObjectInspectorFactory.getStandardStructObjectInspector(
Arrays.asList("id", "value"),
Arrays.asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
PrimitiveObjectInspectorFactory.javaIntObjectInspector)
)
override def evaluate(args: Array[DeferredObject]): AnyRef = {
Integer.valueOf(args(0).get.asInstanceOf[TestPair].entry._2)
}
override def getDisplayString(p1: Array[String]): String = ""
}
|
chenc10/Spark-PAF
|
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
|
Scala
|
apache-2.0
| 18,460 |
package tul.poiis.decision_tree
/**
* Created by michal on 06.01.2016.
*/
class Feature(val value: String) {
def stringValue: String = {
value
}
}
|
CucumisSativus/decision-tree-movie-evaluator
|
src/main/scala/tul/poiis/decision_tree/Feature.scala
|
Scala
|
mit
| 159 |
package org.scalamu.report
sealed abstract class LineStatus(val styleName: String)
object LineStatus {
case object Covered extends LineStatus("covered")
case object NotCovered extends LineStatus("uncovered")
case object NotApplicable extends LineStatus("na")
}
|
sugakandrey/scalamu
|
report/src/main/scala/org/scalamu/report/LineStatus.scala
|
Scala
|
gpl-3.0
| 278 |
package com.sksamuel.elastic4s.searches.queries.geo
import com.sksamuel.elastic4s.DistanceUnit
import com.sksamuel.elastic4s.searches.queries.Query
import com.sksamuel.exts.OptionImplicits._
case class GeoDistanceQuery(field: String,
geoDistance: Option[GeoDistance] = None,
geohash: Option[String] = None,
distanceStr: Option[String] = None,
distance: Option[(Double, DistanceUnit)] = None,
ignoreUnmapped: Option[Boolean] = None,
boost: Option[Double] = None,
validationMethod: Option[GeoValidationMethod] = None,
queryName: Option[String] = None,
point: Option[(Double, Double)] = None)
extends Query {
def queryName(queryName: String): GeoDistanceQuery = copy(queryName = queryName.some)
def boost(boost: Double): GeoDistanceQuery = copy(boost = boost.some)
// alias for geoDistance
def distanceType(geod: GeoDistance): GeoDistanceQuery = geoDistance(geod)
def geoDistance(geod: GeoDistance): GeoDistanceQuery = copy(geoDistance = geod.some)
def ignoreUnmapped(ignore: Boolean): GeoDistanceQuery = copy(ignoreUnmapped = ignore.some)
def geohash(geohash: String): GeoDistanceQuery = copy(geohash = geohash.some)
def validationMethod(validationMethod: GeoValidationMethod): GeoDistanceQuery =
copy(validationMethod = validationMethod.some)
def point(lat: Double, long: Double): GeoDistanceQuery = copy(point = (lat, long).some)
def distance(distance: String): GeoDistanceQuery = copy(distanceStr = distance.some)
def distance(distance: Double, unit: DistanceUnit): GeoDistanceQuery =
copy(distance = (distance, unit).some)
}
|
Tecsisa/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/geo/GeoDistanceQuery.scala
|
Scala
|
apache-2.0
| 1,830 |
package ammonite
import ammonite.runtime.{History, Interpreter, Storage}
import ammonite.main.Defaults
import ammonite.ops._
import ammonite.runtime.tools.IvyConstructor._
import ammonite.TestUtils._
import utest._
object CachingTests extends TestSuite{
val tests = TestSuite{
println("ScriptTests")
val scriptPath = pwd/'amm/'src/'test/'resources/'scripts
val resourcesPath = pwd/'amm/'src/'test/'resources
val tempDir = tmp.dir(prefix="ammonite-tester")
'noAutoIncrementWrapper{
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
interp.interpApi.load.module(scriptPath/"ThreeBlocks.sc")
try{
Class.forName("cmd0")
assert(false)
} catch {
case e: ClassNotFoundException => assert(true)
case e: Exception => assert(false)
}
}
'blocks{
val cases = Seq("OneBlock.sc" -> 2, "TwoBlocks.sc" -> 3, "ThreeBlocks.sc" -> 4)
for((fileName, expected) <- cases){
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
val n0 = storage.compileCache.size
assert(n0 == 1) // customLolz predef
interp.interpApi.load.module(scriptPath/fileName)
val n = storage.compileCache.size
assert(n == expected)
}
}
'processModuleCaching{
def check(script: RelPath){
val storage = new Storage.Folder(tempDir)
val interp1 = createTestInterp(
storage,
Defaults.predefString
)
interp1.interpApi.load.module(resourcesPath/script)
assert(interp1.compiler != null)
val interp2 = createTestInterp(
storage,
Defaults.predefString
)
assert(interp2.compiler == null)
interp2.interpApi.load.module(resourcesPath/script)
assert(interp2.compiler == null)
}
'testOne - check('scriptLevelCaching/"scriptTwo.sc")
'testTwo - check('scriptLevelCaching/"scriptOne.sc")
'testThree - check('scriptLevelCaching/"QuickSort.sc")
'testLoadModule - check('scriptLevelCaching/"testLoadModule.sc")
'testFileImport - check('scriptLevelCaching/"testFileImport.sc")
'testIvyImport - check('scriptLevelCaching/"ivyCacheTest.sc")
}
'testRunTimeExceptionForCachedScripts{
val storage = new Storage.Folder(tempDir)
val numFile = pwd/'amm/'target/'test/'resources/'scriptLevelCaching/"num.value"
rm(numFile)
write(numFile, "1")
val interp1 = createTestInterp(
storage,
Defaults.predefString
)
interp1.interpApi.load.module(resourcesPath/'scriptLevelCaching/"runTimeExceptions.sc")
val interp2 = createTestInterp(
storage,
Defaults.predefString
)
val res = intercept[java.lang.ArithmeticException]{
interp2.interpApi.load.module(
resourcesPath/'scriptLevelCaching/"runTimeExceptions.sc"
)
}
assert(interp2.compiler == null &&
res.toString == "java.lang.ArithmeticException: / by zero")
}
'persistence{
val tempDir = ammonite.ops.Path(
java.nio.file.Files.createTempDirectory("ammonite-tester-x")
)
val interp1 = createTestInterp(new Storage.Folder(tempDir))
val interp2 = createTestInterp(new Storage.Folder(tempDir))
interp1.interpApi.load.module(scriptPath/"OneBlock.sc")
interp2.interpApi.load.module(scriptPath/"OneBlock.sc")
val n1 = interp1.compilationCount
val n2 = interp2.compilationCount
assert(n1 == 2) // customLolz predef + OneBlock.sc
assert(n2 == 0) // both should be cached
}
'tags{
val storage = Storage.InMemory()
val interp = createTestInterp(storage)
interp.interpApi.load.module(scriptPath/"TagBase.sc")
interp.interpApi.load.module(scriptPath/"TagPrevCommand.sc")
interp.interpApi.load.ivy("com.lihaoyi" %% "scalatags" % "0.4.5")
interp.interpApi.load.module(scriptPath/"TagBase.sc")
val n = storage.compileCache.size
assert(n == 5) // customLolz predef + two blocks for each loaded file
}
'changeScriptInvalidation{
// This makes sure that the compile caches are properly utilized, and
// flushed, in a variety of circumstances: changes to the number of
// blocks in the predef, predefs containing magic imports, and changes
// to the script being run. For each change, the caches should be
// invalidated, and subsequently a single compile should be enough
// to re-fill the caches
val predefFile = tmp("""
val x = 1337
@
val y = x
import $ivy.`com.lihaoyi::scalatags:0.5.4`, scalatags.Text.all._
""")
val scriptFile = tmp("""div("<('.'<)", y).render""")
def processAndCheckCompiler(f: ammonite.runtime.Compiler => Boolean) ={
val interp = createTestInterp(
new Storage.Folder(tempDir){
override val predef = predefFile
},
Defaults.predefString
)
interp.interpApi.load.module(scriptFile)
assert(f(interp.compiler))
}
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
rm! predefFile
write(
predefFile,
"""
import $ivy.`com.lihaoyi::scalatags:0.5.4`; import scalatags.Text.all._
val y = 31337
"""
)
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
rm! scriptFile
write(
scriptFile,
"""div("(>'.')>", y).render"""
)
processAndCheckCompiler(_ != null)
processAndCheckCompiler(_ == null)
}
}
}
|
coderabhishek/Ammonite
|
amm/src/test/scala/ammonite/CachingTests.scala
|
Scala
|
mit
| 5,694 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.bigdecimal
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
* Test cases for columns having null values
*/
class TestNullAndEmptyFields extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
sql("drop table if exists carbonTable")
sql("drop table if exists hiveTable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT
)
val csvFilePath = s"$resourcesPath/nullandnonparsableValue.csv"
sql(
"CREATE TABLE IF NOT EXISTS carbonTable (ID String, date Timestamp, country String, name " +
"String, phonetype String, serialname String, salary Decimal(17,2)) STORED AS carbondata"
)
sql(
"create table if not exists hiveTable(ID String, date Timestamp, country String, name " +
"String, " +
"phonetype String, serialname String, salary Decimal(17,2))row format delimited fields " +
"terminated by ','"
)
sql(
"LOAD DATA LOCAL INPATH '" + csvFilePath + "' into table carbonTable OPTIONS " +
"('FILEHEADER'='ID,date," +
"country,name,phonetype,serialname,salary')"
)
sql(
"LOAD DATA local inpath '" + csvFilePath + "' INTO table hiveTable"
)
}
test("test detail query on column having null values") {
checkAnswer(
sql("select * from carbonTable"),
sql("select * from hiveTable")
)
}
test("test filter query on column is null") {
checkAnswer(
sql("select * from carbonTable where salary is null"),
sql("select * from hiveTable where salary is null")
)
}
test("test filter query on column is not null") {
checkAnswer(
sql("select * from carbonTable where salary is not null"),
sql("select * from hiveTable where salary is not null")
)
}
test("test filter query on columnValue=null") {
checkAnswer(
sql("select * from carbonTable where salary=null"),
sql("select * from hiveTable where salary=null")
)
}
test("test filter query where date is null") {
checkAnswer(
sql("select * from carbonTable where date is null"),
sql("select * from hiveTable where date is null")
)
}
test("test subquery on column having null values") {
checkAnswer(
sql("select * from (select if(country='china','c', country) test " +
"from carbonTable)qq where test is null"),
sql("select * from (select if(country='china','c', country) test " +
"from hiveTable)qq where test is null")
)
}
test("test subquery on column having not null values") {
checkAnswer(
sql("select * from (select if(country='china','c', country) test " +
"from carbonTable)qq where test is not null"),
sql("select * from (select if(country='china','c', country) test " +
"from hiveTable)qq where test is not null")
)
}
override def afterAll {
sql("drop table if exists carbonTable")
sql("drop table if exists hiveTable")
}
}
|
zzcclp/carbondata
|
integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestNullAndEmptyFields.scala
|
Scala
|
apache-2.0
| 4,068 |
package com.wincom.dcim.sharded
import java.lang.Math._
import akka.actor.{ActorRef, Props, ReceiveTimeout}
import akka.cluster.sharding.ShardRegion
import akka.cluster.sharding.ShardRegion.Passivate
import akka.http.scaladsl.model.DateTime
import com.wincom.dcim.domain.{AlarmRecord, Settings}
import com.wincom.dcim.message.alarmrecord.PassivateAlarmRecordCmd
import com.wincom.dcim.message.common.Command
import com.wincom.dcim.util.DateFormat._
/**
* Created by wangxy on 17-8-29.
*/
object ShardedAlarmRecord {
def props(notifier: () => ActorRef) = Props(new ShardedAlarmRecord(notifier))
def name(alarmId: String, beginTime: DateTime): String = s"${alarmId},${formatTimestamp(beginTime.clicks)}"
val shardName = "alarm-record-shard"
var numberOfShards = 100
val extractEntityId: ShardRegion.ExtractEntityId = {
case cmd: Command =>
(cmd.entityId, cmd)
}
val extractShardId: ShardRegion.ExtractShardId = {
case cmd: Command =>
(abs(cmd.entityId.hashCode) % numberOfShards).toString
}
}
class ShardedAlarmRecord(notifier: () => ActorRef) extends AlarmRecord(notifier) {
val settings = Settings(context.system)
context.setReceiveTimeout(settings.actor.passivateTimeout)
override def unhandled(message: Any): Unit = message match {
case ReceiveTimeout =>
context.parent ! Passivate(stopMessage = PassivateAlarmRecordCmd)
case PassivateAlarmRecordCmd =>
context.stop(self)
case x => log.info("unhandled COMMAND: {} {}", this, x)
}
}
|
xtwxy/mysc
|
dcim-cluster/cluster/src/main/scala/com/wincom/dcim/sharded/ShardedAlarmRecord.scala
|
Scala
|
apache-2.0
| 1,513 |
package com.arcusys.valamis.gradebook.storage
import com.arcusys.valamis.gradebook.model.CourseGrade
trait CourseGradeStorage {
def create(course: CourseGrade)
def get(courseId: Int, userID: Int): Option[CourseGrade]
def modify(course: CourseGrade)
def renew()
}
|
ViLPy/Valamis
|
valamis-gradebook/src/main/scala/com/arcusys/valamis/gradebook/storage/CourseGradeStorage.scala
|
Scala
|
lgpl-3.0
| 273 |
package com.wavesplatform.lang.v1
import java.nio.charset.StandardCharsets
import java.util.concurrent.{ThreadLocalRandom, TimeUnit}
import cats.Id
import cats.syntax.bifunctor._
import com.wavesplatform.account
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.crypto.Curve25519
import com.wavesplatform.lang.directives.DirectiveSet
import com.wavesplatform.lang.directives.values.{Account, DApp, V4}
import com.wavesplatform.lang.script.Script
import com.wavesplatform.lang.v1.EnvironmentFunctionsBenchmark._
import com.wavesplatform.lang.v1.compiler.Terms.{CONST_STRING, EVALUATED, EXPR, FUNCTION_CALL}
import com.wavesplatform.lang.v1.evaluator.EvaluatorV2
import com.wavesplatform.lang.v1.evaluator.ctx.EvaluationContext
import com.wavesplatform.lang.v1.evaluator.ctx.impl.EnvironmentFunctions
import com.wavesplatform.lang.v1.evaluator.ctx.impl.waves.{Functions, WavesContext}
import com.wavesplatform.lang.v1.traits._
import com.wavesplatform.lang.v1.traits.domain.Recipient.Address
import com.wavesplatform.lang.v1.traits.domain.{BlockInfo, Recipient, ScriptAssetInfo, Tx}
import com.wavesplatform.lang.{Common, Global, ValidationError}
import com.wavesplatform.wallet.Wallet
import monix.eval.Coeval
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.infra.Blackhole
import scala.util.Random
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@BenchmarkMode(Array(Mode.AverageTime))
@Threads(1)
@Fork(1)
@Warmup(iterations = 10)
@Measurement(iterations = 10)
class EnvironmentFunctionsBenchmark {
@Benchmark
def random_bytes_500_test(): Array[Byte] = randomBytes(DataBytesLength)
@Benchmark
def base58_decode_full_test(): Either[String, Array[Byte]] =
Global.base58Decode(Global.base58Encode(randomBytes(Base58BytesLength)).explicitGet())
@Benchmark
def base58_encode_test(): String = hashTest(Base58BytesLength, Global.base58Encode(_).explicitGet())
@Benchmark
def base58_26_encode_test(): String = hashTest(26, Global.base58Encode(_).explicitGet()) // for addressFromString_full_test
@Benchmark
def base16_decode_test(): Array[Byte] = Global.base16Decode(string32Kb, checkLength = true).explicitGet()
@Benchmark
def base16_encode_test(): String = Global.base16Encode(bytes8Kb, checkLength = true).explicitGet()
@Benchmark
def sha256_test(): Array[Byte] = hashTest(Global.sha256)
@Benchmark
def keccak256_test(): Array[Byte] = hashTest(Global.keccak256)
@Benchmark
def blake2b256_test(): Array[Byte] = hashTest(Global.blake2b256)
@Benchmark
def secureHash_test(): Array[Byte] = hashTest(Global.secureHash)
@Benchmark
def curve25519_generateKeypair_test(): (Array[Byte], Array[Byte]) = curve25519.generateKeypair
@Benchmark
def curve25519_sign_full_test(): Array[Byte] = {
val (privateKey, _) = curve25519.generateKeypair
curve25519.sign(privateKey, randomBytes(DataBytesLength))
}
@Benchmark
def curve25519_full_test(): Boolean = {
val (privateKey, publicKey) = curve25519.generateKeypair
val message = randomBytes(DataBytesLength)
val signature = curve25519.sign(privateKey, message)
Curve25519.verify(signature, message, publicKey)
}
@Benchmark
def addressFromPublicKey_test(): ByteStr = randomAddress
@Benchmark
def addressFromString(st: AddressFromString, bh: Blackhole): Unit = {
val i = Random.nextInt(100)
bh.consume(EvaluatorV2.applyCompleted(st.ctx, st.expr(i), V4))
}
}
object EnvironmentFunctionsBenchmark {
val ChainId: Byte = 'P'
val Base58BytesLength = Global.MaxBase58Bytes
val DataBytesLength = 512
val SeedBytesLength = 128
val defaultEnvironment: Environment[Id] = new Environment[Id] {
override def height: Long = 1
override def chainId: Byte = ChainId
override def inputEntity: Environment.InputEntity = ???
override def transactionById(id: Array[Byte]): Option[Tx] = ???
override def transferTransactionById(id: Array[Byte]): Option[Tx.Transfer] = ???
override def data(recipient: Recipient, key: String, dataType: DataType): Option[Any] = ???
override def hasData(recipient: Recipient): Boolean = false
override def resolveAlias(alias: String): Either[String, Recipient.Address] = ???
override def transactionHeightById(id: Array[Byte]): Option[Long] = ???
override def assetInfoById(id: Array[Byte]): Option[ScriptAssetInfo] = ???
override def lastBlockOpt(): Option[BlockInfo] = ???
override def blockInfoByHeight(height: Int): Option[BlockInfo] = ???
override def accountBalanceOf(addressOrAlias: Recipient, assetId: Option[Array[Byte]]): Either[String, Long] = ???
override def accountWavesBalanceOf(addressOrAlias: Recipient): Either[String, Environment.BalanceDetails] = ???
override def tthis: Environment.Tthis = ???
override def multiPaymentAllowed: Boolean = ???
override def transferTransactionFromProto(b: Array[Byte]): Option[Tx.Transfer] = ???
override def txId: ByteStr = ByteStr(new Array[Byte](64))
override def addressFromString(addressStr: String): Either[String, Recipient.Address] =
account.Address
.fromString(addressStr)
.bimap(
_.toString,
address => Address(ByteStr(address.bytes))
)
override def accountScript(addressOrAlias: Recipient): Option[Script] = ???
override def callScript(
dApp: Address,
func: String,
args: List[EVALUATED],
payments: Seq[(Option[Array[Byte]], Long)],
availableComplexity: Int,
reentrant: Boolean
): Coeval[(Either[ValidationError, EVALUATED], Int)] = ???
}
val environmentFunctions = new EnvironmentFunctions(defaultEnvironment)
val string32Kb: String = "FEDCBA9876543210" * (32 * 1024 / 16)
val bytes8Kb: Array[Byte] = ("FEDCBA9876543210" * (8 * 1024 / 16)).getBytes(StandardCharsets.UTF_8)
def randomBytes(length: Int): Array[Byte] = {
val bytes = Array.fill[Byte](length)(0)
ThreadLocalRandom.current().nextBytes(bytes)
bytes
}
def randomAddress: ByteStr = ByteStr(Common.addressFromPublicKey(ChainId, randomBytes(Curve25519.KeyLength)))
def hashTest[T](f: Array[Byte] => T): T = f(randomBytes(DataBytesLength))
def hashTest[T](len: Int, f: Array[Byte] => T): T = f(randomBytes(len))
object curve25519 {
def generateKeypair = Curve25519.createKeyPair(randomBytes(SeedBytesLength))
def sign(privateKey: Array[Byte], message: Array[Byte]): Array[Byte] = Curve25519.sign(privateKey, message)
}
}
@State(Scope.Benchmark)
class AddressFromString {
val ctx: EvaluationContext[Environment, Id] =
WavesContext
.build(Global, DirectiveSet(V4, Account, DApp).explicitGet())
.evaluationContext(defaultEnvironment)
val expr: Array[EXPR] =
(1 to 100).map { _ =>
val address =
Wallet
.generateNewAccount(Random.nextBytes(8), 1)
.publicKey
.toAddress(ChainId)
.toString
FUNCTION_CALL(
Functions.addressFromStringV4.header,
List(CONST_STRING(address).explicitGet())
)
}.toArray
}
|
wavesplatform/Waves
|
benchmark/src/test/scala/com/wavesplatform/lang/v1/EnvironmentFunctionsBenchmark.scala
|
Scala
|
mit
| 8,066 |
package view
import java.util.Date
import org.specs2.mutable._
import org.specs2.mock.Mockito
import service.RequestCache
import model.Account
import service.SystemSettingsService.SystemSettings
import play.twirl.api.Html
import javax.servlet.http.HttpServletRequest
class AvatarImageProviderSpec extends Specification with Mockito {
val request = mock[HttpServletRequest]
request.getRequestURL returns new StringBuffer("http://localhost:8080/path.html")
request.getRequestURI returns "/path.html"
request.getContextPath returns ""
"getAvatarImageHtml" should {
"show Gravatar image for no image account if gravatar integration is enabled" in {
implicit val context = app.Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(None)))
provider.toHtml("user", 32).toString mustEqual
"<img src=\\"https://www.gravatar.com/avatar/d41d8cd98f00b204e9800998ecf8427e?s=32&d=retro&r=g\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />"
}
"show uploaded image even if gravatar integration is enabled" in {
implicit val context = app.Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(Some("icon.png"))))
provider.toHtml("user", 32).toString mustEqual
"<img src=\\"/user/_avatar\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />"
}
"show local image for no image account if gravatar integration is disabled" in {
implicit val context = app.Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(Some(createAccount(None)))
provider.toHtml("user", 32).toString mustEqual
"<img src=\\"/user/_avatar\\" class=\\"avatar\\" style=\\"width: 32px; height: 32px;\\" />"
}
"show Gravatar image for specified mail address if gravatar integration is enabled" in {
implicit val context = app.Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(None)
provider.toHtml("user", 20, "[email protected]").toString mustEqual
"<img src=\\"https://www.gravatar.com/avatar/4712f9b0e63f56ad952ad387eaa23b9c?s=20&d=retro&r=g\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />"
}
"show unknown image for unknown user if gravatar integration is enabled" in {
implicit val context = app.Context(createSystemSettings(true), None, request)
val provider = new AvatarImageProviderImpl(None)
provider.toHtml("user", 20).toString mustEqual
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />"
}
"show unknown image for specified mail address if gravatar integration is disabled" in {
implicit val context = app.Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(None)
provider.toHtml("user", 20, "[email protected]").toString mustEqual
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" />"
}
"add tooltip if it's enabled" in {
implicit val context = app.Context(createSystemSettings(false), None, request)
val provider = new AvatarImageProviderImpl(None)
provider.toHtml("user", 20, "[email protected]", true).toString mustEqual
"<img src=\\"/_unknown/_avatar\\" class=\\"avatar-mini\\" style=\\"width: 20px; height: 20px;\\" data-toggle=\\"tooltip\\" title=\\"user\\"/>"
}
}
private def createAccount(image: Option[String]) =
Account(
userName = "user",
fullName = "user@localhost",
mailAddress = "",
password = "",
isAdmin = false,
url = None,
registeredDate = new Date(),
updatedDate = new Date(),
lastLoginDate = None,
image = image,
isGroupAccount = false,
isRemoved = false)
private def createSystemSettings(useGravatar: Boolean) =
SystemSettings(
baseUrl = None,
allowAccountRegistration = false,
gravatar = useGravatar,
notification = false,
ssh = false,
sshPort = None,
smtp = None,
ldapAuthentication = false,
ldap = None)
/**
* Adapter to test AvatarImageProviderImpl.
*/
class AvatarImageProviderImpl(account: Option[Account]) extends AvatarImageProvider with RequestCache {
def toHtml(userName: String, size: Int, mailAddress: String = "", tooltip: Boolean = false)
(implicit context: app.Context): Html = getAvatarImageHtml(userName, size, mailAddress, tooltip)
override def getAccountByMailAddress(mailAddress: String)(implicit context: app.Context): Option[Account] = account
override def getAccountByUserName(userName: String)(implicit context: app.Context): Option[Account] = account
}
}
|
tb280320889/TESTTB
|
src/test/scala/view/AvatarImageProviderSpec.scala
|
Scala
|
apache-2.0
| 5,030 |
package controllers
import java.util.concurrent.TimeUnit
import akka.util.Timeout
import play.api.mvc._
import scala.concurrent.ExecutionContext
object Application extends Controller {
import play.api.libs.concurrent.Execution.Implicits._
val bench = new BenchService {
override implicit def ec: ExecutionContext = scala.concurrent.ExecutionContext.global
}
implicit val timeout = Timeout(10, TimeUnit.SECONDS)
def fib(n: Int) = Action.async { implicit request =>
bench.fib(n) map {
fib => Ok(s"Fin #${n} = ${fib}")
}
}
def sleep(n: Int) = Action.async { implicit request =>
bench.sleep(n) map {
res =>
Ok(res)
}
}
}
|
rafax/playspray
|
play/app/controllers/Application.scala
|
Scala
|
mit
| 681 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.stats
import java.lang.{Double => jDouble, Float => jFloat, Long => jLong}
import java.util.Date
import com.clearspring.analytics.stream.StreamSummary
import com.clearspring.analytics.stream.cardinality.HyperLogLog
import com.clearspring.analytics.stream.frequency.RichCountMinSketch
import com.esotericsoftware.kryo.io.{Input, Output}
import com.vividsolutions.jts.geom.Geometry
import org.locationtech.geomesa.utils.cache.SoftThreadLocal
import org.locationtech.geomesa.utils.stats.MinMax.MinMaxDefaults
import org.locationtech.geomesa.utils.text.WKBUtils
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
/**
* Serialize and deserialize stats
*/
trait StatSerializer {
def serialize(stat: Stat): Array[Byte]
def deserialize(bytes: Array[Byte], immutable: Boolean = false): Stat =
deserialize(bytes, 0, bytes.length, immutable)
def deserialize(bytes: Array[Byte], offset: Int, length: Int, immutable: Boolean): Stat
}
object StatSerializer {
def apply(sft: SimpleFeatureType): StatSerializer = new KryoStatSerializer(sft)
}
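// Usage sketch (`sft` and `stat` are hypothetical values, not defined here):
//   val serializer = StatSerializer(sft)
//   val bytes      = serializer.serialize(stat)
//   val restored   = serializer.deserialize(bytes, immutable = true)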
/**
* Kryo implementation of stat serializer. Thread-safe.
*
* @param sft simple feature type
*/
class KryoStatSerializer(sft: SimpleFeatureType) extends StatSerializer {
override def serialize(stat: Stat): Array[Byte] = {
val output = KryoStatSerializer.outputs.getOrElseUpdate(new Output(1024, -1))
output.clear()
KryoStatSerializer.write(output, sft, stat)
output.toBytes
}
override def deserialize(bytes: Array[Byte], offset: Int, length: Int, immutable: Boolean): Stat = {
val input = KryoStatSerializer.inputs.getOrElseUpdate(new Input)
input.setBuffer(bytes, offset, length)
KryoStatSerializer.read(input, sft, immutable)
}
}
object KryoStatSerializer {
private [stats] val inputs = new SoftThreadLocal[Input]()
private [stats] val outputs = new SoftThreadLocal[Output]()
// bytes indicating the type of stat
private [stats] val SeqStatByte: Byte = 0
private [stats] val CountByte: Byte = 1
private [stats] val MinMaxByte: Byte = 2
private [stats] val IteratorStackByte: Byte = 3
private [stats] val EnumerationByte: Byte = 4
private [stats] val HistogramByte: Byte = 5
private [stats] val FrequencyByte: Byte = 6
private [stats] val Z3HistogramByte: Byte = 7
private [stats] val Z3FrequencyByte: Byte = 8
private [stats] val TopKByte: Byte = 9
private [stats] def write(output: Output, sft: SimpleFeatureType, stat: Stat): Unit = {
stat match {
case s: CountStat => output.writeByte(CountByte); writeCount(output, s)
case s: MinMax[_] => output.writeByte(MinMaxByte); writeMinMax(output, sft, s)
case s: EnumerationStat[_] => output.writeByte(EnumerationByte); writeEnumeration(output, sft, s)
case s: TopK[_] => output.writeByte(TopKByte); writeTopK(output, sft, s)
case s: Histogram[_] => output.writeByte(HistogramByte); writeHistogram(output, sft, s)
case s: Frequency[_] => output.writeByte(FrequencyByte); writeFrequency(output, sft, s)
case s: Z3Histogram => output.writeByte(Z3HistogramByte); writeZ3Histogram(output, sft, s)
case s: Z3Frequency => output.writeByte(Z3FrequencyByte); writeZ3Frequency(output, sft, s)
case s: IteratorStackCount => output.writeByte(IteratorStackByte); writeIteratorStackCount(output, s)
case s: SeqStat => output.writeByte(SeqStatByte); writeSeqStat(output, sft, s)
}
}
private [stats] def read(input: Input, sft: SimpleFeatureType, immutable: Boolean): Stat = {
input.readByte() match {
case CountByte => readCount(input, immutable)
case MinMaxByte => readMinMax(input, sft, immutable)
case EnumerationByte => readEnumeration(input, sft, immutable)
case TopKByte => readTopK(input, sft, immutable)
case HistogramByte => readHistogram(input, sft, immutable)
case FrequencyByte => readFrequency(input, sft, immutable)
case Z3HistogramByte => readZ3Histogram(input, sft, immutable)
case Z3FrequencyByte => readZ3Frequency(input, sft, immutable)
case IteratorStackByte => readIteratorStackCount(input, immutable)
case SeqStatByte => readSeqStat(input, sft, immutable)
}
}
private [stats] def writeSeqStat(output: Output, sft: SimpleFeatureType, stat: SeqStat): Unit =
stat.stats.foreach(write(output, sft, _))
private [stats] def readSeqStat(input: Input, sft: SimpleFeatureType, immutable: Boolean): SeqStat = {
val stats = ArrayBuffer.empty[Stat]
while (input.available() > 0) {
stats.append(read(input, sft, immutable))
}
if (immutable) {
new SeqStat(stats) with ImmutableStat
} else {
new SeqStat(stats)
}
}
private [stats] def writeCount(output: Output, stat: CountStat): Unit = output.writeLong(stat.counter, true)
private [stats] def readCount(input: Input, immutable: Boolean): CountStat = {
val stat = if (immutable) new CountStat() with ImmutableStat else new CountStat
stat.counter = input.readLong(true)
stat
}
private [stats] def writeMinMax(output: Output, sft: SimpleFeatureType, stat: MinMax[_]): Unit = {
output.writeInt(stat.attribute, true)
val hpp = stat.hpp.getBytes
output.writeInt(hpp.length, true)
output.write(hpp)
val write = writer(output, sft.getDescriptor(stat.attribute).getType.getBinding)
write(stat.minValue)
write(stat.maxValue)
}
private [stats] def readMinMax(input: Input, sft: SimpleFeatureType, immutable: Boolean): MinMax[_] = {
val attribute = input.readInt(true)
val hpp = {
val hppBytes = Array.ofDim[Byte](input.readInt(true))
input.read(hppBytes)
HyperLogLog.Builder.build(hppBytes)
}
val binding = sft.getDescriptor(attribute).getType.getBinding
val read = reader(input, binding)
val min = read()
val max = read()
val defaults = MinMaxDefaults[Any](binding)
val classTag = ClassTag[Any](binding)
if (immutable) {
new MinMax[Any](attribute, min, max, hpp)(defaults, classTag) with ImmutableStat
} else {
new MinMax[Any](attribute, min, max, hpp)(defaults, classTag)
}
}
private [stats] def writeEnumeration(output: Output, sft: SimpleFeatureType, stat: EnumerationStat[_]): Unit = {
output.writeInt(stat.attribute, true)
output.writeInt(stat.enumeration.size, true)
val write = writer(output, sft.getDescriptor(stat.attribute).getType.getBinding)
stat.enumeration.foreach { case (key, count) => write(key); output.writeLong(count, true) }
}
private [stats] def readEnumeration(input: Input, sft: SimpleFeatureType, immutable: Boolean): EnumerationStat[_] = {
val attribute = input.readInt(true)
val size = input.readInt(true)
val binding = sft.getDescriptor(attribute).getType.getBinding
val read = reader(input, binding)
val classTag = ClassTag[Any](binding)
val stat = if (immutable) {
new EnumerationStat[Any](attribute)(classTag) with ImmutableStat
} else {
new EnumerationStat[Any](attribute)(classTag)
}
var i = 0
while (i < size) {
stat.enumeration(read()) = input.readLong(true)
i += 1
}
stat
}
private [stats] def writeTopK(output: Output, sft: SimpleFeatureType, stat: TopK[_]): Unit = {
output.writeInt(stat.attribute, true)
val summary = stat.summary.toBytes
output.writeInt(summary.length, true)
output.write(summary)
}
private [stats] def readTopK(input: Input, sft: SimpleFeatureType, immutable: Boolean): TopK[_] = {
val attribute = input.readInt(true)
val summary = {
val summaryBytes = Array.ofDim[Byte](input.readInt(true))
input.read(summaryBytes)
new StreamSummary[Any](summaryBytes)
}
val binding = sft.getDescriptor(attribute).getType.getBinding
val classTag = ClassTag[Any](binding)
if (immutable) {
new TopK[Any](attribute, summary)(classTag) with ImmutableStat
} else {
new TopK[Any](attribute, summary)(classTag)
}
}
private [stats] def writeHistogram(output: Output, sft: SimpleFeatureType, stat: Histogram[_]): Unit = {
output.writeInt(stat.attribute, true)
output.writeInt(stat.length, true)
val write = writer(output, sft.getDescriptor(stat.attribute).getType.getBinding)
write(stat.bounds._1)
write(stat.bounds._2)
writeCountArray(output, stat.bins.counts)
}
private [stats] def readHistogram(input: Input, sft: SimpleFeatureType, immutable: Boolean): Histogram[_] = {
val attribute = input.readInt(true)
val length = input.readInt(true)
val binding = sft.getDescriptor(attribute).getType.getBinding
val read = reader(input, binding)
val min = read()
val max = read()
val defaults = MinMaxDefaults[Any](binding)
val classTag = ClassTag[Any](binding)
val stat = if (immutable) {
new Histogram[Any](attribute, length, (min, max))(defaults, classTag) with ImmutableStat
} else {
new Histogram[Any](attribute, length, (min, max))(defaults, classTag)
}
readCountArray(input, stat.bins.counts)
stat
}
private [stats] def writeZ3Histogram(output: Output, sft: SimpleFeatureType, stat: Z3Histogram): Unit = {
output.writeInt(stat.geomIndex, true)
output.writeInt(stat.dtgIndex, true)
output.writeInt(stat.length, true)
val bins = stat.binMap.filter(_._2.counts.exists(_ != 0L))
output.writeInt(bins.size, true)
bins.foreach { case (w, bin) =>
output.writeShort(w)
writeCountArray(output, bin.counts)
}
}
private [stats] def readZ3Histogram(input: Input, sft: SimpleFeatureType, immutable: Boolean): Z3Histogram = {
val geomIndex = input.readInt(true)
val dtgIndex = input.readInt(true)
val length = input.readInt(true)
val stat = if (immutable) {
new Z3Histogram(geomIndex, dtgIndex, length) with ImmutableStat
} else {
new Z3Histogram(geomIndex, dtgIndex, length)
}
val numWeeks = input.readInt(true)
var week = 0
while (week < numWeeks) {
val bins = stat.newBins
stat.binMap.put(input.readShort, bins)
readCountArray(input, bins.counts)
week += 1
}
stat
}
private [stats] def writeFrequency(output: Output, sft: SimpleFeatureType, stat: Frequency[_]): Unit = {
output.writeInt(stat.attribute, true)
output.writeInt(stat.dtgIndex, true)
output.writeInt(stat.precision, true)
output.writeDouble(stat.eps)
output.writeDouble(stat.confidence)
val sketches = stat.sketchMap.filter(_._2.size > 0)
output.writeInt(sketches.size, true)
sketches.foreach { case (w, sketch) =>
output.writeShort(w)
val table = new RichCountMinSketch(sketch).table
var i = 0
while (i < table.length) {
writeCountArray(output, table(i))
i += 1
}
output.writeLong(sketch.size, true)
}
}
private [stats] def readFrequency(input: Input, sft: SimpleFeatureType, immutable: Boolean): Frequency[_] = {
val attribute = input.readInt(true)
val dtgIndex = input.readInt(true)
val precision = input.readInt(true)
val eps = input.readDouble()
val confidence = input.readDouble()
val binding = sft.getDescriptor(attribute).getType.getBinding
val stat = if (immutable) {
new Frequency[Any](attribute, dtgIndex, precision, eps, confidence)(ClassTag[Any](binding)) with ImmutableStat
} else {
new Frequency[Any](attribute, dtgIndex, precision, eps, confidence)(ClassTag[Any](binding))
}
val sketchCount = input.readInt(true)
var c = 0
while (c < sketchCount) {
val week = input.readShort
val sketch = stat.newSketch
stat.sketchMap.put(week, sketch)
val table = new RichCountMinSketch(sketch).table
var i = 0
while (i < table.length) {
readCountArray(input, table(i))
i += 1
}
new RichCountMinSketch(sketch).setSize(input.readLong(true))
c += 1
}
stat
}
private [stats] def writeZ3Frequency(output: Output, sft: SimpleFeatureType, stat: Z3Frequency): Unit = {
output.writeInt(stat.geomIndex, true)
output.writeInt(stat.dtgIndex, true)
output.writeInt(stat.precision, true)
output.writeDouble(stat.eps)
output.writeDouble(stat.confidence)
val sketches = stat.sketches.filter(_._2.size > 0)
output.writeInt(sketches.size, true)
sketches.foreach { case (w, sketch) =>
output.writeShort(w)
val table = new RichCountMinSketch(sketch).table
var i = 0
while (i < table.length) {
writeCountArray(output, table(i))
i += 1
}
output.writeLong(sketch.size, true)
}
}
private [stats] def readZ3Frequency(input: Input, sft: SimpleFeatureType, immutable: Boolean): Z3Frequency = {
val geomIndex = input.readInt(true)
val dtgIndex = input.readInt(true)
val precision = input.readInt(true)
val eps = input.readDouble()
val confidence = input.readDouble()
val stat = if (immutable) {
new Z3Frequency(geomIndex, dtgIndex, precision, eps, confidence) with ImmutableStat
} else {
new Z3Frequency(geomIndex, dtgIndex, precision, eps, confidence)
}
val numSketches = input.readInt(true)
var sketchCount = 0
while (sketchCount < numSketches) {
val sketch = stat.newSketch
stat.sketches.put(input.readShort, sketch)
val table = new RichCountMinSketch(sketch).table
var i = 0
while (i < table.length) {
readCountArray(input, table(i))
i += 1
}
new RichCountMinSketch(sketch).setSize(input.readLong(true))
sketchCount += 1
}
stat
}
private [stats] def writeIteratorStackCount(output: Output, stat: IteratorStackCount): Unit =
output.writeLong(stat.counter, true)
private [stats] def readIteratorStackCount(input: Input, immutable: Boolean): IteratorStackCount = {
val stat = if (immutable) new IteratorStackCount() with ImmutableStat else new IteratorStackCount()
stat.counter = input.readLong(true)
stat
}
private def writeCountArray(output: Output, counts: Array[Long]): Unit = {
var i = 0
while (i < counts.length) {
val count = counts(i)
if (count == 0) {
var nextNonZero = i + 1
while (nextNonZero < counts.length && counts(nextNonZero) == 0) {
nextNonZero += 1
}
val numZeros = nextNonZero - i
if (numZeros > 4) {
// write a max long as an indicator that we have sparse values, then write the number of zeros
output.writeLong(Long.MaxValue, true)
output.writeInt(numZeros, true)
} else if (numZeros > 0) {
(0 until numZeros).foreach(_ => output.writeLong(0L, true))
}
i = nextNonZero
} else {
output.writeLong(count, true)
i += 1
}
}
}
private def readCountArray(input: Input, counts: Array[Long]): Unit = {
var i = 0
while (i < counts.length) {
val count = input.readLong(true)
if (count == Long.MaxValue) {
i += input.readInt(true) // skip sparsely written values
} else {
counts(i) = count
i += 1
}
}
}
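  // Worked example of the sparse scheme above: the counts [3, 0, 0, 0, 0, 0, 7]
  // contain a run of five zeros (> 4), so writeCountArray emits the varint longs
  // 3, Long.MaxValue, then the varint int 5, then 7; readCountArray skips five
  // slots when it reads the Long.MaxValue marker.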
private def writer(output: Output, binding: Class[_]): (Any) => Unit = {
if (binding == classOf[String]) {
(value) => output.writeString(value.asInstanceOf[String])
} else if (binding == classOf[Integer]) {
(value) => output.writeInt(value.asInstanceOf[Integer], true)
} else if (binding == classOf[jLong]) {
(value) => output.writeLong(value.asInstanceOf[jLong], true)
} else if (binding == classOf[jFloat]) {
(value) => output.writeFloat(value.asInstanceOf[jFloat])
} else if (binding == classOf[jDouble]) {
(value) => output.writeDouble(value.asInstanceOf[jDouble])
} else if (classOf[Date].isAssignableFrom(binding)) {
(value) => output.writeLong(value.asInstanceOf[Date].getTime, true)
} else if (classOf[Geometry].isAssignableFrom(binding)) {
(value) => {
val b1 = WKBUtils.write(value.asInstanceOf[Geometry])
output.writeInt(b1.length, true)
output.write(b1)
}
} else {
throw new Exception(s"Cannot serialize stat due to invalid type: $binding")
}
}
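  // For reference: geometries are written as length-prefixed WKB, so e.g. a 2D
  // point costs a varint length plus 21 WKB bytes (byte order + geometry type +
  // two coordinate doubles).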
private def reader(input: Input, binding: Class[_]): () => Any = {
if (binding == classOf[String]) {
() => input.readString()
} else if (binding == classOf[Integer]) {
() => input.readInt(true)
} else if (binding == classOf[jLong]) {
() => input.readLong(true)
} else if (binding == classOf[jFloat]) {
() => input.readFloat()
} else if (binding == classOf[jDouble]) {
() => input.readDouble()
} else if (classOf[Date].isAssignableFrom(binding)) {
() => new Date(input.readLong(true))
} else if (classOf[Geometry].isAssignableFrom(binding)) {
() => {
val b = Array.ofDim[Byte](input.readInt(true))
input.read(b)
WKBUtils.read(b)
}
} else {
throw new Exception(s"Cannot deserialize stat due to invalid type: $binding")
}
}
}
|
mdzimmerman/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/stats/StatSerializer.scala
|
Scala
|
apache-2.0
| 17,885 |
package dispatch.spec
import java.nio.charset.Charset
import org.asynchttpclient.Realm
import org.asynchttpclient.Realm.AuthScheme
import org.scalacheck._
import org.scalacheck.Prop._
object BasicSpecification
extends Properties("Basic")
with DispatchCleanup {
import java.net.{URLEncoder,URLDecoder}
import Prop.{forAll,AnyOperators}
private val port = unfiltered.util.Port.any
val server = {
import unfiltered.netty
import unfiltered.response._
import unfiltered.request._
object Echo extends Params.Extract("echo", Params.first)
netty.Server.local(port).handler(netty.cycle.Planify {
case req @ Path("/echo") & Params(Echo(echo)) =>
PlainTextContent ~> ResponseString(req.method + echo)
case req @ Path(Seg("echopath" :: echo :: _)) =>
PlainTextContent ~> ResponseString(req.method + URLDecoder.decode(echo, "utf-8"))
case req @ Path(Seg("echopath" :: Nil)) =>
PlainTextContent ~> ResponseString(req.method)
case req @ Path(Seg("echobody" :: Nil)) =>
PlainTextContent ~> ResponseString(req.method + Body.string(req))
case req @ Path(Seg("echoquery" :: Nil)) & QueryParams(queryParams) =>
val params = queryParams.flatMap { case (k, vs) => vs.map(v => k + "=" + v) }.mkString("&")
PlainTextContent ~> ResponseString(req.method + params)
case Path(Seg("agent" :: Nil)) & UserAgent(agent) =>
PlainTextContent ~> ResponseString(agent)
case Path(Seg("contenttype" :: Nil)) & RequestContentType(contenttype) =>
PlainTextContent ~> ResponseString(contenttype)
}).start()
}
import dispatch._
val localhost = host("127.0.0.1", port)
  // a shim until we can update scalacheck to a version that generates non-alpha strings that don't break Java
val syms = "&#$@%"
def cyrillicChars = Gen.choose( 0x0400, 0x04FF) map {_.toChar}
def cyrillic = for {
cs <- Gen.listOf(cyrillicChars)
} yield {
cs.mkString
}
property("url() should encode non-ascii chars in the path") = forAll(cyrillic) { (sample: String) =>
val path = if (sample.isEmpty) "" else "/" + sample
val wiki = "http://wikipedia.com" + path
val uri = url(wiki)
uri.toRequest.getUrl() ?= RawUri(wiki).toString
}
property("Path segments can be before and after query parameters") = forAll(Gen.alphaStr) { (sample: String) =>
val segmentLast = (localhost <<? Map("key" -> "value")) / sample
val segmentFirst = localhost / sample <<? Map("key" -> "value")
segmentLast.toRequest.getUrl() ?= segmentFirst.toRequest.getUrl()
}
property("Path segments can be optional") = forAll(Gen.alphaStr) { (sample: String) =>
val segmentLast = (localhost <<? Map("key" -> "value")) / sample
val segmentOptional = localhost /? Some(sample) /? None <<? Map("key" -> "value")
segmentLast.toRequest.getUrl ?= segmentOptional.toRequest.getUrl
}
property("POST and handle") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
localhost / "echo" << Map("echo" -> sample) > as.String
)
res() ?= ("POST" + sample)
}
property("POST json with query params") = forAll(Gen.alphaStr) { (value: String) =>
val headers = Map("Content-Type" -> "application/json")
val params = Map("key" -> value)
val body = """{"foo":"bar"}"""
val res = Http.default(
localhost / "echoquery" <:< headers <<? params << body OK as.String
)
res() ?= ("POST" + "key=" + value)
}
property("POST non-ascii chars body and get response") = forAll(cyrillic) { (sample: String) =>
val res = Http.default(
localhost / "echobody" << sample > as.String
)
res() ?= ("POST" + sample)
}
property("PUT alphaString body with setBody and get response") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
(localhost / "echobody").PUT.setBody(sample) > as.String
)
res() ?= ("PUT" + sample)
}
property("PUT alphaString body with << and get response") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
(localhost / "echobody").PUT << sample > as.String
)
res() ?= ("PUT" + sample)
}
property("GET and handle") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
localhost / "echo" <<? Map("echo" -> sample) > as.String
)
res() ?= ("GET" + sample)
}
property("GET and get response") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
localhost / "echo" <<? Map("echo" -> sample)
)
res().getResponseBody ?= ("GET" + sample)
}
property("GET with encoded path") = forAll(Gen.alphaStr) { (sample: String) =>
// (second sample in request path is ignored)
val res = Http.default(
localhost / "echopath" / (sample + syms) / sample OK as.String
)
("GET" + sample + syms) ?= res()
}
property("GET with encoded path as url") = forAll(Gen.alphaStr) { (sample: String) =>
val requesturl = "http://127.0.0.1:%d/echopath/%s".format(port, URLEncoder.encode(sample + syms, "utf-8"))
val res = Http.default(url(requesturl) / sample OK as.String)
res() == ("GET" + sample + syms)
}
property("OPTIONS and handle") = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
localhost.OPTIONS / "echo" <<? Map("echo" -> sample) > as.String
)
res() ?= ("OPTIONS" + sample)
}
property("Send Dispatch/%s User-Agent" format BuildInfo.version) = forAll(Gen.alphaStr) { (sample: String) =>
val res = Http.default(
localhost / "agent" > as.String
)
res() ?= ("Dispatch/%s" format BuildInfo.version)
}
property("Send a default content-type with <<") = forAll(Gen.const("unused")) { (sample: String) =>
val res = Http.default(
localhost / "contenttype" << "request body" > as.String
)
res() ?= ("text/plain; charset=UTF-8")
}
property("Send a custom content type after <<") = forAll(Gen.oneOf("application/json", "application/foo")) { (sample: String) =>
val res = Http.default(
(localhost / "contenttype" << "request body").setContentType(sample, Charset.forName("UTF-8")) > as.String
)
res() ?= (sample + "; charset=UTF-8")
}
property("Send a custom content type with <:< after <<") = forAll(Gen.oneOf("application/json", "application/foo")) { (sample: String) =>
val res: Future[String] = Http.default(
localhost / "contenttype" << "request body" <:< Map("Content-Type" -> sample) > as.String
)
res() ?= (sample)
}
property("Set query params with <<? after setBody(String) and setContentType") = {
forAll(Gen.mapOf(Gen.zip(
Gen.alphaStr.suchThat(_.nonEmpty),
Gen.alphaStr
)).suchThat(_.nonEmpty)) { (sample : Map[String, String]) =>
val expectedParams = sample.map { case (key, value) => "%s=%s".format(key, value) }
val req = localhost.setBody("").setContentType("text/plain", Charset.forName("UTF-8")) <<? sample
req.toRequest.getUrl ?= "http://127.0.0.1:%d/?%s".format(port, expectedParams.mkString("&"))
}
}
property("Add query params with just the key") = forAll(Gen.const("unused")) { (sample: String) =>
val req = (localhost / "path").addQueryParameter("key")
req.toRequest.getUrl ?= "http://127.0.0.1:%d/path?key".format(port)
}
property("Set query params with just the key") = forAll(Gen.const("unused")) { (sample: String) =>
val req = (localhost / "path").setQueryParameters(Map("key" -> Seq()))
req.toRequest.getUrl ?= "http://127.0.0.1:%d/path?key".format(port)
}
property("Building a Realm without a scheme should throw NPE") = {
forAll(Gen.zip(Gen.alphaNumStr, Gen.alphaNumStr)) { case (user, password) =>
throws(classOf[NullPointerException])(new Realm.Builder(user, password).build())
}
}
property("Build a Realm using as") = {
forAll(Gen.zip(Gen.alphaNumStr, Gen.alphaNumStr, Gen.oneOf(AuthScheme.values()))) { case (user, password, scheme) =>
val realm = localhost.as(user, password, scheme).toRequest.getRealm
realm.getScheme ?= scheme
realm.getPrincipal ?= user
realm.getPassword ?= password
}
}
}
|
dispatch/reboot
|
core/src/test/scala/basic.scala
|
Scala
|
lgpl-3.0
| 8,144 |
package mesosphere.marathon
package core.task.bus
import mesosphere.marathon.core.condition.Condition
import mesosphere.marathon.core.instance.update._
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder }
import mesosphere.marathon.core.pod.MesosContainer
import mesosphere.marathon.core.task.{ Task, TaskCondition }
import mesosphere.marathon.state.{ PathId, Timestamp }
import org.apache.mesos.Protos.TaskStatus.Reason
import org.apache.mesos.Protos.{ TaskState, TaskStatus }
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
class TaskStatusUpdateTestHelper(val operation: InstanceUpdateOperation, val effect: InstanceUpdateEffect) {
def simpleName = operation match {
case InstanceUpdateOperation.MesosUpdate(_, marathonTaskStatus, mesosStatus, _) =>
mesosStatus.getState.toString
case _ => operation.getClass.getSimpleName
}
def status = operation match {
case InstanceUpdateOperation.MesosUpdate(_, marathonTaskStatus, mesosStatus, _) => mesosStatus
    case _ => throw new scala.RuntimeException("the wrapped stateOp is not a MesosUpdate!")
}
def reason: String = if (status.hasReason) status.getReason.toString else "no reason"
def wrapped: InstanceChange = effect match {
case InstanceUpdateEffect.Update(instance, old, events) => InstanceUpdated(instance, old.map(_.state), events)
case InstanceUpdateEffect.Expunge(instance, events) => InstanceDeleted(instance, None, events)
case _ => throw new scala.RuntimeException(s"The wrapped effect does not result in an update or expunge: $effect")
}
private[this] def instanceFromOperation: Instance = operation match {
case launch: InstanceUpdateOperation.LaunchEphemeral => launch.instance
case update: InstanceUpdateOperation.MesosUpdate => update.instance
case _ => throw new RuntimeException(s"Unable to fetch instance from ${operation.getClass.getSimpleName}")
}
def updatedInstance: Instance = effect match {
case InstanceUpdateEffect.Update(instance, old, events) => instance
case InstanceUpdateEffect.Expunge(instance, events) => instance
case _ => instanceFromOperation
}
}
object TaskStatusUpdateTestHelper {
val log = LoggerFactory.getLogger(getClass)
def apply(operation: InstanceUpdateOperation, effect: InstanceUpdateEffect): TaskStatusUpdateTestHelper =
new TaskStatusUpdateTestHelper(operation, effect)
lazy val defaultInstance = TestInstanceBuilder.newBuilder(PathId("/app")).addTaskStaged().getInstance()
lazy val defaultTimestamp = Timestamp.apply(new DateTime(2015, 2, 3, 12, 30, 0, 0))
def taskLaunchFor(instance: Instance) = {
val operation = InstanceUpdateOperation.LaunchEphemeral(instance)
val effect = InstanceUpdateEffect.Update(operation.instance, oldState = None, events = Nil)
TaskStatusUpdateTestHelper(operation, effect)
}
def taskUpdateFor(instance: Instance, taskCondition: Condition, mesosStatus: TaskStatus, timestamp: Timestamp = defaultTimestamp) = {
val operation = InstanceUpdateOperation.MesosUpdate(instance, taskCondition, mesosStatus, timestamp)
val effect = InstanceUpdater.mesosUpdate(instance, operation)
TaskStatusUpdateTestHelper(operation, effect)
}
def taskExpungeFor(instance: Instance, taskCondition: Condition, mesosStatus: TaskStatus, timestamp: Timestamp = defaultTimestamp) = {
val operation = InstanceUpdateOperation.MesosUpdate(instance, taskCondition, mesosStatus, timestamp)
val effect = InstanceUpdater.mesosUpdate(instance, operation)
if (!effect.isInstanceOf[InstanceUpdateEffect.Expunge]) {
throw new RuntimeException(s"Applying a MesosUpdate with status $taskCondition did not result in an Expunge effect but in a $effect")
}
TaskStatusUpdateTestHelper(operation, effect)
}
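  // Example (sketch): build a RUNNING update against the default instance and
  // inspect the resulting InstanceChange:
  //   val helper = TaskStatusUpdateTestHelper.running()
  //   helper.wrapped // an InstanceUpdated(...)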
def taskId(instance: Instance, container: Option[MesosContainer]): Task.Id = {
val taskId = instance.tasksMap.headOption.map(_._1)
taskId.getOrElse(Task.Id.forInstanceId(instance.instanceId, container))
}
def running(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.running(taskId)
taskUpdateFor(instance, Condition.Running, status)
}
def runningHealthy(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.runningHealthy(taskId)
taskUpdateFor(instance, Condition.Running, status)
}
def runningUnhealthy(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.runningUnhealthy(taskId)
taskUpdateFor(instance, Condition.Running, status)
}
def staging(instance: Instance = defaultInstance) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, None)
val status = MesosTaskStatusTestHelper.staging(taskId)
taskUpdateFor(instance, Condition.Staging, status)
}
def finished(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.finished(taskId)
taskUpdateFor(instance, Condition.Finished, status)
}
def lost(reason: Reason, instance: Instance = defaultInstance, maybeMessage: Option[String] = None, timestamp: Timestamp = defaultTimestamp) = {
val taskId = instance.appTask.taskId
val mesosStatus = MesosTaskStatusTestHelper.mesosStatus(
state = TaskState.TASK_LOST,
maybeReason = Some(reason), maybeMessage = maybeMessage,
taskId = taskId,
timestamp = timestamp
)
val marathonTaskStatus = TaskCondition(mesosStatus)
marathonTaskStatus match {
case _: Condition.Terminal =>
taskExpungeFor(instance, marathonTaskStatus, mesosStatus)
case _ =>
taskUpdateFor(instance, marathonTaskStatus, mesosStatus, timestamp)
}
}
def unreachable(instance: Instance = defaultInstance) = {
val mesosStatus = MesosTaskStatusTestHelper.unreachable(Task.Id.forInstanceId(instance.instanceId, None))
val marathonTaskStatus = TaskCondition(mesosStatus)
marathonTaskStatus match {
case _: Condition.Terminal =>
taskExpungeFor(instance, marathonTaskStatus, mesosStatus)
case _ =>
taskUpdateFor(instance, marathonTaskStatus, mesosStatus)
}
}
def killed(instance: Instance = defaultInstance) = {
// TODO(PODS): the method signature should allow passing a taskId
val (taskId, _) = instance.tasksMap.head
val status = MesosTaskStatusTestHelper.killed(taskId)
taskExpungeFor(instance, Condition.Killed, status)
}
def killing(instance: Instance = defaultInstance) = {
val status = MesosTaskStatusTestHelper.killing(Task.Id.forInstanceId(instance.instanceId, None))
taskUpdateFor(instance, Condition.Killing, status)
}
def error(instance: Instance = defaultInstance) = {
val status = MesosTaskStatusTestHelper.error(Task.Id.forInstanceId(instance.instanceId, None))
taskExpungeFor(instance, Condition.Error, status)
}
def failed(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.failed(taskId)
taskUpdateFor(instance, Condition.Failed, status)
}
def gone(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.gone(taskId)
taskUpdateFor(instance, Condition.Gone, status)
}
def dropped(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.dropped(taskId)
taskUpdateFor(instance, Condition.Dropped, status)
}
def unknown(instance: Instance = defaultInstance, container: Option[MesosContainer] = None) = {
val taskId = Task.Id.forInstanceId(instance.instanceId, container)
val status = MesosTaskStatusTestHelper.unknown(taskId)
taskUpdateFor(instance, Condition.Unknown, status)
}
}
|
natemurthy/marathon
|
src/test/scala/mesosphere/marathon/core/task/bus/TaskStatusUpdateTestHelper.scala
|
Scala
|
apache-2.0
| 8,438 |
package io.plasmap.geo.producing.test
import _root_.io.plasmap.generator.OsmObjectGenerator
import _root_.io.plasmap.geo.producing.OsmProducer
import _root_.io.plasmap.model.OsmObject
import akka.actor._
import akka.stream._
import akka.stream.scaladsl._
import org.scalamock.proxy.ProxyMockFactory
import org.scalamock.specs2.IsolatedMockFactory
import org.specs2.mutable.Specification
/**
 * Specification for the OsmProducer
*/
class OsmProducerSpec
extends Specification
with IsolatedMockFactory
with ProxyMockFactory {
sequential
implicit val system = ActorSystem("test")
implicit val mat = ActorMaterializer()
val gen = OsmObjectGenerator()
/*
"The OsmProducer" should {
"branch osm objects to the correct sink based on their type" in {
val node = gen.generateNode
val way = gen.generateWay
val relation = gen.generateRelation
val objSource = Source(List(node,way,relation))
val nodesBuf = scala.collection.mutable.ArrayBuffer.empty[OsmObject]
val waysBuf = scala.collection.mutable.ArrayBuffer.empty[OsmObject]
val relationsBuf = scala.collection.mutable.ArrayBuffer.empty[OsmObject]
val nodesSink = Sink.foreach[OsmObject](nodesBuf.append(_))
val waysSink = Sink.foreach[OsmObject](waysBuf.append(_))
val relationsSink = Sink.foreach[OsmObject](relationsBuf.append(_))
val flowUnderTest = OsmProducer.osmJunction(objSource,nodesSink,waysSink,relationsSink)
flowUnderTest.run()
nodesBuf.map(_.toString) must contain(node.toString)
nodesBuf must not contain(way)
nodesBuf must not contain(relation)
waysBuf must contain(way)
waysBuf must not contain(node)
waysBuf must not contain(relation)
relationsBuf must contain(relation)
relationsBuf must not contain(node)
relationsBuf must not contain(way)
}
}
*/
}
|
plasmap/plasmap
|
processing/src/test/scala/io/plasmap/geo/producing/test/OsmProducerSpec.scala
|
Scala
|
apache-2.0
| 1,875 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example
import com.google.cloud.bigtable.hbase.BigtableConfiguration
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
object Wordcount extends App {
def parse(args: Array[String]): (String, String, String, String) = {
if (args.length < 4) {
throw new IllegalStateException("Missing command-line argument(s). Required are: BIGTABLE_SPARK_PROJECT_ID, BIGTABLE_SPARK_INSTANCE_ID, BIGTABLE_SPARK_WORDCOUNT_TABLE, BIGTABLE_SPARK_WORDCOUNT_FILE")
}
val projectId = args(0)
val instanceId = args(1)
val table = args(2)
val file = args(3)
(projectId, instanceId, table, file)
}
val (projectId, instanceId, table, file) = parse(args)
var hConf = BigtableConfiguration.configure(projectId, instanceId)
hConf.set(TableOutputFormat.OUTPUT_TABLE, table)
import org.apache.hadoop.mapreduce.Job
val job = Job.getInstance(hConf)
job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
hConf = job.getConfiguration
import org.apache.spark.SparkConf
val config = new SparkConf()
// Workaround for a bug in TableOutputFormat
// See https://stackoverflow.com/a/51959451/1305344
config.set("spark.hadoop.validateOutputSpecs", "false")
val sc = SparkContext.getOrCreate(config)
val wordCounts = sc
.textFile(file)
.flatMap(_.split("\\W+"))
.filter(!_.isEmpty)
.map { word => (word, 1) }
.reduceByKey(_ + _)
.map { case (word, count) =>
val ColumnFamilyBytes = Bytes.toBytes("cf")
val ColumnNameBytes = Bytes.toBytes("Count")
val put = new Put(Bytes.toBytes(word))
.addColumn(ColumnFamilyBytes, ColumnNameBytes, Bytes.toBytes(count))
      // The key is ignored by the underlying TableOutputFormat writer; the value must be either a Put or a Delete instance.
(null, put)
}
wordCounts.saveAsNewAPIHadoopDataset(hConf)
}
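// Submission sketch (the assembly jar name is an assumption; adjust to the build):
//   spark-submit --class example.Wordcount bigtable-spark-wordcount.jar \
//     $BIGTABLE_SPARK_PROJECT_ID $BIGTABLE_SPARK_INSTANCE_ID \
//     $BIGTABLE_SPARK_WORDCOUNT_TABLE $BIGTABLE_SPARK_WORDCOUNT_FILE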
|
GoogleCloudPlatform/java-docs-samples
|
bigtable/spark/src/main/scala/example/Wordcount.scala
|
Scala
|
apache-2.0
| 2,688 |
package com.typesafe.sbt.packager.rpm
import sbt._
import com.typesafe.sbt.packager.linux.LinuxSymlink
object RpmHelper {
/** Returns the host vendor for an rpm. */
def hostVendor =
sys.process.Process(Seq("rpm", "-E", "%{_host_vendor}")) !!
/**
* Prepares the staging directory for the rpm build command.
*
* @param spec The RpmSpec
* @param workArea The target
* @param log Logger
* @return the `workArea`
*/
def stage(spec: RpmSpec, workArea: File, log: sbt.Logger): File = {
buildWorkArea(workArea)
copyFiles(spec, workArea, log)
writeSpecFile(spec, workArea, log)
spec.validate(log)
workArea
}
private[rpm] def defaultRpmArtifactPath(stagingArea: File, meta: RpmMetadata): File =
stagingArea / "RPMS" / meta.arch / s"${meta.name}-${meta.version}-${meta.release}.${meta.arch}.rpm"
/**
* Build the rpm package
*
* @param spec The RpmSpec
* @param stagingArea Prepared staging area
* @param log Logger
* @return The rpm package
*/
def buildRpm(spec: RpmSpec, stagingArea: File, log: sbt.Logger): File = {
buildPackage(stagingArea, spec, log)
defaultRpmArtifactPath(stagingArea, spec.meta)
}
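  // Typical flow (sketch; `spec`, `workArea` and `log` are supplied by the
  // calling plugin code):
  //   val staged = RpmHelper.stage(spec, workArea, log)
  //   val rpm    = RpmHelper.buildRpm(spec, staged, log)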
private[this] def copyFiles(spec: RpmSpec, workArea: File, log: sbt.Logger): Unit = {
// TODO - special treatment of icon...
val buildroot = workArea / "tmp-buildroot"
def copyWithZip(from: File, to: File, zipped: Boolean): Unit = {
log.debug("Copying %s to %s".format(from, to))
if (zipped) IO.gzip(from, to)
else IO.copyFile(from, to, true)
}
// First make sure directories are there....
IO createDirectories (for {
mapping <- spec.mappings
(file, dest) <- mapping.mappings
if file.isDirectory
target = buildroot / dest
} yield target)
    // We don't have to do any permission modifications since that's in
    // the .spec file.
for {
mapping <- spec.mappings
(file, dest) <- mapping.mappings
if file.exists && !file.isDirectory()
target = buildroot / dest
} copyWithZip(file, target, mapping.zipped)
LinuxSymlink.makeSymLinks(spec.symlinks, buildroot, relativeLinks = false)
}
private[this] def writeSpecFile(spec: RpmSpec, workArea: File, log: sbt.Logger): File = {
val specdir = workArea / "SPECS"
val rpmBuildroot = workArea / "buildroot"
val tmpBuildRoot = workArea / "tmp-buildroot"
val specfile = specdir / (spec.meta.name + ".spec")
log.debug("Creating SPEC file: " + specfile.getAbsolutePath)
IO.write(specfile, spec.writeSpec(rpmBuildroot, tmpBuildRoot))
specfile
}
private[this] def buildPackage(workArea: File, spec: RpmSpec, log: sbt.Logger): Unit = {
val buildRoot = workArea / "buildroot"
val specsDir = workArea / "SPECS"
val gpg = false
// TODO - Full GPG support (with GPG plugin).
IO.withTemporaryDirectory { tmpRpmBuildDir =>
val args: Seq[String] = (spec.setarch match {
case Some(arch) => Seq("setarch", arch)
case None => Seq()
}) ++ Seq(
"rpmbuild",
"-bb",
"--target",
spec.meta.arch + '-' + spec.meta.vendor + '-' + spec.meta.os,
"--buildroot",
buildRoot.getAbsolutePath,
"--define",
"_topdir " + workArea.getAbsolutePath,
"--define",
"_tmppath " + tmpRpmBuildDir.getAbsolutePath
) ++ (
if (gpg) Seq("--define", "_gpg_name " + "<insert keyname>", "--sign")
else Seq.empty
) ++ Seq(spec.meta.name + ".spec")
log.debug("Executing rpmbuild with: " + args.mkString(" "))
// RPM outputs to standard error in non-error cases. So just collect all the output, then dump
// it all to either error log or info log depending on the exit status
val outputBuffer = collection.mutable.ArrayBuffer.empty[String]
sys.process.Process(args, Some(specsDir)) ! sys.process.ProcessLogger(o => outputBuffer.append(o)) match {
case 0 =>
// Workaround for #1246 - random tests fail with a NullPointerException in the sbt ConsoleLogger
// I wasn't able to reproduce this locally and there aren't any user reports on this, so we catch
// the NPE and log via println
try {
outputBuffer.foreach(log.info(_))
} catch {
case e: NullPointerException =>
outputBuffer.foreach(println(_))
}
case code =>
outputBuffer.foreach(log.error(_))
sys.error("Unable to run rpmbuild, check output for details. Errorcode " + code)
}
}
}
private[this] val topleveldirs = Seq("BUILD", "RPMS", "SOURCES", "SPECS", "SRPMS", "tmp-buildroot", "buildroot")
  /** Builds the work area by creating and cleaning its top-level directories (including the tmp build root and rpm build root). */
private[this] def buildWorkArea(workArea: File): Unit = {
if (!workArea.exists) workArea.mkdirs()
// TODO - validate workarea
// Clean out work area
topleveldirs map (workArea / _) foreach { d =>
if (d.exists()) IO.delete(d)
d.mkdir()
}
}
def evalMacro(mcro: String): String =
sys.process.Process(Seq("rpm", "--eval", '%' + mcro)).!!
}
|
fsat/sbt-native-packager
|
src/main/scala/com/typesafe/sbt/packager/rpm/RpmHelper.scala
|
Scala
|
bsd-2-clause
| 5,200 |
package pl.bigpicture.wikipv.job
import java.text.SimpleDateFormat
import java.util.Date
import org.scalatest.FunSuite
/**
* Created by kuba on 21/02/16.
*/
class HousekeepingJobTest extends FunSuite {
test("file with current timestamp should not be removed") {
val sdf = new SimpleDateFormat("yyyyMMdd-HH")
val file1 = "pagecounts-%s0000.gz".format(sdf.format(new Date()))
// File with current timestamp should not be removed when
// we keep 1, 2 or 100 day-history
assert(!HousekeepingJob.shouldBeRemoved(file1, 1))
assert(!HousekeepingJob.shouldBeRemoved(file1, 2))
assert(!HousekeepingJob.shouldBeRemoved(file1, 100))
}
test("old file should be removed according to history setting") {
val sdf = new SimpleDateFormat("yyyyMMdd-HH")
// file name with 5 days ago timestamp
val file1 = "pagecounts-%s0000.gz".format(sdf.format(new Date(new Date().getTime - 5 * 24 * 60 * 60 * 1000)))
assert(HousekeepingJob.shouldBeRemoved(file1, 1))
assert(HousekeepingJob.shouldBeRemoved(file1, 2))
assert(HousekeepingJob.shouldBeRemoved(file1, 3))
assert(HousekeepingJob.shouldBeRemoved(file1, 4))
assert(!HousekeepingJob.shouldBeRemoved(file1, 5))
assert(!HousekeepingJob.shouldBeRemoved(file1, 6))
assert(!HousekeepingJob.shouldBeRemoved(file1, 10))
}
}
|
jpieprzyk/wikipv
|
src/test/scala/pl/bigpicture/wikipv/job/HousekeepingJobTest.scala
|
Scala
|
apache-2.0
| 1,335 |
package com.ntsdev.connect4.ai
import com.ntsdev.connect4.game.Game
import com.ntsdev.connect4.model.{Board, RedCell}
import org.specs2.mutable.Specification
class SimpleComputerPlayerSpec extends Specification {
"the simple computer player" should {
"generate a move" in {
val testBoard = Board
val computerPlayer = new SimpleComputerPlayer
val testBoard2 = testBoard.placeCell(0,0,Some(RedCell))
val testGame = new Game(testBoard2)
val column = computerPlayer.nextMove(testGame)
column shouldEqual 0
}
}
}
|
neilshannon/connect4
|
src/test/scala/com/ntsdev/connect4/ai/SimpleComputerPlayerSpec.scala
|
Scala
|
mit
| 558 |
package io.getquill.norm
import io.getquill.Spec
import io.getquill.testContext.qr1
import io.getquill.testContext.qr2
import io.getquill.testContext.quote
import io.getquill.testContext.unquote
class AdHocReductionSpec extends Spec {
"*.filter" - {
"a.filter(b => c).filter(d => e)" in {
val q = quote {
qr1.filter(b => b.s == "s1").filter(d => d.s == "s2")
}
val n = quote {
qr1.filter(b => b.s == "s1" && b.s == "s2")
}
AdHocReduction.unapply(q.ast) mustEqual Some(n.ast)
}
}
"flatMap.*" - {
"a.flatMap(b => c).map(d => e)" in {
val q = quote {
qr1.flatMap(b => qr2).map(d => d.s)
}
val n = quote {
qr1.flatMap(b => qr2.map(d => d.s))
}
AdHocReduction.unapply(q.ast) mustEqual Some(n.ast)
}
"a.flatMap(b => c).filter(d => e)" in {
val q = quote {
qr1.flatMap(b => qr2).filter(d => d.s == "s2")
}
val n = quote {
qr1.flatMap(b => qr2.filter(d => d.s == "s2"))
}
AdHocReduction.unapply(q.ast) mustEqual Some(n.ast)
}
"a.flatMap(b => c.union(d))" in {
val q = quote {
qr1.flatMap(b => qr2.filter(t => t.i == 1).union(qr2.filter(t => t.s == "s")))
}
val n = quote {
qr1.flatMap(b => qr2.filter(t => t.i == 1)).union(qr1.flatMap(b => qr2.filter(t => t.s == "s")))
}
AdHocReduction.unapply(q.ast) mustEqual Some(n.ast)
}
"a.flatMap(b => c.unionAll(d))" in {
val q = quote {
qr1.flatMap(b => qr2.filter(t => t.i == 1).unionAll(qr2.filter(t => t.s == "s")))
}
val n = quote {
qr1.flatMap(b => qr2.filter(t => t.i == 1)).unionAll(qr1.flatMap(b => qr2.filter(t => t.s == "s")))
}
AdHocReduction.unapply(q.ast) mustEqual Some(n.ast)
}
}
}
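// Taken together, these cases document AdHocReduction's rewrites: adjacent
// filters fuse via &&, and map/filter/union(All) are pushed inside flatMap.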
|
getquill/quill
|
quill-core/src/test/scala/io/getquill/norm/AdHocReductionSpec.scala
|
Scala
|
apache-2.0
| 1,813 |
package scalajsreact.select.example.components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
object Footer {
val component = ReactComponentB.static("Footer",
<.footer(^.textAlign.center,
<.div(^.borderBottom := "1px solid grey", ^.padding := "0px"),
<.p(^.paddingTop := "5px", "Built using scalajs/scalajs-react/scalacss")
)
).buildU
def apply() = component()
}
|
lvitaly/scalajs-react-select-example
|
src/main/scala/scalajsreact/select/example/components/Footer.scala
|
Scala
|
apache-2.0
| 421 |
package com.cterm2.mcfm1710
import net.minecraft.nbt.NBTTagCompound
import cpw.mods.fml.relauncher.{SideOnly, Side}
import interops.smartcursor._
package interfaces
{
trait IModuledInjector
{
def storeSpecificData(nbt: NBTTagCompound)
def loadSpecificData(nbt: NBTTagCompound)
}
}
package object EnergyInjector
{
def register()
{
ContentRegistry register BlockModuled.setCreativeTab(FluidTab) as "energyInjector.attachable"
ContentRegistry register BlockStandalone.setCreativeTab(FluidTab) as "energyInjector.standalone"
ContentRegistry register classOf[TEModuled] as "TEEnergyInjectorModuled"
ContentRegistry register classOf[TEStandalone] as "TEEnergyInjector"
}
@SideOnly(Side.CLIENT)
def registerClient()
{
import cpw.mods.fml.client.registry.{RenderingRegistry, ClientRegistry}
BlockModuled.renderType = RenderingRegistry.getNextAvailableRenderId()
RenderingRegistry.registerBlockHandler(BlockModuled.renderType, BlockRenderer)
ClientRegistry.bindTileEntitySpecialRenderer(classOf[TEModuled], TileEntityRenderer)
ClientRegistry.bindTileEntitySpecialRenderer(classOf[TEStandalone], TileEntityRenderer)
}
lazy val ItemModuled = net.minecraft.item.Item.getItemFromBlock(BlockModuled)
}
package EnergyInjector
{
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.block.BlockContainer, net.minecraft.block.material.Material
import net.minecraft.world.World, net.minecraft.entity.EntityLivingBase
import net.minecraftforge.fluids._
import net.minecraft.tileentity.TileEntity
import net.minecraftforge.common.util.ForgeDirection
import net.minecraft.network.NetworkManager
import net.minecraft.network.play.server.S35PacketUpdateTileEntity
import net.minecraft.item.{ItemStack, ItemBlock}
import com.cterm2.mcfm1710.utils.EntityLivingUtils._
import cpw.mods.fml.common.Optional
import com.cterm2.mcfm1710.Fluids
import net.minecraft.util.IIcon
import net.minecraft.client.renderer.texture.IIconRegister
import utils.WorldExtensions._, utils.DeobfuscatorSupport._, utils.EntityLivingUtils._
// Common Values
final object EnergyInjectorSynchronizeDataKeys
{
final val WaterKey = "WaterData"
final val EnergeticKey = "EnergyFluidsData"
final val BlockDirectionKey = "BlockDirection"
}
final object FluidLimits
{
// unit: milli-buckets
final val Module = 4 * 1000
final val Standalone = 16 * 1000
}
// Block //
// Energy Injector Block Base
abstract class BlockBase extends BlockContainer(Material.iron) with interfaces.ITextureIndicesProvider
{
this.setHardness(2.0f)
// Rendering Configurations //
@SideOnly(Side.CLIENT)
override final val renderAsNormalBlock = false
override final val isOpaqueCube = false
protected final val icons = new Array[IIcon](7)
@SideOnly(Side.CLIENT)
override final def getIcon(side: Int, meta: Int) = this.icons(side)
override def onBlockPlacedBy(world: World, x: Int, y: Int, z: Int, placer: EntityLivingBase, stack: ItemStack) =
{
world.setBlockMetadataWithNotify(x, y, z, (placer.facingInt + 2) & 0x03, 2)
Option(world.getTileEntity(x, y, z).asInstanceOf[TEBase]) foreach
{
_.dir = convertFacingDirection((placer.facingInt + 2) & 0x03)
}
}
}
// Attachable Modification
final object BlockModuled extends BlockBase
{
this.setBlockBounds(0.0f, 0.0f, 0.0f, 1.0f, 0.5f, 1.0f)
// Overrided Configurations //
override final val isNormalCube = false
var renderType: Int = 0
override final def getRenderType = renderType
@SideOnly(Side.CLIENT)
override def registerBlockIcons(register: IIconRegister)
{
for((i, tex) <- 0 until 7 map { x => (x, s"mcfm1710:energyInjectorModuled$x") })
this.icons(i) = register.registerIcon(tex)
}
override def createNewTileEntity(world: World, meta: Int) = new TEModuled
override final def onBlockActivated(world: World, x: Int, y: Int, z: Int, player: EntityPlayer, side: Int, xo: Float, yo: Float, zo: Float) =
{
if(world.inServerSide)
{
Option(player.inventory.getCurrentItem) map { _.getItem } foreach
{
case cistack: ItemBlock if cistack.block.isInstanceOf[SourceGenerator.BlockBase] =>
val entity = world.getTileEntity(x, y, z).asInstanceOf[TEModuled]
val meta = world.getBlockMetadata(x, y, z)
world.setBlock(x, y, z, cistack.block)
world.setBlockMetadataWithNotify(x, y, z, SourceGenerator.MetaValue(true, meta), 2)
Option(world.getTileEntity(x, y, z).asInstanceOf[SourceGenerator.TileEntityBase]) foreach
{ newtile =>
newtile.injector = entity
newtile.updateMetaInfo()
}
world.notifyBlockChange(x, y, z, cistack.block)
player.useCurrentItem()
true
case _ => true
}
true
}
else true
}
}
// Standalone
final object BlockStandalone extends BlockBase
{
// Overrided Configurations //
var renderType: Int = 0
override final def getRenderType = renderType
@SideOnly(Side.CLIENT)
override def registerBlockIcons(register: IIconRegister)
{
for((i, tex) <- 0 until 7 map { x => (x, s"mcfm1710:energyInjectorModuled$x") })
this.icons(i) = register.registerIcon(tex)
}
override def createNewTileEntity(world: World, meta: Int) = new TEStandalone
}
// TileEntity //
// Energy Injector Tile Entity Base
@Optional.Interface(iface="com.cterm2.mcfm1710.interops.smartcursor.IInformationProvider", modid=SCModuleConnector.ID, striprefs=true)
abstract class TEBase(val maxFluidAmount: Int) extends TileEntity with IFluidHandler with IInformationProvider
{
tankHolder =>
import EnergyInjectorSynchronizeDataKeys._
// External Values //
var dir = ForgeDirection.UNKNOWN
def facingDegree = this.dir match
{
case ForgeDirection.SOUTH => 0.0d
case ForgeDirection.WEST => 90.0d
case ForgeDirection.NORTH => 180.0d
case ForgeDirection.EAST => -90.0d
case _ => 0.0d
}
def isFacingXAxis = this.dir == ForgeDirection.WEST || this.dir == ForgeDirection.EAST
// Make as Combinated Fluid Tanks //
// Content restricted fluid tank
private class RestrictedFluidTank(val acceptType: Fluid) extends IFluidTank
{
// Tank Traits //
override def getCapacity = tankHolder.maxFluidAmount
override lazy val getInfo = new FluidTankInfo(this)
private[TEBase] def canAccept(fluid: Fluid) = fluid == this.stack.getFluid
private[TEBase] def canAccept(fluid: FluidStack): Boolean = this.canAccept(fluid.getFluid)
// Tank Exports //
private lazy val stack = new FluidStack(acceptType, 0)
override def getFluid = this.stack
override def getFluidAmount = this.stack.amount
// Tank Interacts //
// called when attempting to fill fluids
override def fill(resource: FluidStack, perform: Boolean) = resource match
{
case null => 0
case _ if !this.canAccept(resource) => 0
case _ =>
{
val newAmount = Math.min(resource.amount, this.getCapacity - this.getFluidAmount)
if(perform && newAmount > 0)
{
this.stack.amount += newAmount
tankHolder.updateTileInfo()
}
newAmount
}
}
// Called when attempting to drain fluids
override def drain(maxDrain: Int, perform: Boolean) =
{
val drained = Math.min(maxDrain, this.stack.amount)
if(perform && drained > 0)
{
this.stack.amount -= drained
tankHolder.updateTileInfo()
}
if(drained <= 0) null else new FluidStack(this.stack, drained)
}
// Data Synchronizations //
private[TEBase] def synchronizeData =
{
val tag = new NBTTagCompound
this.stack.writeToNBT(tag)
tag
}
private[TEBase] def synchronizeDataFrom(tag: NBTTagCompound) =
{
val newFluid = FluidStack.loadFluidStackFromNBT(tag)
if(!this.canAccept(newFluid)) throw new RuntimeException("Restriction Error")
this.stack.amount = newFluid.amount
}
}
private lazy val waterTank = new RestrictedFluidTank(FluidRegistry.WATER)
private lazy val energeticTank = new RestrictedFluidTank(EnergeticFluid.Fluid)
private final def getTank(from: ForgeDirection) = from match
{
case ForgeDirection.EAST if dir == ForgeDirection.NORTH => Some(this.waterTank)
case ForgeDirection.EAST if dir == ForgeDirection.SOUTH => Some(this.energeticTank)
case ForgeDirection.WEST if dir == ForgeDirection.NORTH => Some(this.energeticTank)
case ForgeDirection.WEST if dir == ForgeDirection.SOUTH => Some(this.waterTank)
case ForgeDirection.NORTH if dir == ForgeDirection.WEST => Some(this.waterTank)
case ForgeDirection.NORTH if dir == ForgeDirection.EAST => Some(this.energeticTank)
case ForgeDirection.SOUTH if dir == ForgeDirection.WEST => Some(this.energeticTank)
case ForgeDirection.SOUTH if dir == ForgeDirection.EAST => Some(this.waterTank)
case _ => None
}
override final def getTankInfo(from: ForgeDirection) = Array(this.getTank(from) map { _.getInfo } getOrElse null)
override final def canDrain(from: ForgeDirection, fluid: Fluid) = this.getTank(from).isDefined
override final def canFill(from: ForgeDirection, fluid: Fluid) = this.getTank(from) map { _ canAccept fluid } getOrElse false
override final def fill(from: ForgeDirection, resource: FluidStack, perform: Boolean) = this.getTank(from) map
{
x => if(x canAccept resource) x.fill(resource, perform) else 0
} getOrElse 0
override final def drain(from: ForgeDirection, resource: FluidStack, perform: Boolean) = this.getTank(from) map
{
x => if(x.getFluid == resource.getFluid) x.drain(resource.amount, perform) else null
} getOrElse null
override final def drain(from: ForgeDirection, maxDrain: Int, perform: Boolean) =
this.getTank(from) map { _.drain(maxDrain, perform) } getOrElse null
def getWaterTankState = this.waterTank.getFluid
def getEnergeticTankState = this.energeticTank.getFluid
// Data Synchronizations //
final def storeSpecificDataTo(tag: NBTTagCompound) =
{
tag.setTag(WaterKey, this.waterTank.synchronizeData)
tag.setTag(EnergeticKey, this.energeticTank.synchronizeData)
tag
}
final def loadSpecificDataFrom(tag: NBTTagCompound) =
{
this.waterTank synchronizeDataFrom tag.getTag(WaterKey).asInstanceOf[NBTTagCompound]
this.energeticTank synchronizeDataFrom tag.getTag(EnergeticKey).asInstanceOf[NBTTagCompound]
this
}
override final def writeToNBT(tag: NBTTagCompound) =
{
super.writeToNBT(tag)
tag.setInteger(BlockDirectionKey, this.dir.ordinal)
this.storeSpecificDataTo(tag)
}
override final def readFromNBT(tag: NBTTagCompound) =
{
super.readFromNBT(tag)
this.dir = ForgeDirection.values()(tag.getInteger(BlockDirectionKey))
this.loadSpecificDataFrom(tag)
}
override final def getDescriptionPacket() =
this.storeSpecificDataTo _ andThen
(new S35PacketUpdateTileEntity(this.xCoord, this.yCoord, this.zCoord, 1, _)) apply new NBTTagCompound
override final def onDataPacket(net: NetworkManager, packet: S35PacketUpdateTileEntity) =
((_: S35PacketUpdateTileEntity).func_148857_g()) andThen this.loadSpecificDataFrom apply packet
final def updateTileInfo() =
{
if(this.worldObj == null) FMEntry.logger.warn("World Object for EnergyInjector.TEBase is null!")
else
{
this.worldObj.markBlockForUpdate(this.xCoord, this.yCoord, this.zCoord)
this.markDirty()
}
}
// TileEntity Interacts //
// Called when power is transmitted (unit: RF/t); returns the energy actually used
final def injectFluids(amount: Int) =
{
// 1:1 = energy:water => energeticFluid
val newEnergeticFluid = Fluids.newEnergeticFluidStack(amount)
val drainable = Option(this.waterTank.drain(amount, false)) map { _.amount } getOrElse 0
val acceptable = this.energeticTank.fill(newEnergeticFluid, false)
val converted = Math.min(drainable, acceptable)
if(converted > 0)
{
newEnergeticFluid.amount = converted
this.waterTank.drain(converted, true); this.energeticTank.fill(newEnergeticFluid, true)
converted
}
else 0
}
// Information Provider //
override final def provideInformation(list: java.util.List[String]) =
{
list add s"Facing on ${this.dir}"
list add s"Input(Water) Tank amount: ${this.waterTank.getFluidAmount} mb"
list add s"Output(EnergeticFluid) Tank amount: ${this.energeticTank.getFluidAmount} mb"
}
// unnecessary because this is always synchronized for TileEntityRenderer
override final def forceSynchronize(){}
}
// Energy Injector Module Tile Entity
final class TEModuled extends TEBase(FluidLimits.Module)
// Energy Injector Tile Entity
@Optional.Interface(iface = "mekanism.api.energy.IStrictEnergyAcceptor", modid="Mekanism", striprefs = true)
final class TEStandalone extends TEBase(FluidLimits.Standalone)
with mekanism.api.energy.IStrictEnergyAcceptor
{
// Energy Acceptor Interacts //
override def transferEnergyToAcceptor(side: ForgeDirection, amount: Double) = side match
{
case ForgeDirection.UP => this.injectFluids(amount.asInstanceOf[Int]).toDouble
case _ => 0.0d
}
override def canReceiveEnergy(side: ForgeDirection) = side == ForgeDirection.UP
// Energy Storage Exports(Note: EnergyInjector does not store any energies) //
override val getEnergy = 0.0d
override def setEnergy(newValue: Double) = ()
override val getMaxEnergy = FluidLimits.Standalone.toDouble // provides max acceptable energy in EnergyAcceptor
}
// Renderers //
import net.minecraft.client.renderer.tileentity.TileEntitySpecialRenderer
import cpw.mods.fml.client.registry.ISimpleBlockRenderingHandler
@SideOnly(Side.CLIENT)
object TileEntityRenderer extends TileEntitySpecialRenderer
{
import net.minecraft.tileentity.TileEntity
override def renderTileEntityAt(entity: TileEntity, x: Double, y: Double, z: Double, f: Float)
{
entity match
{
case t: TEModuled => renderContent(t, x, y, z, FluidLimits.Module, 0.5f)
case t: TEStandalone => renderContent(t, x, y, z, FluidLimits.Standalone, 1.0f)
}
}
def renderContent(entity: TEBase, x: Double, y: Double, z: Double, capacity: Int, height: Float)
{
import org.lwjgl.opengl.GL11._, net.minecraft.client.renderer.Tessellator
import net.minecraft.client.renderer.texture.TextureMap
import utils.RenderPipeline
val tess = Tessellator.instance
this.bindTexture(TextureMap.locationBlocksTexture)
glPushMatrix()
glTranslated(x + 0.5d, y, z + 0.5d)
glRotated(entity.facingDegree, 0.0d, -1.0d, 0.0d)
glScaled(1.0f - 1.0f / 256.0f, 1.0f - 1.0f / 256.0f, 1.0f - 1.0f / 256.0f)
glTranslated(-0.5d, 0.0d, -0.5d)
glEnable(GL_BLEND)
glDepthMask(false)
glColor4f(1.0f, 1.0f, 1.0f, 1.0f)
tess.startDrawingQuads()
tess.setColorRGBA_F(1.0f, 1.0f, 1.0f, 1.0f)
// val colFactor = if(entity.isFacingXAxis) RenderPipeline.xFactor else RenderPipeline.zFactor
if(entity.getWaterTankState.amount > 0)
{
val amountPercent = (entity.getWaterTankState.amount.toFloat / capacity) * height
val icon = entity.getWaterTankState.getFluid.getStillIcon
val (u1, v1, u2, v2) = (icon.getMinU, icon.getMinV, icon.getInterpolatedU(7.0d), icon.getMaxV)
val (u0, v0, v4, v3) =
(icon.getInterpolatedU(2.0d), icon.getInterpolatedV(2.0d), icon.getInterpolatedV(14.0d), icon.getInterpolatedV(amountPercent * 16.0d))
val (margin, limit) = (1.0f / 8.0f, 0.5f - 1.0f / 16.0f)
// top
// tess.setColorRGBA_F(1.0f, 1.0f, 1.0f, 1.0f)
tess.setNormal(0.0f, 1.0f, 0.0f)
tess.addVertexWithUV( 0.0f, amountPercent, margin, u1, v0)
tess.addVertexWithUV( 0.0f, amountPercent, 1.0f - margin, u1, v4)
tess.addVertexWithUV(margin, amountPercent, 1.0f - margin, u0, v4)
tess.addVertexWithUV(margin, amountPercent, margin, u0, v0)
tess.addVertexWithUV(margin, amountPercent, 0.0f, u0, v1)
tess.addVertexWithUV(margin, amountPercent, 1.0f, u0, v2)
tess.addVertexWithUV( limit, amountPercent, 1.0f, u2, v2)
tess.addVertexWithUV( limit, amountPercent, 0.0f, u2, v1)
// front
// tess.setColorRGBA_F(colFactor, colFactor, colFactor, 1.0f)
tess.setNormal(0.0f, 0.0f, -1.0f)
tess.addVertexWithUV(margin, 0.0f, 0.0f, u0, v1)
tess.addVertexWithUV(margin, amountPercent, 0.0f, u0, v3)
tess.addVertexWithUV( limit, amountPercent, 0.0f, u2, v3)
tess.addVertexWithUV( limit, 0.0f, 0.0f, u2, v1)
// back
// tess.setColorRGBA_F(colFactor, colFactor, colFactor, 1.0f)
tess.setNormal(0.0f, 0.0f, 1.0f)
tess.addVertexWithUV( limit, 0.0f, 1.0f, u2, v1)
tess.addVertexWithUV( limit, amountPercent, 1.0f, u2, v3)
tess.addVertexWithUV(margin, amountPercent, 1.0f, u0, v3)
tess.addVertexWithUV(margin, 0.0f, 1.0f, u0, v1)
}
tess.draw()
glDepthMask(true)
glDisable(GL_BLEND)
glPopMatrix()
}
}
@SideOnly(Side.CLIENT)
object BlockRenderer extends ISimpleBlockRenderingHandler
{
import org.lwjgl.opengl.GL11._
import net.minecraft.block.Block, net.minecraft.client.renderer.{RenderBlocks, Tessellator}
import net.minecraft.world.IBlockAccess
override def renderInventoryBlock(block: Block, modelId: Int, meta: Int, renderer: RenderBlocks) =
{
renderer.setRenderBoundsFromBlock(block)
glRotatef(90.0f, 0.0f, 1.0f, 0.0f)
glTranslatef(-0.5f, -0.5f, -0.5f)
this.renderBlockWithNormals(block, 0.0d, 0.0d, 0.0d, renderer)
glTranslatef(0.5f, 0.5f, 0.5f)
}
def renderHull(rp: utils.RenderPipeline, renderer: RenderBlocks, insideRender: Boolean)
{
val margin = 1.0f / 8.0f
renderer.renderFromInside = insideRender
// render Y Faces(under box)
rp.render2FacesY(0.0f, 1.0f / 256.0f, margin, margin, 0.5f, 1.0f - margin)
rp.render2FacesY(margin, 1.0f / 256.0f, 0.0f, 1.0f - margin, 0.5f, 1.0f)
rp.render2FacesY(1.0f - margin, 1.0f / 256.0f, margin, 1.0f, 0.5f, 1.0f - margin)
// render Z Faces
renderer.flipTexture = insideRender
rp.render2FacesZ(margin, 0.0f, 0.0f, 1.0f - margin, 0.5f, 1.0f)
renderer.flipTexture = false
rp.render2FacesZ(0.0f, 0.0f, margin, margin, 0.5f, 1.0f - margin)
rp.render2FacesZ(1.0f - margin, 0.0f, margin, 1.0f, 0.5f, 1.0f - margin)
// render X Faces
renderer.flipTexture = insideRender
rp.render2FacesX(0.0f, 0.0f, margin, 1.0f, 0.5f, 1.0f - margin)
renderer.flipTexture = false
rp.render2FacesX(margin, 0.0f, 0.0f, 1.0f - margin, 0.5f, margin)
rp.render2FacesX(margin, 0.0f, 1.0f - margin, 1.0f - margin, 0.5f, 1.0f)
renderer.renderFromInside = false
}
def renderSeparator(rp: utils.RenderPipeline, facing: Int)
{
val margin = 1.0f / 8.0f
if((facing & 0x01) != 0)
{
rp.render2FacesZ(0.0f, 0.0f, 0.5f - margin * 0.5f, 1.0f, 0.5f, 0.5f + margin * 0.5f, 6)
}
else
{
rp.render2FacesX(0.5f - margin * 0.5f, 0.0f, 0.0f, 0.5f + margin * 0.5f, 0.5f, 1.0f, 6)
}
}
override def renderWorldBlock(world: IBlockAccess, x: Int, y: Int, z: Int, block: Block, modelId: Int, renderer: RenderBlocks) =
{
val facing = world.getBlockMetadata(x, y, z) & 0x03
val rp = new utils.RenderPipeline(renderer, block.asInstanceOf[BlockBase], x, y, z, facing)
val margin = 1.0f / 8.0f
renderHull(rp, renderer, false)
renderHull(rp, renderer, true)
renderSeparator(rp, facing)
rp.close()
true
}
override def shouldRender3DInInventory(meta: Int) = true
override def getRenderId = BlockModuled.renderType
private def renderBlockWithNormals(blk: Block, x: Double, y: Double, z: Double, renderer: RenderBlocks) =
{
import net.minecraft.init.Blocks._
val margin = 1.0d / 8.0d
val tex = iron_block.getBlockTextureFromSide(0)
// shrunken render with RenderBlocks
val tess = Tessellator.instance
def renderShell() =
{
tess.startDrawingQuads(); tess.setNormal(0.0f, -1.0f, 0.0f)
renderer.renderFaceYNeg(blk, x, y + 1.0d / 256.0d, z, tex)
// tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 1.0f, 0.0f)
// renderer.renderFaceYPos(blk, x, y - 1.0d / 256.0d, z, tex)
renderer.renderMinZ += margin; renderer.renderMaxZ -= margin
tess.draw(); tess.startDrawingQuads(); tess.setNormal(-1.0f, 0.0f, 0.0f)
renderer.renderFaceXNeg(blk, x, y, z, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(1.0f, 0.0f, 0.0f)
renderer.renderFaceXPos(blk, x, y, z, tex)
renderer.renderMinZ -= margin; renderer.renderMaxZ += margin
renderer.renderMinX += margin; renderer.renderMaxX -= margin
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, -1.0f)
renderer.renderFaceZNeg(blk, x, y, z, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, 1.0f)
renderer.renderFaceZPos(blk, x, y, z, tex)
renderer.renderMinX -= margin; renderer.renderMaxX += margin
renderer.renderMinX = 0; renderer.renderMaxX = margin
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, 1.0f)
renderer.renderFaceZPos(blk, x, y, z - margin, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, -1.0f)
renderer.renderFaceZNeg(blk, x, y, z + margin, tex)
renderer.renderMinX = 1.0d - margin; renderer.renderMaxX = 1.0d
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, 1.0f)
renderer.renderFaceZPos(blk, x, y, z - margin, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(0.0f, 0.0f, -1.0f)
renderer.renderFaceZNeg(blk, x, y, z + margin, tex)
renderer.renderMinX = 0.0d
renderer.renderMinZ = 0; renderer.renderMaxZ = margin
tess.draw(); tess.startDrawingQuads(); tess.setNormal(1.0f, 0.0f, 0.0f)
renderer.renderFaceXPos(blk, x - margin, y, z, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(-1.0f, 0.0f, 0.0f)
renderer.renderFaceXNeg(blk, x + margin, y, z, tex)
renderer.renderMinZ = 1.0d - margin; renderer.renderMaxZ = 1.0d
tess.draw(); tess.startDrawingQuads(); tess.setNormal(1.0f, 0.0f, 0.0f)
renderer.renderFaceXPos(blk, x - margin, y, z, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(-1.0f, 0.0f, 0.0f)
renderer.renderFaceXNeg(blk, x + margin, y, z, tex)
tess.draw()
renderer.renderMinZ = 0.0d
}
renderer.renderFromInside = true; renderShell()
renderer.renderFromInside = false; renderShell()
// separator
tess.startDrawingQuads(); tess.setNormal(1.0f, 0.0f, 0.0f)
renderer.renderFaceXPos(blk, x - 0.5d + 0.5d * margin, y, z, tex)
tess.draw(); tess.startDrawingQuads(); tess.setNormal(-1.0f, 0.0f, 0.0f)
renderer.renderFaceXNeg(blk, x + 0.5d - 0.5d * margin, y, z, tex)
tess.draw()
}
}
}
|
Pctg-x8/fluidMechanics
|
src/contents/energyInjector.scala
|
Scala
|
lgpl-2.1
| 23,216 |
package org.jetbrains.plugins.scala.actions
import java.awt.event.{MouseAdapter, MouseEvent}
import java.awt.{Color, Point}
import javax.swing._
import javax.swing.border.Border
import javax.swing.event.{ListSelectionEvent, ListSelectionListener}
import com.intellij.codeInsight.CodeInsightBundle
import com.intellij.openapi.actionSystem._
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.editor.colors.EditorFontType
import com.intellij.openapi.keymap.KeymapUtil
import com.intellij.openapi.project.Project
import com.intellij.openapi.ui.popup.{JBPopup, JBPopupFactory}
import com.intellij.openapi.util.IconLoader
import com.intellij.psi._
import com.intellij.psi.util.PsiUtilBase
import com.intellij.util.Alarm
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.presentation.ScImplicitFunctionListCellRenderer
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil
import org.jetbrains.plugins.scala.util.IntentionUtils.showMakeExplicitPopup
import org.jetbrains.plugins.scala.util.{IntentionUtils, JListCompatibility}
import scala.collection.mutable.ArrayBuffer
/**
* User: Alexander Podkhalyuzin
* Date: 02.06.2010
*/
object GoToImplicitConversionAction {
var popup: JBPopup = null
def getPopup: JBPopup = popup
def setPopup(p: JBPopup) {
popup = p
}
}
class GoToImplicitConversionAction extends AnAction("Go to implicit conversion action") {
private var hint: LightBulbHint = null
private val hintAlarm: Alarm = new Alarm
override def update(e: AnActionEvent) {
ScalaActionUtil.enableAndShowIfInScalaFile(e)
}
def actionPerformed(e: AnActionEvent) {
val context = e.getDataContext
val project = CommonDataKeys.PROJECT.getData(context)
val editor = CommonDataKeys.EDITOR.getData(context)
if (project == null || editor == null) return
val file = PsiUtilBase.getPsiFileInEditor(editor, project)
if (!file.isInstanceOf[ScalaFile]) return
def forExpr(expr: ScExpression): Boolean = {
val (implicitElement: Option[PsiNamedElement], fromUnderscore: Boolean) = {
def additionalImplicitElement = expr.getAdditionalExpression.flatMap {
case (additional, tp) => additional.implicitElement(expectedOption = Some(tp))
}
if (ScUnderScoreSectionUtil.isUnderscoreFunction(expr)) {
expr.implicitElement(fromUnderscore = true) match {
case someElement@Some(_) => (someElement, true)
case _ => (expr.implicitElement().orElse(additionalImplicitElement), false)
}
} else (additionalImplicitElement.orElse(expr.implicitElement()), false)
}
val conversions = expr.getAllImplicitConversions(fromUnderscore = fromUnderscore)
if (conversions.isEmpty) return true
val conversionFun = implicitElement.orNull
val model = JListCompatibility.createDefaultListModel()
var actualIndex = -1
//TODO: actualIndex should be computed differently when conversionFun is not among the conversions
for (element <- conversions) {
val elem = Parameters(element, expr, project, editor, conversions)
JListCompatibility.addElement(model, elem)
if (element == conversionFun) actualIndex = model.indexOf(elem)
}
val list = JListCompatibility.createJListFromModel(model)
val renderer = new ScImplicitFunctionListCellRenderer(conversionFun)
val font = editor.getColorsScheme.getFont(EditorFontType.PLAIN)
renderer.setFont(font)
list.setFont(font)
JListCompatibility.setCellRenderer(list, renderer)
list.getSelectionModel.addListSelectionListener(new ListSelectionListener {
def valueChanged(e: ListSelectionEvent) {
hintAlarm.cancelAllRequests
val item = list.getSelectedValue.asInstanceOf[Parameters]
if (item == null) return
updateHint(item)
}
})
JListCompatibility.GoToImplicitConversionAction.setList(list)
val builder = JBPopupFactory.getInstance.createListPopupBuilder(list)
val popup = builder.setTitle("Choose implicit conversion method:").setAdText("Press Alt+Enter").
setMovable(false).setResizable(false).setRequestFocus(true).
setItemChoosenCallback(new Runnable {
def run() {
val entity = list.getSelectedValue.asInstanceOf[Parameters]
entity.newExpression match {
case f: ScFunction =>
f.getSyntheticNavigationElement match {
case Some(n: NavigatablePsiElement) => n.navigate(true)
case _ => f.navigate(true)
}
case n: NavigatablePsiElement => n.navigate(true)
case _ => //do nothing
}
}
}).createPopup
popup.showInBestPositionFor(editor)
if (actualIndex >= 0 && actualIndex < list.getModel.getSize) {
list.getSelectionModel.setSelectionInterval(actualIndex, actualIndex)
list.ensureIndexIsVisible(actualIndex)
}
GoToImplicitConversionAction.setPopup(popup)
hint = new LightBulbHint(editor, project, expr, conversions)
false
}
if (editor.getSelectionModel.hasSelection) {
val selectionStart = editor.getSelectionModel.getSelectionStart
val selectionEnd = editor.getSelectionModel.getSelectionEnd
val opt = ScalaRefactoringUtil.getExpression(project, editor, file, selectionStart, selectionEnd)
opt match {
case Some((expr, _)) =>
if (forExpr(expr)) return
case _ =>
}
} else {
val offset = editor.getCaretModel.getOffset
val element: PsiElement = file.findElementAt(offset) match {
case w: PsiWhiteSpace if w.getTextRange.getStartOffset == offset &&
w.getText.contains("\\n") => file.findElementAt(offset - 1)
case p => p
}
def getExpressions(guard: Boolean): Array[ScExpression] = {
val res = new ArrayBuffer[ScExpression]
var parent = element
while (parent != null) {
parent match {
case expr: ScReferenceExpression if guard =>
expr.getContext match {
case postf: ScPostfixExpr if postf.operation == expr =>
case pref: ScPrefixExpr if pref.operation == expr =>
case inf: ScInfixExpr if inf.operation == expr =>
case _ => res += expr
}
case expr: ScExpression if guard || expr.implicitElement().isDefined ||
(ScUnderScoreSectionUtil.isUnderscoreFunction(expr) &&
expr.implicitElement(fromUnderscore = true).isDefined) || expr.getAdditionalExpression.flatMap {
case (additional, tp) => additional.implicitElement(expectedOption = Some(tp))
}.isDefined =>
res += expr
case _ =>
}
parent = parent.getParent
}
res.toArray
}
val expressions = {
val falseGuard = getExpressions(guard = false)
if (falseGuard.length != 0) falseGuard
else getExpressions(guard = true)
}
def chooseExpression(expr: ScExpression) {
editor.getSelectionModel.setSelection(expr.getTextRange.getStartOffset,
expr.getTextRange.getEndOffset)
forExpr(expr)
}
if (expressions.length == 0)
editor.getSelectionModel.selectLineAtCaret()
else if (expressions.length == 1) {
chooseExpression(expressions(0))
} else {
ScalaRefactoringUtil.showChooser(editor, expressions, (elem: ScExpression)=>
chooseExpression(elem), "Expressions", (expr: ScExpression) => {
ScalaRefactoringUtil.getShortText(expr)
})
}
}
}
private def updateHint(element: Parameters): Unit = {
if (element.newExpression == null || !element.newExpression.isValid) return
val list = JListCompatibility.GoToImplicitConversionAction.getList
if (hint != null) {
list.remove(hint)
hint = null
list.revalidate()
list.repaint()
}
hintAlarm.addRequest(new Runnable {
def run() {
hint = new LightBulbHint(element.editor, element.project, element.oldExpression, element.elements)
list.add(hint, 20, 0)
hint.setBulbLayout()
}
}, 500)
}
class LightBulbHint(editor: Editor, project: Project, expr: ScExpression, elements: Seq[PsiNamedElement]) extends JLabel {
private final val INACTIVE_BORDER: Border = BorderFactory.createEmptyBorder(4, 4, 4, 4)
private final val ACTIVE_BORDER: Border =
BorderFactory.createCompoundBorder(BorderFactory.createLineBorder(Color.BLACK, 1),
BorderFactory.createEmptyBorder(3, 3, 3, 3))
private final val INDENT = 20
setOpaque(false)
setBorder(INACTIVE_BORDER)
setIcon(IconLoader.findIcon("/actions/intentionBulb.png"))
private val toolTipText: String = KeymapUtil.getFirstKeyboardShortcutText(
ActionManager.getInstance.getAction(IdeActions.ACTION_SHOW_INTENTION_ACTIONS))
if (toolTipText.length > 0) {
setToolTipText(CodeInsightBundle.message("lightbulb.tooltip", toolTipText))
}
addMouseListener(new MouseAdapter {
override def mouseEntered(e: MouseEvent): Unit = {
setBorder(ACTIVE_BORDER)
}
override def mouseExited(e: MouseEvent): Unit = {
setBorder(INACTIVE_BORDER)
}
override def mousePressed(e: MouseEvent): Unit = {
if (!e.isPopupTrigger && e.getButton == MouseEvent.BUTTON1) {
selectedValue.newExpression match {
case function: ScFunction =>
showMakeExplicitPopup(project, expr, function, editor, elements)
case _ =>
}
}
}
})
def setBulbLayout(): Unit = {
if (selectedValue.newExpression != null) {
val bounds = IntentionUtils.getCurrentItemBounds
setSize(getPreferredSize)
setLocation(new Point(bounds.x + bounds.width - getWidth - INDENT, bounds.y))
}
}
private def selectedValue =
JListCompatibility.GoToImplicitConversionAction.getList
.getSelectedValue.asInstanceOf[Parameters]
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/actions/GoToImplicitConversionAction.scala
|
Scala
|
apache-2.0
| 10,292 |
package org.eknet.county
/**
* @author Eike Kettner [email protected]
* @since 23.03.13 12:30
*/
sealed trait Granularity extends Serializable {
def name: String
def keyFor(millis: Long): TimeKey
}
object Granularity {
val values = List(Millis, Second, Minute, Hour, Day, Month, Year)
@SerialVersionUID(20130322L)
object Millis extends Granularity {
val name = "millis"
def keyFor(millis: Long) = TimeKey(millis)
}
@SerialVersionUID(20130322L)
object Second extends Granularity {
val name = "second"
def keyFor(millis: Long) = TimeKey(millis).bySeconds
}
@SerialVersionUID(20130322L)
object Minute extends Granularity {
val name = "minute"
def keyFor(millis: Long) = TimeKey(millis).byMinutes
}
@SerialVersionUID(20130322L)
object Hour extends Granularity {
val name = "hour"
def keyFor(millis: Long) = TimeKey(millis).byHour
}
@SerialVersionUID(20130322L)
object Day extends Granularity {
val name = "day"
def keyFor(millis: Long) = TimeKey(millis).byDay
}
@SerialVersionUID(20130322L)
object Month extends Granularity {
val name = "month"
def keyFor(millis: Long) = TimeKey(millis).byMonth
}
@SerialVersionUID(20130322L)
object Year extends Granularity {
val name = "year"
def keyFor(millis: Long) = TimeKey(millis).byYear
}
}
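// Illustrative usage (not part of the original file; assumes TimeKey's by*
// methods truncate to the unit, so key equality means "same bucket"):
object GranularityExample extends App {
  val now = System.currentTimeMillis()
  // Two timestamps 1 ms apart normally share an hourly bucket,
  // except exactly at an hour boundary.
  println(Granularity.Hour.keyFor(now) == Granularity.Hour.keyFor(now + 1))
}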
|
eikek/county
|
api/src/main/scala/org/eknet/county/Granularity.scala
|
Scala
|
apache-2.0
| 1,354 |
package org.gbif.population.spark
import org.junit._
import org.scalatest.FunSpec
@Test
class LinearRegressionTest extends FunSpec {
describe("A simple linear regression") {
it("should cross y axis at 0 with a slope of 1") {
      val data = List((0.0, 0.0), (1.0, 1.0), (2.0, 2.0))
      val result = LinearRegression.process(data)
assert(result.c === 0.0)
assert(result.m === 1.0)
}
}
}
|
gbif/species-population
|
spark-process/src/test/scala/org/gbif/population/spark/LinearRegressionTest.scala
|
Scala
|
apache-2.0
| 577 |
package org.elasticmq.rest.sqs
import com.amazonaws.auth.{AWSStaticCredentialsProvider, BasicAWSCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.sqs.model.{Message, ReceiveMessageRequest}
import com.amazonaws.services.sqs.{AmazonSQS, AmazonSQSClientBuilder}
import org.apache.http.impl.client.{CloseableHttpClient, HttpClients}
import org.elasticmq.util.Logging
import org.elasticmq.{NodeAddress, RelaxedSQSLimits}
import org.scalatest.{Args, BeforeAndAfter, Status}
import org.scalatest.funsuite.AnyFunSuite
import scala.collection.JavaConverters._
import scala.util.Try
trait SqsClientServerCommunication extends AnyFunSuite with BeforeAndAfter with Logging {
var client: AmazonSQS = _ // strict server
var relaxedClient: AmazonSQS = _
var httpClient: CloseableHttpClient = _
var currentTestName: String = _
var strictServer: SQSRestServer = _
var relaxedServer: SQSRestServer = _
val awsAccountId = "123456789012"
val awsRegion = "elasticmq"
before {
logger.info(s"\\n---\\nRunning test: $currentTestName\\n---\\n")
strictServer = SQSRestServerBuilder
.withPort(9321)
.withServerAddress(NodeAddress(port = 9321))
.withAWSAccountId(awsAccountId)
.withAWSRegion(awsRegion)
.start()
relaxedServer = SQSRestServerBuilder
.withPort(9322)
.withServerAddress(NodeAddress(port = 9322))
.withSQSLimits(RelaxedSQSLimits)
.start()
strictServer.waitUntilStarted()
relaxedServer.waitUntilStarted()
client = AmazonSQSClientBuilder
.standard()
.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("x", "x")))
.withEndpointConfiguration(new EndpointConfiguration("http://localhost:9321", "us-east-1"))
.build()
relaxedClient = AmazonSQSClientBuilder
.standard()
.withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("x", "x")))
.withEndpointConfiguration(new EndpointConfiguration("http://localhost:9322", "us-east-1"))
.build()
httpClient = HttpClients.createDefault()
}
after {
client.shutdown()
relaxedClient.shutdown()
httpClient.close()
// TODO: Figure out why this intermittently isn't able to unbind cleanly
Try(strictServer.stopAndWait())
Try(relaxedServer.stopAndWait())
logger.info(s"\\n---\\nTest done: $currentTestName\\n---\\n")
}
override protected def runTest(testName: String, args: Args): Status = {
currentTestName = testName
val result = super.runTest(testName, args)
currentTestName = null
result
}
def receiveSingleMessageObject(queueUrl: String): Option[Message] = {
receiveSingleMessageObject(queueUrl, List("All"))
}
def receiveSingleMessageObject(queueUrl: String, requestedAttributes: List[String]): Option[Message] = {
client
.receiveMessage(new ReceiveMessageRequest(queueUrl).withMessageAttributeNames(requestedAttributes.asJava))
.getMessages
.asScala
.headOption
}
def receiveSingleMessage(queueUrl: String): Option[String] = {
receiveSingleMessage(queueUrl, List("All"))
}
def receiveSingleMessage(queueUrl: String, requestedAttributes: List[String]): Option[String] = {
    val messages = client
      .receiveMessage(new ReceiveMessageRequest(queueUrl).withMessageAttributeNames(requestedAttributes.asJava))
      .getMessages
      .asScala
messages.headOption.map(_.getBody)
}
}
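// Hypothetical usage sketch (not part of the original file): a suite mixing in
// the trait above; the queue name and message body are made-up values.
class RoundTripExample extends SqsClientServerCommunication {
  test("round-trips a message") {
    val queueUrl = client.createQueue("round-trip-example").getQueueUrl
    client.sendMessage(queueUrl, "hello")
    assert(receiveSingleMessage(queueUrl).contains("hello"))
  }
}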
|
adamw/elasticmq
|
rest/rest-sqs-testing-amazon-java-sdk/src/test/scala/org/elasticmq/rest/sqs/SqsClientServerCommunication.scala
|
Scala
|
apache-2.0
| 3,412 |
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package play.api.db
import java.sql.{ Connection, Driver, DriverManager }
import javax.sql.DataSource
import scala.util.control.{ NonFatal, ControlThrowable }
import play.api.Configuration
import play.utils.{ ProxyDriver, Reflect }
/**
* Database API.
*/
trait Database {
/**
* The configuration name for this database.
*/
def name: String
/**
* The underlying JDBC data source for this database.
*/
def dataSource: DataSource
/**
   * The JDBC connection URL for this database, i.e. `jdbc:...`.
* Normally retrieved via a connection.
*/
def url: String
/**
* Get a JDBC connection from the underlying data source.
* Autocommit is enabled by default.
*
* Don't forget to release the connection at some point by calling close().
*
* @return a JDBC connection
*/
def getConnection(): Connection
/**
* Get a JDBC connection from the underlying data source.
*
* Don't forget to release the connection at some point by calling close().
*
* @param autocommit determines whether to autocommit the connection
* @return a JDBC connection
*/
def getConnection(autocommit: Boolean): Connection
/**
* Execute a block of code, providing a JDBC connection.
* The connection and all created statements are automatically released.
*
* @param block code to execute
* @return the result of the code block
*/
def withConnection[A](block: Connection => A): A
/**
* Execute a block of code, providing a JDBC connection.
* The connection and all created statements are automatically released.
*
* @param autocommit determines whether to autocommit the connection
* @param block code to execute
* @return the result of the code block
*/
def withConnection[A](autocommit: Boolean)(block: Connection => A): A
/**
* Execute a block of code in the scope of a JDBC transaction.
* The connection and all created statements are automatically released.
* The transaction is automatically committed, unless an exception occurs.
*
* @param block code to execute
* @return the result of the code block
*/
def withTransaction[A](block: Connection => A): A
/**
* Shutdown this database, closing the underlying data source.
*/
def shutdown(): Unit
}
/**
* Creation helpers for manually instantiating databases.
*/
object Database {
/**
* Create a pooled database with the given configuration.
*
* @param name the database name
* @param driver the database driver class
* @param url the database url
* @param config a map of extra database configuration
* @return a configured database
*/
def apply(name: String, driver: String, url: String, config: Map[String, _ <: Any] = Map.empty): Database = {
val dbConfig = Configuration.from(Map("driver" -> driver, "url" -> url) ++ config)
new PooledDatabase(name, dbConfig)
}
/**
* Create a pooled database named "default" with the given configuration.
*
* @param driver the database driver class
* @param url the database url
* @param config a map of extra database configuration
* @return a configured database
*/
def apply(driver: String, url: String, config: Map[String, _ <: Any]): Database = {
Database("default", driver, url, config)
}
/**
* Create a pooled database named "default" with the given driver and url.
*
* @param driver the database driver class
* @param url the database url
* @return a configured database
*/
def apply(driver: String, url: String): Database = {
Database("default", driver, url, Map.empty)
}
/**
* Create an in-memory H2 database.
*
* @param name the database name (defaults to "default")
* @param urlOptions a map of extra url options
* @param config a map of extra database configuration
* @return a configured in-memory h2 database
*/
def inMemory(name: String = "default", urlOptions: Map[String, String] = Map.empty, config: Map[String, _ <: Any] = Map.empty): Database = {
val driver = "org.h2.Driver"
val urlExtra = urlOptions.map { case (k, v) => k + "=" + v }.mkString(";", ";", "")
val url = "jdbc:h2:mem:" + name + urlExtra
Database(name, driver, url, config)
}
}
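/**
 * Illustrative usage sketch (not part of the original file): exercises the
 * helpers above; assumes the H2 driver is on the classpath.
 */
object DatabaseExample extends App {
  val db = Database.inMemory("example")
  try {
    db.withTransaction { conn =>
      val stmt = conn.createStatement()
      stmt.execute("CREATE TABLE greetings (msg VARCHAR(32))")
      stmt.execute("INSERT INTO greetings VALUES ('hello')")
    }
  } finally {
    db.shutdown()
  }
}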
/**
* Default implementation of the database API.
* Provides driver registration and connection methods.
*/
abstract class DefaultDatabase(val name: String, configuration: Configuration, classLoader: ClassLoader) extends Database {
// abstract methods to be implemented
def createDataSource(): DataSource
def closeDataSource(dataSource: DataSource): Unit
// driver registration
lazy val driver: Driver = {
val driverClass = configuration.getString("driver").getOrElse {
throw configuration.reportError(name, s"Missing configuration [db.$name.driver]")
}
try {
val proxyDriver = new ProxyDriver(Reflect.createInstance[Driver](driverClass, classLoader))
DriverManager.registerDriver(proxyDriver)
proxyDriver
} catch {
case NonFatal(e) => throw configuration.reportError("driver", s"Driver not found: [$driverClass]", Some(e))
}
}
// lazy data source creation
lazy val dataSource: DataSource = {
driver // trigger driver registration
createDataSource
}
lazy val url: String = {
val connection = dataSource.getConnection
try {
connection.getMetaData.getURL
} finally {
connection.close()
}
}
// connection methods
def getConnection(): Connection = {
getConnection(autocommit = true)
}
def getConnection(autocommit: Boolean): Connection = {
val connection = dataSource.getConnection
connection.setAutoCommit(autocommit)
connection
}
def withConnection[A](block: Connection => A): A = {
withConnection(autocommit = true)(block)
}
def withConnection[A](autocommit: Boolean)(block: Connection => A): A = {
val connection = getConnection(autocommit)
try {
block(connection)
} finally {
connection.close()
}
}
def withTransaction[A](block: Connection => A): A = {
withConnection(autocommit = false) { connection =>
try {
val r = block(connection)
connection.commit()
r
} catch {
case e: ControlThrowable =>
connection.commit()
throw e
case e: Throwable =>
connection.rollback()
throw e
}
}
}
// shutdown
def shutdown(): Unit = {
closeDataSource(dataSource)
deregisterDriver()
}
def deregisterDriver(): Unit = {
DriverManager.deregisterDriver(driver)
}
}
/**
* Default implementation of the database API using a connection pool.
*/
class PooledDatabase(name: String, configuration: Configuration, classLoader: ClassLoader, pool: ConnectionPool)
extends DefaultDatabase(name, configuration, classLoader) {
def this(name: String, configuration: Configuration) = this(name, configuration, classOf[PooledDatabase].getClassLoader, new BoneConnectionPool)
def createDataSource(): DataSource = pool.create(name, configuration, classLoader)
def closeDataSource(dataSource: DataSource): Unit = pool.close(dataSource)
}
|
jyotikamboj/container
|
pf-framework/src/play-jdbc/src/main/scala/play/api/db/Database.scala
|
Scala
|
mit
| 7,327 |
type Id[A] = A
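// Illustrative companion to the alias above (not in the original snippet):
// Id is the identity container, so "mapping" over it is plain function application.
def mapId[A, B](fa: Id[A])(f: A => B): Id[B] = f(fa)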
|
hmemcpy/milewski-ctfp-pdf
|
src/content/1.8/code/scala/snippet05.scala
|
Scala
|
gpl-3.0
| 14 |
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
import cats.Show
import cats.parse.Parser
import cats.parse.Rfc5234
import org.http4s.EntityTag.Strong
import org.http4s.EntityTag.Weakness
import org.http4s.internal.parsing.Rfc7230
import org.http4s.util.Renderable
import org.http4s.util.Writer
final case class EntityTag(tag: String, weakness: Weakness = Strong) extends Renderable {
override def toString(): String =
weakness match {
case EntityTag.Weak => "W/\"" + tag + '"'
case EntityTag.Strong => "\"" + tag + '"'
}
override def render(writer: Writer): writer.type =
writer << toString()
}
object EntityTag {
implicit val http4sShowForEntityTag: Show[EntityTag] =
Show.fromToString
sealed trait Weakness extends Product with Serializable
case object Weak extends Weakness
case object Strong extends Weakness
/*
* entity-tag = [ weak ] opaque-tag
*
* @see [[https://datatracker.ietf.org/doc/html/rfc7232#section-2.3]]
*/
private[http4s] val parser: Parser[EntityTag] = {
import Parser.{charIn, string}
import Rfc5234.dquote
import Rfc7230.obsText
// weak = %x57.2F ; "W/", case-sensitive
val weak = string("W/").as(EntityTag.Weak)
// etagc = %x21 / %x23-7E / obs-text
// ; VCHAR except double quotes, plus obs-text
val etagc = charIn(0x21.toChar, 0x23.toChar to 0x7e.toChar: _*).orElse(obsText)
// opaque-tag = DQUOTE *etagc DQUOTE
val opaqueTag = dquote *> etagc.rep0.string <* dquote
(weak.?.with1 ~ opaqueTag).map { case (weakness, tag) =>
new EntityTag(tag, weakness.getOrElse(EntityTag.Strong))
}
}
}
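// Illustrative rendering sketch (not part of the original file):
// strong tags render quoted, weak tags get the case-sensitive W/ prefix.
object EntityTagExample extends App {
  println(EntityTag("xyzzy"))                 // "xyzzy"
  println(EntityTag("xyzzy", EntityTag.Weak)) // W/"xyzzy"
}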
|
http4s/http4s
|
core/shared/src/main/scala/org/http4s/EntityTag.scala
|
Scala
|
apache-2.0
| 2,221 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api
import org.specs2.mutable.Specification
class LoggerConfiguratorSpec extends Specification {
"generateProperties" should {
"generate in the simplest case" in {
val env = Environment.simple()
val config = Configuration.empty
val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
properties.size must beEqualTo(1)
properties must havePair("application.home" -> env.rootPath.getAbsolutePath)
}
"generate in the case of including string config property" in {
val env = Environment.simple()
val config = Configuration(
"play.logger.includeConfigProperties" -> true,
"my.string.in.application.conf" -> "hello"
)
val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
properties must havePair("my.string.in.application.conf" -> "hello")
}
"generate in the case of including integer config property" in {
val env = Environment.simple()
val config = Configuration(
"play.logger.includeConfigProperties" -> true,
"my.number.in.application.conf" -> 1
)
val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
properties must havePair("my.number.in.application.conf" -> "1")
}
"generate in the case of including null config property" in {
val env = Environment.simple()
val config = Configuration(
"play.logger.includeConfigProperties" -> true,
"my.null.in.application.conf" -> null
)
val properties = LoggerConfigurator.generateProperties(env, config, Map.empty)
// nulls are excluded, you must specify them directly
// https://typesafehub.github.io/config/latest/api/com/typesafe/config/Config.html#entrySet--
properties must not haveKey ("my.null.in.application.conf")
}
"generate in the case of direct properties" in {
val env = Environment.simple()
val config = Configuration.empty
val optProperties = Map("direct.map.property" -> "goodbye")
val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
properties.size must beEqualTo(2)
properties must havePair("application.home" -> env.rootPath.getAbsolutePath)
properties must havePair("direct.map.property" -> "goodbye")
}
"generate a null using direct properties" in {
val env = Environment.simple()
val config = Configuration.empty
val optProperties = Map("direct.null.property" -> null)
val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
properties must havePair("direct.null.property" -> null)
}
"override config property with direct properties" in {
val env = Environment.simple()
val config = Configuration("some.property" -> "AAA")
val optProperties = Map("some.property" -> "BBB")
val properties = LoggerConfigurator.generateProperties(env, config, optProperties)
properties must havePair("some.property" -> "BBB")
}
}
}
|
aradchykov/playframework
|
framework/src/play/src/test/scala/play/api/LoggerConfiguratorSpec.scala
|
Scala
|
apache-2.0
| 3,153 |
package com.github.diegopacheco.sandbox.scripts.scala.basic.varargs
object VarargsMain extends App {
def sumItAll(vs:Int*):Int = vs.foldLeft(0)(_+_)
println( sumItAll(1,2,3,4,5) )
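  // A whole sequence can be spread into the varargs slot with ": _*"
  println( sumItAll(List(10, 20, 30): _*) )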
}
|
diegopacheco/scala-playground
|
scala-playground/src/com/github/diegopacheco/sandbox/scripts/scala/basic/varargs/VarargsMain.scala
|
Scala
|
unlicense
| 195 |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db;
import java.io.File
import scouter.lang.TextTypes
import scouter.server.{Configure, Logger}
import scouter.server.core.cache.TextCache
import scouter.server.util.ThreadScala
import scouter.util.RequestQueue
import scouter.util.StringUtil
import scouter.server.db.text.TextPermIndex
import scouter.server.db.text.TextPermData
object TextPermWR {
val queue = new RequestQueue[Data](DBCtr.LARGE_MAX_QUE_SIZE);
  // Only errors are stored by date (daily). -20151110
def isA(divs: String): Boolean = {
val conf = Configure.getInstance();
divs match {
case TextTypes.ERROR => false
case TextTypes.SERVICE => !conf.mgr_text_db_daily_service_enabled
case TextTypes.APICALL => !conf.mgr_text_db_daily_api_enabled
case TextTypes.USER_AGENT => !conf.mgr_text_db_daily_ua_enabled
case _ => true
}
}
ThreadScala.start("scouter.server.db.TextPermWR") {
while (DBCtr.running) {
val data = queue.get();
var (indexDb, dataDb) = open(data.div);
try {
if (indexDb == null) {
queue.clear();
Logger.println("S137", 10, "can't open db");
} else {
val ok = indexDb.hasKey(data.hash);
if (ok == false) {
val dataPos = dataDb.write(data.text.getBytes("UTF8"));
indexDb.set(data.hash, dataPos);
}
}
} catch {
case t: Throwable => t.printStackTrace();
}
}
close();
}
def add(divHash: String, hash: Int, text: String) {
if (StringUtil.isEmpty(text))
return
TextCache.put(divHash, hash, text)
val ok = queue.put(new Data(divHash, hash, text))
if (ok == false) {
Logger.println("S138", 10, "queue exceeded!!");
}
}
class Data(_divs: String, _hash: Int, _text: String) {
val div = _divs;
val hash = _hash;
val text = _text;
}
def open(div: String): (TextPermIndex, TextPermData) = {
try {
var indexDb = TextPermIndex.get(div);
var dataDb = TextPermData.get(div);
if (indexDb != null && dataDb != null) {
return (indexDb, dataDb);
}
this.synchronized {
val path = getDBPath();
val f = new File(path);
if (f.exists() == false)
f.mkdirs();
val file = path + "/text_" + div;
indexDb = TextPermIndex.open(div, file);
dataDb = TextPermData.open(div, file);
return (indexDb, dataDb);
}
} catch {
case e: Throwable =>
e.printStackTrace();
close();
}
return (null,null);
}
def getDBPath(): String = {
val sb = new StringBuffer();
sb.append(DBCtr.getRootPath());
sb.append("/00000000/text");
return sb.toString();
}
def close() {
TextPermIndex.closeAll();
TextPermData.closeAll();
}
}
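// Illustrative call site (hypothetical values, not part of the original file):
// TextPermWR.add(TextTypes.SERVICE, "/login".hashCode, "/login")
// stores the text once per hash in the permanent text DB and primes TextCache.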
|
scouter-project/scouter
|
scouter.server/src/main/scala/scouter/server/db/TextPermWR.scala
|
Scala
|
apache-2.0
| 3,499 |
package org.twitterist.utils.solr.schemamanager
class SchemaManagerException(msg: String = null, cause: Throwable = null) extends java.lang.Exception(msg, cause) {}
object SchemaManagerException {
def apply(msg: String) = new SchemaManagerException(msg)
def apply(msg: String, throwable: Throwable) = new SchemaManagerException(msg, throwable)
}
class SchemaOverrideException(msg: String = null, cause: Throwable = null) extends SchemaManagerException(msg, cause) {}
object SchemaOverrideException {
def apply(msg: String) = new SchemaOverrideException(msg)
def apply(msg: String, throwable: Throwable) = new SchemaOverrideException(msg, throwable)
}
|
twitterist/solr-schema-manager
|
src/main/scala/org/twitterist/utils/solr/schemamanager/SchemaManagerException.scala
|
Scala
|
mit
| 662 |
import java.util.Properties
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.phoenix.spark._
import com.vader.SentimentAnalyzer
import edu.stanford.nlp.ling.CoreAnnotations
import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import edu.stanford.nlp.sentiment.SentimentCoreAnnotations
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.serializer.KryoSerializer
import org.apache.spark.sql._
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
case class Tweet(coordinates: String, geo:String, handle: String, hashtags: String, language: String,
location: String, msg: String, time: String, tweet_id: String, unixtime: String, user_name: String, tag: String, profile_image_url: String,
source: String, place: String, friends_count: String, followers_count: String, retweet_count: String,
time_zone: String, sentiment: String, stanfordSentiment: String)
// NOTE: as published, this snippet lacks the enclosing function for the body
// below (the stray closing brace after the match belongs to it). A plausible
// signature is restored here; convert and nlpProps are helpers assumed to be
// defined elsewhere in the original project.
def detectSentiment(anyMessage: String): SENTIMENT_TYPE = {
  val message = convert(anyMessage)
val pipeline = new StanfordCoreNLP(nlpProps)
val annotation = pipeline.process(message)
var sentiments: ListBuffer[Double] = ListBuffer()
var sizes: ListBuffer[Int] = ListBuffer()
var longest = 0
var mainSentiment = 0
for (sentence <- annotation.get(classOf[CoreAnnotations.SentencesAnnotation])) {
val tree = sentence.get(classOf[SentimentCoreAnnotations.AnnotatedTree])
val sentiment = RNNCoreAnnotations.getPredictedClass(tree)
val partText = sentence.toString
if (partText.length() > longest) {
mainSentiment = sentiment
longest = partText.length()
}
sentiments += sentiment.toDouble
sizes += partText.length
}
val averageSentiment:Double = {
if(sentiments.nonEmpty) sentiments.sum / sentiments.size
else -1
}
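// Length-weighted mean: each sentence's score is scaled by its character
// count, so longer sentences contribute proportionally more to the result.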
val weightedSentiments = (sentiments, sizes).zipped.map((sentiment, size) => sentiment * size)
var weightedSentiment = weightedSentiments.sum / (sizes.fold(0)(_ + _))
if(sentiments.isEmpty) {
mainSentiment = -1
weightedSentiment = -1
}
weightedSentiment match {
case s if s <= 0.0 => NOT_UNDERSTOOD
case s if s < 1.0 => VERY_NEGATIVE
case s if s < 2.0 => NEGATIVE
case s if s < 3.0 => NEUTRAL
case s if s < 4.0 => POSITIVE
case s if s < 5.0 => VERY_POSITIVE
case _ => NOT_UNDERSTOOD // also covers s == 5.0, which the original guard (s > 5.0) missed
}
}
trait SENTIMENT_TYPE
case object VERY_NEGATIVE extends SENTIMENT_TYPE
case object NEGATIVE extends SENTIMENT_TYPE
case object NEUTRAL extends SENTIMENT_TYPE
case object POSITIVE extends SENTIMENT_TYPE
case object VERY_POSITIVE extends SENTIMENT_TYPE
case object NOT_UNDERSTOOD extends SENTIMENT_TYPE
|
tspannhw/nlp-utilities
|
stanford.scala
|
Scala
|
apache-2.0
| 2,784 |
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.bsp.codec
import _root_.sjsonnew.{ Unbuilder, Builder, JsonFormat, deserializationError }
trait DebugSessionAddressFormats { self: sjsonnew.BasicJsonProtocol =>
implicit lazy val DebugSessionAddressFormat: JsonFormat[sbt.internal.bsp.DebugSessionAddress] = new JsonFormat[sbt.internal.bsp.DebugSessionAddress] {
override def read[J](__jsOpt: Option[J], unbuilder: Unbuilder[J]): sbt.internal.bsp.DebugSessionAddress = {
__jsOpt match {
case Some(__js) =>
unbuilder.beginObject(__js)
val uri = unbuilder.readField[java.net.URI]("uri")
unbuilder.endObject()
sbt.internal.bsp.DebugSessionAddress(uri)
case None =>
deserializationError("Expected JsObject but found None")
}
}
override def write[J](obj: sbt.internal.bsp.DebugSessionAddress, builder: Builder[J]): Unit = {
builder.beginObject()
builder.addField("uri", obj.uri)
builder.endObject()
}
}
}
|
xuwei-k/xsbt
|
protocol/src/main/contraband-scala/sbt/internal/bsp/codec/DebugSessionAddressFormats.scala
|
Scala
|
apache-2.0
| 1,063 |
/**
* Digi-Lib - base library for Digi components
*
* Copyright (c) 2012-2013 Alexey Aksenov [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.digi.lib.cache
import org.digimead.digi.lib.DependencyInjection
import org.digimead.digi.lib.aop.{ Caching ⇒ AOPCaching }
import org.digimead.lib.test.LoggingHelper
import org.scalatest.ConfigMap
import org.scalatest.WordSpec
import org.scalatest.Matchers
import com.escalatesoft.subcut.inject.NewBindingModule
class CacheSpec000 extends CacheSpec.Base {
"A Cache Singleton" should {
"be persistent" in {
val config = org.digimead.digi.lib.cache.default ~ org.digimead.digi.lib.default
DependencyInjection(config)
config.inject[Caching](None) should be theSameInstanceAs (config.inject[Caching](None))
val caching1 = AOPCaching.inner
val caching2 = AOPCaching.inner
caching1 should be theSameInstanceAs (caching2)
caching1.inner should be theSameInstanceAs (caching2.inner)
caching1.ttl should equal(caching2.ttl)
DependencyInjection(config ~ (NewBindingModule.newBindingModule(module ⇒ {})), false)
val caching3 = AOPCaching.inner
caching1 should be theSameInstanceAs (caching3)
caching2 should be theSameInstanceAs (caching3)
}
}
}
class CacheSpec001 extends CacheSpec.Base {
"A Cache Singleton" should {
"create instance with default parameters" in {
val config = org.digimead.digi.lib.cache.default ~ org.digimead.digi.lib.default
DependencyInjection(config)
val instance = AOPCaching.inner
instance.inner should not be (null)
instance.ttl should be(org.digimead.digi.lib.cache.default.inject[Long](Some("Cache.TTL")))
}
}
}
class CacheSpec002 extends CacheSpec.Base {
"A Cache Singleton" should {
"create instance with apropriate parameters" in {
val innerCacheImplementation = new NilCache[String, Any]
DependencyInjection(new NewBindingModule(module ⇒ {
module.bind[Cache[String, Any]] identifiedBy "Cache.Engine" toSingle { innerCacheImplementation }
module.bind[Long] identifiedBy "Cache.TTL" toSingle { 70L }
module.bind[Caching] identifiedBy "Cache.Instance" toModuleSingle { implicit module ⇒ new Caching }
}) ~ org.digimead.digi.lib.cache.default ~ org.digimead.digi.lib.default)
val instance = AOPCaching.inner
instance.ttl should be(70)
instance.inner should be(innerCacheImplementation)
}
}
}
object CacheSpec {
trait Base extends WordSpec with LoggingHelper with Matchers {
override def beforeAll(configMap: ConfigMap) { adjustLoggingBeforeAll(configMap) }
}
}
|
ezh/digi-lib
|
src/test/scala/org/digimead/digi/lib/cache/CacheSpec.scala
|
Scala
|
apache-2.0
| 3,191 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2014 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.latrobe.blaze.modules.raw
import edu.latrobe._
import edu.latrobe.blaze._
import edu.latrobe.blaze.modules._
import edu.latrobe.io.FileTensor
import edu.latrobe.sizes._
import edu.latrobe.io.image._
final class DecodeBitmaps(override val builder: DecodeBitmapsBuilder,
override val inputHints: BuildHints,
override val seed: InstanceSeed,
override val weightBufferBuilder: ValueTensorBufferBuilder)
extends RawLayer[DecodeBitmapsBuilder]
with NonTrainableLayer[DecodeBitmapsBuilder]
with NonPenalizing {
override protected def doPredict(input: RawTensor)
: BitmapTensor = {
val result = input match {
case input: FileTensor =>
input.mapSampleHandles(Bitmap.decode)
case _ =>
input.mapSampleBytes(Bitmap.decode)
}
BitmapTensor(result)
}
}
final class DecodeBitmapsBuilder
extends RawLayerBuilder[DecodeBitmapsBuilder] {
override def repr
: DecodeBitmapsBuilder = this
override def canEqual(that: Any)
: Boolean = that.isInstanceOf[DecodeBitmapsBuilder]
override protected def doCopy()
: DecodeBitmapsBuilder = DecodeBitmapsBuilder()
// ---------------------------------------------------------------------------
// Weights / Building related.
// ---------------------------------------------------------------------------
override def weightLayoutFor(hints: BuildHints,
builder: TensorLayoutBufferBuilder)
: BuildHints = outputHintsFor(hints)
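  // Illustrative note: a 2D size hint keeps its spatial extent but is forced to
  // 3 channels (decoded RGB); any other hint collapses to noTuples x 1 x 3.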
def outputSizeFor(sizeHint: Size)
: Size2 = sizeHint match {
case sizeHint: Size2 =>
sizeHint.withNoChannels(3)
case _ =>
Size2(sizeHint.noTuples, 1, 3)
}
def outputLayoutFor(layoutHint: TensorLayout)
: IndependentTensorLayout = layoutHint.derive(outputSizeFor(layoutHint.size))
override def outputHintsFor(hints: BuildHints)
: BuildHints = {
    val layout = outputLayoutFor(hints.layout)
hints.derive(JVM, layout)
}
override def build(hints: BuildHints,
seed: InstanceSeed,
weightsBuilder: ValueTensorBufferBuilder)
: DecodeBitmaps = new DecodeBitmaps(this, hints, seed, weightsBuilder)
}
object DecodeBitmapsBuilder {
final def apply()
: DecodeBitmapsBuilder = new DecodeBitmapsBuilder
}
|
bashimao/ltudl
|
blaze/src/main/scala/edu/latrobe/blaze/modules/raw/DecodeBitmaps.scala
|
Scala
|
apache-2.0
| 3,080 |
package fm.common.rich
import fm.common.OptionCache
final class RichSomeObject(val module: Some.type) extends AnyVal {
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Boolean): Some[Boolean] = OptionCache.valueOf(v)
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Byte): Some[Byte] = OptionCache.valueOf(v)
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Char): Some[Char] = OptionCache.valueOf(v)
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Short): Some[Short] = OptionCache.valueOf(v)
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Int): Some[Int] = OptionCache.valueOf(v)
/** Returns a cached copy of the Option instance (if available) to avoid allocation */
def cached(v: Long): Some[Long] = OptionCache.valueOf(v)
}
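// Hypothetical usage sketch: with fm.common's enrichment implicits in scope
// (assumed, not shown in this file), Some.cached(true) returns the shared
// Some(true) instance instead of allocating a new one.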
|
frugalmechanic/fm-common
|
shared/src/main/scala/fm/common/rich/RichSomeObject.scala
|
Scala
|
apache-2.0
| 1,023 |
package util
package config
import play.api.Application
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicBoolean
import java.io.File
import scala.collection.JavaConverters._
object Registry {
private val registered = new ConcurrentHashMap[String,Configurable]()
private val initialized = new AtomicBoolean(false)
private val logger = play.api.Logger(getClass)
def add(name: String, config: Configurable) {
logger.info("Registered %s".format(name))
registered.put(name, config)
}
def onChange(config: TypesafeConfiguration) {
registered.values.asScala.foreach { c => c.onChange(config) }
}
def shutdown() {
ConfigWatch.stop()
}
/**
   * Here we discover all classes extending Configurable and call their once() method, which brings
* that singleton instance to life, and in the process handles basic initialization and ensures
* registration. Thus when validate is called, all classes extending Configurable are validated.
*/
def initializeAll(app: Application) {
if (initialized.compareAndSet(false, true)) {
ConfigWatch.start
logger.info("Initializing all subclasses")
getSubclassesOfConfigurable(app).foreach { k =>
val klassName = getNormalizedClassName(k.getName)
logger.info("Initializing %s".format(klassName))
getClassFromName(klassName).foreach { klass =>
callOnceOnClass(klass)
}
}
} else {
logger.warn("Already initialized")
}
}
def validate() {
registered.values.asScala.foreach { c => c.initialize() }
}
  // Given a class, attempt to call its `once` method to trigger initialization
  protected def callOnceOnClass(klass: Class[_]) = try {
    val meth = klass.getDeclaredMethod("once")
    meth.invoke(null)
  } catch {
    case e: Exception =>
      logger.info("Error calling once method on %s: %s".format(klass.getName, e.getMessage), e)
  }
// Given a class name return the class
protected def getClassFromName(name: String) = try {
Some(Class.forName(name))
} catch {
    case e: Exception =>
logger.info("Class name %s is invalid".format(name))
None
}
// Convert a possible companion object name to a reflection friendly name
protected def getNormalizedClassName(name: String): String = name.replace("$", "")
// Return a Set of classes extending util.config.Configurable
protected def getSubclassesOfConfigurable(app: Application) = {
app.configuration.underlying.getStringList("config.validations").asScala.toSet[String].map { k =>
Class.forName(k)
}
}
}
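// Illustrative sketch (hypothetical; the real Configurable contract is
// defined elsewhere in this repo): a singleton listed under
// config.validations is discovered by initializeAll, which reflectively
// invokes the static `once` forwarder the compiler generates for it:
//
//   object MyFeatureConfig extends Configurable {
//     def once() = this  // forces construction, which registers the instance
//   }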
|
Shopify/collins
|
app/util/config/Registry.scala
|
Scala
|
apache-2.0
| 2,576 |
package org.apache.mesos.chronos.scheduler.api
import org.apache.mesos.chronos.scheduler.jobs.JobScheduler
import javax.ws.rs.core.{MediaType, Response}
import javax.ws.rs.{GET, Path, Produces}
import com.google.inject.Inject
@Path(PathConstants.getLeaderPath)
@Produces(Array(MediaType.APPLICATION_JSON))
class LeaderResource @Inject()(
val jobScheduler: JobScheduler
) {
@GET
def getLeader(): Response = {
Response.ok(Map("leader" -> jobScheduler.getLeader)).build
}
}
|
hamo/chronos
|
src/main/scala/org/apache/mesos/chronos/scheduler/api/LeaderRestModule.scala
|
Scala
|
apache-2.0
| 486 |
package chapter08
object Exercise3 extends App {
class Game(i: Int) { println("The constructor of Game") }
class BoardGame(i: Int) extends Game(i) { println("The constructor of BoardGame") }
class Chess() extends BoardGame(11) { println("The constructor of Chess") }
new Chess()
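  // Expected output: superclass constructors always run first, so this prints
  //   The constructor of Game
  //   The constructor of BoardGame
  //   The constructor of Chess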
}
|
vsuharnikov/books-exercises
|
scala/scala-for-the-impatient/src/main/scala/chapter08/Exercise3.scala
|
Scala
|
mit
| 293 |
/*
* Copyright 2017-2018 Azad Bolour
* Licensed under GNU Affero General Public License v3.0 -
* https://github.com/azadbolour/boardgame/blob/master/LICENSE.md
*/
package com.bolour.boardgame.scala.server.service
import javax.inject.Inject
import akka.actor.ActorSystem
import org.slf4j.LoggerFactory
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/**
 * Periodically harvests long-running games, treating them as abandoned.
*
* @param actorSystem Needed for scheduling harvesting actor.
* @param service The game service.
* @param executionContext Context in which to run the harvester.
*/
class GameHarvester @Inject() (actorSystem: ActorSystem, service: GameService)(implicit executionContext: ExecutionContext) {
actorSystem.scheduler.schedule(initialDelay = 10.minutes, interval = 10.minutes) {
service.timeoutLongRunningGames()
}
}
|
azadbolour/boardgame
|
scala-server/app/com/bolour/boardgame/scala/server/service/GameHarvester.scala
|
Scala
|
agpl-3.0
| 906 |
import com.bowlingx.scalablog._
import org.scalatra._
import javax.servlet.ServletContext
class ScalatraBootstrap extends LifeCycle {
override def init(context: ServletContext) {
context.mount(new ScalablogServlet, "/*")
}
}
|
BowlingX/scalablog
|
src/main/scala/ScalatraBootstrap.scala
|
Scala
|
apache-2.0
| 234 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx
/**
* Represents the way edges are assigned to edge partitions based on their source and destination
* vertex IDs.
*/
trait PartitionStrategy extends Serializable {
/** Returns the partition number for a given edge. */
def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID
}
/**
* Collection of built-in [[PartitionStrategy]] implementations.
*/
object PartitionStrategy {
/**
* Assigns edges to partitions using a 2D partitioning of the sparse edge adjacency matrix,
* guaranteeing a `2 * sqrt(numParts)` bound on vertex replication.
*
* Suppose we have a graph with 12 vertices that we want to partition
* over 9 machines. We can use the following sparse matrix representation:
*
* <pre>
* __________________________________
* v0 | P0 * | P1 | P2 * |
* v1 | **** | * | |
* v2 | ******* | ** | **** |
* v3 | ***** | * * | * |
* ----------------------------------
* v4 | P3 * | P4 *** | P5 ** * |
* v5 | * * | * | |
* v6 | * | ** | **** |
* v7 | * * * | * * | * |
* ----------------------------------
* v8 | P6 * | P7 * | P8 * *|
* v9 | * | * * | |
* v10 | * | ** | * * |
* v11 | * <-E | *** | ** |
* ----------------------------------
* </pre>
*
* The edge denoted by `E` connects `v11` with `v1` and is assigned to processor `P6`. To get the
* processor number we divide the matrix into `sqrt(numParts)` by `sqrt(numParts)` blocks. Notice
* that edges adjacent to `v11` can only be in the first column of blocks `(P0, P3,
* P6)` or the last
* row of blocks `(P6, P7, P8)`. As a consequence we can guarantee that `v11` will need to be
* replicated to at most `2 * sqrt(numParts)` machines.
*
* Notice that `P0` has many edges and as a consequence this partitioning would lead to poor work
* balance. To improve balance we first multiply each vertex id by a large prime to shuffle the
* vertex locations.
*
* When the number of partitions requested is not a perfect square we use a slightly different
* method where the last column can have a different number of rows than the others while still
* maintaining the same size per block.
*/
case object EdgePartition2D extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
val ceilSqrtNumParts: PartitionID = math.ceil(math.sqrt(numParts)).toInt
val mixingPrime: VertexId = 1125899906842597L
if (numParts == ceilSqrtNumParts * ceilSqrtNumParts) {
        // Use old method for perfect squares to ensure we get the same results
val col: PartitionID = (math.abs(src * mixingPrime) % ceilSqrtNumParts).toInt
val row: PartitionID = (math.abs(dst * mixingPrime) % ceilSqrtNumParts).toInt
(col * ceilSqrtNumParts + row) % numParts
} else {
// Otherwise use new method
val cols = ceilSqrtNumParts
val rows = (numParts + cols - 1) / cols
val lastColRows = numParts - rows * (cols - 1)
val col = (math.abs(src * mixingPrime) % numParts / rows).toInt
val row = (math.abs(dst * mixingPrime) % (if (col < cols - 1) rows else lastColRows)).toInt
col * rows + row
}
}
}
/**
* Assigns edges to partitions using only the source vertex ID, colocating edges with the same
* source.
*/
case object EdgePartition1D extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
val mixingPrime: VertexId = 1125899906842597L
(math.abs(src * mixingPrime) % numParts).toInt
}
}
/**
* Assigns edges to partitions by hashing the source and destination vertex IDs, resulting in a
* random vertex cut that colocates all same-direction edges between two vertices.
*/
case object RandomVertexCut extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
math.abs((src, dst).hashCode()) % numParts
}
}
/**
* Assigns edges to partitions by hashing the source and destination vertex IDs in a canonical
* direction, resulting in a random vertex cut that colocates all edges between two vertices,
* regardless of direction.
*/
case object CanonicalRandomVertexCut extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
if (src < dst) {
math.abs((src, dst).hashCode()) % numParts
} else {
math.abs((dst, src).hashCode()) % numParts
}
}
}
/** Returns the PartitionStrategy with the specified name. */
def fromString(s: String): PartitionStrategy = s match {
case "RandomVertexCut" => RandomVertexCut
case "EdgePartition1D" => EdgePartition1D
case "EdgePartition2D" => EdgePartition2D
case "CanonicalRandomVertexCut" => CanonicalRandomVertexCut
case _ => throw new IllegalArgumentException("Invalid PartitionStrategy: " + s)
}
}
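// Worked example (illustrative, not part of Spark): with numParts = 9 the 2D
// strategy takes the perfect-square branch, so ceilSqrtNumParts = 3. The
// mixing prime is congruent to 1 mod 3, hence for the edge (src = 11, dst = 1):
//   col = (11 * 1125899906842597L) % 3 = 2
//   row = ( 1 * 1125899906842597L) % 3 = 1
//   partition = (col * 3 + row) % 9 = 7
private[graphx] object EdgePartition2DExample {
  val partition: PartitionID =
    PartitionStrategy.EdgePartition2D.getPartition(11L, 1L, 9) // == 7
}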
|
akopich/spark
|
graphx/src/main/scala/org/apache/spark/graphx/PartitionStrategy.scala
|
Scala
|
apache-2.0
| 6,122 |
package org.opencompare.io.bestbuy
import java.io.{FileReader, File}
import com.github.tototoshi.csv.CSVReader
import scala.io.Source
import scala.xml
import scala.xml.XML
/**
* Created by gbecan on 4/7/15.
*/
class ProductInfoLoader {
def load(overview : File, specification : File, xmlDescription : File) : ProductInfo = {
val productInfo = new ProductInfo
// TODO : productInfo.sku
// TODO : productInfo.name
// FIXME : not really deserializing the product info but it should be close enough for the experiment
// Read overview
// for (feature <- Source.fromFile(overview).getLines()) {
// productInfo.addFeature(feature)
// }
// Read specification
val csvReader = CSVReader.open(specification)
val detailNames = csvReader.readNext().get
val detailValues = csvReader.readNext().get
val details = detailNames.zip(detailValues)
for ((name, value) <- details) {
productInfo.addDetail(name, value)
}
// Read XML description
productInfo.completeXMLDescription = XML.loadFile(xmlDescription)
productInfo
}
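  // e.g. (illustrative) a specification CSV whose first row holds detail
  // names and whose second row holds their values:
  //   Brand,Screen Size
  //   Acme,15.6
  // produces details Seq(("Brand", "Acme"), ("Screen Size", "15.6")).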
}
|
gbecan/OpenCompare
|
org.opencompare/io-best-buy/src/main/scala/org/opencompare/io/bestbuy/ProductInfoLoader.scala
|
Scala
|
apache-2.0
| 1,103 |
package com.github.fntzr.spray.routing.ext
import scala.language.experimental.macros
import scala.reflect.macros.Context
import spray.routing._
import scala.language.implicitConversions
import spray.routing.PathMatchers._
import spray.routing.PathMatcher1
/**
* Object for transformation String* into List
* {{{
* exclude("index")
* }}}
*/
object exclude {
def apply(xs: String*) = xs.toList
}
/*
def resource[C, M](exclude: List[String]) = macro RoutableImpl.resource0Impl[C, M]
def resource[C, M](block: Route) = macro RoutableImpl.resource1Impl[C, M]
def resource[C, M](exclude: List[String], block: Route) = macro RoutableImpl.resourceImpl[C, M]
def resource[C, M] = macro RoutableImpl.resource4Impl[C, M]
subroute:
def resource[C, M](exclude: List[String], sub: PathMatcher1[_])
def resource[C, M](sub: PathMatcher1[_], block: Route)
def resource[C, M](exclude: List[String], sub: PathMatcher1[_], block: Route)
*/
/**
 * Trait containing methods for the resource implementation.
 * With the `resource` method you can quickly create routes for your controller and map form information onto a Model.
* {{{
* resource[Controller, Model]
* }}}
* transform to
* {{{
* pathPrefix("model") {
* //methods for controller.index
* // controller.show
* // ...
* }
* }}}
 * Note: for the `new` action, name the controller method `fresh`.
*/
trait Routable extends HttpService with HttpMethods with HttpHelpers with Helpers with HttpFormSupport {
  /** Define routes without the excluded actions.
* {{{
* resource[Controller, Model](exclude("index", "show", "new"))
* }}}
* @param exclude - list with excluded methods (index, show, ...)
   * @tparam C - your controller
   * @tparam M - your model
* @return Route
*/
def resource[C, M](exclude: List[String]) = macro RoutableImpl.resource0Impl[C, M]
/** Define routes with nested block
* {{{
* resource[Controller, Model] {
* get0[Controller]("other")
* }
* }}}
* @param block Route - block with nested routes
   * @tparam C - your controller
   * @tparam M - your model
* @return Route
*/
def resource[C, M](block: Route) = macro RoutableImpl.resource1Impl[C, M]
  /** Define routes without the excluded actions, plus a nested block
* {{{
* resource[Controller, Model](exclude("index"), {
* get0[Controller]("other")
* })
* }}}
* @param exclude - excluded actions
* @param block Route - nested block
   * @tparam C - your controller
   * @tparam M - your model
* @return Route
*/
def resource[C, M](exclude: List[String], block: Route) = macro RoutableImpl.resourceImpl[C, M]
/** Simple define routes
* {{{
* resource[Controller, Model]
* }}}
   * @tparam C - your controller
   * @tparam M - your model
* @return Route
*/
def resource[C, M] = macro RoutableImpl.resource4Impl[C, M]
/**
* Define resource with specified subroute
*
* {{{
* resource[Controller, Model](exclude("show"), Segment)
* }}}
*
* @param exclude - list of excluded methods
   * @param sub - part of the route, which will be passed into the controller method and used to create a URL
* @tparam C - Controller
* @tparam M - Model
* @return Route
*/
def resource[C, M](exclude: List[String], sub: PathMatcher1[_]): Route = macro RoutableImpl.resource5Impl[C, M]
/**
   * Define resource with subroute and nested block
*
* {{{
* resource[Controller, Model](IntNumber, {
* pathPrefix("/path") {
* get { complete { "path" } }
* }
* }
* }}}
*
   * @param sub - part of the route, which will be passed into the controller method and used to create a URL
   * @param block - nested route block
* @tparam C - Controller
* @tparam M - Model
* @return Route
*/
def resource[C, M](sub: PathMatcher1[_], block: Route): Route = macro RoutableImpl.resource6Impl[C, M]
/**
* Define resource with subroute, block and excluded methods.
*
* {{{
   * resource[Controller, Model](exclude("show"), {
* pathPrefix("/path") {
* complete{ "path" }
* }
* },
* IntNumber)
* }}}
*
* @param exclude - list of excluded methods
* @param block - nested route block
   * @param sub - part of the route, which will be passed into the controller method and used to create a URL
   * @tparam C - Controller
   * @tparam M - Model
   * @return Route
*/
def resource[C, M](exclude: List[String], block: Route, sub: PathMatcher1[_]): Route = macro RoutableImpl.resource7Impl[C, M]
}
/** Object, which contatain resource implementation.
*
*/
private [ext] object RoutableImpl {
import spray.routing.Route
def resource7Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(exclude: c.Expr[List[String]], block: c.Expr[Route], sub: c.Expr[PathMatcher1[_]]): c.Expr[Route] = {
import c.universe._
val startPath = convertToPath(s"${c.weakTypeOf[M].typeSymbol.name.toString}")
val result = getRoute[C, M](c)(exclude, sub)
val route = result match {
case Some(x) =>
q"""
pathPrefix($startPath) {
$x ~
$block
}
"""
case None =>
q"""
pathPrefix($startPath) {
$block
}
"""
}
c.Expr[Route](route)
}
def resource6Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(sub: c.Expr[PathMatcher1[_]], block: c.Expr[Route]): c.Expr[Route] = {
import c.universe._
val route = q"""resource[${c.weakTypeOf[C]}, ${c.weakTypeOf[M]}](List[String](), $block, $sub)"""
c.Expr[Route](route)
}
def resource5Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(exclude: c.Expr[List[String]], sub: c.Expr[PathMatcher1[_]]): c.Expr[Route] = {
import c.universe._
val startPath = convertToPath(s"${c.weakTypeOf[M].typeSymbol.name.toString}")
val result = getRoute[C, M](c)(exclude, sub)
if (result.isEmpty) {
c.error(c.enclosingPosition, s"resource should have a Route type")
}
val route = q"""
pathPrefix($startPath) {
${result.get}
}
"""
c.Expr[Route](route)
}
def resource4Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context): c.Expr[Route] = {
import c.universe._
val route = q"""resource[${c.weakTypeOf[C]}, ${c.weakTypeOf[M]}](List[String]())"""
c.Expr[Route](route)
}
def resource1Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(block: c.Expr[Route]): c.Expr[Route] = {
import c.universe._
val route = q"""resource[${c.weakTypeOf[C]}, ${c.weakTypeOf[M]}](List[String](), $block)"""
c.Expr[Route](route)
}
def resource0Impl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(exclude: c.Expr[List[String]]): c.Expr[Route] = {
import c.universe._
val startPath = convertToPath(s"${c.weakTypeOf[M].typeSymbol.name.toString}")
val result = getRoute[C, M](c)(exclude, c.Expr[PathMatcher1[_]](q"IntNumber"))
if (result.isEmpty) {
c.error(c.enclosingPosition, s"resource should have a Route type")
}
val route = q"""
pathPrefix($startPath) {
${result.get}
}
"""
c.Expr[Route](route)
}
def resourceImpl[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(exclude: c.Expr[List[String]], block: c.Expr[Route]): c.Expr[Route] = {
import c.universe._
val startPath = convertToPath(s"${c.weakTypeOf[M].typeSymbol.name.toString}")
val result = getRoute[C, M](c)(exclude, c.Expr[PathMatcher1[_]](q"IntNumber"))
val route = result match {
case Some(x) =>
q"""
pathPrefix($startPath) {
$x ~
$block
}
"""
case None =>
q"""
pathPrefix($startPath) {
$block
}
"""
}
c.Expr[Route](route)
}
private def convertToPath(s: String) = {
val r = "[A-Z]".r
(r replaceAllIn(s"${s.head.toString.toLowerCase}${s.tail}", "-$0")).toLowerCase
}
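  // e.g. convertToPath("BoardGame") == "board-game": the head is lowercased,
  // each remaining uppercase letter is prefixed with "-", and the whole
  // result is lowercased.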
private def getRoute[C: c.WeakTypeTag, M: c.WeakTypeTag](c: Context)
(exclude: c.Expr[List[String]], sub: c.Expr[PathMatcher1[_]]):Option[c.Expr[Route]] = {
import c.universe._
val params = c.weakTypeOf[M].declarations.collect {
case x: MethodSymbol if x.isConstructor =>
x.paramss.map(_.map(_.asTerm))
}.flatMap(_.flatten)
if (params.exists(_.isParamWithDefault)) {
      c.warning(c.enclosingPosition, s"Class `${c.weakTypeOf[M]}` has a parameter with a default value!")
}
val list = exclude.tree.collect {
case Literal(Constant(x)) => s"$x"
}.toList
val paramNames = params.map(_.name.toString).map(Symbol(_))
val extract = paramNames.zip(params.map(_.typeSignature)).map{
case (s, t) =>
if (t.<:<(typeOf[Option[_]]))
q"${s}.?"
else
q"${s}.as[$t]"
}.toList
if (extract.isEmpty) {
      c.abort(c.enclosingPosition, s"Model `${c.weakTypeOf[M]}` should have parameters!")
}
val model = newTermName(s"${c.weakTypeOf[M].typeSymbol.name}")
val controller = c.weakTypeOf[C]
val show = q"""get0[$controller]($sub ~> "show")"""
val index = q"""get0[$controller]("index")"""
val edit = q"""get0[$controller](($sub / "edit") ~> "edit")"""
val update = q"""put0[$controller]($sub ~> "update")"""
val delete = q"""delete0[$controller]($sub ~> "delete")"""
val (sum: List[ValDef], names: List[Ident]) = HelpersImpl.extractValuesFromOuterMethod(c)
val anonClassName = newTypeName(c.fresh("Controller"))
val create = q"""
post {
requestInstance { request0 =>
case class $anonClassName(..$sum) extends ${c.weakTypeOf[C]}
val controller = new $anonClassName(..$names)
formFields(..$extract).as($model) { (model) =>
controller.create(model)
}
}
}
"""
val fresh = q"""get0[$controller]("new" ~> "fresh")"""
val original0 = List(
("new", fresh), ("index", index), ("create", create)
)
val original1 = List(
("edit", edit), ("show", show), ("update", update), ("delete", delete)
)
val exclude0 = original0.filter { x => list.contains(x._1)}
val exclude1 = original1.filter { x => list.contains(x._1)}
val resultForBlock = (original1 diff exclude1) map(_._2)
val resultOutBlock = (original0 diff exclude0) map(_._2)
val route0 = if(resultForBlock.isEmpty) {
None
} else {
val sum = resultForBlock.reduce((a,b) => q"$a ~ $b")
Some(q"""
overrideMethodWithParameter("_method") {
$sum
}
""")
}
val route1 = if(resultOutBlock.isEmpty) {
None
} else {
Some(resultOutBlock.reduce((a,b) => q"$a ~ $b"))
}
val route = (route0, route1) match {
case (Some(a), Some(b)) => Some(c.Expr[Route](q"$a ~ $b"))
case (Some(a), None) => Some(c.Expr[Route](q"$a"))
case (None, Some(a)) => Some(c.Expr[Route](q"$a"))
case (None, None) => None
}
route
}
}
|
fntz/spray-routing-ext
|
src/main/scala/Routable.scala
|
Scala
|
mit
| 11,157 |
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.mappings.FieldType.NestedType
import org.scalatest.{FreeSpec, Matchers}
import com.sksamuel.elastic4s.testkit.ElasticSugar
import org.elasticsearch.common.text.Text
import org.elasticsearch.search.highlight.HighlightField
class NestedQueryTest extends FreeSpec with Matchers with ElasticSugar {
client.execute {
create index "nested" mappings {
"show" as {
"actor" typed NestedType
}
}
}.await
client.execute(
index into "nested/show" fields (
"name" -> "game of thrones",
"actor" -> Seq(
Map("name" -> "peter dinklage", "birthplace" -> "Morristown"),
Map("name" -> "pedro pascal", "birthplace" -> "Santiago")
)
)
).await
refresh("nested")
blockUntilCount(1, "nested")
"nested object" - {
"should be searchable by nested field" in {
val resp1 = client.execute {
search in "nested/show" query nestedQuery("actor").query(termQuery("actor.name" -> "dinklage"))
}.await
resp1.totalHits shouldEqual 1
val resp2 = client.execute {
search in "nested/show" query nestedQuery("actor").query(termQuery("actor.name" -> "simon"))
}.await
resp2.totalHits shouldEqual 0
}
}
"nested object" - {
"should be presented in highlighting" in {
val resp1 = client.execute {
search in "nested/show" query nestedQuery("actor").query(termQuery("actor.name" -> "dinklage")).inner {
innerHits("actor").highlighting(highlight.field("actor.name").matchedFields("actor.name").fragmentSize(20))
}
}.await
resp1.totalHits shouldEqual 1
val maybeHits = resp1.hits(0).innerHits.get("actor")
maybeHits.isDefined shouldBe true
maybeHits.get.getTotalHits shouldEqual 1
val fields = maybeHits.get.getAt(0).highlightFields
fields.containsKey("actor.name") shouldBe true
val fragments = fields.get("actor.name").fragments()
fragments.length shouldBe 1
fragments(0).string shouldBe "peter <em>dinklage</em>"
}
}
"nested object" - {
"should have correct inner hit source" in {
val resp1 = client.execute {
search in "nested/show" query nestedQuery("actor").query(termQuery("actor.name" -> "dinklage")).inner {
innerHits("actor").sourceExclude("birthplace")
}
}.await
resp1.totalHits shouldEqual 1
val maybeInnerHits = resp1.hits(0).innerHits.get("actor")
maybeInnerHits.isDefined shouldBe true
maybeInnerHits.get.getTotalHits shouldEqual 1
maybeInnerHits.get.getAt(0).sourceAsMap.containsKey("birthplace") shouldBe false
maybeInnerHits.get.getAt(0).sourceAsMap.containsKey("name") shouldBe true
}
}
}
|
sjoerdmulder/elastic4s
|
elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/NestedQueryTest.scala
|
Scala
|
apache-2.0
| 2,808 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010, 2011 Mark Harrah
*/
package sbt.internal.util
import sbt.util._
import java.io.{ BufferedWriter, PrintStream, PrintWriter }
import java.util.Locale
object ConsoleLogger {
@deprecated("Moved to ConsoleOut", "0.13.0")
def systemOut: ConsoleOut = ConsoleOut.systemOut
@deprecated("Moved to ConsoleOut", "0.13.0")
def overwriteContaining(s: String): (String, String) => Boolean = ConsoleOut.overwriteContaining(s)
@deprecated("Moved to ConsoleOut", "0.13.0")
def systemOutOverwrite(f: (String, String) => Boolean): ConsoleOut = ConsoleOut.systemOutOverwrite(f)
@deprecated("Moved to ConsoleOut", "0.13.0")
def printStreamOut(out: PrintStream): ConsoleOut = ConsoleOut.printStreamOut(out)
@deprecated("Moved to ConsoleOut", "0.13.0")
def printWriterOut(out: PrintWriter): ConsoleOut = ConsoleOut.printWriterOut(out)
@deprecated("Moved to ConsoleOut", "0.13.0")
  def bufferedWriterOut(out: BufferedWriter): ConsoleOut = ConsoleOut.bufferedWriterOut(out)
/** Escape character, used to introduce an escape sequence. */
  final val ESC = '\u001B'
/**
* An escape terminator is a character in the range `@` (decimal value 64) to `~` (decimal value 126).
* It is the final character in an escape sequence.
*
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
*/
private[sbt] def isEscapeTerminator(c: Char): Boolean =
c >= '@' && c <= '~'
/**
* Test if the character AFTER an ESC is the ANSI CSI.
*
* see: http://en.wikipedia.org/wiki/ANSI_escape_code
*
* The CSI (control sequence instruction) codes start with ESC + '['. This is for testing the second character.
*
   * There is an additional CSI (one character) that we could test for, but is not frequently used, and we don't
* check for it.
*
* cf. http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
*/
private def isCSI(c: Char): Boolean = c == '['
/**
* Tests whether or not a character needs to immediately terminate the ANSI sequence.
*
* c.f. http://en.wikipedia.org/wiki/ANSI_escape_code#Sequence_elements
*/
private def isAnsiTwoCharacterTerminator(c: Char): Boolean =
(c >= '@') && (c <= '_')
/**
* Returns true if the string contains the ESC character.
*
* TODO - this should handle raw CSI (not used much)
*/
def hasEscapeSequence(s: String): Boolean =
s.indexOf(ESC) >= 0
/**
* Returns the string `s` with escape sequences removed.
* An escape sequence starts with the ESC character (decimal value 27) and ends with an escape terminator.
* @see isEscapeTerminator
*/
def removeEscapeSequences(s: String): String =
if (s.isEmpty || !hasEscapeSequence(s))
s
else {
val sb = new java.lang.StringBuilder
nextESC(s, 0, sb)
sb.toString
}
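  // For example (illustrative), with s = ESC + "[31mred" + ESC + "[0m",
  // removeEscapeSequences(s) returns "red": '[' marks each sequence as a
  // CSI and the final 'm' falls inside the '@'..'~' terminator range.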
private[this] def nextESC(s: String, start: Int, sb: java.lang.StringBuilder): Unit = {
val escIndex = s.indexOf(ESC, start)
if (escIndex < 0) {
sb.append(s, start, s.length)
()
} else {
sb.append(s, start, escIndex)
val next: Int =
// If it's a CSI we skip past it and then look for a terminator.
if (isCSI(s.charAt(escIndex + 1))) skipESC(s, escIndex + 2)
else if (isAnsiTwoCharacterTerminator(s.charAt(escIndex + 1))) escIndex + 2
else {
// There could be non-ANSI character sequences we should make sure we handle here.
skipESC(s, escIndex + 1)
}
nextESC(s, next, sb)
}
}
/** Skips the escape sequence starting at `i-1`. `i` should be positioned at the character after the ESC that starts the sequence. */
private[this] def skipESC(s: String, i: Int): Int = {
if (i >= s.length) {
i
} else if (isEscapeTerminator(s.charAt(i))) {
i + 1
} else {
skipESC(s, i + 1)
}
}
val formatEnabled =
{
import java.lang.Boolean.{ getBoolean, parseBoolean }
val value = System.getProperty("sbt.log.format")
if (value eq null) (ansiSupported && !getBoolean("sbt.log.noformat")) else parseBoolean(value)
}
private[this] def jline1to2CompatMsg = "Found class jline.Terminal, but interface was expected"
private[this] def ansiSupported =
try {
val terminal = jline.TerminalFactory.get
terminal.restore // #460
terminal.isAnsiSupported
} catch {
case e: Exception => !isWindows
// sbt 0.13 drops JLine 1.0 from the launcher and uses 2.x as a normal dependency
// when 0.13 is used with a 0.12 launcher or earlier, the JLine classes from the launcher get loaded
// this results in a linkage error as detected below. The detection is likely jvm specific, but the priority
// is avoiding mistakenly identifying something as a launcher incompatibility when it is not
case e: IncompatibleClassChangeError if e.getMessage == jline1to2CompatMsg =>
throw new IncompatibleClassChangeError("JLine incompatibility detected. Check that the sbt launcher is version 0.13.x or later.")
}
val noSuppressedMessage = (_: SuppressedTraceContext) => None
private[this] def os = System.getProperty("os.name")
private[this] def isWindows = os.toLowerCase(Locale.ENGLISH).indexOf("windows") >= 0
def apply(out: PrintStream): ConsoleLogger = apply(ConsoleOut.printStreamOut(out))
def apply(out: PrintWriter): ConsoleLogger = apply(ConsoleOut.printWriterOut(out))
def apply(out: ConsoleOut = ConsoleOut.systemOut, ansiCodesSupported: Boolean = formatEnabled,
useColor: Boolean = formatEnabled, suppressedMessage: SuppressedTraceContext => Option[String] = noSuppressedMessage): ConsoleLogger =
new ConsoleLogger(out, ansiCodesSupported, useColor, suppressedMessage)
private[this] val EscapeSequence = (27.toChar + "[^@-~]*[@-~]").r
def stripEscapeSequences(s: String): String =
EscapeSequence.pattern.matcher(s).replaceAll("")
}
/**
* A logger that logs to the console. On supported systems, the level labels are
* colored.
*
* This logger is not thread-safe.
*/
class ConsoleLogger private[ConsoleLogger] (val out: ConsoleOut, override val ansiCodesSupported: Boolean, val useColor: Boolean, val suppressedMessage: SuppressedTraceContext => Option[String]) extends BasicLogger {
import scala.Console.{ BLUE, GREEN, RED, RESET, YELLOW }
def messageColor(level: Level.Value) = RESET
def labelColor(level: Level.Value) =
level match {
case Level.Error => RED
case Level.Warn => YELLOW
case _ => RESET
}
def successLabelColor = GREEN
def successMessageColor = RESET
override def success(message: => String): Unit = {
if (successEnabled)
log(successLabelColor, Level.SuccessLabel, successMessageColor, message)
}
def trace(t: => Throwable): Unit =
out.lockObject.synchronized {
val traceLevel = getTrace
if (traceLevel >= 0)
out.print(StackTrace.trimmed(t, traceLevel))
if (traceLevel <= 2)
for (msg <- suppressedMessage(new SuppressedTraceContext(traceLevel, ansiCodesSupported && useColor)))
printLabeledLine(labelColor(Level.Error), "trace", messageColor(Level.Error), msg)
}
def log(level: Level.Value, message: => String): Unit = {
if (atLevel(level))
log(labelColor(level), level.toString, messageColor(level), message)
}
private def reset(): Unit = setColor(RESET)
private def setColor(color: String): Unit = {
if (ansiCodesSupported && useColor)
out.lockObject.synchronized { out.print(color) }
}
private def log(labelColor: String, label: String, messageColor: String, message: String): Unit =
out.lockObject.synchronized {
      for (line <- message.split("""\n"""))
printLabeledLine(labelColor, label, messageColor, line)
}
private def printLabeledLine(labelColor: String, label: String, messageColor: String, line: String): Unit =
{
reset()
out.print("[")
setColor(labelColor)
out.print(label)
reset()
out.print("] ")
setColor(messageColor)
out.print(line)
reset()
out.println()
}
def logAll(events: Seq[LogEvent]) = out.lockObject.synchronized { events.foreach(log) }
def control(event: ControlEvent.Value, message: => String): Unit = log(labelColor(Level.Info), Level.Info.toString, BLUE, message)
}
final class SuppressedTraceContext(val traceLevel: Int, val useColor: Boolean)
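// Usage sketch (illustrative): ConsoleLogger() writes to System.out and
// colors the level label when the terminal supports ANSI codes:
//   val log = ConsoleLogger()
//   log.info("compiling")   // prints "[info] compiling"
//   log.warn("deprecated")  // prints "[warn] deprecated" with a yellow label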
|
Duhemm/util
|
internal/util-logging/src/main/scala/sbt/internal/util/ConsoleLogger.scala
|
Scala
|
bsd-3-clause
| 8,429 |
package tastytest
import tastytest.{dottyErasure => dotc, scala2Erasure => nsc}
object TestErasure extends Suite("TestErasure") {
val z = new dotc.Z
test("erasure of scala 3 from scala 2") {
z.a_01(anyObj)
z.a_02(anyObj)
z.a_02X(anyObj)
z.a_03(anyObj)
z.a_04(anyObj)
z.a_04X(anyObj)
z.a_05(anyObj)
z.a_06(anyObj)
z.a_07(anyObj)
z.a_08(anyObj)
z.a_09(anyObj)
z.a_10(anyObj)
z.b_11(anyObj)
z.subb_12(anyObj)
z.d_13(anyObj)
z.d_13x(anyObj)
z.d_14(anyObj)
z.d_14x(anyObj)
z.d_15(anyObj)
z.d_15b(anyObj)
z.d_16(anyObj)
z.d_16b(anyObj)
z.d_17(anyObj)
z.d_18(anyObj)
z.d_19(anyObj)
z.d_19x(anyObj)
z.d_20(anyObj)
z.a_21(anyObj)
z.a_22(anyObj)
z.z_23(anyObj)
z.z_24(anyObj)
z.a_25(anyObj)
z.a_26(anyObj)
z.a_27(anyObj) @@ ExpectCastOrNull
z.a_28(anyObj) @@ ExpectCastOrNull
z.e_29(anyObj)
z.e_30(anyObj)
z.e_31(anyObj)
z.e_32(anyObj)
z.e_33(anyObj)
z.e_34(anyObj)
z.d_35(anyObj)
z.d_36(anyObj)
z.d_37(anyObj)
z.d_38(anyObj)
z.b_39(anyObj)
z.b_40(anyObj)
z.b_41(anyObj)
z.b_42(anyObj)
z.b_43(anyObj)
z.b_44(anyObj)
z.b_45(anyObj)
z.b_46(anyObj)
z.a_47(anyObj)
z.a_48(anyObj)
z.a_49(anyObj)
z.a_50(anyObj)
z.a_51(anyObj)
z.a_52(anyObj)
z.a_53(anyObj)
z.a_54(anyObj)
z.a_55(anyObj)
z.a_56(anyObj)
z.a_57(anyObj)
z.int_58(1)
z.int_59(1)
z.int_60(1)
z.int_61(1)
z.int_62(1)
z.int_63(1)
z.intARRAY_64(anyObj)
z.intARRAY_65(anyObj)
z.intARRAY_66(anyObj)
z.intARRAY_67(anyObj)
z.intARRAY_68(anyObj)
z.intARRAY_69(anyObj)
z.intARRAY_70(anyObj)
z.intARRAY_71(anyObj)
// z.intARRAY_71a(anyObj) // illegal union type
// z.intARRAY_71b(anyObj) // illegal union type
z.stringARRAY_72(anyObj)
z.stringARRAY_73(anyObj)
z.stringARRAY_74(anyObj)
z.stringARRAY_75(anyObj)
z.stringARRAY_76(anyObj)
z.stringARRAY_77(anyObj)
z.stringARRAY_78(anyObj)
z.stringARRAY_79(anyObj)
// z.stringARRAY_79a(anyObj) // illegal union type
// z.stringARRAY_79b(anyObj) // illegal union type
z.object_80(anyObj)
z.object_81(anyObj)
z.objectARRAY_82(anyObj)
z.object_83(anyObj)
z.object_83a(anyObj)
// z.object_83b(anyObj) // illegal union type
// z.object_83c(anyObj) // illegal union type
// z.object_83d(anyObj) // illegal union type
// z.object_83e(anyObj) // illegal union type
z.serializableARRAY_84(anyObj)
z.univARRAY_85(anyObj)
z.aARRAY_86(anyObj)
z.aARRAY_87(anyObj)
z.objectARRAY_88(anyObj)
z.objectARRAY_89(anyObj)
z.objectARRAY_90(anyObj)
z.stringARRAY_91(anyObj)
z.stringARRAY_92(anyObj)
z.stringARRAY_93(anyObj)
z.covARRAY_94(anyObj)
z.aARRAY_95(anyObj)
z.aARRAY_96(anyObj)
z.zARRAY_97(anyObj)
z.aARRAY_98(anyObj)
z.stringARRAY_99(anyObj)
z.aARRAY_100(anyObj)
z.dARRAY_101(anyObj)
z.aARRAY_102(anyObj)
z.aARRAY_103(anyObj)
z.dARRAY_104(anyObj)
z.intARRAY_105(anyObj)
z.vcARRAY_106(anyObj)
z.listARRAY_107(anyObj)
z.intARRAY_108(anyObj)
z.intARRAY_109(anyObj)
z.a_110(anyObj) @@ ExpectCastOrNull
z.a_111(anyObj) @@ ExpectCastOrNull
z.vcARRAY_112(anyObj)
z.vcARRAY_113(anyObj)
z.a_114(anyObj) @@ ExpectCastOrNull
z.a_115(anyObj) @@ ExpectCastOrNull
z.a_116(anyObj) @@ ExpectCastOrNull
z.a_117(anyObj) @@ ExpectCastOrNull
z.a_118(anyObj) @@ ExpectCastOrNull
z.a_119(anyObj) @@ ExpectCastOrNull
z.a_120(anyObj) @@ ExpectCastOrNull
z.object_121(anyObj)
z.object_122(anyObj)
z.objectARRAY_123(anyObj)
z.object_124(anyObj)
z.objectARRAY_125(anyObj)
z.covARRAY_126(anyObj)
z.covARRAY_127(anyObj)
z.object_128(anyObj)
z.intARRAYARRAY_129(anyObj)
z.intARRAYARRAY_130(anyObj)
z.objectARRAY_130(anyObj)
z.intARRAY_131(anyObj)
z.enumerated_132(anyObj)
z.enumerated_133(anyObj)
z.enumerated_134(anyObj)
z.enumeratedARRAY_135(anyObj)
z.enumeratedARRAY_136(anyObj)
z.enumeratedARRAY_137(anyObj)
}
test("erasure matches name") {
val methods = classOf[nsc.Z].getDeclaredMethods.toList ++ classOf[dotc.Z].getDeclaredMethods.toList
methods.foreach { m =>
m.getName match {
case s"${prefix}_${suffix}" =>
          val paramClass = m.getParameterTypes()(0).getSimpleName.toLowerCase.replaceAll("""\[\]""", "ARRAY")
assert(prefix == paramClass, s"Method `$m` erased to `$paramClass` which does not match its prefix `$prefix`")
case _ =>
}
}
}
}
|
scala/scala
|
test/tasty/run/src-2/tastytest/TestErasure.scala
|
Scala
|
apache-2.0
| 4,674 |
package io.getquill
import java.util.Date
import io.getquill.context.cassandra.encoding.{ CassandraMapper, CassandraType }
import io.getquill.context.cassandra.{ CassandraContext, CqlIdiom }
import java.time.{ Instant, LocalDate }
import scala.reflect.ClassTag
class CassandraMirrorContextWithQueryProbing extends CassandraMirrorContext(Literal) with QueryProbing
class CassandraMirrorContext[Naming <: NamingStrategy](naming: Naming)
extends MirrorContext[CqlIdiom, Naming](CqlIdiom, naming) with CassandraContext[Naming] {
implicit val timestampDecoder: Decoder[Instant] = decoder[Instant]
implicit val timestampEncoder: Encoder[Instant] = encoder[Instant]
implicit val cassandraLocalDateDecoder: Decoder[LocalDate] = decoder[LocalDate]
implicit val cassandraLocalDateEncoder: Encoder[LocalDate] = encoder[LocalDate]
implicit def listDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[List[T]] = decoderUnsafe[List[T]]
implicit def setDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[Set[T]] = decoderUnsafe[Set[T]]
implicit def mapDecoder[K, V, KCas: ClassTag, VCas: ClassTag](
implicit
keyMapper: CassandraMapper[KCas, K],
valMapper: CassandraMapper[VCas, V]
): Decoder[Map[K, V]] = decoderUnsafe[Map[K, V]]
implicit def listEncoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[T, Cas]): Encoder[List[T]] = encoder[List[T]]
implicit def setEncoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[T, Cas]): Encoder[Set[T]] = encoder[Set[T]]
implicit def mapEncoder[K, V, KCas: ClassTag, VCas: ClassTag](
implicit
keyMapper: CassandraMapper[K, KCas],
valMapper: CassandraMapper[V, VCas]
): Encoder[Map[K, V]] = encoder[Map[K, V]]
implicit def udtCassandraType[T <: Udt]: CassandraType[T] = CassandraType.of[T]
implicit def udtDecoder[T <: Udt: ClassTag]: Decoder[T] = decoder[T]
implicit def udtEncoder[T <: Udt]: Encoder[T] = encoder[T]
}
|
getquill/quill
|
quill-cassandra/src/main/scala/io/getquill/CassandraMirrorContext.scala
|
Scala
|
apache-2.0
| 1,968 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import java.util.Locale
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchFunctionException}
import org.apache.spark.sql.catalyst.catalog.{CatalogFunction, FunctionResource}
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.util.StringUtils
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.types.{StringType, StructField, StructType}
/**
* The DDL command that creates a function.
* To create a temporary function, the syntax of using this command in SQL is:
* {{{
* CREATE [OR REPLACE] TEMPORARY FUNCTION functionName
 *    AS className [USING JAR|FILE 'uri' [, JAR|FILE 'uri']]
* }}}
*
* To create a permanent function, the syntax in SQL is:
* {{{
* CREATE [OR REPLACE] FUNCTION [IF NOT EXISTS] [databaseName.]functionName
 *    AS className [USING JAR|FILE 'uri' [, JAR|FILE 'uri']]
* }}}
*
* @param ignoreIfExists: When true, ignore if the function with the specified name exists
* in the specified database.
* @param replace: When true, alter the function with the specified name
*/
case class CreateFunctionCommand(
databaseName: Option[String],
functionName: String,
className: String,
resources: Seq[FunctionResource],
isTemp: Boolean,
ignoreIfExists: Boolean,
replace: Boolean)
extends RunnableCommand {
if (ignoreIfExists && replace) {
throw new AnalysisException("CREATE FUNCTION with both IF NOT EXISTS and REPLACE" +
" is not allowed.")
}
// Disallow to define a temporary function with `IF NOT EXISTS`
if (ignoreIfExists && isTemp) {
throw new AnalysisException(
"It is not allowed to define a TEMPORARY function with IF NOT EXISTS.")
}
// Temporary function names should not contain database prefix like "database.function"
if (databaseName.isDefined && isTemp) {
throw new AnalysisException(s"Specifying a database in CREATE TEMPORARY FUNCTION " +
s"is not allowed: '${databaseName.get}'")
}
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
val func = CatalogFunction(FunctionIdentifier(functionName, databaseName), className, resources)
if (isTemp) {
if (!replace && catalog.isRegisteredFunction(func.identifier)) {
throw QueryCompilationErrors.functionAlreadyExistsError(func.identifier)
}
// We first load resources and then put the builder in the function registry.
catalog.loadFunctionResources(resources)
catalog.registerFunction(func, overrideIfExists = replace)
} else {
// Handles `CREATE OR REPLACE FUNCTION AS ... USING ...`
if (replace && catalog.functionExists(func.identifier)) {
// alter the function in the metastore
catalog.alterFunction(func)
} else {
// For a permanent, we will store the metadata into underlying external catalog.
// This function will be loaded into the FunctionRegistry when a query uses it.
// We do not load it into FunctionRegistry right now, to avoid loading the resource and
// UDF class immediately, as the Spark application to create the function may not have
// access to the resource and/or UDF class.
catalog.createFunction(func, ignoreIfExists)
}
}
Seq.empty[Row]
}
}
/**
* A command for users to get the usage of a registered function.
* The syntax of using this command in SQL is
* {{{
* DESCRIBE FUNCTION [EXTENDED] upper;
* }}}
*/
case class DescribeFunctionCommand(
functionName: FunctionIdentifier,
isExtended: Boolean) extends RunnableCommand {
override val output: Seq[Attribute] = {
val schema = StructType(StructField("function_desc", StringType, nullable = false) :: Nil)
schema.toAttributes
}
override def run(sparkSession: SparkSession): Seq[Row] = {
    // Hard code "<>", "!=", "between", and "case" for now as there are no corresponding functions.
functionName.funcName.toLowerCase(Locale.ROOT) match {
case "<>" =>
Row(s"Function: $functionName") ::
Row("Usage: expr1 <> expr2 - " +
"Returns true if `expr1` is not equal to `expr2`.") :: Nil
case "!=" =>
Row(s"Function: $functionName") ::
Row("Usage: expr1 != expr2 - " +
"Returns true if `expr1` is not equal to `expr2`.") :: Nil
case "between" =>
Row("Function: between") ::
Row("Usage: expr1 [NOT] BETWEEN expr2 AND expr3 - " +
"evaluate if `expr1` is [not] in between `expr2` and `expr3`.") :: Nil
case "case" =>
Row("Function: case") ::
Row("Usage: CASE expr1 WHEN expr2 THEN expr3 " +
"[WHEN expr4 THEN expr5]* [ELSE expr6] END - " +
"When `expr1` = `expr2`, returns `expr3`; " +
"when `expr1` = `expr4`, return `expr5`; else return `expr6`.") :: Nil
case _ =>
try {
val info = sparkSession.sessionState.catalog.lookupFunctionInfo(functionName)
val name = if (info.getDb != null) info.getDb + "." + info.getName else info.getName
val result =
Row(s"Function: $name") ::
Row(s"Class: ${info.getClassName}") ::
Row(s"Usage: ${info.getUsage}") :: Nil
if (isExtended) {
result :+
Row(s"Extended Usage:${info.getExtended}")
} else {
result
}
} catch {
case _: NoSuchFunctionException => Seq(Row(s"Function: $functionName not found."))
}
}
}
}
/**
* The DDL command that drops a function.
 * ifExists: if true, no error is raised when the function doesn't exist.
 * isTemp: indicates if it is a temporary function.
*/
case class DropFunctionCommand(
databaseName: Option[String],
functionName: String,
ifExists: Boolean,
isTemp: Boolean)
extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
if (isTemp) {
if (databaseName.isDefined) {
throw new AnalysisException(s"Specifying a database in DROP TEMPORARY FUNCTION " +
s"is not allowed: '${databaseName.get}'")
}
if (FunctionRegistry.builtin.functionExists(FunctionIdentifier(functionName))) {
throw new AnalysisException(s"Cannot drop native function '$functionName'")
}
catalog.dropTempFunction(functionName, ifExists)
} else {
// We are dropping a permanent function.
catalog.dropFunction(
FunctionIdentifier(functionName, databaseName),
ignoreIfNotExists = ifExists)
}
Seq.empty[Row]
}
}
/**
* A command for users to list all of the registered functions.
* The syntax of using this command in SQL is:
* {{{
* SHOW FUNCTIONS [LIKE pattern]
* }}}
* For the pattern, '*' matches any sequence of characters (including no characters) and
* '|' is for alternation.
* For example, "show functions like 'yea*|windo*'" will return "window" and "year".
*/
case class ShowFunctionsCommand(
db: Option[String],
pattern: Option[String],
showUserFunctions: Boolean,
showSystemFunctions: Boolean) extends RunnableCommand {
override val output: Seq[Attribute] = {
val schema = StructType(StructField("function", StringType, nullable = false) :: Nil)
schema.toAttributes
}
override def run(sparkSession: SparkSession): Seq[Row] = {
val dbName = db.getOrElse(sparkSession.sessionState.catalog.getCurrentDatabase)
// If pattern is not specified, we use '*', which is used to
// match any sequence of characters (including no characters).
val functionNames =
sparkSession.sessionState.catalog
.listFunctions(dbName, pattern.getOrElse("*"))
.collect {
case (f, "USER") if showUserFunctions => f.unquotedString
case (f, "SYSTEM") if showSystemFunctions => f.unquotedString
}
    // Hard code "<>", "!=", "between", and "case" for now as there are no corresponding functions.
    // "<>", "!=", "between", and "case" are system functions, shown only when showSystemFunctions=true
if (showSystemFunctions) {
(functionNames ++
StringUtils.filterPattern(FunctionsCommand.virtualOperators, pattern.getOrElse("*")))
.sorted.map(Row(_))
} else {
functionNames.sorted.map(Row(_))
}
}
}
/**
* A command for users to refresh the persistent function.
* The syntax of using this command in SQL is:
* {{{
* REFRESH FUNCTION functionName
* }}}
*/
case class RefreshFunctionCommand(
databaseName: Option[String],
functionName: String)
extends RunnableCommand {
override def run(sparkSession: SparkSession): Seq[Row] = {
val catalog = sparkSession.sessionState.catalog
if (FunctionRegistry.builtin.functionExists(FunctionIdentifier(functionName, databaseName))) {
throw new AnalysisException(s"Cannot refresh built-in function $functionName")
}
if (catalog.isTemporaryFunction(FunctionIdentifier(functionName, databaseName))) {
throw new AnalysisException(s"Cannot refresh temporary function $functionName")
}
val identifier = FunctionIdentifier(
functionName, Some(databaseName.getOrElse(catalog.getCurrentDatabase)))
// we only refresh the permanent function.
if (catalog.isPersistentFunction(identifier)) {
// register overwrite function.
val func = catalog.getFunctionMetadata(identifier)
catalog.registerFunction(func, true)
} else {
// clear cached function and throw exception
catalog.unregisterFunction(identifier)
throw new NoSuchFunctionException(identifier.database.get, identifier.funcName)
}
Seq.empty[Row]
}
}
object FunctionsCommand {
// operators that do not have corresponding functions.
// They should be handled `DescribeFunctionCommand`, `ShowFunctionsCommand`
val virtualOperators = Seq("!=", "<>", "between", "case")
}
|
witgo/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
|
Scala
|
apache-2.0
| 10,999 |
package mesosphere.marathon.state
import com.wix.accord.dsl._
import com.wix.accord._
import mesosphere.marathon.api.v2.Validation._
import org.apache.mesos.{ Protos => Mesos }
import scala.collection.immutable.Seq
// TODO: trait Container and specializations?
// Current implementation with type defaulting to DOCKER and docker to NONE makes no sense
case class Container(
`type`: Mesos.ContainerInfo.Type = Mesos.ContainerInfo.Type.DOCKER,
volumes: Seq[Volume] = Nil,
docker: Option[Container.Docker] = None)
object Container {
object Empty extends Container
/**
* Docker-specific container parameters.
*/
case class Docker(
image: String = "",
network: Option[Mesos.ContainerInfo.DockerInfo.Network] = None,
portMappings: Option[Seq[Docker.PortMapping]] = None,
privileged: Boolean = false,
parameters: Seq[Parameter] = Nil,
forcePullImage: Boolean = false)
object Docker {
/**
* @param containerPort The container port to expose
* @param hostPort The host port to bind
* @param servicePort The well-known port for this service
* @param protocol Layer 4 protocol to expose (i.e. "tcp", "udp" or "udp,tcp" for both).
* @param name Name of the service hosted on this port.
* @param labels This can be used to decorate the message with metadata to be
* interpreted by external applications such as firewalls.
*/
case class PortMapping(
containerPort: Int = 0,
hostPort: Int = 0,
servicePort: Int = 0,
protocol: String = "tcp",
name: Option[String] = None,
labels: Map[String, String] = Map.empty[String, String])
object PortMapping {
val TCP = "tcp"
val UDP = "udp"
implicit val uniqueProtocols: Validator[Iterable[String]] =
isTrue[Iterable[String]]("protocols must be unique.") { protocols =>
protocols.size == protocols.toSet.size
}
implicit val portMappingValidator = validator[PortMapping] { portMapping =>
portMapping.protocol.split(',').toIterable is uniqueProtocols and every(oneOf(TCP, UDP))
portMapping.containerPort should be >= 0
portMapping.hostPort should be >= 0
portMapping.servicePort should be >= 0
portMapping.name is optional(matchRegexFully(PortAssignment.PortNamePattern))
}
}
object PortMappings {
implicit val portMappingsValidator: Validator[Seq[PortMapping]] = validator[Seq[PortMapping]] { portMappings =>
portMappings is every(valid)
portMappings is elementsAreUniqueByOptional(_.name, "Port names must be unique.")
}
}
implicit val dockerValidator = validator[Docker] { docker =>
docker.image is notEmpty
docker.portMappings is optional(valid(PortMappings.portMappingsValidator))
}
}
// We need validation based on the container type, but don't have dedicated classes. Therefore this approach manually
// delegates validation to the matching validator
implicit val validContainer: Validator[Container] = {
val validGeneralContainer = validator[Container] { container =>
container.volumes is every(valid)
}
val validDockerContainer: Validator[Container] = validator[Container] { container =>
container.docker is notEmpty
container.docker.each is valid
}
val validMesosContainer: Validator[Container] = validator[Container] { container =>
container.docker is empty
}
new Validator[Container] {
override def apply(c: Container): Result = c.`type` match {
case Mesos.ContainerInfo.Type.MESOS => validate(c)(validMesosContainer)
case Mesos.ContainerInfo.Type.DOCKER => validate(c)(validDockerContainer)
case _ => Failure(Set(RuleViolation(c.`type`, "unknown", None)))
}
} and validGeneralContainer
}
}
|
titosand/marathon
|
src/main/scala/mesosphere/marathon/state/Container.scala
|
Scala
|
apache-2.0
| 3,922 |
package com.atomist.source.filter
import java.nio.file._
case class GitDirFilter(rootPath: String) extends ArtifactFilter {
private val dotGitDir = Paths.get(rootPath, ".git")
override def apply(path: String): Boolean =
!Paths.get(path).toAbsolutePath.startsWith(dotGitDir.toAbsolutePath)
}
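// e.g. with GitDirFilter("/repo"): apply("/repo/.git/config") == false, so
// the path is filtered out, while apply("/repo/src/Main.scala") == true.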
|
atomist/artifact-source
|
src/main/scala/com/atomist/source/filter/GitDirFilter.scala
|
Scala
|
gpl-3.0
| 303 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api.services
import slamdata.Predef._
import quasar.{Variables, VariablesArbitrary}
import quasar.api._, ApiErrorEntityDecoder._, PathUtils._
import quasar.api.matchers._
import quasar.contrib.pathy._, PathArbitrary._
import quasar.contrib.scalaz.catchable._
import quasar.fp._
import quasar.fs._, InMemory._
import quasar.fs.mount._
import quasar.main.CoreEffIO
import quasar.sql._
import quasar.sql.Arbitraries._
import argonaut._, Argonaut._, EncodeJsonScalaz._
import eu.timepit.refined.numeric.{NonNegative, Positive => RPositive}
import eu.timepit.refined.scalacheck.numeric._
import matryoshka.data.Fix
import org.http4s._
import org.http4s.argonaut._
import org.http4s.syntax.service._
import pathy.Path._
import pathy.scalacheck.PathyArbitrary._
import scalaz.{Lens => _, _}
import scalaz.Scalaz._
import shapeless.tag.@@
object MetadataFixture {
def service(mem: InMemState, mnts: Map[APath, MountConfig]): Service[Request, Response] = {
val inter = Fixture.inMemFSWeb(mem, MountingsConfig(mnts)).unsafePerformSync
metadata.service[CoreEffIO].toHttpService(inter).orNotFound
}
}
class MetadataServiceSpec extends quasar.Qspec with FileSystemFixture with Http4s {
import metadata.FsNode
import VariablesArbitrary._
import FileSystemTypeArbitrary._, ConnectionUriArbitrary._
import MetadataFixture._
import PathError._
"Metadata Service" should {
"respond with NotFound" >> {
// TODO: escaped paths do not survive being embedded in error messages
"if directory does not exist" >> prop { dir: ADir => (dir != rootDir) ==> {
val response = service(InMemState.empty, Map())(Request(uri = pathUri(dir))).unsafePerformSync
response.as[ApiError].unsafePerformSync must beApiErrorLike(pathNotFound(dir))
}}
"file does not exist" >> prop { file: AFile =>
val response = service(InMemState.empty, Map())(Request(uri = pathUri(file))).unsafePerformSync
response.as[ApiError].unsafePerformSync must beApiErrorLike(pathNotFound(file))
}
"if file with same name as existing directory (without trailing slash)" >> prop { s: SingleFileMemState =>
depth(s.file) > 1 ==> {
val parent = fileParent(s.file)
// .get here is because we know thanks to the property guard, that the parent directory has a name
val fileWithSameName = parentDir(parent).get </> file(dirName(parent).get.value)
val response = service(s.state, Map())(Request(uri = pathUri(fileWithSameName))).unsafePerformSync
response.as[ApiError].unsafePerformSync must beApiErrorLike(pathNotFound(fileWithSameName))
}
}
}
"respond with OK" >> {
"and empty list for existing empty directory" >> {
service(InMemState.empty, Map())(Request(uri = Uri(path = "/")))
.as[Json].unsafePerformSync must_== Json("children" := List[FsNode]())
}
"and list of children for existing nonempty directory" >> prop { s: NonEmptyDir =>
val childNodes = s.ls.map(FsNode(_, None))
service(s.state, Map())(Request(uri = pathUri(s.dir)))
.as[Json].unsafePerformSync must_== Json("children" := childNodes.toIList.sorted)
}.set(minTestsOk = 10) // NB: this test is slow because NonEmptyDir instances are still relatively large
.flakyTest("scalacheck: 'Gave up after only 2 passed tests'")
"and mounts when any children happen to be mount points" >> prop { (
fileName: FileName,
directoryName: DirName,
fsMountName: DirName,
viewName: FileName,
moduleName: DirName,
vcfg: (ScopedExpr[Fix[Sql]], Variables),
fsCfg: (FileSystemType, ConnectionUri)
) => (fileName ≠ viewName &&
directoryName ≠ fsMountName &&
directoryName ≠ moduleName &&
fsMountName ≠ moduleName) ==> {
val moduleConfig: List[Statement[Fix[Sql]]] = List(
FunctionDecl(CIName("FOO"), List(CIName("Bar")), Fix(boolLiteral(true))))
val parent: ADir = rootDir </> dir("foo")
val mnts = Map[APath, MountConfig](
(parent </> file1(viewName), MountConfig.viewConfig(vcfg)),
(parent </> dir1(fsMountName), MountConfig.fileSystemConfig(fsCfg)),
(parent </> dir1(moduleName), MountConfig.moduleConfig(moduleConfig)))
val mem = InMemState fromFiles Map(
(parent </> file1(fileName), Vector()),
(parent </> dir1(directoryName) </> file("quux"), Vector()),
(parent </> file1(viewName), Vector()),
(parent </> dir1(fsMountName) </> file("bar"), Vector()))
service(mem, mnts)(Request(uri = pathUri(parent)))
.as[Json].unsafePerformSync must_=== Json("children" := List(
FsNode(fileName.value, "file", None, None),
FsNode(directoryName.value, "directory", None, None),
FsNode(viewName.value, "file", Some("view"), None),
FsNode(fsMountName.value, "directory", Some(fsCfg._1.value), None),
FsNode(moduleName.value, "directory", Some("module"), None)
).toIList.sorted)
}}
"and functions as files on a module mount with additional info about functions parameters" >> prop { dir: ADir =>
val moduleConfig: List[Statement[Fix[Sql]]] = List(
FunctionDecl(CIName("FOO"), List(CIName("BAR")), Fix(boolLiteral(true))),
FunctionDecl(CIName("BAR"), List(CIName("BAR"), CIName("BAZ")), Fix(boolLiteral(false))))
val mnts = Map[APath, MountConfig](
(dir, MountConfig.moduleConfig(moduleConfig)))
val mem = InMemState.empty
service(mem, mnts)(Request(uri = pathUri(dir)))
.as[Json].unsafePerformSync must_=== Json("children" := List(
FsNode("FOO", "file", mount = None, args = Some(List("BAR"))),
FsNode("BAR", "file", mount = None, args = Some(List("BAR", "BAZ")))
).toIList.sorted)
}
"support offset and limit" >> prop { (dir: NonEmptyDir, offset: Int @@ NonNegative, limit: Int @@ RPositive) =>
val childNodes = dir.ls.map(FsNode(_, None))
service(dir.state, Map())(Request(uri = pathUri(dir.dir).+?("offset", offset.toString).+?("limit", limit.toString)))
.as[Json].unsafePerformSync must_== Json("children" := childNodes.toIList.sorted.drop(offset).take(limit))
}.set(minTestsOk = 10) // NB: this test is slow because NonEmptyDir instances are still relatively large
"and empty object for existing file" >> prop { s: SingleFileMemState =>
service(s.state, Map())(Request(uri = pathUri(s.file)))
.as[Json].unsafePerformSync must_=== Json.obj()
}
}
}
}
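// Sketch (not from the original file): the offset/limit assertion above
// amounts to paginating the name-sorted child list, conceptually
//
//   children.sorted.drop(offset).take(limit)
//
// e.g. List("a", "b", "c", "d").drop(1).take(2) yields List("b", "c").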
|
drostron/quasar
|
web/src/test/scala/quasar/api/services/MetadataServiceSpec.scala
|
Scala
|
apache-2.0
| 7,306 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package forms

import forms.FormValidation.{mandatory, matchesRegex}
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.validation.Constraints.maxLength
import uk.gov.hmrc.play.mappers.StopOnFirstFail

class ApplicationReferenceForm {
def apply(): Form[String] = Form[String](
single(
ApplicationReferenceForm.fieldName -> text.verifying(
StopOnFirstFail(
mandatory("applicationReference.error.missing"),
maxLength(ApplicationReferenceForm.lengthLimit, "applicationReference.error.length"),
matchesRegex(ApplicationReferenceForm.regex, "applicationReference.error.invalid")
)
)
)
)
}

object ApplicationReferenceForm {
val fieldName: String = "value"
  // Letters, digits, whitespace and a fixed set of punctuation/currency symbols, 1–100 characters
  val regex = """^[A-Za-z0-9\s'‘()\[\]{}<>!«»"ʺ˝ˮ?/\\ +=%#*&$€£_\-@¥.,:;]{1,100}$""".r
val lengthLimit = 100
}
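// Usage sketch (illustrative, not part of the original file): binding the form
// to request data with Play's Form API. The message keys are the ones declared
// above; the sample input value is hypothetical.
//
//   val form = new ApplicationReferenceForm()()
//   form.bind(Map(ApplicationReferenceForm.fieldName -> "VAT registration 2022"))
//     .fold(
//       withErrors => withErrors.errors.map(_.message), // e.g. "applicationReference.error.invalid"
//       reference  => List(reference)                   // the validated value
//     )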
|
hmrc/vat-registration-frontend
|
app/forms/ApplicationReferenceForm.scala
|
Scala
|
apache-2.0
| 1,499 |
/*
* Copyright (c) 2011-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless.examples

import org.junit.Test
import org.junit.Assert._

import shapeless._

class LiftTests {
  import syntax.std.function._
  import test._
  import Lift._

  @Test
def testLiftO {
val sum : (Int, Int) => Int = _ + _
val prd : (Int, Int, Int) => Int = _ * _ * _
val hlsum = sum.toProduct
typed[Int :: Int :: HNil => Int](hlsum)
val hlprd = prd.toProduct
typed[Int :: Int :: Int :: HNil => Int](hlprd)
val l1 = 2 :: 3 :: HNil
val l2 = 2 :: 3 :: 4 :: HNil
val s1 = hlsum(l1)
assertEquals(5, s1)
val p1 = hlprd(l2)
assertEquals(24, p1)
val l3 = Option(2) :: Option(3) :: HNil
val isDef3 = l3.foldMap(true)(isDefined)(_ & _)
assertTrue(isDef3)
val l3a = l3 map get
val s2a = hlsum(l3a)
assertEquals(5, s2a)
val l4 = Option(2) :: Option(3) :: Option(4) :: HNil
val isDef4 = l4.foldMap(true)(isDefined)(_ & _)
assertTrue(isDef4)
val l4a = l4 map get
val p2a = hlprd(l4a)
assertEquals(24, p2a)
val sumO = liftO(sum)
typed[(Option[Int], Option[Int]) => Option[Int]](sumO)
val s2 = sumO(Some(1), Some(2))
assertTrue(s2.isDefined)
assertEquals(3, s2.get)
val s3 = sumO(Some(1), None)
assertTrue(s3.isEmpty)
val s4 = sumO(None, Some(2))
assertTrue(s4.isEmpty)
val s5 = sumO(None, None)
assertTrue(s5.isEmpty)
val s6 = List(Some(1), Some(2), Some(3), Some(4)).reduce(sumO)
assertTrue(s6.isDefined)
assertEquals(10, s6.get)
val prdO = liftO(prd)
typed[(Option[Int], Option[Int], Option[Int]) => Option[Int]](prdO)
val p2 = prdO(Some(2), Some(3), Some(4))
assertTrue(p2.isDefined)
assertEquals(24, p2.get)
val p3 = prdO(Some(2), None, Some(4))
assertTrue(p3.isEmpty)
val p4 = prdO(Some(2), Some(3), None)
assertTrue(p4.isEmpty)
}
}
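// Sketch (assumption, not from the original file): for the binary case the
// Option-lifting exercised above can be reproduced without HLists:
//
//   def liftO2[A, B, C](f: (A, B) => C): (Option[A], Option[B]) => Option[C] =
//     (oa, ob) => for { a <- oa; b <- ob } yield f(a, b)
//
//   liftO2((_: Int) + (_: Int))(Some(1), Some(2)) // Some(3)
//   liftO2((_: Int) + (_: Int))(Some(1), None)    // None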
|
mandubian/shapeless
|
examples/src/test/scala/shapeless/examples/lift.scala
|
Scala
|
apache-2.0
| 2,500 |