code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64, 5–1M)
---|---|---|---|---|---
package iotus.core
import com.datastax.driver.core.querybuilder.QueryBuilder
import com.datastax.driver.core.querybuilder.QueryBuilder._
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.{FunSpec, Matchers}
class ConnectionAndQuerySpec extends FunSpec with Matchers {
val config:Config = ConfigFactory.load("ldaptest.conf")
// "cassandra://localhost:9042/test"
val dbUrl = config.getString("dburl")
describe("Connecting and querying a Cassandra database") {
it("should just work") {
val uri = CassandraConnectionUri(dbUrl)
//val uri = CassandraConnectionUri("cassandra://localhost:9042/cd")
val session = Helper.createSessionAndInitKeyspace(uri)
session.execute("CREATE TABLE IF NOT EXISTS things (id int, name text, PRIMARY KEY (id))")
session.execute("INSERT INTO things (id, name) VALUES (1, 'foo');")
val selectStmt = select().column("name")
.from("things")
.where(QueryBuilder.eq("id", 1))
.limit(1)
val resultSet = session.execute(selectStmt)
val row = resultSet.one()
row.getString("name") should be("foo")
session.execute("DROP TABLE things;")
}
}
}
| petermichalek/iotan-core | src/test/scala/iotus/core/ConnectionAndQuerySpec.scala | Scala | apache-2.0 | 1,202 |
/*
* Copyright 2012-2013 Eligotech BV.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eligosource.eventsourced.journal.mongodb.casbah
import org.scalatest.{Suite, BeforeAndAfterAll}
import de.flapdoodle.embed.mongo.{Command, MongodExecutable, MongodProcess, MongodStarter}
import de.flapdoodle.embed.mongo.config.{RuntimeConfigBuilder, MongodConfig}
import de.flapdoodle.embed.process.io.{NullProcessor, Processors}
import de.flapdoodle.embed.process.config.io.ProcessOutput
/**
* This class provides test support for starting and stopping the embedded mongo instance.
*/
trait MongodbSpecSupport extends BeforeAndAfterAll { this: Suite =>
def mongoPort = mongoDefaultPort
// Handles for the embedded mongod, created in beforeAll and shut down in afterAll.
var mongoStarter: MongodStarter = _
var mongoExe: MongodExecutable = _
var mongod: MongodProcess = _
override def beforeAll() {
// Used to filter out console output messages.
val processOutput = new ProcessOutput(Processors.named("[mongod>]", new NullProcessor),
Processors.named("[MONGOD>]", new NullProcessor), Processors.named("[console>]", new NullProcessor))
val runtimeConfig = new RuntimeConfigBuilder()
.defaults(Command.MongoD)
.processOutput(processOutput)
.build()
// Startup embedded mongodb.
mongoStarter = MongodStarter.getInstance(runtimeConfig)
mongoExe = mongoStarter.prepare(new MongodConfig(mongoVer, mongoPort, mongoLocalHostIPV6))
mongod = mongoExe.start()
}
override def afterAll() {
mongod.stop()
mongoExe.stop()
}
}
| CoderPaulK/eventsourced | es-journal/es-journal-mongodb-casbah/src/test/scala/org/eligosource/eventsourced/journal/mongodb/casbah/MongodbSpecSupport.scala | Scala | apache-2.0 | 1,869 |
package it.milczarek.gpwquoter.domain
import scala.beans.BeanProperty
case class OHLC(@BeanProperty open: Double, @BeanProperty high: Double, @BeanProperty low: Double, @BeanProperty close: Double, @BeanProperty volume: Double)
| milczarekIT/gpw-quoter | src/main/scala/it/milczarek/gpwquoter/domain/OHLC.scala | Scala | apache-2.0 | 230 |
package japgolly.scalajs.react.core.vdom
import japgolly.scalajs.react._
import japgolly.scalajs.react.test._
import japgolly.scalajs.react.test.TestUtil._
import japgolly.scalajs.react.vdom.html_<^._
import utest._
object VdomTest extends TestSuite {
val C = ScalaComponent.static("")(<.br)
val Span = ScalaComponent.builder[Unit]("").render_C(<.span(_)).build
override def tests = Tests {
'returnTypes {
def test(subj: VdomNode, exp: String): Unit = {
val comp = ScalaComponent.static("tmp")(subj)
assertRender(comp(), exp)
}
'byte - test(50: Byte, "50")
'short - test(45: Short, "45")
'int - test(666, "666")
'long - test(123L, "123")
'double - test(12.3, "12.3")
'string - test("yo", "yo")
'empty - test(EmptyVdom, "")
'optionN - test(Option.empty[Int], "")
'optionS - test(Option(13), "13")
'optionSN - test(Option(Option.empty[Int]), "")
'optionSS - test(Option(Option(13)), "13")
'vdomArray - test(VdomArray("hehe", <.div(^.key := 1, "one")), "hehe<div>one</div>") // needs keys apparently
'fragment - test(ReactFragment("hehe", <.div("one")), "hehe<div>one</div>") // keys not required
'fragmentK - test(ReactFragment.withKey(1)("hehe", <.div("one")), "hehe<div>one</div>") // keyABLE
'booleanF - compileError("""test(false, "")""")
'booleanT - compileError("""test(true, "")""")
}
'tagModToJs - {
'childrenAsVdomNodes - {
val vdom = TagMod("hehe", 123, <.em(456L), C())
val expect = "<span>hehe123<em>456</em><br/></span>"
assertRender(<.span(vdom), expect)
assertRender(Span(vdom.toJs.childrenAsVdomNodes: _*), expect)
}
}
'portal - {
ReactTestUtils.withNewBodyElement { portalTarget =>
val comp = ScalaComponent.static("tmp")(
<.div("Here we go...",
ReactPortal(<.div("NICE"), portalTarget)))
ReactTestUtils.withRenderedIntoBody(comp()) { m =>
val compHtml = m.outerHtmlScrubbed()
val portalHtml = ReactTestUtils.removeReactInternals(portalTarget.innerHTML)
assertEq((compHtml, portalHtml), ("<div>Here we go...</div>", "<div>NICE</div>"))
}
}
}
}
}
| matthughes/scalajs-react | test/src/test/scala/japgolly/scalajs/react/core/vdom/VdomTest.scala | Scala | apache-2.0 | 2,707 |
package org.http4s
import cats.{Order, Show}
import org.http4s.internal.parboiled2.{Parser => PbParser}
import org.http4s.parser.{AdditionalRules, Http4sParser}
import org.http4s.util.Writer
import scala.reflect.macros.whitebox
/**
* A Quality Value. Represented as thousandths for an exact representation rounded to three
* decimal places.
*
* @param thousandths between 0 (for q=0) and 1000 (for q=1)
* @see [[http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.9 RFC 2616, Section 3.9]]
*/
final class QValue private (val thousandths: Int) extends AnyVal with Ordered[QValue] {
def toDouble: Double = 0.001 * thousandths
def isAcceptable: Boolean = thousandths > 0
override def toString = s"QValue(${0.001 * thousandths})"
override def compare(that: QValue): Int = thousandths - that.thousandths
def render(writer: Writer): writer.type =
if (thousandths == 1000) writer
else {
writer.append(";q=")
formatq(writer)
}
// Assumes that q is in the proper bounds, otherwise you get an exception!
private def formatq(b: Writer): b.type =
// Skip the rest of the stuff if we are 1.0
if (thousandths == 1000) b.append("1.0")
else if (thousandths == 0) b.append('0')
else {
// Need to start appending stuff
b.append("0.")
@inline
def convert(i: Int): Char = ('0' + i).toChar
val mod100 = thousandths % 100
if (thousandths > 99) {
b.append(convert(thousandths / 100))
if (mod100 == 0) return b // First digit only
} else b.append('0')
val mod10 = thousandths % 10
if (thousandths > 9) {
b.append(convert(mod100 / 10))
if (mod10 == 0) return b // Second digit only
} else b.append('0')
b.append(convert(mod10)) // Last digit
}
}
object QValue {
lazy val One: QValue = new QValue(1000) // scalastyle:ignore
lazy val Zero: QValue = new QValue(0)
private def mkQValue(thousandths: Int, s: => String): ParseResult[QValue] =
if (thousandths < 0 || thousandths > 1000)
ParseResult.fail("Invalid q-value", s"$s must be between 0.0 and 1.0")
else ParseResult.success(new QValue(thousandths))
def fromThousandths(thousandths: Int): ParseResult[QValue] =
mkQValue(thousandths, (thousandths * .001).toString)
def fromDouble(d: Double): ParseResult[QValue] =
mkQValue(Math.round(1000 * d).toInt, d.toString)
def fromString(s: String): ParseResult[QValue] =
try fromDouble(s.toDouble)
catch {
case _: NumberFormatException => ParseResult.fail("Invalid q-value", s"${s} is not a number")
}
def unsafeFromString(s: String): QValue =
fromString(s).fold(throw _, identity)
def parse(s: String): ParseResult[QValue] =
new Http4sParser[QValue](s, "Invalid q-value") with QValueParser {
def main = QualityValue
}.parse
private[http4s] trait QValueParser extends AdditionalRules { self: PbParser =>
def QualityValue = rule { // QValue is already taken
";" ~ OptWS ~ "q" ~ "=" ~ QValue | push(org.http4s.QValue.One)
}
}
/** Exists to support compile-time verified literals. Do not call directly. */
def ☠(thousandths: Int): QValue = new QValue(thousandths)
class Macros(val c: whitebox.Context) {
import c.universe._
def qValueLiteral(d: c.Expr[Double]): Tree =
d.tree match {
case Literal(Constant(d: Double)) =>
QValue
.fromDouble(d)
.fold(
e => c.abort(c.enclosingPosition, e.details),
qValue => q"_root_.org.http4s.QValue.☠(${qValue.thousandths})"
)
case _ =>
c.abort(c.enclosingPosition, s"literal Double value required")
}
}
/**
* Supports a literal syntax for validated QValues.
*
* Example:
* {{{
* q(0.5).success == QValue.fromDouble(0.5)
* q(1.1) // does not compile: out of range
* val d = 0.5
* q(d) // does not compile: not a literal
* }}}
*/
@deprecated("""use qValue"" string interpolation instead""", "0.20")
def q(d: Double): QValue = macro Macros.qValueLiteral
implicit val http4sOrderForQValue: Order[QValue] = Order.fromOrdering[QValue]
implicit val http4sShowForQValue: Show[QValue] = Show.fromToString[QValue]
implicit val http4sHttpCodecForQValue: HttpCodec[QValue] = new HttpCodec[QValue] {
def parse(s: String): ParseResult[QValue] = QValue.parse(s)
def render(writer: Writer, q: QValue): writer.type = q.render(writer)
}
}
trait HasQValue {
def qValue: QValue
def withQValue(q: QValue): HasQValue
}
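A short REPL-style usage sketch of the thousandths representation described above (illustrative, not part of the original file):

import org.http4s.QValue

QValue.fromDouble(0.5)  // succeeds: stored exactly as thousandths = 500
QValue.fromDouble(1.1)  // fails to parse: q-values must lie between 0.0 and 1.0
QValue.fromString("q")  // fails to parse: not a number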
| ChristopherDavenport/http4s | core/src/main/scala/org/http4s/QValue.scala | Scala | apache-2.0 | 4,588 |
package mesosphere.marathon
package core.storage.store.impl
import java.time.OffsetDateTime
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.storage.store.impl.memory.{InMemoryPersistenceStore, RamId}
import mesosphere.marathon.core.storage.store.{IdResolver, PersistenceStoreTest, TestClass1}
import mesosphere.marathon.metrics.dummy.DummyMetrics
import mesosphere.marathon.storage.store.InMemoryStoreSerialization
trait InMemoryTestClass1Serialization {
implicit object InMemTestClass1Resolver extends IdResolver[String, TestClass1, String, RamId] {
override def toStorageId(id: String, version: Option[OffsetDateTime]): RamId =
RamId(category, id, version)
override val category: String = "test-class"
override val hasVersions = true
override def fromStorageId(key: RamId): String = key.id
override def version(v: TestClass1): OffsetDateTime = v.version
}
}
class InMemoryPersistenceStoreTest extends AkkaUnitTest with PersistenceStoreTest
with InMemoryStoreSerialization with InMemoryTestClass1Serialization {
def inMemoryStore: InMemoryPersistenceStore = {
val metrics = DummyMetrics
val store = new InMemoryPersistenceStore(metrics)
store.markOpen()
store
}
behave like basicPersistenceStore("InMemoryPersistenceStore", inMemoryStore)
behave like backupRestoreStore("InMemoryPersistenceStore", inMemoryStore)
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/core/storage/store/impl/InMemoryPersistenceStoreTest.scala | Scala | apache-2.0 | 1,394 |
package org.jetbrains.plugins.scala.lang.refactoring.introduceParameter
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.TextRange
import com.intellij.psi._
import com.intellij.refactoring.introduceParameter.{IntroduceParameterData, JavaExpressionWrapper}
import gnu.trove.TIntArrayList
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.changeInfo.ScalaChangeInfo
/**
* @author Nikolay.Tropin
*/
case class ScalaIntroduceParameterData(methodLike: ScMethodLike,
methodToSearchFor: ScMethodLike,
elems: Seq[PsiElement],
paramName: String,
possibleTypes: Array[ScType],
tp: ScType,
occurrences: Array[TextRange],
mainOcc: TextRange,
replaceAll: Boolean,
defaultArg: String,
functionalArgParams: Option[String] = None) extends IntroduceParameterData {
def getParametersToRemove: TIntArrayList = new TIntArrayList()
def getForcedType: PsiType = ScType.toPsi(tp, getProject, methodLike.getResolveScope)
def getScalaForcedType: ScType = tp
def isGenerateDelegate: Boolean = false
def isDeclareFinal: Boolean = false
def getReplaceFieldsWithGetters: Int = 0
def getParameterName: String = paramName
def getParameterInitializer =
new JavaExpressionWrapper(
JavaPsiFacade.getElementFactory(methodLike.getProject).createExpressionFromText(getParameterName, elems.head.getContext)
)
def getMethodToSearchFor: PsiMethod = methodToSearchFor
def getMethodToReplaceIn: PsiMethod = methodLike
def getProject: Project = methodLike.getProject
}
object isIntroduceParameter {
def unapply(scInfo: ScalaChangeInfo): Option[ScalaIntroduceParameterData] = {
scInfo.introducedParameterData
}
}
| LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterData.scala | Scala | apache-2.0 | 2,188 |
package io.buoyant.linkerd
import com.twitter.finagle.Path
import com.twitter.finagle.liveness.{FailureAccrualFactory, FailureAccrualPolicy}
import com.twitter.finagle.loadbalancer.LoadBalancerFactory._
import com.twitter.finagle.loadbalancer.buoyant.DeregisterLoadBalancerFactory
import com.twitter.finagle.loadbalancer.{FlagBalancerFactory, LoadBalancerFactory}
import com.twitter.finagle.ssl.client.SslClientConfiguration
import com.twitter.finagle.transport.Transport
import com.twitter.util.Duration
import io.buoyant.config.Parser
import io.buoyant.test.FunSuite
import scala.language.reflectiveCalls
class ClientTest extends FunSuite {
def parse(yaml: String): Client =
Parser.objectMapper(yaml, Seq(Seq(new FooFailureAccrual))).readValue[Client](yaml)
test("default applies to all clients") {
val client = parse("""|loadBalancer:
| kind: ewma""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/foo"))
val Param(fooBalancer) = fooParams[LoadBalancerFactory.Param]
val fooBal = fooBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(fooBal.toString == "P2CPeakEwma")
val barParams = client.clientParams.paramsFor(Path.read("/bar"))
val Param(barBalancer) = barParams[LoadBalancerFactory.Param]
val barBal = barBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(barBal.toString == "P2CPeakEwma")
}
test("per client config") {
val client = parse("""|kind: io.l5d.static
|configs:
|- prefix: "/#/io.l5d.fs/foo"
| loadBalancer:
| kind: ewma
|- prefix: "/#/io.l5d.fs/bar"
| loadBalancer:
| kind: aperture""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/foo"))
val Param(fooBalancer) = fooParams[LoadBalancerFactory.Param]
val fooBal = fooBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(fooBal.toString == "P2CPeakEwma")
val barParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/bar"))
val Param(barBalancer) = barParams[LoadBalancerFactory.Param]
val barBal = barBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(barBal.toString == "ApertureLeastLoaded")
// bas, not configured, gets default values
val basParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/bas"))
val Param(basBalancer) = basParams[LoadBalancerFactory.Param]
val basBal = basBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case flb: LoadBalancerFactory => flb
case _ => fail("Unexpected load balancer configured")
}
assert(basBal == FlagBalancerFactory)
}
test("later client configs override earlier ones") {
val client = parse("""|kind: io.l5d.static
|configs:
|- prefix: "/"
| loadBalancer:
| kind: ewma
|- prefix: "/#/io.l5d.fs/bar"
| loadBalancer:
| kind: aperture""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/foo"))
val Param(fooBalancer) = fooParams[LoadBalancerFactory.Param]
val fooBal = fooBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(fooBal.toString == "P2CPeakEwma")
val barParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/bar"))
val Param(barBalancer) = barParams[LoadBalancerFactory.Param]
val barBal = barBalancer match {
case DeregisterLoadBalancerFactory(lbf) => lbf
case _ => fail("Unexpected load balancer configured")
}
assert(barBal.toString == "ApertureLeastLoaded")
}
test("variable capture from prefix") {
val client = parse("""|kind: io.l5d.static
|configs:
|- prefix: "/#/io.l5d.fs/{service}"
| tls:
| commonName: "{service}.com"
|- prefix: "/#/io.l5d.fs/bar"
| tls:
| commonName: barbarbar""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/foo"))
val Transport.ClientSsl(Some(SslClientConfiguration(Some(fooCn), _, _, _, _, _))) =
fooParams[Transport.ClientSsl]
assert(fooCn == "foo.com")
val barParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/bar"))
val Transport.ClientSsl(Some(SslClientConfiguration(Some(barCn), _, _, _, _, _))) =
barParams[Transport.ClientSsl]
assert(barCn == "barbarbar")
val basParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.wrong/bas"))
assert(basParams[Transport.ClientSsl].sslClientConfiguration.isEmpty)
}
test("variable capture from prefix with fragment") {
val client = parse("""|kind: io.l5d.static
|configs:
|- prefix: "/#/io.l5d.serversets/s/*/staging/{service}:https"
| tls:
| commonName: "*.{service}.com"
|- prefix: "/#/io.l5d.serversets/s/{role}/prod/{service}:https"
| tls:
| commonName: "{role}.{service}.com"""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.serversets/s/nobody/staging/foo:https"))
val Transport.ClientSsl(Some(SslClientConfiguration(Some(fooCn), _, _, _, _, _))) =
fooParams[Transport.ClientSsl]
assert(fooCn == "*.foo.com")
val barParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.serversets/s/www/prod/bar:https"))
val Transport.ClientSsl(Some(SslClientConfiguration(Some(barCn), _, _, _, _, _))) =
barParams[Transport.ClientSsl]
assert(barCn == "www.bar.com")
val basParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.wrong/bas"))
assert(basParams[Transport.ClientSsl].sslClientConfiguration.isEmpty)
}
test("failure accrual") {
val client = parse("""|kind: io.l5d.static
|configs:
|- prefix: "/"
| failureAccrual:
| kind: io.l5d.foo
|- prefix: "/#/io.l5d.fs/foo"
| # this should inherit the failure accrual policy
| # from above
|""".stripMargin)
val fooParams = client.clientParams.paramsFor(Path.read("/#/io.l5d.fs/foo"))
val policy = fooParams[FailureAccrualFactory.Param]
.asInstanceOf[{ def failureAccrualPolicy: () => FailureAccrualPolicy }]
.failureAccrualPolicy()
assert(policy.toString == "FooFailureAccrual")
}
}
class FooFailureAccrual extends FailureAccrualInitializer {
val configClass = classOf[FooConfig]
override def configId = "io.l5d.foo"
}
class FooConfig extends FailureAccrualConfig {
override def policy =
() => new FailureAccrualPolicy {
def recordSuccess(): Unit = ???
def markDeadOnFailure(): Option[Duration] = ???
def revived(): Unit = ???
override def toString = "FooFailureAccrual"
override def name: String = "FooConfig"
override def show(): String = ""
}
}
| linkerd/linkerd | linkerd/core/src/test/scala/io/buoyant/linkerd/ClientTest.scala | Scala | apache-2.0 | 7,882 |
package x7c1.linen.modern.init.settings.order
import android.app.Activity
import android.support.v7.widget.LinearLayoutManager
import android.support.v7.widget.helper.ItemTouchHelper
import x7c1.linen.database.control.DatabaseHelper
import x7c1.linen.database.struct.HasAccountId
import x7c1.linen.glue.activity.ActivityControl
import x7c1.linen.glue.res.layout.SettingChannelOrderLayout
import x7c1.linen.glue.service.ServiceControl
import x7c1.linen.repository.channel.order.ChannelOrderUpdater
import x7c1.linen.repository.channel.subscribe.SubscribedChannel
import x7c1.linen.repository.loader.crawling.CrawlerContext
import x7c1.linen.scene.updater.ChannelNormalizerService
import x7c1.wheat.lore.resource.AdapterDelegatee
import x7c1.wheat.macros.intent.IntentExpander
import x7c1.wheat.macros.logger.Log
import x7c1.wheat.modern.decorator.Imports.toRichToolbar
import x7c1.wheat.modern.observer.recycler.order.{DraggableSequenceRoute, OnDragListenerToReload, OnDragListenerToSave, SequenceReloader}
class ChannelOrderDelegatee (
activity: Activity with ActivityControl with ServiceControl,
layout: SettingChannelOrderLayout,
providers: ChannelOrderRowProviders ){
private var targetAccountId: Option[Long] = None
def onCreate(): Unit = {
Log info s"[init]"
layout.toolbar onClickNavigation { _ =>
activity.finish()
}
val listener = new OnDragListenerToStyle append
OnDragListenerToSave(updater) append
OnDragListenerToReload(CrawlerContext, reloader) append
new OnDragListenerToNotify(activity)
val touchHelper = new ItemTouchHelper(route createCallback listener)
layout.channelList setLayoutManager new LinearLayoutManager(activity)
layout.channelList setAdapter new ChannelOrderRowAdapter(
delegatee = AdapterDelegatee.create(providers, route.loader.sequence),
onDragStart = holder => touchHelper startDrag holder
)
touchHelper attachToRecyclerView layout.channelList
layout.channelList addItemDecoration touchHelper
IntentExpander executeBy activity.getIntent
}
def onDestroy(): Unit = {
Log info s"[init]"
targetAccountId match {
case Some(id) =>
ChannelNormalizerService(activity) normalizeRanks id
case None =>
Log info "channel not reordered"
}
helper.close()
}
def showChannels(accountId: Long): Unit = {
updater.updateDefaultRanks(accountId)
reloader.reload(accountId) run CrawlerContext atLeft {
Log error _.detail
}
targetAccountId = Some(accountId)
}
private lazy val helper = {
new DatabaseHelper(activity)
}
private lazy val reloader = {
SequenceReloader.on[CrawlerContext].create(route.loader, layout.channelList)
}
private lazy val route = {
DraggableSequenceRoute[CrawlerContext, HasAccountId, SubscribedChannel](
db = helper.getReadableDatabase
)
}
private lazy val updater = {
ChannelOrderUpdater[SubscribedChannel](helper.getReadableDatabase)
}
}
| x7c1/Linen | linen-modern/src/main/scala/x7c1/linen/modern/init/settings/order/ChannelOrderDelegatee.scala | Scala | mit | 2,989 |
package fr.janalyse.ssh
class SSHTimeoutException(val stdout:String, val stderr:String) extends Exception("SSH Timeout") {
}
| dacr/jassh | src/main/scala/fr/janalyse/ssh/SSHTimeoutException.scala | Scala | apache-2.0 | 126 |
package com.twitter.finatra.httpclient
import scala.util.control.NoStackTrace
import com.twitter.finagle.http.Status
class HttpClientException(val status: Status, msg: String) extends Exception(msg) with NoStackTrace
| twitter/finatra | http-client/src/main/scala/com/twitter/finatra/httpclient/HttpClientException.scala | Scala | apache-2.0 | 219 |
package dpla.ingestion3.harvesters.oai.refactor
import com.holdenkarau.spark.testing.SharedSparkContext
import org.apache.spark.sql.SparkSession
import org.scalatest.{FlatSpec, FunSuite}
class WhitelistOaiRelationTest extends FlatSpec with SharedSparkContext {
private val oaiConfiguration = OaiConfiguration(Map(
"verb" -> "ListRecords",
"setlist" -> "ennie,meenie,miney,moe"
))
private val oaiMethods = new OaiMethods with Serializable {
override def parsePageIntoRecords(pageEither: Either[OaiError, OaiPage], removeDeleted: Boolean) = Seq(
Right(OaiRecord("a", "document", Seq()))
)
override def listAllRecordPages = listAllSetPages()
override def listAllSetPages() = Seq(
Right(OaiPage("1")),
Right(OaiPage("2"))
)
override def listAllRecordPagesForSet(setEither: Either[OaiError, OaiSet]) = listAllSetPages()
override def parsePageIntoSets(pageEither: Either[OaiError, OaiPage]) = Seq(
Right(OaiSet("1", "")), Right(OaiSet("moe", ""))
)
}
private lazy val sqlContext = SparkSession.builder().getOrCreate().sqlContext
private lazy val relation = new WhitelistOaiRelation(oaiConfiguration, oaiMethods)(sqlContext)
"A WhitelistOaiRelation" should "build a scan using OaiMethods" in {
val rdd = relation.buildScan()
assert(rdd.count === 8)
}
}
| dpla/ingestion3 | src/test/scala/dpla/ingestion3/harvesters/oai/refactor/WhitelistOaiRelationTest.scala | Scala | mit | 1,348 |
package org.apache.spark.asyspark.core.messages.master
/**
* Created by wjf on 16-9-22.
*/
private[asyspark] case class ServerList()
| CASISCAS/asyspark | src/main/scala/org/apache/spark/asyspark/core/messages/master/ServerList.scala | Scala | mit | 138 |
/*
* Copyright (C) 2012 reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.batch.storage
import java.io._
import org.openmole.core.communication.storage._
import org.openmole.core.preference.PreferenceLocation
import org.openmole.plugin.environment.batch.environment._
import org.openmole.plugin.environment.batch.refresh._
import org.openmole.tool.logger.JavaLogger
import squants.time.TimeConversions._
object StorageService extends JavaLogger {
val DirRegenerate = PreferenceLocation("StorageService", "DirRegenerate", Some(1 hours))
def rmFile[S](s: S, path: String, background: Boolean)(implicit services: BatchEnvironment.Services, storageInterface: StorageInterface[S]): Unit = {
def action = { rmFile(s, path); false }
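// When background is requested, hand the deletion to the JobManager as a RetryAction instead of running it inline.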
if (background) JobManager ! RetryAction(() ⇒ action)
else rmFile(s, path)
}
def rmDirectory[S](s: S, path: String, background: Boolean)(implicit services: BatchEnvironment.Services, storageInterface: HierarchicalStorageInterface[S]): Unit = {
def action = { rmDirectory(s, path); false }
if (background) JobManager ! RetryAction(() ⇒ action)
else rmDirectory(s, path)
}
def rmFile[S](s: S, directory: String)(implicit storageInterface: StorageInterface[S]): Unit =
storageInterface.rmFile(s, directory)
def rmDirectory[S](s: S, directory: String)(implicit hierarchicalStorageInterface: HierarchicalStorageInterface[S]): Unit =
hierarchicalStorageInterface.rmDir(s, directory)
def id[S](s: S)(implicit environmentStorage: EnvironmentStorage[S]) = environmentStorage.id(s)
def download[S](s: S, src: String, dest: File, options: TransferOptions = TransferOptions.default)(implicit storageService: StorageInterface[S]) =
storageService.download(s, src, dest, options)
def upload[S](s: S, src: File, dest: String, options: TransferOptions = TransferOptions.default)(implicit storageInterface: StorageInterface[S]) =
storageInterface.upload(s, src, dest, options)
def child[S](s: S, path: String, name: String)(implicit storageService: HierarchicalStorageInterface[S]) = storageService.child(s, path, name)
def exists[S](s: S, path: String)(implicit storageInterface: StorageInterface[S]) =
storageInterface.exists(s, path)
def uploadInDirectory[S: StorageInterface: HierarchicalStorageInterface](s: S, file: File, directory: String, transferOptions: TransferOptions) = {
val path = child(s, directory, StorageSpace.timedUniqName)
upload(s, file, path, transferOptions)
path
}
}
| openmole/openmole | openmole/plugins/org.openmole.plugin.environment.batch/src/main/scala/org/openmole/plugin/environment/batch/storage/StorageService.scala | Scala | agpl-3.0 | 3,157 |
/*
* Gain.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.mellite.util
import de.sciss.serial.{DataInput, DataOutput, ConstFormat, Writable}
import de.sciss.synth
object Gain {
private final val COOKIE = 0x4762 // was "Ga"
def immediate (decibels: Double): Gain = Gain(decibels, normalized = false)
def normalized(decibels: Double): Gain = Gain(decibels, normalized = true )
implicit object format extends ConstFormat[Gain] {
def write(v: Gain, out: DataOutput): Unit = v.write(out)
def read(in: DataInput): Gain = Gain.read(in)
}
def read(in: DataInput): Gain = {
val cookie = in.readShort()
require(cookie == COOKIE, s"Unexpected cookie $cookie (requires $COOKIE)")
val decibels = in.readDouble()
val normalized = in.readByte() != 0
Gain(decibels, normalized)
}
}
final case class Gain(decibels: Double, normalized: Boolean) extends Writable {
def linear: Double = {
import synth.Import._
decibels.dbAmp
}
def write(out: DataOutput): Unit = {
out.writeShort(Gain.COOKIE)
out.writeDouble(decibels)
out.writeByte(if (normalized) 1 else 0)
}
}
| Sciss/Mellite | app/src/main/scala/de/sciss/mellite/util/Gain.scala | Scala | agpl-3.0 | 1,372 |
//
// Logger.scala -- Scala object Logger
// Project OrcScala
//
// $Id: Logger.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Created by jthywiss on Aug 21, 2010.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.compile
/** Logger for the orc.compile subsystem
*
* @author jthywiss
*/
object Logger extends orc.util.Logger("orc.compile")
| laurenyew/cOrcS | src/orc/compile/Logger.scala | Scala | bsd-3-clause | 608 |
package org.bitcoins.keymanager.bip39
import org.bitcoins.core.wallet.keymanagement.{
KeyManagerParams,
KeyManagerUnlockError
}
import org.bitcoins.crypto.AesPassword
import org.bitcoins.keymanager.ReadMnemonicError.{
DecryptionError,
JsonParsingError
}
import org.bitcoins.keymanager._
import grizzled.slf4j.Logging
/** Represents a locked key manager, i.e. one whose mnemonic seed is still encrypted on disk. */
object BIP39LockedKeyManager extends Logging {
/** Unlock the wallet by decrypting the [[EncryptedMnemonic]] seed
* @param passphraseOpt the optional password used to decrypt the wallet seed
* @param bip39PasswordOpt the optional BIP39 passphrase applied when deriving the key manager
* @param kmParams parameters needed to create the key manager
*/
def unlock(
passphraseOpt: Option[AesPassword],
bip39PasswordOpt: Option[String],
kmParams: KeyManagerParams): Either[
KeyManagerUnlockError,
BIP39KeyManager] = {
logger.debug(s"Trying to unlock wallet with seedPath=${kmParams.seedPath}")
val resultE =
WalletStorage.decryptSeedFromDisk(kmParams.seedPath, passphraseOpt)
resultE match {
case Right(mnemonic: DecryptedMnemonic) =>
Right(
BIP39KeyManager.fromMnemonic(mnemonic.mnemonicCode,
kmParams,
bip39PasswordOpt,
mnemonic.creationTime))
case Right(xprv: DecryptedExtPrivKey) =>
val km = new BIP39KeyManager(xprv.xprv, kmParams, xprv.creationTime)
Right(km)
case Left(result) =>
result match {
case DecryptionError =>
logger.error(s"Bad password for unlocking wallet!")
Left(KeyManagerUnlockError.BadPassword)
case JsonParsingError(message) =>
logger.error(s"JSON parsing error when unlocking wallet: $message")
Left(KeyManagerUnlockError.JsonParsingError(message))
case ReadMnemonicError.NotFoundError =>
logger.error(
s"Encrypted mnemonic not found when unlocking the wallet!")
Left(KeyManagerUnlockError.MnemonicNotFound)
}
}
}
}
| bitcoin-s/bitcoin-s | key-manager/src/main/scala/org/bitcoins/keymanager/bip39/BIP39LockedKeyManager.scala | Scala | mit | 2,038 |
// Copyright 2019 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.twofishes.indexer.util
import io.fsq.common.scala.Identity._
import io.fsq.common.scala.Lists.Implicits._
object JaroWinkler {
// Returns a double from 0 to 1 (0 is totally different, 1 is exactly the same)
// Only looks at letters and ignores case.
// Based on https://github.com/rockymadden/stringmetric
def score(s1: String, s2: String): Double = {
// b is longer string
val List(a, b) = List(s1, s2)
.map(_.collect({ case c if c.isLetter => c.toLower }))
.sortBy(_.length)
if (a.isEmpty) {
0d
} else if (a =? b) {
1d
} else {
val aLen = a.length
val bLen = b.length
val window = math.abs(bLen / 2 - 1)
// matches is kind of like a.map(b.indexOf).filter(_ > -1)
// but it uses a sliding window in b to match letters in a, and each letter
// in b can only be used once.
val matches = (Vector[Int]() /: a.indices)((seen, i) => {
seen ++ ((i - window) to (i + window)).find(ii => {
b.isDefinedAt(ii) && a(i) =? b(ii) && !seen.has(ii)
})
})
if (matches.isEmpty) {
0d
} else {
// number of letters in common
val ms = matches.size.toDouble
// transposition score
val ts = matches.zip(matches.sorted).count({ case (i, j) => b(i) !=? b(j) }) / 2
val jaro = ((ms / aLen) + (ms / bLen) + ((ms - ts) / ms)) / 3
// jaro winkler
val prefixLen = if (b.startsWith(a)) aLen else a.zip(b).indexWhere({ case (c1, c2) => c1 !=? c2 })
jaro + (math.min(prefixLen, 4) * 0.1 * (1 - jaro))
}
}
}
}
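A minimal usage sketch (the demo object and inputs are hypothetical, not part of the original file):

import io.fsq.twofishes.indexer.util.JaroWinkler

object JaroWinklerExample extends App {
  // A single transposed pair ("th" vs "ht") still scores high, helped by the shared-prefix bonus.
  println(JaroWinkler.score("martha", "marhta"))
  // No letters match within the sliding window, so the score is 0.0.
  println(JaroWinkler.score("abc", "xyz"))
  // Identical letter sequences (case and non-letters are ignored) score exactly 1.0.
  println(JaroWinkler.score("Foo Bar", "foobar"))
}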
| foursquare/fsqio | src/jvm/io/fsq/twofishes/indexer/util/JaroWinkler.scala | Scala | apache-2.0 | 1,678 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.responsiblepeople
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.{JsSuccess, Json}
class ApprovalFlagsSpec extends PlaySpec {
"ApprovalFlags" when {
"all flags are defined" must {
"be complete" in {
val approvalFlags = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(true),
hasAlreadyPaidApprovalCheck = Some(false)
)
approvalFlags.isComplete() must be(true)
}
}
"not all flags are defined" must {
"no be complete" in {
val approvalFlags = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(true),
hasAlreadyPaidApprovalCheck = None
)
approvalFlags.isComplete() must be(false)
}
}
}
"ApprovalFlags Json" when {
"there are both flags provided" must {
"Read successfully" in {
val json = Json.parse(
"""{
| "hasAlreadyPassedFitAndProper": false,
| "hasAlreadyPaidApprovalCheck": false
|}""".stripMargin
)
val expected = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(false), hasAlreadyPaidApprovalCheck = Some(false))
ApprovalFlags.format.reads(json) must be(
JsSuccess(expected)
)
}
"write successfully" in {
val model = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(false), hasAlreadyPaidApprovalCheck = Some(false))
ApprovalFlags.format.writes(model) must be (
Json.obj(
"hasAlreadyPassedFitAndProper" -> false,
"hasAlreadyPaidApprovalCheck" -> false
)
)
}
}
"there is only one flag provided" must {
"Read successfully" in {
val expected = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(true))
val json = Json.parse(
"""{
| "hasAlreadyPassedFitAndProper": true
|}""".stripMargin
)
ApprovalFlags.format.reads(json) must be(
JsSuccess(expected)
)
}
"Read successfully using designated reader" in {
val expected = ApprovalFlags(hasAlreadyPassedFitAndProper = Some(true))
val json = Json.parse(
"""{
| "hasAlreadyPassedFitAndProper": true
|}""".stripMargin
)
ApprovalFlags.reads.reads(json) must be(
JsSuccess(expected)
)
}
"write successfully" in {
val expected = ApprovalFlags(hasAlreadyPaidApprovalCheck = Some(true))
ApprovalFlags.format.writes(expected) must be (
Json.obj(
"hasAlreadyPaidApprovalCheck" -> true
)
)
}
"write successfully using designated writer" in {
val expected = ApprovalFlags(hasAlreadyPaidApprovalCheck = Some(true))
ApprovalFlags.writes.writes(expected) must be (
Json.obj(
"hasAlreadyPaidApprovalCheck" -> true
)
)
}
}
}
}
| hmrc/amls-frontend | test/models/responsiblepeople/ApprovalFlagsSpec.scala | Scala | apache-2.0 | 3,570 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.lang.reflect.InvocationTargetException
import java.nio.ByteBuffer
import java.util.UUID
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hive.service.cli.OperationHandle
import org.apache.hive.service.cli.operation.{GetCatalogsOperation, Operation, OperationManager}
import org.apache.hive.service.cli.session.{HiveSession, HiveSessionImpl, SessionManager}
import org.apache.hive.service.rpc.thrift.{THandleIdentifier, TOperationHandle, TOperationType}
import org.apache.spark.SparkFunSuite
class HiveSessionImplSuite extends SparkFunSuite {
private var session: HiveSessionImpl = _
private var operationManager: OperationManagerMock = _
override def beforeAll() {
super.beforeAll()
val sessionManager = new SessionManager(null)
operationManager = new OperationManagerMock()
session = new HiveSessionImpl(
ThriftserverShimUtils.testedProtocolVersions.head,
"",
"",
new HiveConf(),
""
)
session.setSessionManager(sessionManager)
session.setOperationManager(operationManager)
session.open(Map.empty[String, String].asJava)
}
test("SPARK-31387 - session.close() closes all sessions regardless of thrown exceptions") {
val operationHandle1 = session.getCatalogs
val operationHandle2 = session.getCatalogs
session.close()
assert(operationManager.getCalledHandles.contains(operationHandle1))
assert(operationManager.getCalledHandles.contains(operationHandle2))
}
}
class GetCatalogsOperationMock(parentSession: HiveSession)
extends GetCatalogsOperation(parentSession) {
override def runInternal(): Unit = {}
override def getHandle: OperationHandle = {
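// Build a fresh OperationHandle backed by a random UUID so every mock operation is uniquely identifiable.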
val uuid: UUID = UUID.randomUUID()
val tHandleIdentifier: THandleIdentifier = new THandleIdentifier()
tHandleIdentifier.setGuid(getByteBufferFromUUID(uuid))
tHandleIdentifier.setSecret(getByteBufferFromUUID(uuid))
val tOperationHandle: TOperationHandle = new TOperationHandle()
tOperationHandle.setOperationId(tHandleIdentifier)
tOperationHandle.setOperationType(TOperationType.GET_TYPE_INFO)
tOperationHandle.setHasResultSetIsSet(false)
new OperationHandle(tOperationHandle)
}
private def getByteBufferFromUUID(uuid: UUID): Array[Byte] = {
val bb: ByteBuffer = ByteBuffer.wrap(new Array[Byte](16))
bb.putLong(uuid.getMostSignificantBits)
bb.putLong(uuid.getLeastSignificantBits)
bb.array
}
}
class OperationManagerMock extends OperationManager {
private val calledHandles: mutable.Set[OperationHandle] = new mutable.HashSet[OperationHandle]()
override def newGetCatalogsOperation(parentSession: HiveSession): GetCatalogsOperation = {
val operation = new GetCatalogsOperationMock(parentSession)
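// OperationManager keeps addOperation private, so the mock registers the operation via reflection.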
try {
val m = classOf[OperationManager].getDeclaredMethod("addOperation", classOf[Operation])
m.setAccessible(true)
m.invoke(this, operation)
} catch {
case e@(_: NoSuchMethodException | _: IllegalAccessException |
_: InvocationTargetException) =>
throw new RuntimeException(e)
}
operation
}
override def closeOperation(opHandle: OperationHandle): Unit = {
calledHandles.add(opHandle)
throw new RuntimeException
}
def getCalledHandles: mutable.Set[OperationHandle] = calledHandles
}
| spark-test/spark | sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveSessionImplSuite.scala | Scala | apache-2.0 | 4,234 |
package org.scalatest.examples.flatspec
import org.scalatest.FlatSpec
class SetSpec extends FlatSpec {
behavior of "An empty Set"
it should "have size 0" in {
assert(Set.empty.size === 0)
}
it should "produce NoSuchElementException when head is invoked" in {
intercept[NoSuchElementException] {
Set.empty.head
}
}
}
| dvpablo/scala-welcomepack | src/test/scala/ExampleSpec.scala | Scala | mit | 348 |
val list =
1 :: 2 :: 3 :: 4 :: 5 :: Nil
@scala.annotation.tailrec
def prnt(l: List[_]): Unit = l match {
case head :: tail =>
print(head + ", ")
prnt(tail)
case Nil => // do nothing
}
prnt(list)
| deanwampler/SeductionsOfScalaTutorial | code-examples/match-list-example2.scala | Scala | apache-2.0 | 213 |
/*
* Copyright (C) 2017-present, Chenai Nakam([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hobby.chenai.nakam.autotx.core.coin
import hobby.chenai.nakam.autotx.core.exch.AbsExchange
import hobby.chenai.nakam.lang.TypeBring
import hobby.chenai.nakam.util.NumFmt
/**
* @author Chenai Nakam([email protected])
* @version 1.0, 25/05/2017
*/
abstract class AbsCoinGroup {
groupSelf =>
// COIN is a path-dependent type scoped to a single object (e.g. BtcZone): BTC, CONG, etc. are instances
// of the BtcZone.COIN (or BtcZone.Token) class. New instances can be created with `new BtcZone.COIN()`;
// they are not instances of AbsCoinZone#AbsCoin, but the latter can be used in pattern matches, where it
// covers a wider range of types.
type COIN <: AbsCoin
type UNIT <: COIN with Unt
protected[coin] type GROUP <: AbsCoinGroup
def unitStd: UNIT
def make(count: Long, unit: UNIT): COIN
override def toString = s"GROUP[${unitStd.name}]"
abstract class AbsCoin(private[core] val count: Long) extends NumFmt
with Equals with Ordered[COIN] with TypeBring[UNIT, COIN, AbsCoinGroup#AbsCoin] {
require(count >= 0, s"[`Coin.count`溢出: $count].")
val isCash: Boolean
final val group: GROUP = groupSelf.asInstanceOf[GROUP]
def unit: UNIT
def value: Double = this / unit
def +(that: COIN): COIN = make(this.count + that.count, unit)
def -(that: COIN): COIN = make(this.count - that.count, unit)
// Since toString.formatted also rounds, rounding here as well would compound the error:
// e.g. for 6.45 FEN the rounded count = 65 (one extra digit is kept beyond the smallest unit,
// i.e. 64.5, and round(64.5) = 65), so the final toString would compute round(6.5) = 7.
// Therefore we simply truncate the decimals with toLong here.
def *(x: Double): COIN = make((this.count * x).toLong /*round*/ , unit)
def /(x: Double): COIN = make((this.count / x).toLong /*round*/ , unit)
def /(that: COIN): Double = this.count.toDouble / that.count
/**
 * Normalize the unit. Needed because some predefined constants (used as enum values) serve
 * as their own standard unit.
 */
def std: COIN = if (unit eq unitStd) this else mod(unitStd)
/**
 * Convert to the unit given as the argument.
 */
// Note the parameter type of `unit`: in some cases, even when we know two values share a
// path-dependent type under the same object (e.g. the CnyZone singleton), this cannot be
// expressed with type parameters, so the compiler cannot prove they are the same
// path-dependent type. A wider type plus a cast is therefore used here, which means an
// exception is thrown at runtime if the value does not in fact belong to the same object path.
def mod(unit: UNIT): COIN = if (unit eq this.unit) this else make(count, unit)
// protected def requireGroupSame(unit: AbsCoinGroup#Unt): Unit = {
// require(unit.group == group, s"unit group mismatch. require: $group, found: ${unit.group}")
// }
override def compare(that: COIN) = this.count compare that.count
def min(that: COIN): COIN = if (this < that) this else that.mod(unit)
def max(that: COIN): COIN = if (this > that) this else that.mod(unit)
// COIN 会被擦除,子类实现。
// override def equals(any: scala.Any)
// override def canEqual(that: Any)
override def hashCode() = 41 * (41 + count.hashCode()) + group.hashCode
/**
 * Convert between different currencies.
 *
 * @param that     The target unit. Note that this parameter's type differs from the others:
 *                 the usual COIN parameter type pins down a single path-dependent type,
 *                 whereas this parameter accepts several different path-dependent types.
 * @param exchange The exchange platform.
 * @return If the conversion succeeds, the result has the same type as `that`; otherwise this
 *         object is returned unchanged. The return type is therefore not fixed.
 */
def to(that: AbsCoinGroup#Unt)(implicit exchange: AbsExchange)
: AbsCoinGroup#AbsCoin = if (that.group eq this.group) mod(that) else exchange.applyExch(this, that)
// protected def format: String = value formatted s"%.${unit.decmlFmt}f"
override final def toString = if (unit eq this) unit.name else formatted(-1, unit.decmlFmt)(null)
override protected def unitNameFmt = unit.nameFmt
}
// Cannot be named `Unit`: that would clash with the standard library type.
trait Unt extends AbsCoin {
override final def unit = this
def name: String
def nameFmt: String = name
def decmlFmt: Int = decimals(count)
def decimals(n: Double): Int = if (n == 1) 0 else 1 + decimals(n / 10)
def <<(coin: AbsCoinGroup#AbsCoin): COIN = coin mod unit
}
}
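A comment-only illustration of the path-dependence described above; BtcZone and CnyZone are hypothetical concrete groups, not defined in this file:

// val btc: BtcZone.COIN = BtcZone.unitStd * 2.0  // ok: COIN of the same enclosing object
// val cny: CnyZone.COIN = btc                    // does not compile: a different path-dependent type
// btc to CnyZone.unitStd                         // cross-group conversion via an implicit AbsExchange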
| chenakam/AutoTX | src/main/scala/hobby/chenai/nakam/autotx/core/coin/AbsCoinGroup.scala | Scala | apache-2.0 | 5,104 |
package suggestions
package gui
import scala.language.reflectiveCalls
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{ Try, Success, Failure }
import scala.swing.Reactions.Reaction
import scala.swing.event.Event
import rx.lang.scala.Observable
import rx.lang.scala.subscriptions.Subscription
/** Basic facilities for dealing with Swing-like components.
*
* Instead of committing to a particular widget implementation
* functionality has been factored out here to deal only with
* abstract types like `ValueChanged` or `TextField`.
* Extractors for abstract events like `ValueChanged` have also
* been factored out into corresponding abstract `val`s.
*/
trait SwingApi {
type ValueChanged <: Event
val ValueChanged: {
def unapply(x: Event): Option[TextField]
}
type ButtonClicked <: Event
val ButtonClicked: {
def unapply(x: Event): Option[Button]
}
type TextField <: {
def text: String
def subscribe(r: Reaction): Unit
def unsubscribe(r: Reaction): Unit
}
type Button <: {
def subscribe(r: Reaction): Unit
def unsubscribe(r: Reaction): Unit
}
implicit class TextFieldOps(field: TextField) {
/** Returns a stream of text field values entered in the given text field.
*
* @param field the text field
* @return an observable with a stream of text field updates
*/
def textValues: Observable[String] = Observable((
observer => {
val reaction = Reaction({
case event: ValueChanged => observer.onNext(field.text)
case _ => ()
})
field.subscribe(reaction)
Subscription {
field.unsubscribe(reaction)
}
}))
}
implicit class ButtonOps(button: Button) {
/** Returns a stream of button clicks.
*
* @param field the button
* @return an observable with a stream of buttons that have been clicked
*/
def clicks: Observable[Button] = Observable((
observer => {
val reaction = Reaction({
case event: ButtonClicked => {
observer.onNext(button)
}
case _ => ()
})
button.subscribe(reaction)
Subscription {
button.unsubscribe(reaction)
}
}))
}
}
| juliengrimault/Scala-Class | suggestions/src/main/scala/suggestions/gui/SwingApi.scala | Scala | mit | 2,373 |
object Foo {
def test(s: String) = {
(s: Seq[Char]) match {
case Seq('f', 'o', 'o', ' ', rest1 @ _*) =>
rest1
case Seq('b', 'a', 'r', ' ', ' ', rest2 @ _*) =>
rest2
case _ =>
s
}
}
}
| loskutov/intellij-scala | testdata/scalacTests/pos/t2945.scala | Scala | apache-2.0 | 248 |
/*******************************************************************************/
/* */
/* Copyright (C) 2016 by Max Lv <[email protected]> */
/* Copyright (C) 2016 by Mygod Studio <[email protected]> */
/* */
/* This program is free software: you can redistribute it and/or modify */
/* it under the terms of the GNU General Public License as published by */
/* the Free Software Foundation, either version 3 of the License, or */
/* (at your option) any later version. */
/* */
/* This program is distributed in the hope that it will be useful, */
/* but WITHOUT ANY WARRANTY; without even the implied warranty of */
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the */
/* GNU General Public License for more details. */
/* */
/* You should have received a copy of the GNU General Public License */
/* along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* */
/*******************************************************************************/
package com.github.shadowsocks.database
import java.net.URLEncoder
import java.util.Locale
import android.os.Binder
import android.util.Base64
import com.j256.ormlite.field.{DataType, DatabaseField}
class Profile {
@DatabaseField(generatedId = true)
var id: Int = 0
@DatabaseField
var name: String = "Untitled"
@DatabaseField
var host: String = ""
// hopefully hashCode = mHandle doesn't change, currently this is true from KitKat to Nougat
@DatabaseField
var localPort: Int = 1080 + Binder.getCallingUserHandle.hashCode
@DatabaseField
var remotePort: Int = 8388
@DatabaseField
var password: String = ""
@DatabaseField
var method: String = "aes-256-cfb"
@DatabaseField
var route: String = "all"
@DatabaseField
var remoteDns: String = "8.8.8.8"
@DatabaseField
var proxyApps: Boolean = false
@DatabaseField
var bypass: Boolean = false
@DatabaseField
var udpdns: Boolean = false
@DatabaseField
var auth: Boolean = false
@DatabaseField
var ipv6: Boolean = false
@DatabaseField(dataType = DataType.LONG_STRING)
var individual: String = ""
@DatabaseField
var tx: Long = 0
@DatabaseField
var rx: Long = 0
@DatabaseField
val date: java.util.Date = new java.util.Date()
@DatabaseField
var userOrder: Long = _
@DatabaseField
var kcp: Boolean = false
@DatabaseField
var kcpPort: Int = 8399
@DatabaseField
var kcpcli: String = "--crypt none --mode normal --mtu 1200 --nocomp --dscp 46 --parityshard 0"
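// Serialized shape (illustrative): ss://<base64("method[-auth]:password@host:remotePort")>#<url-encoded name>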
override def toString: String = "ss://" + Base64.encodeToString("%s%s:%s@%s:%d".formatLocal(Locale.ENGLISH,
method, if (auth) "-auth" else "", password, host, remotePort).getBytes, Base64.NO_PADDING | Base64.NO_WRAP) +
'#' + URLEncoder.encode(name, "utf-8")
def isMethodUnsafe: Boolean = "table".equalsIgnoreCase(method) || "rc4".equalsIgnoreCase(method)
}
| anyforever/shadowsocks-android | src/main/scala/com/github/shadowsocks/database/Profile.scala | Scala | gpl-3.0 | 3,434 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.box
import uk.gov.hmrc.ct.box.retriever.BoxRetriever
trait Calculated[T, C <: BoxRetriever] {
def calculate(boxRetriever: C): T
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/box/Calculated.scala | Scala | apache-2.0 | 765 |
/*
* Copyright 2012 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.webui.jclouds
import org.jclouds.providers.ProviderMetadata
import org.jclouds.apis.ApiMetadata
import io.fabric8.webui.Services
import io.fabric8.webui.jclouds.Utils._
/**
* @author Stan Lewis
*/
object Utils {
def asResource(x: ProviderMetadata) : ComputeProviderResource = new ComputeProviderResource(x.getId, "provider")
def asResource(x: ApiMetadata) : ComputeProviderResource = new ComputeProviderResource(x.getId, "api")
}
| alexeev/jboss-fuse-mirror | sandbox/fmc/fmc-rest/src/main/scala/org/fusesource/fabric/webui/jclouds/Utils.scala | Scala | apache-2.0 | 1,085 |
package com.dataintuitive.luciuscore
package api
import model.v4._
import AnnotatedIds._
import com.dataintuitive.luciuscore.TestData
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should._
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.SparkSession
class AnnotatedIdsTest extends AnyFlatSpec with Matchers with TestData {
val signature = List("MELK", "BRCA1")
// Select the first 2 ids from the generated dataset
val ids = testData.take(2).map(_.id).toList
}
| data-intuitive/LuciusCore | src/test/scala/com/dataintuitive/luciuscore/api/AnnotatedIdsTest.scala | Scala | apache-2.0 | 515 |
package com.airbnb.common.ml.util
import com.typesafe.scalalogging.slf4j.Logger
import org.junit.Test
class ScalaLoggingTest {
@Test def testLogger(): Unit = {
// Create a dummy object that can check if it has a valid logger
object Dummy extends ScalaLogging {
def hasLogger: Boolean = {
logger.isInstanceOf[Logger]
}
}
assert(Dummy.hasLogger)
}
}
| TDDFT/aerosolve | airlearner/airlearner-utils/src/test/scala/com/airbnb/common/ml/util/ScalaLoggingTest.scala | Scala | apache-2.0 | 393 |
package com.twitter.finatra.http.tests.integration.deprecated.main
class DeprecatedServer {
}
| syamantm/finatra | http/src/test/scala/com/twitter/finatra/http/tests/integration/deprecated/main/DeprecatedServer.scala | Scala | apache-2.0 | 96 |
package models
import play.api.libs.json.Json
import reactivemongo.bson.BSONObjectID
/**
* Created by ravi on 16/10/15.
*/
case class User(_id: BSONObjectID, email: String, password: String, firstName: String, lastName: String)
object User {
import play.modules.reactivemongo.json.BSONFormats._
implicit val userFormat = Json.format[User]
}
case class UserLogin(email: String, password: String)
object UserLogin {
import play.modules.reactivemongo.json.BSONFormats._
implicit val userFormat = Json.format[UserLogin]
}
| ravi006/Blogger-play-mongo | app/models/User.scala | Scala | apache-2.0 | 539 |
package de.htw.pgerhard.domain.users
import scala.concurrent.{ExecutionContext, Future}
class UserCommandService(
private val userRepository: UserRepository)(
implicit
private val ec: ExecutionContext) {
def registerUser(handle: String, name: String): Future[User] =
userRepository.registerUser(handle, name)
def setUserName(userId: String, name: String): Future[User] =
userRepository.setUserName(userId, name)
def addSubscription(userId: String, subscriptionId: String): Future[User] =
userRepository.addSubscription(userId, subscriptionId)
def removeSubscription(userId: String, subscriptionId: String): Future[User] =
userRepository.removeSubscription(userId, subscriptionId)
def deleteUser(userId: String): Future[Boolean] =
userRepository.deleteUser(userId)
}
| peter-gerhard/my-twitter-playground | src/main/scala/de/htw/pgerhard/domain/users/UserCommandService.scala | Scala | apache-2.0 | 814 |
package SqlDsl
case class SqlTable(name: String)
| fadeddata/sqldsl | src/main/scala/SqlDsl/SqlTable.scala | Scala | mit | 50 |
package com.twitter.finagle.memcached.unit
import com.twitter.finagle.memcached._
import com.twitter.finagle.memcached.protocol.Value
import org.scalatest.FunSuite
import org.scalatestplus.mockito.MockitoSugar
import scala.collection.immutable
class GetResultTest extends FunSuite with MockitoSugar {
class Context {
val value1 = mock[Value]
val value2 = mock[Value]
val ex1 = mock[Exception]
val ex2 = mock[Exception]
val empty = GetResult()
val left = GetResult(
hits = Map("h1" -> value1),
misses = immutable.Set("m1"),
failures = Map("f1" -> ex1)
)
val right = GetResult(
hits = Map("h2" -> value2),
misses = immutable.Set("m2"),
failures = Map("f2" -> ex2)
)
}
test("add together hits/misses/failures with ++") {
val context = new Context
import context._
info("both empty")
assert(empty ++ empty == empty)
info("non-empty left, empty right")
assert(left ++ empty == left)
info("Empty left, non-empty right")
assert(empty ++ right == right)
info("non-empty left, non-empty right")
assert(
left ++ right == GetResult(
hits = Map("h1" -> value1, "h2" -> value2),
misses = immutable.Set("m1", "m2"),
failures = Map("f1" -> ex1, "f2" -> ex2)
)
)
}
test("merged of empty seq produces empty GetResult") {
val context = new Context
assert(GetResult.merged(Seq[GetResult]()) == GetResult())
}
test("merged of single item produces that item") {
val context = new Context
val getResult = GetResult()
assert(GetResult.merged(Seq(getResult)) == getResult)
}
test("merge is the same as ++") {
val context = new Context
val subResults = (1 to 10) map { i =>
GetResult(
hits = Map("h" + i -> mock[Value]),
misses = immutable.Set("m" + i),
failures = Map("f" + i -> mock[Exception])
)
}
assert(GetResult.merged(subResults) == (subResults.reduceLeft { _ ++ _ }))
}
}
|
luciferous/finagle
|
finagle-memcached/src/test/scala/com/twitter/finagle/memcached/unit/GetResultTest.scala
|
Scala
|
apache-2.0
| 2,014 |
// Copyright 2010 Twitter, Inc.
package com.twitter.concurrent
import java.util.concurrent.ConcurrentSkipListMap
@deprecated("use guava's Multimaps.synchronizedMultimap", "6.2.x")
class ConcurrentMultiMap[K <% Ordered[K], V <% Ordered[V]] {
class Container(k: K, v: Option[V])
// TODO: extending tuples is deprecated and will be removed in the next version.
// Remove this inheritance in the next major version
extends Tuple2[K, Option[V]](k, v)
with Comparable[Container]
{
def key = k
def value = v
def isDefined = value.isDefined
def compareTo(that: Container) = this.key.compare(that.key) match {
case 0 if ( this.isDefined && that.isDefined) => this.value.get.compare(that.value.get)
case 0 if (!this.isDefined && !that.isDefined) => 0
case 0 if (!this.isDefined) => -1
case 0 if (!that.isDefined) => 1
case x => x
}
}
val underlying = new ConcurrentSkipListMap[Container, Unit]
def +=(kv:(K, V)) {
val (k, v) = kv
underlying.putIfAbsent(new Container(k, Some(v)), ())
}
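  /** Returns every value stored under `k`, in ascending order, by walking the skip list upward from a sentinel (valueless) container. */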
def get(k:K):List[V] = {
def traverse(entry: Container): List[V] = {
val nextEntry = underlying.higherKey(entry)
if (nextEntry == null || nextEntry.key != k) {
Nil
} else {
assert(nextEntry.value.isDefined)
nextEntry.value.get :: traverse(nextEntry)
}
}
traverse(new Container(k, None))
}
}
|
travisbrown/util
|
util-core/src/main/scala/com/twitter/concurrent/ConcurrentMultiMap.scala
|
Scala
|
apache-2.0
| 1,461 |
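Despite the deprecation, usage is straightforward; a small sketch:

val index = new ConcurrentMultiMap[String, Int]
index += ("a" -> 1)
index += ("a" -> 2)
index += ("b" -> 3)
assert(index.get("a") == List(1, 2)) // values under one key come back in ascending order
assert(index.get("missing") == Nil)  // absent keys yield an empty list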
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.cache
import javax.inject.Inject
import scala.concurrent.duration.{ Duration, _ }
import scala.concurrent.{ Await, Future }
import scala.reflect.ClassTag
/**
* A cache API that uses synchronous calls rather than async calls. Useful when you know you have a fast in-memory cache.
*/
trait SyncCacheApi {
/**
* Set a value into the cache.
*
* @param key Item key.
* @param value Item value.
* @param expiration Expiration time.
*/
def set(key: String, value: Any, expiration: Duration = Duration.Inf): Unit
/**
* Remove a value from the cache
*/
def remove(key: String): Unit
/**
* Retrieve a value from the cache, or set it from a default function.
*
* @param key Item key.
* @param expiration expiration period in seconds.
* @param orElse The default function to invoke if the value was not found in cache.
*/
def getOrElseUpdate[A: ClassTag](key: String, expiration: Duration = Duration.Inf)(orElse: => A): A
/**
* Retrieve a value from the cache for the given type
*
* @param key Item key.
* @return result as Option[T]
*/
def get[T: ClassTag](key: String): Option[T]
}
/**
* A cache API. This class is deprecated and will be removed in future versions.
*/
@deprecated("Use SyncCacheApi or AsyncCacheApi instead", "2.6.0")
trait CacheApi {
/**
* Set a value into the cache.
*
* @param key Item key.
* @param value Item value.
* @param expiration Expiration time.
*/
def set(key: String, value: Any, expiration: Duration = Duration.Inf): Unit
/**
* Remove a value from the cache
*/
def remove(key: String): Unit
/**
* Retrieve a value from the cache, or set it from a default function.
*
* @param key Item key.
* @param expiration expiration period in seconds.
* @param orElse The default function to invoke if the value was not found in cache.
*/
def getOrElse[A: ClassTag](key: String, expiration: Duration = Duration.Inf)(orElse: => A): A
/**
* Retrieve a value from the cache for the given type
*
* @param key Item key.
* @return result as Option[T]
*/
def get[T: ClassTag](key: String): Option[T]
}
/**
* A SyncCacheApi that wraps an AsyncCacheApi
*/
class DefaultSyncCacheApi @Inject() (cacheApi: AsyncCacheApi) extends SyncCacheApi with CacheApi {
protected val awaitTimeout: Duration = 5.seconds
def set(key: String, value: Any, expiration: Duration): Unit = {
Await.result(cacheApi.set(key, value, expiration), awaitTimeout)
}
def get[T: ClassTag](key: String): Option[T] = {
Await.result(cacheApi.get(key), awaitTimeout)
}
def getOrElseUpdate[A: ClassTag](key: String, expiration: Duration)(orElse: => A): A = {
Await.result(cacheApi.getOrElseUpdate(key, expiration)(Future.successful(orElse)), awaitTimeout)
}
@deprecated("Use getOrElseUpdate", "2.6.0")
def getOrElse[A: ClassTag](key: String, expiration: Duration)(orElse: => A): A = {
getOrElseUpdate(key, expiration)(orElse)
}
def remove(key: String): Unit = {
Await.result(cacheApi.remove(key), awaitTimeout)
}
}
|
ktoso/playframework
|
framework/src/play-cache/src/main/scala/play/api/cache/SyncCacheApi.scala
|
Scala
|
apache-2.0
| 3,188 |
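A usage sketch against the SyncCacheApi surface, assuming an injected instance named cache:

import scala.concurrent.duration._

def expensiveLookup(id: String): String = s"value-for-$id" // stand-in for a slow computation

val v = cache.getOrElseUpdate[String]("user.42", 10.minutes)(expensiveLookup("42"))
assert(cache.get[String]("user.42") == Some(v)) // present until the entry expires
cache.remove("user.42")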
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package search
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.util.Computable
import com.intellij.psi.PsiMember
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.psi.search.searches.AnnotatedElementsSearch
import com.intellij.util.{Processor, QueryExecutor}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAnnotation, ScAnnotations}
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys
/**
* User: Alexander Podkhalyuzin
* Date: 10.01.2009
*/
class ScalaAnnotatedMembersSearcher extends QueryExecutor[PsiMember, AnnotatedElementsSearch.Parameters] {
def execute(p: AnnotatedElementsSearch.Parameters, consumer: Processor[_ >: PsiMember]): Boolean = {
val annClass = p.getAnnotationClass
assert(annClass.isAnnotationType, "Annotation type should be passed to annotated members search")
val annotationFQN = annClass.qualifiedName
assert(annotationFQN != null)
ApplicationManager.getApplication.runReadAction(new Computable[Boolean] {
def compute: Boolean = {
val scope = p.getScope match {
case searchScope: GlobalSearchScope => searchScope
case _ => return true
}
import ScalaIndexKeys._
val iter = ANNOTATED_MEMBER_KEY.elements(annClass.name, scope, classOf[ScAnnotation])(annClass.getProject)
.iterator
while (iter.hasNext) {
val annotation = iter.next
annotation.getParent match {
case ann: ScAnnotations => ann.getParent match {
case member: PsiMember => if (!consumer.process(member)) return false
case _ =>
}
case _ =>
}
}
true
}
})
true
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/search/ScalaAnnotatedMembersSearcher.scala
|
Scala
|
apache-2.0
| 1,891 |
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt
import sbt.internal.util.{ complete, AttributeEntry, AttributeKey, LineRange, MessageOnlyException, RangePosition, Settings }
import java.io.File
import compiler.{ Eval, EvalImports }
import complete.DefaultParsers.validID
import Def.{ ScopedKey, Setting }
import Scope.GlobalScope
import sbt.internals.parser.SbtParser
import scala.annotation.tailrec
import sbt.io.IO
/**
* This file is responsible for compiling the .sbt files used to configure sbt builds.
*
* Compilation is done in three phases:
*
* 1. Parsing high-level constructs (definitions, settings, imports)
* 2. Compiling scala code into local .class files
* 3. Evaluating the expressions and obtaining in-memory objects of the results (Setting[_] instances, or val references).
*
*
*/
object EvaluateConfigurations {
type LazyClassLoaded[T] = ClassLoader => T
private[sbt] case class TrackedEvalResult[T](generated: Seq[File], result: LazyClassLoaded[T])
/**
   * This represents the parsed expressions in a build.sbt file, as well as where they were defined.
*/
private[this] final class ParsedFile(val imports: Seq[(String, Int)], val definitions: Seq[(String, LineRange)], val settings: Seq[(String, LineRange)])
/** The keywords we look for when classifying a string as a definition. */
private[this] val DefinitionKeywords = Seq("lazy val ", "def ", "val ")
/**
* Using an evaluating instance of the scala compiler, a sequence of files and
* the default imports to use, this method will take a ClassLoader of sbt-classes and
* return a parsed, compiled + evaluated [[LoadedSbtFile]]. The result has
* raw sbt-types that can be accessed and used.
*/
@deprecated("We no longer merge build.sbt files together unless they are in the same directory.", "0.13.6")
def apply(eval: Eval, srcs: Seq[File], imports: Seq[String]): LazyClassLoaded[LoadedSbtFile] =
{
val loadFiles = srcs.sortBy(_.getName) map { src => evaluateSbtFile(eval, src, IO.readLines(src), imports, 0) }
loader => (LoadedSbtFile.empty /: loadFiles) { (loaded, load) => loaded merge load(loader) }
}
/**
* Reads a given .sbt file and evaluates it into a sequence of setting values.
*
* Note: This ignores any non-Setting[_] values in the file.
*/
def evaluateConfiguration(eval: Eval, src: File, imports: Seq[String]): LazyClassLoaded[Seq[Setting[_]]] =
evaluateConfiguration(eval, src, IO.readLines(src), imports, 0)
/**
* Parses a sequence of build.sbt lines into a [[ParsedFile]]. The result contains
   * a breakdown of all imports, settings and definitions.
*
* @param builtinImports The set of import statements to add to those parsed in the .sbt file.
*/
private[this] def parseConfiguration(file: File, lines: Seq[String], builtinImports: Seq[String], offset: Int): ParsedFile =
{
val (importStatements, settingsAndDefinitions) = splitExpressions(file, lines)
val allImports = builtinImports.map(s => (s, -1)) ++ addOffset(offset, importStatements)
val (definitions, settings) = splitSettingsDefinitions(addOffsetToRange(offset, settingsAndDefinitions))
new ParsedFile(allImports, definitions, settings)
}
/**
* Evaluates a parsed sbt configuration file.
*
* @param eval The evaluating scala compiler instance we use to handle evaluating scala configuration.
* @param file The file we've parsed
* @param imports The default imports to use in this .sbt configuration
   * @param lines The lines of the configuration we'd like to evaluate.
*
* @return Just the Setting[_] instances defined in the .sbt file.
*/
def evaluateConfiguration(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[Seq[Setting[_]]] =
{
val l = evaluateSbtFile(eval, file, lines, imports, offset)
loader => l(loader).settings
}
/**
* Evaluates a parsed sbt configuration file.
*
* @param eval The evaluating scala compiler instance we use to handle evaluating scala configuration.
* @param file The file we've parsed
* @param imports The default imports to use in this .sbt configuration.
*
* @return A function which can take an sbt classloader and return the raw types/configuration
* which was compiled/parsed for the given file.
*/
private[sbt] def evaluateSbtFile(eval: Eval, file: File, lines: Seq[String], imports: Seq[String], offset: Int): LazyClassLoaded[LoadedSbtFile] =
{
    // TODO - Store the file on the LoadedSbtFile (or the parent dir) so we can accurately
    //        detect which project the project manipulations should be applied to.
val name = file.getPath
val parsed = parseConfiguration(file, lines, imports, offset)
val (importDefs, definitions) =
if (parsed.definitions.isEmpty) (Nil, DefinedSbtValues.empty) else {
val definitions = evaluateDefinitions(eval, name, parsed.imports, parsed.definitions, Some(file))
val imp = BuildUtil.importAllRoot(definitions.enclosingModule :: Nil)
val projs = (loader: ClassLoader) => definitions.values(loader).map(p => resolveBase(file.getParentFile, p.asInstanceOf[Project]))
(imp, DefinedSbtValues(definitions))
}
val allImports = importDefs.map(s => (s, -1)) ++ parsed.imports
val dslEntries = parsed.settings map {
case (dslExpression, range) =>
evaluateDslEntry(eval, name, allImports, dslExpression, range)
}
eval.unlinkDeferred()
// Tracks all the files we generated from evaluating the sbt file.
val allGeneratedFiles = (definitions.generated ++ dslEntries.flatMap(_.generated))
loader => {
val projects =
definitions.values(loader).collect {
case p: Project => resolveBase(file.getParentFile, p)
}
val (settingsRaw, manipulationsRaw) =
dslEntries map (_.result apply loader) partition {
case internals.ProjectSettings(_) => true
case _ => false
}
val settings = settingsRaw flatMap {
case internals.ProjectSettings(settings) => settings
case _ => Nil
}
val manipulations = manipulationsRaw map {
case internals.ProjectManipulation(f) => f
}
// TODO -get project manipulations.
new LoadedSbtFile(settings, projects, importDefs, manipulations, definitions, allGeneratedFiles)
}
}
/** move a project to be relative to this file after we've evaluated it. */
private[this] def resolveBase(f: File, p: Project) = p.copy(base = IO.resolve(f, p.base))
@deprecated("Will no longer be public.", "0.13.6")
def flatten(mksettings: Seq[ClassLoader => Seq[Setting[_]]]): ClassLoader => Seq[Setting[_]] =
loader => mksettings.flatMap(_ apply loader)
def addOffset(offset: Int, lines: Seq[(String, Int)]): Seq[(String, Int)] =
lines.map { case (s, i) => (s, i + offset) }
def addOffsetToRange(offset: Int, ranges: Seq[(String, LineRange)]): Seq[(String, LineRange)] =
ranges.map { case (s, r) => (s, r shift offset) }
/**
* The name of the class we cast DSL "setting" (vs. definition) lines to.
*/
val SettingsDefinitionName = {
val _ = classOf[sbt.internals.DslEntry] // this line exists to try to provide a compile-time error when the following line needs to be changed
"sbt.internals.DslEntry"
}
/**
* This actually compiles a scala expression which represents a sbt.internals.DslEntry.
*
* @param eval The mechanism to compile and evaluate Scala expressions.
* @param name The name for the thing we're compiling
* @param imports The scala imports to have in place when we compile the expression
* @param expression The scala expression we're compiling
* @param range The original position in source of the expression, for error messages.
*
* @return A method that given an sbt classloader, can return the actual [[internals.DslEntry]] defined by
* the expression, and the sequence of .class files generated.
*/
private[sbt] def evaluateDslEntry(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): TrackedEvalResult[internals.DslEntry] = {
    // TODO - Should we try to namespace these between .sbt files? If they hash to the same value, they may actually be
// exactly the same setting, so perhaps we don't care?
val result = try {
eval.eval(expression, imports = new EvalImports(imports, name), srcName = name, tpeName = Some(SettingsDefinitionName), line = range.start)
} catch {
case e: sbt.compiler.EvalException => throw new MessageOnlyException(e.getMessage)
}
// TODO - keep track of configuration classes defined.
TrackedEvalResult(result.generated,
loader => {
val pos = RangePosition(name, range shift 1)
result.getValue(loader).asInstanceOf[internals.DslEntry].withPos(pos)
})
}
/**
* This actually compiles a scala expression which represents a Seq[Setting[_]], although the
* expression may be just a single setting.
*
* @param eval The mechanism to compile and evaluate Scala expressions.
* @param name The name for the thing we're compiling
* @param imports The scala imports to have in place when we compile the expression
* @param expression The scala expression we're compiling
* @param range The original position in source of the expression, for error messages.
*
* @return A method that given an sbt classloader, can return the actual Seq[Setting[_]] defined by
* the expression.
*/
@deprecated("Build DSL now includes non-Setting[_] type settings.", "0.13.6") // Note: This method is used by the SET command, so we may want to evaluate that sucker a bit.
def evaluateSetting(eval: Eval, name: String, imports: Seq[(String, Int)], expression: String, range: LineRange): LazyClassLoaded[Seq[Setting[_]]] =
{
evaluateDslEntry(eval, name, imports, expression, range).result andThen {
case internals.ProjectSettings(values) => values
case _ => Nil
}
}
private[this] def isSpace = (c: Char) => Character isWhitespace c
private[this] def fstS(f: String => Boolean): ((String, Int)) => Boolean = { case (s, i) => f(s) }
private[this] def firstNonSpaceIs(lit: String) = (_: String).view.dropWhile(isSpace).startsWith(lit)
private[this] def or[A](a: A => Boolean, b: A => Boolean): A => Boolean = in => a(in) || b(in)
/** Configures the use of the old sbt parser. */
private[sbt] def useOldParser: Boolean =
sys.props.get("sbt.parser.simple").exists(java.lang.Boolean.parseBoolean)
/**
* Splits a set of lines into (imports, expressions). That is,
* anything on the right of the tuple is a scala expression (definition or setting).
*/
private[sbt] def splitExpressions(file: File, lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) =
{
if (useOldParser) splitExpressions(lines)
else {
val split = SbtParser(file, lines)
// TODO - Look at pulling the parsed expression trees from the SbtParser and stitch them back into a different
// scala compiler rather than re-parsing.
(split.imports, split.settings)
}
}
@deprecated("This method is no longer part of the public API.", "0.13.7")
def splitExpressions(lines: Seq[String]): (Seq[(String, Int)], Seq[(String, LineRange)]) = {
val blank = (_: String).forall(isSpace)
val isImport = firstNonSpaceIs("import ")
val comment = firstNonSpaceIs("//")
val blankOrComment = or(blank, comment)
val importOrBlank = fstS(or(blankOrComment, isImport))
val (imports, settings) = lines.zipWithIndex span importOrBlank
(imports filterNot fstS(blankOrComment), groupedLines(settings, blank, blankOrComment))
}
@deprecated("This method is deprecated and no longer used.", "0.13.7")
def groupedLines(lines: Seq[(String, Int)], delimiter: String => Boolean, skipInitial: String => Boolean): Seq[(String, LineRange)] =
{
val fdelim = fstS(delimiter)
@tailrec def group0(lines: Seq[(String, Int)], accum: Seq[(String, LineRange)]): Seq[(String, LineRange)] =
if (lines.isEmpty) accum.reverse
else {
val start = lines dropWhile fstS(skipInitial)
val (next, tail) = start.span { case (s, _) => !delimiter(s) }
val grouped = if (next.isEmpty) accum else (next.map(_._1).mkString("\\n"), LineRange(next.head._2, next.last._2 + 1)) +: accum
group0(tail, grouped)
}
group0(lines, Nil)
}
private[this] def splitSettingsDefinitions(lines: Seq[(String, LineRange)]): (Seq[(String, LineRange)], Seq[(String, LineRange)]) =
lines partition { case (line, range) => isDefinition(line) }
private[this] def isDefinition(line: String): Boolean =
{
val trimmed = line.trim
DefinitionKeywords.exists(trimmed startsWith _)
}
private[this] def extractedValTypes: Seq[String] =
Seq(classOf[Project], classOf[InputKey[_]], classOf[TaskKey[_]], classOf[SettingKey[_]]).map(_.getName)
private[this] def evaluateDefinitions(eval: Eval, name: String, imports: Seq[(String, Int)], definitions: Seq[(String, LineRange)], file: Option[File]): compiler.EvalDefinitions =
{
val convertedRanges = definitions.map { case (s, r) => (s, r.start to r.end) }
eval.evalDefinitions(convertedRanges, new EvalImports(imports, name), name, file, extractedValTypes)
}
}
object Index {
def taskToKeyMap(data: Settings[Scope]): Map[Task[_], ScopedKey[Task[_]]] =
{
// AttributeEntry + the checked type test 'value: Task[_]' ensures that the cast is correct.
// (scalac couldn't determine that 'key' is of type AttributeKey[Task[_]] on its own and a type match still required the cast)
val pairs = for (scope <- data.scopes; AttributeEntry(key, value: Task[_]) <- data.data(scope).entries) yield (value, ScopedKey(scope, key.asInstanceOf[AttributeKey[Task[_]]])) // unclear why this cast is needed even with a type test in the above filter
pairs.toMap[Task[_], ScopedKey[Task[_]]]
}
def allKeys(settings: Seq[Setting[_]]): Set[ScopedKey[_]] =
settings.flatMap(s => if (s.key.key.isLocal) Nil else s.key +: s.dependencies).filter(!_.key.isLocal).toSet
def attributeKeys(settings: Settings[Scope]): Set[AttributeKey[_]] =
settings.data.values.flatMap(_.keys).toSet[AttributeKey[_]]
def stringToKeyMap(settings: Set[AttributeKey[_]]): Map[String, AttributeKey[_]] =
stringToKeyMap0(settings)(_.rawLabel) ++ stringToKeyMap0(settings)(_.label)
private[this] def stringToKeyMap0(settings: Set[AttributeKey[_]])(label: AttributeKey[_] => String): Map[String, AttributeKey[_]] =
{
val multiMap = settings.groupBy(label)
val duplicates = multiMap collect { case (k, xs) if xs.size > 1 => (k, xs.map(_.manifest)) } collect { case (k, xs) if xs.size > 1 => (k, xs) }
if (duplicates.isEmpty)
multiMap.collect { case (k, v) if validID(k) => (k, v.head) } toMap
else
sys.error(duplicates map { case (k, tps) => "'" + k + "' (" + tps.mkString(", ") + ")" } mkString ("Some keys were defined with the same name but different types: ", ", ", ""))
}
private[this]type TriggerMap = collection.mutable.HashMap[Task[_], Seq[Task[_]]]
def triggers(ss: Settings[Scope]): Triggers[Task] =
{
val runBefore = new TriggerMap
val triggeredBy = new TriggerMap
for ((_, amap) <- ss.data; AttributeEntry(_, value: Task[_]) <- amap.entries) {
val as = value.info.attributes
update(runBefore, value, as get Keys.runBefore)
update(triggeredBy, value, as get Keys.triggeredBy)
}
val onComplete = Keys.onComplete in GlobalScope get ss getOrElse { () => () }
new Triggers[Task](runBefore, triggeredBy, map => { onComplete(); map })
}
private[this] def update(map: TriggerMap, base: Task[_], tasksOpt: Option[Seq[Task[_]]]): Unit =
for (tasks <- tasksOpt; task <- tasks)
map(task) = base +: map.getOrElse(task, Nil)
}
|
dansanduleac/sbt
|
main/src/main/scala/sbt/EvaluateConfigurations.scala
|
Scala
|
bsd-3-clause
| 16,310 |
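To make the old parser's import/expression split concrete, a small sketch using the (deprecated) public splitExpressions overload; the expected results follow from the grouping logic above (LineRange comes from sbt.internal.util):

val lines = Seq(
  "import sbt.Keys._",
  "",
  "name := \"demo\"",
  "",
  "lazy val core = project"
)
val (imports, expressions) = EvaluateConfigurations.splitExpressions(lines)
// imports     == Seq(("import sbt.Keys._", 0))
// expressions == Seq(("name := \"demo\"", LineRange(2, 3)),
//                    ("lazy val core = project", LineRange(4, 5)))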
package io.vamp.common.http
import com.typesafe.scalalogging.Logger
import dispatch._
import org.json4s._
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization._
import org.slf4j.LoggerFactory
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect._
object RestClient {
private val logger = Logger(LoggerFactory.getLogger(RestClient.getClass))
object Method extends Enumeration {
val HEAD, GET, POST, PUT, DELETE, PATCH, TRACE, OPTIONS = Value
}
val acceptEncodingIdentity: (String, String) = "accept-encoding" -> "identity"
val jsonHeaders: List[(String, String)] = List("Accept" -> "application/json", "Content-Type" -> "application/json")
def get[A](url: String, headers: List[(String, String)] = jsonHeaders)
(implicit executor: ExecutionContext, mf: scala.reflect.Manifest[A], formats: Formats = DefaultFormats): Future[A] = {
http[A](Method.GET, url, None, headers)
}
def post[A](url: String, body: Any, headers: List[(String, String)] = jsonHeaders)
(implicit executor: ExecutionContext, mf: scala.reflect.Manifest[A], formats: Formats = DefaultFormats): Future[A] = {
http[A](Method.POST, url, body, headers)
}
def put[A](url: String, body: Any, headers: List[(String, String)] = jsonHeaders)
(implicit executor: ExecutionContext, mf: scala.reflect.Manifest[A], formats: Formats = DefaultFormats): Future[A] = {
http[A](Method.PUT, url, body, headers)
}
  def delete(url: String, headers: List[(String, String)] = jsonHeaders)(implicit executor: ExecutionContext) = {
    http(Method.DELETE, url, None, headers)
  }
def http[A](method: Method.Value, url: String, body: Any, headers: List[(String, String)] = jsonHeaders)
(implicit executor: ExecutionContext, mf: scala.reflect.Manifest[A], formats: Formats = DefaultFormats): Future[A] = {
val requestWithUrl = dispatch.url(url).setMethod(method.toString)
val requestWithHeaders = headers.foldLeft(requestWithUrl)((http, header) => http.setHeader(header._1, header._2))
val requestWithBody = bodyAsString(body) match {
case Some(some) =>
logger.trace(s"req [${method.toString} $url] - $some")
requestWithHeaders.setBody(some)
case None =>
logger.trace(s"req [${method.toString} $url]")
requestWithHeaders
}
    if (classTag[A].runtimeClass == classOf[Nothing] || classTag[A].runtimeClass == classOf[String]) {
      Http(requestWithBody OK as.String).map { string =>
        logger.trace(s"rsp [${method.toString} $url] - $string")
        string.asInstanceOf[A]
      }
} else {
Http(requestWithBody OK dispatch.as.json4s.Json).map { json =>
logger.trace(s"rsp [${method.toString} $url] - ${compact(render(json))}")
json.extract[A](formats, mf)
}
}
}
private def bodyAsString(body: Any)(implicit formats: Formats): Option[String] = body match {
case string: String => Some(string)
case Some(string: String) => Some(string)
case Some(some: AnyRef) => Some(write(some))
case any: AnyRef if any != null && any != None => Some(write(any))
case any if any != null && any != None => Some(any.toString)
case _ => None
}
}
|
BanzaiMan/vamp-common
|
src/main/scala/io/vamp/common/http/RestClient.scala
|
Scala
|
apache-2.0
| 3,413 |
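A usage sketch with a hypothetical endpoint and payload type; get and post unmarshal the JSON response through json4s:

import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

case class Info(name: String, version: String)

val info: Future[Info]    = RestClient.get[Info]("http://localhost:8080/info")
val created: Future[Info] = RestClient.post[Info]("http://localhost:8080/info", Info("vamp", "0.1"))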
package de.m7w3.signal.account
import java.security.InvalidKeyException
import de.m7w3.signal.store.SignalDesktopProtocolStore
import org.whispersystems.libsignal.IdentityKeyPair
import org.whispersystems.libsignal.ecc.Curve
import org.whispersystems.libsignal.state.{PreKeyRecord, SignedPreKeyRecord}
import org.whispersystems.libsignal.util.Medium
import org.whispersystems.signalservice.api.SignalServiceAccountManager
import scala.util.{Success, Try}
case class PreKeyRefreshResult(oneTimePreKeys: List[PreKeyRecord], lastResortKey: PreKeyRecord, signedPreKeyRecord: SignedPreKeyRecord)
trait PreKeyRefresher {
def accountManager: SignalServiceAccountManager
def refreshPreKeys(store: SignalDesktopProtocolStore): PreKeyRefreshResult = {
import scala.collection.JavaConverters.seqAsJavaListConverter
val oneTimePreKeys = generatePreKeys(store)
val lastResortKey = getOrGenerateLastResortPreKey(store)
val signedPreKeyRecord = generateSignedPreKey(store.getIdentityKeyPair(), store)
accountManager.setPreKeys(store.getIdentityKeyPair().getPublicKey, lastResortKey, signedPreKeyRecord, oneTimePreKeys.asJava)
PreKeyRefreshResult(oneTimePreKeys, lastResortKey, signedPreKeyRecord)
}
def generatePreKeys(store: SignalDesktopProtocolStore): List[PreKeyRecord] = {
val records = (0 until PreKeyRefresher.PREKEY_BATCH_SIZE).map(i => {
val preKeyId = store.preKeyStore.incrementAndGetPreKeyId()
val keyPair = Curve.generateKeyPair()
val record = new PreKeyRecord(preKeyId, keyPair)
store.storePreKey(preKeyId, record)
record
})
records.toList
}
def getOrGenerateLastResortPreKey(store: SignalDesktopProtocolStore): PreKeyRecord = {
Try(store.containsPreKey(Medium.MAX_VALUE)) match {
case Success(true) => store.loadPreKey(Medium.MAX_VALUE)
case _ => {
store.removePreKey(Medium.MAX_VALUE)
val keyPair = Curve.generateKeyPair()
val record = new PreKeyRecord(Medium.MAX_VALUE, keyPair)
store.storePreKey(Medium.MAX_VALUE, record)
record
}
}
}
def generateSignedPreKey(identityKeyPair: IdentityKeyPair, store: SignalDesktopProtocolStore): SignedPreKeyRecord = {
try {
val nextSignedPreKeyId = store.signedPreKeyStore.getSignedPreKeyId
val keyPair = Curve.generateKeyPair()
val signature = Curve.calculateSignature(identityKeyPair.getPrivateKey, keyPair.getPublicKey.serialize())
val record = new SignedPreKeyRecord(nextSignedPreKeyId, System.currentTimeMillis(), keyPair, signature)
store.storeSignedPreKey(nextSignedPreKeyId, record)
store.signedPreKeyStore.incrementAndGetSignedPreKeyId()
record
} catch {
case e: InvalidKeyException => throw new AssertionError(e)
}
}
}
object PreKeyRefresher {
val PREKEY_BATCH_SIZE = 100
}
|
ayoub-benali/signal-desktop-client
|
src/main/scala/de/m7w3/signal/account/PreKeyRefresher.scala
|
Scala
|
apache-2.0
| 2,848 |
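A hypothetical wiring sketch: the trait only requires an account manager, and the store is passed per refresh (myAccountManager and protocolStore are assumed to exist):

val refresher = new PreKeyRefresher {
  val accountManager: SignalServiceAccountManager = myAccountManager
}
val result: PreKeyRefreshResult = refresher.refreshPreKeys(protocolStore)
println(s"uploaded ${result.oneTimePreKeys.size} one-time pre-keys " +
  s"(batch size ${PreKeyRefresher.PREKEY_BATCH_SIZE})")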
package glint.partitioning
/**
* An abstract partition
*
* @param index The index of this partition
*/
abstract class Partition(val index: Int) extends Serializable {
/**
* Checks whether given global key falls within this partition
*
* @param key The key
* @return True if the global key falls within this partition, false otherwise
*/
@inline
def contains(key: Long): Boolean
/**
* Converts given global key to a continuous local array index [0, 1, ...]
*
* @param key The global key
* @return The local index
*/
@inline
def globalToLocal(key: Long): Int
/**
* Computes the size of this partition
*
* @return The size of this partition
*/
def size: Int
}
|
rjagerman/glint
|
src/main/scala/glint/partitioning/Partition.scala
|
Scala
|
mit
| 742 |
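A minimal concrete subclass for illustration only (glint's real partitioners may differ): a contiguous range [start, end) mapped onto local indices 0, 1, ...:

class RangePartition(index: Int, val start: Long, val end: Long) extends Partition(index) {
  @inline def contains(key: Long): Boolean = key >= start && key < end
  @inline def globalToLocal(key: Long): Int = (key - start).toInt
  def size: Int = (end - start).toInt
}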
import sbt._
class Plugins(info: ProjectInfo) extends PluginDefinition(info) {
val untypedRepo = "Untyped Repo" at "http://repo.untyped.com"
val closureCompiler = "com.untyped" % "sbt-closure" % "0.1"
}
|
tommycli/stargame
|
project/plugins/Plugins.scala
|
Scala
|
agpl-3.0
| 208 |
package net.janvsmachine.fpinscala
import scala.annotation.tailrec
object Chapter2 {
// Exercise 2.2.
// A compact version, using some of the niceties in the standard Scala library.
def isSortedC[A](as: Array[A], ordered: (A, A) => Boolean): Boolean = {
val pairs = as.view.zip(as drop 1)
pairs.forall(p => p._1 == p._2 || ordered(p._1, p._2))
}
// A more verbose but fundamental version.
def isSorted[A](as: Array[A], ordered: (A, A) => Boolean): Boolean = {
@tailrec
def loop(n: Int): Boolean = {
if (as.length - n < 2)
true
else (as(n) == as(n + 1) || ordered(as(n), as(n + 1))) && loop(n + 1)
}
loop(0)
}
def partial1[A, B, C](a: A, f: (A, B) => C): B => C =
(b: B) => f(a, b)
// Exercise 2.3.
def curry[A, B, C](f: (A, B) => C): A => (B => C) =
a => b => f(a, b)
// Exercise 2.4.
def uncurry[A, B, C](f: A => B => C): (A, B) => C =
(a, b) => f(a)(b)
// Exercise 2.5.
def compose[A, B, C](f: B => C, g: A => B): A => C =
a => f(g(a))
}
|
stettix/fp-in-scala
|
src/main/scala/net/janvsmachine/fpinscala/Chapter2.scala
|
Scala
|
apache-2.0
| 1,037 |
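A few worked checks for the combinators above:

val lt = (a: Int, b: Int) => a < b
assert(Chapter2.isSorted(Array(1, 2, 3, 4), lt))
assert(!Chapter2.isSorted(Array(3, 1, 2), lt))

val add: (Int, Int) => Int = _ + _
val curried = Chapter2.curry(add)                 // Int => (Int => Int)
assert(curried(1)(2) == 3)
assert(Chapter2.uncurry(curried)(1, 2) == 3)
assert(Chapter2.compose((b: Int) => b * 2, (a: Int) => a + 1)(3) == 8) // (3 + 1) * 2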
package ru.org.codingteam.keter.game.objects.equipment.bodyparts
import ru.org.codingteam.keter.game.objects.equipment.{Capability, ManipulatorCapability, WalkCapability}
abstract class Bodypart() {
def name: String
def health: Double
def provides: Set[Capability]
}
case class Leg(name: String,
health: Double,
provides: Set[Capability] = Set(WalkCapability)) extends Bodypart
case class Arm(name: String,
health: Double,
provides: Set[Capability] = Set(ManipulatorCapability)) extends Bodypart
case class Head(name: String,
health: Double,
provides: Set[Capability] = Set()) extends Bodypart
case class Torso(name: String,
health: Double,
provides: Set[Capability] = Set()) extends Bodypart
|
codingteam/Keter
|
src/main/scala/ru/org/codingteam/keter/game/objects/equipment/bodyparts/Bodypart.scala
|
Scala
|
mit
| 833 |
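A small sketch assembling a body and collecting its capabilities (the capability objects come from the same equipment package):

val parts: Seq[Bodypart] = Seq(
  Head("head", 100.0),
  Torso("torso", 200.0),
  Arm("left arm", 80.0), Arm("right arm", 80.0),
  Leg("left leg", 90.0), Leg("right leg", 90.0)
)
assert(parts.flatMap(_.provides).toSet == Set(WalkCapability, ManipulatorCapability))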
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Explicitly
import org.scalactic.StringNormalizations._
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import collection.GenTraversable
import SharedHelpers._
import matchers.should.Matchers._
class NoElementsOfContainMatcherDeciderSpec extends funspec.AnyFunSpec with Explicitly {
private val prettifier = Prettifier.default
val mapTrimmed: Uniformity[(Int, String)] =
new Uniformity[(Int, String)] {
def normalized(s: (Int, String)): (Int, String) = (s._1, s._2.trim)
def normalizedCanHandle(b: Any) =
b match {
case (_: Int, _: String) => true
case _ => false
}
def normalizedOrSame(b: Any): Any =
b match {
case (k: Int, v: String) => normalized((k, v))
case _ => b
}
}
// SKIP-SCALATESTJS,NATIVE-START
val javaMapTrimmed: Uniformity[java.util.Map.Entry[Int, String]] =
new Uniformity[java.util.Map.Entry[Int, String]] {
def normalized(s: java.util.Map.Entry[Int, String]): java.util.Map.Entry[Int, String] = Entry(s.getKey, s.getValue.trim)
def normalizedCanHandle(b: Any) =
b match {
case entry: java.util.Map.Entry[_, _] =>
(entry.getKey, entry.getValue) match {
case (_: Int, _: String) => true
case _ => false
}
case _ => false
}
def normalizedOrSame(b: Any): Any =
b match {
case entry: java.util.Map.Entry[_, _] =>
(entry.getKey, entry.getValue) match {
case (k: Int, v: String) => normalized(Entry(k, v))
case _ => b
}
case _ => b
}
}
// SKIP-SCALATESTJS,NATIVE-END
val incremented: Uniformity[Int] =
new Uniformity[Int] {
var count = 0
def normalized(s: Int): Int = {
count += 1
s + count
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[Int]
def normalizedOrSame(b: Any): Any =
b match {
case i: Int => normalized(i)
case _ => b
}
}
val mapIncremented: Uniformity[(Int, String)] =
new Uniformity[(Int, String)] {
var count = 0
def normalized(s: (Int, String)): (Int, String) = {
count += 1
(s._1 + count, s._2)
}
def normalizedCanHandle(b: Any) =
b match {
case (_: Int, _: String) => true
case _ => false
}
def normalizedOrSame(b: Any): Any =
b match {
case (k: Int, v: String) => normalized((k, v))
case _ => b
}
}
val appended: Uniformity[String] =
new Uniformity[String] {
var count = 0
def normalized(s: String): String = {
count += 1
s + count
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
val mapAppended: Uniformity[(Int, String)] =
new Uniformity[(Int, String)] {
var count = 0
def normalized(s: (Int, String)): (Int, String) = {
count += 1
(s._1, s._2 + count)
}
def normalizedCanHandle(b: Any) =
b match {
case (_: Int, _: String) => true
case _ => false
}
def normalizedOrSame(b: Any): Any =
b match {
case (k: Int, v: String) => normalized(b.asInstanceOf[(Int, String)])
case _ => b
}
}
// SKIP-SCALATESTJS,NATIVE-START
val javaMapAppended: Uniformity[java.util.Map.Entry[Int, String]] =
new Uniformity[java.util.Map.Entry[Int, String]] {
var count = 0
def normalized(s: java.util.Map.Entry[Int, String]): java.util.Map.Entry[Int, String] = {
count += 1
Entry(s.getKey, s.getValue + count)
}
def normalizedCanHandle(b: Any) =
b match {
case entry: java.util.Map.Entry[_, _] =>
(entry.getKey, entry.getValue) match {
case (_: Int, _: String) => true
case _ => false
}
case _ => false
}
def normalizedOrSame(b: Any): Any =
b match {
case entry: java.util.Map.Entry[_, _] =>
(entry.getKey, entry.getValue) match {
case (k: Int, v: String) =>
normalized(Entry(k, v))
case _ => b
}
case _ => b
}
}
// SKIP-SCALATESTJS,NATIVE-END
val lowerCaseEquality =
new Equality[String] {
def areEqual(left: String, right: Any) =
left.toLowerCase == (right match {
case s: String => s.toLowerCase
case other => other
})
}
val mapLowerCaseEquality =
new Equality[(Int, String)] {
def areEqual(left: (Int, String), right: Any) =
right match {
case t2: Tuple2[_, _] =>
left._1 == t2._1 &&
left._2.toLowerCase == (t2._2 match {
case s: String => s.toLowerCase
case other => other
})
case right => left == right
}
}
// SKIP-SCALATESTJS,NATIVE-START
val javaMapLowerCaseEquality =
new Equality[java.util.Map.Entry[Int, String]] {
def areEqual(left: java.util.Map.Entry[Int, String], right: Any) =
right match {
case entry: java.util.Map.Entry[_, _] =>
left.getKey == entry.getKey &&
left.getValue.toLowerCase == (entry.getValue match {
case s: String => s.toLowerCase
case other => other
})
case right => left == right
}
}
// SKIP-SCALATESTJS,NATIVE-END
val reverseEquality =
new Equality[String] {
def areEqual(left: String, right: Any) =
left.reverse == (right match {
case s: String => s.toLowerCase
case other => other
})
}
val mapReverseEquality =
new Equality[(Int, String)] {
def areEqual(left: (Int, String), right: Any) =
right match {
case t2: Tuple2[_, _] =>
left._1 == t2._1 &&
left._2.reverse == (t2._2 match {
case s: String => s.toLowerCase
case other => other
})
case right => left == right
}
}
// SKIP-SCALATESTJS,NATIVE-START
val javaMapReverseEquality =
new Equality[java.util.Map.Entry[Int, String]] {
def areEqual(left: java.util.Map.Entry[Int, String], right: Any) =
right match {
case entry: java.util.Map.Entry[_, _] =>
left.getKey == entry.getKey &&
left.getValue.reverse == (entry.getValue match {
case s: String => s.toLowerCase
case other => other
})
case right => left == right
}
}
// SKIP-SCALATESTJS,NATIVE-END
describe("noElementsOf ") {
def checkShouldContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
e.message should be (Some(FailureMessages.containedAtLeastOneElementOf(prettifier, left, right)))
e.failedCodeFileName should be (Some("NoElementsOfContainMatcherDeciderSpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
def checkShouldNotContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(FailureMessages.didNotContainAtLeastOneElementOf(prettifier, left, right)))
e.failedCodeFileName should be (Some("NoElementsOfContainMatcherDeciderSpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
it("should take specified normalization when 'should contain' is used") {
(List("1", "2", "3") should contain noElementsOf Seq("1", "2", "3")) (after being appended)
(Set("1", "2", "3") should contain noElementsOf Seq("1", "2", "3")) (after being appended)
(Array("1", "2", "3") should contain noElementsOf Seq("1", "2", "3")) (after being appended)
(Map(1 -> "one", 2 -> " two", 3 -> "three") should contain noElementsOf Seq(1 -> "one", 2 -> " two", 3 -> "three")) (after being mapAppended)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("1", "2", "3") should contain noElementsOf Seq("1", "2", "3")) (after being appended)
(javaSet("1", "2", "3") should contain noElementsOf Seq("1", "2", "3")) (after being appended)
(javaMap(Entry(1, "one"), Entry(2, " two"), Entry(3, "three")) should contain noElementsOf Seq(Entry(1, "one"), Entry(2, " two"), Entry(3, "three"))) (after being javaMapAppended)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take specified normalization when 'should not contain' is used") {
(List("1 ", " 2", "3 ") should not contain noElementsOf (Seq(" 1", "2 ", " 3"))) (after being trimmed)
(Set("1 ", " 2", "3 ") should not contain noElementsOf (Seq(" 1", "2 ", " 3"))) (after being trimmed)
(Array("1 ", " 2", "3 ") should not contain noElementsOf (Seq(" 1", "2 ", " 3"))) (after being trimmed)
(Map(1 -> "one ", 2 -> " two", 3 -> "three ") should not contain noElementsOf (Seq(1 -> " one", 2 -> "two ", 3 -> " three"))) (after being mapTrimmed)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("1 ", " 2", "3 ") should not contain noElementsOf (Seq(" 1", "2 ", " 3"))) (after being trimmed)
(javaSet("1 ", " 2", "3 ") should not contain noElementsOf (Seq(" 1", "2 ", " 3"))) (after being trimmed)
(javaMap(Entry(1, "one "), Entry(2, " two"), Entry(3, "three ")) should not contain noElementsOf (Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three")))) (after being javaMapTrimmed)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with specified normalization") {
val left1 = List("1 ", " 2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should contain noElementsOf Seq(" 1", "2 ", " 3")) (after being trimmed)
}
checkShouldContainStackDepth(e1, left1, Seq(" 1", "2 ", " 3"), thisLineNumber - 2)
val left2 = Set("1 ", " 2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should contain noElementsOf Seq(" 1", "2 ", " 3")) (after being trimmed)
}
checkShouldContainStackDepth(e2, left2, Seq(" 1", "2 ", " 3"), thisLineNumber - 2)
val left3 = Array("1 ", " 2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should contain noElementsOf Seq(" 1", "2 ", " 3")) (after being trimmed)
}
checkShouldContainStackDepth(e3, left3, Seq(" 1", "2 ", " 3"), thisLineNumber - 2)
val left4 = Map(1 -> "one ", 2 -> " two", 3 -> "three ")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should contain noElementsOf Seq(1 -> " one", 2 -> "two ", 3 -> " three")) (after being mapTrimmed)
}
checkShouldContainStackDepth(e4, left4, Seq(1 -> " one", 2 -> "two ", 3 -> " three"), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList("1 ", " 2", "3 ")
val e5 = intercept[exceptions.TestFailedException] {
(left5 should contain noElementsOf Seq(" 1", "2 ", " 3")) (after being trimmed)
}
checkShouldContainStackDepth(e5, left5, Seq(" 1", "2 ", " 3"), thisLineNumber - 2)
val left6 = javaMap(Entry(1, "one "), Entry(2, " two"), Entry(3, "three "))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should contain noElementsOf Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three"))) (after being javaMapTrimmed)
}
checkShouldContainStackDepth(e6, left6, Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with specified normalization") {
val left1 = List("1", "2", "3")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should not contain noElementsOf (Seq("1", "2", "3"))) (after being appended)
}
checkShouldNotContainStackDepth(e1, left1, Seq("1", "2", "3"), thisLineNumber - 2)
val left2 = Set("1", "2", "3")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should not contain noElementsOf (Seq("1", "2", "3"))) (after being appended)
}
checkShouldNotContainStackDepth(e2, left2, Seq("1", "2", "3"), thisLineNumber - 2)
val left3 = Array("1", "2", "3")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should not contain noElementsOf (Seq("1", "2", "3"))) (after being appended)
}
checkShouldNotContainStackDepth(e3, left3, Seq("1", "2", "3"), thisLineNumber - 2)
val left4 = Map(1 -> "one", 2 -> "two", 3 -> "three")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should not contain noElementsOf (Seq(1 -> "one", 2 -> "two", 3 -> "three"))) (after being mapAppended)
}
checkShouldNotContainStackDepth(e4, left4, Seq(1 -> "one", 2 -> "two", 3 -> "three"), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList("1", "2", "3")
val e5 = intercept[exceptions.TestFailedException] {
(left5 should not contain noElementsOf (Seq("1", "2", "3"))) (after being appended)
}
checkShouldNotContainStackDepth(e5, left5, Seq("1", "2", "3"), thisLineNumber - 2)
val left6 = javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should not contain noElementsOf (Seq(Entry(1, "one"), Entry(2, "two"), Entry(3, "three")))) (after being javaMapAppended)
}
checkShouldNotContainStackDepth(e6, left6, Seq(Entry(1, "one"), Entry(2, "two"), Entry(3, "three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take specified equality and normalization when 'should contain' is used") {
(List("one ", " two", "three ") should contain noElementsOf Seq(" one", "two ", " three")) (decided by reverseEquality afterBeing trimmed)
(Set("one ", " two", "three ") should contain noElementsOf Seq(" one", "two ", " three")) (decided by reverseEquality afterBeing trimmed)
(Array("one ", " two", "three ") should contain noElementsOf Seq(" one", "two ", " three")) (decided by reverseEquality afterBeing trimmed)
(Map(1 -> "one ", 2 -> " two", 3 -> "three ") should contain noElementsOf Seq(1 -> " one", 2 -> "two ", 3 -> " three")) (decided by mapReverseEquality afterBeing mapTrimmed)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("one ", " two", "three ") should contain noElementsOf Seq(" one", "two ", " three")) (decided by reverseEquality afterBeing trimmed)
(javaSet("one ", " two", "three ") should contain noElementsOf Seq(" one", "two ", " three")) (decided by reverseEquality afterBeing trimmed)
(javaMap(Entry(1, "one "), Entry(2, " two"), Entry(3, "three ")) should contain noElementsOf Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three"))) (decided by javaMapReverseEquality afterBeing javaMapTrimmed)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take specified equality and normalization when 'should not contain' is used") {
(List(" ONE", "TWO ", " THREE") should not contain noElementsOf (Seq("one ", " two", "three "))) (decided by lowerCaseEquality afterBeing trimmed)
(Set(" ONE", "TWO ", " THREE") should not contain noElementsOf (Seq("one ", " two", "three "))) (decided by lowerCaseEquality afterBeing trimmed)
(Array(" ONE", "TWO ", " THREE") should not contain noElementsOf (Seq("one ", " two", "three "))) (decided by lowerCaseEquality afterBeing trimmed)
(Map(1 -> " ONE", 2 -> "TWO ", 3 -> " THREE") should not contain noElementsOf (Seq(1 -> "one ", 2 -> " two", 3 -> "three "))) (decided by mapLowerCaseEquality afterBeing mapTrimmed)
// SKIP-SCALATESTJS,NATIVE-START
(javaList(" ONE", "TWO ", " THREE") should not contain noElementsOf (Seq("one ", " two", "three "))) (decided by lowerCaseEquality afterBeing trimmed)
(javaSet(" ONE", "TWO ", " THREE") should not contain noElementsOf (Seq("one ", " two", "three "))) (decided by lowerCaseEquality afterBeing trimmed)
(javaMap(Entry(1, " ONE"), Entry(2, "TWO "), Entry(3, " THREE")) should not contain noElementsOf (Seq(Entry(1, "one "), Entry(2, " two"), Entry(3, "three ")))) (decided by javaMapLowerCaseEquality afterBeing javaMapTrimmed)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with specified equality and normalization") {
val left1 = List(" ONE", "TWO ", " THREE")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should contain noElementsOf Seq("one ", " two", "three ")) (decided by lowerCaseEquality afterBeing trimmed)
}
checkShouldContainStackDepth(e1, left1, Seq("one ", " two", "three "), thisLineNumber - 2)
val left2 = Set(" ONE", "TWO ", " THREE")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should contain noElementsOf Seq("one ", " two", "three ")) (decided by lowerCaseEquality afterBeing trimmed)
}
checkShouldContainStackDepth(e2, left2, Seq("one ", " two", "three "), thisLineNumber - 2)
val left3 = Array(" ONE", "TWO ", " THREE")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should contain noElementsOf Seq("one ", " two", "three ")) (decided by lowerCaseEquality afterBeing trimmed)
}
checkShouldContainStackDepth(e3, left3, Seq("one ", " two", "three "), thisLineNumber - 2)
val left4 = Map(1 -> " ONE", 2 -> "TWO ", 3 -> " THREE")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should contain noElementsOf Seq(1 -> "one ", 2 -> " two", 3 -> "three ")) (decided by mapLowerCaseEquality afterBeing mapTrimmed)
}
checkShouldContainStackDepth(e4, left4, Seq(1 -> "one ", 2 -> " two", 3 -> "three "), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList(" ONE", "TWO ", " THREE")
val e5 = intercept[exceptions.TestFailedException] {
(left5 should contain noElementsOf Seq("one ", " two", "three ")) (decided by lowerCaseEquality afterBeing trimmed)
}
checkShouldContainStackDepth(e5, left5, Seq("one ", " two", "three "), thisLineNumber - 2)
val left6 = javaMap(Entry(1, " ONE"), Entry(2, "TWO "), Entry(3, " THREE"))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should contain noElementsOf Seq(Entry(1, "one "), Entry(2, " two"), Entry(3, "three "))) (decided by javaMapLowerCaseEquality afterBeing javaMapTrimmed)
}
checkShouldContainStackDepth(e6, left6, Seq(Entry(1, "one "), Entry(2, " two"), Entry(3, "three ")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with specified equality and normalization") {
val left1 = List("one ", " two", "three ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should not contain noElementsOf (Seq(" one", "two ", " three"))) (decided by reverseEquality afterBeing trimmed)
}
checkShouldNotContainStackDepth(e1, left1, Seq(" one", "two ", " three"), thisLineNumber - 2)
val left2 = Set("one ", " two", "three ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should not contain noElementsOf (Seq(" one", "two ", " three"))) (decided by reverseEquality afterBeing trimmed)
}
checkShouldNotContainStackDepth(e2, left2, Seq(" one", "two ", " three"), thisLineNumber - 2)
val left3 = Array("one ", " two", "three ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should not contain noElementsOf (Seq(" one", "two ", " three"))) (decided by reverseEquality afterBeing trimmed)
}
checkShouldNotContainStackDepth(e3, left3, Seq(" one", "two ", " three"), thisLineNumber - 2)
val left4 = Map(1 -> "one ", 2 -> " two", 3 -> "three ")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should not contain noElementsOf (Seq(1 -> " one", 2 -> "two ", 3 -> " three"))) (decided by mapReverseEquality afterBeing mapTrimmed)
}
checkShouldNotContainStackDepth(e4, left4, Seq(1 -> " one", 2 -> "two ", 3 -> " three"), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList("one ", " two", "three ")
val e5 = intercept[exceptions.TestFailedException] {
(left5 should not contain noElementsOf (Seq(" one", "two ", " three"))) (decided by reverseEquality afterBeing trimmed)
}
checkShouldNotContainStackDepth(e5, left5, Seq(" one", "two ", " three"), thisLineNumber - 2)
val left6 = javaMap(Entry(1, "one "), Entry(2, " two"), Entry(3, "three "))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should not contain noElementsOf (Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three")))) (decided by javaMapReverseEquality afterBeing javaMapTrimmed)
}
checkShouldNotContainStackDepth(e6, left6, Seq(Entry(1, " one"), Entry(2, "two "), Entry(3, " three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/NoElementsOfContainMatcherDeciderSpec.scala
|
Scala
|
apache-2.0
| 22,492 |
package org.jetbrains.plugins.scala
package lang
package resolve
package processor
import com.intellij.openapi.progress.ProgressManager
import com.intellij.openapi.util.{RecursionManager, Key}
import com.intellij.psi.scope._
import com.intellij.psi._
import collection.{mutable, Set}
import impl.compiled.ClsClassImpl
import impl.light.LightMethod
import org.jetbrains.plugins.scala.lang.psi.api._
import base.types.ScTypeProjection
import statements.ScTypeAlias
import psi.types._
import psi.impl.toplevel.typedef.TypeDefinitionMembers
import org.jetbrains.plugins.scala.lang.psi.types.result.{TypeResult, Success, TypingContext}
import toplevel.imports.usages.ImportUsed
import ResolveTargets._
import psi.impl.toplevel.synthetic.{ScSyntheticFunction, SyntheticClasses}
import toplevel.ScTypedDefinition
import toplevel.typedef.{ScObject, ScTemplateDefinition}
import org.jetbrains.plugins.scala.extensions._
import psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.resolve.processor.PrecedenceHelper.PrecedenceTypes
object BaseProcessor {
def unapply(p: BaseProcessor) = Some(p.kinds)
val boundClassKey: Key[PsiClass] = Key.create("bound.class.key")
val FROM_TYPE_KEY: Key[ScType] = Key.create("from.type.key")
val COMPOUND_TYPE_THIS_TYPE_KEY: Key[Option[ScType]] = Key.create("compound.type.this.type.key")
val FORWARD_REFERENCE_KEY: Key[java.lang.Boolean] = Key.create("forward.reference.key")
val guard = RecursionManager.createGuard("process.element.guard")
def isImplicitProcessor(processor: PsiScopeProcessor): Boolean = {
processor match {
case b: BaseProcessor => b.isImplicitProcessor
case _ => false
}
}
}
abstract class BaseProcessor(val kinds: Set[ResolveTargets.Value]) extends PsiScopeProcessor {
protected val candidatesSet: mutable.HashSet[ScalaResolveResult] = new mutable.HashSet[ScalaResolveResult]
def isImplicitProcessor: Boolean = false
def changedLevel: Boolean = true
private var knownPriority: Option[Int] = None
def definePriority(p: Int)(body: => Unit) {
val oldPriority = knownPriority
knownPriority = Some(p)
try {
body
} finally {
knownPriority = oldPriority
}
}
def isPredefPriority = knownPriority == Some(PrecedenceTypes.SCALA_PREDEF)
def specialPriority: Option[Int] = knownPriority
protected var accessibility = true
def doNotCheckAccessibility() {accessibility = false}
def rrcandidates: Array[ResolveResult] = {
val set = candidatesS
val size = set.size
val res = JavaArrayFactoryUtil.ResolveResultFactory.create(size)
if (size == 0) return res
val iter = set.iterator
var count = 0
while (iter.hasNext) {
val next = iter.next()
res(count) = next
count += 1
}
res
}
def candidates: Array[ScalaResolveResult] = {
val set = candidatesS
val size = set.size
val res = JavaArrayFactoryUtil.ScalaResolveResultFactory.create(size)
if (size == 0) return res
val iter = set.iterator
var count = 0
while (iter.hasNext) {
val next = iter.next()
res(count) = next
count += 1
}
res
}
def candidatesS: Set[ScalaResolveResult] = candidatesSet
//todo: fix this ugly performance improvement
private var classKind = true
def setClassKind(classKind: Boolean) {
this.classKind = classKind
}
def getClassKind = {
classKind && getClassKindInner
}
def getClassKindInner = {
(kinds contains ResolveTargets.CLASS) ||
(kinds contains ResolveTargets.OBJECT) ||
(kinds contains ResolveTargets.METHOD)
}
//java compatibility
object MyElementClassHint extends ElementClassHint {
import ElementClassHint.DeclarationKind
def shouldProcess(kind: DeclarationKind): Boolean = {
kind match {
case null => true
case DeclarationKind.PACKAGE => kinds contains ResolveTargets.PACKAGE
case DeclarationKind.CLASS if classKind =>
(kinds contains ResolveTargets.CLASS) || (kinds contains ResolveTargets.OBJECT) ||
(kinds contains ResolveTargets.METHOD) //case classes get 'apply' generated
case DeclarationKind.VARIABLE => (kinds contains ResolveTargets.VAR) || (kinds contains ResolveTargets.VAL)
case DeclarationKind.FIELD => (kinds contains ResolveTargets.VAR) || (kinds contains ResolveTargets.VAL)
case DeclarationKind.METHOD => kinds contains ResolveTargets.METHOD
case _ => false
}
}
}
def getHint[T](hintKey: Key[T]): T = {
hintKey match {
case ElementClassHint.KEY => MyElementClassHint.asInstanceOf[T]
case _ => null.asInstanceOf[T]
}
}
def handleEvent(event: PsiScopeProcessor.Event, associated: Object) {}
protected def kindMatches(element: PsiElement): Boolean = ResolveUtils.kindMatches(element, kinds)
def processType(t: ScType, place: PsiElement): Boolean = processType(t, place, ResolveState.initial)
def processType(t: ScType, place: PsiElement, state: ResolveState): Boolean =
processType(t, place, state, updateWithProjectionSubst = true)
def processType(t: ScType, place: PsiElement, state: ResolveState,
updateWithProjectionSubst: Boolean): Boolean = {
ProgressManager.checkCanceled()
t match {
case ScDesignatorType(clazz: PsiClass) if clazz.qualifiedName == "java.lang.String" =>
val plusMethod: ScType => ScSyntheticFunction = SyntheticClasses.get(place.getProject).stringPlusMethod
if (plusMethod != null) execute(plusMethod(t), state) //add + method
case _ =>
}
t match {
case ScThisType(clazz) =>
val thisSubst = new ScSubstitutor(ScThisType(clazz))
if (clazz.selfType.isEmpty) {
processElement(clazz, thisSubst, place, state)
} else {
val selfType = clazz.selfType.get
val clazzType: ScType = clazz.getTypeWithProjections(TypingContext.empty).getOrElse(return true)
if (selfType == ScThisType(clazz)) {
//to prevent SOE, let's process Element
processElement(clazz, thisSubst, place, state)
} else if (selfType.conforms(clazzType)) {
processType(selfType, place, state.put(BaseProcessor.COMPOUND_TYPE_THIS_TYPE_KEY, Some(t)).
put(ScSubstitutor.key, thisSubst))
} else if (clazzType.conforms(selfType)) {
processElement(clazz, thisSubst, place, state)
} else {
processType(clazz.selfType.map(Bounds.glb(_, clazzType)).get, place,
state.put(BaseProcessor.COMPOUND_TYPE_THIS_TYPE_KEY, Some(t)))
}
}
case d@ScDesignatorType(e: PsiClass) if d.isStatic && !e.isInstanceOf[ScTemplateDefinition] =>
//not scala from scala
var break = true
for (method <- e.getMethods if break && method.hasModifierProperty("static")) {
if (!execute(method, state)) break = false
}
for (cl <- e.getInnerClasses if break && cl.hasModifierProperty("static")) {
if (!execute(cl, state)) break = false
}
for (field <- e.getFields if break && field.hasModifierProperty("static")) {
if (!execute(field, state)) break = false
}
//todo: duplicate TypeDefinitionMembers
//fake enum static methods
val isJavaSourceEnum = !e.isInstanceOf[ClsClassImpl] && e.isEnum
if (isJavaSourceEnum) {
val elementFactory: PsiElementFactory = JavaPsiFacade.getInstance(e.getProject).getElementFactory
//todo: cache like in PsiClassImpl
val valuesMethod: PsiMethod = elementFactory.createMethodFromText("public static " + e.name +
"[] values() {}", e)
val valueOfMethod: PsiMethod = elementFactory.createMethodFromText("public static " + e.name +
" valueOf(String name) throws IllegalArgumentException {}", e)
val values = new LightMethod(e.getManager, valuesMethod, e)
val valueOf = new LightMethod(e.getManager, valueOfMethod, e)
if (!execute(values, state)) return false
if (!execute(valueOf, state)) return false
}
break
case ScDesignatorType(o: ScObject) => processElement(o, ScSubstitutor.empty, place, state)
case ScDesignatorType(e: ScTypedDefinition) if place.isInstanceOf[ScTypeProjection] =>
val result: TypeResult[ScType] =
e match {
case p: ScParameter => p.getRealParameterType(TypingContext.empty)
case _ => e.getType(TypingContext.empty)
}
result match {
case Success(tp, _) => processType(tp, place, state)
case _ => true
}
case ScDesignatorType(e) => processElement(e, ScSubstitutor.empty, place, state)
case ScTypeParameterType(_, Nil, _, upper, _) =>
processType(upper.v, place, state, updateWithProjectionSubst = false)
case j: JavaArrayType =>
processType(j.getParameterizedType(place.getProject, place.getResolveScope).
getOrElse(return true), place, state)
case p@ScParameterizedType(des, typeArgs) =>
p.designator match {
case ScTypeParameterType(_, _, _, upper, _) => processType(p.substitutor.subst(upper.v), place,
state.put(ScSubstitutor.key, new ScSubstitutor(p)))
case _ => ScType.extractDesignated(p, withoutAliases = false) match {
case Some((designator, subst)) => processElement(designator, subst, place, state)
case None => true
}
}
case proj@ScProjectionType(projectd, _, _) if proj.actualElement.isInstanceOf[ScTypeAlias] =>
val ta = proj.actualElement.asInstanceOf[ScTypeAlias]
val subst = proj.actualSubst
val upper = ta.upperBound.getOrElse(return true)
processType(subst.subst(upper), place, state.put(ScSubstitutor.key, ScSubstitutor.empty))
case proj@ScProjectionType(des, elem, _) =>
val s: ScSubstitutor = if (updateWithProjectionSubst)
new ScSubstitutor(Map.empty, Map.empty, Some(proj)) followed proj.actualSubst
else proj.actualSubst
processElement(proj.actualElement, s, place, state)
case StdType(name, tSuper) =>
SyntheticClasses.get(place.getProject).byName(name) match {
case Some(c) =>
if (!c.processDeclarations(this, state, null, place) ||
!(tSuper match {
case Some(ts) => processType(ts, place)
case _ => true
})) return false
case None => //nothing to do
}
val scope = place.getResolveScope
val obj: PsiClass = ScalaPsiManager.instance(place.getProject).getCachedClass(scope, "java.lang.Object")
if (obj != null) {
val namesSet = Set("hashCode", "toString", "equals", "getClass")
val methods = obj.getMethods.iterator
while (methods.hasNext) {
val method = methods.next()
if (name == "AnyRef" || namesSet.contains(method.name)) {
if (!execute(method, state)) return false
}
}
}
true
case comp@ScCompoundType(components, signaturesMap, typesMap) =>
TypeDefinitionMembers.processDeclarations(comp, this, state, null, place)
case ex: ScExistentialType => processType(ex.skolem, place, state.put(ScSubstitutor.key, ScSubstitutor.empty))
case ScSkolemizedType(_, _, lower, upper) => processType(upper, place, state, updateWithProjectionSubst)
case _ => true
}
}
private def processElement(e: PsiNamedElement, s: ScSubstitutor, place: PsiElement, state: ResolveState): Boolean = {
val subst = state.get(ScSubstitutor.key)
val compound = state.get(BaseProcessor.COMPOUND_TYPE_THIS_TYPE_KEY) //todo: looks like ugly workaround
val newSubst =
compound match {
case Some(_) => subst
case _ => if (subst != null) subst followed s else s
}
e match {
case ta: ScTypeAlias =>
processType(s.subst(ta.upperBound.getOrAny), place, state.put(ScSubstitutor.key, ScSubstitutor.empty))
      // needs to be processed the Scala way
case clazz: PsiClass =>
TypeDefinitionMembers.processDeclarations(clazz, BaseProcessor.this, state.put(ScSubstitutor.key, newSubst), null, place)
case des: ScTypedDefinition =>
val typeResult: TypeResult[ScType] =
des match {
case p: ScParameter => p.getRealParameterType(TypingContext.empty)
case _ => des.getType(TypingContext.empty)
}
typeResult match {
case Success(tp, _) =>
processType(newSubst subst tp, place, state.put(ScSubstitutor.key, ScSubstitutor.empty),
updateWithProjectionSubst = false)
case _ => true
}
case pack: ScPackage =>
pack.processDeclarations(BaseProcessor.this, state.put(ScSubstitutor.key, newSubst), null, place)
case des =>
des.processDeclarations(BaseProcessor.this, state.put(ScSubstitutor.key, newSubst), null, place)
}
}
protected def getSubst(state: ResolveState) = {
val subst: ScSubstitutor = state.get(ScSubstitutor.key)
if (subst == null) ScSubstitutor.empty else subst
}
protected def getImports(state: ResolveState): Set[ImportUsed] = {
val used = state.get(ImportUsed.key)
if (used == null) Set[ImportUsed]() else used
}
protected def getBoundClass(state: ResolveState): PsiClass = {
state.get(BaseProcessor.boundClassKey)
}
protected def getFromType(state: ResolveState): Option[ScType] = {
state.get(BaseProcessor.FROM_TYPE_KEY).toOption
}
protected def isForwardReference(state: ResolveState): Boolean = {
val res: java.lang.Boolean = state.get(BaseProcessor.FORWARD_REFERENCE_KEY)
if (res != null) res
else false
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/resolve/processor/BaseProcessor.scala
|
Scala
|
apache-2.0
| 13,969 |
//
// NQueens.scala -- Scala benchmark NQueens
// Project OrcTests
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.test.item.scalabenchmarks
import NQueensTypes.{ Queen, Solutions }
object NQueensTypes {
type Queen = (Int, Int)
type Solutions = Iterable[List[Queen]]
}
// From: https://gist.github.com/ornicar/1115259
// Solves the n-queens problem for an arbitrary board size
// Run for a board size of ten: scala nqueen.scala 10
object NQueens extends BenchmarkApplication[Unit, Solutions] with ExpectedBenchmarkResult[Solutions] {
val N: Int = (8 + math.log10(BenchmarkConfig.problemSize)).toInt
def isSafe(queen: Queen, others: List[Queen]) =
others forall (!isAttacked(queen, _))
def isAttacked(q1: Queen, q2: Queen) =
q1._1 == q2._1 ||
q1._2 == q2._2 ||
(q2._1 - q1._1).abs == (q2._2 - q1._2).abs
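  // Worked example of the two predicates above (an illustrative spot check,
  // not part of the benchmark): queens are (row, column) pairs, so (1, 1) and
  // (3, 3) share a diagonal while (1, 1) and (2, 3) attack nothing.
  private def predicateSpotCheck: Boolean =
    isAttacked((1, 1), (3, 3)) &&          // same diagonal: |3 - 1| == |3 - 1|
      !isAttacked((1, 1), (2, 3)) &&       // different row, column, and diagonal
      isSafe((4, 2), List((1, 1), (2, 3))) // (4, 2) conflicts with neither queen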
def benchmark(ctx: Unit): Solutions = {
def placeQueens(n: Int): Solutions = n match {
      case 0 => List(Nil)
case _ => for {
queens <- placeQueens(n - 1)
y <- 0 until N
queen = (n, y)
if (isSafe(queen, queens))
} yield queens :+ queen
}
placeQueens(N)
}
def setup(): Unit = ()
val name: String = "N-Queens"
def factorial(n: BigInt): BigInt = {
if (n > 1)
n * factorial(n-1)
else
1
}
  // Note: factorial(N) overflows Int for N >= 13; fine for the default problem sizes.
  val size: Int = factorial(N).toInt
override def hash(results: Solutions): Int = results.toSet.##()
val expectedMap: Map[Int, Int] = Map(
10 -> 0xae0ba7ef,
100 -> 0xdcf13a95,
//100 -> 0xabcb3752,
)
}
|
orc-lang/orc
|
OrcTests/src/orc/test/item/scalabenchmarks/NQueens.scala
|
Scala
|
bsd-3-clause
| 1,820 |
package java.lang
import java.io._
import scala.scalajs.js
import js.Dynamic.global
import scala.scalajs.LinkingInfo.assumingES6
import scala.scalajs.runtime.linkingInfo
import java.{util => ju}
import scalajs.runtime.environmentInfo
object System {
var out: PrintStream = new JSConsoleBasedPrintStream(isErr = false)
var err: PrintStream = new JSConsoleBasedPrintStream(isErr = true)
var in: InputStream = null
def setIn(in: InputStream): Unit =
this.in = in
def setOut(out: PrintStream): Unit =
this.out = out
def setErr(err: PrintStream): Unit =
this.err = err
def currentTimeMillis(): scala.Long = {
(new js.Date).getTime().toLong
}
private[this] val getHighPrecisionTime: js.Function0[scala.Double] = {
import js.DynamicImplicits.truthValue
    // We have to use selectDynamic explicitly to avoid crashing Scala 2.10
if (global.selectDynamic("performance")) {
if (global.performance.selectDynamic("now")) {
() => global.performance.now().asInstanceOf[scala.Double]
} else if (global.performance.selectDynamic("webkitNow")) {
() => global.performance.webkitNow().asInstanceOf[scala.Double]
} else {
() => new js.Date().getTime()
}
} else {
() => new js.Date().getTime()
}
}
def nanoTime(): scala.Long =
(getHighPrecisionTime() * 1000000).toLong
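  // Usage sketch (the classic JVM elapsed-time idiom; here the precision is
  // only whatever performance.now() offers, so treat the value as approximate):
  //
  //   val t0 = System.nanoTime()
  //   doWork() // hypothetical workload
  //   val elapsedMillis = (System.nanoTime() - t0) / 1000000L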
def arraycopy(src: Object, srcPos: scala.Int, dest: Object,
destPos: scala.Int, length: scala.Int): Unit = {
import scala.{Boolean, Char, Byte, Short, Int, Long, Float, Double}
@inline def checkIndices(srcLen: Int, destLen: Int): Unit = {
if (srcPos < 0 || destPos < 0 || length < 0 ||
srcPos + length > srcLen || destPos + length > destLen)
throw new ArrayIndexOutOfBoundsException("Array index out of bounds")
}
def mismatch(): Nothing =
throw new ArrayStoreException("Incompatible array types")
val forward = (src ne dest) || destPos < srcPos || srcPos + length < destPos
def copyPrim[@specialized T](src: Array[T], dest: Array[T]): Unit = {
checkIndices(src.length, dest.length)
if (forward) {
var i = 0
while (i < length) {
dest(i+destPos) = src(i+srcPos)
i += 1
}
} else {
var i = length-1
while (i >= 0) {
dest(i+destPos) = src(i+srcPos)
i -= 1
}
}
}
def copyRef(src: Array[AnyRef], dest: Array[AnyRef]): Unit = {
checkIndices(src.length, dest.length)
if (forward) {
var i = 0
while (i < length) {
dest(i+destPos) = src(i+srcPos)
i += 1
}
} else {
var i = length-1
while (i >= 0) {
dest(i+destPos) = src(i+srcPos)
i -= 1
}
}
}
if (src == null || dest == null) {
throw new NullPointerException()
} else (src match {
case src: Array[AnyRef] =>
dest match {
case dest: Array[AnyRef] => copyRef(src, dest)
case _ => mismatch()
}
case src: Array[Boolean] =>
dest match {
case dest: Array[Boolean] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Char] =>
dest match {
case dest: Array[Char] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Byte] =>
dest match {
case dest: Array[Byte] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Short] =>
dest match {
case dest: Array[Short] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Int] =>
dest match {
case dest: Array[Int] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Long] =>
dest match {
case dest: Array[Long] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Float] =>
dest match {
case dest: Array[Float] => copyPrim(src, dest)
case _ => mismatch()
}
case src: Array[Double] =>
dest match {
case dest: Array[Double] => copyPrim(src, dest)
case _ => mismatch()
}
case _ =>
mismatch()
})
}
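  // Overlap sketch, following the `forward` test above: when source and
  // destination alias and destPos > srcPos, the copy runs backwards so no
  // element is overwritten before it has been read:
  //
  //   val a = Array(1, 2, 3, 4, 5)
  //   System.arraycopy(a, 0, a, 1, 4) // backward copy; a == Array(1, 1, 2, 3, 4)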
def identityHashCode(x: Object): scala.Int = {
(x: Any) match {
case null => 0
case _:scala.Boolean | _:scala.Double | _:String | () =>
x.hashCode()
case _ =>
import IDHashCode._
if (x.getClass == null) {
// This is not a Scala.js object: delegate to x.hashCode()
x.hashCode()
} else if (assumingES6 || idHashCodeMap != null) {
// Use the global WeakMap of attributed id hash codes
val hash = idHashCodeMap.get(x.asInstanceOf[js.Any])
if (!js.isUndefined(hash)) {
hash.asInstanceOf[Int]
} else {
val newHash = nextIDHashCode()
idHashCodeMap.set(x.asInstanceOf[js.Any], newHash)
newHash
}
} else {
val hash = x.asInstanceOf[js.Dynamic].selectDynamic("$idHashCode$0")
if (!js.isUndefined(hash)) {
/* Note that this can work even if x is sealed, if
* identityHashCode() was called for the first time before x was
* sealed.
*/
hash.asInstanceOf[Int]
} else if (!js.Object.isSealed(x.asInstanceOf[js.Object])) {
/* If x is not sealed, we can (almost) safely create an additional
* field with a bizarre and relatively long name, even though it is
* technically undefined behavior.
*/
val newHash = nextIDHashCode()
x.asInstanceOf[js.Dynamic].updateDynamic("$idHashCode$0")(newHash)
newHash
} else {
// Otherwise, we unfortunately have to return a constant.
42
}
}
}
}
private object IDHashCode {
private var lastIDHashCode: Int = 0
val idHashCodeMap =
if (assumingES6 || !js.isUndefined(global.WeakMap))
js.Dynamic.newInstance(global.WeakMap)()
else
null
def nextIDHashCode(): Int = {
val r = lastIDHashCode + 1
lastIDHashCode = r
r
}
}
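  // Behavior sketch for identityHashCode above (assuming a WeakMap-capable
  // environment): hash codes are attributed lazily but stay stable per object,
  // and primitives/strings simply delegate to hashCode():
  //
  //   val o = new Object
  //   System.identityHashCode(o) == System.identityHashCode(o) // true, cached
  //   System.identityHashCode("ab") == "ab".hashCode           // true, delegated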
private object SystemProperties {
var value = loadSystemProperties()
private[System] def loadSystemProperties(): ju.Properties = {
val sysProp = new ju.Properties()
sysProp.setProperty("java.version", "1.8")
sysProp.setProperty("java.vm.specification.version", "1.8")
sysProp.setProperty("java.vm.specification.vendor", "Oracle Corporation")
sysProp.setProperty("java.vm.specification.name", "Java Virtual Machine Specification")
sysProp.setProperty("java.vm.name", "Scala.js")
linkingInfo.linkerVersion.foreach(v => sysProp.setProperty("java.vm.version", v))
sysProp.setProperty("java.specification.version", "1.8")
sysProp.setProperty("java.specification.vendor", "Oracle Corporation")
sysProp.setProperty("java.specification.name", "Java Platform API Specification")
sysProp.setProperty("file.separator", "/")
sysProp.setProperty("path.separator", ":")
sysProp.setProperty("line.separator", "\\n")
for {
jsEnvProperties <- environmentInfo.javaSystemProperties
(key, value) <- jsEnvProperties
} {
sysProp.setProperty(key, value)
}
sysProp
}
}
def getProperties(): ju.Properties =
SystemProperties.value
def setProperties(properties: ju.Properties): Unit = {
SystemProperties.value =
if (properties != null) properties
else SystemProperties.loadSystemProperties()
}
def getProperty(key: String): String =
SystemProperties.value.getProperty(key)
def getProperty(key: String, default: String): String =
SystemProperties.value.getProperty(key, default)
def clearProperty(key: String): String =
SystemProperties.value.remove(key).asInstanceOf[String]
def setProperty(key: String, value: String): String =
SystemProperties.value.setProperty(key, value).asInstanceOf[String]
def getenv(): ju.Map[String, String] =
ju.Collections.emptyMap()
def getenv(name: String): String = {
if (name eq null)
throw new NullPointerException
null
}
def exit(status: scala.Int): Unit = Runtime.getRuntime().exit(status)
def gc(): Unit = Runtime.getRuntime().gc()
}
private[lang] final class JSConsoleBasedPrintStream(isErr: Boolean)
extends PrintStream(new JSConsoleBasedPrintStream.DummyOutputStream) {
import JSConsoleBasedPrintStream._
/** Whether the buffer is flushed.
* This can be true even if buffer != "" because of line continuations.
* However, the converse is never true, i.e., !flushed => buffer != "".
*/
private var flushed: scala.Boolean = true
private var buffer: String = ""
override def write(b: Int): Unit =
write(Array(b.toByte), 0, 1)
override def write(buf: Array[scala.Byte], off: Int, len: Int): Unit = {
/* This does *not* decode buf as a sequence of UTF-8 code units.
* This is not really useful, and would uselessly pull in the UTF-8 decoder
* in all applications that use OutputStreams (not just PrintStreams).
* Instead, we use a trivial ISO-8859-1 decoder in here.
*/
if (off < 0 || len < 0 || len > buf.length - off)
throw new IndexOutOfBoundsException
var i = 0
while (i < len) {
print((buf(i + off) & 0xff).toChar)
i += 1
}
}
override def print(b: scala.Boolean): Unit = printString(String.valueOf(b))
override def print(c: scala.Char): Unit = printString(String.valueOf(c))
override def print(i: scala.Int): Unit = printString(String.valueOf(i))
override def print(l: scala.Long): Unit = printString(String.valueOf(l))
override def print(f: scala.Float): Unit = printString(String.valueOf(f))
override def print(d: scala.Double): Unit = printString(String.valueOf(d))
override def print(s: Array[scala.Char]): Unit = printString(String.valueOf(s))
override def print(s: String): Unit = printString(if (s == null) "null" else s)
override def print(obj: AnyRef): Unit = printString(String.valueOf(obj))
  override def println(): Unit = printString("\n")
// This is the method invoked by Predef.println(x).
@inline
  override def println(obj: AnyRef): Unit = printString("" + obj + "\n")
private def printString(s: String): Unit = {
var rest: String = s
while (rest != "") {
      val nlPos = rest.indexOf("\n")
if (nlPos < 0) {
buffer += rest
flushed = false
rest = ""
} else {
doWriteLine(buffer + rest.substring(0, nlPos))
buffer = ""
flushed = true
rest = rest.substring(nlPos+1)
}
}
}
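  // Buffering sketch for printString above: only complete lines reach the
  // JavaScript console, and partial output waits for a "\n" or a flush():
  //
  //   out.print("par")     // buffered, nothing logged yet
  //   out.print("tial\n")  // console.log("partial")
  //   out.print("rest")    // buffered until the next "\n" or flush()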
/**
* Since we cannot write a partial line in JavaScript, we write a whole
* line with continuation symbol at the end and schedule a line continuation
* symbol for the new line if the buffer is flushed.
*/
override def flush(): Unit = if (!flushed) {
doWriteLine(buffer + LineContEnd)
buffer = LineContStart
flushed = true
}
override def close(): Unit = ()
private def doWriteLine(line: String): Unit = {
import js.DynamicImplicits.truthValue
    // We have to use selectDynamic explicitly to avoid crashing Scala 2.10
if (global.selectDynamic("console")) {
if (isErr && global.console.selectDynamic("error"))
global.console.error(line)
else
global.console.log(line)
}
}
}
private[lang] object JSConsoleBasedPrintStream {
  private final val LineContEnd: String = "\u21A9"
  private final val LineContStart: String = "\u21AA"
class DummyOutputStream extends OutputStream {
def write(c: Int): Unit =
throw new AssertionError(
"Should not get in JSConsoleBasedPrintStream.DummyOutputStream")
}
}
|
lrytz/scala-js
|
javalanglib/src/main/scala/java/lang/System.scala
|
Scala
|
bsd-3-clause
| 12,185 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.runtime
import scala.collection.JavaConversions._
import org.apache.spark.SparkConf
import org.slf4j.LoggerFactory
import com.asakusafw.iterative.launch.IterativeLaunchConfiguration
import com.asakusafw.runtime.core.context.RuntimeContext
object Launcher {
val Logger = LoggerFactory.getLogger(getClass)
def main(args: Array[String]): Unit = {
try {
val cl = Thread.currentThread.getContextClassLoader
RuntimeContext.set(RuntimeContext.DEFAULT.apply(System.getenv))
RuntimeContext.get.verifyApplication(cl)
val conf = IterativeLaunchConfiguration.parse(cl, args: _*)
val sparkClient = conf.getStageClient.asSubclass(classOf[SparkClient]).newInstance()
val sparkConf = new SparkConf()
conf.getHadoopProperties.foreach {
case (key, value) => sparkConf.setHadoopConf(key, value)
}
conf.getEngineProperties.foreach {
case (key, value) => sparkConf.set(key, value)
}
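      // Routing sketch: an engine property such as "spark.executor.memory" -> "4g"
      // (an illustrative key, not one the launcher requires) lands directly in
      // sparkConf via the loop above, while Hadoop properties go through
      // setHadoopConf instead.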
if (!sparkConf.contains(Props.Parallelism)
&& !sparkConf.contains("spark.default.parallelism")) {
if (Logger.isWarnEnabled) {
Logger.warn(
s"`${Props.Parallelism}` is not set, " +
s"we set parallelism to ${Props.ParallelismFallback}.")
}
}
if (!RuntimeContext.get.isSimulation) {
val status = sparkClient.execute(sparkConf, conf.getStageInfo)
if (status != 0) {
throw new RuntimeException(s"Spark execution returned non-zero value: ${status}")
}
}
} catch {
case t: Throwable =>
Logger.error(s"SparkClient throws: ${t.getMessage}", t)
throw t
}
}
}
|
ueshin/asakusafw-spark
|
runtime/src/main/scala/com/asakusafw/spark/runtime/Launcher.scala
|
Scala
|
apache-2.0
| 2,298 |
package com.twitter.penguin.korean.phrase_extractor
import java.util.logging.Logger
import com.twitter.penguin.korean.TestBase._
import com.twitter.penguin.korean.TwitterKoreanProcessor.tokenize
import com.twitter.penguin.korean.normalizer.KoreanNormalizer
import com.twitter.penguin.korean.tokenizer.KoreanTokenizer.KoreanToken
import com.twitter.penguin.korean.util.KoreanPos
import com.twitter.penguin.korean.{TestBase, TwitterKoreanProcessor}
class KoreanPhraseExtractorTest extends TestBase {
val LOG = Logger.getLogger(getClass.getSimpleName)
case class SampleTextPair(text: String, phrases: String)
val sampleText = List[SampleTextPair](
SampleTextPair(
"블랙프라이데이: 이날 미국의 수백만 소비자들은 크리스마스 선물을 할인된 가격에 사는 것을 주 목적으로 블랙프라이데이 쇼핑을 한다.",
"블랙프라이데이(Noun: 0, 7), 이날(Noun: 9, 2), 이날 미국(Noun: 9, 5), 이날 미국의 수백만(Noun: 9, 10), " +
"미국의 수백만(Noun: 12, 7), 수백만(Noun: 16, 3), 이날 미국의 수백만 소비자들(Noun: 9, 15), " +
"미국의 수백만 소비자들(Noun: 12, 12), 수백만 소비자들(Noun: 16, 8), 크리스마스(Noun: 26, 5), " +
"크리스마스 선물(Noun: 26, 8), 할인(Noun: 36, 2), 할인된 가격(Noun: 36, 6), 가격(Noun: 40, 2), " +
"주 목적(Noun: 50, 4), 블랙프라이데이 쇼핑(Noun: 57, 10), 미국(Noun: 12, 2), 소비자들(Noun: 20, 4), " +
"선물(Noun: 32, 2), 목적(Noun: 52, 2), 쇼핑(Noun: 65, 2)"
),
SampleTextPair(
"결정했어. 마키 코레썸 사주시는 분께는 허니버터칩 한 봉지를 선물할 것이다.",
"결정(Noun: 0, 2), 마키(Noun: 6, 2), 마키 코레썸(Noun: 6, 6), " +
"마키 코레썸 사주시는 분께는 허니버터칩(Noun: 6, 21), 코레썸 사주시는 분께는 허니버터칩(Noun: 9, 18), " +
"허니버터칩(Noun: 22, 5), 마키 코레썸 사주시는 분께는 허니버터칩 한 봉지(Noun: 6, 26), " +
"코레썸 사주시는 분께는 허니버터칩 한 봉지(Noun: 9, 23), 허니버터칩 한 봉지(Noun: 22, 10), " +
"봉지(Noun: 30, 2), 코레썸(Noun: 9, 3)"
),
SampleTextPair(
"[단독]정부, 새 고용 형태 '중규직' 만든다 http://url.com 이름도 바뀌겟군. 정규직은 상규직, " +
"비정규직은 하규직. 중규직 참 창조적이다. 결국 기업은 비정규직으로 이용할게 뻔함.",
"단독(Noun: 1, 2), 정부(Noun: 4, 2), 새 고용(Noun: 8, 4), 새 고용 형태(Noun: 8, 7), " +
"고용 형태(Noun: 10, 5), 중규직(Noun: 17, 3), 이름(Noun: 41, 2), 정규직(Noun: 51, 3), " +
"상규직(Noun: 56, 3), 비정규직(Noun: 61, 4), 하규직(Noun: 67, 3), 기업(Noun: 88, 2), " +
"고용(Noun: 10, 2), 형태(Noun: 13, 2), 하규(Noun: 67, 2)"
),
SampleTextPair(
"키? ...난 절대 키가 작은 게 아냐. 이소자키나 츠루기가 비정상적으로 큰거야. 1학년이 그렇게 큰 게 말이 돼!? ",
"난 절대(Noun: 6, 4), 난 절대 키(Noun: 6, 6), 절대 키(Noun: 8, 4), 작은 게(Noun: 14, 4), " +
"이소자키(Noun: 23, 4), 츠루기(Noun: 29, 3), 1학년(Noun: 46, 3), 절대(Noun: 8, 2), " +
"이소(Noun: 23, 2), 자키(Noun: 25, 2), 학년(Noun: 47, 2)"
),
SampleTextPair(
"Galaxy S5와 iPhone 6의 경쟁",
"Galaxy(Noun: 0, 6), Galaxy S5(Noun: 0, 9), iPhone(Noun: 11, 6), " +
"iPhone 6의(Noun: 11, 9), iPhone 6의 경쟁(Noun: 11, 12), 6의 경쟁(Noun: 18, 5), " +
"S5(Noun: 7, 2), 경쟁(Noun: 21, 2)"
),
SampleTextPair(
"ABCㅋㅋLTE갤럭시S4ㅋㅋ꼬마가",
"ABC(Noun: 0, 3), LTE갤럭시S4(Noun: 5, 8), 꼬마(Noun: 15, 2), LTE(Noun: 5, 3), " +
"갤럭시(Noun: 8, 3), S4(Noun: 11, 2)"
),
SampleTextPair(
"아름다운 트위터 #해쉬태그 평화로운 트위터의 #hashtag @mention",
"아름다운 트위터(Noun: 0, 8), 평화로운 트위터(Noun: 15, 8), 트위터(Noun: 5, 3), " +
"#해쉬태그(Hashtag: 9, 5), #hashtag(Hashtag: 25, 8)"
)
)
val spamText = "레알 시발 저거 카지노 포르노 야동 보다가 개빡쳤음"
val superLongText: String = "허니버터칩정규직크리스마스" * 50
def time[R](block: => R): Long = {
val t0 = System.currentTimeMillis()
block
val t1 = System.currentTimeMillis()
t1 - t0
}
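  // Usage sketch for the helper above (this mirrors the long-input test below):
  //
  //   val elapsedMs = time { KoreanPhraseExtractor.extractPhrases(tokenize(superLongText)) }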
test("collapsePos correctly collapse KoreanPos sequences") {
assert(KoreanPhraseExtractor.collapsePos(
Seq(
KoreanToken("N", KoreanPos.Noun, 0, 1),
KoreanToken("N", KoreanPos.Noun, 1, 1)
)).mkString("/") ===
"N(Noun: 0, 1)/N(Noun: 1, 1)"
)
assert(KoreanPhraseExtractor.collapsePos(
Seq(
KoreanToken("X", KoreanPos.KoreanParticle, 0, 1),
KoreanToken("p", KoreanPos.NounPrefix, 1, 1),
KoreanToken("N", KoreanPos.Noun, 2, 1)
)).mkString("/") ===
"X(KoreanParticle: 0, 1)/pN(Noun: 1, 2)"
)
assert(KoreanPhraseExtractor.collapsePos(
Seq(
KoreanToken("p", KoreanPos.NounPrefix, 0, 1),
KoreanToken("X", KoreanPos.KoreanParticle, 1, 1),
KoreanToken("N", KoreanPos.Noun, 2, 1)
)).mkString("/") ===
"p(Noun: 0, 1)/X(KoreanParticle: 1, 1)/N(Noun: 2, 1)"
)
assert(KoreanPhraseExtractor.collapsePos(
Seq(
KoreanToken("p", KoreanPos.NounPrefix, 0, 1),
KoreanToken("N", KoreanPos.Noun, 1, 1),
KoreanToken("X", KoreanPos.KoreanParticle, 2, 1)
)).mkString("/") ===
"pN(Noun: 0, 2)/X(KoreanParticle: 2, 1)"
)
assert(KoreanPhraseExtractor.collapsePos(tokenize(sampleText(0).text)).mkString("") ===
"블랙프라이데이(Noun: 0, 7):(Punctuation: 7, 1) (Space: 8, 1)이날(Noun: 9, 2) (Space: 11, 1)" +
"미국(ProperNoun: 12, 2)의(Josa: 14, 1) (Space: 15, 1)수백만(Noun: 16, 3) (Space: 19, 1)" +
"소비자들(Noun: 20, 4)은(Josa: 24, 1) (Space: 25, 1)크리스마스(Noun: 26, 5) (Space: 31, 1)" +
"선물(Noun: 32, 2)을(Josa: 34, 1) (Space: 35, 1)할인(Noun: 36, 2)" +
"된(Verb: 38, 1) (Space: 39, 1)가격(Noun: 40, 2)에(Josa: 42, 1) (Space: 43, 1)" +
"사는(Verb: 44, 2) (Space: 46, 1)것(Noun: 47, 1)을(Josa: 48, 1) (Space: 49, 1)" +
"주(Noun: 50, 1) (Space: 51, 1)목적(Noun: 52, 2)으로(Josa: 54, 2) (Space: 56, 1)" +
"블랙프라이데이(Noun: 57, 7) (Space: 64, 1)쇼핑(Noun: 65, 2)을(Josa: 67, 1) (Space: 68, 1)" +
"한다(Verb: 69, 2).(Punctuation: 71, 1)")
assert(KoreanPhraseExtractor.collapsePos(tokenize(sampleText(1).text)).mkString("") ===
"결정(Noun: 0, 2)했어(Verb: 2, 2).(Punctuation: 4, 1) (Space: 5, 1)" +
"마키(Noun: 6, 2) (Space: 8, 1)코레썸(ProperNoun: 9, 3) (Space: 12, 1)" +
"사주시는(Verb: 13, 4) (Space: 17, 1)분께는(Verb: 18, 3) (Space: 21, 1)" +
"허니버터칩(Noun: 22, 5) (Space: 27, 1)한(Verb: 28, 1) (Space: 29, 1)" +
"봉지(Noun: 30, 2)를(Josa: 32, 1) (Space: 33, 1)선물할(Verb: 34, 3) (Space: 37, 1)" +
"것(Noun: 38, 1)이다(Josa: 39, 2).(Punctuation: 41, 1)")
}
test("extractPhrases correctly extracts phrases") {
assert(KoreanPhraseExtractor.extractPhrases(
tokenize(sampleText(0).text), filterSpam = false
).mkString(", ") ===
"블랙프라이데이(Noun: 0, 7), 이날(Noun: 9, 2), 이날 미국(Noun: 9, 5), 이날 미국의 수백만(Noun: 9, 10), " +
"미국의 수백만(Noun: 12, 7), 수백만(Noun: 16, 3), 이날 미국의 수백만 소비자들(Noun: 9, 15), " +
"미국의 수백만 소비자들(Noun: 12, 12), 수백만 소비자들(Noun: 16, 8), 크리스마스(Noun: 26, 5), " +
"크리스마스 선물(Noun: 26, 8), 할인(Noun: 36, 2), 할인된 가격(Noun: 36, 6), 가격(Noun: 40, 2), " +
"주 목적(Noun: 50, 4), 블랙프라이데이 쇼핑(Noun: 57, 10), " +
"미국(Noun: 12, 2), 소비자들(Noun: 20, 4), 선물(Noun: 32, 2), 목적(Noun: 52, 2), " +
"쇼핑(Noun: 65, 2)")
}
test("extractPhrases correctly extracts phrases from a string") {
sampleText.foreach {
case SampleTextPair(text: String, phrases: String) =>
assertExtraction(text, phrases)
}
}
test("extractPhrases should extract long noun-only phrases in reasonable time") {
assertExtraction(superLongText, "허니버터칩(Noun: 0, 5), 정규직(Noun: 5, 3), 크리스마스(Noun: 8, 5)")
val tokens = tokenize(superLongText)
assert(time(KoreanPhraseExtractor.extractPhrases(tokens)) < 10000)
}
test("extractPhrases should correctly extract the example set") {
def phraseExtractor(text: String) = {
val normalized = KoreanNormalizer.normalize(text)
val tokens = tokenize(normalized)
KoreanPhraseExtractor.extractPhrases(tokens).mkString("/")
}
assertExamples(
"current_phrases.txt", LOG,
phraseExtractor
)
}
test("extractPhrases should filter out spam and profane words") {
assertExtraction(spamText, "레알(Noun: 0, 2), 레알 시발(Noun: 0, 5), 레알 시발 저거(Noun: 0, 8), 시발 저거(Noun: 3, 5), " +
"레알 시발 저거 카지노(Noun: 0, 12), 시발 저거 카지노(Noun: 3, 9), 저거 카지노(Noun: 6, 6), " +
"레알 시발 저거 카지노 포르노(Noun: 0, 16), 시발 저거 카지노 포르노(Noun: 3, 13), " +
"저거 카지노 포르노(Noun: 6, 10), 카지노 포르노(Noun: 9, 7), " +
"레알 시발 저거 카지노 포르노 야동(Noun: 0, 19), 시발 저거 카지노 포르노 야동(Noun: 3, 16), " +
"저거 카지노 포르노 야동(Noun: 6, 13), 카지노 포르노 야동(Noun: 9, 10), 포르노 야동(Noun: 13, 6), " +
"시발(Noun: 3, 2), 저거(Noun: 6, 2), 카지노(Noun: 9, 3), 포르노(Noun: 13, 3), 야동(Noun: 17, 2)")
assert(KoreanPhraseExtractor.extractPhrases(tokenize(spamText), filterSpam = true).mkString(", ") ===
"레알(Noun: 0, 2), 저거(Noun: 6, 2)")
}
test("extractPhrases should detect numbers with special chars") {
assertExtraction("트위터 25.2% 상승.",
"트위터(Noun: 0, 3), 트위터 25.2%(Noun: 0, 9), 트위터 25.2% 상승(Noun: 0, 12), " +
"25.2% 상승(Noun: 4, 8), 25.2%(Noun: 4, 5), 상승(Noun: 10, 2)")
assertExtraction("짜장면 3400원.", "짜장면(Noun: 0, 3), 짜장면 3400원(Noun: 0, 9), 3400원(Noun: 4, 5)")
assertExtraction("떡볶이 3,444,231원 + 400원.",
"떡볶이(Noun: 0, 3), 떡볶이 3,444,231원(Noun: 0, 14), 400원(Noun: 17, 4), 3,444,231원(Noun: 4, 10)")
assertExtraction("트위터 $200으로 상승",
"트위터(Noun: 0, 3), 트위터 $200(Noun: 0, 8), 상승(Noun: 11, 2), $200(Noun: 4, 4)")
assertExtraction("1,200.34원. 1,200.34엔. 1,200.34옌. 1,200.34위안.",
"1,200.34원(Noun: 0, 9), 1,200.34엔(Noun: 11, 9), 1,200.34옌(Noun: 22, 9), 1,200.34위안(Noun: 33, 10)")
assertExtraction("200달러 3위 3000유로",
"200달러(Noun: 0, 5), 200달러 3위(Noun: 0, 8), 200달러 3위 3000유로(Noun: 0, 15), " +
"3위 3000유로(Noun: 6, 9), 3000유로(Noun: 9, 6)")
}
def assertExtraction(s: String, expected: String): Unit = {
val tokens = tokenize(s)
assert(KoreanPhraseExtractor.extractPhrases(tokens).mkString(", ") ===
expected)
}
}
|
tglstory/twitter-korean-text
|
src/test/scala/com/twitter/penguin/korean/phrase_extractor/KoreanPhraseExtractorTest.scala
|
Scala
|
apache-2.0
| 11,497 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor
object InitializationDocSpec {
class PreStartInitExample extends Actor {
override def receive = {
case _ => // Ignore
}
//#preStartInit
override def preStart(): Unit = {
// Initialize children here
}
// Overriding postRestart to disable the call to preStart()
// after restarts
override def postRestart(reason: Throwable): Unit = ()
// The default implementation of preRestart() stops all the children
// of the actor. To opt-out from stopping the children, we
// have to override preRestart()
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
// Keep the call to postStop(), but no stopping of children
postStop()
}
//#preStartInit
}
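  // Restart-path sketch for the overrides above (standard classic-actor
  // lifecycle): preRestart(reason, msg) -> postStop() -> new instance
  // constructor -> postRestart(reason). Because postRestart no longer calls
  // preStart(), children created in preStart() are initialized exactly once.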
class MessageInitExample extends Actor {
//#messageInit
var initializeMe: Option[String] = None
override def receive = {
case "init" =>
initializeMe = Some("Up and running")
context.become(initialized, discardOld = true)
}
def initialized: Receive = {
case "U OK?" => initializeMe foreach { sender() ! _ }
}
//#messageInit
}
}
class InitializationDocSpec extends AkkaSpec with ImplicitSender {
import InitializationDocSpec._
"Message based initialization example" must {
"work correctly" in {
val example = system.actorOf(Props[MessageInitExample], "messageInitExample")
val probe = "U OK?"
example ! probe
expectNoMsg()
example ! "init"
example ! probe
expectMsg("Up and running")
}
}
}
|
ktoso/asciidoctor-sbt-plugin
|
src/sbt-test/sbt-asciidoctor/simple-doc/src/test/scala/akka/actor/InitializationDocSpec.scala
|
Scala
|
apache-2.0
| 1,660 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util.{Optional, Properties}
import kafka.log.LogConfig
import kafka.utils.TestUtils
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.ApiKeys
import org.apache.kafka.common.record.MemoryRecords
import org.apache.kafka.common.requests.{FetchRequest, FetchResponse}
import org.apache.kafka.common.requests.FetchRequest.PartitionData
import org.junit.{Assert, Test}
import scala.collection.JavaConverters._
/**
* This test verifies that the KIP-541 broker-level FetchMaxBytes configuration is honored.
*/
class FetchRequestMaxBytesTest extends BaseRequestTest {
override def brokerCount: Int = 1
private var producer: KafkaProducer[Array[Byte], Array[Byte]] = null
private val testTopic = "testTopic"
private val testTopicPartition = new TopicPartition(testTopic, 0)
private val messages = IndexedSeq(
multiByteArray(1),
multiByteArray(500),
multiByteArray(1040),
multiByteArray(500),
multiByteArray(50))
private def multiByteArray(length: Int): Array[Byte] = {
val array = new Array[Byte](length)
array.indices.foreach(i => array(i) = (i % 5).toByte)
array
}
private def oneByteArray(value: Byte): Array[Byte] = {
val array = new Array[Byte](1)
array(0) = value
array
}
override def setUp(): Unit = {
super.setUp()
producer = TestUtils.createProducer(TestUtils.getBrokerListStrFromServers(servers))
}
override def tearDown(): Unit = {
if (producer != null)
producer.close()
super.tearDown()
}
override protected def brokerPropertyOverrides(properties: Properties): Unit = {
super.brokerPropertyOverrides(properties)
properties.put(KafkaConfig.FetchMaxBytes, "1024")
}
private def createTopics(): Unit = {
val topicConfig = new Properties
topicConfig.setProperty(LogConfig.MinInSyncReplicasProp, 1.toString)
createTopic(testTopic,
numPartitions = 1,
replicationFactor = 1,
topicConfig = topicConfig)
// Produce several messages as single batches.
messages.indices.foreach(i => {
val record = new ProducerRecord(testTopic, 0, oneByteArray(i.toByte), messages(i))
val future = producer.send(record)
producer.flush()
future.get()
})
}
private def sendFetchRequest(leaderId: Int, request: FetchRequest): FetchResponse[MemoryRecords] = {
val response = connectAndSend(request, ApiKeys.FETCH, destination = brokerSocketServer(leaderId))
FetchResponse.parse(response, request.version)
}
/**
* Tests that each of our fetch requests respects FetchMaxBytes.
*
* Note that when a single batch is larger than FetchMaxBytes, it will be
* returned in full even if this is larger than FetchMaxBytes. See KIP-74.
*/
@Test
def testConsumeMultipleRecords(): Unit = {
createTopics()
expectNextRecords(IndexedSeq(messages(0), messages(1)), 0)
expectNextRecords(IndexedSeq(messages(2)), 2)
expectNextRecords(IndexedSeq(messages(3), messages(4)), 3)
}
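  // Size arithmetic behind the expectations above (approximate; batch overhead
  // is ignored): the payloads are ~1, ~500, ~1040, ~500 and ~50 bytes with
  // fetch.max.bytes = 1024. Offsets 0-1 fit in one fetch (501 <= 1024), the
  // 1040-byte batch exceeds the limit but is returned whole (KIP-74), and
  // offsets 3-4 fit together again (550 <= 1024).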
private def expectNextRecords(expected: IndexedSeq[Array[Byte]],
fetchOffset: Long): Unit = {
val response = sendFetchRequest(0,
FetchRequest.Builder.forConsumer(Int.MaxValue, 0,
Map(testTopicPartition ->
new PartitionData(fetchOffset, 0, Integer.MAX_VALUE, Optional.empty())).asJava).build(3))
val records = response.responseData().get(testTopicPartition).records.records()
Assert.assertNotNull(records)
val recordsList = records.asScala.toList
Assert.assertEquals(expected.size, recordsList.size)
recordsList.zipWithIndex.foreach {
case (record, i) => {
val buffer = record.value().duplicate()
val array = new Array[Byte](buffer.remaining())
buffer.get(array)
Assert.assertArrayEquals(s"expectNextRecords unexpected element ${i}",
expected(i), array)
}
}
}
}
|
noslowerdna/kafka
|
core/src/test/scala/unit/kafka/server/FetchRequestMaxBytesTest.scala
|
Scala
|
apache-2.0
| 4,846 |
package mesosphere.marathon
package raml
import mesosphere.marathon.stream.Implicits._
import scala.concurrent.duration._
trait ReadinessConversions {
implicit val readinessProtocolWrites: Writes[core.readiness.ReadinessCheck.Protocol, HttpScheme] = Writes {
case core.readiness.ReadinessCheck.Protocol.HTTP => HttpScheme.Http
case core.readiness.ReadinessCheck.Protocol.HTTPS => HttpScheme.Https
}
implicit val readinessCheckWrites: Writes[core.readiness.ReadinessCheck, ReadinessCheck] = Writes { check =>
ReadinessCheck(
name = check.name,
protocol = check.protocol.toRaml,
path = check.path,
portName = check.portName,
intervalSeconds = check.interval.toSeconds.toInt,
timeoutSeconds = check.timeout.toSeconds.toInt,
httpStatusCodesForReady = Option(check.httpStatusCodesForReady),
preserveLastResponse = check.preserveLastResponse
)
}
implicit val readinessProtocolReads: Reads[HttpScheme, core.readiness.ReadinessCheck.Protocol] = Reads {
case HttpScheme.Http => core.readiness.ReadinessCheck.Protocol.HTTP
case HttpScheme.Https => core.readiness.ReadinessCheck.Protocol.HTTPS
}
implicit val appReadinessRamlReader: Reads[ReadinessCheck, core.readiness.ReadinessCheck] = Reads { check =>
core.readiness.ReadinessCheck(
name = check.name,
protocol = check.protocol.fromRaml,
path = check.path,
portName = check.portName,
interval = check.intervalSeconds.seconds,
timeout = check.timeoutSeconds.seconds,
httpStatusCodesForReady = check.httpStatusCodesForReady.getOrElse(
        // normalization should have taken care of this already.
throw SerializationFailedException("httpStatusCodesForReady must be specified")),
preserveLastResponse = check.preserveLastResponse
)
}
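  // Inversion sketch: the Reads above undoes the Writes above field by field,
  // e.g. a check with interval = 10.seconds is written as intervalSeconds = 10
  // and read back to 10.seconds; only httpStatusCodesForReady is asymmetric
  // (optional on the RAML side, required after normalization on the core side).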
implicit val appReadinessProtocolProtoRamlWriter: Writes[Protos.ReadinessCheckDefinition.Protocol, HttpScheme] = Writes { proto =>
import Protos.ReadinessCheckDefinition.Protocol._
proto match {
case HTTP => HttpScheme.Http
case HTTPS => HttpScheme.Https
case badProtocol => throw new IllegalStateException(s"unsupported readiness check protocol $badProtocol")
}
}
implicit val appReadinessProtoRamlWriter: Writes[Protos.ReadinessCheckDefinition, ReadinessCheck] = Writes { rc =>
ReadinessCheck(
name = if (rc.hasName) rc.getName else ReadinessCheck.DefaultName,
protocol = if (rc.hasProtocol) rc.getProtocol.toRaml else ReadinessCheck.DefaultProtocol,
path = if (rc.hasPath) rc.getPath else ReadinessCheck.DefaultPath,
portName = if (rc.hasPortName) rc.getPortName else ReadinessCheck.DefaultPortName,
intervalSeconds = if (rc.hasIntervalMillis) (rc.getIntervalMillis / 1000).toInt else ReadinessCheck.DefaultIntervalSeconds,
timeoutSeconds = if (rc.hasTimeoutMillis) (rc.getTimeoutMillis / 1000).toInt else ReadinessCheck.DefaultTimeoutSeconds,
httpStatusCodesForReady = if (rc.getHttpStatusCodeForReadyCount > 0) Option(rc.getHttpStatusCodeForReadyList.map(_.intValue())(collection.breakOut)) else None,
preserveLastResponse = if (rc.hasPreserveLastResponse) rc.getPreserveLastResponse else ReadinessCheck.DefaultPreserveLastResponse
)
}
}
|
guenter/marathon
|
src/main/scala/mesosphere/marathon/raml/ReadinessConversions.scala
|
Scala
|
apache-2.0
| 3,275 |
package example
import scala.annotation._
object Recitation1 {
  // Naive, non-tail-recursive version, kept for contrast:
  //def factorial(n: Int): Int = if (n <= 0) 1 else n * factorial(n - 1)
def factorial(n: Int): Int = {
@tailrec
def fact(n: Int, acc: Int): Int = {
if (n <= 1) acc
else fact(n - 1, acc * n)
}
fact(n, 1)
}
def sum(ls: List[Int]): Int = {
@tailrec
def _sum(ls: List[Int], acc: Int): Int = ls match {
case Nil => acc
case x :: xs => _sum(xs, x + acc)
}
_sum(ls, 0)
}
def fastExp(base: Int, n: Int): Int = {
@tailrec
def _fastExp(base: Int, n: Int, acc: Int): Int = {
if (n == 0) acc
else if(n == 1) acc * base
else if (n % 2 == 0) _fastExp(base * base, n / 2, acc)
else _fastExp(base * base, n / 2, acc * base)
}
_fastExp(base, n, 1)
}
def fibonacci(n: Int): Int = {
@tailrec
def fibo(n: Int, a: Int, b: Int): Int = {
if (n == 0) a
else if(n == 1) b
else fibo(n - 1, b, a + b)
}
fibo(n, 1, 1)
}
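  // Spot checks (illustrative): the accumulator versions agree with the naive
  // definitions on small inputs.
  //   factorial(5) == 120
  //   sum(List(1, 2, 3)) == 6
  //   fastExp(2, 10) == 1024
  //   fibonacci(5) == 8 (this variant runs 1, 1, 2, 3, 5, 8, ...)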
}
|
rusucosmin/courses
|
fp/recitation/src/main/scala/example/recitation1.scala
|
Scala
|
mit
| 1,008 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.stream.generic
import java.util.concurrent.{ExecutorService, Executors, LinkedBlockingQueue, TimeUnit}
import com.google.common.collect.Queues
import com.typesafe.config.Config
import org.apache.camel.CamelContext
import org.apache.camel.impl._
import org.apache.camel.scala.dsl.builder.RouteBuilder
import org.locationtech.geomesa.convert.{SimpleFeatureConverter, SimpleFeatureConverters}
import org.locationtech.geomesa.stream.{SimpleFeatureStreamSource, SimpleFeatureStreamSourceFactory}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.slf4j.LoggerFactory
import scala.util.Try
class GenericSimpleFeatureStreamSourceFactory extends SimpleFeatureStreamSourceFactory {
lazy val ctx: CamelContext = {
val context = new DefaultCamelContext()
context.start()
context
}
  override def canProcess(conf: Config): Boolean =
    conf.hasPath("type") && conf.getString("type") == "generic"
override def create(conf: Config): SimpleFeatureStreamSource = {
val sourceRoute = conf.getString("source-route")
val sft = SimpleFeatureTypes.createType(conf.getConfig("sft"))
val threads = Try(conf.getInt("threads")).getOrElse(1)
val converterConf = conf.getConfig("converter")
val fac = () => SimpleFeatureConverters.build[String](sft, converterConf)
new GenericSimpleFeatureStreamSource(ctx, sourceRoute, sft, threads, fac)
}
}
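// Config sketch accepted by the factory above (the camel endpoint and counts
// are illustrative assumptions; only the key names come from `create`):
//
//   {
//     type = "generic"
//     source-route = "netty4:tcp://localhost:5899?textline=true"
//     threads = 2
//     sft = { ... SimpleFeatureType definition ... }
//     converter = { ... SimpleFeatureConverter definition ... }
//   }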
class GenericSimpleFeatureStreamSource(val ctx: CamelContext,
sourceRoute: String,
val sft: SimpleFeatureType,
threads: Int,
parserFactory: () => SimpleFeatureConverter[String])
extends SimpleFeatureStreamSource {
private val logger = LoggerFactory.getLogger(classOf[GenericSimpleFeatureStreamSource])
var inQ: LinkedBlockingQueue[String] = null
var outQ: LinkedBlockingQueue[SimpleFeature] = null
var parsers: Seq[SimpleFeatureConverter[String]] = null
var es: ExecutorService = null
override def init(): Unit = {
super.init()
inQ = Queues.newLinkedBlockingQueue[String]()
outQ = Queues.newLinkedBlockingQueue[SimpleFeature]()
val route = getProcessingRoute(inQ)
ctx.addRoutes(route)
parsers = List.fill(threads)(parserFactory())
es = Executors.newCachedThreadPool()
parsers.foreach { p => es.submit(getQueueProcessor(p)) }
}
def getProcessingRoute(inQ: LinkedBlockingQueue[String]): RouteBuilder = new RouteBuilder {
from(sourceRoute).process { e => inQ.put(e.getIn.getBody.asInstanceOf[String]) }
}
override def next: SimpleFeature = outQ.poll(500, TimeUnit.MILLISECONDS)
def getQueueProcessor(p: SimpleFeatureConverter[String]) = {
new Runnable {
override def run(): Unit = {
var running = true
val input = new Iterator[String] {
override def hasNext: Boolean = running
override def next(): String = {
var res: String = null
while (res == null) {
res = inQ.take() // blocks
}
res
}
}
try {
p.processInput(input).foreach(outQ.put)
} catch {
case t: InterruptedException => running = false
}
}
}
}
}
|
nagavallia/geomesa
|
geomesa-stream/geomesa-stream-generic/src/main/scala/org/locationtech/geomesa/stream/generic/GenericSimpleFeatureStreamSourceFactory.scala
|
Scala
|
apache-2.0
| 3,893 |
package org.scaladebugger.api.profiles.traits.requests.watchpoints
import org.scaladebugger.api.lowlevel.JDIArgument
import org.scaladebugger.api.lowlevel.events.data.JDIEventDataResult
import org.scaladebugger.api.lowlevel.watchpoints.ModificationWatchpointRequestInfo
import org.scaladebugger.api.pipelines.Pipeline.IdentityPipeline
import org.scaladebugger.api.profiles.traits.info.events.ModificationWatchpointEventInfo
import scala.util.Try
/**
* Represents the interface that needs to be implemented to provide
* modification watchpoint functionality for a specific debug profile.
*/
trait ModificationWatchpointRequest {
/** Represents a modification watchpoint event and any associated data. */
type ModificationWatchpointEventAndData =
(ModificationWatchpointEventInfo, Seq[JDIEventDataResult])
/**
* Retrieves the collection of active and pending modification watchpoint
* requests.
*
* @return The collection of information on modification watchpoint requests
*/
def modificationWatchpointRequests: Seq[ModificationWatchpointRequestInfo]
/**
* Constructs a stream of modification watchpoint events for field in the
* specified class.
*
* @param className The full name of the class whose field to watch
* @param fieldName The name of the field to watch
* @param extraArguments The additional JDI arguments to provide
* @return The stream of modification watchpoint events
*/
def tryGetOrCreateModificationWatchpointRequest(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): Try[IdentityPipeline[ModificationWatchpointEventInfo]] = {
    tryGetOrCreateModificationWatchpointRequestWithData(
      className,
      fieldName,
      extraArguments: _*
    ).map(_.map(_._1).noop())
}
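  // Usage sketch (the class and field names are hypothetical; the pipeline is
  // the project's own IdentityPipeline):
  //
  //   profile.tryGetOrCreateModificationWatchpointRequest("com.example.Foo", "counter")
  //     .foreach(_.foreach(event => println(s"field modified: $event")))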
/**
* Constructs a stream of modification watchpoint events for field in the
* specified class.
*
* @param className The full name of the class whose field to watch
* @param fieldName The name of the field to watch
* @param extraArguments The additional JDI arguments to provide
* @return The stream of modification watchpoint events and any retrieved data
* based on requests from extra arguments
*/
def tryGetOrCreateModificationWatchpointRequestWithData(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): Try[IdentityPipeline[ModificationWatchpointEventAndData]]
/**
* Constructs a stream of modification watchpoint events for field in the
* specified class.
*
* @param className The full name of the class whose field to watch
* @param fieldName The name of the field to watch
* @param extraArguments The additional JDI arguments to provide
* @return The stream of modification watchpoint events
*/
def getOrCreateModificationWatchpointRequest(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): IdentityPipeline[ModificationWatchpointEventInfo] = {
tryGetOrCreateModificationWatchpointRequest(
className,
fieldName,
extraArguments: _*
).get
}
/**
* Constructs a stream of modification watchpoint events for field in the
* specified class.
*
* @param className The full name of the class whose field to watch
* @param fieldName The name of the field to watch
* @param extraArguments The additional JDI arguments to provide
* @return The stream of modification watchpoint events and any retrieved data
* based on requests from extra arguments
*/
def getOrCreateModificationWatchpointRequestWithData(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): IdentityPipeline[ModificationWatchpointEventAndData] = {
tryGetOrCreateModificationWatchpointRequestWithData(
className,
fieldName,
extraArguments: _*
).get
}
/**
* Determines if there is any modification watchpoint request for the
* specified class field that is pending.
*
* @param className The full name of the class/object/trait containing the
* method being watched
* @param fieldName The name of the field being watched
* @return True if there is at least one modification watchpoint request with
* the specified field name in the specified class that is pending,
* otherwise false
*/
def isModificationWatchpointRequestPending(
className: String,
fieldName: String
): Boolean
/**
* Determines if there is any modification watchpoint request for the
* specified class field with matching arguments that is pending.
*
* @param className The full name of the class/object/trait containing the
* method being watched
* @param fieldName The name of the field being watched
* @param extraArguments The additional arguments provided to the specific
* modification watchpoint request
* @return True if there is at least one modification watchpoint request with
* the specified field name and arguments in the specified class that
* is pending, otherwise false
*/
def isModificationWatchpointRequestWithArgsPending(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): Boolean
/**
* Removes all modification watchpoint requests for the specified class field.
*
* @param className The full name of the class/object/trait containing the
* field being watched
* @param fieldName The name of the field being watched
* @return The collection of information about removed modification watchpoint requests
*/
def removeModificationWatchpointRequests(
className: String,
fieldName: String
): Seq[ModificationWatchpointRequestInfo]
/**
* Removes all modification watchpoint requests for the specified class field.
*
* @param className The full name of the class/object/trait containing the
* field being watched
* @param fieldName The name of the field being watched
* @return Success containing the collection of information about removed
* modification watchpoint requests, otherwise a failure
*/
def tryRemoveModificationWatchpointRequests(
className: String,
fieldName: String
): Try[Seq[ModificationWatchpointRequestInfo]] = Try(removeModificationWatchpointRequests(
className,
fieldName
))
/**
* Removes all modification watchpoint requests for the specified class field with
* the specified extra arguments.
*
* @param className The full name of the class/object/trait containing the
* field being watched
* @param fieldName The name of the field being watched
* @param extraArguments the additional arguments provided to the specific
* modification watchpoint request
* @return Some information about the removed request if it existed,
* otherwise None
*/
def removeModificationWatchpointRequestWithArgs(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): Option[ModificationWatchpointRequestInfo]
/**
* Removes all modification watchpoint requests for the specified class field with
* the specified extra arguments.
*
* @param className The full name of the class/object/trait containing the
* field being watched
* @param fieldName The name of the field being watched
* @param extraArguments the additional arguments provided to the specific
* modification watchpoint request
* @return Success containing Some information if it existed (or None if it
* did not), otherwise a failure
*/
def tryRemoveModificationWatchpointRequestWithArgs(
className: String,
fieldName: String,
extraArguments: JDIArgument*
): Try[Option[ModificationWatchpointRequestInfo]] = Try(removeModificationWatchpointRequestWithArgs(
className,
fieldName,
extraArguments: _*
))
/**
* Removes all modification watchpoint requests.
*
* @return The collection of information about removed modification watchpoint requests
*/
def removeAllModificationWatchpointRequests(): Seq[ModificationWatchpointRequestInfo]
/**
* Removes all modification watchpoint requests.
*
* @return Success containing the collection of information about removed
* modification watchpoint requests, otherwise a failure
*/
def tryRemoveAllModificationWatchpointRequests(): Try[Seq[ModificationWatchpointRequestInfo]] = Try(
removeAllModificationWatchpointRequests()
)
}
|
ensime/scala-debugger
|
scala-debugger-api/src/main/scala/org/scaladebugger/api/profiles/traits/requests/watchpoints/ModificationWatchpointRequest.scala
|
Scala
|
apache-2.0
| 8,653 |
package org.embulk.parser.xpath2
import org.scalatest.{DiagrammedAssertions, WordSpec}
abstract class UnitSpec extends WordSpec with DiagrammedAssertions
|
maji-KY/embulk-parser-xpath2
|
src/test/scala/org/embulk/parser/xpath2/UnitSpec.scala
|
Scala
|
gpl-2.0
| 156 |
package ch09_queue
class DynamicArrayQueueTest extends DemoQueueTest {
override def getInstance(): DemoQueue[Int] = new DynamicArrayQueue[Int](15)
it should "copy data when tail reach the end of the queue" in {
val queue = getInstance()
for (i <- Range(0, 15)) {
queue.enqueue(i)
}
queue.size should equal(15)
queue.dequeue().get should equal(0)
//enqueue another one
queue.enqueue(30)
queue.size should equal(15)
}
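  // Why this exercises the copy path: after 15 enqueues the tail index sits at
  // capacity, so even though the dequeue freed slot 0 the next enqueue must
  // first shift the remaining 14 elements to the front, after which size is
  // back to 15.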
}
|
wangzheng0822/algo
|
scala/src/test/scala/ch09_queue/DynamicArrayQueueTest.scala
|
Scala
|
apache-2.0
| 467 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.Platform
import org.apache.spark.unsafe.array.ByteArrayMethods
import org.apache.spark.unsafe.types.UTF8String
/**
* Returns an Array containing the evaluation of all children expressions.
*/
@ExpressionDescription(
usage = "_FUNC_(expr, ...) - Returns an array with the given elements.",
examples = """
Examples:
> SELECT _FUNC_(1, 2, 3);
[1,2,3]
""")
case class CreateArray(children: Seq[Expression]) extends Expression {
override def foldable: Boolean = children.forall(_.foldable)
override def checkInputDataTypes(): TypeCheckResult = {
TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
}
override def dataType: ArrayType = {
ArrayType(
TypeCoercion.findCommonTypeDifferentOnlyInNullFlags(children.map(_.dataType))
.getOrElse(StringType),
containsNull = children.exists(_.nullable))
}
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
new GenericArrayData(children.map(_.eval(input)).toArray)
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val et = dataType.elementType
val (allocation, assigns, arrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, et, children, false, "createArray")
ev.copy(
code = code"${allocation}${assigns}",
value = JavaCode.variable(arrayData, dataType),
isNull = FalseLiteral)
}
override def prettyName: String = "array"
}
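// SQL-level sketch of CreateArray above:
//   SELECT array(1, 2, 3)  -- CreateArray(Seq(Literal(1), Literal(2), Literal(3)))
//   SELECT array()         -- no children, so the element type falls back to StringType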
private [sql] object GenArrayData {
/**
* Return Java code pieces based on DataType and array size to allocate ArrayData class
*
* @param ctx a [[CodegenContext]]
* @param elementType data type of underlying array elements
* @param elementsExpr concatenated set of [[Expression]] for each element of an underlying array
* @param isMapKey if true, throw an exception when the element is null
* @param functionName string to include in the error message
* @return (array allocation, concatenated assignments to each array elements, arrayData name)
*/
def genCodeToCreateArrayData(
ctx: CodegenContext,
elementType: DataType,
elementsExpr: Seq[Expression],
isMapKey: Boolean,
functionName: String): (String, String, String) = {
val arrayDataName = ctx.freshName("arrayData")
val numElements = s"${elementsExpr.length}L"
val initialization = CodeGenerator.createArrayData(
arrayDataName, elementType, numElements, s" $functionName failed.")
val assignments = elementsExpr.zipWithIndex.map { case (expr, i) =>
val eval = expr.genCode(ctx)
val setArrayElement = CodeGenerator.setArrayElement(
arrayDataName, elementType, i.toString, eval.value)
val assignment = if (!expr.nullable) {
setArrayElement
} else {
val isNullAssignment = if (!isMapKey) {
s"$arrayDataName.setNullAt($i);"
} else {
"throw new RuntimeException(\\"Cannot use null as map key!\\");"
}
s"""
|if (${eval.isNull}) {
| $isNullAssignment
|} else {
| $setArrayElement
|}
""".stripMargin
}
s"""
|${eval.code}
|$assignment
""".stripMargin
}
val assignmentString = ctx.splitExpressionsWithCurrentInputs(
expressions = assignments,
funcName = "apply",
extraArguments = ("ArrayData", arrayDataName) :: Nil)
(initialization, assignmentString, arrayDataName)
}
}
/**
* Returns a catalyst Map containing the evaluation of all children expressions as keys and values.
* The children are a flatted sequence of kv pairs, e.g. (key1, value1, key2, value2, ...)
*/
@ExpressionDescription(
usage = "_FUNC_(key0, value0, key1, value1, ...) - Creates a map with the given key/value pairs.",
examples = """
Examples:
> SELECT _FUNC_(1.0, '2', 3.0, '4');
{1.0:"2",3.0:"4"}
""")
case class CreateMap(children: Seq[Expression]) extends Expression {
lazy val keys = children.indices.filter(_ % 2 == 0).map(children)
lazy val values = children.indices.filter(_ % 2 != 0).map(children)
override def foldable: Boolean = children.forall(_.foldable)
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(
s"$prettyName expects a positive even number of arguments.")
} else if (!TypeCoercion.haveSameType(keys.map(_.dataType))) {
TypeCheckResult.TypeCheckFailure(
"The given keys of function map should all be the same type, but they are " +
keys.map(_.dataType.catalogString).mkString("[", ", ", "]"))
} else if (!TypeCoercion.haveSameType(values.map(_.dataType))) {
TypeCheckResult.TypeCheckFailure(
"The given values of function map should all be the same type, but they are " +
values.map(_.dataType.catalogString).mkString("[", ", ", "]"))
} else {
TypeCheckResult.TypeCheckSuccess
}
}
override def dataType: DataType = {
MapType(
keyType = TypeCoercion.findCommonTypeDifferentOnlyInNullFlags(keys.map(_.dataType))
.getOrElse(StringType),
valueType = TypeCoercion.findCommonTypeDifferentOnlyInNullFlags(values.map(_.dataType))
.getOrElse(StringType),
valueContainsNull = values.exists(_.nullable))
}
override def nullable: Boolean = false
override def eval(input: InternalRow): Any = {
val keyArray = keys.map(_.eval(input)).toArray
if (keyArray.contains(null)) {
throw new RuntimeException("Cannot use null as map key!")
}
val valueArray = values.map(_.eval(input)).toArray
new ArrayBasedMapData(new GenericArrayData(keyArray), new GenericArrayData(valueArray))
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val mapClass = classOf[ArrayBasedMapData].getName
val MapType(keyDt, valueDt, _) = dataType
val (allocationKeyData, assignKeys, keyArrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, keyDt, keys, true, "createMap")
val (allocationValueData, assignValues, valueArrayData) =
GenArrayData.genCodeToCreateArrayData(ctx, valueDt, values, false, "createMap")
val code =
code"""
final boolean ${ev.isNull} = false;
$allocationKeyData
$assignKeys
$allocationValueData
$assignValues
final MapData ${ev.value} = new $mapClass($keyArrayData, $valueArrayData);
"""
ev.copy(code = code)
}
override def prettyName: String = "map"
}
/**
* Returns a catalyst Map containing the two arrays in children expressions as keys and values.
*/
@ExpressionDescription(
usage = """
_FUNC_(keys, values) - Creates a map with a pair of the given key/value arrays. All elements
in keys should not be null""",
examples = """
Examples:
> SELECT _FUNC_(array(1.0, 3.0), array('2', '4'));
{1.0:"2",3.0:"4"}
""", since = "2.4.0")
case class MapFromArrays(left: Expression, right: Expression)
extends BinaryExpression with ExpectsInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(ArrayType, ArrayType)
override def dataType: DataType = {
MapType(
keyType = left.dataType.asInstanceOf[ArrayType].elementType,
valueType = right.dataType.asInstanceOf[ArrayType].elementType,
valueContainsNull = right.dataType.asInstanceOf[ArrayType].containsNull)
}
override def nullSafeEval(keyArray: Any, valueArray: Any): Any = {
val keyArrayData = keyArray.asInstanceOf[ArrayData]
val valueArrayData = valueArray.asInstanceOf[ArrayData]
if (keyArrayData.numElements != valueArrayData.numElements) {
throw new RuntimeException("The given two arrays should have the same length")
}
val leftArrayType = left.dataType.asInstanceOf[ArrayType]
if (leftArrayType.containsNull) {
var i = 0
while (i < keyArrayData.numElements) {
if (keyArrayData.isNullAt(i)) {
throw new RuntimeException("Cannot use null as map key!")
}
i += 1
}
}
new ArrayBasedMapData(keyArrayData.copy(), valueArrayData.copy())
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
nullSafeCodeGen(ctx, ev, (keyArrayData, valueArrayData) => {
val arrayBasedMapData = classOf[ArrayBasedMapData].getName
val leftArrayType = left.dataType.asInstanceOf[ArrayType]
val keyArrayElemNullCheck = if (!leftArrayType.containsNull) "" else {
val i = ctx.freshName("i")
s"""
|for (int $i = 0; $i < $keyArrayData.numElements(); $i++) {
| if ($keyArrayData.isNullAt($i)) {
| throw new RuntimeException("Cannot use null as map key!");
| }
|}
""".stripMargin
}
s"""
|if ($keyArrayData.numElements() != $valueArrayData.numElements()) {
| throw new RuntimeException("The given two arrays should have the same length");
|}
|$keyArrayElemNullCheck
|${ev.value} = new $arrayBasedMapData($keyArrayData.copy(), $valueArrayData.copy());
""".stripMargin
})
}
override def prettyName: String = "map_from_arrays"
}
/**
* An expression representing a not yet available attribute name. This expression is unevaluable
 * and, as its name suggests, it is a temporary placeholder until we're able to determine the
* actual attribute name.
*/
case object NamePlaceholder extends LeafExpression with Unevaluable {
override lazy val resolved: Boolean = false
override def foldable: Boolean = false
override def nullable: Boolean = false
override def dataType: DataType = StringType
override def prettyName: String = "NamePlaceholder"
override def toString: String = prettyName
}
/**
* Returns a Row containing the evaluation of all children expressions.
*/
object CreateStruct extends FunctionBuilder {
def apply(children: Seq[Expression]): CreateNamedStruct = {
CreateNamedStruct(children.zipWithIndex.flatMap {
case (e: NamedExpression, _) if e.resolved => Seq(Literal(e.name), e)
case (e: NamedExpression, _) => Seq(NamePlaceholder, e)
case (e, index) => Seq(Literal(s"col${index + 1}"), e)
})
}
/**
* Entry to use in the function registry.
*/
val registryEntry: (String, (ExpressionInfo, FunctionBuilder)) = {
val info: ExpressionInfo = new ExpressionInfo(
"org.apache.spark.sql.catalyst.expressions.NamedStruct",
null,
"struct",
"_FUNC_(col1, col2, col3, ...) - Creates a struct with the given field values.",
"",
"",
"",
"")
("struct", (info, this))
}
}
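// Naming sketch, grounded in apply() above: resolved NamedExpressions keep their
// names, unresolved ones get a NamePlaceholder, and all other children get
// positional names. E.g., for a resolved attribute `a` and a plain literal:
//
//   CreateStruct(Seq(a, Literal(1)))
//     ~> CreateNamedStruct(Seq(Literal("a"), a, Literal("col2"), Literal(1)))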
/**
* Common base class for both [[CreateNamedStruct]] and [[CreateNamedStructUnsafe]].
*/
trait CreateNamedStructLike extends Expression {
lazy val (nameExprs, valExprs) = children.grouped(2).map {
case Seq(name, value) => (name, value)
}.toList.unzip
lazy val names = nameExprs.map(_.eval(EmptyRow))
override def nullable: Boolean = false
override def foldable: Boolean = valExprs.forall(_.foldable)
override lazy val dataType: StructType = {
val fields = names.zip(valExprs).map {
case (name, expr) =>
val metadata = expr match {
case ne: NamedExpression => ne.metadata
case _ => Metadata.empty
}
StructField(name.toString, expr.dataType, expr.nullable, metadata)
}
StructType(fields)
}
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size % 2 != 0) {
TypeCheckResult.TypeCheckFailure(s"$prettyName expects an even number of arguments.")
} else {
val invalidNames = nameExprs.filterNot(e => e.foldable && e.dataType == StringType)
if (invalidNames.nonEmpty) {
TypeCheckResult.TypeCheckFailure(
s"Only foldable ${StringType.catalogString} expressions are allowed to appear at odd" +
s" position, got: ${invalidNames.mkString(",")}")
} else if (!names.contains(null)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure("Field name should not be null")
}
}
}
/**
* Returns Aliased [[Expression]]s that could be used to construct a flattened version of this
* StructType.
*/
def flatten: Seq[NamedExpression] = valExprs.zip(names).map {
case (v, n) => Alias(v, n.toString)()
}
override def eval(input: InternalRow): Any = {
InternalRow(valExprs.map(_.eval(input)): _*)
}
}
/**
* Creates a struct with the given field names and values
*
* @param children Seq(name1, val1, name2, val2, ...)
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values.",
examples = """
Examples:
> SELECT _FUNC_("a", 1, "b", 2, "c", 3);
{"a":1,"b":2,"c":3}
""")
// scalastyle:on line.size.limit
case class CreateNamedStruct(children: Seq[Expression]) extends CreateNamedStructLike {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val rowClass = classOf[GenericInternalRow].getName
val values = ctx.freshName("values")
val valCodes = valExprs.zipWithIndex.map { case (e, i) =>
val eval = e.genCode(ctx)
s"""
|${eval.code}
|if (${eval.isNull}) {
| $values[$i] = null;
|} else {
| $values[$i] = ${eval.value};
|}
""".stripMargin
}
val valuesCode = ctx.splitExpressionsWithCurrentInputs(
expressions = valCodes,
funcName = "createNamedStruct",
extraArguments = "Object[]" -> values :: Nil)
ev.copy(code =
code"""
|Object[] $values = new Object[${valExprs.size}];
|$valuesCode
|final InternalRow ${ev.value} = new $rowClass($values);
|$values = null;
""".stripMargin, isNull = FalseLiteral)
}
override def prettyName: String = "named_struct"
}
/**
* Creates a struct with the given field names and values. This is a variant that returns
* UnsafeRow directly. The unsafe projection operator replaces [[CreateStruct]] with
* this expression automatically at runtime.
*
* @param children Seq(name1, val1, name2, val2, ...)
*/
case class CreateNamedStructUnsafe(children: Seq[Expression]) extends CreateNamedStructLike {
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = GenerateUnsafeProjection.createCode(ctx, valExprs)
ExprCode(code = eval.code, isNull = FalseLiteral, value = eval.value)
}
override def prettyName: String = "named_struct_unsafe"
}
/**
* Creates a map after splitting the input text into key/value pairs using delimiters
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(text[, pairDelim[, keyValueDelim]]) - Creates a map after splitting the text into key/value pairs using delimiters. Default delimiters are ',' for `pairDelim` and ':' for `keyValueDelim`.",
examples = """
Examples:
> SELECT _FUNC_('a:1,b:2,c:3', ',', ':');
map("a":"1","b":"2","c":"3")
> SELECT _FUNC_('a');
map("a":null)
""")
// scalastyle:on line.size.limit
case class StringToMap(text: Expression, pairDelim: Expression, keyValueDelim: Expression)
extends TernaryExpression with CodegenFallback with ExpectsInputTypes {
def this(child: Expression, pairDelim: Expression) = {
this(child, pairDelim, Literal(":"))
}
def this(child: Expression) = {
this(child, Literal(","), Literal(":"))
}
override def children: Seq[Expression] = Seq(text, pairDelim, keyValueDelim)
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
override def dataType: DataType = MapType(StringType, StringType)
override def checkInputDataTypes(): TypeCheckResult = {
if (Seq(pairDelim, keyValueDelim).exists(! _.foldable)) {
TypeCheckResult.TypeCheckFailure(s"$prettyName's delimiters must be foldable.")
} else {
super.checkInputDataTypes()
}
}
override def nullSafeEval(
inputString: Any,
stringDelimiter: Any,
keyValueDelimiter: Any): Any = {
val keyValues =
inputString.asInstanceOf[UTF8String].split(stringDelimiter.asInstanceOf[UTF8String], -1)
val iterator = new Iterator[(UTF8String, UTF8String)] {
var index = 0
val keyValueDelimiterUTF8String = keyValueDelimiter.asInstanceOf[UTF8String]
override def hasNext: Boolean = {
keyValues.length > index
}
override def next(): (UTF8String, UTF8String) = {
val keyValueArray = keyValues(index).split(keyValueDelimiterUTF8String, 2)
index += 1
(keyValueArray(0), if (keyValueArray.length < 2) null else keyValueArray(1))
}
}
ArrayBasedMapData(iterator, keyValues.size, identity, identity)
}
override def prettyName: String = "str_to_map"
}
|
michalsenkyr/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
|
Scala
|
apache-2.0
| 18,229 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import akka.actor._
import akka.event.{ Logging, LoggingAdapter }
import akka.pattern.{ ask, pipe }
import akka.util.Timeout
import com.rbmhtechnology.eventuate.EventsourcingProtocol._
import com.typesafe.config.Config
import scala.concurrent.duration._
import scala.util._
private class EventsourcedViewSettings(config: Config) {
val replayBatchSize =
config.getInt("eventuate.log.replay-batch-size")
val replayRetryMax =
config.getInt("eventuate.log.replay-retry-max")
val replayRetryDelay =
config.getDuration("eventuate.log.replay-retry-delay", TimeUnit.MILLISECONDS).millis
val readTimeout =
config.getDuration("eventuate.log.read-timeout", TimeUnit.MILLISECONDS).millis
val loadTimeout =
config.getDuration("eventuate.snapshot.load-timeout", TimeUnit.MILLISECONDS).millis
val saveTimeout =
config.getDuration("eventuate.snapshot.save-timeout", TimeUnit.MILLISECONDS).millis
}
object EventsourcedView {
/**
* Callback handler invoked on an actor's dispatcher thread.
*/
type Handler[A] = Try[A] => Unit
object Handler {
    def empty[A]: Handler[A] = (_: Try[A]) => ()
}
/**
* Internal API.
*/
private[eventuate] val instanceIdCounter = new AtomicInteger(0)
}
/**
* An actor that derives internal state from events stored in an event log. Events are pushed from
* the `eventLog` actor to this actor and handled with the `onEvent` event handler. An event handler
* defines how internal state is updated from events.
*
* An `EventsourcedView` can also store snapshots of internal state with its `save` method. During
* (re-)start the latest snapshot saved by this actor (if any) is passed as argument to the `onSnapshot`
* handler, if the handler is defined at that snapshot. If the `onSnapshot` handler is not defined at
* that snapshot or is not overridden at all, event replay starts from scratch. Newer events that are
* not covered by the snapshot are handled by `onEvent` after `onSnapshot` returns.
*
* By default, an `EventsourcedView` does not define an `aggregateId`. In this case, the `eventLog`
* pushes all events to this actor. If it defines an `aggregateId`, the `eventLog` actor only pushes
* those events that contain that `aggregateId` value in their `routingDestinations` set.
*
* An `EventsourcedView` can only consume events from its `eventLog` but cannot produce new events.
* Commands sent to an `EventsourcedView` during recovery are delayed until recovery completes.
*
 * Event replay is subject to backpressure. After a configurable number of events
 * (see the `eventuate.log.replay-batch-size` configuration parameter), replay is suspended until these
 * events have been handled by `onEvent` and then resumed again. There's no backpressure mechanism
 * for live event processing yet (but one will come in a future release).
*
* @see [[DurableEvent]]
* @see [[EventsourcedActor]]
* @see [[EventsourcedWriter]]
* @see [[EventsourcedProcessor]]
*/
trait EventsourcedView extends Actor with Stash {
import EventsourcedView._
import context.dispatcher
type Handler[A] = EventsourcedView.Handler[A]
val instanceId: Int = instanceIdCounter.getAndIncrement()
private var _recovering: Boolean = true
private var _eventHandling: Boolean = false
private var _lastHandledEvent: DurableEvent = _
private var _lastReceivedSequenceNr = 0L
private val settings = new EventsourcedViewSettings(context.system.settings.config)
private var saveRequests: Map[SnapshotMetadata, Handler[SnapshotMetadata]] = Map.empty
private lazy val _commandContext: BehaviorContext = new DefaultBehaviorContext(onCommand)
private lazy val _eventContext: BehaviorContext = new DefaultBehaviorContext(onEvent)
private lazy val _snapshotContext: BehaviorContext = new DefaultBehaviorContext(onSnapshot)
/**
* This actor's logging adapter.
*/
val logger: LoggingAdapter =
Logging(context.system, this)
/**
* Optional aggregate id. It is used for routing [[DurableEvent]]s to event-sourced destinations
* which can be [[EventsourcedView]]s or [[EventsourcedActor]]s. By default, an event is routed
* to an event-sourced destination with an undefined `aggregateId`. If a destination's `aggregateId`
* is defined it will only receive events with a matching aggregate id in
* [[DurableEvent#destinationAggregateIds]].
*/
def aggregateId: Option[String] =
None
/**
   * Maximum number of events to be replayed to this actor before replaying is suspended. A suspended replay
   * is resumed automatically after all replayed events have been handled by this actor's event handler
   * (= backpressure). The default value for the maximum replay batch size is given by the configuration item
   * `eventuate.log.replay-batch-size`. Configured values can be overridden by overriding this method.
*/
def replayBatchSize: Int =
settings.replayBatchSize
/**
* Global unique actor id.
*/
def id: String
/**
* Event log actor.
*/
def eventLog: ActorRef
/**
* Returns the command [[BehaviorContext]].
*/
def commandContext: BehaviorContext =
_commandContext
/**
* Returns the event [[BehaviorContext]].
*/
def eventContext: BehaviorContext =
_eventContext
/**
* Returns the snapshot [[BehaviorContext]].
*/
def snapshotContext: BehaviorContext =
_snapshotContext
/**
* Command handler.
*/
def onCommand: Receive
/**
* Event handler.
*/
def onEvent: Receive
/**
* Snapshot handler.
*/
def onSnapshot: Receive =
Actor.emptyBehavior
/**
* Recovery completion handler. If called with a `Failure`, the actor will be stopped in
   * any case, regardless of the action taken by the returned handler. The default
   * implementation does nothing and can be overridden by subclasses.
*/
def onRecovery: Handler[Unit] =
Handler.empty[Unit]
/**
* Returns `true` if this actor is currently recovering internal state by consuming
* replayed events from the event log. Returns `false` after recovery completed and
* the actor switches to consuming live events.
*/
final def recovering: Boolean =
_recovering
/**
* Internal API.
*/
private[eventuate] def eventHandling: Boolean =
_eventHandling
/**
* Internal API.
*/
private[eventuate] def recovered(): Unit = {
_recovering = false
onRecovery(Success(()))
}
/**
* Internal API.
*/
private[eventuate] def receiveEvent(event: DurableEvent): Unit = {
val behavior = _eventContext.current
val previous = lastHandledEvent
_lastHandledEvent = event
if (behavior.isDefinedAt(event.payload)) {
_eventHandling = true
receiveEventInternal(event)
behavior(event.payload)
if (!recovering) versionChanged(currentVersion)
_eventHandling = false
} else _lastHandledEvent = previous
_lastReceivedSequenceNr = event.localSequenceNr
}
/**
* Internal API.
*/
private[eventuate] def receiveEventInternal(event: DurableEvent): Unit = {
_lastHandledEvent = event
}
/**
* Internal API.
*/
private[eventuate] def receiveEventInternal(event: DurableEvent, failure: Throwable): Unit = {
_lastHandledEvent = event
}
/**
* Internal API.
*/
private[eventuate] def lastHandledEvent: DurableEvent =
_lastHandledEvent
/**
* Internal API.
*/
private[eventuate] def currentVersion: VectorTime =
VectorTime.Zero
/**
* Internal API.
*/
private[eventuate] def conditionalSend(condition: VectorTime, cmd: Any): Unit =
throw new ConditionalRequestException("Actor must extend ConditionalRequests to support ConditionalRequest processing")
/**
* Internal API.
*/
private[eventuate] def versionChanged(condition: VectorTime): Unit =
()
/**
* Sequence number of the last handled event.
*/
final def lastSequenceNr: Long =
lastHandledEvent.localSequenceNr
/**
* Wall-clock timestamp of the last handled event.
*/
final def lastSystemTimestamp: Long =
lastHandledEvent.systemTimestamp
/**
* Vector timestamp of the last handled event.
*/
final def lastVectorTimestamp: VectorTime =
lastHandledEvent.vectorTimestamp
/**
* Emitter aggregate id of the last handled event.
*/
final def lastEmitterAggregateId: Option[String] =
lastHandledEvent.emitterAggregateId
/**
* Emitter id of the last handled event.
*/
final def lastEmitterId: String =
lastHandledEvent.emitterId
/**
* Id of the local event log that initially wrote the event.
*/
final def lastProcessId: String =
lastHandledEvent.processId
/**
* Asynchronously saves the given `snapshot` and calls `handler` with the generated
* snapshot metadata. The `handler` can obtain a reference to the initial message
* sender with `sender()`.
*/
final def save(snapshot: Any)(handler: Handler[SnapshotMetadata]): Unit = {
implicit val timeout = Timeout(settings.saveTimeout)
val payload = snapshot match {
case tree: ConcurrentVersionsTree[_, _] => tree.copy()
case other => other
}
val prototype = Snapshot(payload, id, lastHandledEvent, currentVersion, _lastReceivedSequenceNr)
val metadata = prototype.metadata
val iid = instanceId
if (saveRequests.contains(metadata)) {
handler(Failure(new IllegalStateException(s"snapshot with metadata $metadata is currently being saved")))
} else {
saveRequests += (metadata -> handler)
val snapshot = snapshotCaptured(prototype)
eventLog.ask(SaveSnapshot(snapshot, sender(), iid)).recover {
case t => SaveSnapshotFailure(metadata, t, iid)
}.pipeTo(self)(sender())
}
}
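  // Usage sketch (SaveAcked/SaveFailed are hypothetical application messages,
  // not part of this file):
  //
  //   save(currentState) {
  //     case Success(metadata) => sender() ! SaveAcked(metadata.sequenceNr)
  //     case Failure(cause)    => sender() ! SaveFailed(cause)
  //   }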
/**
* Override to provide an application-defined log sequence number from which event replay will start.
*
* If `Some(snr)` is returned snapshot loading will be skipped and replay will start from
* the given sequence number `snr`.
*
* If `None` is returned the actor proceeds with the regular snapshot loading procedure.
*/
def replayFromSequenceNr: Option[Long] =
None
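  // Override sketch (the stored offset `storedProgress` is an assumption): skip
  // snapshot loading and resume replay right after previously processed events:
  //
  //   override def replayFromSequenceNr: Option[Long] = Some(storedProgress + 1L)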
/**
* Internal API.
*/
private[eventuate] def snapshotCaptured(snapshot: Snapshot): Snapshot =
snapshot
/**
* Internal API.
*/
private[eventuate] def snapshotLoaded(snapshot: Snapshot): Unit =
_lastHandledEvent = snapshot.lastEvent
/**
* Internal API.
*/
private[eventuate] def unhandledMessage(msg: Any): Unit = {
val behavior = _commandContext.current
if (behavior.isDefinedAt(msg)) behavior(msg) else unhandled(msg)
}
/**
* Internal API.
*/
private[eventuate] def init(): Unit =
replayFromSequenceNr match {
case Some(snr) => replay(snr, subscribe = true)
case None => load()
}
/**
* Internal API.
*/
private[eventuate] def load(): Unit = {
implicit val timeout = Timeout(settings.loadTimeout)
val iid = instanceId
eventLog ? LoadSnapshot(id, iid) recover {
case t => LoadSnapshotFailure(t, iid)
} pipeTo self
}
/**
* Internal API.
*/
private[eventuate] def replay(fromSequenceNr: Long = 1L, subscribe: Boolean = false): Unit = {
implicit val timeout = Timeout(settings.readTimeout)
val sub = if (subscribe) Some(self) else None
val iid = instanceId
eventLog ? Replay(fromSequenceNr, replayBatchSize, sub, aggregateId, instanceId) recover {
case t => ReplayFailure(t, fromSequenceNr, iid)
} pipeTo self
}
/**
* Internal API.
*/
private[eventuate] def initiating(replayAttempts: Int): Receive = {
case LoadSnapshotSuccess(Some(snapshot), iid) => if (iid == instanceId) {
val behavior = _snapshotContext.current
if (behavior.isDefinedAt(snapshot.payload)) {
snapshotLoaded(snapshot)
behavior(snapshot.payload)
replay(snapshot.metadata.sequenceNr + 1L, subscribe = true)
} else {
logger.warning(s"snapshot loaded (metadata = ${snapshot.metadata}) but onSnapshot doesn't handle it, replaying from scratch")
replay(subscribe = true)
}
}
case LoadSnapshotSuccess(None, iid) => if (iid == instanceId) {
replay(subscribe = true)
}
case LoadSnapshotFailure(cause, iid) => if (iid == instanceId) {
replay(subscribe = true)
}
case ReplaySuccess(Seq(), progress, iid) => if (iid == instanceId) {
context.become(initiated)
versionChanged(currentVersion)
recovered()
unstashAll()
}
case ReplaySuccess(events, progress, iid) => if (iid == instanceId) {
events.foreach(receiveEvent)
// reset retry attempts
context.become(initiating(settings.replayRetryMax))
replay(progress + 1L)
}
case ReplayFailure(cause, progress, iid) => if (iid == instanceId) {
if (replayAttempts < 1) {
logger.error(cause, "replay failed (maximum number of {} replay attempts reached), stopping self", settings.replayRetryMax)
Try(onRecovery(Failure(cause)))
context.stop(self)
} else {
// retry replay request while decreasing the remaining attempts
val attemptsRemaining = replayAttempts - 1
logger.warning(
"replay failed [{}] ({} replay attempts remaining), scheduling retry in {}ms",
cause.getMessage, attemptsRemaining, settings.replayRetryDelay.toMillis)
context.become(initiating(attemptsRemaining))
context.system.scheduler.scheduleOnce(settings.replayRetryDelay, self, ReplayRetry(progress))
}
}
case ReplayRetry(progress) =>
replay(progress)
case Terminated(ref) if ref == eventLog =>
context.stop(self)
case other =>
stash()
}
/**
* Internal API.
*/
private[eventuate] def initiated: Receive = {
case Written(event) => if (event.localSequenceNr > lastSequenceNr) {
receiveEvent(event)
}
case ConditionalRequest(condition, cmd) =>
conditionalSend(condition, cmd)
case SaveSnapshotSuccess(metadata, iid) => if (iid == instanceId) {
saveRequests.get(metadata).foreach(handler => handler(Success(metadata)))
saveRequests = saveRequests - metadata
}
case SaveSnapshotFailure(metadata, cause, iid) => if (iid == instanceId) {
saveRequests.get(metadata).foreach(handler => handler(Failure(cause)))
saveRequests = saveRequests - metadata
}
case Terminated(ref) if ref == eventLog =>
context.stop(self)
case msg =>
unhandledMessage(msg)
}
/**
* Initialization behavior.
*/
final def receive = initiating(settings.replayRetryMax)
/**
* Adds the current command to the user's command stash. Must not be used in the event handler.
*/
override def stash(): Unit =
if (eventHandling) throw new StashError("stash() must not be used in event handler") else super.stash()
/**
* Prepends all stashed commands to the actor's mailbox and then clears the command stash.
   * Has no effect if the actor is recovering, i.e. if `recovering` returns `true`.
*/
override def unstashAll(): Unit =
if (!recovering) super.unstashAll()
/**
* Sets `recovering` to `false` before calling `super.preRestart`.
*/
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
_recovering = false
super.preRestart(reason, message)
}
/**
* Initiates recovery.
*/
override def preStart(): Unit = {
_lastHandledEvent = DurableEvent(null, id)
context.watch(eventLog)
init()
}
/**
* Sets `recovering` to `false` before calling `super.postStop`.
*/
override def postStop(): Unit = {
_recovering = false
super.postStop()
}
}
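// Minimal implementation sketch (illustrative only; the event payloads and the
// surrounding actor system are assumptions, not part of this file):
//
//   class CounterView(val id: String, val eventLog: ActorRef) extends EventsourcedView {
//     private var count = 0L
//     override def onCommand: Receive = { case "count" => sender() ! count }
//     override def onEvent: Receive   = { case "incremented" => count += 1L }
//   }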
|
RBMHTechnology/eventuate
|
eventuate-core/src/main/scala/com/rbmhtechnology/eventuate/EventsourcedView.scala
|
Scala
|
apache-2.0
| 16,525 |
package unfiltered.request
import scala.util.control.Exception.{ allCatch, catching }
trait DateParser extends (String => java.util.Date)
object DateFormatting {
import java.text.SimpleDateFormat
import java.util.{ Date, Locale, TimeZone }
def format(date: Date) =
new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss z", Locale.ENGLISH) {
setTimeZone(TimeZone.getTimeZone("GMT"))
}.format(date)
def parseAs(fmt: String)(value: String): Option[Date] =
allCatch.opt(new SimpleDateFormat(fmt, Locale.US).parse(value))
/** Preferred HTTP date format Sun, 06 Nov 1994 08:49:37 GMT */
def RFC1123 = parseAs("EEE, dd MMM yyyy HH:mm:ss z")_
/** Sunday, 06-Nov-94 08:49:37 GMT */
def RFC1036 = parseAs("EEEEEE, dd-MMM-yy HH:mm:ss z")_
/** Sun Nov 6 08:49:37 1994 */
def ANSICTime = parseAs("EEE MMM d HH:mm:ss yyyy")_
  /** @return the first successful date parse among the RFC1123, RFC1036, and ANSI C formats, or None */
def parseDate(raw: String) = RFC1123(raw) orElse RFC1036(raw) orElse ANSICTime(raw)
}
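// Parsing sketch: the three formats are tried in order by parseDate, e.g.
//   parseDate("Sun, 06 Nov 1994 08:49:37 GMT")  // Some(...), via RFC1123
//   parseDate("Sunday, 06-Nov-94 08:49:37 GMT") // Some(...), via RFC1036
//   parseDate("not a date")                     // None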
/** A header with values mapped to keys in a Map. */
private [request] class MappedRequestHeader[A, B](val name: String)(parser: Iterator[String] => Map[A, B]) extends RequestExtractor[Map[A, B]] {
def unapply[T](req: HttpRequest[T]): Some[Map[A, B]] = Some(parser(req.headers(name)))
def apply[T](req: HttpRequest[T]) = parser(req.headers(name))
}
/** A header with comma-delimited values. Implementations of this extractor
* will not match requests for which the header `name` is not present.*/
private [request] class SeqRequestHeader[T](val name: String)(parser: Iterator[String] => List[T]) extends RequestExtractor[List[T]] {
def unapply[A](req: HttpRequest[A]) =
Some(parser(req.headers(name))).filter { _.nonEmpty }
def apply[T](req: HttpRequest[T]) = parser(req.headers(name))
}
/** A header with a single value. Implementations of this extractor
* will not match requests for which the header `name` is not present.*/
private [request] class RequestHeader[A](val name: String)(parser: Iterator[String] => List[A]) extends RequestExtractor[A] {
def unapply[T](req: HttpRequest[T]) = parser(req.headers(name)).headOption
def apply[T](req: HttpRequest[T]) = parser(req.headers(name)).headOption
}
private [request] object DateValueParser extends (Iterator[String] => List[java.util.Date]) {
import DateFormatting._
def apply(values: Iterator[String]) =
values.toList.flatMap(parseDate)
}
private [request] object IntValueParser extends (Iterator[String] => List[Int]) {
def tryInt(raw: String) = catching(classOf[NumberFormatException]).opt(raw.toInt)
def apply(values: Iterator[String]) =
values.toList.flatMap(tryInt)
}
private [request] object StringValueParser extends (Iterator[String] => List[String]) {
def apply(values: Iterator[String]) =
values.toList
}
private [request] object UriValueParser extends (Iterator[String] => List[java.net.URI]) {
import java.net.{ URI, URISyntaxException }
def toUri(raw: String) =
catching(classOf[URISyntaxException], classOf[NullPointerException]).opt(new URI(raw))
def apply(values: Iterator[String]) =
values.toList.flatMap(toUri)
}
private [request] object SeqValueParser extends (Iterator[String] => List[String]) {
def apply(values: Iterator[String]) = {
def split(raw: String): List[String] =
(raw.split(",") map {
_.trim.takeWhile { _ != ';' }.mkString
}).toList
values.toList.flatMap(split)
}
}
private [request] case class Conneg(value: String, qualifier: Double = 1.0)
private [request] object Conneg {
  val EqualsMatcher = """(\w*)="?([a-zA-Z\.0-9]*)"?""".r
def apply(input: String): Conneg = {
val split = input.trim().split(";").toList
val params = split.tail.foldLeft(Map[String, Option[String]]()) {
case (map, s) => {
val item = s.trim match {
case EqualsMatcher(a, b) => (a.trim, Some(b.trim))
case _ => (s, None)
}
map + item
}
}.collect{case (a, Some(b)) => (a, b)}
new Conneg(split.head, params.get("q").map(_.toDouble).getOrElse(1.0))
}
}
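// Parsing sketch: the quality parameter is extracted when present, e.g.
//   Conneg("text/html;q=0.8") == Conneg("text/html", 0.8)
//   Conneg("text/html")       == Conneg("text/html", 1.0)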
private [request] object ConnegValueParser extends (Iterator[String] => List[String]) {
def apply(values: Iterator[String]) = {
def parse: String => scala.List[Conneg] = {
raw => raw.split(",").map(Conneg(_)).toList
}
values.toList.flatMap(parse).sortBy(_.qualifier)(implicitly[Ordering[Double]].reverse).map(_.value)
}
}
/** Header whose value should be a date and time. Parsing is attempted
* for formats defined in the DateFormatting object, in this order:
* RFC1123, RFC1036, ANSICTime. */
class DateHeader(name: String) extends RequestHeader(name)(DateValueParser)
/** A repeatable header may be specified in more than one header k-v pair and
* whose values are a list delimited by comma
* see also [[https://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2]] */
class RepeatableHeader(name: String) extends SeqRequestHeader(name)(SeqValueParser)
/** Header whose value should be a valid URI. */
class UriHeader(name: String) extends RequestHeader(name)(UriValueParser)
/** Header whose value can be any string. */
class StringHeader(name: String) extends RequestHeader(name)(StringValueParser)
/** Header whose value should be an integer. (Is stored in an Int.) */
class IntHeader(name: String) extends RequestHeader(name)(IntValueParser)
/** Header whose values are sorted by their quality (`q`) attribute. */
class ConnegHeader(name: String) extends SeqRequestHeader(name)(ConnegValueParser)
// https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.10
object Accept extends ConnegHeader("Accept")
object AcceptCharset extends ConnegHeader("Accept-Charset")
object AcceptEncoding extends ConnegHeader("Accept-Encoding")
object AcceptLanguage extends ConnegHeader("Accept-Language")
/** To handle request body content encodings */
object RequestContentEncoding extends ConnegHeader("Content-Encoding") {
private def matching(t: String) =
RequestExtractor.predicate(RequestContentEncoding) { encs =>
encs.exists { _.equalsIgnoreCase(t) }
}
val GZip = matching("gzip")
val Deflate = matching("deflate")
val Compress = matching("compress")
val SDCH = matching("sdch")
val Identity = matching("identity")
}
object Authorization extends StringHeader("Authorization")
object Connection extends StringHeader("Connection")
object RequestContentType extends StringHeader("Content-Type")
object Expect extends StringHeader("Expect")
object From extends StringHeader("From")
object Host extends StringHeader("Host")
object IfMatch extends RepeatableHeader("If-Match")
object IfModifiedSince extends DateHeader("If-Modified-Since")
object IfNoneMatch extends RepeatableHeader("If-None-Match")
object IfRange extends StringHeader("If-Range") // can also be an HTTP date
object IfUnmodifiedSince extends DateHeader("If-Unmodified-Since")
object MaxForwards extends IntHeader("Max-Forwards")
object ProxyAuthorization extends StringHeader("Proxy-Authorization")
object Range extends RepeatableHeader("Range") // there's more structure here
object Referer extends UriHeader("Referer")
object TE extends RepeatableHeader("TE")
object Upgrade extends RepeatableHeader("Upgrade")
object UserAgent extends StringHeader("User-Agent") // maybe a bit more structure here
object Via extends RepeatableHeader("Via")
object XForwardedFor extends RepeatableHeader("X-Forwarded-For")
object XForwardedPort extends IntHeader("X-Forwarded-Port")
object XForwardedProto extends StringHeader("X-Forwarded-Proto")
/** Extracts the charset value from the Content-Type header, if present */
object Charset {
import unfiltered.util.MIMEType
def unapply[T](req: HttpRequest[T]) = {
for {
MIMEType(mimeType) <- RequestContentType(req)
charset <- mimeType.params.get("charset")
} yield charset
}
def apply[T](req: HttpRequest[T]) = unapply(req)
}
/** Extracts hostname and port separately from the Host header, setting
* a default port of 80 or 443 when none is specified */
object HostPort {
import unfiltered.util.Of
def unapply[T](req: HttpRequest[T]): Option[(String, Int)] =
req match {
case Host(hostname) => hostname.split(':') match {
case Array(host, Of.Int(port)) => Some((host, port))
case _ => Some((hostname, if(req.isSecure) 443 else 80))
}
case _ => None
}
}
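// Extraction sketch:
//   Host: example.com:8080 => Some(("example.com", 8080))
//   Host: example.com      => Some(("example.com", 80)), or 443 when req.isSecure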
//CORS request headers
//https://www.w3.org/TR/cors/#syntax
object Origin extends StringHeader("Origin")
object AccessControlRequestMethod extends StringHeader("Access-Control-Request-Method")
object AccessControlRequestHeaders extends RepeatableHeader("Access-Control-Request-Headers")
|
unfiltered/unfiltered
|
library/src/main/scala/request/headers.scala
|
Scala
|
mit
| 8,696 |
package org.http4s
package servlet
package syntax
import cats.effect._
import javax.servlet.{ServletContext, ServletRegistration}
import org.http4s.server.DefaultServiceErrorHandler
import org.http4s.server.defaults
import org.http4s.syntax.all._
trait ServletContextSyntax {
implicit def ToServletContextOps(self: ServletContext): ServletContextOps =
new ServletContextOps(self)
}
final class ServletContextOps private[syntax] (val self: ServletContext) extends AnyVal {
/** Wraps an [[HttpRoutes]] and mounts it as an [[AsyncHttp4sServlet]]
*
* Assumes non-blocking servlet IO is available, and thus requires at least Servlet 3.1.
*/
def mountService[F[_]: ConcurrentEffect: ContextShift](
name: String,
service: HttpRoutes[F],
mapping: String = "/*"): ServletRegistration.Dynamic =
mountHttpApp(name, service.orNotFound, mapping)
def mountHttpApp[F[_]: ConcurrentEffect: ContextShift](
name: String,
service: HttpApp[F],
mapping: String = "/*"): ServletRegistration.Dynamic = {
val servlet = new AsyncHttp4sServlet(
service = service,
asyncTimeout = defaults.ResponseTimeout,
servletIo = NonBlockingServletIo(DefaultChunkSize),
serviceErrorHandler = DefaultServiceErrorHandler[F]
)
val reg = self.addServlet(name, servlet)
reg.setLoadOnStartup(1)
reg.setAsyncSupported(true)
reg.addMapping(mapping)
reg
}
}
object servletContext extends ServletContextSyntax
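// Usage sketch (illustrative; `routes` and the implicit ConcurrentEffect/ContextShift
// instances for IO are assumptions, not part of this file):
//
//   import cats.effect.IO
//   import org.http4s.servlet.syntax.servletContext._
//
//   class Bootstrap extends javax.servlet.ServletContextListener {
//     def contextInitialized(sce: javax.servlet.ServletContextEvent): Unit = {
//       val _ = sce.getServletContext.mountService("api", routes)
//     }
//     def contextDestroyed(sce: javax.servlet.ServletContextEvent): Unit = ()
//   }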
|
ChristopherDavenport/http4s
|
servlet/src/main/scala/org/http4s/servlet/syntax/ServletContextSyntax.scala
|
Scala
|
apache-2.0
| 1,485 |
/*
* Copyright 2017 Workday, Inc.
*
* This software is available under the MIT license.
* Please see the LICENSE.txt file in this project.
*/
package com.workday.esclient.actions
import io.searchbox.action.{AbstractMultiTypeActionBuilder, GenericResultAbstractAction}
/**
* Builder class for [[com.workday.esclient.actions.SnapshotRestoreAction]].
* @param repository String repository name.
* @param name String name of snapshot.
* @param wait Boolean whether to wait for action completion.
*/
class SnapshotRestoreBuilder(repository: String, name: String, wait: Boolean = false)
extends AbstractMultiTypeActionBuilder[SnapshotRestoreAction, SnapshotRestoreBuilder] {
  var indexList: Seq[String] = Nil
val snapshotRepository = repository
val snapName = name
val waitForCompletion = wait
/**
* Builds [[com.workday.esclient.actions.SnapshotRestoreAction]].
* @return [[com.workday.esclient.actions.SnapshotRestoreAction]].
*/
override def build: SnapshotRestoreAction = new SnapshotRestoreAction(this)
}
/**
* Action class for restoring snapshots using the Elasticsearch Snapshot API.
* @param builder [[com.workday.esclient.actions.SnapshotRestoreBuilder]].
*/
class SnapshotRestoreAction(builder: SnapshotRestoreBuilder) extends GenericResultAbstractAction(builder) {
val repository = builder.snapshotRepository
val snapName = builder.snapName
setURI(buildURI)
/**
* Gets the REST method name.
* @return String "POST".
*/
override def getRestMethodName: String = "POST"
/**
* Builds the URI for hitting the restore snapshots API.
* @return String URI.
*/
protected override def buildURI: String = s"_snapshot/${this.repository}/${this.snapName}/_restore?wait_for_completion=${builder.waitForCompletion}"
}
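// Usage sketch (the Jest client and snapshot names are assumptions):
//
//   val restore = new SnapshotRestoreBuilder("backups", "snapshot_2017_01_01", wait = true).build
//   // jestClient.execute(restore)
//   // => POST _snapshot/backups/snapshot_2017_01_01/_restore?wait_for_completion=true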
|
Workday/escalar
|
src/main/scala/com/workday/esclient/actions/SnapshotRestoreAction.scala
|
Scala
|
mit
| 1,800 |
package mesosphere.marathon
import com.fasterxml.jackson.databind.ObjectMapper
import com.github.fge.jackson.JsonLoader
import com.github.fge.jsonschema.main.{ JsonSchema, JsonSchemaFactory }
import org.apache.mesos.Protos.Offer
import org.rogach.scallop.ScallopConf
import mesosphere.marathon.state.AppDefinition
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.tasks.IterativeOfferMatcher
import mesosphere.mesos.protos._
trait MarathonTestHelper {
import mesosphere.mesos.protos.Implicits._
def makeConfig(args: String*): MarathonConf = {
val opts = new ScallopConf(args) with MarathonConf {
// scallop will trigger sys exit
override protected def onError(e: Throwable): Unit = throw e
}
opts.afterInit()
opts
}
def defaultConfig(
maxTasksPerOffer: Int = 1,
maxTasksPerOfferCycle: Int = 10,
mesosRole: Option[String] = None,
acceptedResourceRoles: Option[Set[String]] = None): MarathonConf = {
var args = Seq(
"--master", "127.0.0.1:5050",
"--max_tasks_per_offer", maxTasksPerOffer.toString,
"--max_tasks_per_offer_cycle", maxTasksPerOfferCycle.toString
)
mesosRole.foreach(args ++= Seq("--mesos_role", _))
acceptedResourceRoles.foreach(v => args ++= Seq("--default_accepted_resource_roles", v.mkString(",")))
makeConfig(args: _*)
}
def makeBasicOffer(cpus: Double = 4.0, mem: Double = 16000, disk: Double = 1.0,
beginPort: Int = 31000, endPort: Int = 32000, role: String = "*"): Offer.Builder = {
val cpusResource = ScalarResource(Resource.CPUS, cpus, role = role)
val memResource = ScalarResource(Resource.MEM, mem, role = role)
val diskResource = ScalarResource(Resource.DISK, disk, role = role)
val portsResource = if (beginPort <= endPort) {
Some(RangesResource(
Resource.PORTS,
Seq(Range(beginPort.toLong, endPort.toLong)),
role
))
}
else {
None
}
val offerBuilder = Offer.newBuilder
.setId(OfferID("1"))
.setFrameworkId(FrameworkID("marathon"))
.setSlaveId(SlaveID("slave0"))
.setHostname("localhost")
.addResources(cpusResource)
.addResources(memResource)
.addResources(diskResource)
portsResource.foreach(offerBuilder.addResources(_))
offerBuilder
}
def makeBasicOfferWithRole(cpus: Double, mem: Double, disk: Double,
beginPort: Int, endPort: Int, role: String) = {
val portsResource = RangesResource(
Resource.PORTS,
Seq(Range(beginPort.toLong, endPort.toLong)),
role
)
val cpusResource = ScalarResource(Resource.CPUS, cpus, role)
val memResource = ScalarResource(Resource.MEM, mem, role)
val diskResource = ScalarResource(Resource.DISK, disk, role)
Offer.newBuilder
.setId(OfferID("1"))
.setFrameworkId(FrameworkID("marathon"))
.setSlaveId(SlaveID("slave0"))
.setHostname("localhost")
.addResources(cpusResource)
.addResources(memResource)
.addResources(diskResource)
.addResources(portsResource)
}
def makeBasicApp() = AppDefinition(
id = "test-app".toPath,
cpus = 1,
mem = 64,
disk = 1,
executor = "//cmd"
)
def getSchemaMapper() = {
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.fasterxml.jackson.annotation.JsonInclude
import mesosphere.jackson.CaseClassModule
import mesosphere.marathon.api.v2.json.MarathonModule
val schemaMapper = new ObjectMapper
schemaMapper.registerModule(DefaultScalaModule)
schemaMapper.registerModule(new MarathonModule)
schemaMapper.registerModule(CaseClassModule)
schemaMapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY)
schemaMapper
}
val schemaMapper = getSchemaMapper()
def getAppSchema() = {
val appJson = "/mesosphere/marathon/api/v2/AppDefinition.json"
val appDefinition = JsonLoader.fromResource(appJson)
val factory = JsonSchemaFactory.byDefault()
factory.getJsonSchema(appDefinition)
}
val appSchema = getAppSchema()
def validateJsonSchema(app: AppDefinition, valid: Boolean = true) {
val appStr = schemaMapper.writeValueAsString(app)
val appJson = JsonLoader.fromString(appStr)
assert(appSchema.validate(appJson).isSuccess == valid)
}
}
object MarathonTestHelper extends MarathonTestHelper
|
quamilek/marathon
|
src/test/scala/mesosphere/marathon/MarathonTestHelper.scala
|
Scala
|
apache-2.0
| 4,397 |
package com.twitter.finagle.buoyant
import com.twitter.finagle._
import com.twitter.finagle.factory.{NameTreeFactory, ServiceFactoryCache}
import com.twitter.finagle.naming._
import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver}
import com.twitter.finagle.util.DefaultTimer
import com.twitter.logging.Logger
import com.twitter.util._
import java.util.concurrent.atomic.AtomicReference
trait DstBindingFactory[-Req, +Rsp] extends Closable {
final def apply(dst: Dst.Path): Future[Service[Req, Rsp]] =
apply(dst, ClientConnection.nil)
def apply(dst: Dst.Path, conn: ClientConnection): Future[Service[Req, Rsp]]
def status: Status
}
object DstBindingFactory {
private[buoyant] class RefCount {
// If non-None, refcount >= 0, indicating the number of active
// references. When None, the reference count may not change.
private[this] var refcount: Option[Long] = Some(0)
private[this] def update(f: Long => Long): Option[Long] = synchronized {
refcount = refcount.map(f).filter(_ > 0)
refcount
}
def get: Option[Long] = synchronized(refcount)
def incr(): Option[Long] = update(_ + 1)
def decr(): Option[Long] = update(_ - 1)
}
/**
   * Ensures that a DstBindingFactory is only closed when all users of
* the factory have closed it.
*
* Note that acquire() / close() are only expected to be called in
* the context of process configuration and not, for example, in the
* request serving path.
*/
class RefCounted[Req, Rsp](underlying: DstBindingFactory[Req, Rsp]) {
private[this] val refCount = new RefCount
def references: Long = refCount.get.getOrElse(0)
private[this] val release = new Closable {
def close(deadline: Time) = refCount.decr() match {
case None =>
underlying.close(deadline)
case Some(c) if (c <= 0) =>
Future.exception(new IllegalStateException(s"Closing factory with $c references"))
case _ =>
Future.Unit
}
}
def acquire(): DstBindingFactory[Req, Rsp] = refCount.incr() match {
case None =>
throw new IllegalStateException("Acquiring factory after it was closed")
case Some(c) if (c <= 0) =>
throw new IllegalStateException(s"Acquiring factory with $c references")
case _ =>
// Ensure that we can only decrement once for each acquisition
// by proxying close() on the underlying RpcClientFactory.
val closable = Closable.ref(new AtomicReference(release))
new DstBindingFactory[Req, Rsp] {
def apply(dst: Dst.Path, conn: ClientConnection) = underlying(dst, conn)
def status = underlying.status
def close(deadline: Time) = closable.close(deadline)
}
}
}
def refcount[Req, Rsp](underlying: DstBindingFactory[Req, Rsp]): RefCounted[Req, Rsp] =
new RefCounted(underlying)
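  // Lifecycle sketch: each acquire() must be paired with exactly one close();
  // the underlying factory is closed only when the last reference is released.
  //
  //   val counted = DstBindingFactory.refcount(underlying)
  //   val f1 = counted.acquire(); val f2 = counted.acquire()
  //   f1.close(Time.now) // underlying stays open
  //   f2.close(Time.now) // underlying is now closed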
/**
* A convenience type for a function that modifies (e.g. filters) a
* ServiceFactory using a T-typed value.
*/
type Mk[T, Req, Rsp] = (T, ServiceFactory[Req, Rsp]) => ServiceFactory[Req, Rsp]
object Mk {
def identity[T, Req, Rsp]: Mk[T, Req, Rsp] =
(_: T, f: ServiceFactory[Req, Rsp]) => f
}
case class Namer(interpreter: NameInterpreter) {
/** For Java compatibility */
def mk(): (Namer, Stack.Param[Namer]) = (this, Namer)
}
implicit object Namer extends Stack.Param[Namer] {
val default = Namer(DefaultInterpreter)
}
/** The capacities for each layer of dst caching. */
case class Capacity(paths: Int, trees: Int, bounds: Int, clients: Int) {
/** For Java compatibility */
def mk(): (Capacity, Stack.Param[Capacity]) = (this, Capacity)
}
implicit object Capacity extends Stack.Param[Capacity] {
val default = Capacity(1000, 1000, 1000, 1000)
}
case class BindingTimeout(timeout: Duration)
implicit object BindingTimeout extends Stack.Param[BindingTimeout] {
val default = BindingTimeout(Duration.Top)
}
/**
* Binds a Dst to a ServiceFactory.
*
* Here, we're basically replicating the logic from Finagle's
* BindingFactory. This is done so we bind a destination before
* creating a client so that multiple requests to a single bound
* destination may share connection pools etc.
*
* The logic has been changed to account for the way residuals play
* into naming. We use the helper classes Bound and BoundTree
* instead of Name.Bound and NameTree[Name.Bound] so that we can
* control when residual paths factor into caching.
*/
class Cached[-Req, +Rsp](
mkClient: Name.Bound => ServiceFactory[Req, Rsp],
pathMk: Mk[Dst.Path, Req, Rsp] = Mk.identity[Dst.Path, Req, Rsp],
boundMk: Mk[Dst.Bound, Req, Rsp] = Mk.identity[Dst.Bound, Req, Rsp],
namer: NameInterpreter = DefaultInterpreter,
statsReceiver: StatsReceiver = DefaultStatsReceiver,
capacity: Capacity = Capacity.default,
bindingTimeout: BindingTimeout = BindingTimeout.default
)(implicit timer: Timer = DefaultTimer.twitter) extends DstBindingFactory[Req, Rsp] {
private[this]type Cache[Key] = ServiceFactoryCache[Key, Req, Rsp]
def apply(dst: Dst.Path, conn: ClientConnection): Future[Service[Req, Rsp]] = {
val exc = new RequestTimeoutException(bindingTimeout.timeout, s"binding ${dst.path.show}")
pathCache(dst, conn).raiseWithin(bindingTimeout.timeout, exc)
}
// The path cache is keyed on the resolution context and
// logical rpc name. It resolves the name with the Dtab and
// dispatches connections through the tree cache.
private[this] val pathCache: Cache[Dst.Path] = {
def mk(dst: Dst.Path): ServiceFactory[Req, Rsp] = {
// dtabs aren't available when NoBrokers is thrown so we add them here
// as well as add a binding timeout
val dyn = new ServiceFactoryProxy(new DynBoundFactory(dst.bind(namer), treeCache)) {
override def apply(conn: ClientConnection) = {
val exc = new RequestTimeoutException(bindingTimeout.timeout, s"dyn binding ${dst.path.show}")
self(conn).rescue(handleNoBrokers).raiseWithin(bindingTimeout.timeout, exc)
}
private val handleNoBrokers: PartialFunction[Throwable, Future[Service[Req, Rsp]]] = {
case e: NoBrokersAvailableException => nbae
}
private lazy val nbae = Future.exception(new NoBrokersAvailableException(
dst.path.show,
dst.baseDtab,
dst.localDtab
))
}
pathMk(dst, dyn)
}
new ServiceFactoryCache(mk, statsReceiver.scope("path"), capacity.paths)
}
// The tree cache is effectively keyed on a NameTree of Bound names
// with their residual paths.
private[this] val treeCache: Cache[Dst.BoundTree] = {
def mk(tree: Dst.BoundTree): ServiceFactory[Req, Rsp] =
NameTreeFactory(tree.path, tree.nameTree, boundCache)
new ServiceFactoryCache(mk, statsReceiver.scope("tree"), capacity.trees)
}
// The bound cache is effectively keyed on the underlying service id
// and the residual path. It rewrites downstream URIs as requests
// are dispatched to the underlying client.
private[this] val boundCache: Cache[Dst.Bound] = {
def mk(bound: Dst.Bound): ServiceFactory[Req, Rsp] = {
val client = new ServiceFactory[Req, Rsp] {
// The client cache doesn't take the residual Path into
// account, so we strip it here to reduce confusion.
val name = Name.Bound(bound.addr, bound.id, Path.empty)
def apply(conn: ClientConnection) = clientCache.apply(name, conn)
def close(deadline: Time) = Future.Done
override def status = clientCache.status(name)
}
boundMk(bound, client)
}
new ServiceFactoryCache(mk, statsReceiver.scope("bound"), capacity.bounds)
}
// The bottom cache is effectively keyed on the bound destination id
// (i.e. concrete service name).
private[this] val clientCache: Cache[Name.Bound] =
new ServiceFactoryCache(mkClient, statsReceiver.scope("client"), capacity.clients)
private[this] val caches: Seq[Cache[_]] =
Seq(pathCache, treeCache, boundCache, clientCache)
def close(deadline: Time) =
Closable.sequence(caches: _*).close(deadline)
def status = Status.worstOf[Cache[_]](caches, _.status)
}
}
|
hhtpcd/linkerd
|
router/core/src/main/scala/com/twitter/finagle/buoyant/DstBindingFactory.scala
|
Scala
|
apache-2.0
| 8,405 |
package com.datastax.spark.connector.cql
import java.net.InetAddress
import org.apache.cassandra.thrift.{AuthenticationRequest, TFramedTransportFactory, Cassandra}
import org.apache.spark.SparkConf
import org.apache.thrift.protocol.TBinaryProtocol
import org.apache.thrift.transport.TTransport
import com.datastax.driver.core.policies.ExponentialReconnectionPolicy
import com.datastax.driver.core.{Cluster, SocketOptions}
import com.datastax.spark.connector.util.ReflectionUtil
import scala.collection.JavaConversions._
/** Creates both native and Thrift connections to Cassandra.
  * The connector provides a DefaultConnectionFactory.
  * Other factories can be plugged in by setting the `spark.cassandra.connection.factory` option. */
trait CassandraConnectionFactory extends Serializable {
/** Creates and configures a Thrift client.
   * To be removed in the near future, when the dependency on Thrift is completely dropped. */
def createThriftClient(conf: CassandraConnectorConf, hostAddress: InetAddress): (Cassandra.Iface, TTransport)
/** Creates and configures native Cassandra connection */
def createCluster(conf: CassandraConnectorConf): Cluster
/** List of allowed custom property names passed in SparkConf */
def properties: Set[String] = Set.empty
}
/** Performs no authentication. Use with `AllowAllAuthenticator` in Cassandra. */
object DefaultConnectionFactory extends CassandraConnectionFactory {
/** Creates and configures a Thrift client.
   * To be removed in the near future, when the dependency on Thrift is completely dropped. */
override def createThriftClient(conf: CassandraConnectorConf, hostAddress: InetAddress) = {
var transport: TTransport = null
try {
val transportFactory = new TFramedTransportFactory()
transport = transportFactory.openTransport(hostAddress.getHostAddress, conf.rpcPort)
val client = new Cassandra.Client(new TBinaryProtocol(transport))
val creds = conf.authConf.thriftCredentials
if (creds.nonEmpty) {
client.login(new AuthenticationRequest(creds))
}
(client, transport)
}
catch {
case e: Throwable =>
if (transport != null)
transport.close()
throw e
}
}
  /** Returns the Cluster.Builder object used to set up the Cluster instance. */
def clusterBuilder(conf: CassandraConnectorConf): Cluster.Builder = {
val options = new SocketOptions()
.setConnectTimeoutMillis(conf.connectTimeoutMillis)
.setReadTimeoutMillis(conf.readTimeoutMillis)
Cluster.builder()
.addContactPoints(conf.hosts.toSeq: _*)
.withPort(conf.nativePort)
.withRetryPolicy(
new MultipleRetryPolicy(conf.queryRetryCount))
.withReconnectionPolicy(
new ExponentialReconnectionPolicy(conf.minReconnectionDelayMillis, conf.maxReconnectionDelayMillis))
.withLoadBalancingPolicy(
new LocalNodeFirstLoadBalancingPolicy(conf.hosts, conf.localDC))
.withAuthProvider(conf.authConf.authProvider)
.withSocketOptions(options)
}
/** Creates and configures native Cassandra connection */
override def createCluster(conf: CassandraConnectorConf): Cluster =
clusterBuilder(conf).build()
}
/** Entry point for obtaining `CassandraConnectionFactory` object from [[org.apache.spark.SparkConf SparkConf]],
* used when establishing connections to Cassandra. */
object CassandraConnectionFactory {
val ConnectionFactoryProperty = "spark.cassandra.connection.factory"
val Properties = Set(ConnectionFactoryProperty)
def fromSparkConf(conf: SparkConf): CassandraConnectionFactory = {
conf.getOption(ConnectionFactoryProperty)
.map(ReflectionUtil.findGlobalObject[CassandraConnectionFactory])
.getOrElse(DefaultConnectionFactory)
}
}
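// Plug-in sketch (illustrative; the object name and builder tweak are assumptions):
// a custom factory is referenced by the fully qualified name of a Scala object via
// the spark.cassandra.connection.factory property, e.g.
//
//   object TunedConnectionFactory extends CassandraConnectionFactory {
//     override def createThriftClient(conf: CassandraConnectorConf, hostAddress: InetAddress) =
//       DefaultConnectionFactory.createThriftClient(conf, hostAddress)
//     override def createCluster(conf: CassandraConnectorConf): Cluster =
//       DefaultConnectionFactory.clusterBuilder(conf).withoutJMXReporting().build()
//   }
//
//   // sparkConf.set("spark.cassandra.connection.factory", "com.example.TunedConnectionFactory")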
|
willgalen/REVEL
|
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/cql/CassandraConnectionFactory.scala
|
Scala
|
apache-2.0
| 3,793 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers.dsl
import org.scalatest._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ResultOfAtMostOneElementOfApplicationSpec extends AnyFunSpec {
describe("ResultOfAtMostOneElementOfApplication ") {
it("should have pretty toString when right is empty") {
val result = new ResultOfAtMostOneElementOfApplication(Vector.empty)
result.toString should be ("atMostOneElementOf (Vector())")
}
it("should have pretty toString when right contains 1 element") {
val result = new ResultOfAtMostOneElementOfApplication(Vector("Bob"))
      result.toString should be ("atMostOneElementOf (Vector(\"Bob\"))")
}
it("should have pretty toString when right contains > 1 elements") {
val result = new ResultOfAtMostOneElementOfApplication(Vector("Bob", "Alice"))
      result.toString should be ("atMostOneElementOf (Vector(\"Bob\", \"Alice\"))")
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/matchers/dsl/ResultOfAtMostOneElementOfApplicationSpec.scala
|
Scala
|
apache-2.0
| 1,564 |
package uk.gov.gds.ier.transaction.overseas.name
import uk.gov.gds.ier.test._
import uk.gov.gds.ier.model._
import uk.gov.gds.ier.transaction.overseas.InprogressOverseas
class NameMustacheTest
extends MustacheTestSuite
with NameMustache
with NameForms {
it should "empty progress form should produce empty Model" in {
val emptyApplicationForm = nameForm
val nameModel = mustache.data(
emptyApplicationForm,
Call("GET", "/register-to-vote/overseas/name"),
InprogressOverseas()
).asInstanceOf[NameModel]
nameModel.question.title should be("What is your full name?")
nameModel.question.postUrl should be("/register-to-vote/overseas/name")
nameModel.firstName.value should be("")
nameModel.middleNames.value should be("")
nameModel.lastName.value should be("")
nameModel.hasPreviousNameOptionFalse.value should be("false")
nameModel.hasPreviousNameOptionTrue.value should be("true")
nameModel.hasPreviousNameOptionOther.value should be("other")
nameModel.previousFirstName.value should be("")
nameModel.previousMiddleNames.value should be("")
nameModel.previousLastName.value should be("")
}
it should "progress form with filled applicant name should produce Mustache Model with name values present" in {
val partiallyFilledApplicationForm = nameForm.fill(InprogressOverseas(
name = Some(Name(
firstName = "John",
middleNames = None,
lastName = "Smith"
))
))
val nameModel = mustache.data(
partiallyFilledApplicationForm,
Call("GET", "/register-to-vote/overseas/name"),
InprogressOverseas()
).asInstanceOf[NameModel]
nameModel.question.title should be("What is your full name?")
nameModel.question.postUrl should be("/register-to-vote/overseas/name")
nameModel.firstName.value should be("John")
nameModel.middleNames.value should be("")
nameModel.lastName.value should be("Smith")
nameModel.hasPreviousNameOptionFalse.value should be("false")
nameModel.hasPreviousNameOptionTrue.value should be("true")
nameModel.hasPreviousNameOptionOther.value should be("other")
nameModel.previousFirstName.value should be("")
nameModel.previousMiddleNames.value should be("")
nameModel.previousLastName.value should be("")
}
it should "progress form with filled applicant name and previous should produce Mustache Model with name and previous name values present" in {
val partiallyFilledApplicationForm = nameForm.fill(InprogressOverseas(
name = Some(Name(
firstName = "John",
middleNames = None,
lastName = "Smith"
)),
previousName = Some(PreviousName(
hasPreviousName = true,
hasPreviousNameOption = "true",
previousName = Some(Name(
firstName = "Jan",
middleNames = None,
lastName = "Kovar"
))
))
))
val nameModel = mustache.data(
partiallyFilledApplicationForm,
Call("GET", "/register-to-vote/overseas/name"),
InprogressOverseas()
).asInstanceOf[NameModel]
nameModel.question.title should be("What is your full name?")
nameModel.question.postUrl should be("/register-to-vote/overseas/name")
nameModel.firstName.value should be("John")
nameModel.middleNames.value should be("")
nameModel.lastName.value should be("Smith")
nameModel.hasPreviousNameOptionFalse.value should be("false")
nameModel.hasPreviousNameOptionTrue.value should be("true")
nameModel.hasPreviousNameOptionOther.value should be("other")
nameModel.previousFirstName.value should be("Jan")
nameModel.previousMiddleNames.value should be("")
nameModel.previousLastName.value should be("Kovar")
}
it should "progress form with validation errors should produce Model with error list present" in {
val partiallyFilledApplicationFormWithErrors = nameForm.fillAndValidate(InprogressOverseas(
name = Some(Name(
firstName = "John",
middleNames = None,
lastName = ""
))
))
val nameModel = mustache.data(
partiallyFilledApplicationFormWithErrors,
Call("GET", "/register-to-vote/overseas/name"),
InprogressOverseas()
).asInstanceOf[NameModel]
nameModel.question.title should be("What is your full name?")
nameModel.question.postUrl should be("/register-to-vote/overseas/name")
nameModel.firstName.value should be("John")
nameModel.middleNames.value should be("")
nameModel.lastName.value should be("")
nameModel.hasPreviousNameOptionFalse.value should be("false")
nameModel.hasPreviousNameOptionTrue.value should be("true")
nameModel.hasPreviousNameOptionOther.value should be("other")
nameModel.previousFirstName.value should be("")
nameModel.previousMiddleNames.value should be("")
nameModel.previousLastName.value should be("")
nameModel.question.errorMessages.toSet should be(
Set("Please enter your last name", "Please answer this question")
)
}
}
|
michaeldfallen/ier-frontend
|
test/uk/gov/gds/ier/transaction/overseas/name/OverseasNameMustacheTest.scala
|
Scala
|
mit
| 5,037 |
package com.anishathalye.oscar
import java.util.Date
sealed abstract class Result
case class Success(report: Report) extends Result
case class Note(report: Report) extends Result
case class Failure(report: Report) extends Result
case class Report(date: Date, summary: Option[String], description: Option[String] = None)
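// Usage sketch (not part of the original file; the summary text is hypothetical):
// a check that completed normally might report
//   Success(Report(new Date, Some("service responded in 42 ms")))
// while a failing check would wrap the same Report shape in Failure.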
|
anishathalye/oscar
|
src/main/scala/com/anishathalye/oscar/Result.scala
|
Scala
|
mit
| 326 |
// AORTA is copyright (C) 2012 Dustin Carlino, Mike Depinet, and Piyush
// Khandelwal of UT Austin
// License: GNU GPL v2
package utexas.aorta.sim.drivers
import utexas.aorta.map.{Edge, Road, Turn, Traversable, Position}
import utexas.aorta.sim.EV_Transition
import utexas.aorta.sim.intersections.Ticket
import scala.collection.mutable
import utexas.aorta.common.{Util, cfg, Physics}
abstract class Action
final case class Act_Set_Accel(new_accel: Double) extends Action
final case class Act_Done_With_Route() extends Action
abstract class Behavior(a: Agent) {
protected var debug_me = false
// asked every tick after everybody has moved
def choose_action(): Action
// only queried when the agent reaches a vertex
def choose_turn(e: Edge): Turn
// every time the agent moves to a new traversable
def transition(from: Traversable, to: Traversable)
// just for debugging
def dump_info()
def set_debug(value: Boolean) {
debug_me = value
}
}
// Never speeds up from rest, so effectively never does anything
class IdleBehavior(a: Agent) extends Behavior(a) {
def choose_action(): Action = Act_Set_Accel(0)
def choose_turn(e: Edge) = e.next_turns.head
def transition(from: Traversable, to: Traversable) {}
def dump_info() {
Util.log("Idle behavior")
}
}
// Reactively avoids collisions and obeys intersections by doing a conservative analysis of the
// next few steps.
class LookaheadBehavior(a: Agent, route: Route) extends Behavior(a) {
override def choose_turn(e: Edge) = a.get_ticket(e).get.turn
override def transition(from: Traversable, to: Traversable) {
route.transition(from, to)
a.lc.target_lane = None
}
override def dump_info() {
Util.log("Route-following behavior")
Util.log(s"Target lane: ${a.lc.target_lane}")
route.dump_info()
}
def choose_action(): Action = {
a.route.reroute_policy.react()
a.lc.decide_lc()
val accel = max_safe_accel
return accel
}
// Returns Act_Set_Accel almost always.
private def max_safe_accel(): Action = {
// the output.
var accel_for_stop: Option[Double] = None
var accel_for_agent: Option[Double] = None
var min_speed_limit = Double.MaxValue
var done_with_route = false
val accel_for_lc_agent = constraint_lc_agent
// Since we can't react instantly, we have to consider the worst-case of the
// next tick, which happens when we speed up as much as possible this tick.
var step = LookaheadStep(a.at, a.kinematic.max_lookahead_dist, 0, a)
// Verify lookahead doesn't cycle to the same lane twice, since an agent can only hold one
// ticket per origin lane at a time. Note agents may hit the same road twice in quick succession
// due to funky geometry and lane-changing.
val visited = new mutable.HashSet[Edge]()
// If we don't have to stop for an intersection, keep caring about staying
// far enough behind an agent. Once we have to stop somewhere, don't worry
// about agents beyond that point.
while (step != null && !accel_for_stop.isDefined) {
step.at.on match {
case e: Edge => {
if (visited.contains(e)) {
throw new Exception(s"Lookahead for $a visited $e twice!")
}
visited += e
}
case _ =>
}
if (!accel_for_agent.isDefined) {
accel_for_agent = constraint_agent(step)
}
if (!accel_for_stop.isDefined) {
constraint_stop(step) match {
case Left(constraint) => accel_for_stop = constraint
case Right(done) => done_with_route = true
}
}
min_speed_limit = math.min(min_speed_limit, constraint_speed_limit(step))
// Set the next step. If we're stopping here, don't bother -- next_step would fail to find a
// ticket to figure out where we want to go.
if (accel_for_stop.isDefined) {
step = null
} else {
step = step.next_step match {
case Some(s) => s
case None => null
}
}
}
return if (done_with_route) {
Act_Done_With_Route()
} else {
val conservative_accel = List(
accel_for_stop, accel_for_agent, accel_for_lc_agent,
Some(a.kinematic.accel_to_achieve(min_speed_limit)), Some(a.max_accel)
).flatten.min
//if (debug_me) {
// println(s"@ ${a.sim.tick}, ${a.id}'s at ${a.at.dist} with speed ${a.speed} and next accel $conservative_accel")
//}
// As the very last step, clamp based on our physical capabilities.
Act_Set_Accel(math.max(conservative_accel, -a.max_accel))
}
}
// All constraint functions return a limiting acceleration, if relevant
// Don't plow into people
private def constraint_agent(step: LookaheadStep): Option[Double] = {
val follow_agent = if (a.at.on == step.at.on)
a.cur_queue.ahead_of(a)
else
step.at.on.queue.last
return follow_agent match {
case Some(other) => {
Util.assert_ne(a, other)
val dist_away = if (other.on(a.at.on))
other.at.dist - a.at.dist
else
step.dist_so_far + other.at.dist
Some(a.kinematic.accel_to_follow(other.kinematic, dist_away))
}
case None => None
}
}
// When we're lane-changing, lookahead takes care of the new path. But we still have to pay
// attention to exactly one other agent: the one in front of us on our old lane. Since we're
// required to finish lane-changing before reaching the end of the lane, don't have to do full
// lookahead there.
private def constraint_lc_agent(): Option[Double] = a.lc.old_lane match {
case Some(e) => e.queue.ahead_of(a) match {
case Some(other) =>
Some(a.kinematic.accel_to_follow(other.kinematic, other.at.dist - a.at.dist))
case None => None
}
case None => None
}
// Returns a speed limit, not an acceleration
private def constraint_speed_limit(step: LookaheadStep) = step.at.on match {
case e: Edge => route.pick_final_lane(e) match {
// How many required LCs do we anticipate here? Slow down to increase the chances of completing them all
case (target, true) => {
val num_lcs = math.abs(e.lane_num - target.lane_num)
// TODO (This is a bit ad-hoc)
// TODO maybe dont do this while in the turn, only when physically on the lane
e.speed_limit / (num_lcs + 1)
}
case _ => e.speed_limit
}
case t: Turn => t.speed_limit
}
// Returns an optional acceleration, or 'true', which indicates the agent is totally done.
private def constraint_stop(step: LookaheadStep): Either[Option[Double], Boolean] = {
// Request a turn before we need a decision.
step.at.on match {
case e: Edge if !route.done(e) => {
manage_turn(e)
}
case _ =>
}
// Want to stop in the range [length - end_threshold, length), preferably at that left border
if (step.dist_left_to_analyze < step.at.dist_left - cfg.end_threshold) {
return Left(None)
}
val can_go: Boolean = step.at.on match {
// Don't stop at the end of a turn
case t: Turn => true
// Stop if we're arriving at destination
case e: Edge if route.done(e) => false
// Otherwise, ask the intersection
case e: Edge => a.get_ticket(e) match {
case Some(ticket) => ticket.is_approved
case None => false
}
}
if (can_go) {
return Left(None)
}
// Are we completely done?
val dist_from_agent_to_end = step.dist_so_far + step.at.dist_left
val maybe_done = dist_from_agent_to_end <= cfg.end_threshold && a.is_stopped
return a.at.on match {
case e: Edge if route.done(e) && maybe_done => Right(true)
// We want to go the distance that puts us at length - end_threshold. If we're already past
// that point, then try to cover enough distance to get us to the start of the edge.
case _ => Left(Some(a.kinematic.accel_to_end(
math.max(step.dist_so_far, dist_from_agent_to_end - cfg.end_threshold)
)))
}
}
private def manage_turn(e: Edge) {
// Schedule a new turn?
if (!a.get_ticket(e).isDefined && committed_to_lane(e)) {
val ticket = new Ticket(a, route.reroute_policy.pick_next_turn(e))
a.add_ticket(ticket)
e.to.intersection.request_turn(ticket)
}
// Change existing turn?
a.get_ticket(e) match {
case Some(ticket) => {
val next_turn = route.reroute_policy.pick_alt_turn(ticket)
// Sometimes we pick the same turn here, but the later route could change.
if (next_turn != ticket.turn) {
// We don't need to cancel later tickets, because there are none. We only cancel
// unapproved tickets, and we don't lookahead beyond intersections we can't yet cross.
// but TODO verify it
ticket.cancel()
val replacement = new Ticket(a, next_turn)
a.add_ticket(replacement)
e.to.intersection.request_turn(replacement)
}
}
case _ =>
}
}
// Don't necessarily commit to turning from some lane in lookahead
private def committed_to_lane(e: Edge) =
if (e == a.at.on)
a.lc.target_lane match {
case Some(target) => target == e
case None => false
}
else
e.other_lanes.size == 1
}
// This is a lazy sequence of edges/turns that tracks distances away from the original spot. This
// assumes no lane-changing: where the agent starts predicting is where they'll end up.
case class LookaheadStep(
at: Position, dist_left_to_analyze: Double, dist_so_far: Double, a: Agent
) {
def next_step =
if (dist_left_to_analyze <= at.dist_left || is_last_step)
None
else
Some(LookaheadStep(
Position(next_at, 0), dist_left_to_analyze - at.dist_left, dist_so_far + at.dist_left, a
))
private def is_last_step = at.on match {
case e: Edge => a.route.done(e)
case _ => false
}
private def next_at = at.on match {
// This is called after manage_turn, which'll guarantee the ticket is present. However, if
// manage_turn defers the decision (due to LCing), then this method shouldn't be called, since
// the driver must stop at that intersection.
case e: Edge => a.get_ticket(e).get.turn
case t: Turn => t.to
}
}
|
dabreegster/aorta
|
utexas/aorta/sim/drivers/Behaviors.scala
|
Scala
|
gpl-2.0
| 10,424 |
package dal
import javax.inject.{ Inject, Singleton }
import play.api.db.slick.DatabaseConfigProvider
import slick.driver.JdbcProfile
import models.Person
import scala.concurrent.{ Future, ExecutionContext }
/**
* A repository for people.
*
* @param dbConfigProvider The Play db config provider. Play will inject this for you.
*/
@Singleton
class PersonRepository @Inject() (dbConfigProvider: DatabaseConfigProvider)(implicit ec: ExecutionContext) {
// We want the JdbcProfile for this provider
private val dbConfig = dbConfigProvider.get[JdbcProfile]
// These imports are important, the first one brings db into scope, which will let you do the actual db operations.
// The second one brings the Slick DSL into scope, which lets you define the table and other queries.
import dbConfig._
import driver.api._
/**
* Here we define the table. It will have a name of people
*/
private class PeopleTable(tag: Tag) extends Table[Person](tag, "people") {
/** The ID column, which is the primary key, and auto incremented */
def id = column[Long]("id", O.PrimaryKey, O.AutoInc)
/** The name column */
def name = column[String]("name")
/** The age column */
def age = column[Int]("age")
/** The sex column */
def sex = column[String]("sex")
/**
* This is the table's default "projection".
*
* It defines how the columns are converted to and from the Person object.
*
* In this case, we are simply passing the id, name, age and sex parameters to the Person case
* class's apply and unapply methods.
*/
def * = (id, name, age, sex) <> ((Person.apply _).tupled, Person.unapply)
}
/**
* The starting point for all queries on the people table.
*/
private val people = TableQuery[PeopleTable]
/**
* Create a person with the given name and age.
*
* This is an asynchronous operation, it will return a future of the created person, which can be used to obtain the
* id for that person.
*/
def create(name: String, age: Int, sex: String): Future[Person] = db.run {
// We create a projection of just the name, age and sex columns, since we're not inserting a value for the id column
(people.map(p => (p.name, p.age, p.sex))
// Now define it to return the id, because we want to know what id was generated for the person
returning people.map(_.id)
// And we define a transformation for the returned value, which combines our original parameters with the
// returned id
into ((nameAgeSex, id) => Person(id, nameAgeSex._1, nameAgeSex._2, nameAgeSex._3))
// And finally, insert the person into the database
) += (name, age, sex)
}
/**
* List all the people in the database.
*/
def list(): Future[Seq[Person]] = db.run {
people.result
}
}
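// Usage sketch (an assumption, not part of the original repository class): composing
// create and list with the Futures they return, given an injected PersonRepository.
object PersonRepositoryUsageExample {
def createAndList(repo: PersonRepository)(implicit ec: ExecutionContext): Future[Seq[Person]] =
repo.create("Alice", 30, "female").flatMap(_ => repo.list())
}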
|
Towncarl/sandbox
|
play-scala-intro/app/dal/PersonRepository.scala
|
Scala
|
apache-2.0
| 2,798 |
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics
package snowplow
package enrich
package common
package enrichments
package registry
// Scalaz
import scalaz._
import Scalaz._
// Maven Artifact
import org.apache.maven.artifact.versioning.DefaultArtifactVersion
// json4s
import org.json4s.JValue
// Iglu
import iglu.client.{
SchemaCriterion,
SchemaKey
}
import iglu.client.validation.ProcessingMessageMethods._
// This project
import utils.ScalazJson4sUtils
/**
* Trait inherited by every enrichment config case class
*/
trait Enrichment {
val version: DefaultArtifactVersion
}
/**
* Trait to hold helpers relating to enrichment config
*/
trait ParseableEnrichment {
val supportedSchema: SchemaCriterion
/**
* Tests whether a JSON is parseable by a
* specific EnrichmentConfig constructor
*
* @param config The JSON
* @param schemaKey The schemaKey which needs
* to be checked
* @return The JSON or an error message, boxed
*/
def isParseable(config: JValue, schemaKey: SchemaKey): ValidatedNelMessage[JValue] = {
if (supportedSchema matches schemaKey) {
config.success
} else {
("Schema key %s is not supported. A '%s' enrichment must have schema '%s'.")
.format(schemaKey, supportedSchema.name, supportedSchema)
.toProcessingMessage.fail.toValidationNel
}
}
}
|
mdavid/lessig-bigdata
|
lib/snowplow/3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/enrichments/registry/enrichments.scala
|
Scala
|
mit
| 2,044 |
package phenan.prj.body
import org.scalatest._
class AllTests extends Suites(new TypeParsersTest, new BodyParsersTest) {
override def suiteName: String = "phenan.prj.body"
}
|
csg-tokyo/proteaj2
|
src/test/scala/phenan/prj/body/AllTests.scala
|
Scala
|
mit
| 177 |
/**
* Copyright (C) 2016 Nicola Justus <[email protected]>
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package de.thm.move.controllers
import java.net.URL
import java.util.ResourceBundle
import javafx.fxml.{FXML, FXMLLoader, Initializable}
import javafx.scene.control.TextArea
import javafx.scene.layout.{AnchorPane, VBox}
import javafx.scene.{Parent, Scene}
import javafx.stage.Stage
import de.thm.move.Global
import de.thm.move.views.dialogs.InfoLine
class AboutCtrl extends Initializable {
@FXML
var aboutPaneLeft: AnchorPane = _
@FXML
var aboutPaneRight: VBox = _
@FXML
var licenseArea: TextArea = _
override def initialize(location: URL, resources: ResourceBundle): Unit = {
//about pane
val m = Map(
"Copyright" -> Global.copyright,
"Version" -> Global.version
)
// placeholder list of optional extra info lines (currently empty)
val elementOpts = List(
)
val elements = elementOpts.flatten
val infolines = elements.map(new InfoLine(_))
Global.config.getString("window.title").
map(new InfoLine(_)).
foreach(aboutPaneRight.getChildren.add)
aboutPaneRight.getChildren.addAll(toInfoLines(m):_*)
aboutPaneRight.getChildren.addAll(infolines:_*)
//setup license pane
licenseArea.setText(Global.licenseString)
}
private def toInfoLines(m:Map[String,String]): List[InfoLine] = {
m.map {
case (k, v) => new InfoLine(k, v)
}.toList
}
}
object AboutCtrl {
def setupAboutDialog(): (Stage, AboutCtrl) = {
//=== setup about dialog
val aboutCtrl = new AboutCtrl()
val aboutStage = new Stage()
val aboutWindowWidth = Global.config.getDouble("window.about.width").getOrElse(500.0)
val aboutWindowHeight = Global.config.getDouble("window.about.height").getOrElse(500.0)
val fxmlLoader = new FXMLLoader(getClass.getResource("/fxml/about.fxml"))
fxmlLoader.setController(aboutCtrl)
val aboutViewRoot: Parent = fxmlLoader.load()
val scene = new Scene(aboutViewRoot)
scene.getStylesheets.add(Global.styleSheetUrl)
aboutStage.setTitle(Global.config.getString("window.title").map(_+" - About").getOrElse(""))
aboutStage.setScene(scene)
aboutStage.setWidth(aboutWindowWidth)
aboutStage.setHeight(aboutWindowHeight)
(aboutStage, aboutCtrl)
}
}
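// Usage sketch (assumption): wiring the dialog into a menu handler.
//   val (aboutStage, _) = AboutCtrl.setupAboutDialog()
//   aboutStage.show()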
|
THM-MoTE/MoVE
|
src/main/scala/de/thm/move/controllers/AboutCtrl.scala
|
Scala
|
mpl-2.0
| 2,415 |
package blended.mgmt.rest.internal
import java.io.File
import java.util.UUID
import blended.akka.http.HttpContext
import blended.akka.http.internal.BlendedAkkaHttpActivator
import blended.akka.internal.BlendedAkkaActivator
import blended.mgmt.repo.WritableArtifactRepo
import blended.mgmt.repo.internal.ArtifactRepoActivator
import blended.persistence.h2.internal.H2Activator
import blended.security.internal.SecurityActivator
import blended.testsupport.pojosr.{BlendedPojoRegistry, PojoSrTestHelper, SimplePojoContainerSpec}
import blended.testsupport.scalatest.LoggingFreeSpecLike
import blended.testsupport.{BlendedTestSupport, RequiresForkedJVM, TestFile}
import blended.updater.config.{ActivateProfile, OverlayConfig, OverlayConfigCompanion, UpdateAction}
import blended.updater.config.json.PrickleProtocol._
import blended.updater.remote.internal.RemoteUpdaterActivator
import blended.util.logging.Logger
import com.softwaremill.sttp
import com.softwaremill.sttp.UriContext
import com.typesafe.config.ConfigFactory
import domino.DominoActivator
import org.osgi.framework.BundleActivator
import org.scalatest.Matchers
import prickle.{Pickle, Unpickle}
@RequiresForkedJVM
class CollectorServicePojosrSpec extends SimplePojoContainerSpec
with LoggingFreeSpecLike
with Matchers
with PojoSrTestHelper
with TestFile {
override def baseDir: String = new File(BlendedTestSupport.projectTestOutput, "container").getAbsolutePath()
override def bundles: Seq[(String, BundleActivator)] = Seq(
"blended.akka" -> new BlendedAkkaActivator(),
"blended.akka.http" -> new BlendedAkkaHttpActivator(),
"blended.security" -> new SecurityActivator(),
"blended.mgmt.repo" -> new ArtifactRepoActivator(),
"blended.mgmt.rest" -> new MgmtRestActivator(),
"blended.updater.remote" -> new RemoteUpdaterActivator(),
"blended.persistence.h2" -> new H2Activator()
)
private[this] val log = Logger[this.type]
case class Server(serviceRegistry: BlendedPojoRegistry, dir: File)
def withServer(f: Server => Unit): Unit = {
log.info(s"Server path: ${baseDir}")
// cleanup potential left over data from previous runs
deleteRecursive(
new File(baseDir, "data"),
new File(baseDir, "repositories")
)
// We consume services with a nice domino API
new DominoActivator() {
whenBundleActive {
whenServicePresent[WritableArtifactRepo] { repo =>
whenAdvancedServicePresent[HttpContext]("(prefix=mgmt)") { httpCtxt =>
log.info("Test-Server up and running. Starting test case...")
f(Server(serviceRegistry = registry, dir = new File(baseDir)))
}
}
}
}.start(registry.getBundleContext())
}
implicit val sttpBackend = sttp.HttpURLConnectionBackend()
val serverUrl = uri"http://localhost:9995/mgmt"
val versionUrl = uri"${serverUrl}/version"
s"REST-API with a self-hosted HTTP server at ${serverUrl}" - {
s"GET ${versionUrl} should return the version" in {
withServer { sr =>
val response = sttp.sttp.get(versionUrl).send()
assert(response.body === Right("\"0.0.0\""))
}
}
"OverlayConfig" - {
val url = uri"${serverUrl}/overlayConfig"
"GET without credentials should fail with 401 - pending until feature is enabled" in logException {
// we currently do not require any permission for GET
pending
withServer { server =>
val response = sttp.sttp.get(url).send()
assert(response.code === 401)
}
}
"initial GET should return empty overlay list" in logException {
withServer { server =>
val response = sttp.sttp.get(url)
.auth.basic("tester", "mysecret")
.send()
assert(response.code === 200)
val ocs = Unpickle[Seq[OverlayConfig]].fromString(response.unsafeBody).get
assert(ocs.size === 0)
}
}
"POST allows upload of new OverlayConfig" in logException {
val o1 =
"""name = "jvm-medium"
|version = "1"
|properties = {
| "blended.launcher.jvm.xms" = "768M"
| "blended.launcher.jvm.xmx" = "768M"
| "amq.systemMemoryLimit" = "500m"
|}
|""".stripMargin
val oc = OverlayConfigCompanion.read(ConfigFactory.parseString(o1)).get
withServer { server =>
val responsePost = sttp.sttp
.post(url)
.body(Pickle.intoString(oc))
.header(sttp.HeaderNames.ContentType, sttp.MediaTypes.Json)
.auth.basic("tester", "mysecret")
.send()
assert(responsePost.code === 200)
assert(responsePost.unsafeBody === "\"Registered jvm-medium-1\"")
val responseGet = sttp.sttp.get(url)
.auth.basic("tester", "mysecret")
.send()
assert(responseGet.code === 200)
val ocs = Unpickle[Seq[OverlayConfig]].fromString(responseGet.unsafeBody).get
assert(ocs.size === 1)
assert(ocs.find(_.name == "jvm-medium").isDefined)
}
}
}
"ActivateProfile" - {
val ci1 = "ci1_ActivateProfile"
val ci2 = "ci2_ActivateProfile"
def url(containerId: String) = uri"${serverUrl}/container/${containerId}/update"
val ap = ActivateProfile(
id = UUID.randomUUID().toString(),
profileName = "p",
profileVersion = "1",
overlays = Set.empty
)
"POST with missing credentials fails with 401 Unauthorized" in logException {
withServer { server =>
val responsePost = sttp.sttp
.post(url(ci1))
.body(Pickle.intoString(ap))
.header(sttp.HeaderNames.ContentType, sttp.MediaTypes.Json)
.send()
assert(responsePost.code === 401)
assert(responsePost.statusText === "Unauthorized")
}
}
"POST an valid ActivateProfile action succeeds" in logException {
withServer { server =>
val responsePost = sttp.sttp
.post(url(ci1))
.body(Pickle.intoString[UpdateAction](ap))
.header(sttp.HeaderNames.ContentType, sttp.MediaTypes.Json)
.auth.basic("tester", "mysecret")
.send()
log.info(s"Response: ${responsePost}")
assert(responsePost.code === 200)
assert(responsePost.unsafeBody === "\"Added UpdateAction to ci1_ActivateProfile\"")
}
}
}
"Upload deployment pack" - {
val uploadUrl = uri"${serverUrl}/profile/upload/deploymentpack/artifacts"
val emptyPackFile = new File(BlendedTestSupport.projectTestOutput, "test.pack.empty-1.0.0.zip")
val packFile = new File(BlendedTestSupport.projectTestOutput, "test.pack.minimal-1.0.0.zip")
s"Uploading with missing credentials should fail with 401" in logException {
withServer { server =>
assert(packFile.exists())
val response = sttp.sttp.
multipartBody(sttp.multipartFile("file", emptyPackFile)).
post(uploadUrl).
send()
assert(response.code === 401)
}
}
s"Uploading with wrong credentials should fail with 401" in logException {
withServer { server =>
assert(packFile.exists())
val response = sttp.sttp.
multipartBody(sttp.multipartFile("file", emptyPackFile)).
auth.basic("unknown", "pass").
post(uploadUrl).
send()
assert(response.code === 401)
}
}
s"Multipart POST with empty profile (no bundles) should fail with validation errors" in logException {
withServer { server =>
assert(emptyPackFile.exists() === true)
val response = sttp.sttp.
multipartBody(sttp.multipartFile("file", emptyPackFile)).
auth.basic("tester", "mysecret").
post(uploadUrl).
send()
log.info("body: " + response.body)
log.info("headers: " + response.headers)
log.info("response: " + response)
assert(response.code === 422)
assert(response.statusText === "Unprocessable Entity")
assert(response.body.isLeft)
assert(response.body.left.get ===
"Could not process the uploaded deployment pack file. Reason: requirement failed: " +
"A ResolvedRuntimeConfig needs exactly one bundle with startLevel '0', but this one has (distinct): 0")
}
}
s"Multipart POST with minimal profile (one bundles) should succeed" in logException {
withServer { server =>
assert(packFile.exists() === true)
val response = sttp.sttp.
multipartBody(sttp.multipartFile("file", packFile)).
auth.basic("tester", "mysecret").
post(uploadUrl).
send()
log.info("body: " + response.body)
log.info("headers: " + response.headers)
log.info("response: " + response)
assert(response.code === 200)
assert(response.statusText === "OK")
assert(response.body.isRight)
assert(response.body.right.get === "\"Uploaded profile test.pack.minimal 1.0.0\"")
// We expect the bundle file in the local repo
assert(new File(server.dir, "repositories/artifacts/org/example/fake/1.0.0/fake-1.0.0.jar").exists())
// We expect the profile in the profile repo
assert(new File(server.dir, "repositories/rcs/test.pack.minimal-1.0.0.conf").exists())
}
}
}
}
}
|
lefou/blended
|
blended.mgmt.rest/src/test/scala/blended/mgmt/rest/internal/CollectorServicePojosrSpec.scala
|
Scala
|
apache-2.0
| 9,633 |
package org.scalaide.core.extensions
import java.util.concurrent.ConcurrentMap
import java.util.concurrent.ConcurrentHashMap
import org.eclipse.core.runtime.CoreException
import org.eclipse.core.runtime.IConfigurationElement
import org.eclipse.core.runtime.IExtension
import org.eclipse.core.runtime.IExtensionPoint
import org.eclipse.core.runtime.Platform
import org.scalaide.logging.HasLogger
import org.eclipse.core.runtime.IPath
object SourceFileProviderRegistry extends HasLogger {
private val EXTENSION_POINT = "org.scala-ide.sdt.core.sourcefileprovider"
// Note: The map has to be thread-safe, since it can potentially be accessed by different threads at the same time
private val registry: ConcurrentMap[String, SourceFileProvider] = new ConcurrentHashMap
registerProviders()
/** Return the source file provider for the given path.
*
* @return A registered `SourceFileProvider` or `null` if not found.
*/
def getProvider(path: IPath): SourceFileProvider = {
import scala.collection.JavaConverters._
val fullName = path.toPortableString()
val record = registry.asScala find { case (fileExtension, _) => fullName.endsWith(fileExtension) }
record.map(_._2).orNull
}
private def registerProviders(): Unit = {
val extensionPoint = Platform.getExtensionRegistry().getExtensionPoint(EXTENSION_POINT)
if (extensionPoint != null) {
val extensions = extensionPoint.getExtensions()
for {
extension <- extensions
config <- extension.getConfigurationElements
if config.isValid
} try {
val provider = config.createExecutableExtension("class").asInstanceOf[SourceFileProvider]
registerProvider(config.getAttribute("file_extension"), provider)
} catch {
case e: CoreException =>
eclipseLog.error("Failed to register source file provider for extension point: " + extension, e)
}
}
}
private def registerProvider(fileExtension: String, provider: SourceFileProvider): Unit = {
if(registry containsKey fileExtension) eclipseLog.warn("Source file provider for file extension `%s` already exists. Registration of `%s` will hence be ignored.".format(fileExtension, provider))
else registry put (fileExtension, provider)
}
// Note: we may need to implement the `IRegistryEventListener` if we want to support plugins that are started on the fly. This can be easily done
// via `Platform.getExtensionRegistry().addListener(...)`
}
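// Usage sketch (assumption): resolving a provider for a workspace-relative path;
// org.eclipse.core.runtime.Path is the standard IPath implementation.
//   val provider = SourceFileProviderRegistry.getProvider(new Path("src/a/B.scala"))
//   if (provider != null) { /* ask the provider for a source file */ }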
|
romanowski/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/core/extensions/SourceFileProviderRegistry.scala
|
Scala
|
bsd-3-clause
| 2,472 |
object HelloWorld2 {
def main(args: Array[String]) {
println("Hello, world!!")
}
}
|
crocker/sbt-multi-project
|
service/audit/src/main/scala/HelloWorld2.scala
|
Scala
|
mit
| 90 |
package com.codahale.jerkson.ser
import java.lang.reflect.Modifier
import com.codahale.jerkson.JsonSnakeCase
import com.codahale.jerkson.Util._
import com.fasterxml.jackson.core.JsonGenerator
import com.fasterxml.jackson.annotation.{JsonIgnore, JsonIgnoreProperties}
import com.fasterxml.jackson.databind.{SerializerProvider, JsonSerializer}
class CaseClassSerializer[A <: Product](klass: Class[_]) extends JsonSerializer[A] {
private val isSnakeCase = klass.isAnnotationPresent(classOf[JsonSnakeCase])
private val ignoredFields = if (klass.isAnnotationPresent(classOf[JsonIgnoreProperties])) {
klass.getAnnotation(classOf[JsonIgnoreProperties]).value().toSet
} else Set.empty[String]
private val nonIgnoredFields = klass.getDeclaredFields.filterNot { f =>
f.getAnnotation(classOf[JsonIgnore]) != null ||
ignoredFields(f.getName) ||
(f.getModifiers & Modifier.TRANSIENT) != 0 ||
f.getName.contains("$")
}
private val methods = klass.getDeclaredMethods
.filter { _.getParameterTypes.isEmpty }
.map { m => m.getName -> m }.toMap
def serialize(value: A, json: JsonGenerator, provider: SerializerProvider): Unit = {
json.writeStartObject()
for (field <- nonIgnoredFields) {
val methodOpt = methods.get(field.getName)
val fieldValue: Object = methodOpt.map { _.invoke(value) }.getOrElse(field.get(value))
if (fieldValue != None) { // skip Option fields whose value is None
val fieldName = methodOpt.map { _.getName }.getOrElse(field.getName)
provider.defaultSerializeField(if (isSnakeCase) snakeCase(fieldName) else fieldName, fieldValue, json)
}
}
json.writeEndObject()
}
}
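// Usage sketch (an assumption, not how Jerkson itself wires things up): the
// serializer can be registered with a plain Jackson ObjectMapper through a
// SimpleModule; the Person case class here is hypothetical.
private[ser] object CaseClassSerializerExample {
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.module.SimpleModule
case class Person(name: String, age: Int)
def example(): String = {
val module = new SimpleModule()
module.addSerializer(classOf[Person], new CaseClassSerializer[Person](classOf[Person]))
val mapper = new ObjectMapper()
mapper.registerModule(module)
mapper.writeValueAsString(Person("Ada", 36)) // {"name":"Ada","age":36}
}
}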
|
rememberthemilk/jerkson
|
src/main/scala/com/codahale/jerkson/ser/CaseClassSerializer.scala
|
Scala
|
mit
| 1,697 |
/**
* Copyright(C) 2015 Ryan Delucchi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package io.nary.genera
/**
* Genera is intended as a simple yet fairly general-purpose persistence layer. It operates on three implicit
* layers of abstraction:
*
* first-order (DB Schema):
* Definition of a "thing": A record in the "things" table
* Columns: content, relation to another "thing", meta-relation to another "thing"
*
* second-order (Data Organization):
* Definition of a "thing": Can contain content, and optionally be a node within a group hierarchy or KV Pair
* child -> parent relations are supported (if the meta-relation is the group-child marker)
* key -> value relations are supported (if the meta-relation is the dictionary key marker)
*
* third-order (Application Logic):
* Utilization of Data organization functionality per the application's implementation.
*/
trait GeneraStore[C <: GeneraContext[C], T <: GeneraThing[C, T]] {
/**
* Put content in the store and get a "thing" back.
*/
protected[genera] def put(content: GeneraContent, context: Option[C]) : Result[C, T, Singleton, StoredSingleton[C, T]]
/**
* Get a sequence of "things" such that all of the content matches
*/
protected[genera] def get(content: GeneraContent, context: Option[C]) : Result[C, T, List, StoredValues[T, List]]
}
/**
* Represents all session information for a particular store type
*/
trait GeneraContext[+C <: GeneraContext[C]] { self : C =>
def withStoredValues[T <: GeneraThing[C, T], S[_]](item: S[T]) = Result(self, StoredValues[T, S](item))
def withStoredSingleton[T <: GeneraThing[C, T]](item: Singleton[T]) = Result(self, StoredSingleton[C, T](item))
def withComputed[T](item: Singleton[T]) = Result(self, ComputedValue(item))
def withUnit = Result[C, Unit, Singleton, ComputedValue[C, Unit]](self, ComputedValue(Singleton(())))
}
case class Singleton[+T](singleton: T)
trait ResultValue[+T, +S[+_]] {
val item : S[T]
}
case class ComputedValue[+C <: GeneraContext[_], +T](override val item: Singleton[T]) extends ResultValue[T, Singleton]
case class StoredSingleton[+C <: GeneraContext[_], +T <: GeneraThing[_, _]](item : Singleton[T]) extends ResultValue[T, Singleton]
case class StoredValues[+T <: GeneraThing[_, _], +S[+_]](override val item: S[T]) extends ResultValue[T, S]
case class Result[+C <: GeneraContext[C], +T, +S[+_], +V <: ResultValue[T, S]](context: C, value: V)
/**
* Provides access to GeneraStore operations given an implicit context and store.
*/
object GeneraStore {
def put[C <: GeneraContext[C], T <: GeneraThing[C, T]](content: GeneraContent)(implicit store: GeneraStore[C, T]): Result[C, T, Singleton, StoredSingleton[C, T]] = {
store.put(content, None)
}
def get[C <: GeneraContext[C], T <: GeneraThing[C, T]](content: GeneraContent)(implicit store: GeneraStore[C, T]): Result[C, T, List, StoredValues[T, List]] = {
store.get(content, None)
}
def putJoin[C <: GeneraContext[C], T <: GeneraThing[C, T]](content: GeneraContent)(implicit context: C, store: GeneraStore[C, T]) = store.put(content, Some(context))
def getJoin[C <: GeneraContext[C], T <: GeneraThing[C, T]](content: GeneraContent)(implicit context: C, store: GeneraStore[C, T]) = store.get(content, Some(context))
}
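// Usage sketch (assumption; SomeContext and SomeThing are hypothetical implementations):
// with an implicit GeneraStore in scope, storing and querying content looks like
//   val stored = GeneraStore.put[SomeContext, SomeThing](content)
//   val matches = GeneraStore.get[SomeContext, SomeThing](content)
// where stored.value.item wraps the created thing and matches.value.item lists the results.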
|
ryanonsrc/genera
|
src/main/scala/io/nary/genera/GeneraStore.scala
|
Scala
|
apache-2.0
| 3,806 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package statements
import expressions.{SelfInvocation, BlockStat}
import lexer.ScalaTokenTypes
import builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 13.03.2008
*/
object ConstrBlock {
def parse(builder: ScalaPsiBuilder): Boolean = {
val constrExprMarker = builder.mark
builder.getTokenType match {
case ScalaTokenTypes.tLBRACE => {
builder.advanceLexer() //Ate {
builder.enableNewlines
SelfInvocation parse builder
while (true) {
builder.getTokenType match {
case ScalaTokenTypes.tRBRACE => {
builder.advanceLexer() //Ate }
builder.restoreNewlinesState
constrExprMarker.done(ScalaElementTypes.CONSTR_BLOCK)
return true
}
case ScalaTokenTypes.tSEMICOLON => {
builder.advanceLexer() //Ate semi
BlockStat parse builder
}
case _ if builder.newlineBeforeCurrentToken =>
if (!BlockStat.parse(builder)) {
builder error ErrMsg("rbrace.expected")
builder.restoreNewlinesState
while (!builder.eof && !ScalaTokenTypes.tRBRACE.eq(builder.getTokenType) &&
!builder.newlineBeforeCurrentToken) {
builder.advanceLexer()
}
constrExprMarker.done(ScalaElementTypes.CONSTR_BLOCK)
return true
}
case _ => {
builder error ErrMsg("rbrace.expected")
builder.restoreNewlinesState
while (!builder.eof && !ScalaTokenTypes.tRBRACE.eq(builder.getTokenType) &&
!builder.newlineBeforeCurrentToken) {
builder.advanceLexer()
}
constrExprMarker.done(ScalaElementTypes.CONSTR_BLOCK)
return true
}
}
}
true // unreachable; a trick to satisfy the compiler
}
case _ => {
constrExprMarker.drop()
false
}
}
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/parser/parsing/statements/ConstrBlock.scala
|
Scala
|
apache-2.0
| 2,120 |
package org.jetbrains.plugins.scala
package lang
package resolve
package processor
import com.intellij.psi._
import org.jetbrains.plugins.scala.caches.CachesUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScTypeAlias}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.usages.ImportUsed
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.types.api.TypeSystem
import org.jetbrains.plugins.scala.lang.psi.types.{PhysicalSignature, ScSubstitutor, ScType, Signature}
import org.jetbrains.plugins.scala.lang.resolve.processor.CompletionProcessor.QualifiedName
import scala.collection.{Set, mutable}
object CompletionProcessor {
case class QualifiedName(name: String, isNamedParameter: Boolean)
def getQualifiedName(result: ScalaResolveResult): QualifiedName = {
val name = result.isRenamed match {
case Some(str) => str
case None => result.name
}
val isNamedParameter = result.isNamedParameter
QualifiedName(name, isNamedParameter)
}
}
class CompletionProcessor(override val kinds: Set[ResolveTargets.Value],
val place: PsiElement,
val collectImplicits: Boolean = false,
forName: Option[String] = None,
postProcess: ScalaResolveResult => Unit = r => {},
val includePrefixImports: Boolean = true)
(implicit override val typeSystem: TypeSystem)
extends BaseProcessor(kinds) with PrecedenceHelper[QualifiedName] {
protected val precedence: mutable.HashMap[QualifiedName, Int] = new mutable.HashMap[QualifiedName, Int]()
protected val signatures: mutable.HashMap[Signature, Boolean] = new mutable.HashMap[Signature, Boolean]()
protected def getPlace: PsiElement = place
protected def getQualifiedName(result: ScalaResolveResult): QualifiedName = CompletionProcessor.getQualifiedName(result)
override protected def isCheckForEqualPrecedence = false
protected def getTopPrecedence(result: ScalaResolveResult): Int = precedence.getOrElse(getQualifiedName(result), 0)
protected def setTopPrecedence(result: ScalaResolveResult, i: Int) {
precedence.put(getQualifiedName(result), i)
}
override protected def filterNot(p: ScalaResolveResult, n: ScalaResolveResult): Boolean = {
getQualifiedName(p) == getQualifiedName(n) && super.filterNot(p, n)
}
def getSignature(element: PsiNamedElement, substitutor: => ScSubstitutor): Option[Signature] = {
element match {
case method: PsiMethod => Some(new PhysicalSignature(method, substitutor))
case td: ScTypeAlias => None
case td: PsiClass => None
case _ => Some(new Signature(element.name, Seq.empty, 0, substitutor, element))
}
}
def execute(_element: PsiElement, state: ResolveState): Boolean = {
if (!_element.isInstanceOf[PsiNamedElement]) return false // guard the unchecked cast below
val element = _element.asInstanceOf[PsiNamedElement]
forName match {
case Some(name) if element.name != name => return true
case _ =>
}
lazy val substitutor: ScSubstitutor = Option(state.get(ScSubstitutor.key)).getOrElse(ScSubstitutor.empty)
lazy val isRenamed: Option[String] = Option(state.get(ResolverEnv.nameKey))
lazy val implFunction: Option[PsiNamedElement] = Option(state.get(CachesUtil.IMPLICIT_FUNCTION))
lazy val isNamedParameter: Boolean = Option(state.get(CachesUtil.NAMED_PARAM_KEY)).exists(_.booleanValue())
val fromType: Option[ScType] = Option(state.get(BaseProcessor.FROM_TYPE_KEY))
val importsUsed: Set[ImportUsed] = Option(state.get(ImportUsed.key)).getOrElse(Set.empty)
val prefixCompletion: Boolean = Option(state.get(ScalaCompletionUtil.PREFIX_COMPLETION_KEY)).getOrElse(false)
def _addResult(result: ScalaResolveResult) {
val signature: Option[Signature] = getSignature(element, substitutor)
val forImplicit = implFunction.isDefined
if (!forImplicit) {
if (levelSet.contains(result)) {
if (result.prefixCompletion) {
levelSet.remove(result)
addResult(result)
}
} else addResult(result)
signature.foreach(sign => signatures += ((sign, forImplicit)))
} else {
signature match {
case Some(sign) =>
signatures.get(sign) match {
case Some(true) =>
val iterator = levelSet.iterator()
while (iterator.hasNext) {
val next = iterator.next()
if (getQualifiedName(next) == getQualifiedName(result) && next.element != result.element &&
signature == getSignature(next.element, next.substitutor)) {
iterator.remove()
}
}
case Some(false) => //do nothing
case None =>
addResult(result)
signature.foreach(sign => signatures += ((sign, forImplicit)))
}
case _ =>
if (levelSet.contains(result)) {
if (result.prefixCompletion) {
levelSet.remove(result)
addResult(result)
}
} else addResult(result)
}
}
}
element match {
case fun: ScFunction if fun.isConstructor => return true //do not add constructor
case td: ScTypeDefinition =>
if (kindMatches(td)) {
val result = new ScalaResolveResult(td, substitutor, nameShadow = isRenamed,
implicitFunction = implFunction, fromType = fromType, importsUsed = importsUsed, prefixCompletion = prefixCompletion)
_addResult(result)
}
ScalaPsiUtil.getCompanionModule(td) match {
case Some(td: ScTypeDefinition) if kindMatches(td) =>
val result = new ScalaResolveResult(td, substitutor,
nameShadow = isRenamed, implicitFunction = implFunction, fromType = fromType, importsUsed = importsUsed,
prefixCompletion = prefixCompletion)
_addResult(result)
case _ =>
}
case named: PsiNamedElement =>
if (kindMatches(element)) {
element match {
case method: PsiMethod =>
val result = new ScalaResolveResult(named, substitutor, nameShadow = isRenamed,
implicitFunction = implFunction, isNamedParameter = isNamedParameter, fromType = fromType,
importsUsed = importsUsed, prefixCompletion = prefixCompletion)
_addResult(result)
case bindingPattern: ScBindingPattern =>
val result = new ScalaResolveResult(named, substitutor, nameShadow = isRenamed,
implicitFunction = implFunction, isNamedParameter = isNamedParameter, fromType = fromType,
importsUsed = importsUsed, prefixCompletion = prefixCompletion)
_addResult(result)
case _ =>
val result = new ScalaResolveResult(named, substitutor, nameShadow = isRenamed,
implicitFunction = implFunction, isNamedParameter = isNamedParameter, fromType = fromType,
importsUsed = importsUsed, prefixCompletion = prefixCompletion)
_addResult(result)
}
}
}
true
}
override def changedLevel: Boolean = {
if (levelSet.isEmpty) return true
val iterator = levelSet.iterator()
while (iterator.hasNext) {
val next = iterator.next()
postProcess(next)
candidatesSet += next
}
qualifiedNamesSet.addAll(levelQualifiedNamesSet)
levelSet.clear()
levelQualifiedNamesSet.clear()
true
}
override def candidatesS: Set[ScalaResolveResult] = {
val res = candidatesSet
val iterator = levelSet.iterator()
while (iterator.hasNext) {
val next = iterator.next()
postProcess(next)
res += next
}
res
}
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/resolve/processor/CompletionProcessor.scala
|
Scala
|
apache-2.0
| 8,209 |
package com.twitter.cache
import java.util.concurrent.ConcurrentHashMap
import com.twitter.util.Future
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class KeyEncodingCacheTest extends AbstractFutureCacheTest {
def name: String = "KeyEncodingCache"
def mkCtx(): Ctx = new Ctx {
val underlyingMap: ConcurrentHashMap[Int, Future[String]] = new ConcurrentHashMap()
val underlyingCache: FutureCache[Int, String] = new ConcurrentMapCache(underlyingMap)
val cache: FutureCache[String, String] =
new KeyEncodingCache({ num: String => num.hashCode }, underlyingCache)
}
}
|
edombowsky/util
|
util-cache/src/test/scala/com/twitter/cache/KeyEncodingCacheTest.scala
|
Scala
|
apache-2.0
| 646 |
package org.jetbrains.plugins.scala.lang.psi.stubs.impl
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs.{IStubElementType, StubBase, StubElement}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScExtensionBody
import org.jetbrains.plugins.scala.lang.psi.stubs.ScExtensionBodyStub
class ScExtensionBodyStubImpl(
parent: StubElement[_ <: PsiElement],
elementType: IStubElementType[_ <: StubElement[_ <: PsiElement], _ <: PsiElement]
) extends StubBase[ScExtensionBody](parent, elementType)
with ScExtensionBodyStub
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/stubs/impl/ScExtensionBodyStubImpl.scala
|
Scala
|
apache-2.0
| 556 |
package com.cave.metrics.data.postgresql
import java.util.UUID
import com.cave.metrics.data.postgresql.Tables._
import com.cave.metrics.data.{User, AwsConfig, Token}
import com.typesafe.config.ConfigFactory
import org.apache.commons.logging.LogFactory
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import scala.slick.driver.H2Driver.simple._
import scala.slick.lifted.TableQuery
abstract class AbstractDataManagerSpec extends FlatSpec with Matchers with MockitoSugar with BeforeAndAfterAll {
val log = LogFactory.getLog(classOf[AbstractDataManagerSpec])
val First1 = "First"
val Last1 = "Last"
val Email1 = "[email protected]"
val Password1 = "hash-hash-hash"
val Salt1 = Some("12345")
val First2 = "FirstFirst"
val Last2 = "LastLast"
val Email2 = "[email protected]"
val Password2 = "hush-hush-hush"
val Salt2 = Some("54321")
val GiltEmail = "[email protected]"
val GiltOrgName = "test-org"
val GiltNotificationUrl = "https://notifications.gilt.com/alert"
val GiltOrgTokenDescription = "test token"
val GiltOrgToken1 = Token.createToken(GiltOrgTokenDescription)
val GiltOrgToken2 = Token.createToken(GiltOrgTokenDescription)
val GiltOrgToken3 = Token.createToken(GiltOrgTokenDescription)
var GiltOrganizationId: Long = _
var SecondOrganizationId: Long = _
val testTeamName = "twain test Team"
val testTeamName2 = "second test Team"
var teamId: Long = _
var team2Id: Long = _
var User1: User = _
var User2: User = _
val SecondOrgName = GiltOrgName + "-second"
lazy val organizationsTable = TableQuery[OrganizationsTable]
lazy val tokensTable = TableQuery[TokensTable]
lazy val alertsTable = TableQuery[AlertsTable]
lazy val teamsTable = TableQuery[TeamsTable]
lazy val queriesTable: TableQuery[QueriesTable] = TableQuery[QueriesTable]
lazy val alert2queriesTable: TableQuery[AlertQueriesTable] = TableQuery[AlertQueriesTable]
lazy val usersTable = TableQuery[UsersTable]
val appConfig = ConfigFactory.load("test-inmemorydb.conf")
val awsConfig: AwsConfig = new AwsConfig(appConfig)
val database = Database.forURL(awsConfig.rdsJdbcDatabaseUrl, driver = awsConfig.rdsJdbcDatabaseClass)
implicit val session: Session = database.createSession()
override def beforeAll() = {
log.info("Creating the DB schema...")
createSchema()
log.info("Populating the DB ...")
populateDatabase()
}
override def afterAll() = {
dropSchema()
session.close()
}
private[postgresql] def createSchema() = Tables.createSchema()
private[postgresql] def dropSchema() = Tables.dropSchema()
private[postgresql] def populateDatabase() = {
GiltOrganizationId = createOrganization(GiltOrgName)
SecondOrganizationId = createOrganization(SecondOrgName)
teamId = createTeam(testTeamName, GiltOrganizationId)
team2Id = createTeam(testTeamName2, GiltOrganizationId)
User1 = createUser(First1, Last1, Email1, Password1, Salt1)
User2 = createUser(First2, Last2, Email2, Password2, Salt2)
}
private[postgresql] def createTeam(teamName: String, orgId: Long): Long = {
val (uuid, timestamp) = (UUID.randomUUID(), new java.sql.Timestamp(System.currentTimeMillis()))
val newTeamId2 = (teamsTable returning teamsTable.map(_.id)) += TeamsRow(1, orgId, teamName, None, uuid, timestamp, uuid, timestamp, None, None)
tokensTable += TokensRow(1, GiltOrganizationId, Some(newTeamId2), "token for team twain 2 desc", "token_twain_value2", uuid, timestamp, uuid, timestamp, None, None)
newTeamId2
}
private[postgresql] def createOrganization(orgName: String): Long = {
val (uuid, timestamp) = (UUID.randomUUID(), new java.sql.Timestamp(System.currentTimeMillis()))
val orgId = (organizationsTable returning organizationsTable.map(_.id)) += OrganizationsRow(1, orgName, GiltEmail, GiltNotificationUrl, None, uuid, timestamp, uuid, timestamp, None, None)
for (i <- 1 to 3) tokensTable += TokensRow(1, orgId, None, s"token $i description for $orgName", s"token $i value for $orgName", uuid, timestamp, uuid, timestamp, None, None)
tokensTable += TokensRow(1, orgId, None, "DELETED token 4 desc", "DELETED token_value4", uuid, timestamp, uuid, timestamp, Some(uuid), Some(timestamp))
orgId
}
private[postgresql] def createUser(first: String, last: String, email: String, password: String, salt: Option[String]): User = {
val id = (usersTable returning usersTable.map(_.id)) += UsersRow(1, first, last, email, password, salt)
User(Some(id), first, last, email, password, salt)
}
}
|
gilt/cave
|
core/src/test/scala/com/cave/metrics/data/postgresql/AbstractDataManagerSpec.scala
|
Scala
|
mit
| 4,612 |
package com.neu.pdp.dataSampler
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import scala.util.Random
/**
* @author ${user.name}
*/
object App {
def main(args : Array[String]) {
if (args.length == 2) {
/**
* Initialize the Spark context
*/
// Initialize job configuration
val conf = new SparkConf()
.setAppName("Data Sampler")
// Initialize job context
val sc = new SparkContext(conf)
// Read the input & output file path
val inputPath = args(0)
val outputPath = args(1)
// Read the input and keep each record with roughly 3% probability
val selectedRecordsRDD = sc.textFile(inputPath)
.map(line => {
val delimIndex = line.indexOf(',')
val stationId = line.substring(0, delimIndex)
// note: keeps the leading delimiter, i.e. everything after the station id
val recordContent = line.substring(delimIndex)
if (Random.nextFloat() * 100 < 3) {
(stationId, recordContent)
} else {
(stationId, "")
}
})
.filter(t => { t._2.length() > 1 })
// Write the sampled records to an output file
selectedRecordsRDD.saveAsTextFile(outputPath)
} else {
println("Invalid run time arguments")
}
}
}
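// Usage sketch (assumption): submitting the job with input and output paths as
// the two expected arguments, e.g.
//   spark-submit --class com.neu.pdp.dataSampler.App dataSampler.jar \
//     hdfs:///data/records hdfs:///data/sample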
|
ideepakkrishnan/spark-scala
|
dataSampler/src/main/scala/com/neu/pdp/dataSampler/App.scala
|
Scala
|
mit
| 1,519 |
package im.actor.server.util
import scala.concurrent.ExecutionContext
import scala.util.control.NoStackTrace
import org.joda.time.DateTime
import slick.dbio.DBIO
import im.actor.api.rpc.AuthorizedClientData
import im.actor.server.{ models, persist }
object HistoryUtils {
import GroupUtils._
// Shared user id used as the history owner in public groups
private val sharedUserId = 0
def writeHistoryMessage(
fromPeer: models.Peer,
toPeer: models.Peer,
date: DateTime,
randomId: Long,
messageContentHeader: Int,
messageContentData: Array[Byte]
)(
implicit
ec: ExecutionContext
): DBIO[Unit] = {
requirePrivatePeer(fromPeer)
requireDifferentPeers(fromPeer, toPeer)
if (toPeer.typ == models.PeerType.Private) {
val outMessage = models.HistoryMessage(
userId = fromPeer.id,
peer = toPeer,
date = date,
senderUserId = fromPeer.id,
randomId = randomId,
messageContentHeader = messageContentHeader,
messageContentData = messageContentData,
deletedAt = None
)
val inMessage = models.HistoryMessage(
userId = toPeer.id,
peer = fromPeer,
date = date,
senderUserId = fromPeer.id,
randomId = randomId,
messageContentHeader = messageContentHeader,
messageContentData = messageContentData,
deletedAt = None
)
for {
_ ← persist.HistoryMessage.create(Seq(outMessage, inMessage))
_ ← persist.Dialog.updateLastMessageDate(fromPeer.id, toPeer, date)
res ← persist.Dialog.updateLastMessageDate(toPeer.id, fromPeer, date)
} yield ()
} else if (toPeer.typ == models.PeerType.Group) {
withGroup(toPeer.id) { group ⇒
withGroupUserIds(group.id) { groupUserIds ⇒
if (group.isPublic) {
val historyMessage = models.HistoryMessage(sharedUserId, toPeer, date, fromPeer.id, randomId, messageContentHeader, messageContentData, None)
for {
_ ← persist.Dialog.updateLastMessageDates(groupUserIds.toSet, toPeer, date)
_ ← persist.HistoryMessage.create(historyMessage)
} yield ()
} else {
val historyMessages = groupUserIds.map { groupUserId ⇒
models.HistoryMessage(groupUserId, toPeer, date, fromPeer.id, randomId, messageContentHeader, messageContentData, None)
}
val dialogAction = persist.Dialog.updateLastMessageDates(groupUserIds.toSet, toPeer, date)
DBIO.sequence(Seq(dialogAction, (persist.HistoryMessage.create(historyMessages) map (_.getOrElse(0))))) map (_ ⇒ ())
}
}
}
} else {
DBIO.failed(new Exception("PeerType is not supported") with NoStackTrace)
}
}
def markMessagesReceived(byPeer: models.Peer, peer: models.Peer, date: DateTime)(implicit ec: ExecutionContext): DBIO[Unit] = {
requirePrivatePeer(byPeer)
requireDifferentPeers(byPeer, peer)
peer.typ match {
case models.PeerType.Private ⇒
// TODO: #perf do in single query
DBIO.sequence(Seq(
persist.Dialog.updateLastReceivedAt(peer.id, models.Peer.privat(byPeer.id), date),
persist.Dialog.updateOwnerLastReceivedAt(byPeer.id, peer, date)
)) map (_ ⇒ ())
case models.PeerType.Group ⇒
withGroup(peer.id) { group ⇒
persist.GroupUser.findUserIds(peer.id) flatMap { groupUserIds ⇒
// TODO: #perf update dialogs in one query
val selfAction = persist.Dialog.updateOwnerLastReceivedAt(byPeer.id, models.Peer.group(peer.id), date)
val otherGroupUserIds = groupUserIds.view.filterNot(_ == byPeer.id).toSet
val otherAction = persist.Dialog.updateLastReceivedAt(otherGroupUserIds, models.Peer.group(peer.id), date)
selfAction andThen otherAction map (_ ⇒ ())
}
}
}
}
def markMessagesRead(byPeer: models.Peer, peer: models.Peer, date: DateTime)(implicit ec: ExecutionContext): DBIO[Unit] = {
requirePrivatePeer(byPeer)
requireDifferentPeers(byPeer, peer)
peer.typ match {
case models.PeerType.Private ⇒
// TODO: #perf do in single query
DBIO.sequence(Seq(
persist.Dialog.updateLastReadAt(peer.id, models.Peer.privat(byPeer.id), date),
persist.Dialog.updateOwnerLastReadAt(byPeer.id, peer, date)
)) map (_ ⇒ ())
case models.PeerType.Group ⇒
withGroup(peer.id) { group ⇒
persist.GroupUser.findUserIds(peer.id) flatMap { groupUserIds ⇒
// TODO: #perf update dialogs in one query
val selfAction = persist.Dialog.updateOwnerLastReadAt(byPeer.id, models.Peer.group(peer.id), date)
val otherGroupUserIds = groupUserIds.view.filterNot(_ == byPeer.id).toSet
val otherAction = persist.Dialog.updateLastReadAt(otherGroupUserIds, models.Peer.group(peer.id), date)
selfAction andThen otherAction map (_ ⇒ ())
}
}
}
}
def withHistoryOwner[A](peer: models.Peer)(f: Int ⇒ DBIO[A])(
implicit
ec: ExecutionContext,
client: AuthorizedClientData
): DBIO[A] = {
(peer.typ match {
case models.PeerType.Private ⇒ DBIO.successful(client.userId)
case models.PeerType.Group ⇒
withGroup(peer.id) { group ⇒
if (group.isPublic) {
DBIO.successful(sharedUserId)
} else {
DBIO.successful(client.userId)
}
}
}) flatMap f
}
def isSharedUser(userId: Int): Boolean = userId == sharedUserId
private def requireDifferentPeers(peer1: models.Peer, peer2: models.Peer) = {
if (peer1 == peer2)
throw new Exception("peers should not be same")
}
private def requirePrivatePeer(peer: models.Peer) = {
if (peer.typ != models.PeerType.Private)
throw new Exception("peer should be Private")
}
}
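// Usage sketch (assumption; ids, header and bytes are hypothetical): recording an
// outgoing private message as part of a larger DBIO transaction.
//   val action = HistoryUtils.writeHistoryMessage(
//     fromPeer = models.Peer.privat(senderUserId),
//     toPeer = models.Peer.privat(receiverUserId),
//     date = new DateTime,
//     randomId = randomId,
//     messageContentHeader = header,
//     messageContentData = bytes
//   )
//   db.run(action.transactionally)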
|
luoxiaoshenghustedu/actor-platform
|
actor-server/actor-utils/src/main/scala/im/actor/server/util/HistoryUtils.scala
|
Scala
|
mit
| 6,014 |
package org.beaucatcher.mongo.jdriver
import org.beaucatcher.bobject._
import org.beaucatcher.bobject.Implicits._
import org.beaucatcher.bson._
import org.beaucatcher.mongo._
import org.beaucatcher.caseclass.ClassAnalysis
import org.junit.Assert._
import org.junit._
import scala.util.Random
package restdemo {
import BObjectCodecs._
case class Foo(_id: ObjectId, aString: String, anInt: Int)
object Foo
extends CollectionAccessWithEntitiesBObjectOrCaseClassIdObjectId[Foo]
with JsonMethods[Foo] {
// the default collection name would conflict with the Foo
// in CollectionTest since tests are run concurrently;
// in apps you don't usually have to set this manually
override val collectionName = "restfoo"
override val jsonAnalysis = new ClassAnalysis(classOf[Foo])
override def jsonSync(implicit context: Context): BoundSyncCollection[BObject, BObject, BObject, _, _] = sync[BObject]
override def createQueryForAllObjects = BObject() // this would be dangerous in a non-test
// This object inherits a complete Collection for BObject and for the Foo case class,
// plus CRUD methods that accept/return JSON strings. In this file we're
// testing the CRUD methods.
}
case class FooWithIntId(_id: Int, aString: String, anInt: Int)
object FooWithIntId
extends CollectionAccessWithEntitiesBObjectOrCaseClass[FooWithIntId, Int]
with JsonMethods[FooWithIntId] {
// the default collection name would conflict with the FooWithIntId
// in CollectionTest since tests are run concurrently;
// in apps you don't usually have to set this manually
override val collectionName = "restfooWithIntId"
override val jsonAnalysis = new ClassAnalysis(classOf[FooWithIntId])
override def jsonSync(implicit context: Context): BoundSyncCollection[BObject, BObject, BObject, _, _] = sync[BObject]
override def createQueryForAllObjects = BObject() // this would be dangerous in a non-test
override def parseJValueIdFromPath(path: String): BInt32 = {
try {
path.toInt
} catch {
case e: NumberFormatException =>
throw new JsonValidationException("ID must be an integer, not %s".format(path), e)
}
}
// don't do this in a real program, please.
private var nextId = 1
override def generateJValueId(): BInt32 = {
nextId += 1
nextId - 1
}
override def createQueryForObject(path: String) = {
BObject("_id" -> BInt32(path.toInt))
}
}
}
class JsonMethodsTest
extends JavaDriverTestContextProvider {
import restdemo._
protected implicit def context: Context = mongoContext
@org.junit.Before
def setup() {
Foo.sync.remove(BObject())
FooWithIntId.sync.remove(BObject())
}
@Test
def putAndGetWorks(): Unit = {
// create an object
val createdJson = Foo.createJson("""{ "aString" : "hello", "anInt" : 76 }""")
// parse what we created
val bobjectCreated = Foo.parseJson(createdJson)
assertEquals("hello", bobjectCreated.get("aString").get.unwrapped)
assertEquals(76, bobjectCreated.get("anInt").get.unwrapped)
val createdIdString = bobjectCreated.get("_id").get.unwrapped.toString
// get the object
val gotJsonOption = Foo.readJson(Some(createdIdString))
assertTrue(gotJsonOption.isDefined)
// parse what we got
val bobjectGot = Foo.parseJson(gotJsonOption.get)
assertEquals(createdIdString, bobjectGot.get("_id").get.unwrapped.toString)
assertEquals(bobjectCreated, bobjectGot)
assertEquals("hello", bobjectGot.get("aString").get.unwrapped)
assertEquals(76, bobjectGot.get("anInt").get.unwrapped)
// update the object with the ID in the path only, not in JSON
val modifiedJson = Foo.updateJson(createdIdString, """{ "aString" : "hello world", "anInt" : 57 }""")
val gotModifiedJsonOption = Foo.readJson(Some(createdIdString))
val bobjectModified = Foo.parseJson(gotModifiedJsonOption.get)
assertEquals(createdIdString, bobjectModified.get("_id").get.unwrapped.toString)
assertEquals("hello world", bobjectModified.get("aString").get.unwrapped)
assertEquals(57, bobjectModified.get("anInt").get.unwrapped)
// update the object with redundant ID in the JSON
val modifiedJson2 = Foo.updateJson(createdIdString, """{ "_id" : """" +
createdIdString + """", "aString" : "hello world 2", "anInt" : 23 }""")
val gotModifiedJsonOption2 = Foo.readJson(Some(createdIdString))
val bobjectModified2 = Foo.parseJson(gotModifiedJsonOption2.get)
assertEquals(createdIdString, bobjectModified2.get("_id").get.unwrapped.toString)
assertEquals("hello world 2", bobjectModified2.get("aString").get.unwrapped)
assertEquals(23, bobjectModified2.get("anInt").get.unwrapped)
}
@Test
def putAndGetWorksWithIntId(): Unit = {
// create an object
val createdJson = FooWithIntId.createJson("""{ "aString" : "hello", "anInt" : 76 }""")
// parse what we created
val bobjectCreated = FooWithIntId.parseJson(createdJson)
assertEquals("hello", bobjectCreated.get("aString").get.unwrapped)
assertEquals(76, bobjectCreated.get("anInt").get.unwrapped)
val createdIdString = bobjectCreated.get("_id").get.unwrapped.toString
// get the object
val gotJsonOption = FooWithIntId.readJson(Some(createdIdString))
assertTrue(gotJsonOption.isDefined)
// parse what we got
val bobjectGot = FooWithIntId.parseJson(gotJsonOption.get)
assertEquals(createdIdString, bobjectGot.get("_id").get.unwrapped.toString)
assertEquals(bobjectCreated, bobjectGot)
assertEquals("hello", bobjectGot.get("aString").get.unwrapped)
assertEquals(76, bobjectGot.get("anInt").get.unwrapped)
// update the object with the ID in the path only, not in JSON
val modifiedJson = FooWithIntId.updateJson(createdIdString, """{ "aString" : "hello world", "anInt" : 57 }""")
val gotModifiedJsonOption = FooWithIntId.readJson(Some(createdIdString))
val bobjectModified = FooWithIntId.parseJson(gotModifiedJsonOption.get)
assertEquals(createdIdString, bobjectModified.get("_id").get.unwrapped.toString)
assertEquals("hello world", bobjectModified.get("aString").get.unwrapped)
assertEquals(57, bobjectModified.get("anInt").get.unwrapped)
// update the object with redundant ID in the JSON
val modifiedJson2 = FooWithIntId.updateJson(createdIdString, """{ "_id" : """ +
createdIdString + """, "aString" : "hello world 2", "anInt" : 23 }""")
val gotModifiedJsonOption2 = FooWithIntId.readJson(Some(createdIdString))
val bobjectModified2 = FooWithIntId.parseJson(gotModifiedJsonOption2.get)
assertEquals(createdIdString, bobjectModified2.get("_id").get.unwrapped.toString)
assertEquals("hello world 2", bobjectModified2.get("aString").get.unwrapped)
assertEquals(23, bobjectModified2.get("anInt").get.unwrapped)
}
@Test
def deleteWorks(): Unit = {
// create an object
val createdJson = Foo.createJson("""{ "aString" : "hello", "anInt" : 76 }""")
// parse what we created
val bobjectCreated = Foo.parseJson(createdJson)
assertEquals("hello", bobjectCreated.get("aString").get.unwrapped)
assertEquals(76, bobjectCreated.get("anInt").get.unwrapped)
val createdIdString = bobjectCreated.get("_id").get.unwrapped.toString
// get the object
val gotJsonOption = Foo.readJson(Some(createdIdString))
assertTrue(gotJsonOption.isDefined)
// parse what we got
val bobjectGot = Foo.parseJson(gotJsonOption.get)
assertEquals(createdIdString, bobjectGot.get("_id").get.unwrapped.toString)
assertEquals(bobjectCreated, bobjectGot)
assertEquals("hello", bobjectGot.get("aString").get.unwrapped)
assertEquals(76, bobjectGot.get("anInt").get.unwrapped)
// delete the object
Foo.deleteJson(createdIdString)
// fail to get the object
val gotAfterDeleteJsonOption = Foo.readJson(Some(createdIdString))
assertFalse("object is gone", gotAfterDeleteJsonOption.isDefined)
}
@Test
def readAllObjectsWorks(): Unit = {
// create some objects
Foo.createJson("""{ "aString" : "hello", "anInt" : 76 }""")
Foo.createJson("""{ "aString" : "hello2", "anInt" : 77 }""")
Foo.createJson("""{ "aString" : "hello3", "anInt" : 78 }""")
// read all
val allJsonOption = Foo.readJson(None)
assertTrue(allJsonOption.isDefined)
val objects = Foo.parseJsonArray(allJsonOption.get)
assertEquals(3, objects.size)
val strings = objects.map(_ match {
case obj: BObject => obj.get("aString").get.unwrapped.asInstanceOf[String]
case _ => throw new Exception("not an object")
})
assertEquals(List("hello", "hello2", "hello3"), strings.sorted.toList)
}
}
|
havocp/beaucatcher
|
mongo-test/src/test/scala/org/beaucatcher/mongo/jdriver/JsonMethodsTest.scala
|
Scala
|
apache-2.0
| 9,367 |
package week5
import akka.actor._
import akka.event.LoggingReceive
/**
* @author rgancea
*/
class BankAccount extends Actor {
import BankAccount._
private[this] var balance = BigInt(0)
def receive = LoggingReceive {
case Deposit(amount) =>
balance += amount
sender ! Done
case Withdraw(amount) if (amount <= balance) =>
balance -= amount
sender ! Done
case _ => sender ! Failed
}
}
object BankAccount {
case class Deposit(amount: BigInt) {
require(amount > 0)
}
case class Withdraw(amount: BigInt) {
require(amount > 0)
}
case object Done
case object Failed
}
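// Minimal usage sketch (illustrative, not part of the course code): replies
// (Done/Failed) go to the sender, which is deadLetters when messaging from
// outside an actor.
object BankAccountDemo extends App {
  import akka.actor.{ActorSystem, Props}
  import BankAccount._

  val system = ActorSystem("bank")
  val account = system.actorOf(Props[BankAccount], "account")

  account ! Deposit(100)   // balance becomes 100, actor replies Done
  account ! Withdraw(40)   // balance becomes 60, actor replies Done
  account ! Withdraw(1000) // exceeds balance, actor replies Failed

  system.terminate()
}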
|
radusw/ReactiveProgrammingCourse
|
src/main/scala/week5/BankAccount.scala
|
Scala
|
gpl-2.0
| 633 |
/*
* Copyright © 2014 Nemanja Stanarevic <[email protected]>
*
* Made with ❤ in NYC at Hacker School <http://hackerschool.com>
*
* Licensed under the GNU Affero General Public License, Version 3
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at:
*
* <http://www.gnu.org/licenses/agpl-3.0.html>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gmailapi.resources
case class DraftList(
drafts: Seq[Draft] = Nil,
nextPageToken: Option[String] = None,
resultSizeEstimate: Option[Int] = None) extends GmailResource
|
nemanja-stanarevic/gmail-api-scala-client
|
src/main/scala/gmailapi/resources/DraftList.scala
|
Scala
|
agpl-3.0
| 894 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import java.io.File
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import org.apache.commons.io.FileUtils
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.expressions.{Attribute, Predicate}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.execution.datasources.FileBasedDataSourceTest
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanRelation
import org.apache.spark.sql.execution.datasources.v2.orc.OrcScan
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.ORC_IMPLEMENTATION
/**
* OrcTest
* -> OrcSuite
* -> OrcSourceSuite
* -> HiveOrcSourceSuite
* -> OrcQueryTests
* -> OrcQuerySuite
* -> HiveOrcQuerySuite
* -> OrcPartitionDiscoveryTest
* -> OrcPartitionDiscoverySuite
* -> HiveOrcPartitionDiscoverySuite
* -> OrcFilterSuite
* -> HiveOrcFilterSuite
*/
abstract class OrcTest extends QueryTest with FileBasedDataSourceTest with BeforeAndAfterAll {
val orcImp: String = "native"
private var originalConfORCImplementation = "native"
override protected val dataSourceName: String = "orc"
override protected val vectorizedReaderEnabledKey: String =
SQLConf.ORC_VECTORIZED_READER_ENABLED.key
protected override def beforeAll(): Unit = {
super.beforeAll()
originalConfORCImplementation = spark.conf.get(ORC_IMPLEMENTATION)
spark.conf.set(ORC_IMPLEMENTATION.key, orcImp)
}
protected override def afterAll(): Unit = {
spark.conf.set(ORC_IMPLEMENTATION.key, originalConfORCImplementation)
super.afterAll()
}
/**
* Writes `data` to a Orc file, which is then passed to `f` and will be deleted after `f`
* returns.
*/
protected def withOrcFile[T <: Product: ClassTag: TypeTag]
(data: Seq[T])
(f: String => Unit): Unit = withDataSourceFile(data)(f)
/**
* Writes `data` to a Orc file and reads it back as a `DataFrame`,
* which is then passed to `f`. The Orc file will be deleted after `f` returns.
*/
protected def withOrcDataFrame[T <: Product: ClassTag: TypeTag]
(data: Seq[T], testVectorized: Boolean = true)
(f: DataFrame => Unit): Unit = withDataSourceDataFrame(data, testVectorized)(f)
/**
* Writes `data` to a Orc file, reads it back as a `DataFrame` and registers it as a
* temporary table named `tableName`, then call `f`. The temporary table together with the
* Orc file will be dropped/deleted after `f` returns.
*/
protected def withOrcTable[T <: Product: ClassTag: TypeTag]
(data: Seq[T], tableName: String, testVectorized: Boolean = true)
(f: => Unit): Unit = withDataSourceTable(data, tableName, testVectorized)(f)
protected def makeOrcFile[T <: Product: ClassTag: TypeTag](
data: Seq[T], path: File): Unit = makeDataSourceFile(data, path)
protected def makeOrcFile[T <: Product: ClassTag: TypeTag](
df: DataFrame, path: File): Unit = makeDataSourceFile(df, path)
protected def checkPredicatePushDown(df: DataFrame, numRows: Int, predicate: String): Unit = {
withTempPath { file =>
// It needs to repartition data so that we can have several ORC files
// in order to skip stripes in ORC.
df.repartition(numRows).write.orc(file.getCanonicalPath)
val actual = stripSparkFilter(spark.read.orc(file.getCanonicalPath).where(predicate)).count()
assert(actual < numRows)
}
}
protected def checkNoFilterPredicate
(predicate: Predicate, noneSupported: Boolean = false)
(implicit df: DataFrame): Unit = {
val output = predicate.collect { case a: Attribute => a }.distinct
val query = df
.select(output.map(e => Column(e)): _*)
.where(Column(predicate))
query.queryExecution.optimizedPlan match {
case PhysicalOperation(_, filters,
DataSourceV2ScanRelation(_, o: OrcScan, _)) =>
assert(filters.nonEmpty, "No filter is analyzed from the given query")
if (noneSupported) {
assert(o.pushedFilters.isEmpty, "Unsupported filters should not show in pushed filters")
} else {
assert(o.pushedFilters.nonEmpty, "No filter is pushed down")
val maybeFilter = OrcFilters.createFilter(query.schema, o.pushedFilters)
assert(maybeFilter.isEmpty, s"Couldn't generate filter predicate for ${o.pushedFilters}")
}
case _ =>
throw new AnalysisException("Can not match OrcTable in the query.")
}
}
protected def readResourceOrcFile(name: String): DataFrame = {
val url = Thread.currentThread().getContextClassLoader.getResource(name)
// Copy to avoid URISyntaxException when `sql/hive` accesses the resources in `sql/core`
val file = File.createTempFile("orc-test", ".orc")
    file.deleteOnExit()
FileUtils.copyURLToFile(url, file)
spark.read.orc(file.getAbsolutePath)
}
/**
* Takes a sequence of products `data` to generate multi-level nested
* dataframes as new test data. It tests both non-nested and nested dataframes
* which are written and read back with Orc datasource.
*
* This is different from [[withOrcDataFrame]] which does not
* test nested cases.
*/
protected def withNestedOrcDataFrame[T <: Product: ClassTag: TypeTag](data: Seq[T])
(runTest: (DataFrame, String, Any => Any) => Unit): Unit =
withNestedOrcDataFrame(spark.createDataFrame(data))(runTest)
protected def withNestedOrcDataFrame(inputDF: DataFrame)
(runTest: (DataFrame, String, Any => Any) => Unit): Unit = {
withNestedDataFrame(inputDF).foreach { case (newDF, colName, resultFun) =>
withTempPath { file =>
newDF.write.format(dataSourceName).save(file.getCanonicalPath)
readFile(file.getCanonicalPath, true) { df => runTest(df, colName, resultFun) }
}
}
}
}
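// Hypothetical concrete suite (sketch): shows how the helpers above are
// meant to be used; `ExampleOrcSuite` is illustrative, not part of Spark.
class ExampleOrcSuite extends OrcTest with org.apache.spark.sql.test.SharedSparkSession {
  test("round-trips simple rows through ORC") {
    withOrcDataFrame((1 to 3).map(i => (i, s"row$i"))) { df =>
      checkAnswer(df, Seq(Row(1, "row1"), Row(2, "row2"), Row(3, "row3")))
    }
  }
}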
|
rednaxelafx/apache-spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcTest.scala
|
Scala
|
apache-2.0
| 6,764 |
package quisp.flot
import quisp._
import spray.json.{JsValue, JsonWriter}
import java.awt.Color
import javax.jws.WebMethod
/**
* Specialized line chart
* @author rodneykinney
*/
class ConfigurableLineChart(
var config: Chart,
val display: ChartDisplay[ConfigurableChart[Chart], Int])
extends LineChartAPI[ConfigurableLineChart]
trait LineChartAPI[T <: UpdatableChart[T, Chart]]
extends ChartAPI[T] {
@WebMethod(action = "Data series options")
override def series(idx: Int) =
config.series(idx).lineApi(s =>
update(config.copy(series = config.series.updated(idx, s))))
@WebMethod(action = "Line painting options")
def lineOptions = {
val opt = Option(config.options.series).getOrElse(DefaultSeriesOptions())
val lineOpt = Option(opt.lines).getOrElse(LineChartOptions())
lineOpt.api(x =>
update(config.copy(options = config.options.copy(series = opt.copy(lines = x)))))
}
@WebMethod(action = "Marker painting options")
def markerOptions = {
val opt = Option(config.options.series).getOrElse(DefaultSeriesOptions())
val markerOpt = Option(opt.points).getOrElse(Marker())
markerOpt.api(x =>
update(config.copy(options = config.options.copy(series = opt.copy(points = x)))))
}
}
case class LineChartOptions(
show: Boolean = true,
lineWidth: Option[Int] = None,
fill: Option[Double] = None,
fillColor: Color = null,
additionalFields: Map[String, JsValue] = Map()
) extends ExtensibleJsObject {
def api[T](update: LineChartOptions => T) = new LineChartOptionsAPI(this, update)
}
class LineChartOptionsAPI[T](config: LineChartOptions, update: LineChartOptions => T) extends ExtensibleJsObjectAPI {
@WebMethod(action = "Show line")
def show(x: Boolean) = update(config.copy(show = x))
@WebMethod
def lineWidth(x: Int) = update(config.copy(lineWidth = Some(x)))
@WebMethod(action = "Opacity of fill color")
def fillOpacity(x: Double) = update(config.copy(fill = Some(x)))
@WebMethod(action = "Fill area under line with this color")
def fillColor(x: Color) = update(config.copy(fillColor = x))
@WebMethod(action = "Add additional values to the JSON object")
def additionalField[V: JsonWriter](name: String, value: V)
= update(config.copy(additionalFields = config.additionalFields + (name -> implicitly[JsonWriter[V]].write(value))))
}
class LineSeriesAPI[T](config: Series, update: Series => T) extends SeriesAPI(config, update) {
@WebMethod(action = "Line painting options")
def lineOptions = Option(config.lines).getOrElse(LineChartOptions())
.api(x => update(config.copy(lines = x)))
@WebMethod(action = "Marker painting options")
def markerOptions = Option(config.points).getOrElse(Marker())
.api(x => update(config.copy(points = x)))
}
|
rodneykinney/quisp
|
src/main/scala/quisp/flot/ConfigurableLineChart.scala
|
Scala
|
apache-2.0
| 2,771 |
package sangria.macros
import sangria.parser.{QueryParser, SyntaxError}
import scala.reflect.macros.blackbox
class ParseMacro(context: blackbox.Context)
extends {
val c = context
}
with MacroAstLiftable {
import c.universe._
def impl(args: Expr[Any]*) =
if (args.nonEmpty)
c.abort(
c.enclosingPosition,
"String interpolation is not supported for `graphql`/`gql` macro at the moment.")
else
c.prefix.tree match {
// Expects a string interpolation that doesn't contain any
// expressions, thus containing only a single tree
case Apply(_, List(Apply(_, t :: Nil))) =>
val q"${gql: String}" = t
try q"${QueryParser.parse(gql).get}"
catch {
case error: SyntaxError => syntaxError(error)
}
case _ =>
c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.")
}
def implInput(args: Expr[Any]*) =
if (args.nonEmpty)
c.abort(
c.enclosingPosition,
"String interpolation is not supported for `graphqlInput`/`gqlInp` macro at the moment.")
else
c.prefix.tree match {
// Expects a string interpolation that doesn't contain any
// expressions, thus containing only a single tree
case Apply(_, List(Apply(_, t :: Nil))) =>
val q"${gql: String}" = t
try q"${QueryParser.parseInput(gql).get}"
catch {
case error: SyntaxError => syntaxError(error)
}
case _ =>
c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.")
}
def implInputDoc(args: Expr[Any]*) =
if (args.nonEmpty)
c.abort(
c.enclosingPosition,
"String interpolation is not supported for `gqlInpDoc` macro at the moment.")
else
c.prefix.tree match {
// Expects a string interpolation that doesn't contain any
// expressions, thus containing only a single tree
case Apply(_, List(Apply(_, t :: Nil))) =>
val q"${gql: String}" = t
try q"${QueryParser.parseInputDocument(gql).get}"
catch {
case error: SyntaxError => syntaxError(error)
}
case _ =>
c.abort(c.enclosingPosition, "Invalid `graphql` invocation syntax.")
}
def syntaxError(error: SyntaxError) = {
val errorPos = error.originalError.position
val enclosingCol = if (errorPos.line == 1) calcStringStart else 0
val source = c.enclosingPosition.source
val line = source.lineToOffset(c.enclosingPosition.line + (errorPos.line - 2))
val col = line + enclosingCol + (errorPos.column - 1)
val pos = c.enclosingPosition.withPoint(col)
c.abort(pos, error.formattedError(showPosition = false))
}
def calcStringStart: Int = {
val source = c.enclosingPosition.source
val content = source.lineToString(c.enclosingPosition.line - 1)
val contentStart = content.substring(c.enclosingPosition.column - 1)
val offset = "(\\w+\"+)".r.findFirstMatchIn(contentStart).fold(0)(_.end)
c.enclosingPosition.column - 1 + offset
}
}
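// Typical use (sketch): this macro backs sangria's `graphql`/`gql` string
// interpolators, so a malformed query fails at compile time with a position
// pointing into the literal:
//
//   import sangria.macros._
//   val doc: sangria.ast.Document = graphql"""{ hero { name } }"""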
|
OlegIlyenko/sangria
|
modules/core/src/main/scala/sangria/macros/ParseMacro.scala
|
Scala
|
apache-2.0
| 3,130 |
package s99
import scala.language.implicitConversions
object Logic extends Log {
def not(a: Boolean): Boolean = !a
def and(a: Boolean, b: Boolean): Boolean = a && b
def or(a: Boolean, b: Boolean): Boolean = a || b
def nand(a: Boolean, b: Boolean): Boolean = !(and(a, b))
  def nor(a: Boolean, b: Boolean): Boolean = !(or(a, b))
def xor(a: Boolean, b: Boolean): Boolean = !(a == b)
def equ(a: Boolean, b: Boolean): Boolean = a == b
def impl(a: Boolean, b: Boolean): Boolean = or(not(a), b)
  implicit def b2l(a: Boolean): Logic = new Logic(a)
def table2(f: (Boolean, Boolean) => Boolean): String = {
    val fmt = "%-5s %-5s %-5s\n"
val head = fmt.format("A", "B", "Result")
val res = for {
a <- List(true, false)
b <- List(true, false)
} yield {
fmt.format(a, b, f(a, b))
}
head + res.mkString
}
def gray(n: Int): List[String] = {
n match {
case 1 => List("0", "1")
case n =>
val tmp = gray(n - 1)
val tmp1 = tmp.map { x => "0" + x }
val tmp2 = tmp.map { x => "1" + x }
tmp1 ++ tmp2
}
}
  def huffman(xs: List[(String, Int)]): List[(String, String)] = {
    import Huffman._
    if (xs.isEmpty) Nil
    else {
      // build the tree from frequency-sorted leaves, then read codes off it
      val leaves = xs.sortBy(_._2).map { case (s, n) => (Leaf(s): Tree[String], n) }
      val List((tree, _)) = merge(leaves)
      codify(tree)
    }
  }
}
class Logic(a: Boolean) {
import Logic.not
import Logic.b2l
def and(b: Boolean) = a && b
def or(b: Boolean): Boolean = a || b
def nand(b: Boolean): Boolean = not(and(b))
  def nor(b: Boolean): Boolean = not(or(b))
def xor(b: Boolean): Boolean = not(a == b)
def equ(b: Boolean): Boolean = a == b
def impl(b: Boolean): Boolean = not(a) or (b)
}
/**
* http://rosettacode.org/wiki/Huffman_coding#Scala
*/
object Huffman extends Log {
sealed trait Tree[+A]
case class Leaf[A](v: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]
//case object End extends Tree[Nothing]
def contains[A](tree: Tree[A], ch: A): Boolean = {
tree match {
//case End => false
case Leaf(a) => ch == a
case Branch(left, right) => contains(left, ch) || contains(right, ch)
}
}
def encode[A](tree: Tree[A], v: A): String = {
@scala.annotation.tailrec
def go[A](tree: Tree[A], v: A, code: String): String = tree match {
//case End => ""
case Leaf(_) => code
case Branch(l, r) => if (contains(l, v)) go(l, v, code + "0") else go(r, v, code + "1")
}
go(tree, v, "")
}
  @scala.annotation.tailrec
  def merge[A](xs: List[(Tree[A], Int)]): List[(Tree[A], Int)] = {
    debug(s"xs=${xs}")
    xs match {
      case Nil | List(_) => xs
      case l :: r :: as =>
        val m = (Branch(l._1, r._1), l._2 + r._2)
        merge((m :: as).sortBy(_._2))
    }
  }

  def codify[A](xs: Tree[A]): List[(A, String)] = {
    def recurse(xs: Tree[A], prefix: String): List[(A, String)] = xs match {
      case Leaf(c)      => (c, prefix) :: Nil
      case Branch(l, r) => recurse(l, prefix + "0") ::: recurse(r, prefix + "1")
    }
    recurse(xs, "")
  }
}
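// Runnable sketch: building a Huffman code table from the pieces above
// (the symbol frequencies are illustrative).
object HuffmanExample extends App {
  import Huffman._

  val leaves: List[(Tree[Char], Int)] =
    List('a' -> 45, 'b' -> 13, 'c' -> 12, 'd' -> 16, 'e' -> 9, 'f' -> 5)
      .map { case (c, n) => (Leaf(c): Tree[Char], n) }
      .sortBy(_._2)

  val List((tree, _)) = merge(leaves)
  println(codify(tree))      // the code table, e.g. (a,0), (c,100), ...
  println(encode(tree, 'a')) // the most frequent symbol gets the shortest code
}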
|
hzwuhao8/S99
|
src/main/scala/s99/Logic.scala
|
Scala
|
apache-2.0
| 3,048 |
/**
*
* © Copyright 2017 Greg Symons <[email protected]>.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package tipster.tips
import scala.concurrent._
import akka._
import akka.stream._
import akka.stream.scaladsl._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.common._
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.unmarshalling._
import org.joda.time.{ DateTime => JodaDateTime }
import tipster.json._
object model {
sealed trait TipMessage { }
sealed trait HasUsername {
val username: String
}
sealed trait HasMessage {
val message: String
}
sealed trait HasId {
val id: Int
}
@SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
final case class GetTip(id: Option[Int] = None) extends TipMessage
  object GetTip extends TipMessage {
def apply(id: Int): GetTip = GetTip(Some(id))
}
final case class CreateTip(
override val username: String,
override val message: String
) extends TipMessage
with HasUsername
with HasMessage
final case class Tip(
override val id: Int,
override val username: String,
override val message: String,
created: JodaDateTime,
updated: JodaDateTime
) extends TipMessage
with HasUsername
with HasMessage
with HasId
//Explicitly extend Function5 so Slick auto mapping works.
object Tip extends Function5[Int,
String,
String,
JodaDateTime,
JodaDateTime,
Tip] {
val MAX_MESSAGE_LEN = 140
}
sealed trait CommentMessage { }
final case class CreateComment(
tipId: Int,
username: String,
comment: String
) extends CommentMessage
final case class GetComment(
tipId: Int,
id: Option[Int]
)
final case class Comment(
id: Int,
tipId: Int,
username: String,
comment: String,
created: JodaDateTime
) extends CommentMessage
}
object services {
import model._
trait TipsWriter {
def createTip(tip: CreateTip): Future[Tip]
def createComment(comment: CreateComment): Future[Comment]
}
trait TipsReader {
def findTip(tip: GetTip): Future[Option[Tip]]
def getAllTips: Source[Tip, NotUsed]
def getAllComments(comments: GetComment): Source[Comment, NotUsed]
}
}
trait TipsApi extends Directives
with TipsJsonSupport
{
import model._
import services._
def tipsWriter: Option[TipsWriter]
def tipsReader: Option[TipsReader]
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
def tipsRoutes: Route = {
implicit val jsess = EntityStreamingSupport.json
pathPrefix("tips") {
pathEnd {
post {
tipsWriter map { writer =>
entity(as[CreateTip]) { incoming =>
validate(incoming.message.length <= Tip.MAX_MESSAGE_LEN,
"Message too long")
{
complete(writer.createTip(incoming))
}
}
} getOrElse complete(StatusCodes.InternalServerError)
} ~
get {
tipsReader map { reader =>
complete(reader.getAllTips)
} getOrElse complete(StatusCodes.InternalServerError)
}
} ~
pathPrefix(IntNumber) { tipId =>
pathEnd {
get {
tipsReader map { reader =>
onSuccess(reader.findTip(GetTip(tipId))) {
case Some(tip) => complete(tip)
case None => complete(StatusCodes.NotFound)
}
} getOrElse complete(StatusCodes.InternalServerError)
}
} ~
pathPrefix("comments") {
pathEnd {
post {
tipsWriter map { writer =>
entity(as[Map[String, String]]) { incoming =>
(validate(incoming.get("username").nonEmpty, "username field is required") &
validate(incoming.get("comment").nonEmpty, "comment field is required")) {
complete(
writer.createComment(
CreateComment(
tipId = tipId,
username = incoming("username"),
comment = incoming("comment"))))
}
}
} getOrElse complete(StatusCodes.InternalServerError)
} ~
get {
tipsReader map { reader =>
complete(reader.getAllComments(GetComment(tipId = tipId, id=None)))
} getOrElse complete(StatusCodes.InternalServerError)
}
}
}
}
}
}
}
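// Hypothetical wiring (sketch): a concrete service supplies the reader and
// writer; the class and parameter names are illustrative only.
class TipsService(reader: services.TipsReader, writer: services.TipsWriter)
    extends TipsApi {
  override def tipsReader: Option[services.TipsReader] = Some(reader)
  override def tipsWriter: Option[services.TipsWriter] = Some(writer)
  val route: Route = tipsRoutes
}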
|
gregsymons/tipster
|
src/main/scala/Tips.scala
|
Scala
|
apache-2.0
| 5,472 |
package home.yang.dataflow.datahold
import home.yang.dataflow.{DataHolder, ValueMapper}
/**
* Created by Administrator on 2016/5/2 0002.
*/
class IntData(id: String, value: String) extends DataHolder(id, value) {
var int = 0
override def parse: Unit = {
int = Integer.parseInt(value)
}
override def mapTo(outValueCreator: ValueMapper): Any = outValueCreator.createInt(int)
}
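// Usage sketch (hypothetical `mapper` instance): parse the raw string, then
// map the held value:
//
//   val holder = new IntData("age", "42")
//   holder.parse         // int becomes 42
//   holder.mapTo(mapper) // delegates to mapper.createInt(42)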
|
wjingyao2008/firsttry
|
dataflow/src/main/scala/home/yang/dataflow/datahold/IntData.scala
|
Scala
|
apache-2.0
| 396 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.filters.cors
import play.api.Configuration
import play.filters.cors.CORSConfig.Origins
import scala.concurrent.duration._
/**
* Configuration for play.filters.cors.AbstractCORSPolicy.
*
* <ul>
* <li>allow only requests with origins from a whitelist (by default all origins are allowed)</li>
* <li>allow only HTTP methods from a whitelist for preflight requests (by default all methods are allowed)</li>
* <li>allow only HTTP headers from a whitelist for preflight requests (by default all headers are allowed)</li>
* <li>set custom HTTP headers to be exposed in the response (by default no headers are exposed)</li>
* <li>disable/enable support for credentials (by default credentials support is enabled)</li>
* <li>set how long (in seconds) the results of a preflight request can be cached in a preflight result cache (by default 3600 seconds, 1 hour)</li>
* <li>enable/disable serving requests with origins not in whitelist as non-CORS requests (by default they are forbidden)</li>
* </ul>
*
* @param allowedOrigins
* [[http://www.w3.org/TR/cors/#resource-requests §6.1.2]]
* [[http://www.w3.org/TR/cors/#resource-preflight-requests §6.2.2]]
* Always matching is acceptable since the list of origins can be unbounded.
* @param isHttpMethodAllowed
* [[http://www.w3.org/TR/cors/#resource-preflight-requests §6.2.5]]
* Always matching is acceptable since the list of methods can be unbounded.
* @param isHttpHeaderAllowed
* [[http://www.w3.org/TR/cors/#resource-preflight-requests §6.2.6]]
* Always matching is acceptable since the list of headers can be unbounded.
*
*/
case class CORSConfig(
allowedOrigins: Origins = Origins.None,
isHttpMethodAllowed: String => Boolean = _ => true,
isHttpHeaderAllowed: String => Boolean = _ => true,
exposedHeaders: Seq[String] = Seq.empty,
supportsCredentials: Boolean = true,
preflightMaxAge: Duration = 1.hour,
serveForbiddenOrigins: Boolean = false
) {
def anyOriginAllowed: Boolean = allowedOrigins == Origins.All
  def withAnyOriginAllowed: CORSConfig = copy(allowedOrigins = Origins.All)
def withOriginsAllowed(origins: String => Boolean): CORSConfig = copy(allowedOrigins = Origins.Matching(origins))
def withMethodsAllowed(methods: String => Boolean): CORSConfig = copy(isHttpMethodAllowed = methods)
def withHeadersAllowed(headers: String => Boolean): CORSConfig = copy(isHttpHeaderAllowed = headers)
def withExposedHeaders(headers: Seq[String]): CORSConfig = copy(exposedHeaders = headers)
def withCredentialsSupport(supportsCredentials: Boolean): CORSConfig = copy(supportsCredentials = supportsCredentials)
def withPreflightMaxAge(maxAge: Duration): CORSConfig = copy(preflightMaxAge = maxAge)
def withServeForbiddenOrigins(serveForbiddenOrigins: Boolean): CORSConfig = copy(serveForbiddenOrigins = serveForbiddenOrigins)
import scala.collection.JavaConverters._
import scala.compat.java8.FunctionConverters._
import java.util.{ function => juf }
def withOriginsAllowed(origins: juf.Function[String, Boolean]): CORSConfig = withOriginsAllowed(origins.asScala)
def withMethodsAllowed(methods: juf.Function[String, Boolean]): CORSConfig = withMethodsAllowed(methods.asScala)
def withHeadersAllowed(headers: juf.Function[String, Boolean]): CORSConfig = withHeadersAllowed(headers.asScala)
def withExposedHeaders(headers: java.util.List[String]): CORSConfig = withExposedHeaders(headers.asScala.toSeq)
def withPreflightMaxAge(maxAge: java.time.Duration): CORSConfig = withPreflightMaxAge(Duration.fromNanos(maxAge.toNanos))
}
/**
* Helpers to build CORS policy configurations
*/
object CORSConfig {
/**
* Origins allowed by the CORS filter
*/
sealed trait Origins extends (String => Boolean)
object Origins {
case object All extends Origins {
override def apply(v: String) = true
}
case class Matching(func: String => Boolean) extends Origins {
override def apply(v: String) = func(v)
}
val None = Matching(_ => false)
}
/**
*
*/
val denyAll: CORSConfig =
CORSConfig(
allowedOrigins = Origins.None,
isHttpMethodAllowed = _ => false,
isHttpHeaderAllowed = _ => false,
exposedHeaders = Seq.empty,
supportsCredentials = true,
preflightMaxAge = 0.seconds,
serveForbiddenOrigins = false
)
/**
* Builds a [[CORSConfig]] from a play.api.Configuration instance.
*
* @example The configuration is as follows:
* {{{
* play.filters.cors {
* pathPrefixes = ["/myresource", ...] # ["/"] by default
* allowedOrigins = ["http://...", ...] # If null, all origins are allowed
* allowedHttpMethods = ["PATCH", ...] # If null, all methods are allowed
* allowedHttpHeaders = ["Custom-Header", ...] # If null, all headers are allowed
* exposedHeaders = [...] # empty by default
* supportsCredentials = true # true by default
* preflightMaxAge = 1 hour # 1 hour by default
* serveForbiddenOrigins = false # false by default
* }
*
* }}}
*/
def fromConfiguration(conf: Configuration): CORSConfig = {
val config = conf.get[Configuration]("play.filters.cors")
fromUnprefixedConfiguration(config)
}
private[cors] def fromUnprefixedConfiguration(config: Configuration): CORSConfig = {
CORSConfig(
allowedOrigins = config.get[Option[Seq[String]]]("allowedOrigins") match {
case Some(allowed) => Origins.Matching(allowed.toSet)
case None => Origins.All
},
isHttpMethodAllowed =
config.get[Option[Seq[String]]]("allowedHttpMethods").map { methods =>
val s = methods.toSet
s.contains _
}.getOrElse(_ => true),
isHttpHeaderAllowed =
config.get[Option[Seq[String]]]("allowedHttpHeaders").map { headers =>
val s = headers.map(_.toLowerCase(java.util.Locale.ENGLISH)).toSet
s.contains _
}.getOrElse(_ => true),
exposedHeaders =
config.get[Seq[String]]("exposedHeaders"),
supportsCredentials =
config.get[Boolean]("supportsCredentials"),
preflightMaxAge =
config.get[Duration]("preflightMaxAge"),
serveForbiddenOrigins =
config.get[Boolean]("serveForbiddenOrigins")
)
}
}
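// Example (sketch): tightening the default policy with the fluent API above;
// the origin and header values are illustrative.
object CORSConfigExample {
  import scala.concurrent.duration._

  val restrictive: CORSConfig = CORSConfig()
    .withOriginsAllowed(Set("https://example.com"))
    .withMethodsAllowed(Set("GET", "POST"))
    .withHeadersAllowed(Set("content-type"))
    .withExposedHeaders(Seq("X-Request-Id"))
    .withPreflightMaxAge(10.minutes)
}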
|
wsargent/playframework
|
framework/src/play-filters-helpers/src/main/scala/play/filters/cors/CORSConfig.scala
|
Scala
|
apache-2.0
| 6,484 |
package com.andre_cruz.utils
import org.scalatest.{Matchers, WordSpecLike}
class OptionUtilsTest extends WordSpecLike with Matchers {
import OptionUtils._
"optionIf" when {
"its predicate is true" should {
"yield an Option" in {
optionIf(true) { 123 } shouldBe Some(123)
}
}
"its predicate is false" should {
"yield None" in {
optionIf(false) { 123 } shouldBe None
}
"never evaluate its 'result' block" should {
optionIf(false) {
fail("'result' should not have evaluated")
} shouldBe None
}
}
}
}
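// A minimal implementation consistent with the tests above (the real
// OptionUtils may differ): `result` must be a by-name parameter so the
// false branch never evaluates it, as the last test requires.
//
//   object OptionUtils {
//     def optionIf[A](predicate: Boolean)(result: => A): Option[A] =
//       if (predicate) Some(result) else None
//   }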
|
codecruzer/scala-caching
|
src/test/scala/com/andre_cruz/utils/OptionUtilsTest.scala
|
Scala
|
apache-2.0
| 605 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.jdbc.connection
class DB2ConnectionProviderSuite extends ConnectionProviderSuiteBase {
test("setAuthenticationConfigIfNeeded must set authentication if not set") {
val provider = new DB2ConnectionProvider()
val driver = registerDriver(provider.driverClass)
testSecureConnectionProvider(provider, driver, options("jdbc:db2://localhost/db2"))
}
}
|
witgo/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/jdbc/connection/DB2ConnectionProviderSuite.scala
|
Scala
|
apache-2.0
| 1,216 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.feature.transform.vision.image.augmentation
import com.intel.analytics.bigdl.dllib.feature.dataset.segmentation.PolyMasks
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.{FeatureTransformer, ImageFeature}
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.label.roi.RoiLabel
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.util.BboxUtil
object ScaleResize {
/**
* Scaling length and width of image feature to ensure that:
* if maxSize is not set, the smaller one between width and length will be scaled to minSize.
* if maxSize is set, the larger one will be scaled to maxSize or maxSize -1.
* e.g. image feature height = 375, width = 500
* case 1: minSize=100, maxSize=120, then new size (90, 120)
* case 2: minSize=100, maxSize=-1, then new size (100, 133)
* @param minSize the minimal size after resize
* @param maxSize the maximal size after resize
* @param resizeROI whether to resize roi, default false
*/
def apply(minSize: Int, maxSize: Int = -1, resizeROI: Boolean = false): ScaleResize =
new ScaleResize(minSize, maxSize, resizeROI)
}
class ScaleResize(minSize: Int, maxSize: Int = -1, resizeROI: Boolean = false)
extends FeatureTransformer {
private def getSize(sizeH: Int, sizeW: Int): (Int, Int) = {
var size = minSize
if (maxSize > 0) {
val (minOrigSize, maxOrigSize) = if (sizeW > sizeH) (sizeH, sizeW) else (sizeW, sizeH)
val thread = maxOrigSize.toFloat / minOrigSize * size
if (thread > maxSize) size = math.round(maxSize * minOrigSize / maxOrigSize)
}
if ((sizeW <= sizeH && sizeW == size) || (sizeH <= sizeW && sizeH == size)) {
(sizeH, sizeW)
} else if (sizeW < sizeH) {
(size * sizeH / sizeW, size)
} else {
(size, size * sizeW / sizeH)
}
}
override def transformMat(feature: ImageFeature): Unit = {
val sizes = this.getSize(feature.getHeight(), feature.getWidth())
val resizeH = sizes._1
val resizeW = sizes._2
Resize.transform(feature.opencvMat(), feature.opencvMat(), resizeW, resizeH,
useScaleFactor = false)
// resize roi label
if (feature.hasLabel() && feature(ImageFeature.label).isInstanceOf[RoiLabel] && resizeROI) {
// bbox resize
resizeBbox(feature)
// mask resize
resizeMask(feature)
}
}
private def resizeBbox(feature: ImageFeature): Unit = {
val scaledW = feature.getWidth().toFloat / feature.getOriginalWidth
val scaledH = feature.getHeight().toFloat / feature.getOriginalHeight
val target = feature.getLabel[RoiLabel]
BboxUtil.scaleBBox(target.bboxes, scaledH, scaledW)
}
private def resizeMask(feature: ImageFeature): Unit = {
val scaledW = feature.getWidth().toFloat / feature.getOriginalWidth
val scaledH = feature.getHeight().toFloat / feature.getOriginalHeight
val masks = feature.getLabel[RoiLabel].masks
if (masks == null) return
for (i <- 0 until masks.length) {
val oneMask = masks(i)
require(oneMask.isInstanceOf[PolyMasks],
s"Only support poly mask resize, but get ${oneMask}")
if (oneMask.isInstanceOf[PolyMasks]) {
val polyMask = oneMask.asInstanceOf[PolyMasks]
val poly = polyMask.poly
for (i <- 0 to (poly.length - 1)) {
val p = poly(i)
for (j <- 0 to (p.length - 1)) {
if (j % 2 == 0) p(j) *= scaledW // for x
else p(j) *= scaledH // for y
}
}
// change to resized mask
masks(i) = PolyMasks(poly, feature.getHeight(), feature.getWidth())
}
}
}
}
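// Standalone sketch of the sizing rule for quick verification; it mirrors
// the private getSize above (integer division included) and is not part of
// the BigDL API.
object ScaleResizeExample extends App {
  def scaledSize(h: Int, w: Int, minSize: Int, maxSize: Int = -1): (Int, Int) = {
    var size = minSize
    if (maxSize > 0) {
      val (minO, maxO) = if (w > h) (h, w) else (w, h)
      if (maxO.toFloat / minO * size > maxSize) size = maxSize * minO / maxO
    }
    if (w < h) (size * h / w, size) else (size, size * w / h)
  }

  println(scaledSize(375, 500, 100, 120)) // (90,120), matching the scaladoc
  println(scaledSize(375, 500, 100))      // (100,133)
}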
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/feature/transform/vision/image/augmentation/ScaleResize.scala
|
Scala
|
apache-2.0
| 4,268 |