code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M)
---|---|---|---|---|---
import sbt._
import scala._
object Resolvers {
//lazy val localm2 = "Local Maven Repository" at "file://"+Path.userHome+"/.m2/repository"
lazy val mvncentral = "Maven Central" at "http://repo1.maven.org/maven2/"
lazy val typesafe = Classpaths.typesafeReleases
lazy val ossreleases = "Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases/"
lazy val osssnapshots = "Sonatype OSS Snapshots" at "http://oss.sonatype.org/content/repositories/snapshots/"
lazy val all = Seq(mvncentral, typesafe, ossreleases, osssnapshots)
}
object Dependencies {
val core = Core.all
val test = Test.all
object Core {
//lazy val scalaz = "org.scalaz" %% "scalaz-core" % "7.1.0-M4"
//lazy val shapeless = "com.chuusai" %% "shapeless" % "2.0.0-SNAPSHOT"
lazy val paradise = "org.scalamacros" % "paradise_2.11.1" % "2.1.0-M1"
lazy val slf4j = "org.slf4j" % "slf4j-api" % "1.7.6"
lazy val all = Seq(paradise, slf4j) //shapeless, scalaz
}
object Test {
lazy val logback = "ch.qos.logback" % "logback-classic" % "1.1.2" % "test"
lazy val scalatest = "org.scalatest" %% "scalatest" % "2.2.0" % "test"
lazy val junit = "junit" % "junit" % "4.7" % "test" //for xml output
lazy val all = Seq(logback, scalatest, junit)
}
}
| thecoda/autoproxy | project/Dependencies.scala | Scala | apache-2.0 | 1,565 |
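A brief, hedged sketch of how the Resolvers and Dependencies objects above would typically be wired into an sbt 0.13-era build; the project name and layout are illustrative, not taken from the repository.

// Hypothetical project/Build.scala (illustrative names; sbt 0.13 style).
import sbt._
import Keys._

object ExampleBuild extends Build {
  lazy val root = Project("root", file("."))
    .settings(
      resolvers ++= Resolvers.all,                                   // Maven Central, Typesafe, Sonatype
      libraryDependencies ++= Dependencies.core ++ Dependencies.test // paradise, slf4j, plus test deps
    )
}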
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import reflect.macros.Context
private[anyvals] object OddIntMacro extends CompileTimeAssertions {
def apply(c: Context)(value: c.Expr[Int]): c.Expr[OddInt] = {
val notValidMsg =
"OddInt.apply can only be invoked on odd Int literals, like OddInt(3)."
val notLiteralMsg =
"OddInt.apply can only be invoked on Int literals, like " +
"OddInt(3). Please use OddInt.from instead."
ensureValidIntLiteral(c)(value, notValidMsg, notLiteralMsg) { i => i % 2 == 1 }
c.universe.reify { OddInt.from(value.splice).get }
}
}
| dotty-staging/scalatest | scalactic-test/src/test/scala/org/scalactic/anyvals/OddIntMacro.scala | Scala | apache-2.0 | 1,165 |
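A hedged sketch of the call-site behavior the macro enforces, inferred from the two messages above (OddInt's own definition is not shown in this file):

val a = OddInt(3)      // compiles: 3 is an odd Int literal
// OddInt(4)           // rejected at compile time with notValidMsg
// OddInt(someInt)     // rejected with notLiteralMsg; non-literals must go through from
val b = OddInt.from(5) // runtime validation; the result exposes .get, per the reify above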
package co.ledger.wallet.web.ripple.components
import org.scalajs.dom
import scala.scalajs.js
/**
* Created by alix on 4/19/17.
*/
object WindowManager {
def open(url: String) = {
/*val width = 600
val height = 1200
js.Dynamic.global.chrome.app.window.create(url, js.Dynamic.literal(outerBounds = js.Dynamic.literal(width = width,
height = height,
left = (dom.window.screen.availWidth - width),
top = (dom.window.screen.availHeight - height))))*/
js.Dynamic.global.gui.Shell.openExternal(url)
}
}
//
| LedgerHQ/ledger-wallet-ripple | src/main/scala/co/ledger/wallet/web/ripple/components/WindowFormatter.scala | Scala | mit | 544 |
package keystoneml.nodes.nlp
import java.lang.Integer.{rotateLeft => rotl}
import breeze.linalg.SparseVector
import keystoneml.workflow.Transformer
import scala.collection.mutable
/**
* Converts the n-grams of a sequence of terms to a sparse vector representing their frequencies,
* using the hashing trick: https://en.wikipedia.org/wiki/Feature_hashing
*
* It computes a rolling MurmurHash3 instead of fully constructing the n-grams, making
* it more efficient than using [[NGramsFeaturizer]] followed by [[HashingTF]], although
* it should return the exact same feature vector. The MurmurHash3 methods are copied from
* [[scala.util.hashing.MurmurHash3]]
*
* Individual terms are hashed using Scala's `.##` method. We may want to convert to MurmurHash3 for strings,
* as discussed for Spark's ML Pipelines in https://issues.apache.org/jira/browse/SPARK-10574
*
* @param orders valid ngram orders, must be consecutive positive integers
* @param numFeatures The desired feature space to convert to using the hashing trick.
*/
case class NGramsHashingTF(orders: Seq[Int], numFeatures: Int)
extends Transformer[Seq[String], SparseVector[Double]] {
private[this] final val minOrder = orders.min
private[this] final val maxOrder = orders.max
require(minOrder >= 1, s"minimum order is not >= 1, found $minOrder")
orders.sliding(2).foreach {
case xs if xs.length > 1 => require(xs(0) == xs(1) - 1,
s"orders are not consecutive; contains ${xs(0)} and ${xs(1)}")
case _ =>
}
final val seqSeed = "Seq".hashCode
/** Mix in a block of data into an intermediate hash value. */
final def mix(hash: Int, data: Int): Int = {
var h = mixLast(hash, data)
h = rotl(h, 13)
h * 5 + 0xe6546b64
}
/** May optionally be used as the last mixing step. Is a little bit faster than mix,
* as it does no further mixing of the resulting hash. For the last element this is not
* necessary as the hash is thoroughly mixed during finalization anyway. */
final def mixLast(hash: Int, data: Int): Int = {
var k = data
k *= 0xcc9e2d51
k = rotl(k, 15)
k *= 0x1b873593
hash ^ k
}
/** Finalize a hash to incorporate the length and make sure all bits avalanche. */
final def finalizeHash(hash: Int, length: Int): Int = avalanche(hash ^ length)
/** Force all bits of the hash to avalanche. Used for finalizing the hash. */
private final def avalanche(hash: Int): Int = {
var h = hash
h ^= h >>> 16
h *= 0x85ebca6b
h ^= h >>> 13
h *= 0xc2b2ae35
h ^= h >>> 16
h
}
def nonNegativeMod(x: Int, mod: Int): Int = {
val rawMod = x % mod
rawMod + (if (rawMod < 0) mod else 0)
}
def apply(line: Seq[String]): SparseVector[Double] = {
val hashes = new Array[Int](line.length)
var i = 0
while (i < line.length) {
hashes(i) = line(i).##
i += 1
}
var j = 0
val termFrequencies = mutable.HashMap.empty[Int, Double]
i = 0
while (i + minOrder <= line.length) {
var order = minOrder
var h = seqSeed
j = i
while (j < i + minOrder) {
h = mix(h, hashes(j))
j += 1
}
val feature = nonNegativeMod(finalizeHash(h, order), numFeatures)
termFrequencies.put(feature, termFrequencies.getOrElse(feature, 0.0) + 1.0)
order = minOrder + 1
while (order <= maxOrder && i + order <= line.length) {
h = mix(h, hashes(i + order - 1))
val feature = nonNegativeMod(finalizeHash(h, order), numFeatures)
termFrequencies.put(feature, termFrequencies.getOrElse(feature, 0.0) + 1.0)
order += 1
}
i += 1
}
SparseVector(numFeatures)(termFrequencies.toSeq:_*)
}
}
| amplab/keystone | src/main/scala/keystoneml/nodes/nlp/NGramsHashingTF.scala | Scala | apache-2.0 | 3,723 |
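A minimal usage sketch for the NGramsHashingTF transformer above; the tokens and feature-space size are illustrative.

// Hash all unigrams and bigrams of a tokenized line into a 1024-bucket count vector.
val tf = NGramsHashingTF(orders = 1 to 2, numFeatures = 1024)
val vec = tf(Seq("the", "quick", "brown", "fox"))
// vec(b) holds the total frequency of every 1- and 2-gram hashed to bucket b.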
package org.typedsolutions.aws.kinesis
import com.amazonaws.AmazonWebServiceRequest
import com.amazonaws.ClientConfiguration
import com.amazonaws.auth.AWSCredentialsProvider
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain
import com.amazonaws.services.kinesis.{AmazonKinesisAsync => Underlying, AmazonKinesisAsyncClient => UnderlyingClient}
import org.typedsolutions.aws.handlers.PromiseHandler
import org.typedsolutions.aws.handlers.PromiseHandlerFactory
import org.typedsolutions.aws.kinesis.converters.KinesisConverter
import org.typedsolutions.aws.kinesis.model._
import org.typedsolutions.aws.util.ExecutionContextWrapper
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
class AmazonKinesisClient(
val underlying: Underlying,
val converter: KinesisConverter,
val factory: PromiseHandlerFactory)(
implicit ec: ExecutionContext) extends AmazonKinesis {
import converter._
import factory._
def createStream(request: CreateStreamRequest): Future[CreateStreamResponse] = {
invoke(request)(toAws)(underlying.createStreamAsync)(Void => CreateStreamResponse())
}
def deleteStream(request: DeleteStreamRequest): Future[DeleteStreamResponse] = {
invoke(request)(toAws)(underlying.deleteStreamAsync)(Void => DeleteStreamResponse())
}
def describeStream(request: DescribeStreamRequest): Future[DescribeStreamResponse] = {
invoke(request)(toAws)(underlying.describeStreamAsync)(fromAws)
}
def getRecords(request: GetRecordsRequest): Future[GetRecordsResponse] = {
invoke(request)(toAws)(underlying.getRecordsAsync)(fromAws)
}
def getShardIterator(request: GetShardIteratorRequest): Future[GetShardIteratorResponse] = {
invoke(request)(toAws)(underlying.getShardIteratorAsync)(fromAws)
}
def listStreams(request: ListStreamsRequest): Future[ListStreamsResponse] = {
invoke(request)(toAws)(underlying.listStreamsAsync)(fromAws)
}
def mergeShards(request: MergeShardsRequest): Future[MergeShardsResponse] = {
invoke(request)(toAws)(underlying.mergeShardsAsync)(Void => MergeShardsResponse())
}
def putRecord(request: PutRecordRequest): Future[PutRecordResponse] = {
invoke(request)(toAws)(underlying.putRecordAsync)(fromAws)
}
def putRecords(request: PutRecordsRequest): Future[PutRecordsResponse] = {
invoke(request)(toAws)(underlying.putRecordsAsync)(fromAws)
}
def splitShard(request: SplitShardRequest): Future[SplitShardResponse] = {
invoke(request)(toAws)(underlying.splitShardAsync)(Void => SplitShardResponse())
}
private def invoke
[Request, UnderlyingRequest <: AmazonWebServiceRequest, UnderlyingResponse, Response]
(request: Request)
(toAws: Request => UnderlyingRequest)
(method: (UnderlyingRequest, PromiseHandler[UnderlyingRequest, UnderlyingResponse]) => _)
(fromAws: UnderlyingResponse => Response): Future[Response] = {
val handler = create[UnderlyingRequest, UnderlyingResponse]()
val underlyingRequest = toAws(request)
method(underlyingRequest, handler)
handler.future.map(fromAws)
}
}
object AmazonKinesisClient {
def apply(
awsCredentialsProvider: AWSCredentialsProvider = new DefaultAWSCredentialsProviderChain,
clientConfiguration: ClientConfiguration = new ClientConfiguration())(implicit
ec: ExecutionContext): AmazonKinesisClient = {
new AmazonKinesisClient(
new UnderlyingClient(awsCredentialsProvider, clientConfiguration, new ExecutionContextWrapper(ec)),
new KinesisConverter,
new PromiseHandlerFactory)
}
}
| mattroberts297/akka-kinesis | src/main/scala/org/typedsolutions/aws/kinesis/AmazonKinesisClient.scala | Scala | mit | 3,573 |
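A hedged sketch of how the client above might be used. That ListStreamsRequest has a no-argument constructor is an assumption about the model package imported in the file, not something shown here.

import scala.concurrent.ExecutionContext.Implicits.global

val kinesis = AmazonKinesisClient() // default credentials chain and client configuration
kinesis.listStreams(ListStreamsRequest()).foreach { response => // request shape assumed
  println(response)
}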
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.renewal
import cats.data.OptionT
import cats.implicits._
import connectors.DataCacheConnector
import controllers.{AmlsBaseController, CommonPlayDependencies}
import javax.inject.{Inject, Singleton}
import models.businessmatching.BusinessMatching
import models.registrationprogress.Completed
import models.responsiblepeople.ResponsiblePerson
import models.status.{ReadyForRenewal, RenewalSubmitted}
import play.api.mvc.MessagesControllerComponents
import services.businessmatching.BusinessMatchingService
import services.{ProgressService, RenewalService, SectionsProvider, StatusService}
import utils.{AuthAction, ControllerHelper}
import views.html.renewal.renewal_progress
import scala.concurrent.Future
@Singleton
class RenewalProgressController @Inject()(val authAction: AuthAction,
val ds: CommonPlayDependencies,
val dataCacheConnector: DataCacheConnector,
val progressService: ProgressService,
val sectionsProvider: SectionsProvider,
val renewals: RenewalService,
val businessMatchingService: BusinessMatchingService,
val statusService: StatusService,
val cc: MessagesControllerComponents,
renewal_progress: renewal_progress) extends AmlsBaseController(ds, cc) {
def get = authAction.async {
implicit request =>
val statusInfo = statusService.getDetailedStatus(request.amlsRefNumber, request.accountTypeId, request.credId)
val result = statusInfo map {
case (r: ReadyForRenewal, _) => {
for {
renewalSection <- OptionT.liftF(renewals.getSection(request.credId))
cache <- OptionT(dataCacheConnector.fetchAll(request.credId))
responsiblePeople <- OptionT.fromOption[Future](cache.getEntry[Seq[ResponsiblePerson]](ResponsiblePerson.key))
businessMatching <- OptionT.fromOption[Future](cache.getEntry[BusinessMatching](BusinessMatching.key))
} yield {
val businessName = businessMatching.reviewDetails.map(r => r.businessName).getOrElse("")
val activities = businessMatching.activities.fold(Seq.empty[String])(_.businessActivities.map(_.getMessage()).toSeq)
val variationSections = sectionsProvider.sections(cache).filter(_.name != BusinessMatching.messageKey)
val canSubmit = renewals.canSubmit(renewalSection, variationSections)
val msbOrTcspExists = ControllerHelper.isMSBSelected(Some(businessMatching)) ||
ControllerHelper.isTCSPSelected(Some(businessMatching))
val hasCompleteNominatedOfficer = ControllerHelper.hasCompleteNominatedOfficer(Option(responsiblePeople))
val nominatedOfficerName = ControllerHelper.completeNominatedOfficerTitleName(Option(responsiblePeople))
Ok(renewal_progress(variationSections, businessName, activities, canSubmit, msbOrTcspExists, r, renewalSection.status == Completed, hasCompleteNominatedOfficer, nominatedOfficerName))
}
}
case (r:RenewalSubmitted, _) => OptionT.fromOption[Future](Some(Redirect(controllers.routes.RegistrationProgressController.get)))
case _ => throw new Exception("An UnknownException has occurred: RenewalProgressController")
}
result.flatMap(_.getOrElse(InternalServerError("Cannot get business matching or renewal date")))
}
def post() = authAction.async {
implicit request =>
progressService.getSubmitRedirect(request.amlsRefNumber, request.accountTypeId, request.credId) map {
case Some(url) => Redirect(url)
case _ => InternalServerError("Could not get data for redirect")
}
}
}
| hmrc/amls-frontend | app/controllers/renewal/RenewalProgressController.scala | Scala | apache-2.0 | 4,607 |
package com.getbootstrap.savage.server
import scala.util.{Failure, Success, Try}
import org.eclipse.egit.github.core.service.IssueService
import com.getbootstrap.savage.github.{GitHubActorWithLogging, PullRequestNumber}
import com.getbootstrap.savage.PullRequestBuildResult
class PullRequestCommenter extends GitHubActorWithLogging {
private def tryToCommentOn(prNum: PullRequestNumber, commentMarkdown: String) = {
val issueService = new IssueService(gitHubClient)
Try { issueService.createComment(settings.MainRepoId, prNum.number, commentMarkdown) }
}
override def receive = {
case PullRequestBuildResult(prNum, commitSha, buildUrl, succeeded) => {
val mythicalStatus = if (succeeded) { "**CONFIRMED**" } else { "**BUSTED**" }
val plainStatus = if (succeeded) { "**Tests passed.**" } else { "**Tests failed.**" }
val previewInfo = if (settings.ShowPreviewUrls) { s"Docs preview: http://preview.twbsapps.com/c/${commitSha.sha}" } else { "" }
val commentMarkdown = s"""
|${plainStatus} Automated cross-browser testing via Sauce Labs and Travis CI shows that the JavaScript changes in this pull request are: ${mythicalStatus}
|
|Commit: ${commitSha.sha}
|Build details: ${buildUrl}
|${previewInfo}
|
|(*Please note that this is a [fully automated](https://github.com/twbs/savage) comment.*)
""".stripMargin
tryToCommentOn(prNum, commentMarkdown) match {
case Success(comment) => log.info(s"Successfully posted comment ${comment.getUrl} for ${prNum}")
case Failure(exc) => log.error(exc, s"Error posting comment for ${prNum}")
}
}
}
}
| twbs/savage | src/main/scala/com/getbootstrap/savage/server/PullRequestCommenter.scala | Scala | mit | 1,677 |
package com.seanshubin.uptodate.logic
trait PomFileScanner {
def scanPomFiles(): Seq[Pom]
}
| SeanShubin/up-to-date | logic/src/main/scala/com/seanshubin/uptodate/logic/PomFileScanner.scala | Scala | unlicense | 95 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.ops
import com.intel.analytics.bigdl.nn.Squeeze
import com.intel.analytics.bigdl.tensor.Tensor
import org.scalatest.{FlatSpec, Matchers}
class SqueezeSpec extends FlatSpec with Matchers {
"Squeeze operation" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericFloat
val input =
Tensor(Array(1, 2, 1, 3, 1, 1)).rand()
val expectOutput = input.squeeze()
val output = Squeeze(null, false).forward(input)
output should be(expectOutput)
}
}
| wzhongyuan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/ops/SqueezeSpec.scala | Scala | apache-2.0 | 1,135 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frsse2008
import uk.gov.hmrc.ct.accounts.frsse2008.calculations.ProfitOrLossCalculator
import uk.gov.hmrc.ct.accounts.frsse2008.retriever.Frsse2008AccountsBoxRetriever
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
case class AC37(value: Option[Int]) extends CtBoxIdentifier(name = "Previous Profit or loss for the financial year") with CtOptionalInteger
object AC37 extends Calculated[AC37, Frsse2008AccountsBoxRetriever] with ProfitOrLossCalculator {
override def calculate(boxRetriever: Frsse2008AccountsBoxRetriever): AC37 = {
calculatePreviousProfitOtLossAfterTax(ac33 = boxRetriever.ac33(),
ac35 = boxRetriever.ac35())
}
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frsse2008/AC37.scala | Scala | apache-2.0 | 1,347 |
package ml.combust.mleap.core.feature
import ml.combust.mleap.core.Model
import ml.combust.mleap.core.types._
import ml.combust.mleap.tensor.Tensor
/** Class for a reverse string indexer model.
*
* This model reverses the [[StringIndexerModel]] model.
*
* Use this to go from an integer representation of a label to a string.
* Alternatively, goes from a list of integers to a list of labels.
*
* @param labels labels for reverse string indexing
* @param inputShape shape of the input, determines scalar/list output as well
*/
case class ReverseStringIndexerModel(labels: Seq[String],
inputShape: DataShape = ScalarShape(false)) extends Model {
require(inputShape.nonNullable, "cannot take null inputs")
private val indexToString: Map[Int, String] = labels.zipWithIndex.map(v => (v._2, v._1)).toMap
/** Map an index to its string representation.
*
* @param index index to reverse index
* @return string representation of index
*/
def apply(index: Int): String = indexToString(index)
/** Map a list of indices to string representations.
*
* @param indices sequence of indices
* @return sequence of labels
*/
def apply(indices: Seq[Int]): Seq[String] = indices.map(indexToString)
/** Map a tensor of indices to string representations.
*
* @param indices tensor of indices
* @return tensor of label strings
*/
def apply(indices: Tensor[Int]): Tensor[String] = indices.mapValues(indexToString)
override def inputSchema: StructType = StructType("input" -> DataType(BasicType.Double, inputShape)).get
override def outputSchema: StructType = StructType("output" -> DataType(BasicType.String, inputShape).asNullable).get
}
| combust-ml/mleap | mleap-core/src/main/scala/ml/combust/mleap/core/feature/ReverseStringIndexerModel.scala | Scala | apache-2.0 | 1,746 |
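A small usage sketch, grounded directly in the three apply overloads of the model above:

val model = ReverseStringIndexerModel(labels = Seq("cat", "dog", "fish"))
model(1)         // "dog"
model(Seq(2, 0)) // Seq("fish", "cat")
// A Tensor[Int] of indices maps element-wise to a Tensor[String] the same way.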
package scala.slick.ast
import OptimizerUtil._
import collection.mutable.HashMap
import scala.slick.util.RefId
/**
* A tree transformer which replaces nodes transitively while updating
* some information about the tree.
*/
abstract class Transformer extends (Node => Node) { self =>
def replace: PartialFunction[Node, Node]
def initTree(n: Node) = ()
private def applyInternal(tree: Node, once: Boolean): Node = {
val repl = replace.orElse(pfidentity[Node])
def scanAndTr(n: Node): Node = {
initTree(n)
val n2 = memoized[Node, Node](r => { n => repl(n).nodeMapChildren(r) })(n)
if(once || (n2 eq n)) n2 else scanAndTr(n2)
}
scanAndTr(tree)
}
def apply(tree: Node): Node = applyInternal(tree, false)
def applyOnce(tree: Node): Node = applyInternal(tree, true)
def compose(g: Transformer): Transformer = new Transformer {
def replace = self.replace.orElse(g.replace)
override def initTree(n: Node) {
self.initTree(n)
g.initTree(n)
}
}
def andThen(g: Transformer): Transformer = g.compose(this)
}
object Transformer {
trait Defs extends Transformer {
val defs = new HashMap[Symbol, Node]
abstract override def initTree(tree: Node) {
super.initTree(tree)
defs.clear()
defs ++= tree.collectAll[(Symbol, Node)] { case d: DefNode => d.nodeGenerators }
}
//@deprecated("Use Def instead", "")
object ResolvedRef {
def unapply(n: Node): Option[(Symbol, Node)] = n match {
case Ref(sym) => defs.get(sym).map(v => (sym, v))
case _ => None
}
}
//@deprecated("Use Def instead", "")
object ResolvedInRef {
def unapply(n: Node): Option[(Symbol, Node, Node)] = n match {
case InRef(sym, what) => defs.get(sym).map(v => (sym, v, what))
case _ => None
}
}
object Def {
def unapply(sym: Symbol): Option[Node] = defs.get(sym)
}
}
trait DefRefsBidirectional extends Transformer {
val defs = new HashMap[Symbol, RefId[Node]]
val reverse = new HashMap[RefId[Node], Symbol]
override def initTree(tree: Node) {
super.initTree(tree)
defs.clear()
defs ++= tree.collectAll[(Symbol, RefId[Node])] {
case d: DefNode => d.nodeGenerators.map { case (s,n) => (s, RefId(n)) }
}
reverse.clear()
reverse ++= defs.map { case (k,v) => (v,k) }
}
}
}
abstract class RecursiveTransformer extends (Node => Node) {
private[this] var _chain: List[Node] = Nil
private[this] var _scope: Map[Symbol, (Node, Node)] = Map.empty
def chain: List[Node] = _chain
def scope: Map[Symbol, (Node, Node)] = _scope
def apply(tree: Node): Node = {
val repl = replace.orElse(pfidentity[Node])
def tr(n: Node): Node = {
val n2 = repl(n)
var defsHere: Seq[(Symbol, Node)] = n2 match {
case d: DefNode => d.nodeGenerators
case _ => Seq.empty
}
def updateDefs(from: Node, to: Node) {
defsHere = defsHere.map { case (s, n) => (s, if(n eq from) to else n) }
}
val defChildren = defsHere.map(t => RefId(t._2)).toSet
val n3 = n2.nodeMapChildren { ch: Node =>
if(defChildren.isEmpty || defChildren.contains(RefId(ch))) {
val prevChain = _chain
_chain = n2 :: _chain
val ch2 = tr(ch)
updateDefs(ch, ch2)
_chain = prevChain
ch2
} else {
val prevChain = _chain
_chain = n2 :: _chain
val prevScope = _scope
_scope = _scope ++ defsHere.map { case (s, what) => (s, (what, n2)) }
val ch2 = tr(ch)
updateDefs(ch, ch2)
_chain = prevChain
_scope = prevScope
ch2
}
}
if(n3 eq n) n else tr(n3)
}
tr(tree)
}
def replace: PartialFunction[Node, Node]
}
| szeiger/scala-query | src/main/scala/scala/slick/ast/Transformer.scala | Scala | bsd-2-clause | 3,823 |
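A hedged sketch of a concrete Transformer. It assumes Ref is a case class with a single Symbol field, as its pattern match in Defs above suggests, and rewrites every reference to one symbol into a reference to another:

def renameRefs(from: Symbol, to: Symbol): Transformer = new Transformer {
  def replace: PartialFunction[Node, Node] = {
    case Ref(sym) if sym == from => Ref(to) // assumes a Ref(sym) constructor
  }
}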
package epic.parser
package kbest
import epic.trees._
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable
import Trees.Zipper._
import epic.trees.Trees.Zipper
object TopDownKBestAStar {
private implicit def ordTKAItem[L]: Ordering[TKAItem[L]] = Ordering[Double].on((_:TKAItem[L]).weight)
def apply[L, W](chart: RefinedChartMarginal[L, W], k: Int):IndexedSeq[(BinarizedTree[L], Double)] = {
import chart._
val root = chart.topology.rootIndex
val kbestList = new ArrayBuffer[(BinarizedTree[L], Double)]()
val queue = new mutable.PriorityQueue[TKAItem[(Int, Int)]]
queue.enqueue(StartItem)
while(!queue.isEmpty && kbestList.size < k) {
queue.dequeue() match {
case StartItem =>
val begin = 0
val end = length
val span = Span(begin, end)
val l = root
for(lref <- inside.top.enteredLabelRefinements(begin, end, l)) {
val insideScore = inside.top.labelScore(begin, end, l, lref)
queue += TopItem(Zipper(NullaryTree(l -> lref, span)), insideScore)
}
case CompleteTreeItem(tree, weight) =>
kbestList += (tree.map(l => chart.topology.labelIndex.get(l._1)) -> weight)
case TopItem(zipper, weight) =>
val (a, refA) = zipper.tree.label
val begin = zipper.tree.begin
val end = zipper.tree.end
val aScore = inside.top.labelScore(begin, end, a, refA)
for (r <- topology.indexedUnaryRulesWithParent(a); refR <- anchoring.validRuleRefinementsGivenParent(begin, end, r, refA)) {
val b = topology.child(r)
val chain = topology.chain(r)
val refB = anchoring.childRefinement(r, refR)
val bScore = inside.bot.labelScore(begin, end, b, refB)
if(!bScore.isInfinite) {
val rScore = anchoring.scoreUnaryRule(begin, end, r, refR)
val newWeight = weight - aScore + bScore + rScore
val newParentLabel = (b,refB)
val newZipper = zipper.copy(UnaryTree(zipper.tree.label, NullaryTree(newParentLabel, zipper.tree.span), chain, zipper.tree.span)).down.get
assert(newZipper.label == newZipper.label)
queue += BotItem(newZipper, newWeight)
}
}
// completed the whole sentence
case BotItem(zipper, weight) if zipper.begin == zipper.end - 1 =>
zipper.next match {
case None =>
queue += CompleteTreeItem(zipper.upToRoot.tree, weight)
case Some(zip) =>
queue += TopItem(zip, weight)
}
case BotItem(zipper, weight) =>
val (root, rootRef) = zipper.label
val begin = zipper.begin
val end = zipper.end
val aScore = inside.bot.labelScore(begin, end, root, rootRef)
val traceOn = (begin == 0 && end == 4)
val spanScore = anchoring.scoreSpan(begin, end, root, rootRef)
for {
r <- topology.indexedBinaryRulesWithParent(root)
b = topology.leftChild(r)
c = topology.rightChild(r)
refR <- anchoring.validRuleRefinementsGivenParent(begin, end, r, rootRef)
refB = anchoring.leftChildRefinement(r, refR)
refC = anchoring.rightChildRefinement(r, refR)
split <- inside.top.feasibleSplitPoints(begin, end, b, refB, c, refC)
} {
val ruleScore = anchoring.scoreBinaryRule(begin, split, end, r, refR)
val score = (
ruleScore
+ inside.top.labelScore(begin, split, b, refB)
+ inside.top.labelScore(split, end, c, refC)
+ spanScore
)
assert(score <= aScore + 1E-4, score -> aScore)
val newWeight = weight - aScore + score
if(!newWeight.isInfinite) {
val newZipper = zipper.copy(BinaryTree(zipper.tree.label,
NullaryTree(b -> refB, Span(begin,split)),
NullaryTree(c -> refC, Span(split, end)), zipper.tree.span)).down.get
assert(newZipper.next.get.begin == newZipper.end, newZipper)
queue += TopItem(newZipper, newWeight)
}
}
}
}
kbestList
}
/**
*
* @author dlwh
*/
private sealed trait TKAItem[+L] { def weight: Double }
private case object StartItem extends TKAItem[Nothing] { def weight = 0.0 }
private case class TopItem[L](zipper: Zipper[L], weight: Double) extends TKAItem[L]
private case class BotItem[L](zipper: Zipper[L], weight: Double) extends TKAItem[L]
private case class CompleteTreeItem[L](tree: BinarizedTree[L], weight: Double) extends TKAItem[L]
}
| maxim-rabinovich/epic | src/main/scala/epic/parser/kbest/TopDownKBestAStar.scala | Scala | apache-2.0 | 4,738 |
package io.getquill.context.async
object SqlTypes extends Enumeration {
type SqlTypes = Value
val BIT, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, REAL, DOUBLE, NUMERIC, DECIMAL, CHAR, VARCHAR, LONGVARCHAR, DATE, TIME, TIMESTAMP, BINARY, VARBINARY, LONGVARBINARY, NULL, ARRAY, BLOB, BOOLEAN, TIME_WITH_TIMEZONE, TIMESTAMP_WITH_TIMEZONE, UUID = Value
}
| getquill/quill | quill-async/src/main/scala/io/getquill/context/async/SqlTypes.scala | Scala | apache-2.0 | 358 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.nio.{ByteBuffer, MappedByteBuffer}
import scala.collection.Map
import scala.collection.mutable
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import sun.misc.Unsafe
import sun.nio.ch.DirectBuffer
import org.apache.spark.SparkConf
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.util.Utils
/**
* Storage information for each BlockManager.
*
* This class assumes BlockId and BlockStatus are immutable, such that the consumers of this
* class cannot mutate the source of the information. Accesses are not thread-safe.
*/
private[spark] class StorageStatus(
val blockManagerId: BlockManagerId,
val maxMemory: Long,
val maxOnHeapMem: Option[Long],
val maxOffHeapMem: Option[Long]) {
/**
* Internal representation of the blocks stored in this block manager.
*/
private val _rddBlocks = new mutable.HashMap[Int, mutable.Map[BlockId, BlockStatus]]
private val _nonRddBlocks = new mutable.HashMap[BlockId, BlockStatus]
private case class RddStorageInfo(memoryUsage: Long, diskUsage: Long, level: StorageLevel)
private val _rddStorageInfo = new mutable.HashMap[Int, RddStorageInfo]
private case class NonRddStorageInfo(var onHeapUsage: Long, var offHeapUsage: Long,
var diskUsage: Long)
private val _nonRddStorageInfo = NonRddStorageInfo(0L, 0L, 0L)
/** Create a storage status with an initial set of blocks, leaving the source unmodified. */
def this(
bmid: BlockManagerId,
maxMemory: Long,
maxOnHeapMem: Option[Long],
maxOffHeapMem: Option[Long],
initialBlocks: Map[BlockId, BlockStatus]) = {
this(bmid, maxMemory, maxOnHeapMem, maxOffHeapMem)
initialBlocks.foreach { case (bid, bstatus) => addBlock(bid, bstatus) }
}
/**
* Return the blocks stored in this block manager.
*
* @note This is somewhat expensive, as it involves cloning the underlying maps and then
* concatenating them together. Much faster alternatives exist for common operations such as
* contains, get, and size.
*/
def blocks: Map[BlockId, BlockStatus] = _nonRddBlocks ++ rddBlocks
/**
* Return the RDD blocks stored in this block manager.
*
* @note This is somewhat expensive, as it involves cloning the underlying maps and then
* concatenating them together. Much faster alternatives exist for common operations such as
* getting the memory, disk, and off-heap memory sizes occupied by this RDD.
*/
def rddBlocks: Map[BlockId, BlockStatus] = _rddBlocks.flatMap { case (_, blocks) => blocks }
/** Add the given block to this storage status. If it already exists, overwrite it. */
private[spark] def addBlock(blockId: BlockId, blockStatus: BlockStatus): Unit = {
updateStorageInfo(blockId, blockStatus)
blockId match {
case RDDBlockId(rddId, _) =>
_rddBlocks.getOrElseUpdate(rddId, new mutable.HashMap)(blockId) = blockStatus
case _ =>
_nonRddBlocks(blockId) = blockStatus
}
}
/**
* Return the given block stored in this block manager in O(1) time.
*
* @note This is much faster than `this.blocks.get`, which is O(blocks) time.
*/
def getBlock(blockId: BlockId): Option[BlockStatus] = {
blockId match {
case RDDBlockId(rddId, _) =>
_rddBlocks.get(rddId).flatMap(_.get(blockId))
case _ =>
_nonRddBlocks.get(blockId)
}
}
/** Return the max memory that can be used by this block manager. */
def maxMem: Long = maxMemory
/** Return the memory remaining in this block manager. */
def memRemaining: Long = maxMem - memUsed
/** Return the memory used by this block manager. */
def memUsed: Long = onHeapMemUsed.getOrElse(0L) + offHeapMemUsed.getOrElse(0L)
/** Return the on-heap memory remaining in this block manager. */
def onHeapMemRemaining: Option[Long] =
for (m <- maxOnHeapMem; o <- onHeapMemUsed) yield m - o
/** Return the off-heap memory remaining in this block manager. */
def offHeapMemRemaining: Option[Long] =
for (m <- maxOffHeapMem; o <- offHeapMemUsed) yield m - o
/** Return the on-heap memory used by this block manager. */
def onHeapMemUsed: Option[Long] = onHeapCacheSize.map(_ + _nonRddStorageInfo.onHeapUsage)
/** Return the off-heap memory used by this block manager. */
def offHeapMemUsed: Option[Long] = offHeapCacheSize.map(_ + _nonRddStorageInfo.offHeapUsage)
/** Return the memory used by on-heap caching RDDs */
def onHeapCacheSize: Option[Long] = maxOnHeapMem.map { _ =>
_rddStorageInfo.collect {
case (_, storageInfo) if !storageInfo.level.useOffHeap => storageInfo.memoryUsage
}.sum
}
/** Return the memory used by off-heap caching RDDs */
def offHeapCacheSize: Option[Long] = maxOffHeapMem.map { _ =>
_rddStorageInfo.collect {
case (_, storageInfo) if storageInfo.level.useOffHeap => storageInfo.memoryUsage
}.sum
}
/** Return the disk space used by this block manager. */
def diskUsed: Long = _nonRddStorageInfo.diskUsage + _rddBlocks.keys.toSeq.map(diskUsedByRdd).sum
/** Return the disk space used by the given RDD in this block manager in O(1) time. */
def diskUsedByRdd(rddId: Int): Long = _rddStorageInfo.get(rddId).map(_.diskUsage).getOrElse(0L)
/**
* Update the relevant storage info, taking into account any existing status for this block.
*/
private def updateStorageInfo(blockId: BlockId, newBlockStatus: BlockStatus): Unit = {
val oldBlockStatus = getBlock(blockId).getOrElse(BlockStatus.empty)
val changeInMem = newBlockStatus.memSize - oldBlockStatus.memSize
val changeInDisk = newBlockStatus.diskSize - oldBlockStatus.diskSize
val level = newBlockStatus.storageLevel
// Compute new info from old info
val (oldMem, oldDisk) = blockId match {
case RDDBlockId(rddId, _) =>
_rddStorageInfo.get(rddId)
.map { case RddStorageInfo(mem, disk, _) => (mem, disk) }
.getOrElse((0L, 0L))
case _ if !level.useOffHeap =>
(_nonRddStorageInfo.onHeapUsage, _nonRddStorageInfo.diskUsage)
case _ if level.useOffHeap =>
(_nonRddStorageInfo.offHeapUsage, _nonRddStorageInfo.diskUsage)
}
val newMem = math.max(oldMem + changeInMem, 0L)
val newDisk = math.max(oldDisk + changeInDisk, 0L)
// Set the correct info
blockId match {
case RDDBlockId(rddId, _) =>
// If this RDD is no longer persisted, remove it
if (newMem + newDisk == 0) {
_rddStorageInfo.remove(rddId)
} else {
_rddStorageInfo(rddId) = RddStorageInfo(newMem, newDisk, level)
}
case _ =>
if (!level.useOffHeap) {
_nonRddStorageInfo.onHeapUsage = newMem
} else {
_nonRddStorageInfo.offHeapUsage = newMem
}
_nonRddStorageInfo.diskUsage = newDisk
}
}
}
/** Helper methods for storage-related objects. */
private[spark] object StorageUtils extends Logging {
// In Java 8, the type of DirectBuffer.cleaner() was sun.misc.Cleaner, and it was possible
// to access the method sun.misc.Cleaner.clean() to invoke it. The type changed to
// jdk.internal.ref.Cleaner in later JDKs, and the .clean() method is not accessible even with
// reflection. However, sun.misc.Unsafe added an invokeCleaner() method in JDK 9+, and this is
// still accessible with reflection.
private val bufferCleaner: DirectBuffer => Unit =
if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
val cleanerMethod =
Utils.classForName("sun.misc.Unsafe").getMethod("invokeCleaner", classOf[ByteBuffer])
val unsafeField = classOf[Unsafe].getDeclaredField("theUnsafe")
unsafeField.setAccessible(true)
val unsafe = unsafeField.get(null).asInstanceOf[Unsafe]
buffer: DirectBuffer => cleanerMethod.invoke(unsafe, buffer)
} else {
val cleanerMethod = Utils.classForName("sun.misc.Cleaner").getMethod("clean")
buffer: DirectBuffer => {
// Careful to avoid the return type of .cleaner(), which changes with JDK
val cleaner: AnyRef = buffer.cleaner()
if (cleaner != null) {
cleanerMethod.invoke(cleaner)
}
}
}
/**
* Attempt to clean up a ByteBuffer if it is direct or memory-mapped. This uses an *unsafe* Sun
* API that will cause errors if one attempts to read from the disposed buffer. However, neither
* the bytes allocated to direct buffers nor file descriptors opened for memory-mapped buffers put
* pressure on the garbage collector. Waiting for garbage collection may lead to the depletion of
* off-heap memory or huge numbers of open files. There's unfortunately no standard API to
* manually dispose of these kinds of buffers.
*/
def dispose(buffer: ByteBuffer): Unit = {
if (buffer != null && buffer.isInstanceOf[MappedByteBuffer]) {
logTrace(s"Disposing of $buffer")
bufferCleaner(buffer.asInstanceOf[DirectBuffer])
}
}
/**
* Get the port used by the external shuffle service. In Yarn mode, this may already be
* set through the Hadoop configuration as the server is launched in the Yarn NM.
*/
def externalShuffleServicePort(conf: SparkConf): Int = {
val tmpPort = Utils.getSparkOrYarnConfig(conf, config.SHUFFLE_SERVICE_PORT.key,
config.SHUFFLE_SERVICE_PORT.defaultValueString).toInt
if (tmpPort == 0) {
// for testing, we set "spark.shuffle.service.port" to 0 in the yarn config, so yarn finds
// an open port. But we still need to tell our spark apps the right port to use. So
// only if the yarn config has the port set to 0, we prefer the value in the spark config
conf.get(config.SHUFFLE_SERVICE_PORT.key).toInt
} else {
tmpPort
}
}
}
| shuangshuangwang/spark | core/src/main/scala/org/apache/spark/storage/StorageUtils.scala | Scala | apache-2.0 | 10,561 |
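A minimal sketch of the intended use of dispose above: explicitly unmap a memory-mapped buffer instead of waiting for GC. The path is illustrative, and since StorageUtils is private[spark], this only compiles from inside an org.apache.spark package.

import java.io.RandomAccessFile
import java.nio.channels.FileChannel

val channel = new RandomAccessFile("/tmp/example.bin", "r").getChannel
val mapped = channel.map(FileChannel.MapMode.READ_ONLY, 0, channel.size())
// ... read from `mapped` ...
StorageUtils.dispose(mapped) // unmaps immediately via the cleaner selected above
channel.close()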
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.javaHighlighting.JavaHighlightingTestBase
import org.junit.experimental.categories.Category
/**
* @author Alefas
* @since 23/03/16
*/
@Category(Array(classOf[PerfCycleTests]))
class JavaHighlightingTest extends JavaHighlightingTestBase() {
def testSCL8982() = {
val scala =
"""
|object Foo {
| class Bar {
|
| }
|}
""".stripMargin
val java =
"""
|public class Main {
| public static void main(String[] args) {
| new Foo$Bar();
| }
|}
|
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "Main"))
}
def testSCL9663B() = {
val scala =
"""
|class Foo(val cell: String) extends AnyVal {
| def foo(x: Int) = 123
|}
""".stripMargin
val java =
"""
|public class Test {
| public static void main(String[] args) {
| Foo$ foo = Foo$.MODULE$;
|
| foo.foo$extension("text", 1);
| }
|}
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "Test"))
}
def testSCL6409() = {
val java =
"""
|public class JavaDummy<T> {
| public void method(JavaDummy<? super JavaDummy<?>> arg) {}
|}""".stripMargin
val scala =
"""
|class Inheritor extends JavaDummy[Int] {
| override def method(arg: JavaDummy[_ <: JavaDummy[_]]): Unit = super.method(arg)
|}""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL6114() = {
val scala =
"""
|package foo;
|
|package bar {
| class Test
|}
""".stripMargin
val java =
"""
|package foo;
|
|class A {
| public bar.Test something; // Test is red - cannot resolve symbol Test.
|}
""".stripMargin
assertNothing(errorsFromJavaCode(scala, java, "A"))
}
def testSCL8639(): Unit = {
val java =
"""
|public abstract class Java<S> {
| public abstract class JavaInner {
| abstract void foo(S arg);
| }
|}
|
""".stripMargin
val scala =
"""
|class Scala extends Java[String]{
| val s = new JavaInner {
| override def foo(arg: String): Unit = {}
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL8666(): Unit = {
val java =
"""
|import scala.Function0;
|import scala.Function1;
|
|import java.util.concurrent.Callable;
|import java.util.function.Function;
|
|public class Lambdas {
|
| public static <A> A doIt(Callable<A> f) {
| System.out.println("callable");
| try {
| return f.call();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static <A> A doIt(final Function0<A> f) {
| System.out.println("java_func");
| try {
| return f.apply();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static void doIt(Runnable f) {
| System.out.println("runnable");
| try {
| f.run();
| } catch (final Exception ex) {
| throw new RuntimeException(ex);
| }
| }
|
| public static void main(final String... args) {
| final Lambdas l = new Lambdas();
| Lambdas.doIt(() -> {
| int x = 3;
| });
| Lambdas.doIt(() -> 24);
| }
|}
""".stripMargin
assertNothing(errorsFromJavaCode("", java, "Lambdas"))
}
def testSCL10531(): Unit = {
val java =
"""
|public interface Id {
| static scala.Option<Id> unapply(Id id) {
| // Can't define this in Scala because the static forwarder in companion class
| // conflicts with the interface trait. Should be fixed in 2.12.
| // https://github.com/scala/scala-dev/issues/59
| if (id == NoId.instance()) {
| return scala.Option.empty();
| }
| return scala.Option.apply(id);
| }
|}
|
|class NoId {
| public static Id instance() {
| return null;
| }
|}
""".stripMargin
val scala =
"""
|class mc {
| NoId.instance() match {
| case Id(id) =>
| true
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
def testSCL10930() = {
val scala =
"""
| def testThis2(): Range[Integer] = {
| Range.between(1, 3)
| }
""".stripMargin
val java =
"""
|import java.util.Comparator;
|
|public class Range<T> {
|
| private Range(T element1, T element2, Comparator<T> comparator) {
| }
|
| public static <T extends Comparable<T>> Range<T> between(T fromInclusive, T toInclusive) {
| return between(fromInclusive, toInclusive, null);
| }
|
| public static <T> Range<T> between(T fromInclusive, T toInclusive, Comparator<T> comparator) {
| return new Range<T>(fromInclusive, toInclusive, comparator);
| }
|}
""".stripMargin
assertNothing(errorsFromScalaCode(scala, java))
}
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/failed/annotator/JavaHighlightingTest.scala | Scala | apache-2.0 | 6,045 |
package sc.ala.http.mock
import java.util.concurrent.ExecutionException
class RunnableSpec extends TestHelper {
val url : String = s"http://127.0.0.1:$testPort"
def ok(): Unit = assert(get(url).getStatusCode === 200)
def ng(): Unit = intercept[ExecutionException] { get(url) }
describe("Setting()") {
it("run in loan pattern") {
Setting(port = testPort).run { server =>
ok()
}
ng()
}
}
describe("HttpMock") {
it("run(port) in loan pattern") {
HttpMock.run(testPort) { server =>
ok()
}
ng()
}
}
}
| xuwei-k/http-mock | src/test/scala/sc/ala/http/mock/RunnableSpec.scala | Scala | mit | 586 |
package arx.core.rich
import arx.core.units.UnitOfMeasure
import arx.core.ArxImplicits
/**
* Created by IntelliJ IDEA.
* User: nvt
* Date: 3/4/13
* Time: 4:44 PM
* Created by nonvirtualthunk
*/
class ArxList[+T](intern : List[T]) {
def usum[U <: UnitOfMeasure[U]]( f : (T) => U )(implicit start : U) : U = { intern.map( v => f(v) ).foldLeft(start)( (a,b) => a + b ) }
def fsum ( f : (T) => Float ) : Float = { intern.foldLeft(0.0f) { (a,v) => a + f(v) } }
def isum ( f : (T) => Int ) : Int = { intern.foldLeft(0) { (a,v) => a + f(v) } }
def ofType [E <: AnyRef : Manifest] : List[E] = {
val erasure = manifest[E].erasure
// intern.filter ( i => erasure.isAssignableFrom(i.getClass) ).asInstanceOf[List[E]]
intern.collect { case e if ( manifest[E].erasure.isAssignableFrom(e.getClass) ) => e.asInstanceOf[E] }
}
def notOfType [E <: AnyRef : Manifest] : List[E] = {
intern.collect { case e if ( ! manifest[E].erasure.isAssignableFrom(e.getClass) ) => e.asInstanceOf[E] }
}
def findFirstWith[U] ( f : (T) => Option[U] ) : Option[(T,U)] = {
val i = intern.iterator
while ( i.hasNext ) {
val e = i.next()
f(e) match {
case Some(v) => return Some((e,v))
case _ =>
}
}
None
}
def without[U >: T] ( t : U ) : List[T] = intern.filterNot { e : T => e == t }
def firstOfType [E <: AnyRef : Manifest] : Option[E] = {
val iter = intern.iterator
while ( iter.hasNext ) {
val n = iter.next()
n match {
case e if ( manifest[E].erasure.isAssignableFrom(e.getClass) ) => { return Some(e.asInstanceOf[E]) }
case _ =>
}
}
None
// this.ofType[E].headOption
}
def forEachPair[U] ( func : (T,T) => U ) {
ArxImplicits.forEachPair(intern)(func)
}
}
| nonvirtualthunk/arx-core | src/main/scala/arx/core/rich/ArxList.scala | Scala | bsd-2-clause | 1,711 |
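An illustrative sketch of ofType and without from the class above, constructing the wrapper by hand (in normal use an implicit conversion from List presumably lives in ArxImplicits):

val xs = new ArxList[Any](List("a", 1, "b", 2.0))
val strings = xs.ofType[String] // List("a", "b")
val trimmed = xs.without(1)     // List("a", "b", 2.0)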
package org.ergoplatform
import org.ergoplatform.ErgoAddressEncoder.NetworkPrefix
import org.ergoplatform.settings.MonetarySettings
import sigmastate.SCollection.SByteArray
import sigmastate.Values._
import sigmastate.basics.DLogProtocol.ProveDlog
import sigmastate.eval.IRContext
import sigmastate.interpreter.CryptoConstants
import sigmastate.lang.Terms.ValueOps
import sigmastate.{SLong, _}
import sigmastate.lang.{TransformingSigmaBuilder, SigmaCompiler, CompilerSettings}
import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer
import sigmastate.utxo._
object ErgoScriptPredef {
import sigmastate.interpreter.Interpreter._
def compileWithCosting(env: ScriptEnv, code: String, networkPrefix: NetworkPrefix)(implicit IR: IRContext): Value[SType] = {
val compiler = new SigmaCompiler(networkPrefix)
val interProp = compiler.typecheck(env, code)
val IR.Pair(calcF, _) = IR.doCosting(env, interProp)
IR.buildTree(calcF)
}
/** Create ErgoTree with `false` proposition, which is never true.
* @param headerFlags ErgoTree header flags to be combined with default header
* @see ErgoTree.headerWithVersion()
*/
def FalseProp(headerFlags: Byte): ErgoTree = ErgoTree.withoutSegregation(headerFlags, FalseSigmaProp)
/** Create ErgoTree with `true` proposition, which is always true.
* @param headerFlags ErgoTree header flags to be combined with default header
* @see ErgoTree.headerWithVersion()
*/
def TrueProp(headerFlags: Byte): ErgoTree = ErgoTree.withoutSegregation(headerFlags, TrueSigmaProp)
/**
* Byte array value of the serialized reward output script proposition, with its pk substituted
* by the given pk
*
* @param delta - number of blocks for which miner should hold this box before spending it
* @param minerPkBytesVal - byte array val for pk to substitute in the reward script
*/
def expectedMinerOutScriptBytesVal(delta: Int, minerPkBytesVal: Value[SByteArray]): Value[SByteArray] = {
val genericPk = ProveDlog(CryptoConstants.dlogGroup.generator)
val genericMinerProp = rewardOutputScript(delta, genericPk)
val genericMinerPropBytes = DefaultSerializer.serializeErgoTree(genericMinerProp)
// first segregated constant is delta, so key is second constant
val positions = IntArrayConstant(Array[Int](1))
val minerPubkeySigmaProp = CreateProveDlog(DecodePoint(minerPkBytesVal))
val newVals = Values.ConcreteCollection(Array[SigmaPropValue](minerPubkeySigmaProp), SSigmaProp)
SubstConstants(genericMinerPropBytes, positions, newVals)
}
/**
* Required script of the box, that collects mining rewards
*/
def rewardOutputScript(delta: Int, minerPk: ProveDlog): ErgoTree = {
SigmaAnd(
GE(Height, Plus(boxCreationHeight(Self), IntConstant(delta))).toSigmaProp,
SigmaPropConstant(minerPk)
).treeWithSegregation
}
/**
* Proposition that allows sending coins to a box protected by the following proposition:
* prove dlog of the miner's public key, and height is at least `delta` blocks bigger than the current one.
*/
def feeProposition(delta: Int = 720): ErgoTree = {
val out = ByIndex(Outputs, IntConstant(0))
AND(
EQ(Height, boxCreationHeight(out)),
EQ(ExtractScriptBytes(out), expectedMinerOutScriptBytesVal(delta, MinerPubkey)),
EQ(SizeOf(Outputs), 1)
).toSigmaProp.treeWithSegregation
}
/**
* A contract that only allows collecting the emission reward by a box with the miner proposition.
*/
def emissionBoxProp(s: MonetarySettings): ErgoTree = {
val rewardOut = ByIndex(Outputs, IntConstant(0))
val minerOut = ByIndex(Outputs, IntConstant(1))
val minersReward = s.fixedRate - s.foundersInitialReward
val minersFixedRatePeriod = s.fixedRatePeriod + 2 * s.epochLength
val epoch = Plus(IntConstant(1), Divide(Minus(Height, IntConstant(s.fixedRatePeriod)), IntConstant(s.epochLength)))
val coinsToIssue = If(LT(Height, IntConstant(minersFixedRatePeriod)),
minersReward,
Minus(s.fixedRate, Multiply(s.oneEpochReduction, epoch.upcastTo(SLong)))
)
val sameScriptRule = EQ(ExtractScriptBytes(Self), ExtractScriptBytes(rewardOut))
val heightCorrect = EQ(boxCreationHeight(rewardOut), Height)
val heightIncreased = GT(Height, boxCreationHeight(Self))
val correctCoinsConsumed = EQ(coinsToIssue, Minus(ExtractAmount(Self), ExtractAmount(rewardOut)))
val lastCoins = LE(ExtractAmount(Self), s.oneEpochReduction)
val outputsNum = EQ(SizeOf(Outputs), 2)
val correctMinerOutput = AND(
EQ(ExtractScriptBytes(minerOut), expectedMinerOutScriptBytesVal(s.minerRewardDelay, MinerPubkey)),
EQ(Height, boxCreationHeight(minerOut))
)
AND(
heightIncreased,
correctMinerOutput,
OR(AND(outputsNum, sameScriptRule, correctCoinsConsumed, heightCorrect), lastCoins)
).toSigmaProp.treeWithSegregation
}
/**
* Script for Ergo foundation box.
* The script allows to spend a box, if:
* - first transaction output contains at least EmissionRules.remainingFoundationAtHeight coins in it
* - first transaction output is protected by this script
* - conditions from the first non-mandatory register (R4) are satisfied
*
* Thus, this script always controls the level of emission and does not allow taking
* more coins than prescribed by emission rules. In addition, it is protected by a
* custom proposition in R4, which is assumed to be a simple 2-of-3 multisignature with
* public keys of foundation members in the beginning. When foundation members spend
* this box, they are free to put any new proposition to the R4 register, thus they
* may add or remove members, or change it to something more complicated like
* `tokenThresholdScript`.
*/
def foundationScript(s: MonetarySettings): ErgoTree = {
// new output of the foundation
val newFoundationBox = ByIndex(Outputs, IntConstant(0))
// calculate number of coins, that are not issued yet and should be kept in `newFoundationBox`
// the same as Emission.remainingFoundationRewardAtHeight rewritten in Ergo script
val remainingAmount = {
val full15reward = (s.foundersInitialReward - 2 * s.oneEpochReduction) * s.epochLength
val full45reward = (s.foundersInitialReward - s.oneEpochReduction) * s.epochLength
val fixedRatePeriodMinus1: Int = s.fixedRatePeriod - 1
If(LT(Height, IntConstant(s.fixedRatePeriod)),
Plus(
LongConstant(full15reward + full45reward),
Multiply(s.foundersInitialReward, Upcast(Minus(fixedRatePeriodMinus1, Height), SLong))
),
If(LT(Height, IntConstant(s.fixedRatePeriod + s.epochLength)),
Plus(
full15reward,
Multiply(
s.foundersInitialReward - s.oneEpochReduction,
Upcast(Minus(fixedRatePeriodMinus1 + s.epochLength, Height), SLong)
)
),
If(LT(Height, IntConstant(s.fixedRatePeriod + 2 * s.epochLength)),
Multiply(
s.foundersInitialReward - 2 * s.oneEpochReduction,
Upcast(Minus(fixedRatePeriodMinus1 + 2 * s.epochLength, Height), SLong)
),
LongConstant(0)
)
)
)
}
// check, that `newFoundationBox` contains at least `remainingAmount`
val amountCorrect = GE(ExtractAmount(newFoundationBox), remainingAmount)
// check, that `newFoundationBox` have the same protecting script
val sameScriptRule = EQ(ExtractScriptBytes(Self), ExtractScriptBytes(newFoundationBox))
// check, that additional rules defined by foundation members are satisfied
val customProposition = DeserializeRegister(ErgoBox.R4, SSigmaProp)
// combine 3 conditions above with AND conjunction
SigmaAnd(amountCorrect.toSigmaProp, sameScriptRule.toSigmaProp, customProposition).treeWithSegregation
}
/**
* Creation height of a box
*/
def boxCreationHeight(box: Value[SBox.type]): Value[SInt.type] =
SelectField(ExtractCreationInfo(box), 1).asIntValue
/**
* Proposition of the box that may be spent by a transaction
* whose inputs contain at least `thresholdAmount` of the token with id `tokenId`.
* The logic of this script is the following:
* (v1) INPUTS.flatMap(box => box.tokens.filter(t => t._1 == tokenId).map(t => t._2)).sum >= thresholdAmount
* (v2) INPUTS.flatMap(box => box.tokens).filter(t => t._1 == tokenId).sum >= thresholdAmount
* (v3) INPUTS.map(box => box.tokens.find(t => t._1 == tokenId).map(t => t._2).getOrElse(0)).sum >= thresholdAmount
*/
def tokenThresholdScript(tokenId: Array[Byte], thresholdAmount: Long, networkPrefix: NetworkPrefix)
(implicit IR: IRContext): SigmaPropValue = {
val env = emptyEnv +
("tokenId" -> tokenId, "thresholdAmount" -> thresholdAmount)
val res = compileWithCosting(env,
"""{
| val sumValues = { (xs: Coll[Long]) => xs.fold(0L, { (acc: Long, amt: Long) => acc + amt }) }
|
| val tokenAmounts = INPUTS.map({ (box: Box) =>
| sumValues(box.tokens.map { (tokenPair: (Coll[Byte], Long)) =>
| val ourTokenAmount = if (tokenPair._1 == tokenId) tokenPair._2 else 0L
| ourTokenAmount
| })
| })
| val total = sumValues(tokenAmounts)
| sigmaProp(total >= thresholdAmount)
|}
""".stripMargin, networkPrefix)
res.asSigmaProp
}
}
| ScorexFoundation/sigmastate-interpreter | sigmastate/src/main/scala/org/ergoplatform/ErgoScriptPredef.scala | Scala | mit | 9,506 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import scala.reflect.ClassTag
// T is the type of the object that has a Boolean property to verify with an instance of this trait
// This is not a subtype of BeMatcher, because BeMatcher only works after "be", but
// BePropertyMatcher will work after "be", "be a", or "be an"
/**
* Trait extended by matcher objects, which may appear after the word <code>be</code>, that can match against a <code>Boolean</code>
* property. The match will succeed if and only if the <code>Boolean</code> property equals <code>true</code>.
* The object containing the property, which must be of the type specified by the <code>BePropertyMatcher</code>'s type
* parameter <code>T</code>, is passed to the <code>BePropertyMatcher</code>'s
* <code>apply</code> method. The result is a <code>BePropertyMatchResult</code>.
* A <code>BePropertyMatcher</code> is, therefore, a function from the specified type, <code>T</code>, to
* a <code>BePropertyMatchResult</code>.
*
* <p>
* Although <code>BePropertyMatcher</code>
* and <code>Matcher</code> represent similar concepts, they have no inheritance relationship
* because <code>Matcher</code> is intended for use right after <code>should</code> or <code>must</code>
* whereas <code>BePropertyMatcher</code> is intended for use right after <code>be</code>.
* </p>
*
* <p>
* A <code>BePropertyMatcher</code> essentially allows you to write statically typed <code>Boolean</code>
* property assertions similar to the dynamic ones that use symbols:
* </p>
*
* <pre class="stHighlight">
* tempFile should be a ('file) // dynamic: uses reflection
* tempFile should be a (file) // type safe: only works on Files; no reflection used
* </pre>
*
* <p>
* One good way to organize custom matchers is to place them inside one or more traits that
* you can then mix into the suites or specs that need them. Here's an example that
* includes two <code>BePropertyMatcher</code>s:
* </p>
*
* <pre class="stHighlight">
* trait CustomMatchers {
*
* class FileBePropertyMatcher extends BePropertyMatcher[java.io.File] {
* def apply(left: java.io.File) = BePropertyMatchResult(left.isFile, "file")
* }
*
* class DirectoryBePropertyMatcher extends BePropertyMatcher[java.io.File] {
* def apply(left: java.io.File) = BePropertyMatchResult(left.isDirectory, "directory")
* }
*
* val file = new FileBePropertyMatcher
* val directory = new DirectoryBePropertyMatcher
* }
* </pre>
*
* <p>
* Because the type parameter of these two <code>BePropertyMatcher</code>s is <code>java.io.File</code>, they
* can only be used with instances of that type. (The compiler will enforce this.) All they do is create a
* <code>BePropertyMatchResult</code> whose <code>matches</code> field is <code>true</code> if and only if the <code>Boolean</code> property
* is <code>true</code>. The second field, <code>propertyName</code>, is simply the string name of the property.
* The <code>file</code> and <code>directory</code> <code>val</code>s create variables that can be used in
* matcher expressions that test whether a <code>java.io.File</code> is a file or a directory. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class ExampleSpec extends RefSpec with Matchers with CustomMatchers {
*
* describe("A temp file") {
*
* it("should be a file, not a directory") {
*
* val tempFile = java.io.File.createTempFile("delete", "me")
*
* try {
* tempFile should be a (file)
* tempFile should not be a (directory)
* }
* finally {
* tempFile.delete()
* }
* }
* }
* }
* </pre>
*
* <p>
* These matches should succeed, but if for example the first match, <code>tempFile should be a (file)</code>, were to fail, you would get an error message like:
* </p>
*
* <pre class="stExamples">
* /tmp/delme1234me was not a file
* </pre>
*
* <p>
* For more information on <code>BePropertyMatchResult</code> and the meaning of its fields, please
* see the documentation for <a href="BePropertyMatchResult.html"><code>BePropertyMatchResult</code></a>. To understand why <code>BePropertyMatcher</code>
* is contravariant in its type parameter, see the section entitled "Matcher's variance" in the
* documentation for <a href="../Matcher.html"><code>Matcher</code></a>.
* </p>
*
* @author Bill Venners
*/
trait BePropertyMatcher[-T] extends Function1[T, BePropertyMatchResult] {
thisBePropertyMatcher =>
/**
* Check to see if a <code>Boolean</code> property on the specified object, <code>objectWithProperty</code>, matches its
* expected value, and report the result in
* the returned <code>BePropertyMatchResult</code>. The <code>objectWithProperty</code> is
* usually the value to the left of a <code>should</code> or <code>must</code> invocation. For example, <code>tempFile</code>
* would be passed as the <code>objectWithProperty</code> in:
*
* <pre class="stHighlight">
* tempFile should be a (file)
* </pre>
*
* @param objectWithProperty the object with the <code>Boolean</code> property against which to match
* @return the <code>BePropertyMatchResult</code> that represents the result of the match
*/
def apply(objectWithProperty: T): BePropertyMatchResult
/**
* Compose this <code>BePropertyMatcher</code> with the passed function, returning a new <code>BePropertyMatcher</code>.
*
* <p>
* This method overrides <code>compose</code> on <code>Function1</code> to
* return a more specific function type of <code>BePropertyMatcher</code>.
* </p>
*/
override def compose[U](g: U => T): BePropertyMatcher[U] =
new BePropertyMatcher[U] {
def apply(u: U) = thisBePropertyMatcher.apply(g(u))
}
}
/**
* Companion object for trait <code>BePropertyMatcher</code> that provides a
* factory method that creates a <code>BePropertyMatcher[T]</code> from a
* passed function of type <code>(T => BePropertyMatchResult)</code>.
*
* @author Bill Venners
*/
object BePropertyMatcher {
/**
* Factory method that creates a <code>BePropertyMatcher[T]</code> from a
* passed function of type <code>(T => BePropertyMatchResult)</code>.
*
* @author Bill Venners
*/
def apply[T](fun: T => BePropertyMatchResult)(implicit ev: ClassTag[T]): BePropertyMatcher[T] =
new BePropertyMatcher[T] {
def apply(left: T) = fun(left)
override def toString: String = "BePropertyMatcher[" + ev.runtimeClass.getName + "](" + ev.runtimeClass.getName + " => BePropertyMatchResult)"
}
}
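
// --- Hedged usage sketch (editor's addition, not part of ScalaTest) ---
// Demonstrates the factory method above; `FileMatchersExample` and
// `emptyFile` are illustrative names only.
private[matchers] object FileMatchersExample {
  val emptyFile: BePropertyMatcher[java.io.File] =
    BePropertyMatcher[java.io.File] { file =>
      BePropertyMatchResult(file.length == 0, "empty file")
    }

  // In a suite mixing in Matchers one could then write:
  //   tempFile should be an (emptyFile)
}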
|
dotty-staging/scalatest
|
scalatest/src/main/scala/org/scalatest/matchers/BePropertyMatcher.scala
|
Scala
|
apache-2.0
| 7,177 |
/*
* Copyright (c) 2021. StulSoft
*/
package com.stulsoft.poc.json.json4s.dwhsync
/**
* @author Yuriy Stul
*/
case class SyncStatus(status:String, description:String)
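
// --- Hedged usage sketch (editor's addition) ---
// Round-trips a SyncStatus through JSON with json4s, assuming json4s-native
// is on the classpath; the values are illustrative only.
object SyncStatusJsonExample extends App {
  import org.json4s.DefaultFormats
  import org.json4s.native.Serialization.{read, write}

  implicit val formats: DefaultFormats.type = DefaultFormats

  val json = write(SyncStatus("OK", "DWH sync finished"))
  println(json)                    // {"status":"OK","description":"DWH sync finished"}
  println(read[SyncStatus](json))  // SyncStatus(OK,DWH sync finished)
}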
|
ysden123/poc
|
scala-json/src/main/scala/com/stulsoft/poc/json/json4s/dwhsync/SyncStatus.scala
|
Scala
|
mit
| 173 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.catnap
import java.util.concurrent.Executors
import minitest.SimpleTestSuite
import cats.effect.IO
import cats.implicits._
import monix.execution.atomic.Atomic
import scala.concurrent.{CancellationException, ExecutionContext}
import scala.concurrent.duration._
object CatsEffectIssue380Suite extends SimpleTestSuite {
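  // Regression tests for typelevel/cats-effect#380: `put`/`release` must not run
  // a blocked consumer's continuation synchronously on the caller's thread,
  // otherwise the caller below would get trapped in the consumer's infinite loop.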
test("MVar does not block on put — typelevel/cats-effect#380") {
val service = Executors.newSingleThreadScheduledExecutor()
implicit val ec = ExecutionContext.global
implicit val cs = IO.contextShift(ec)
implicit val timer = IO.timer(ec, service)
try {
for (_ <- 0 until 10) {
val cancelLoop = Atomic(false)
val unit = IO {
if (cancelLoop.get()) throw new CancellationException
}
try {
val task = for {
mv <- MVar[IO].empty[Unit]()
_ <- (mv.take *> unit.foreverM).start
_ <- timer.sleep(100.millis)
_ <- mv.put(())
} yield ()
val dt = 10.seconds
assert(task.unsafeRunTimed(dt).nonEmpty, s"timed-out after $dt")
} finally {
cancelLoop := true
}
}
} finally {
service.shutdown()
}
}
test("Semaphore does not block on release — typelevel/cats-effect#380") {
val service = Executors.newSingleThreadScheduledExecutor()
implicit val ec = ExecutionContext.global
implicit val cs = IO.contextShift(ec)
implicit val timer = IO.timer(ec, service)
try {
for (_ <- 0 until 10) {
val cancelLoop = Atomic(false)
val unit = IO {
if (cancelLoop.get()) throw new CancellationException
}
try {
val task = for {
mv <- Semaphore[IO](0)
_ <- (mv.acquire *> unit.foreverM).start
_ <- timer.sleep(100.millis)
_ <- mv.release
} yield ()
val dt = 10.seconds
assert(task.unsafeRunTimed(dt).nonEmpty, s"timed-out after $dt")
} finally {
cancelLoop := true
}
}
} finally {
service.shutdown()
}
}
}
|
monifu/monifu
|
monix-catnap/jvm/src/test/scala/monix/catnap/CatsEffectIssue380Suite.scala
|
Scala
|
apache-2.0
| 2,808 |
package reductions
import org.scalameter._
import common._
object ParallelCountChangeRunner {
@volatile var seqResult = 0
@volatile var parResult = 0
val standardConfig = config(
Key.exec.minWarmupRuns -> 20,
Key.exec.maxWarmupRuns -> 40,
Key.exec.benchRuns -> 80,
Key.verbose -> true
) withWarmer(new Warmer.Default)
def main(args: Array[String]): Unit = {
val amount = 250
val coins = List(1, 2, 5, 10, 20, 50)
val seqtime = standardConfig measure {
seqResult = ParallelCountChange.countChange(amount, coins)
}
println(s"sequential result = $seqResult")
println(s"sequential count time: $seqtime ms")
def measureParallelCountChange(threshold: ParallelCountChange.Threshold): Unit = {
val fjtime = standardConfig measure {
parResult = ParallelCountChange.parCountChange(amount, coins, threshold)
}
println(s"parallel result = $parResult")
println(s"parallel count time: $fjtime ms")
println(s"speedup: ${seqtime / fjtime}")
}
measureParallelCountChange(ParallelCountChange.moneyThreshold(amount))
measureParallelCountChange(ParallelCountChange.totalCoinsThreshold(coins.length))
measureParallelCountChange(ParallelCountChange.combinedThreshold(amount, coins))
}
}
object ParallelCountChange {
/** Returns the number of ways change can be made from the specified list of
* coins for the specified amount of money.
*/
def countChange(money: Int, coins: List[Int]): Int = {
if (money == 0)
1
else if (money < 0 || coins.isEmpty)
0
else
countChange(money - coins.head, coins) + countChange(money, coins.tail)
}
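  // For example, countChange(4, List(1, 2)) == 3: 2+2, 2+1+1 and 1+1+1+1.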
type Threshold = (Int, List[Int]) => Boolean
/** In parallel, counts the number of ways change can be made from the
* specified list of coins for the specified amount of money.
*/
def parCountChange(money: Int, coins: List[Int], threshold: Threshold): Int = {
if (money < 0 || coins.isEmpty)
0
else if (threshold(money, coins))
countChange(money, coins)
else {
val (l, r) = parallel(parCountChange(money - coins.head, coins, threshold), parCountChange(money, coins.tail, threshold))
l + r
}
}
/** Threshold heuristic based on the starting money. */
def moneyThreshold(startingMoney: Int): Threshold =
(remainingMoney: Int, _) => {
remainingMoney <= ((startingMoney * 2) / 3)
}
/** Threshold heuristic based on the total number of initial coins. */
def totalCoinsThreshold(totalCoins: Int): Threshold =
(_, remainingCoins) => {
remainingCoins.length <= ((totalCoins * 2) / 3)
}
/** Threshold heuristic based on the starting money and the initial list of coins. */
def combinedThreshold(startingMoney: Int, allCoins: List[Int]): Threshold = {
(remainingMoney, remainingCoins) => {
(remainingMoney * remainingCoins.length) <= ((startingMoney * allCoins.length) / 2)
}
}
}
|
matija94/show-me-the-code
|
scala_practice/reductions/src/main/scala/reductions/ParallelCountChange.scala
|
Scala
|
mit
| 2,963 |
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
package net.adamcin.scalamojo
import org.apache.maven.project.MavenProject
import java.io.File
import org.apache.maven.plugin.descriptor.{Parameter, MojoDescriptor}
import scala.collection.JavaConverters._
import tools.nsc._
import doc.model.DocTemplateEntity
import tools.nsc.reporters._
import doc.{DocFactory, Universe, Settings}
import org.slf4j.LoggerFactory
import org.apache.maven.tools.plugin.PluginToolsRequest
import scala.Some
import reflect.internal.util.FakePos
/**
* Wraps a ScalaDoc-compiled model in a MojoDescriptor decorator function
* @since 0.6.0
* @author Mark Adamcin
*/
class ScalaDocExtractorCompiler(request: PluginToolsRequest) {
private val log = LoggerFactory.getLogger(getClass)
def decorate(universe: Option[Universe])(descriptor: MojoDescriptor): MojoDescriptor = {
if (descriptor.getGoal == "help") {
descriptor
} else {
def findClass(p: doc.model.Package, c: String): Option[doc.model.Class with DocTemplateEntity] = {
val dotIndex = c.indexOf(".")
if (dotIndex < 0) {
p.templates.find((t) => t.isClass && t.name == c) match {
case Some(entity) => Some(entity.asInstanceOf[doc.model.Class with DocTemplateEntity])
case None => None
}
} else {
val pName = c.substring(0, dotIndex)
p.packages.find((sp) => sp.name == pName) match {
case Some(sp) => findClass(sp, c.substring(dotIndex + 1, c.length))
case None => None
}
}
}
import ScalaDocStringer._
universe match {
case None => ()
case Some(u) => findClass(u.rootPackage, descriptor.getImplementation) match {
case None => ()
case Some(c) => {
// apparently, this causes a different injector to be looked up. Better stick with java.
//descriptor.setLanguage("scala")
getDeprecated(c) match {
case None => ()
case Some(deprecated) => descriptor.setDeprecated(deprecated)
}
getDescription(c.comment) match {
case None => ()
case Some(description) => descriptor.setDescription(description)
}
getSince(c.comment) match {
case None => ()
case Some(since) => descriptor.setSince(since)
}
val memberMap = c.members.map {
(entity) => (entity.name, entity)
}.toMap
descriptor.getParameters.asScala.map { _.asInstanceOf[Parameter] }.foreach {
(param: Parameter) => {
memberMap.get(param.getName) match {
case None => ()
case Some(member) => {
getDeprecated(member) match {
case None => ()
case Some(deprecated) => param.setDeprecated(deprecated)
}
val inheritor = commentInheritor(member)_
inheritor(getDescription) match {
case None => ()
case Some(description) => param.setDescription(description)
}
inheritor(getSince) match {
case None => ()
case Some(since) => param.setSince(since)
}
}
}
}
}
}
}
}
}
descriptor
}
def extractDescriptorDecorator(sourceFiles: List[String]): (MojoDescriptor) => (MojoDescriptor) = {
val project: Option[MavenProject] = Option(request.getProject)
def initialize: (Settings, Reporter) = {
var reporter: Reporter = null
val docSettings = new doc.Settings(msg => reporter.error(FakePos("scaladoc"), msg + "\\n scaladoc -help gives more information"))
docSettings.classpath.value = getClasspath(project)
docSettings.stopBefore.tryToSetColon(List("constructors"))
reporter = new MojoReporter(docSettings, quiet = true)
(docSettings, reporter)
}
val (settings, reporter) = initialize
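    // The trailing underscore below eta-expands `decorate(universe)` into a
    // standalone MojoDescriptor => MojoDescriptor function, so the ScalaDoc
    // universe is built once and reused for every descriptor.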
decorate(new DocFactory(reporter, settings).makeUniverse(Left(sourceFiles)))_
}
def getClasspath(p : Option[MavenProject]): String = {
val classpath = p match {
case None => ""
case Some(project) => {
val baseClasspath = project.getCompileClasspathElements.asScala.mkString(File.pathSeparator)
Option(project.getExecutionProject) match {
case Some(exProject) => {
if (exProject != project) getClasspath(Some(exProject)) else baseClasspath
}
case None => baseClasspath
}
}
}
classpath
}
}
|
adamcin/scalamojo-maven-plugin
|
src/main/scala/net/adamcin/scalamojo/ScalaDocExtractorCompiler.scala
|
Scala
|
unlicense
| 6,017 |
package protocgen
import scala.collection.JavaConverters._
import com.google.protobuf.Descriptors.FileDescriptor
import com.google.protobuf.compiler.PluginProtos
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest
import com.google.protobuf.DescriptorProtos.FileDescriptorProto
case class CodeGenRequest(
parameter: String,
filesToGenerate: Seq[FileDescriptor],
allProtos: Seq[FileDescriptor],
compilerVersion: Option[PluginProtos.Version],
asProto: CodeGeneratorRequest
)
object CodeGenRequest {
def apply(req: CodeGeneratorRequest) = {
val filesMap = fileDescriptorsByName(
req.getProtoFileList().asScala.toVector
)
new CodeGenRequest(
parameter = req.getParameter(),
filesToGenerate =
req.getFileToGenerateList().asScala.toVector.map(filesMap),
allProtos = filesMap.values.toVector,
compilerVersion =
if (req.hasCompilerVersion()) Some(req.getCompilerVersion()) else None,
req
)
}
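  /** Links raw FileDescriptorProtos into FileDescriptors in a single fold.
    * This works because protoc sends the files of a CodeGeneratorRequest in
    * topological order, so each file's dependencies are already in the
    * accumulator by the time the file itself is reached.
    */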
def fileDescriptorsByName(
fileProtos: Seq[FileDescriptorProto]
): Map[String, FileDescriptor] =
fileProtos.foldLeft[Map[String, FileDescriptor]](Map.empty) {
case (acc, fp) =>
val deps = fp.getDependencyList.asScala.map(acc)
acc + (fp.getName -> FileDescriptor.buildFrom(fp, deps.toArray))
}
}
|
scalapb/protoc-bridge
|
protoc-gen/src/main/scala/protocgen/CodeGenRequest.scala
|
Scala
|
apache-2.0
| 1,334 |
package uk.gov.gds.ier.transaction.crown.job
import uk.gov.gds.ier.validation.{ErrorTransformForm, ErrorMessages, FormKeys}
import play.api.data.Forms._
import uk.gov.gds.ier.model._
import scala.Some
import play.api.data.validation.{Invalid, Valid, Constraint}
import uk.gov.gds.ier.transaction.crown.InprogressCrown
trait JobForms extends JobConstraints {
self: FormKeys
with ErrorMessages =>
lazy val jobMapping = mapping(
keys.jobTitle.key -> optional(nonEmptyText),
keys.payrollNumber.key -> optional(nonEmptyText),
keys.govDepartment.key -> optional(nonEmptyText)
) (
(jobTitle, payrollNumber, govDepartment) => Job(jobTitle, payrollNumber, govDepartment)
) (
job => Some(job.jobTitle, job.payrollNumber, job.govDepartment)
) verifying jobTitleAndGovDepartmentRequired
val jobForm = ErrorTransformForm(
mapping(
keys.job.key -> optional(jobMapping)
) (
job => InprogressCrown(job = job)
) (
inprogressApplication => Some(inprogressApplication.job)
) verifying jobObjectRequired
)
}
trait JobConstraints {
self: ErrorMessages
with FormKeys =>
lazy val jobObjectRequired = Constraint[InprogressCrown](keys.job.key) {
application => application.job match {
case Some(job) => Valid
case None => Invalid(
"Please answer this question",
keys.job.jobTitle,
keys.job.payrollNumber,
keys.job.govDepartment
)
}
}
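  // A Job is only valid when jobTitle, payrollNumber and govDepartment are all
  // supplied together; any partially filled Job is rejected below with errors
  // keyed to the missing fields.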
lazy val jobTitleAndGovDepartmentRequired = Constraint[Job](keys.job.key) {
job => job match {
case Job(Some(jobTitle), None, None) =>
Invalid("Please answer this question",keys.payrollNumber, keys.govDepartment)
case Job(Some(jobTitle), None, Some(govDepartment)) =>
Invalid("Please answer this question",keys.payrollNumber)
case Job(None, Some(payrollNumber), None) =>
Invalid("Please answer this question",keys.job.jobTitle, keys.payrollNumber)
case Job(None, None, Some(govDepartment)) =>
Invalid("Please answer this question",keys.job.jobTitle, keys.payrollNumber)
case Job(None, Some(payrollNumber), Some(govDepartment)) =>
Invalid("Please answer this question",keys.job.jobTitle)
case Job(Some(jobTitle), Some(payrollNumber), None) =>
Invalid("Please answer this question",keys.job.govDepartment)
case _ => Valid
}
}
}
|
michaeldfallen/ier-frontend
|
app/uk/gov/gds/ier/transaction/crown/job/JobForms.scala
|
Scala
|
mit
| 2,385 |
package nest.sparkle.util
import nest.sparkle.util.OptionConversion._
import scala.reflect.runtime.universe._
import scala.util.Try
case class OrderingNotFound(msg: String) extends RuntimeException(msg)
/** @define ordering Recover a typed Ordering instance dynamically from a TypeTag.
* @define _end
*
* $ordering
*/
object RecoverOrdering {
/** mapping from typeTag to Ordering for standard types */
val standardOrderings: Map[TypeTag[_], Ordering[_]] = Map(
typeToOrdering[Double],
typeToOrdering[Long],
typeToOrdering[Int],
typeToOrdering[Short],
typeToOrdering[Char],
typeToOrdering[String]
)
/** return a mapping from a typetag to an Ordering */
private def typeToOrdering[T: TypeTag: Ordering]: (TypeTag[T], Ordering[T]) = {
typeTag[T] -> Ordering[T]
}
/** $ordering
*
 * The types that can be converted to Orderings are specified by the implicit `orderings` parameter.
* A standard set of Ordering conversions for built in types is in Implicits.standardOrderings.
*
* Throws OrderingNotFound if no Ordering is available
*/
def ordering[T](targetTag: TypeTag[_]) // format: OFF
(implicit orderings: Map[TypeTag[_], Ordering[_]] = standardOrderings)
: Ordering[T] = { // format: ON
val untypedOrdering = orderings.get(targetTag).getOrElse {
throw OrderingNotFound(targetTag.tpe.toString)
}
untypedOrdering.asInstanceOf[Ordering[T]]
}
def tryOrdering[T](targetTag: TypeTag[_]): Try[Ordering[T]] = {
val untyped = standardOrderings.get(targetTag).toTryOr(OrderingNotFound(targetTag.tpe.toString))
    untyped.asInstanceOf[Try[Ordering[T]]]
}
}
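
// --- Hedged usage sketch (editor's addition) ---
// Recovers an Ordering at runtime from a TypeTag; `sortDynamic` is an
// illustrative name, not part of the library.
object RecoverOrderingExample {
  def sortDynamic[T](values: Seq[T], tag: TypeTag[_]): Seq[T] =
    values.sorted(RecoverOrdering.ordering[T](tag))

  // sortDynamic(Seq(3, 1, 2), typeTag[Int]) == Seq(1, 2, 3)
}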
|
mighdoll/sparkle
|
util/src/main/scala/nest/sparkle/util/RecoverOrdering.scala
|
Scala
|
apache-2.0
| 1,677 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.actormonitor
import java.lang.management.ManagementFactory
import javax.management.MXBean
import akka.actor.{ActorContext, ActorRef, Props}
import org.squbs.unicomplex.JMX._
import scala.annotation.tailrec
import scala.util.{Failure, Success, Try}
private[actormonitor] object ActorMonitorBean {
val Pattern = "org.squbs.unicomplex:type=ActorMonitor,name="
val Total = Pattern + "*"
def registerBean(actor: ActorRef) (implicit monitorConfig: ActorMonitorConfig , context: ActorContext) = {
if (totalBeans.size < monitorConfig.maxActorCount)
register(new ActorMonitorBean(actor), objName(actor))
}
def totalBeans(implicit context: ActorContext) =
ManagementFactory.getPlatformMBeanServer.queryNames(prefix + Total, null)
def unregisterBean(actor: ActorRef) (implicit context: ActorContext) = unregister(objName(actor))
def objName(actor: ActorRef) (implicit context: ActorContext) = {
prefix + Pattern + actor.path.toString.split(s"${actor.path.root}").mkString("")
}
def getDescendant(actor: ActorRef) =
getPrivateValue(actor, Seq("children")).map(_.asInstanceOf[Iterable[ActorRef]].toSeq).getOrElse(Seq.empty[ActorRef])
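  // Walks a chain of zero-argument (possibly private) methods reflectively, e.g.
  // getPrivateValue(actor, Seq("actorCell", "numberOfMessages")) invokes
  // actor.actorCell.numberOfMessages, returning None as soon as a step fails.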
@tailrec
def getPrivateValue(obj: Any, methods: Seq[String]): Option[Any] =
methods.headOption match {
case None => Some(obj)
case Some(methodName) =>
Try {
val clazz = obj.getClass
clazz.getDeclaredMethod(methodName)
} recoverWith {
case e: Exception => Try {
val clazz = obj.getClass.getSuperclass
clazz.getDeclaredMethod(methodName)
}
} map { method =>
method.setAccessible(true)
method.invoke(obj)
} match {
case Failure(_) => None
case Success(nextObj) => getPrivateValue(nextObj, methods.tail)
}
}
}
@MXBean
private[actormonitor] trait ActorMonitorMXBean {
def getActor: String
def getClassName: String
def getRouteConfig : String
def getParent: String
def getChildren: String
def getDispatcher : String
def getMailBoxSize : String
}
private[actormonitor] class ActorMonitorBean(actor: ActorRef)(implicit monitorConfig: ActorMonitorConfig)
extends ActorMonitorMXBean {
import ActorMonitorBean._
def getActor = actor.toString()
def getClassName = props.map(_.actorClass().getCanonicalName).getOrElse("Error")
def getRouteConfig = props.map(_.routerConfig.toString).getOrElse("Error")
def getParent = getPrivateValue(actor, List("getParent")).map(_.toString).getOrElse("")
def getChildren = {
val children = getDescendant(actor)
import monitorConfig._
children.size match {
case count if count > maxChildrenDisplay => children.take(maxChildrenDisplay).mkString(",") + s"... total:$count"
case _ => children.mkString(",")
}
}
def getDispatcher = props.map(_.dispatcher).getOrElse("Error")
def getMailBoxSize =
actor.getClass.getName match {
case "akka.actor.RepointableActorRef" =>
getPrivateValue(actor, Seq("underlying", "numberOfMessages")).map(_.toString).getOrElse("N/A")
case clazz =>
getPrivateValue(actor, Seq("actorCell", "numberOfMessages")).map(_.toString).getOrElse("N/A")
}
lazy val props : Option[Props] =
actor.getClass.getName match {
case "akka.actor.LocalActorRef" =>
getPrivateValue(actor, Seq("actorCell","props")).map(_.asInstanceOf[Props])
case "akka.routing.RoutedActorRef" | "akka.actor.RepointableActorRef"=>
getPrivateValue(actor, Seq("props")).map(_.asInstanceOf[Props])
case c =>
None
}
}
@MXBean
private[actormonitor] trait ActorMonitorConfigMXBean {
def getCount : Int
def getMaxCount: Int
def getMaxChildrenDisplay: Int
}
private[actormonitor] class ActorMonitorConfigBean(config: ActorMonitorConfig, monitorActor: ActorRef,
implicit val context: ActorContext)
extends ActorMonitorConfigMXBean {
def getCount : Int = ActorMonitorBean.totalBeans.size()
def getMaxCount: Int = config.maxActorCount
def getMaxChildrenDisplay: Int = {
monitorActor ! "refresh"
config.maxChildrenDisplay
}
}
|
Harikiranvuyyuru/squbs
|
squbs-actormonitor/src/main/scala/org/squbs/actormonitor/ActorMonitorBean.scala
|
Scala
|
apache-2.0
| 4,812 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka
import com.typesafe.scalalogging.slf4j.Logging
import com.vividsolutions.jts.geom.Coordinate
import org.geotools.data._
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.factory.Hints
import org.geotools.geometry.jts.JTSFactoryFinder
import org.joda.time.DateTime
import org.junit.runner.RunWith
import org.locationtech.geomesa.kafka.ReplayTimeHelper.ff
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class KafkaDataStoreTest extends Specification with HasEmbeddedKafka with Logging {
sequential // this doesn't really need to be sequential, but we're trying to reduce zk load
val gf = JTSFactoryFinder.getGeometryFactory
val zkPath = "/geomesa/kafka/testds"
val producerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> true)
"KafkaDataSource" should {
import org.locationtech.geomesa.security._
val consumerParams = Map(
"brokers" -> brokerConnect,
"zookeepers" -> zkConnect,
"zkPath" -> zkPath,
"isProducer" -> false)
val consumerDS = DataStoreFinder.getDataStore(consumerParams)
val producerDS = DataStoreFinder.getDataStore(producerParams)
"consumerDS must not be null" >> { consumerDS must not beNull }
"producerDS must not be null" >> { producerDS must not beNull }
val schema = {
val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
KafkaDataStoreHelper.createStreamingSFT(sft, zkPath)
}
"allow schemas to be created" >> {
producerDS.createSchema(schema)
"and available in other data stores" >> {
consumerDS.getTypeNames.toList must contain("test")
}
ok
}
"allow schemas to be deleted" >> {
val replaySFT = KafkaDataStoreHelper.createReplaySFT(schema, ReplayConfig(10000L, 20000L, 1000L))
val name = replaySFT.getTypeName
consumerDS.createSchema(replaySFT)
consumerDS.getTypeNames.toList must contain(name)
consumerDS.removeSchema(name)
consumerDS.getTypeNames.toList must not(contain(name))
}
"allow features to be written" >> {
// create the consumerFC first so that it is ready to receive features from the producer
val consumerFC = consumerDS.getFeatureSource("test")
val store = producerDS.getFeatureSource("test").asInstanceOf[SimpleFeatureStore]
val fw = producerDS.getFeatureWriter("test", null, Transaction.AUTO_COMMIT)
val sf = fw.next()
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
sf.visibility = "USER|ADMIN"
fw.write()
Thread.sleep(2000)
"and read" >> {
val features = consumerFC.getFeatures.features()
features.hasNext must beTrue
val readSF = features.next()
sf.getID must be equalTo readSF.getID
sf.getAttribute("dtg") must be equalTo readSF.getAttribute("dtg")
sf.visibility mustEqual Some("USER|ADMIN")
store.removeFeatures(ff.id(ff.featureId("1")))
Thread.sleep(500) // ensure FC has seen the delete
consumerFC.getCount(Query.ALL) must be equalTo 0
}
"and updated" >> {
val updated = sf
updated.setAttribute("name", "jones")
updated.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
updated.visibility = "ADMIN"
store.addFeatures(DataUtilities.collection(updated))
Thread.sleep(500)
val q = ff.id(updated.getIdentifier)
val featureCollection = consumerFC.getFeatures(q)
featureCollection.size() must be equalTo 1
val res = featureCollection.features().next()
res.getAttribute("name") must be equalTo "jones"
res.visibility mustEqual Some("ADMIN")
}
"and cleared" >> {
store.removeFeatures(Filter.INCLUDE)
Thread.sleep(500)
consumerFC.getCount(Query.ALL) must be equalTo 0
val sf = fw.next()
sf.setAttributes(Array("smith", 30, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
fw.write()
Thread.sleep(500)
consumerFC.getCount(Query.ALL) must be equalTo 1
}
"and queried with cql" >> {
val sf = fw.next()
sf.setAttributes(Array("jones", 60, DateTime.now().toDate).asInstanceOf[Array[AnyRef]])
sf.setDefaultGeometry(gf.createPoint(new Coordinate(0.0, 0.0)))
sf.visibility = "USER"
fw.write()
Thread.sleep(500)
var res = consumerFC.getFeatures(ff.equals(ff.property("name"), ff.literal("jones")))
res.size() must be equalTo 1
val resSF = res.features().next()
resSF.getAttribute("name") must be equalTo "jones"
resSF.visibility mustEqual Some("USER")
res = consumerFC.getFeatures(ff.greater(ff.property("age"), ff.literal(50)))
res.size() must be equalTo 1
res.features().next().getAttribute("name") must be equalTo "jones"
// bbox and cql
val spatialQ = ff.bbox("geom", -10, -10, 10, 10, "EPSG:4326")
val attrQ = ff.greater(ff.property("age"), ff.literal(50))
res = consumerFC.getFeatures(ff.and(spatialQ, attrQ))
res.size() must be equalTo 1
res.features().next().getAttribute("name") must be equalTo "jones"
}
ok
}
"return correctly from canProcess" >> {
import KafkaDataStoreFactoryParams._
val factory = new KafkaDataStoreFactory
factory.canProcess(Map.empty[String, Serializable]) must beFalse
factory.canProcess(Map(KAFKA_BROKER_PARAM.key -> "test", ZOOKEEPERS_PARAM.key -> "test")) must beTrue
}
}
step {
shutdown()
}
}
|
giserh/geomesa
|
geomesa-kafka/geomesa-kafka-datastore/src/test/scala/org/locationtech/geomesa/kafka/KafkaDataStoreTest.scala
|
Scala
|
apache-2.0
| 6,554 |
package section3
import java.io.InputStreamReader
import java.security.MessageDigest
import java.util.Date
import sun.misc.BASE64Encoder
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.DurationInt
import scala.concurrent.{Await, Future}
/**
* In this example, we want to synchronize three futures
* in a more realistic scenario and show some traps.
*/
object Futures4 {
def main(args: Array[String]) {
/**
* This method returns a future of a character stream
*/
def loadResourceStream(resourceName: String): Future[Stream[Char]] = Future {
Thread.sleep(1000)
println("loadResourceStream: " + Thread.currentThread().getId + " : " + new Date)
val in = new InputStreamReader(this.getClass.getResourceAsStream(resourceName))
Stream.continually(in.read()).takeWhile(_ != -1).map(_.toChar)
}
/**
     * This method takes a character stream and returns a String future
     * containing the SHA-1 hash of the character stream.
*/
def calculateSHA1(content: Stream[Char]): Future[String] = Future {
Thread.sleep(2000)
println("calculateSHA1: " + Thread.currentThread().getId + " : " + new Date)
val hash = MessageDigest.getInstance("SHA1")
content.foreach(c => hash.update(c.toByte))
new BASE64Encoder().encode(hash.digest())
}
/**
* This method takes a character stream and returns a boolean future
* which contains the result of a virus check.
*/
def checkForVirus(content: Stream[Char]): Future[Boolean] = Future {
Thread.sleep(2000)
println("checkForVirus: " + Thread.currentThread().getId + " : " + new Date)
val definition = "virus"
val findVirus = (mayVirus: String, elem: Char) => {
if (definition.startsWith(mayVirus + elem)) {
mayVirus + elem
} else if(mayVirus == definition) {
mayVirus
} else {
""
}
}
content.foldLeft[String]("")(findVirus) == definition
}
/**
* As a result, we want to store out three future results in this container class.
*/
case class ResourceInfo(content: String, hash: String, containsVirus: Boolean)
/**
     * Let's consider how our futures can be evaluated:
* The hash and virus methods both needs the content stream. So the stream future must first be evaluated,
* before we can hash and check. With all three values we can build the ResourceInfo.
*
* +--> hash ---+
* resource --+ +--> ResourceInfo(...)
* +--> check --+
*/
/**
* As the last example showed, we can put all our futures into a for-comprehension
*
     * The stream future will be executed first,
     * then calculateSHA1 and checkForVirus can take the stream
     * and start their future calculations.
*
* At the end, we have all three values and put it into a ResourceInfo.
* The result will be a Future[ResourceInfo]
*
* Hmm... But wait. In my test run I get:
* loadResourceStream: 10 : 16:00:32 CET 2014
* calculateSHA1: 11 : 16:00:34 CET 2014
* checkForVirus: 10 : 16:00:36 CET 2014
*
     * Look at the seconds: the hash and check methods do not run in parallel, but sequentially.
     * What happened? Because the for comprehension is only syntactic sugar for flatMap and map, the calculation expands to:
loadResourceStream("test.txt").flatMap{stream =>
calculateSHA1(stream).map { hash =>
checkForVirus(stream).map { containsVirus =>
ResourceInfo(stream.mkString, hash, containsVirus)
}
}
}
     And there is our mistake: because of the cascaded mapping we invoke our methods sequentially and therefore run the futures sequentially.
*/
val resourceFuture = for {
stream <- loadResourceStream("test.txt")
hash <- calculateSHA1(stream)
containsVirus <- checkForVirus(stream)
} yield {
ResourceInfo(stream.mkString, hash, containsVirus)
}
/**
     * To run the hash and check futures in parallel, we first need the stream.
     * We then take the stream and create the two futures; they start immediately after the method call.
     *
     * These two futures are synchronized with a for comprehension.
     * As a result we again get a Future[ResourceInfo], but now with much better performance.
*/
val resourceFuture2 = loadResourceStream("test.txt").flatMap { stream =>
val hashFuture: Future[String] = calculateSHA1(stream)
val checkFuture: Future[Boolean] = checkForVirus(stream)
for {
hash <- hashFuture
containsVirus <- checkFuture
} yield {
ResourceInfo(stream.mkString, hash, containsVirus)
}
}
println(Await.result(resourceFuture, 8.seconds))
println(Await.result(resourceFuture2, 8.seconds))
}
}
|
DarkToast/scala-workshop
|
workshop/src/main/scala/section3/Futures4.scala
|
Scala
|
mit
| 4,997 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.responsiblepeople
import config.ApplicationConfig
import connectors.DataCacheConnector
import controllers.actions.SuccessfulAuthAction
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.mockito.MockitoSugar
import play.api.i18n.Messages
import play.api.inject.bind
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.Helpers._
import utils.{AmlsSpec, AuthAction, DependencyMocks}
class FitAndProperNoticeControllerSpec extends AmlsSpec with MockitoSugar with ScalaFutures {
val recordId = 1
trait Fixture extends DependencyMocks { self =>
val request = addToken(authRequest)
lazy val mockApplicationConfig = mock[ApplicationConfig]
lazy val defaultBuilder = new GuiceApplicationBuilder()
.disable[com.kenshoo.play.metrics.PlayModule]
.overrides(bind[AuthAction].to(SuccessfulAuthAction))
.overrides(bind[DataCacheConnector].to(mockCacheConnector))
.overrides(bind[ApplicationConfig].to(mockApplicationConfig))
val builder = defaultBuilder
lazy val app = builder.build()
lazy val controller = app.injector.instanceOf[FitAndProperNoticeController]
}
"FitAndProperNoticeController" when {
"get is called" must {
"display the notice page" in new Fixture {
val result = controller.get(recordId)(request)
status(result) must be(OK)
val page: Document = Jsoup.parse(contentAsString(result))
page.getElementsByClass("button")
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.title"))
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.text1"))
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.heading1"))
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.text2"))
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.heading2"))
page.body().html() must include(Messages("responsiblepeople.fit_and_proper.notice.text3"))
}
}
"continue button is clicked" must {
"redirect to Fit and Proper page" in new Fixture {
val result = controller.get(recordId)(request)
status(result) must be(OK)
val page: Document = Jsoup.parse(contentAsString(result))
page.getElementsMatchingOwnText(Messages("button.continue"))
.attr("href") mustBe routes.FitAndProperController.get(recordId).url
}
}
}
}
|
hmrc/amls-frontend
|
test/controllers/responsiblepeople/FitAndProperNoticeControllerSpec.scala
|
Scala
|
apache-2.0
| 3,180 |
/*
* Copyright 2010 Sanjiv Sahayam
* Licensed under the Apache License, Version 2.0
*/
package shortbread
import org.openqa.selenium.chrome.ChromeDriver
import org.openqa.selenium.firefox.FirefoxDriver
import org.openqa.selenium.firefox.internal.ProfilesIni
import org.openqa.selenium.ie.InternetExplorerDriver
object DefaultDrivers {
object DefaultChromeConfig extends DefaultConfig {
override def webDriver = NamedDriver("Chrome", withTimeouts(() => new ChromeDriver))
}
object DefaultFoxConfig extends DefaultConfig {
val profile = "default"
override def webDriver = NamedDriver("Firefox", withTimeouts(() => new FirefoxDriver(new ProfilesIni().getProfile(profile))))
}
object DefaultIEConfig extends DefaultConfig {
override def webDriver = NamedDriver("InternetExplorer", withTimeouts(() => new InternetExplorerDriver()))
}
}
|
ssanj/Shortbread
|
src/main/scala/DefaultDrivers.scala
|
Scala
|
apache-2.0
| 869 |
package yang.common
import akka.actor.Status.Failure
import akka.actor._
import akka.util.Timeout
import com.nsn.oss.nbi.corba.ManagedGenericIRPConstDefs.Method
import com.nsn.oss.nbi.corba.ManagedGenericIRPSystem.InvalidParameter
import com.nsn.oss.nbi.{IRPInfo, IRPInfoServiceInstance, Operation}
import org.mockito.Mockito
import org.mockito.Mockito._
import yang.Protocol.AlarmOptPtl.get_alarm_IRP_operations_profile_msg
import yang.{SupervisorTestActor, TestKitAndFunSuite}
import scala.concurrent.duration._
/**
* Created by y28yang on 1/31/2016.
*/
class VersionProfilesInfoActorTest2 extends TestKitAndFunSuite {
test("when receive none version profile should throw exception") {
val infoservice = Mockito.mock(classOf[IRPInfoServiceInstance])
when(infoservice.getIRPInfoById("AlarmIRP")).thenReturn(new IRPInfo("id", "idInNs"))
val supervisor = system.actorOf(Props[SupervisorTestActor], "supervisor")
supervisor ! Props(new VersionProfilesInfoActor(infoservice))
val profileChild = expectMsgType[ActorRef]
profileChild ! get_alarm_IRP_operations_profile_msg("v3")
expectMsgPF() {
case Failure(cause: InvalidParameter) =>
assert(true)
println("234")
case x => fail()
}
}
//}
//class VersionProfilesInfoActorTest3 extends TestKitAndFunSuite {
test("when receive getOperationProfile should return array[Mehod]") {
val infoservice = Mockito.mock(classOf[IRPInfoServiceInstance])
val irpinfo = new IRPInfo("id", "idInNs")
val operation = new Operation("get_version")
operation.getParameters.add("p1")
val operation2 = new Operation("get_profile")
operation2.getParameters.add("p2")
operation2.getParameters.add("p3")
irpinfo.getVersions.add("v3")
irpinfo.getOperations.add(operation)
irpinfo.getOperations.add(operation2)
when(infoservice.getIRPInfoById("AlarmIRP")).thenReturn(irpinfo)
implicit val timeout = Timeout(5 seconds)
val versionProfileInfoActor = system.actorOf(Props(new VersionProfilesInfoActor(infoservice)))
versionProfileInfoActor.tell(get_alarm_IRP_operations_profile_msg("v3"), testActor)
expectMsgPF() {
case retArray: Array[Method] =>
retArray.length should be(2)
retArray(0).name should be("get_version")
retArray(0).parameter_list.length should be(1)
retArray(0).parameter_list should be(Array("p1"))
retArray(1).name shouldBe "get_profile"
retArray(1).parameter_list shouldBe Array("p2", "p3")
println("123")
case _ => fail()
}
}
}
|
wjingyao2008/firsttry
|
NextGenAct/src/test/scala/yang/common/VersionProfilesInfoActorTest2.scala
|
Scala
|
apache-2.0
| 2,581 |
package text.kanji
/**
* @author K.Sakamoto
* Created on 2016/07/26
*/
object PrimarySchool1stGradeKanjiCharacter extends KanjiCharacter {
override val kanji: Seq[String] = readKanjiCSV("primary_school_1st_grade")
}
|
ktr-skmt/FelisCatusZero
|
src/main/scala/text/kanji/PrimarySchool1stGradeKanjiCharacter.scala
|
Scala
|
apache-2.0
| 233 |
package controllers
import play.api.data.Form
import play.api.data.Forms.mapping
import play.api.data.Forms.nonEmptyText
import play.api.mvc.Controller
object Login extends Controller {
case class UserLoginDTO(username: String, password: String) extends Serializable
val loginForm: Form[UserLoginDTO] = Form(
mapping(
"username" -> nonEmptyText,
"password" -> nonEmptyText(minLength = 6))(UserLoginDTO.apply)(UserLoginDTO.unapply))
def index = TODO
def submit = TODO
}
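
// --- Hedged usage sketch (editor's addition) ---
// Binds the form from raw data and folds over the result; `LoginFormExample`
// and the sample values are illustrative only.
object LoginFormExample {
  import Login.loginForm

  def demo(): Unit =
    loginForm.bind(Map("username" -> "ada", "password" -> "s3cret!")).fold(
      formWithErrors => println(s"invalid fields: ${formWithErrors.errors.map(_.key)}"),
      dto => println(s"welcome, ${dto.username}")
    )
}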
|
HackerSchool/Passport
|
app/controllers/Login.scala
|
Scala
|
bsd-3-clause
| 498 |
package frdomain.ch4
package patterns
import scala.language.higherKinds
import java.util.{ Date, Calendar }
import Monoid._
import scalaz.State
import State._
object States {
type AccountNo = String
type BS = Map[AccountNo, Balance]
val balances: BS = Map(
"a1" -> Balance(),
"a2" -> Balance(),
"a3" -> Balance(),
"a4" -> Balance(),
"a5" -> Balance()
)
def updateBalance(txns: List[Transaction]) = modify { (b: BS) =>
txns.foldLeft(b) { (a, txn) =>
implicitly[Monoid[BS]].op(a, Map(txn.accountNo -> Balance(txn.amount)))
}
}
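  // Each transaction's amount is folded into the running balance map through the
  // Map monoid, so balances for the same account are merged rather than replaced.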
case class Transaction(accountNo: AccountNo, amount: Money)
val txns: List[Transaction] = List(
Transaction("a1", Money(Map(USD -> BigDecimal(100)))),
Transaction("a2", Money(Map(USD -> BigDecimal(100)))),
Transaction("a1", Money(Map(INR -> BigDecimal(500000)))),
Transaction("a3", Money(Map(USD -> BigDecimal(100)))),
Transaction("a2", Money(Map(AUD -> BigDecimal(200))))
)
updateBalance(txns) run balances
}
|
debasishg/frdomain
|
src/main/scala/frdomain/ch4/patterns/State.scala
|
Scala
|
apache-2.0
| 1,019 |
package org.edla.tmdb.client
//import acyclic.file
import scala.language.postfixOps
import java.io.{File, FileOutputStream}
import java.net.URLEncoder
import scala.concurrent.{Await, Future}
import scala.concurrent.duration.{Duration, DurationInt, FiniteDuration, SECONDS}
import org.edla.tmdb.api.Protocol.{AuthenticateResult, Configuration, Credits, Error, Movie, Releases, Results}
import org.edla.tmdb.api.TmdbApi
import akka.actor.{ActorRef, ActorSystem}
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport.sprayJsonUnmarshaller
import akka.http.scaladsl.model.{HttpRequest, HttpResponse}
import akka.http.scaladsl.model.Uri.apply
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.pattern.ask
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, IOResult}
import akka.stream.scaladsl.{FileIO, Flow, Sink, Source}
import akka.util.Timeout
import java.util.concurrent.CountDownLatch
import akka.NotUsed
import akka.http.scaladsl.settings.ConnectionPoolSettings
import java.nio.file.Path
object TmdbClient {
def apply(ApiKey: String, Language: String = "en", tmdbTimeOut: FiniteDuration = 10 seconds): TmdbClient =
new TmdbClient(ApiKey, Language, tmdbTimeOut)
}
class TmdbClient(apiKey: String, language: String, tmdbTimeOut: FiniteDuration) extends TmdbApi {
private val ApiKey = s"api_key=${apiKey}"
private val Language = s"language=${language}"
private val MaxAvailableTokens = 10
// scalastyle:off magic.number
private val TokenRefreshPeriod = new FiniteDuration(5, SECONDS)
// scalastyle:on magic.number
private val TokenRefreshAmount = 10
private val Port = 80
implicit val system = ActorSystem()
implicit val executor = system.dispatcher
implicit val materializer = ActorMaterializer(
ActorMaterializerSettings(system)
.withInputBuffer(
initialSize = 1,
maxSize = 1
)
)
private implicit val timeout = Timeout(tmdbTimeOut)
val log = Logging(system, getClass)
log.info(s"TMDb timeout value is ${tmdbTimeOut}")
val limiterProps =
Limiter.props(MaxAvailableTokens, TokenRefreshPeriod, TokenRefreshAmount)
val limiter = system.actorOf(limiterProps, name = "testLimiter")
lazy val tmdbConnectionFlow: Flow[HttpRequest, HttpResponse, Future[Http.OutgoingConnection]] =
Http().outgoingConnection("api.themoviedb.org", Port)
val poolClientFlow =
Http().cachedHostConnectionPool[String]("api.themoviedb.org")
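  // Throttles any stream element-by-element: each element first asks the Limiter
  // actor for a token; mapAsync(1) preserves ordering while the ask is pending.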
def limitGlobal[T](limiter: ActorRef): Flow[T, T, NotUsed] = {
import akka.pattern.ask
import akka.util.Timeout
Flow[T].mapAsync(1)((element: T) => {
val limiterTriggerFuture = limiter ? Limiter.WantToPass
limiterTriggerFuture.map((_) => element)
})
}
def errorHandling(): Flow[HttpResponse, HttpResponse, NotUsed] = {
//Flow[HttpResponse].mapAsyncUnordered(4)(response => response)
Flow[HttpResponse].map { response =>
if (response.status.isSuccess) response
else {
val err = Unmarshal(response.entity).to[Error] map { e =>
if (e.status_code == 7) {
throw new InvalidApiKeyException(message = e.status_message, code = e.status_code)
} else {
throw TmdbException(message = e.status_message, code = e.status_code)
}
}
//TODO is it possible to avoid Await ?
Await.result(err, 1 seconds)
}
}
}
def tmdbRequest(request: HttpRequest): Future[HttpResponse] =
Source
.single(request)
.via(limitGlobal(limiter))
.via(tmdbConnectionFlow)
.via(errorHandling) runWith (Sink.head)
private lazy val baseUrl =
Await.result(getConfiguration(), tmdbTimeOut).images.base_url
//could not find implicit value for parameter um:
/* def generic[T](request: String): Future[T] = tmdbRequest(RequestBuilding.Get(request)).flatMap {
response ⇒ Unmarshal(response.entity).to[T]
}*/
def getConfiguration(): Future[Configuration] = {
tmdbRequest(RequestBuilding.Get(s"/3/configuration?${ApiKey}")).flatMap { response =>
Unmarshal(response.entity).to[Configuration]
}
}
def getToken(): Future[AuthenticateResult] =
tmdbRequest(RequestBuilding.Get(s"/3/authentication/token/new?${ApiKey}"))
.flatMap { response =>
Unmarshal(response.entity).to[AuthenticateResult]
}
def getMovie(id: Long): Future[Movie] = {
tmdbRequest(RequestBuilding.Get(s"/3/movie/${id}?${ApiKey}&${Language}"))
.flatMap { response =>
Unmarshal(response.entity).to[Movie]
}
}
def getCredits(id: Long): Future[Credits] = {
tmdbRequest(RequestBuilding.Get(s"/3/movie/${id}/credits?${ApiKey}&${Language}"))
.flatMap { response =>
Unmarshal(response.entity).to[Credits]
}
}
def getReleases(id: Long): Future[Releases] = {
tmdbRequest(RequestBuilding.Get(s"/3/movie/${id}/releases?${ApiKey}"))
.flatMap { response =>
Unmarshal(response.entity).to[Releases]
}
}
def searchMovie(query: String, page: Int): Future[Results] = {
tmdbRequest(RequestBuilding.Get(s"/3/search/movie?${ApiKey}&${Language}&page=${page}&query=${URLEncoder
.encode(query, "UTF-8")}")).flatMap { response =>
Unmarshal(response.entity).to[Results]
}
}
def shutdown(): Unit = {
Http().shutdownAllConnectionPools().onComplete { _ =>
system.terminate
Await.result(system.whenTerminated, Duration.Inf)
Limiter.system.terminate()
Await.result(Limiter.system.whenTerminated, Duration.Inf)
()
}
}
//http://stackoverflow.com/questions/34912143/how-to-download-a-http-resource-to-a-file-with-akka-streams-and-http
def downloadPoster(movie: Movie, path: Path): Option[Future[IOResult]] = {
val posterPath = movie.poster_path
val settings = ConnectionPoolSettings(system).withMaxOpenRequests(64)
if (posterPath.isDefined) {
val url = s"${baseUrl}w154${posterPath.get}"
val result: Future[HttpResponse] =
Http()
.singleRequest(HttpRequest(uri = url), settings = settings)
.mapTo[HttpResponse]
Some(result.flatMap { resp =>
val source = resp.entity.dataBytes
source.runWith(FileIO.toPath(path))
})
} else {
None
}
}
}
|
newca12/TMDb-async-client
|
src/main/scala/org/edla/tmdb/client/TmdbClient.scala
|
Scala
|
gpl-3.0
| 6,438 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.internal.forkJoin
import java.util.concurrent.ForkJoinTask
private[monix] final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
def setRawResult(u: Unit): Unit = ()
def getRawResult(): Unit = ()
def exec(): Boolean =
try {
runnable.run()
true
} catch {
case anything: Throwable =>
val t = Thread.currentThread
t.getUncaughtExceptionHandler match {
case null =>
case some => some.uncaughtException(t, anything)
}
throw anything
}
}
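
// --- Hedged usage sketch (editor's addition) ---
// Such a task can be submitted straight to a ForkJoinPool; kept as a comment
// because the class is private[monix]:
//   new java.util.concurrent.ForkJoinPool()
//     .execute(new AdaptedForkJoinTask(() => println("runs on the pool")))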
|
monix/monix
|
monix-execution/jvm/src/main/scala/monix/execution/internal/forkJoin/AdaptedForkJoinTask.scala
|
Scala
|
apache-2.0
| 1,248 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.{lang => jl}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckSuccess, TypeCheckFailure}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.NumberConverter
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
* A leaf expression specifically for math constants. Math constants expect no input.
*
* There is no code generation because they should get constant folded by the optimizer.
*
* @param c The math constant.
* @param name The short name of the function
*/
abstract class LeafMathExpression(c: Double, name: String)
extends LeafExpression with CodegenFallback {
override def dataType: DataType = DoubleType
override def foldable: Boolean = true
override def nullable: Boolean = false
override def toString: String = s"$name()"
override def eval(input: InternalRow): Any = c
}
/**
* A unary expression specifically for math functions. Math Functions expect a specific type of
* input format, therefore these functions extend `ExpectsInputTypes`.
* @param f The math function.
* @param name The short name of the function
*/
abstract class UnaryMathExpression(f: Double => Double, name: String)
extends UnaryExpression with Serializable with ImplicitCastInputTypes {
override def inputTypes: Seq[DataType] = Seq(DoubleType)
override def dataType: DataType = DoubleType
override def nullable: Boolean = true
override def toString: String = s"$name($child)"
protected override def nullSafeEval(input: Any): Any = {
f(input.asInstanceOf[Double])
}
// name of function in java.lang.Math
def funcName: String = name.toLowerCase
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, c => s"java.lang.Math.${funcName}($c)")
}
}
abstract class UnaryLogExpression(f: Double => Double, name: String)
extends UnaryMathExpression(f, name) {
// values less than or equal to yAsymptote eval to null in Hive, instead of NaN or -Infinity
protected val yAsymptote: Double = 0.0
protected override def nullSafeEval(input: Any): Any = {
val d = input.asInstanceOf[Double]
if (d <= yAsymptote) null else f(d)
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, c =>
s"""
if ($c <= $yAsymptote) {
${ev.isNull} = true;
} else {
${ev.primitive} = java.lang.Math.${funcName}($c);
}
"""
)
}
}
/**
* A binary expression specifically for math functions that take two `Double`s as input and returns
* a `Double`.
* @param f The math function.
* @param name The short name of the function
*/
abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
extends BinaryExpression with Serializable with ImplicitCastInputTypes {
override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)
override def toString: String = s"$name($left, $right)"
override def dataType: DataType = DoubleType
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
f(input1.asInstanceOf[Double], input2.asInstanceOf[Double])
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.${name.toLowerCase}($c1, $c2)")
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Leaf math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Euler's number. Note that there is no code generation because this is only
* evaluated by the optimizer during constant folding.
*/
case class EulerNumber() extends LeafMathExpression(math.E, "E")
/**
* Pi. Note that there is no code generation because this is only
* evaluated by the optimizer during constant folding.
*/
case class Pi() extends LeafMathExpression(math.Pi, "PI")
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Unary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
case class Acos(child: Expression) extends UnaryMathExpression(math.acos, "ACOS")
case class Asin(child: Expression) extends UnaryMathExpression(math.asin, "ASIN")
case class Atan(child: Expression) extends UnaryMathExpression(math.atan, "ATAN")
case class Cbrt(child: Expression) extends UnaryMathExpression(math.cbrt, "CBRT")
case class Ceil(child: Expression) extends UnaryMathExpression(math.ceil, "CEIL")
case class Cos(child: Expression) extends UnaryMathExpression(math.cos, "COS")
case class Cosh(child: Expression) extends UnaryMathExpression(math.cosh, "COSH")
/**
* Convert a num from one base to another
* @param numExpr the number to be converted
* @param fromBaseExpr from which base
* @param toBaseExpr to which base
*/
case class Conv(numExpr: Expression, fromBaseExpr: Expression, toBaseExpr: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
override def children: Seq[Expression] = Seq(numExpr, fromBaseExpr, toBaseExpr)
override def inputTypes: Seq[AbstractDataType] = Seq(StringType, IntegerType, IntegerType)
override def dataType: DataType = StringType
override def nullSafeEval(num: Any, fromBase: Any, toBase: Any): Any = {
NumberConverter.convert(
num.asInstanceOf[UTF8String].getBytes,
fromBase.asInstanceOf[Int],
toBase.asInstanceOf[Int])
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val numconv = NumberConverter.getClass.getName.stripSuffix("$")
nullSafeCodeGen(ctx, ev, (num, from, to) =>
s"""
${ev.primitive} = $numconv.convert($num.getBytes(), $from, $to);
if (${ev.primitive} == null) {
${ev.isNull} = true;
}
"""
)
}
}
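// For example, Conv(Literal("100"), Literal(2), Literal(10)) evaluates to "4",
// mirroring Hive's conv() UDF.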
case class Exp(child: Expression) extends UnaryMathExpression(math.exp, "EXP")
case class Expm1(child: Expression) extends UnaryMathExpression(math.expm1, "EXPM1")
case class Floor(child: Expression) extends UnaryMathExpression(math.floor, "FLOOR")
object Factorial {
def factorial(n: Int): Long = {
if (n < factorials.length) factorials(n) else Long.MaxValue
}
private val factorials: Array[Long] = Array[Long](
1,
1,
2,
6,
24,
120,
720,
5040,
40320,
362880,
3628800,
39916800,
479001600,
6227020800L,
87178291200L,
1307674368000L,
20922789888000L,
355687428096000L,
6402373705728000L,
121645100408832000L,
2432902008176640000L
)
}
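// Hedged sanity checks for the lookup table above: values 0..20 come straight
// from the table, anything larger saturates to Long.MaxValue (the expression
// itself returns null for inputs outside [0, 20]).
object FactorialSketch extends App {
  assert(Factorial.factorial(5) == 120L)
  assert(Factorial.factorial(20) == 2432902008176640000L)
  assert(Factorial.factorial(21) == Long.MaxValue) // saturates past the table
}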
case class Factorial(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[DataType] = Seq(IntegerType)
override def dataType: DataType = LongType
  // If the value is not in the range [0, 20], the result will be null, so mark this as nullable.
override def nullable: Boolean = true
protected override def nullSafeEval(input: Any): Any = {
val value = input.asInstanceOf[jl.Integer]
if (value > 20 || value < 0) {
null
} else {
Factorial.factorial(value)
}
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, eval => {
s"""
if ($eval > 20 || $eval < 0) {
${ev.isNull} = true;
} else {
${ev.primitive} =
org.apache.spark.sql.catalyst.expressions.Factorial.factorial($eval);
}
"""
})
}
}
case class Log(child: Expression) extends UnaryLogExpression(math.log, "LOG")
case class Log2(child: Expression)
extends UnaryLogExpression((x: Double) => math.log(x) / math.log(2), "LOG2") {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, c =>
s"""
if ($c <= $yAsymptote) {
${ev.isNull} = true;
} else {
${ev.primitive} = java.lang.Math.log($c) / java.lang.Math.log(2);
}
"""
)
}
}
case class Log10(child: Expression) extends UnaryLogExpression(math.log10, "LOG10")
case class Log1p(child: Expression) extends UnaryLogExpression(math.log1p, "LOG1P") {
protected override val yAsymptote: Double = -1.0
}
case class Rint(child: Expression) extends UnaryMathExpression(math.rint, "ROUND") {
override def funcName: String = "rint"
}
case class Signum(child: Expression) extends UnaryMathExpression(math.signum, "SIGNUM")
case class Sin(child: Expression) extends UnaryMathExpression(math.sin, "SIN")
case class Sinh(child: Expression) extends UnaryMathExpression(math.sinh, "SINH")
case class Sqrt(child: Expression) extends UnaryMathExpression(math.sqrt, "SQRT")
case class Tan(child: Expression) extends UnaryMathExpression(math.tan, "TAN")
case class Tanh(child: Expression) extends UnaryMathExpression(math.tanh, "TANH")
case class ToDegrees(child: Expression) extends UnaryMathExpression(math.toDegrees, "DEGREES") {
override def funcName: String = "toDegrees"
}
case class ToRadians(child: Expression) extends UnaryMathExpression(math.toRadians, "RADIANS") {
override def funcName: String = "toRadians"
}
case class Bin(child: Expression)
extends UnaryExpression with Serializable with ImplicitCastInputTypes {
override def inputTypes: Seq[DataType] = Seq(LongType)
override def dataType: DataType = StringType
protected override def nullSafeEval(input: Any): Any =
UTF8String.fromString(jl.Long.toBinaryString(input.asInstanceOf[Long]))
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (c) =>
s"UTF8String.fromString(java.lang.Long.toBinaryString($c))")
}
}
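// For reference: java.lang.Long.toBinaryString(13L) == "1101"; negative inputs
// use the 64-bit two's-complement form, so -1L yields a string of 64 ones.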
object Hex {
val hexDigits = Array[Char](
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
).map(_.toByte)
// lookup table to translate '0' -> 0 ... 'F'/'f' -> 15
val unhexDigits = {
val array = Array.fill[Byte](128)(-1)
(0 to 9).foreach(i => array('0' + i) = i.toByte)
(0 to 5).foreach(i => array('A' + i) = (i + 10).toByte)
(0 to 5).foreach(i => array('a' + i) = (i + 10).toByte)
array
}
def hex(bytes: Array[Byte]): UTF8String = {
val length = bytes.length
val value = new Array[Byte](length * 2)
var i = 0
while (i < length) {
value(i * 2) = Hex.hexDigits((bytes(i) & 0xF0) >> 4)
value(i * 2 + 1) = Hex.hexDigits(bytes(i) & 0x0F)
i += 1
}
UTF8String.fromBytes(value)
}
def hex(num: Long): UTF8String = {
// Extract the hex digits of num into value[] from right to left
val value = new Array[Byte](16)
var numBuf = num
var len = 0
do {
len += 1
value(value.length - len) = Hex.hexDigits((numBuf & 0xF).toInt)
numBuf >>>= 4
} while (numBuf != 0)
UTF8String.fromBytes(java.util.Arrays.copyOfRange(value, value.length - len, value.length))
}
def unhex(bytes: Array[Byte]): Array[Byte] = {
val out = new Array[Byte]((bytes.length + 1) >> 1)
var i = 0
if ((bytes.length & 0x01) != 0) {
// padding with '0'
if (bytes(0) < 0) {
return null
}
val v = Hex.unhexDigits(bytes(0))
if (v == -1) {
return null
}
out(0) = v
i += 1
}
    // every two input characters form one output byte.
while (i < bytes.length) {
if (bytes(i) < 0 || bytes(i + 1) < 0) {
return null
}
val first = Hex.unhexDigits(bytes(i))
val second = Hex.unhexDigits(bytes(i + 1))
if (first == -1 || second == -1) {
return null
}
out(i / 2) = (((first << 4) | second) & 0xFF).toByte
i += 2
}
out
}
}
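// Hedged round-trip checks for the helpers above (UTF8String is already in
// scope via this file's imports):
object HexSketch extends App {
  assert(Hex.hex(255L).toString == "FF")
  assert(Hex.hex("AB".getBytes("UTF-8")).toString == "4142")
  assert(new String(Hex.unhex("4142".getBytes("UTF-8")), "UTF-8") == "AB")
}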
/**
* If the argument is an INT or binary, hex returns the number as a STRING in hexadecimal format.
* Otherwise if the number is a STRING, it converts each character into its hex representation
 * and returns the resulting STRING. Negative numbers are treated as two's complement.
*/
case class Hex(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(LongType, BinaryType, StringType))
override def dataType: DataType = StringType
protected override def nullSafeEval(num: Any): Any = child.dataType match {
case LongType => Hex.hex(num.asInstanceOf[Long])
case BinaryType => Hex.hex(num.asInstanceOf[Array[Byte]])
case StringType => Hex.hex(num.asInstanceOf[UTF8String].getBytes)
}
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (c) => {
val hex = Hex.getClass.getName.stripSuffix("$")
s"${ev.primitive} = " + (child.dataType match {
case StringType => s"""$hex.hex($c.getBytes());"""
case _ => s"""$hex.hex($c);"""
})
})
}
}
/**
* Performs the inverse operation of HEX.
* Resulting characters are returned as a byte array.
*/
case class Unhex(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] = Seq(StringType)
override def nullable: Boolean = true
override def dataType: DataType = BinaryType
protected override def nullSafeEval(num: Any): Any =
Hex.unhex(num.asInstanceOf[UTF8String].getBytes)
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
nullSafeCodeGen(ctx, ev, (c) => {
val hex = Hex.getClass.getName.stripSuffix("$")
s"""
${ev.primitive} = $hex.unhex($c.getBytes());
${ev.isNull} = ${ev.primitive} == null;
"""
})
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Binary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
case class Atan2(left: Expression, right: Expression)
extends BinaryMathExpression(math.atan2, "ATAN2") {
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    // With codegen, -0.0 and 0.0 yield different results; adding +0.0 normalizes -0.0 to 0.0.
math.atan2(input1.asInstanceOf[Double] + 0.0, input2.asInstanceOf[Double] + 0.0)
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.atan2($c1 + 0.0, $c2 + 0.0)")
}
}
case class Pow(left: Expression, right: Expression)
extends BinaryMathExpression(math.pow, "POWER") {
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.pow($c1, $c2)")
}
}
/**
 * Bitwise left shift.
 * @param left the base number to shift.
 * @param right number of bits to left shift.
 */
case class ShiftLeft(left: Expression, right: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(IntegerType, LongType), IntegerType)
override def dataType: DataType = left.dataType
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
input1 match {
case l: jl.Long => l << input2.asInstanceOf[jl.Integer]
case i: jl.Integer => i << input2.asInstanceOf[jl.Integer]
}
}
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (left, right) => s"$left << $right")
}
}
/**
 * Bitwise (signed) right shift.
 * @param left the base number to shift.
 * @param right number of bits to right shift.
 */
case class ShiftRight(left: Expression, right: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(IntegerType, LongType), IntegerType)
override def dataType: DataType = left.dataType
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
input1 match {
case l: jl.Long => l >> input2.asInstanceOf[jl.Integer]
case i: jl.Integer => i >> input2.asInstanceOf[jl.Integer]
}
}
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (left, right) => s"$left >> $right")
}
}
/**
* Bitwise unsigned right shift, for integer and long data type.
* @param left the base number.
* @param right the number of bits to right shift.
*/
case class ShiftRightUnsigned(left: Expression, right: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(IntegerType, LongType), IntegerType)
override def dataType: DataType = left.dataType
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
input1 match {
case l: jl.Long => l >>> input2.asInstanceOf[jl.Integer]
case i: jl.Integer => i >>> input2.asInstanceOf[jl.Integer]
}
}
override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
defineCodeGen(ctx, ev, (left, right) => s"$left >>> $right")
}
}
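// Hedged illustration of the three shift expressions above, via the identical
// JVM operators their generated code uses:
object ShiftSketch extends App {
  assert((1 << 3) == 8)            // ShiftLeft
  assert((-8 >> 1) == -4)          // ShiftRight: arithmetic, sign-extending
  assert((-8 >>> 1) == 2147483644) // ShiftRightUnsigned: logical, zero-filling (Int)
}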
case class Hypot(left: Expression, right: Expression)
extends BinaryMathExpression(math.hypot, "HYPOT")
/**
* Computes the logarithm of a number.
* @param left the logarithm base, default to e.
* @param right the number to compute the logarithm of.
*/
case class Logarithm(left: Expression, right: Expression)
extends BinaryMathExpression((c1, c2) => math.log(c2) / math.log(c1), "LOG") {
/**
* Natural log, i.e. using e as the base.
*/
def this(child: Expression) = {
this(EulerNumber(), child)
}
protected override def nullSafeEval(input1: Any, input2: Any): Any = {
val dLeft = input1.asInstanceOf[Double]
val dRight = input2.asInstanceOf[Double]
// Unlike Hive, we support Log base in (0.0, 1.0]
if (dLeft <= 0.0 || dRight <= 0.0) null else math.log(dRight) / math.log(dLeft)
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
if (left.isInstanceOf[EulerNumber]) {
nullSafeCodeGen(ctx, ev, (c1, c2) =>
s"""
if ($c2 <= 0.0) {
${ev.isNull} = true;
} else {
${ev.primitive} = java.lang.Math.log($c2);
}
""")
} else {
nullSafeCodeGen(ctx, ev, (c1, c2) =>
s"""
if ($c1 <= 0.0 || $c2 <= 0.0) {
${ev.isNull} = true;
} else {
${ev.primitive} = java.lang.Math.log($c2) / java.lang.Math.log($c1);
}
""")
}
}
}
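// Hedged sketch of the change-of-base rule Logarithm evaluates,
// log_b(x) = ln(x) / ln(b); note that bases in (0.0, 1.0) are allowed here:
object LogarithmSketch extends App {
  val eps = 1e-9
  assert(math.abs(math.log(8) / math.log(2) - 3.0) < eps)   // LOG(2, 8) = 3
  assert(math.abs(math.log(8) / math.log(0.5) + 3.0) < eps) // LOG(0.5, 8) = -3
}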
/**
 * Round the `child`'s result to `scale` decimal places when `scale` >= 0,
 * or round the integral part when `scale` < 0.
 * For example, round(31.415, 2) = 31.42 and round(31.415, -1) = 30.
 *
 * A child of IntegralType rounds to itself when `scale` >= 0.
 * A child of FractionalType whose value is NaN or Infinite always rounds to itself.
 *
 * Round's dataType always equals the `child`'s dataType, except for DecimalType,
 * where the scale may decrease relative to the original DecimalType.
 *
 * @param child the expression to be rounded; any [[NumericType]] is allowed as input
 * @param scale the scale to round to; this must be a constant int at runtime
 */
case class Round(child: Expression, scale: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
import BigDecimal.RoundingMode.HALF_UP
def this(child: Expression) = this(child, Literal(0))
override def left: Expression = child
override def right: Expression = scale
  // rounding a Decimal evaluates to null if `changePrecision` fails
override def nullable: Boolean = true
override def foldable: Boolean = child.foldable
override lazy val dataType: DataType = child.dataType match {
// if the new scale is bigger which means we are scaling up,
// keep the original scale as `Decimal` does
case DecimalType.Fixed(p, s) => DecimalType(p, if (_scale > s) s else _scale)
case t => t
}
override def inputTypes: Seq[AbstractDataType] = Seq(NumericType, IntegerType)
override def checkInputDataTypes(): TypeCheckResult = {
super.checkInputDataTypes() match {
case TypeCheckSuccess =>
if (scale.foldable) {
TypeCheckSuccess
} else {
TypeCheckFailure("Only foldable Expression is allowed for scale arguments")
}
case f => f
}
}
  // Avoid repeated evaluation since `scale` is a constant int; also avoid
  // unnecessary `child` evaluation in both the codegen and non-codegen paths
  // by checking whether scaleV == null first.
private lazy val scaleV: Any = scale.eval(EmptyRow)
private lazy val _scale: Int = scaleV.asInstanceOf[Int]
override def eval(input: InternalRow): Any = {
if (scaleV == null) { // if scale is null, no need to eval its child at all
null
} else {
val evalE = child.eval(input)
if (evalE == null) {
null
} else {
nullSafeEval(evalE)
}
}
}
// not overriding since _scale is a constant int at runtime
def nullSafeEval(input1: Any): Any = {
child.dataType match {
case _: DecimalType =>
val decimal = input1.asInstanceOf[Decimal]
if (decimal.changePrecision(decimal.precision, _scale)) decimal else null
case ByteType =>
BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, HALF_UP).toByte
case ShortType =>
BigDecimal(input1.asInstanceOf[Short]).setScale(_scale, HALF_UP).toShort
case IntegerType =>
BigDecimal(input1.asInstanceOf[Int]).setScale(_scale, HALF_UP).toInt
case LongType =>
BigDecimal(input1.asInstanceOf[Long]).setScale(_scale, HALF_UP).toLong
case FloatType =>
val f = input1.asInstanceOf[Float]
if (f.isNaN || f.isInfinite) {
f
} else {
BigDecimal(f).setScale(_scale, HALF_UP).toFloat
}
case DoubleType =>
val d = input1.asInstanceOf[Double]
if (d.isNaN || d.isInfinite) {
d
} else {
BigDecimal(d).setScale(_scale, HALF_UP).toDouble
}
}
}
override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
val ce = child.gen(ctx)
val evaluationCode = child.dataType match {
case _: DecimalType =>
s"""
if (${ce.primitive}.changePrecision(${ce.primitive}.precision(), ${_scale})) {
${ev.primitive} = ${ce.primitive};
} else {
${ev.isNull} = true;
}"""
case ByteType =>
if (_scale < 0) {
s"""
${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).byteValue();"""
} else {
s"${ev.primitive} = ${ce.primitive};"
}
case ShortType =>
if (_scale < 0) {
s"""
${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).shortValue();"""
} else {
s"${ev.primitive} = ${ce.primitive};"
}
case IntegerType =>
if (_scale < 0) {
s"""
${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).intValue();"""
} else {
s"${ev.primitive} = ${ce.primitive};"
}
case LongType =>
if (_scale < 0) {
s"""
${ev.primitive} = new java.math.BigDecimal(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).longValue();"""
} else {
s"${ev.primitive} = ${ce.primitive};"
}
case FloatType => // if child eval to NaN or Infinity, just return it.
if (_scale == 0) {
s"""
if (Float.isNaN(${ce.primitive}) || Float.isInfinite(${ce.primitive})){
${ev.primitive} = ${ce.primitive};
} else {
${ev.primitive} = Math.round(${ce.primitive});
}"""
} else {
s"""
if (Float.isNaN(${ce.primitive}) || Float.isInfinite(${ce.primitive})){
${ev.primitive} = ${ce.primitive};
} else {
${ev.primitive} = java.math.BigDecimal.valueOf(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).floatValue();
}"""
}
case DoubleType => // if child eval to NaN or Infinity, just return it.
if (_scale == 0) {
s"""
if (Double.isNaN(${ce.primitive}) || Double.isInfinite(${ce.primitive})){
${ev.primitive} = ${ce.primitive};
} else {
${ev.primitive} = Math.round(${ce.primitive});
}"""
} else {
s"""
if (Double.isNaN(${ce.primitive}) || Double.isInfinite(${ce.primitive})){
${ev.primitive} = ${ce.primitive};
} else {
${ev.primitive} = java.math.BigDecimal.valueOf(${ce.primitive}).
setScale(${_scale}, java.math.BigDecimal.ROUND_HALF_UP).doubleValue();
}"""
}
}
if (scaleV == null) { // if scale is null, no need to eval its child at all
s"""
boolean ${ev.isNull} = true;
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
"""
} else {
s"""
${ce.code}
boolean ${ev.isNull} = ${ce.isNull};
${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
if (!${ev.isNull}) {
$evaluationCode
}
"""
}
}
}
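// Hedged sketch of the HALF_UP semantics used above. The String constructor
// keeps 31.415 exact; a Double literal carries binary error (31.41499...) and
// would round down at scale 2.
object RoundSketch extends App {
  import BigDecimal.RoundingMode.HALF_UP
  assert(BigDecimal("31.415").setScale(2, HALF_UP) == BigDecimal("31.42"))
  assert(BigDecimal("31.415").setScale(-1, HALF_UP) == BigDecimal(30))
}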
|
ArvinDevel/onlineAggregationOnSparkV2
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
|
Scala
|
apache-2.0
| 27,334 |
package com.coiney.akka.mailer.providers
import com.coiney.akka.mailer.EmailException
import com.coiney.akka.mailer.protocol.Email
object MailerProvider {
trait Mailer {
@throws(classOf[EmailException])
def sendEmail(email: Email): Unit
}
}
trait MailerProvider {
def getMailer: MailerProvider.Mailer
}
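// A hypothetical provider sketch (not part of the library) showing how the two
// traits compose: the provider hands out Mailer instances that do the sending.
object NoopMailerProvider extends MailerProvider {
  override def getMailer: MailerProvider.Mailer = new MailerProvider.Mailer {
    override def sendEmail(email: Email): Unit = () // deliberately drops the email
  }
}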
|
Coiney/akka-mailer
|
akka-mailer-core/src/main/scala/com/coiney/akka/mailer/providers/MailerProvider.scala
|
Scala
|
bsd-3-clause
| 321 |
package com.krrrr38.mackerel4s
package builder
import dispatch.Req
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import com.krrrr38.mackerel4s.model.{ HostStatus, SuccessResponse }
import com.krrrr38.mackerel4s.model.Types.{ Path, HostID }
object UpdateHostStatusBuilder extends APIBuilder[HostID] {
override val FullPath = (hostId: HostID) => s"/hosts/$hostId/status"
override val MethodVerb = MethodVerbPost
def apply(client: Path => Req, hostId: HostID, status: HostStatus): UpdateHostStatusBuilder =
UpdateHostStatusBuilder(baseRequest(client, hostId), UpdateHostStatusParams(status))
}
private[builder] case class UpdateHostStatusParams(status: HostStatus)
private[builder] case class UpdateHostStatusBuilder(private val req: Req, params: UpdateHostStatusParams) extends RequestBuilder[SuccessResponse] {
  /**
   * Build the request with its parameters before running the HTTP request.
   * @return the request with its serialized JSON body set
   */
override protected def buildRequest: Req =
req.setBody(Serialization.write(params))
}
|
krrrr38/mackerel-client-scala
|
src/main/scala/com/krrrr38/mackerel4s/builder/UpdateHostStatusBuilder.scala
|
Scala
|
mit
| 1,026 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.singleTest
import org.jetbrains.plugins.scala.testingSupport.scalatest.generators.FreeSpecGenerator
/**
* @author Roman.Shein
* @since 20.01.2015.
*/
trait FreeSpecSingleTestTest extends FreeSpecGenerator {
val freeSpecTestPath = List("[root]", "FreeSpecTest", "A FreeSpecTest", "should be able to run single tests")
def testFreeSpec() {
addFreeSpec()
runTestByLocation(6, 3, "FreeSpecTest.scala",
checkConfigAndSettings(_, "FreeSpecTest", "A FreeSpecTest should be able to run single tests"),
root => checkResultTreeHasExactNamedPath(root, freeSpecTestPath:_*) &&
checkResultTreeDoesNotHaveNodes(root, "should not run tests that are not selected"),
debug = true
)
}
}
|
triggerNZ/intellij-scala
|
test/org/jetbrains/plugins/scala/testingSupport/scalatest/singleTest/FreeSpecSingleTestTest.scala
|
Scala
|
apache-2.0
| 783 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.io.File
import java.util.ArrayList
import java.util.concurrent.ExecutionException
import kafka.admin.AclCommand
import kafka.common.TopicAndPartition
import kafka.security.auth._
import kafka.server._
import kafka.utils._
import org.apache.kafka.clients.consumer.{Consumer, ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.apache.kafka.common.{KafkaException, TopicPartition}
import org.apache.kafka.common.errors.{GroupAuthorizationException, TimeoutException, TopicAuthorizationException}
import org.junit.Assert._
import org.junit.{After, Before, Test}
import scala.collection.JavaConverters._
/**
* The test cases here verify that a producer authorized to publish to a topic
 * is able to, and that consumers in a group authorized to consume are able
 * to do so.
*
* This test relies on a chain of test harness traits to set up. It directly
* extends IntegrationTestHarness. IntegrationTestHarness creates producers and
* consumers, and it extends KafkaServerTestHarness. KafkaServerTestHarness starts
* brokers, but first it initializes a ZooKeeper server and client, which happens
* in ZooKeeperTestHarness.
*
* To start brokers we need to set a cluster ACL, which happens optionally in KafkaServerTestHarness.
* The remaining ACLs to enable access to producers and consumers are set here. To set ACLs, we use AclCommand directly.
*
* Finally, we rely on SaslSetup to bootstrap and setup Kerberos. We don't use
* SaslTestHarness here directly because it extends ZooKeeperTestHarness, and we
* would end up with ZooKeeperTestHarness twice.
*/
abstract class EndToEndAuthorizationTest extends IntegrationTestHarness with SaslSetup {
override val producerCount = 1
override val consumerCount = 2
override val serverCount = 3
override def configureSecurityBeforeServersStart() {
AclCommand.main(clusterAclArgs)
}
val numRecords = 1
val group = "group"
val topic = "e2etopic"
val topicWildcard = "*"
val part = 0
val tp = new TopicPartition(topic, part)
val topicAndPartition = new TopicAndPartition(topic, part)
val clientPrincipal: String
val kafkaPrincipal: String
override protected lazy val trustStoreFile = Some(File.createTempFile("truststore", ".jks"))
val topicResource = new Resource(Topic, topic)
val groupResource = new Resource(Group, group)
val clusterResource = Resource.ClusterResource
  // Arguments to AclCommand to set ACLs. The definitions below grant:
  // cluster actions to the broker principal, wildcard topic reads to brokers,
  // and produce/describe/consume plus consumer-group read access to the client
  // principal (with matching arguments to remove the describe and write ACLs).
def clusterAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--cluster",
s"--operation=ClusterAction",
s"--allow-principal=$kafkaPrincipalType:$kafkaPrincipal")
def topicBrokerReadAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--topic=$topicWildcard",
s"--operation=Read",
s"--allow-principal=$kafkaPrincipalType:$kafkaPrincipal")
def produceAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--topic=$topic",
s"--producer",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def describeAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--topic=$topic",
s"--operation=Describe",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def deleteDescribeAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--remove",
s"--force",
s"--topic=$topic",
s"--operation=Describe",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def deleteWriteAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--remove",
s"--force",
s"--topic=$topic",
s"--operation=Write",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def consumeAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--topic=$topic",
s"--group=$group",
s"--consumer",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def groupAclArgs: Array[String] = Array("--authorizer-properties",
s"zookeeper.connect=$zkConnect",
s"--add",
s"--group=$group",
s"--operation=Read",
s"--allow-principal=$kafkaPrincipalType:$clientPrincipal")
def ClusterActionAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, kafkaPrincipal), Allow, Acl.WildCardHost, ClusterAction))
def TopicBrokerReadAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, kafkaPrincipal), Allow, Acl.WildCardHost, Read))
def GroupReadAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, clientPrincipal), Allow, Acl.WildCardHost, Read))
def TopicReadAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, clientPrincipal), Allow, Acl.WildCardHost, Read))
def TopicWriteAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, clientPrincipal), Allow, Acl.WildCardHost, Write))
def TopicDescribeAcl = Set(new Acl(new KafkaPrincipal(kafkaPrincipalType, clientPrincipal), Allow, Acl.WildCardHost, Describe))
// The next two configuration parameters enable ZooKeeper secure ACLs
// and sets the Kafka authorizer, both necessary to enable security.
this.serverConfig.setProperty(KafkaConfig.ZkEnableSecureAclsProp, "true")
this.serverConfig.setProperty(KafkaConfig.AuthorizerClassNameProp, classOf[SimpleAclAuthorizer].getName)
// Some needed configuration for brokers, producers, and consumers
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "3")
this.serverConfig.setProperty(KafkaConfig.MinInSyncReplicasProp, "3")
this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "group")
/**
* Starts MiniKDC and only then sets up the parent trait.
*/
@Before
override def setUp {
super.setUp
AclCommand.main(topicBrokerReadAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicBrokerReadAcl, s.apis.authorizer.get, new Resource(Topic, "*"))
}
// create the test topic with all the brokers as replicas
TestUtils.createTopic(zkUtils, topic, 1, 3, this.servers)
}
override def createNewProducer: KafkaProducer[Array[Byte], Array[Byte]] = {
TestUtils.createNewProducer(brokerList,
maxBlockMs = 3000L,
securityProtocol = this.securityProtocol,
trustStoreFile = this.trustStoreFile,
saslProperties = this.clientSaslProperties,
props = Some(producerConfig))
}
/**
* Closes MiniKDC last when tearing down.
*/
@After
override def tearDown {
consumers.foreach(_.wakeup())
super.tearDown
closeSasl()
}
/**
* Tests the ability of producing and consuming with the appropriate ACLs set.
*/
@Test
def testProduceConsumeViaAssign {
setAclsAndProduce()
consumers.head.assign(List(tp).asJava)
consumeRecords(this.consumers.head, numRecords)
}
@Test
def testProduceConsumeViaSubscribe {
setAclsAndProduce()
consumers.head.subscribe(List(topic).asJava)
consumeRecords(this.consumers.head, numRecords)
}
protected def setAclsAndProduce() {
AclCommand.main(produceAclArgs)
AclCommand.main(consumeAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicReadAcl ++ TopicWriteAcl ++ TopicDescribeAcl, s.apis.authorizer.get, topicResource)
TestUtils.waitAndVerifyAcls(GroupReadAcl, s.apis.authorizer.get, groupResource)
}
sendRecords(numRecords, tp)
}
/**
* Tests that a producer fails to publish messages when the appropriate ACL
* isn't set.
*/
@Test(expected = classOf[TimeoutException])
def testNoProduceWithoutDescribeAcl {
sendRecords(numRecords, tp)
}
@Test
def testNoProduceWithDescribeAcl {
AclCommand.main(describeAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicDescribeAcl, s.apis.authorizer.get, topicResource)
}
    try {
sendRecords(numRecords, tp)
fail("exception expected")
} catch {
case e: TopicAuthorizationException =>
assertEquals(Set(topic).asJava, e.unauthorizedTopics())
}
}
/**
* Tests that a consumer fails to consume messages without the appropriate
* ACL set.
*/
@Test(expected = classOf[KafkaException])
def testNoConsumeWithoutDescribeAclViaAssign {
noConsumeWithoutDescribeAclSetup
consumers.head.assign(List(tp).asJava)
// the exception is expected when the consumer attempts to lookup offsets
consumeRecords(this.consumers.head)
}
@Test(expected = classOf[TimeoutException])
def testNoConsumeWithoutDescribeAclViaSubscribe {
noConsumeWithoutDescribeAclSetup
consumers.head.subscribe(List(topic).asJava)
// this should timeout since the consumer will not be able to fetch any metadata for the topic
consumeRecords(this.consumers.head, timeout = 3000)
}
private def noConsumeWithoutDescribeAclSetup {
AclCommand.main(produceAclArgs)
AclCommand.main(groupAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicWriteAcl ++ TopicDescribeAcl, s.apis.authorizer.get, topicResource)
TestUtils.waitAndVerifyAcls(GroupReadAcl, s.apis.authorizer.get, groupResource)
}
sendRecords(numRecords, tp)
AclCommand.main(deleteDescribeAclArgs)
AclCommand.main(deleteWriteAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(GroupReadAcl, s.apis.authorizer.get, groupResource)
}
}
/**
* Tests that a consumer fails to consume messages without the appropriate
* ACL set.
*/
@Test
def testNoConsumeWithDescribeAclViaAssign {
noConsumeWithDescribeAclSetup
consumers.head.assign(List(tp).asJava)
try {
consumeRecords(this.consumers.head)
fail("Topic authorization exception expected")
} catch {
case e: TopicAuthorizationException =>
assertEquals(Set(topic).asJava, e.unauthorizedTopics())
}
}
@Test
def testNoConsumeWithDescribeAclViaSubscribe {
noConsumeWithDescribeAclSetup
consumers.head.subscribe(List(topic).asJava)
try {
consumeRecords(this.consumers.head)
fail("Topic authorization exception expected")
} catch {
case e: TopicAuthorizationException =>
assertEquals(Set(topic).asJava, e.unauthorizedTopics())
}
}
private def noConsumeWithDescribeAclSetup {
AclCommand.main(produceAclArgs)
AclCommand.main(groupAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicWriteAcl ++ TopicDescribeAcl, s.apis.authorizer.get, topicResource)
TestUtils.waitAndVerifyAcls(GroupReadAcl, s.apis.authorizer.get, groupResource)
}
sendRecords(numRecords, tp)
}
/**
* Tests that a consumer fails to consume messages without the appropriate
* ACL set.
*/
@Test
def testNoGroupAcl {
AclCommand.main(produceAclArgs)
servers.foreach { s =>
TestUtils.waitAndVerifyAcls(TopicWriteAcl ++ TopicDescribeAcl, s.apis.authorizer.get, topicResource)
}
sendRecords(numRecords, tp)
consumers.head.assign(List(tp).asJava)
try {
consumeRecords(this.consumers.head)
fail("Topic authorization exception expected")
} catch {
case e: GroupAuthorizationException =>
assertEquals(group, e.groupId())
}
}
private def sendRecords(numRecords: Int, tp: TopicPartition) {
val futures = (0 until numRecords).map { i =>
val record = new ProducerRecord(tp.topic(), tp.partition(), s"$i".getBytes, s"$i".getBytes)
debug(s"Sending this record: $record")
this.producers.head.send(record)
}
try {
futures.foreach(_.get)
} catch {
case e: ExecutionException => throw e.getCause
}
}
protected def consumeRecords(consumer: Consumer[Array[Byte], Array[Byte]],
numRecords: Int = 1,
startingOffset: Int = 0,
topic: String = topic,
part: Int = part,
timeout: Long = 10000) {
val records = new ArrayList[ConsumerRecord[Array[Byte], Array[Byte]]]()
val deadlineMs = System.currentTimeMillis() + timeout
while (records.size < numRecords && System.currentTimeMillis() < deadlineMs) {
for (record <- consumer.poll(50).asScala)
records.add(record)
}
if (records.size < numRecords)
throw new TimeoutException
for (i <- 0 until numRecords) {
val record = records.get(i)
val offset = startingOffset + i
assertEquals(topic, record.topic())
assertEquals(part, record.partition())
assertEquals(offset.toLong, record.offset())
}
}
}
|
wangcy6/storm_app
|
frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/integration/kafka/api/EndToEndAuthorizationTest.scala
|
Scala
|
apache-2.0
| 15,805 |
/**
* Copyright (C) 2016 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xml.dom
import org.orbeon.dom.io.{SAXContentHandler, SAXReader}
import org.orbeon.oxf.xml.XMLReceiver
import org.xml.sax.{Attributes, Locator}
class LocationSAXContentHandler
extends SAXContentHandler(
systemIdOpt = None,
mergeAdjacentText = SAXReader.MergeAdjacentText,
stripWhitespaceText = SAXReader.StripWhitespaceText,
ignoreComments = SAXReader.IgnoreComments
) with XMLReceiver {
private var locator: Locator = null
override def setDocumentLocator(locator: Locator): Unit = this.locator = locator
override def startElement(
namespaceURI : String,
localName : String,
qualifiedName : String,
attributes : Attributes
): Unit = {
super.startElement(namespaceURI, localName, qualifiedName, attributes)
val locationData = XmlLocationData.createIfPresent(locator)
if (locationData ne null)
elementStack.get(elementStack.size - 1).setData(locationData)
}
}
|
orbeon/orbeon-forms
|
core-cross-platform/shared/src/main/scala/org/orbeon/oxf/xml/dom/LocationSAXContentHandler.scala
|
Scala
|
lgpl-2.1
| 1,620 |
package org.jetbrains.plugins.scala
package testingSupport.test.utest
import com.intellij.execution.configurations.{ConfigurationType, RunConfiguration}
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.testingSupport.test.AbstractTestRunConfigurationFactory
class UTestRunConfigurationFactory (override val typez: ConfigurationType)
extends AbstractTestRunConfigurationFactory(typez) {
def createTemplateConfiguration(project: Project): RunConfiguration = {
val configuration = new UTestRunConfiguration(project, this, "")
configuration
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/testingSupport/test/utest/UTestRunConfigurationFactory.scala
|
Scala
|
apache-2.0
| 593 |
package io.udash.web.guide.views.frontend.demos
import io.udash.web.guide.styles.partials.GuideStyles
import io.udash.web.guide.demos.AutoDemo
import scalatags.JsDom.all._
object DateDemo extends AutoDemo {
private val ((firstInput, secondInput), source) = {
import io.udash._
import io.udash.bootstrap.form.UdashInputGroup
import io.udash.bootstrap.utils.BootstrapStyles._
import io.udash.css.CssView._
import scalatags.JsDom.all._
import org.scalajs.dom.html.Div
import scalatags.JsDom
val dateProperty = Property.blank[String]
def input: JsDom.TypedTag[Div] = div(Grid.row)(
div(Grid.col(4, ResponsiveBreakpoint.Medium))(
UdashInputGroup()(
UdashInputGroup.input(
DateInput(dateProperty)().render
),
)
),
div(Grid.col(4, ResponsiveBreakpoint.Medium))(
produce(dateProperty) { date => span(s"Selected date: $date": Modifier).render }
)
)
input.render
(input, input)
}.withSourceCode
override protected def demoWithSource(): (Modifier, Iterator[String]) = {
import io.udash.bootstrap.utils.BootstrapStyles._
import io.udash.css.CssView._
(div(id := "date-input-demo", GuideStyles.frame, GuideStyles.useBootstrap)(
form(containerFluid)(
firstInput, br, secondInput
)
), source.linesIterator.take(source.linesIterator.size - 2))
}
}
|
UdashFramework/udash-core
|
guide/guide/.js/src/main/scala/io/udash/web/guide/views/frontend/demos/DateDemo.scala
|
Scala
|
apache-2.0
| 1,413 |
package com.panda
import java.util.concurrent.TimeUnit
import akka.actor.ActorSystem
import akka.pattern.ask
import akka.testkit.{ImplicitSender, TestActorRef, TestKit}
import akka.util.Timeout
import com.panda.domain.{CountData, Event, GetEventCount, GetWordCount}
import org.scalatest._
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
/**
* Created by vega on 02/09/2016.
*/
class EventsActorTest extends TestKit(ActorSystem("test")) with ImplicitSender
with FlatSpecLike with Matchers {
trait TestData {
val actorRef = TestActorRef(new EventsActor)
implicit val timeout = Timeout(100, TimeUnit.MILLISECONDS)
}
"EvensActor" should
"count events" in {
new TestData {
actorRef ! Event("foo", "sit", 1472835624)
actorRef ! Event("bar", "dor", 1472835624)
actorRef ! Event("foo", "sit", 1472835624)
val futureEvents = (actorRef ? GetEventCount).asInstanceOf[Future[List[CountData]]]
val eventsResult = Await.result(futureEvents, 100.millis).sortBy(_.count)
eventsResult should be(List(
CountData("bar", 1),
CountData("foo", 2)
))
val futureWords = (actorRef ? GetWordCount).asInstanceOf[Future[List[CountData]]]
val wordsResult = Await.result(futureWords, 100.millis).sortBy(_.count)
wordsResult should be(List(
CountData("dor", 1),
CountData("sit", 2)
))
}
}
}
|
vega113/panda-stats
|
src/test/scala-2.11/com/panda/EventsActorTest.scala
|
Scala
|
apache-2.0
| 1,427 |
object Test {
  case class Foo(name: String, children: Int*)
def foo(f: Foo) = f match {
case Foo(name, ns: _*) =>
assert(name == "hello")
assert(ns(0) == 3)
assert(ns(1) == 5)
}
def bar(f: Foo) = f match {
case Foo(name, x, y, ns : _*) =>
assert(name == "hello")
assert(x == 3)
assert(y == 5)
assert(ns.isEmpty)
}
def main(args: Array[String]): Unit = {
val f = new Foo("hello", 3, 5)
foo(f)
bar(f)
}
}
|
som-snytt/dotty
|
tests/run/i3248c.scala
|
Scala
|
apache-2.0
| 482 |
package kafka
import app.EventPusher
import kafka.consumer.{Whitelist, Consumer, ConsumerConfig}
import kafka.message._
import kafka.serializer._
import kafka.utils._
import java.util.Properties
import kafka.utils.Logging
import scala.collection.JavaConversions._
/**
* Created by goldratio on 2/14/15.
*/
class KafkaConsumer(topic: String,
groupId: String,
zookeeperConnect: String,
                    readFromStartOfStream: Boolean = true) extends Logging {
val props = new Properties()
props.put("group.id", groupId)
props.put("zookeeper.connect", zookeeperConnect)
props.put("auto.offset.reset", if(readFromStartOfStream) "smallest" else "largest")
props.put("zookeeper.session.timeout.ms", "500")
props.put("zookeeper.sync.time.ms", "250")
props.put("auto.commit.interval.ms", "1000")
val config = new ConsumerConfig(props)
val connector = Consumer.create(config)
val filterSpec = new Whitelist(topic)
info("setup:start topic=%s for zk=%s and groupId=%s".format(topic,zookeeperConnect,groupId))
val stream = connector.createMessageStreamsByFilter(filterSpec, 1, new DefaultDecoder(), new DefaultDecoder()).get(0)
info("setup:complete topic=%s for zk=%s and groupId=%s".format(topic,zookeeperConnect,groupId))
def read(write: (Array[Byte])=>Unit) = {
info("reading on stream now")
for(messageAndTopic <- stream) {
try {
info("writing from stream")
write(messageAndTopic.message)
info("written to stream")
} catch {
case e: Throwable =>
          if (true) { // always skip the failed message for now; unclear how best to make this configurable
error("Error processing message, skipping this message: ", e)
} else {
throw e
}
}
}
}
def close() {
connector.shutdown()
}
}
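// Hedged usage sketch (hypothetical topic, group, and ZooKeeper endpoint):
// stream messages to stdout until the process stops, then release the connector.
object KafkaConsumerExample extends App {
  val consumer = new KafkaConsumer("events", "example-group", "localhost:2181")
  try consumer.read(bytes => println(new String(bytes, "UTF-8")))
  finally consumer.close()
}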
|
mqshen/gitbucket
|
src/main/scala/kafka/KafkaConsumer.scala
|
Scala
|
apache-2.0
| 1,836 |
package mesosphere.marathon.tasks
import java.io._
import javax.inject.Inject
import com.codahale.metrics.MetricRegistry
import mesosphere.marathon.MarathonConf
import mesosphere.marathon.Protos._
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state.{ PathId, StateMetrics, Timestamp }
import mesosphere.util.state.{ PersistentEntity, PersistentStore }
import org.apache.log4j.Logger
import org.apache.mesos.Protos.TaskStatus
import scala.collection.JavaConverters._
import scala.collection._
import scala.collection.concurrent.TrieMap
import scala.collection.immutable.Set
import scala.concurrent.{ Await, Future }
class TaskTracker @Inject() (
store: PersistentStore,
config: MarathonConf,
val metrics: Metrics)
extends StateMetrics {
import mesosphere.marathon.tasks.TaskTracker._
import mesosphere.util.ThreadPoolContext.context
implicit val timeout = config.zkTimeoutDuration
private[this] val log = Logger.getLogger(getClass.getName)
val PREFIX = "task:"
val ID_DELIMITER = ":"
private[this] val apps = TrieMap[PathId, InternalApp]()
private[tasks] def fetchFromState(id: String): Option[PersistentEntity] = timedRead {
Await.result(store.load(id), timeout)
}
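  // Keys in the persistent store follow the pattern
  // PREFIX + appId.safePath + ID_DELIMITER + taskId, i.e. "task:<app>:<taskId>".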
private[tasks] def getKey(appId: PathId, taskId: String): String = {
PREFIX + appId.safePath + ID_DELIMITER + taskId
}
def get(appId: PathId): Set[MarathonTask] =
getInternal(appId).values.toSet
def getVersion(appId: PathId, taskId: String): Option[Timestamp] =
get(appId).collectFirst {
case mt: MarathonTask if mt.getId == taskId =>
Timestamp(mt.getVersion)
}
private def getInternal(appId: PathId): TrieMap[String, MarathonTask] =
apps.getOrElseUpdate(appId, fetchApp(appId)).tasks
def list: Map[PathId, App] = apps.mapValues(_.toApp).toMap
def count(appId: PathId): Int = getInternal(appId).size
def contains(appId: PathId): Boolean = apps.contains(appId)
def take(appId: PathId, n: Int): Set[MarathonTask] = get(appId).take(n)
def created(appId: PathId, task: MarathonTask): Unit = {
// Keep this here so running() can pick it up
// FIXME: Should be persisted here for task reconciliation
// Wont fix for now since this should be completely remodeled in #462
getInternal(appId) += (task.getId -> task)
}
def running(appId: PathId, status: TaskStatus): Future[MarathonTask] = {
val taskId = status.getTaskId.getValue
getInternal(appId).get(taskId) match {
case Some(oldTask) if !oldTask.hasStartedAt => // staged
val task = oldTask.toBuilder
.setStartedAt(System.currentTimeMillis)
.setStatus(status)
.build
getInternal(appId) += (task.getId -> task)
store(appId, task).map(_ => task)
case Some(oldTask) => // running
val msg = s"Task for ID $taskId already running, ignoring"
log.warn(msg)
Future.failed(new Exception(msg))
case _ =>
val msg = s"No staged task for ID $taskId, ignoring"
log.warn(msg)
Future.failed(new Exception(msg))
}
}
def terminated(appId: PathId, taskId: String): Future[Option[MarathonTask]] = {
val appTasks = getInternal(appId)
val app = apps(appId)
appTasks.get(taskId) match {
case Some(task) =>
app.tasks.remove(task.getId)
timedWrite { Await.result(store.delete(getKey(appId, taskId)), timeout) }
log.info(s"Task $taskId expunged and removed from TaskTracker")
if (app.shutdown && app.tasks.isEmpty) {
// Are we shutting down this app? If so, remove it
remove(appId)
}
Future.successful(Some(task))
case None =>
if (app.shutdown && app.tasks.isEmpty) {
// Are we shutting down this app? If so, remove it
remove(appId)
}
Future.successful(None)
}
}
def shutdown(appId: PathId): Unit = {
apps.getOrElseUpdate(appId, fetchApp(appId)).shutdown = true
if (apps(appId).tasks.isEmpty) remove(appId)
}
private[this] def remove(appId: PathId): Unit = {
apps.remove(appId)
log.warn(s"App $appId removed from TaskTracker")
}
def statusUpdate(appId: PathId, status: TaskStatus): Future[Option[MarathonTask]] = {
val taskId = status.getTaskId.getValue
getInternal(appId).get(taskId) match {
case Some(task) if statusDidChange(task.getStatus, status) =>
val updatedTask = task.toBuilder
.setStatus(status)
.build
getInternal(appId) += (task.getId -> updatedTask)
store(appId, updatedTask).map(_ => Some(updatedTask))
case Some(task) =>
log.debug(s"Ignoring status update for ${task.getId}. Status did not change.")
Future.successful(Some(task))
case _ =>
log.warn(s"No task for ID $taskId")
Future.successful(None)
}
}
def stagedTasks(): Iterable[MarathonTask] = apps.values.flatMap(_.tasks.values.filter(_.getStartedAt == 0))
def checkStagedTasks: Iterable[MarathonTask] = {
// stagedAt is set when the task is created by the scheduler
val now = System.currentTimeMillis
val expires = now - config.taskLaunchTimeout()
val toKill = stagedTasks().filter(_.getStagedAt < expires)
toKill.foreach(t => {
log.warn(s"Task '${t.getId}' was staged ${(now - t.getStagedAt) / 1000}s ago and has not yet started")
})
toKill
}
def expungeOrphanedTasks(): Unit = {
    // Remove store entries for tasks that no tracked app references. Expensive!
log.info("Expunging orphaned tasks from store")
val stateTaskKeys = timedRead { Await.result(store.allIds(), timeout).filter(_.startsWith(PREFIX)) }
val appsTaskKeys = apps.values.flatMap { app =>
app.tasks.keys.map(taskId => getKey(app.appName, taskId))
}.toSet
for (stateTaskKey <- stateTaskKeys) {
if (!appsTaskKeys.contains(stateTaskKey)) {
log.info(s"Expunging orphaned task with key $stateTaskKey")
timedWrite {
Await.result(store.delete(stateTaskKey), timeout)
}
}
}
}
private[tasks] def fetchApp(appId: PathId): InternalApp = {
log.debug(s"Fetching app from store $appId")
val names = timedRead { Await.result(store.allIds(), timeout).toSet }
val tasks = TrieMap[String, MarathonTask]()
val taskKeys = names.filter(name => name.startsWith(PREFIX + appId.safePath + ID_DELIMITER))
for {
taskKey <- taskKeys
task <- fetchTask(taskKey)
} tasks += (task.getId -> task)
new InternalApp(appId, tasks, false)
}
def fetchTask(appId: PathId, taskId: String): Option[MarathonTask] =
fetchTask(getKey(appId, taskId))
private[tasks] def fetchTask(taskKey: String): Option[MarathonTask] = {
fetchFromState(taskKey).flatMap { entity =>
val source = new ObjectInputStream(new ByteArrayInputStream(entity.bytes.toArray))
deserialize(taskKey, source)
}
}
def deserialize(taskKey: String, source: ObjectInputStream): Option[MarathonTask] = {
if (source.available > 0) {
try {
val size = source.readInt
val bytes = new Array[Byte](size)
source.readFully(bytes)
Some(MarathonTask.parseFrom(bytes))
}
catch {
case e: com.google.protobuf.InvalidProtocolBufferException =>
log.warn(s"Unable to deserialize task state for $taskKey", e)
None
}
}
else {
log.warn(s"Unable to deserialize task state for $taskKey")
None
}
}
def legacyDeserialize(appId: PathId, source: ObjectInputStream): TrieMap[String, MarathonTask] = {
var results = TrieMap[String, MarathonTask]()
if (source.available > 0) {
try {
val size = source.readInt
val bytes = new Array[Byte](size)
source.readFully(bytes)
val app = MarathonApp.parseFrom(bytes)
if (app.getName != appId.toString) {
log.warn(s"App name from task state for $appId is wrong! Got '${app.getName}' Continuing anyway...")
}
results ++= app.getTasksList.asScala.map(x => x.getId -> x)
}
catch {
case e: com.google.protobuf.InvalidProtocolBufferException =>
log.warn(s"Unable to deserialize task state for $appId", e)
}
}
else {
log.warn(s"Unable to deserialize task state for $appId")
}
results
}
def serialize(task: MarathonTask, sink: ObjectOutputStream): Unit = {
val size = task.getSerializedSize
sink.writeInt(size)
sink.write(task.toByteArray)
sink.flush()
}
def store(appId: PathId, task: MarathonTask): Future[PersistentEntity] = {
val byteStream = new ByteArrayOutputStream()
val output = new ObjectOutputStream(byteStream)
serialize(task, output)
val bytes = byteStream.toByteArray
val key: String = getKey(appId, task.getId)
timedWrite(fetchFromState(key) match {
case Some(entity) => store.update(entity.withNewContent(bytes))
case None => store.create(key, bytes)
})
}
private[tasks] def statusDidChange(statusA: TaskStatus, statusB: TaskStatus): Boolean = {
val healthy = statusB.hasHealthy &&
(!statusA.hasHealthy || statusA.getHealthy != statusB.getHealthy)
healthy || statusA.getState != statusB.getState
}
}
object TaskTracker {
private[marathon] class InternalApp(
val appName: PathId,
var tasks: TrieMap[String, MarathonTask],
var shutdown: Boolean) {
def toApp: App = App(appName, tasks.values.toSet, shutdown)
}
case class App(appName: PathId, tasks: Set[MarathonTask], shutdown: Boolean)
}
|
MrMarvin/marathon
|
src/main/scala/mesosphere/marathon/tasks/TaskTracker.scala
|
Scala
|
apache-2.0
| 9,623 |
import sbt._
import Keys._
/**
 * Archive resolving gets a bit tricky depending on whether we're compiling in github,
 * twitter, or somewhere else.
*/
object ZipkinResolver extends Plugin {
val proxyRepo = Option(System.getenv("SBT_PROXY_REPO"))
val isTravisCi = "true".equalsIgnoreCase(System.getenv("SBT_TRAVIS_CI"))
val defaultResolvers = SettingKey[Seq[Resolver]](
"default-resolvers",
"maven repositories to use by default, unless a proxy repo is set via SBT_PROXY_REPO"
)
val travisCiResolvers = SettingKey[Seq[Resolver]](
"travisci-central",
"Use these resolvers when building on travis-ci"
)
val localRepo = SettingKey[File](
"local-repo",
"local folder to use as a repo (and where publish-local publishes to)"
)
val newSettings = Seq(
defaultResolvers := proxyRepo map { url =>
// only resolve using an internal proxy if the env is set
Seq("proxy-repo" at url)
} getOrElse {
// for everybody else
Seq(
// used for github continuous integration
"travisci-central" at "http://maven.travis-ci.org/nexus/content/repositories/central/",
"travisci-sonatype" at "http://maven.travis-ci.org/nexus/content/repositories/sonatype/",
// standard resolvers
"ibiblio" at "http://mirrors.ibiblio.org/pub/mirrors/maven2/",
"twitter.com" at "http://maven.twttr.com/",
"powermock-api" at "http://powermock.googlecode.com/svn/repo/",
"scala-tools.org" at "http://scala-tools.org/repo-releases/",
"testing.scala-tools.org" at "http://scala-tools.org/repo-releases/testing/",
"oauth.net" at "http://oauth.googlecode.com/svn/code/maven",
"download.java.net" at "http://download.java.net/maven/2/",
"atlassian" at "https://m2proxy.atlassian.com/repository/public/",
// for netty:
"jboss" at "http://repository.jboss.org/nexus/content/groups/public/"
)
},
travisCiResolvers := Seq(
"travisci-central" at "http://maven.travis-ci.org/nexus/content/repositories/central/",
"travisci-sonatype" at "http://maven.travis-ci.org/nexus/content/repositories/sonatype/"
),
localRepo := file(System.getProperty("user.home") + "/.m2/repository"),
// configure resolvers for the build
resolvers <<= (
resolvers,
defaultResolvers,
travisCiResolvers,
localRepo
) { (resolvers, defaultResolvers, travisCiResolvers, localRepo) =>
(proxyRepo map { url =>
Seq("proxy-repo" at url)
} getOrElse {
(if (isTravisCi) travisCiResolvers else Seq.empty[Resolver]) ++ resolvers ++ defaultResolvers
}) ++ Seq(
// the local repo has to be in here twice, because sbt won't push to a "file:"
// repo, but it won't read artifacts from a "Resolver.file" repo. (head -> desk)
"local-lookup" at ("file:" + localRepo.getAbsolutePath),
Resolver.file("local", localRepo)(Resolver.mavenStylePatterns)
)
},
// don't add any special resolvers.
externalResolvers <<= (resolvers) map identity
)
}
|
kevinyang0906/zipkin
|
project/ZipkinResolver.scala
|
Scala
|
apache-2.0
| 3,125 |
/*
* Copyright 2006-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import java.net.{URLDecoder, URLEncoder}
import scala.collection.mutable.ListBuffer
import scala.xml._
import scala.collection.{Map}
import scala.collection.mutable.HashMap
import java.util.concurrent.atomic.AtomicLong
import common._
object HttpHelpers extends ListHelpers with StringHelpers
trait HttpHelpers {
self: ListHelpers with StringHelpers =>
/**
* The list of known suffixes used to split the URI into path parts and suffixes.
*/
val knownSuffixes: Set[String] = Set("json","rss","atom","do","3dm",
"3dmf","a","aab","aam","aas","abc","acgi","afl","ai","aif","aifc","aiff",
"aim","aip","ani","aos","aps","arc","arj","art","asf","asm","asp","asx","au","avi","avs",
"bcpio","bin","bm","bmp","boo","book","boz","bsh","bz","bz2","c","c++","cat","cc","ccad",
"cco","cdf","cer","cha","chat","class","com","conf","cpio","cpp","cpt","crl","crt","csh",
"css","cxx","dcr","deepv","def","der","dif","dir","dl","doc","dot","dp","drw","dump","dv",
"dvi","dwf","dwg","dxf","dxr","el","elc","env","eps","es","etx","evy","exe","f","f77",
"f90","fdf","fif","fli","flo","flx","fmf","for","fpx","frl","funk","g","g3","gif","gl","gsd",
"gsm","gsp","gss","gtar","gz","gzip","h","hdf","help","hgl","hh","hlb","hlp","hpg","hpgl",
"hqx","hta","htc","htm","html","htmls","htt","htx","ice","ico","idc","ief","iefs","iges","igs",
"ima","imap","inf","ins","ip","isu","it","iv","ivr","ivy","jam","jav","java","jcm","jfif",
"jfif-tbnl","jpe","jpeg","jpg","jps","js","jut","kar","ksh","la","lam","latex","lha","lhx",
"list","lma","log","lsp","lst","lsx","ltx","lzh","lzx","m","m1v","m2a","m2v","m3u","man","map",
"mar","mbd","mc$","mcd","mcf","mcp","me","mht","mhtml","mid","midi","mif","mime","mjf","mjpg",
"mm","mme","mod","moov","mov","movie","mp2","mp3","mpa","mpc","mpe","mpeg","mpg","mpga","mpp",
"mpt","mpv","mpx","mrc","ms","mv","my","mzz","nap","naplps","nc","ncm","nif","niff","nix",
"nsc","nvd","o","oda","omc","omcd","omcr","p","p10","p12","p7a","p7c","p7m","p7r","p7s","part",
"pas","pbm","pcl","pct","pcx","pdb","pdf","pfunk","pgm","pic","pict","pkg","pko","pl","plx","pm",
"pm4","pm5","png","pnm","pot","pov","ppa","ppm","pps","ppt","ppz","pre","prt","ps","psd",
"pvu","pwz","py","pyc","qcp","qd3","qd3d","qif","qt","qtc","qti","qtif","ra","ram","ras",
"rast","rexx","rf","rgb","rm","rmi","rmm","rmp","rng","rnx","roff","rp","rpm","rt","rtf","rtx",
"rv","s","s3m","saveme","sbk","scm","sdml","sdp","sdr","sea","set","sgm","sgml","sh","shar",
"shtml","sid","sit","skd","skm","skp","skt","sl","smi","smil","snd","sol","spc","spl","spr",
"sprite","src","ssi","ssm","sst","step","stl","stp","sv4cpio","sv4crc","svf","svr","swf","t",
"talk","tar","tbk","tcl","tcsh","tex","texi","texinfo","text","tgz","tif","tiff","tr","tsi",
"tsp","tsv","turbot","txt","uil","uni","unis","unv","uri","uris","ustar","uu","uue","vcd","vcs",
"vda","vdo","vew","viv","vivo","vmd","vmf","voc","vos","vox","vqe","vqf","vql","vrml","vrt",
"vsd","vst","vsw","w60","w61","w6w","wav","wb1","wbmp","web","wiz","wk1","wmf","wml","wmlc",
"wmls","wmlsc","word","wp","wp5","wp6","wpd","wq1","wri","wrl","wrz","wsc","wsrc","wtk","x-png",
"xbm","xdr","xgz","xif","xl","xla","xlb","xlc","xld","xlk","xll","xlm","xls","xlt","xlv","xlw",
"xm","xml","xmz","xpix","xpm","xsr","xwd","xyz","z","zip","zoo","zsh")
/**
* URL decode the string.
*
* This is a pass-through to Java's URL decode with UTF-8
*/
def urlDecode(in : String) = URLDecoder.decode(in, "UTF-8")
/**
* URL encode the string.
*
* This is a pass-through to Java's URL encode with UTF-8
*/
def urlEncode(in : String) = URLEncoder.encode(in, "UTF-8")
/**
   * Take a list of name/value pairs and turn them into a URL query string
*
* @param params the name/value pairs
* @return a valid query string
*/
def paramsToUrlParams(params: List[(String, String)]): String = params.map {
case (n, v) => urlEncode(n) + "=" + urlEncode(v)
}.mkString("&")
/**
* Append parameters to a URL
*
* @param url the url to append the params to
* @param params the parameters (name/value) to append to the URL
*
* @return the url with the parameters appended
*/
def appendParams(url: String, params: Seq[(String, String)]): String = params.toList match {
case Nil => url
case xs if !url.contains("?") => url + "?" + paramsToUrlParams(xs)
case xs => url + "&" + paramsToUrlParams(xs)
}
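  // Hedged examples of the two helpers above (URLEncoder turns a space into '+'):
  // appendParams("http://example.com/x", Seq("a" -> "b"))       == "http://example.com/x?a=b"
  // appendParams("http://example.com/x?a=b", Seq("c" -> "d e")) == "http://example.com/x?a=b&c=d+e"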
/**
* Given a map of HTTP properties, return true if the "Content-type"
* value in the map is either "text/html" or "application/xhtml+xml"
* @param in Map which may contain a key named Content-Type
* @return true if there is a pair ("Content-Type", "text/html") or
* ("Content-Type", "application/xhtml+xml")
*/
def couldBeHtml(in: Map[String, String]): Boolean =
in match {
case null => true
case n => {
n.get("Content-Type") match {
case Some(s) => { (s.toLowerCase == "text/html") ||
(s.toLowerCase == "application/xhtml+xml") }
case None => true
}
}
}
/**
* Return true if the xml doesn't contain an <html> tag
*/
def noHtmlTag(in: NodeSeq): Boolean = findElems(in)(_.label == "html").length != 1
/**
   * Transform a general Map to a mutable HashMap
*/
  def toHashMap[A, B](in: Map[A, B]): HashMap[A, B] = {
    val ret = new HashMap[A, B]
    in.keysIterator.foreach { k => ret += (k -> in(k)) }
    ret
  }
/**
* Ensure that all the appropriate fields are in the header.
*/
def insureField(toInsure: List[(String, String)], headers: List[(String, String)]): List[(String, String)] = {
def insureField_inner(toInsure : List[(String, String)], field : (String, String)): List[(String, String)] =
toInsure.ciGet(field._1) match {
case Full(_) => toInsure
case _ => field :: toInsure
}
headers match {
case Nil => toInsure
case x :: xs => insureField(insureField_inner(toInsure, x), xs)
}
}
/**
* Transform a pair (name: String, value: Any) to an unprefixed XML attribute name="value"
*/
implicit def pairToUnprefixed(in: (String, Any)): MetaData = {
val value: Option[NodeSeq] = in._2 match {
case null => None
case js: ToJsCmd => Some(Text(js.toJsCmd))
case n: Node => Some(n)
case n: NodeSeq => Some(n)
case None => None
case Some(n: Node) => Some(n)
case Some(n: NodeSeq) => Some(n)
case Empty => None
case Full(n: Node) => Some(n)
case Full(n: NodeSeq) => Some(n)
case s => Some(Text(s.toString))
}
value.map(v => new UnprefixedAttribute(in._1, v, Null)) getOrElse Null
}
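  // Illustrative result of the conversion above (hypothetical values):
  //   ("id" -> "main")  becomes the attribute  id="main"
  //   ("id" -> None)    yields Null, i.e. no attribute is emitted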
/**
* If the specified Elem has an attribute named 'id', return it, otherwise
* construct a new Elem with a randomly generated id attribute and return the pair
*
* @param in the element to test & add 'id' to
* @return the new element and the id
*/
def findOrAddId(in: Elem): (Elem, String) = (in \\ "@id").toList match {
case Nil => {
val id = nextFuncName
(in % ("id" -> id), id)
}
case x :: xs => (in, x.text)
}
/**
* Within a NodeSeq, find the first elem and run it through
* the function. Return the resulting NodeSeq
*/
def evalElemWithId(f: (String, Elem) => NodeSeq)(ns: NodeSeq): NodeSeq = {
var found = false
ns.flatMap {
case e: Elem if !found => {
found = true
val (ne, id) = findOrAddId(e)
f(id, ne)
}
case x => x
}
}
/**
* Given a URL and a Lift function String, append the function
* even if the URL has query params and a #
*/
def appendFuncToURL(url: String, funcStr: String): String =
splitAtHash(url){to => to +
(if (to.indexOf("?") >= 0) "&" else "?") + funcStr}
/**
* Split a String at the Hash sign, run the function
* on the non-# side and then append the hash side
*/
def splitAtHash(str: String)(f: String => String): String =
str.indexOf("#") match {
case idx if idx < 0 => f(str)
case idx => f(str.substring(0, idx)) + str.substring(idx)
}
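  // Illustrative usage (hypothetical values): the function is applied to the
  // part before the hash only, and the fragment is re-appended unchanged:
  //   splitAtHash("/page#sec")(_ + "?x=1") == "/page?x=1#sec"
  //   splitAtHash("/page")(_ + "?x=1")     == "/page?x=1"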
/**
* Given a list of query parameters, append them to the
* URL taking into account # and if there are any other query
* parameters
*/
def appendQueryParameters(url: String, params: List[(String, String)]): String =
params match {
case Nil => url
case ps => splitAtHash(url) {
to => to +
(if (to.indexOf("?") >= 0) "&" else "?") +
ps.map{case (n, v) => urlEncode(n) + "=" + urlEncode(v)}.
mkString("&")
}
}
private val serial = new AtomicLong(math.abs(Helpers.randomLong(Helpers.millis)) + 1000000L)
/**
* Get a monotonically increasing number that's guaranteed to be unique for the
* current session
*/
def nextNum = serial.incrementAndGet
/**
* Find the elements of the specified NodeSeq that match
* the specified predicate and concatenate them into
* a resulting NodeSeq.
*
* @param nodes - the NodeSeq to search for elements matching the predicate
* @param f - the predicate to match elements with
* @return the NodeSeq resulting from concatenation of the matched elements.
*/
def findElems(nodes: NodeSeq)(f: Elem => Boolean): NodeSeq = {
val ret = new ListBuffer[Elem]
def find(what: NodeSeq) {
what.foreach {
case Group(g) => find(g)
case e: Elem =>
if (f(e)) ret += e
find(e.child)
case n => find(n.child)
}
}
find(nodes)
ret.toList
}
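  // Illustrative usage (hypothetical values): collect every anchor element,
  // however deeply nested, from a page fragment:
  //   findElems(page)(_.label == "a")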
/**
* Map the specified function over the elements of the
* specified NodeSeq and return the concatenated result.
* This is essentially a container-type-transforming flatMap operation.
*/
def findInElems[T](nodes: NodeSeq)(f: Elem => Iterable[T]): List[T] = {
val ret = new ListBuffer[T]
def find(what: NodeSeq) {
what.foreach {
case Group(g) => find(g)
case e: Elem =>
ret ++= f(e)
find(e.child)
case n => find(n.child)
}
}
find(nodes)
ret.toList
}
/**
* Get a guaranteed unique field name
* (16 or 17 letters and numbers, starting with a letter)
*/
def nextFuncName: String = nextFuncName(0)
/**
* Get a guaranteed unique field name
* (16 or 17 letters and numbers, starting with a letter)
*/
def nextFuncName(seed: Long): String = {
val sb = new StringBuilder(24)
sb.append('F')
sb.append(nextNum + seed)
// sb.append('_')
sb.append(randomString(6))
sb.toString
}
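  // Illustrative output (the digits and random suffix differ on every call):
  //   nextFuncName == "F1000001aZbX3q"  -- 'F', the next serial number, 6 random chars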
def findKids(in: NodeSeq, prefix: String, label: String): NodeSeq =
in.filter(n => n.label == label && n.prefix == prefix).flatMap(_.child)
def deepFindKids(in: NodeSeq, prefix: String, label: String): NodeSeq = {
val ret: ListBuffer[Node] = new ListBuffer
def doIt(in: NodeSeq) {
in.foreach {
case e: Elem if e.prefix == prefix && e.label == label =>
e.child.foreach(ret.+=)
case g: Group => doIt(g.nodes)
case n => doIt(n.child)
}
}
doIt(in)
ret.toList
}
}
/**
 * TODO: Is this something that can be converted to a JavaScript Command?
*/
trait ToJsCmd {
def toJsCmd: String
}
object CheckNodeSeq {
def unapply(in: Any): Option[NodeSeq] = in match {
case Some(ns: NodeSeq) => Some(ns)
case Full(ns: NodeSeq) => Some(ns)
    case Some(sq: Seq[_]) if sq.forall(_.isInstanceOf[Node]) =>
      val ns: NodeSeq = sq.asInstanceOf[Seq[Node]]
      Some(ns)
    case Full(sq: Seq[_]) if sq.forall(_.isInstanceOf[Node]) =>
      val ns: NodeSeq = sq.asInstanceOf[Seq[Node]]
      Some(ns)
    case ns: NodeSeq => Some(ns)
    case sq: Seq[_] if sq.forall(_.isInstanceOf[Node]) =>
      val ns: NodeSeq = sq.asInstanceOf[Seq[Node]]
      Some(ns)
case _ => None
}
}
|
pbrant/framework
|
core/util/src/main/scala/net/liftweb/util/HttpHelpers.scala
|
Scala
|
apache-2.0
| 13,704 |
/*
* Copyright (c) 2015, 2016 Alexey Kuzin <[email protected]>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package sourepoheatmap.treemap
import scala.collection.mutable.ListBuffer
/** Class to represent a generic Treemap.
*
* @author Alexey Kuzin <[email protected]>
*/
class Treemap[R <: TreemapRectangle](
private val boxWidth: Double,
private val boxHeight: Double,
diffInfo: Map[String, Int],
private val creator: TreemapRectangle.RectAttrs => R = TreemapRectangle.SimpleRectangle.tupled) {
require(boxWidth > 0.0 && boxHeight > 0.0 && diffInfo.nonEmpty)
private val mDiffInfo = sortByWeight(diffInfo.toList)
private val mScale = getScale(boxWidth, boxHeight, mDiffInfo)
def rectangles: List[R] = {
val rectsBuffer = new ListBuffer[R]
squarify(mDiffInfo, Nil, (0, 0), (boxWidth, boxHeight), rectsBuffer)
rectsBuffer.toList
}
private def getScale(width: Double, height: Double, rects: List[(String, Int)]): Double = {
width * height / sumWeights(rects)
}
private def sortByWeight(rectInfo: List[(String, Int)]): List[(String, Int)] = {
rectInfo.sortBy(- _._2)
}
private def sumWeights(rects: List[(String, Int)]): Int = {
(0 /: rects) (_ + _._2)
}
/** Recursive method to build squarified treemap.
* Squarify algorithm developed by M. Bruls, K. Huizing and J.J. van Wijk is used here.
*
* @param nodes Nodes which are not placed in the treemap yet.
    * @param row Current row being accumulated before it is laid out.
    * @param pos Current position of the empty area. The next rectangle will be placed here.
    * @param dimen Current dimensions of the empty area.
    * @param rectsBuffer Buffer collecting the resulting treemap rectangles ([[TreemapRectangle]] subtype [[R]]).
* @see [[http://www.win.tue.nl/~vanwijk/stm.pdf Squarified Treemaps article]]
*/
private def squarify(nodes: List[(String, Int)],
row: List[(String, Int)],
pos: (Double, Double),
dimen: (Double, Double),
rectsBuffer: ListBuffer[R]): Unit = {
try {
val rowWith = row ::: List(nodes.head)
      val w = getShortestSide(dimen)
if ((row == Nil) || (worst(rowWith, w) < worst(row, w))) {
squarify(nodes.tail, rowWith, pos, dimen, rectsBuffer)
} else {
val (newPos, newDimen) = layoutRow(row, pos, dimen, rectsBuffer)
squarify(nodes, Nil, newPos, newDimen, rectsBuffer)
}
} catch {
case ex: NoSuchElementException => layoutRow(row, pos, dimen, rectsBuffer)
}
}
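  // Worked example, following the Bruls/Huizing/van Wijk paper: in a 6x4 box
  // with sorted weights 6,6,4,3,2,2,1 (scale 1), worst(List(6,6), 4) = 1.5
  // while worst(List(6,6,4), 4) = 4, so the row {6,6} is laid out as a strip
  // of width 12/4 = 3 and the recursion continues in the remaining 3x4 area.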
/** Method to get the worst aspect ratio in the row.
*
    * @param row The row of nodes whose weights are inspected.
* @param w Shortest side of the empty area.
* @return the worst aspect ratio.
*/
private def worst(row: List[(String, Int)], w: Double): Double = {
val sum = sumWeights(row) * mScale
val maxWeight = row.maxBy(_._2)._2 * mScale
val minWeight = row.minBy(_._2)._2 * mScale
math.max((w * w * maxWeight) / (sum * sum), (sum * sum) / (w * w * minWeight))
}
private def getShortestSide(dimen: (Double, Double)): Double = {
if (dimen._2 < dimen._1) dimen._2 else dimen._1
}
private def layoutRow(row: List[(String, Int)], pos: (Double, Double), dimen: (Double, Double),
rectsBuffer: ListBuffer[R]):
((Double, Double), (Double, Double)) = {
// Longest side length
val rectLong = sumWeights(row) * mScale / getShortestSide(dimen)
// Is layout vertical
val vertical = dimen._2 < dimen._1
rectsBuffer ++= createRow(row, pos, rectLong, vertical).toList
if (vertical) {
((pos._1 + rectLong, pos._2), (dimen._1 - rectLong, dimen._2))
} else {
((pos._1, pos._2 + rectLong), (dimen._1, dimen._2 - rectLong))
}
}
private def createRow(row: List[(String, Int)],
pos: (Double, Double),
rectLong: Double,
vertical: Boolean): Stream[R] = {
if (row.isEmpty) {
Stream.empty
} else {
val rectShort = row.head._2 * mScale / rectLong
if (vertical) {
val rectY = pos._2 + rectShort
TreemapRectangle(pos, (rectLong, rectShort), row.head, creator) #::
createRow(row.tail, (pos._1, rectY), rectLong, vertical)
} else {
val rectX = pos._1 + rectShort
TreemapRectangle(pos, (rectShort, rectLong), row.head, creator) #::
createRow(row.tail, (rectX, pos._2), rectLong, vertical)
}
}
}
}
|
leviathan941/sourepoheatmap
|
core/src/main/scala/sourepoheatmap/treemap/Treemap.scala
|
Scala
|
bsd-3-clause
| 5,895 |
package com.twitter.finagle.http
import com.twitter.conversions.time._
import org.specs.SpecificationWithJUnit
class CookieSpec extends SpecificationWithJUnit {
"Cookie" should {
"mutate underlying" in {
val cookie = new Cookie("name", "value")
cookie.comment = "hello"
cookie.commentUrl = "hello.com"
cookie.domain = ".twitter.com"
cookie.maxAge = 100.seconds
cookie.path = "/1/statuses/show"
cookie.ports = Seq(1, 2, 3)
cookie.value = "value2"
cookie.version = 1
cookie.httpOnly = true
cookie.isDiscard = false
cookie.isSecure = true
cookie.name must_== "name"
cookie.comment must_== "hello"
cookie.commentUrl must_== "hello.com"
cookie.domain must_== ".twitter.com"
cookie.maxAge must_== 100.seconds
cookie.path must_== "/1/statuses/show"
cookie.ports must_== Set(1, 2, 3)
cookie.value must_== "value2"
cookie.version must_== 1
cookie.httpOnly must_== true
cookie.isDiscard must_== false
cookie.isSecure must_== true
}
}
}
|
firebase/finagle
|
finagle-http/src/test/scala/com/twitter/finagle/http/CookieSpec.scala
|
Scala
|
apache-2.0
| 1,161 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.reactive.Ack.Continue
import monifu.reactive.internals._
import monifu.reactive.{Ack, Observable, Observer}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Future
import scala.concurrent.duration.{Duration, FiniteDuration}
private[reactive] object buffer {
/**
* Implementation for [[Observable.buffer]].
*/
def skipped[T](source: Observable[T], count: Int, skip: Int): Observable[Seq[T]] = {
require(count > 0, "count must be strictly positive")
require(skip > 0, "skip must be strictly positive")
Observable.create { subscriber =>
import subscriber.{scheduler => s}
source.onSubscribe(new Observer[T] {
private[this] val shouldDrop = skip > count
private[this] var leftToDrop = 0
private[this] val shouldOverlap = skip < count
private[this] var nextBuffer = ArrayBuffer.empty[T]
private[this] var buffer = null : ArrayBuffer[T]
private[this] var size = 0
def onNext(elem: T): Future[Ack] = {
if (shouldDrop && leftToDrop > 0) {
leftToDrop -= 1
Continue
}
else {
if (buffer == null) {
buffer = nextBuffer
size = nextBuffer.length
nextBuffer = ArrayBuffer.empty[T]
}
size += 1
buffer.append(elem)
if (shouldOverlap && size - skip > 0) nextBuffer += elem
if (size >= count) {
if (shouldDrop) leftToDrop = skip - count
val continue = subscriber.onNext(buffer)
buffer = null
continue
}
else
Continue
}
}
def onError(ex: Throwable): Unit = {
if (buffer != null) {
subscriber.onNext(buffer).onContinueSignalError(subscriber, ex)
buffer = null
nextBuffer = null
}
else
subscriber.onError(ex)
}
def onComplete(): Unit = {
if (buffer != null) {
subscriber.onNext(buffer).onContinueSignalComplete(subscriber)
buffer = null
nextBuffer = null
}
else
subscriber.onComplete()
}
})
}
}
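  // Illustrative behaviour, hand-traced against the implementation above:
  // with count = 3 and skip = 2 consecutive buffers overlap by one element,
  // e.g. for elements 1..6 the emitted buffers are Seq(1,2,3), Seq(3,4,5)
  // and, on completion, the partial Seq(5,6).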
/**
* Implementation for [[Observable.buffer]].
*/
def timed[T](source: Observable[T], timespan: FiniteDuration, maxCount: Int): Observable[Seq[T]] = {
    require(timespan >= Duration.Zero, "timespan must be non-negative")
    require(maxCount >= 0, "maxCount must be non-negative")
Observable.create[Seq[T]] { subscriber =>
implicit val s = subscriber.scheduler
source.onSubscribe(new Observer[T] {
private[this] val timespanMillis = timespan.toMillis
private[this] var buffer = ArrayBuffer.empty[T]
private[this] var expiresAt = s.currentTimeMillis() + timespanMillis
def onNext(elem: T) = {
val rightNow = s.currentTimeMillis()
buffer.append(elem)
if (expiresAt <= rightNow || (maxCount > 0 && maxCount <= buffer.length)) {
val oldBuffer = buffer
buffer = ArrayBuffer.empty[T]
expiresAt = rightNow + timespanMillis
subscriber.onNext(oldBuffer)
}
else
Continue
}
def onError(ex: Throwable): Unit = {
if (buffer.nonEmpty) {
subscriber.onNext(buffer).onContinueSignalError(subscriber, ex)
buffer = null
}
else
subscriber.onError(ex)
}
def onComplete(): Unit = {
if (buffer.nonEmpty) {
subscriber.onNext(buffer).onContinueSignalComplete(subscriber)
buffer = null
}
else
subscriber.onComplete()
}
})
}
}
}
|
virtualirfan/monifu
|
monifu/shared/src/main/scala/monifu/reactive/internals/operators/buffer.scala
|
Scala
|
apache-2.0
| 4,577 |
// Copyright (C) 2010-2011 Monash University
//
// This file is part of Factotum.
//
// Factotum is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Factotum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with Factotum. If not, see <http://www.gnu.org/licenses/>.
//
// Designed and implemented by Dmitri Nikulin.
//
// Repository: https://github.com/dnikulin/factotum
// Email: [email protected]
package com.dnikulin.factotum.model
import net.liftweb.common._
import net.liftweb.mapper._
import net.liftweb.util.Helpers._
object StoredReport extends StoredReport with LongKeyedMetaMapper[StoredReport] {
override def fieldOrder = List(name, hash, dateAdded, owner, isPublic)
}
class StoredReport extends StoredObject[StoredReport] {
override def getSingleton = StoredReport
}
|
dnikulin/factotum
|
src/main/scala/com/dnikulin/factotum/model/StoredReport.scala
|
Scala
|
agpl-3.0
| 1,285 |
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import hu.bme.mit.ire.messages.{ChangeSet, Primary, Secondary}
import hu.bme.mit.ire.nodes.binary.AntiJoinNode
import hu.bme.mit.ire.util.Utils
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
class AntiJoinNodeTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
with WordSpecLike with Matchers with BeforeAndAfterAll {
def this() = this(ActorSystem("MySpec"))
override def afterAll {
TestKit.shutdownActorSystem(system)
}
import hu.bme.mit.ire.util.TestUtil._
"AntiJoin" must {
"do simple antijoins 0" in {
val primary = ChangeSet(
positive = Vector(tuple(1, 2), tuple(1, 3), tuple(1, 4))
)
val secondary = ChangeSet(
positive = Vector(tuple(3, 5), tuple(3, 6), tuple(4, 7))
)
val primaryMask = mask(1)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Secondary(secondary)
joiner ! Primary(primary)
expectMsg(ChangeSet(positive = Vector(tuple(1, 2))))
joiner ! Secondary(ChangeSet(negative = Vector(tuple(3, 5), tuple(4, 7))))
expectMsg(ChangeSet(positive = Vector(tuple(1, 4))))
}
"do simple antijoins 1" in {
val primary = ChangeSet(
positive = Vector(tuple(1, 2), tuple(1, 3), tuple(1, 4))
)
val secondary = ChangeSet(
positive = Vector(tuple(3, 5), tuple(3, 6), tuple(4, 7))
)
val primaryMask = mask(1)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Secondary(secondary)
joiner ! Primary(primary)
expectMsg(ChangeSet(positive = Vector(tuple(1, 2))))
joiner ! Secondary(ChangeSet(positive = Vector(tuple(2, 8), tuple(3, 9))))
expectMsg(ChangeSet(negative = Vector(tuple(1, 2))))
}
"do simple antijoins 2" in {
val prim = ChangeSet(
positive = Vector(tuple(15, 16, 17, 18), tuple(4, 5, 6, 7))
)
val sec = ChangeSet(
positive = Vector(tuple(13, 15, 16))
)
val primaryMask = mask(0, 1)
val secondaryMask = mask(1, 2)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Secondary(sec)
joiner ! Primary(prim)
expectMsg(ChangeSet(
positive = Vector(tuple(4, 5, 6, 7))
))
}
//based on https://github.com/FTSRG/incqueryd/tree/master/hu.bme.mit.incqueryd.client/hu.bme.mit.incqueryd.rete.nodes/src/test/resources/test-cases
"do antijoin 1" in {
val prim = ChangeSet(
positive = Vector(tuple(5, 6, 7), tuple(10, 11, 7))
)
val sec = ChangeSet(
positive = Vector(tuple(7, 8))
)
val primaryMask = mask(2)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Primary(prim)
expectMsg(ChangeSet(positive = Vector(tuple(5, 6, 7), tuple(10, 11, 7))))
joiner ! Secondary(sec)
expectMsgAnyOf(Utils.changeSetPermutations(
ChangeSet(negative = Vector(tuple(5, 6, 7), tuple(10, 11, 7)))): _*)
}
"do antijoin 2" in {
val prim = ChangeSet(
positive = Vector(tuple(1, 5), tuple(2, 6))
)
val sec = ChangeSet(
positive = Vector(tuple(5, 10))
)
val primaryMask = mask(1)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Primary(prim)
expectMsgAnyOf(
Utils.changeSetPermutations(ChangeSet(positive = Vector(tuple(1, 5), tuple(2, 6)))): _*
)
joiner ! Secondary(sec)
expectMsg(ChangeSet(
negative = Vector(tuple(1, 5))
)
)
}
"do antijoin new 1" in {
val primaryMask = mask(1)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Primary(ChangeSet(positive = Vector(tuple(1, 2), tuple(3, 4))))
expectMsg(
ChangeSet(positive = Vector(tuple(1, 2), tuple(3, 4)))
)
joiner ! Secondary(ChangeSet(positive = Vector(tuple(2, 3), tuple(2, 4), tuple(4, 5))))
expectMsgAnyOf(Utils.changeSetPermutations(
ChangeSet(negative = Vector(tuple(1, 2), tuple(3, 4)))
): _*)
joiner ! Secondary(ChangeSet(negative = Vector(tuple(4, 5))))
expectMsg(
ChangeSet(positive = Vector(tuple(3, 4)))
)
joiner ! Secondary(ChangeSet(negative = Vector(tuple(2, 3))))
joiner ! Secondary(ChangeSet(negative = Vector(tuple(2, 4))))
expectMsg(
ChangeSet(positive = Vector(tuple(1, 2)))
)
joiner ! Secondary(ChangeSet(positive = Vector(tuple(3, 4))))
joiner ! Secondary(ChangeSet(positive = Vector(tuple(4, 3))))
expectMsg(ChangeSet(negative = Vector(tuple(3, 4))))
joiner ! Primary(ChangeSet(positive = Vector(tuple(1, 4))))
joiner ! Primary(ChangeSet(positive = Vector(tuple(1, 5))))
expectMsg(ChangeSet(positive = Vector(tuple(1, 5))))
joiner ! Primary(ChangeSet(negative = Vector(tuple(1, 5))))
expectMsg(ChangeSet(negative = Vector(tuple(1, 5))))
}
"do antijoin new 2" in {
val prim = ChangeSet(
positive = Vector(tuple(2, 4), tuple(3, 4), tuple(5, 4), tuple(6, 4), tuple(1, 3), tuple(2, 3))
)
val secondary = ChangeSet(
positive = Vector(tuple(4, 8), tuple(4, 9), tuple(3, 4))
)
val primaryMask = mask(1)
val secondaryMask = mask(0)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Primary(prim)
expectMsg(ChangeSet(positive = Vector(tuple(2, 4), tuple(3, 4), tuple(5, 4), tuple(6, 4), tuple(1, 3), tuple(2, 3))))
joiner ! Secondary(secondary)
expectMsgAnyOf(
Utils.changeSetPermutations(
ChangeSet(negative = Vector(tuple(2, 4), tuple(3, 4), tuple(5, 4), tuple(6, 4), tuple(1, 3), tuple(2, 3)))
): _*
)
joiner ! Secondary(ChangeSet(negative = Vector(tuple(4, 7))))
joiner ! Secondary(ChangeSet(negative = Vector(tuple(4, 8))))
joiner ! Secondary(ChangeSet(negative = Vector(tuple(4, 9))))
expectMsgAnyOf(Utils.changeSetPermutations(ChangeSet(positive = Vector(tuple(2, 4), tuple(3, 4), tuple(5, 4), tuple(6, 4)))): _*)
joiner ! Secondary(ChangeSet(positive = Vector(tuple(4, 5))))
expectMsgAnyOf(Utils.changeSetPermutations(ChangeSet(negative = Vector(tuple(2, 4), tuple(3, 4), tuple(5, 4), tuple(6, 4)))): _*)
joiner ! Secondary(ChangeSet(negative = Vector(tuple(3, 4))))
expectMsgAnyOf(Utils.changeSetPermutations(ChangeSet(positive = Vector(tuple(1, 3), tuple(2, 3)))): _*)
joiner ! Primary(ChangeSet(positive = Vector(tuple(4, 3))))
expectMsg(ChangeSet(positive = Vector(tuple(4, 3))))
joiner ! Secondary(ChangeSet(positive = Vector(tuple(3, 5))))
expectMsgAnyOf(Utils.changeSetPermutations(ChangeSet(negative = Vector(tuple(1, 3), tuple(2, 3), tuple(4, 3)))): _*)
joiner ! Primary(ChangeSet(positive = Vector(tuple(7, 4))))
}
"do antijoin new 3" in {
val prim = ChangeSet(
positive = Vector(tuple(1, 2, 3, 4), tuple(1, 5, 6, 7), tuple(3, 2, 5, 4))
)
val primaryMask = mask(1, 3)
val secondaryMask = mask(0, 2)
val echoActor = system.actorOf(TestActors.echoActorProps)
val joiner = system.actorOf(Props(new AntiJoinNode(echoActor ! _, primaryMask, secondaryMask)))
joiner ! Primary(prim)
expectMsg(ChangeSet(Vector(tuple(1, 2, 3, 4), tuple(1, 5, 6, 7), tuple(3, 2, 5, 4))))
joiner ! Primary(ChangeSet(positive = Vector(tuple(8, 2, 6, 4))))
expectMsg(ChangeSet(positive = Vector(tuple(8, 2, 6, 4))))
joiner ! Secondary(ChangeSet(positive = Vector(tuple(2, 5, 4, 3))))
expectMsgAnyOf(
Utils.changeSetPermutations(ChangeSet(negative = Vector(tuple(1, 2, 3, 4), tuple(3, 2, 5, 4), tuple(8, 2, 6, 4)))): _*
)
joiner ! Secondary(ChangeSet(
positive = Vector(tuple(5, 5, 7, 3))
))
expectMsg(ChangeSet(negative = Vector(tuple(1, 5, 6, 7))))
joiner ! Secondary(ChangeSet(negative = Vector(tuple(2, 5, 4, 3))))
expectMsgAnyOf(
Utils.changeSetPermutations(ChangeSet(positive = Vector(tuple(1, 2, 3, 4), tuple(3, 2, 5, 4), tuple(8, 2, 6, 4)))): _*
)
}
}
}
|
FTSRG/ire
|
src/test/scala/AntiJoinNodeTest.scala
|
Scala
|
epl-1.0
| 9,107 |
package io.youi.example.ui
import io.youi._
import io.youi.component.FontAwesomeView
import io.youi.component.support.{MeasuredSupport, PositionSupport}
import io.youi.example.screen.UIExampleScreen
import io.youi.font.FontAwesome
import io.youi.net._
import scribe.Execution.global
import scala.concurrent.Future
class FontAwesomeExample extends UIExampleScreen {
override def title: String = "Font Awesome Example"
override def path: Path = path"/examples/font-awesome.html"
override def createUI(): Future[Unit] = for {
_ <- FontAwesome.load()
} yield {
val iconView = new FontAwesomeView with PositionSupport with MeasuredSupport {
icon @= FontAwesome.Brands.Android
font.weight @= "bold"
font.size @= 128.0
color @= Color.Blue
position.center := container.size.center
position.middle := container.size.middle
}
container.children += iconView
}
}
|
outr/youi
|
example/js/src/main/scala/io/youi/example/ui/FontAwesomeExample.scala
|
Scala
|
mit
| 916 |
package controller
import model._
import org.scalatest._
import skinny.SkinnyConfig
import skinny.test.scalatest.SkinnyFlatSpec
import skinny.test.{ FactoryGirl, SkinnyTestSupport }
class OGPSpec extends SkinnyFlatSpec with Matchers with SkinnyTestSupport with TestDBSettings {
addFilter(Controllers.articles, "/*")
it should "redirect to / with ref param when not logged in" in {
get(uri = "/articles/1?dummy=a", headers = Map("User-Agent" -> "Browser")) {
status should equal(302)
header.get("Location").get should endWith("/?ref=%2Farticles%2F1%3Fdummy%3Da") // with query string
}
}
it should "redirect to / with ref param when OGP request with not logged in" in {
get(uri = "/articles/1?o", headers = Map("User-Agent" -> "Browser")) {
status should equal(302)
header.get("Location").get should endWith("/?ref=%2Farticles%2F1") // without o parameter
}
}
it should "redirect to / with ref param when OGP request with valid UA" in {
FactoryGirl(User).withVariables('userId -> 1).create()
FactoryGirl(Article).withVariables('articleId -> 1).create()
val validUA = SkinnyConfig.stringConfigValue("ogp.allowUAs").get + "V2"
get(uri = "/articles/1?o", headers = Map("User-Agent" -> validUA)) {
status should equal(200)
body should include("og:")
}
}
}
|
atware/sharedocs
|
src/test/scala/controller/OGPSpec.scala
|
Scala
|
mit
| 1,344 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package purescala
import scala.collection.mutable.ListBuffer
import Common._
import Definitions._
import Expressions._
import Extractors._
import Constructors.letDef
class ScopeSimplifier extends Transformer {
case class Scope(inScope: Set[Identifier] = Set(), oldToNew: Map[Identifier, Identifier] = Map(), funDefs: Map[FunDef, FunDef] = Map()) {
def register(oldNew: (Identifier, Identifier)): Scope = {
val newId = oldNew._2
copy(inScope = inScope + newId, oldToNew = oldToNew + oldNew)
}
def register(oldNews: Seq[(Identifier, Identifier)]): Scope = {
(this /: oldNews){ case (oldScope, oldNew) => oldScope.register(oldNew) }
}
def registerFunDef(oldNew: (FunDef, FunDef)): Scope = {
copy(funDefs = funDefs + oldNew)
}
}
protected def genId(id: Identifier, scope: Scope): Identifier = {
val existCount = scope.inScope.count(_.name == id.name)
FreshIdentifier.forceId(id.name, existCount, id.getType, existCount >= 1)
}
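  // Illustrative behaviour (assumption about FreshIdentifier.forceId: the
  // numeric index is only displayed when the last argument is true): the
  // first `x` entering scope keeps its plain name, while each shadowing
  // binding of `x` is renamed with its occurrence count, so names stay unique.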
protected def rec(e: Expr, scope: Scope): Expr = e match {
case Let(i, e, b) =>
val si = genId(i, scope)
val se = rec(e, scope)
val sb = rec(b, scope.register(i -> si))
Let(si, se, sb)
case LetDef(fds, body: Expr) =>
var newScope: Scope = scope
// First register all functions
val fds_newIds = for(fd <- fds) yield {
val newId = genId(fd.id, scope)
newScope = newScope.register(fd.id -> newId)
(fd, newId)
}
val fds_mapping = for((fd, newId) <- fds_newIds) yield {
val localScopeToRegister = ListBuffer[(Identifier, Identifier)]() // We record the mapping of these variables only for the function.
val newArgs = for(ValDef(id) <- fd.params) yield {
val newArg = genId(id, newScope.register(localScopeToRegister))
localScopeToRegister += (id -> newArg) // This renaming happens only inside the function.
ValDef(newArg)
}
val newFd = fd.duplicate(id = newId, params = newArgs)
newScope = newScope.registerFunDef(fd -> newFd)
(newFd, localScopeToRegister, fd)
}
for((newFd, localScopeToRegister, fd) <- fds_mapping) {
newFd.fullBody = rec(fd.fullBody, newScope.register(localScopeToRegister))
}
letDef(fds_mapping.map(_._1), rec(body, newScope))
case MatchExpr(scrut, cases) =>
val rs = rec(scrut, scope)
def trPattern(p: Pattern, scope: Scope): (Pattern, Scope) = {
val (newBinder, newScope) = p.binder match {
case Some(id) =>
val newId = genId(id, scope)
val newScope = scope.register(id -> newId)
(Some(newId), newScope)
case None =>
(None, scope)
}
var curScope = newScope
val newSubPatterns = for (sp <- p.subPatterns) yield {
val (subPattern, subScope) = trPattern(sp, curScope)
curScope = subScope
subPattern
}
val newPattern = p match {
case InstanceOfPattern(b, ctd) =>
InstanceOfPattern(newBinder, ctd)
case WildcardPattern(b) =>
WildcardPattern(newBinder)
case CaseClassPattern(b, ccd, sub) =>
CaseClassPattern(newBinder, ccd, newSubPatterns)
case TuplePattern(b, sub) =>
TuplePattern(newBinder, newSubPatterns)
case UnapplyPattern(b, obj, sub) =>
UnapplyPattern(newBinder, obj, newSubPatterns)
case LiteralPattern(_, lit) =>
LiteralPattern(newBinder, lit)
}
(newPattern, curScope)
}
MatchExpr(rs, cases.map { c =>
val (newP, newScope) = trPattern(c.pattern, scope)
MatchCase(newP, c.optGuard map {rec(_, newScope)}, rec(c.rhs, newScope))
})
case Variable(id) =>
Variable(scope.oldToNew.getOrElse(id, id))
case FunctionInvocation(tfd, args) =>
val newFd = scope.funDefs.getOrElse(tfd.fd, tfd.fd)
val newArgs = args.map(rec(_, scope))
FunctionInvocation(newFd.typed(tfd.tps), newArgs)
case Operator(es, builder) =>
builder(es.map(rec(_, scope)))
case _ =>
sys.error("Expression "+e+" ["+e.getClass+"] is not extractable")
}
def transform(e: Expr): Expr = {
rec(e, Scope())
}
}
|
regb/leon
|
src/main/scala/leon/purescala/ScopeSimplifier.scala
|
Scala
|
gpl-3.0
| 4,387 |
package sigmastate.basics
import java.math.BigInteger
import org.bouncycastle.util.BigIntegers
import sigmastate.Values.Value.PropositionCode
import sigmastate._
import sigmastate.basics.VerifierMessage.Challenge
import sigmastate.eval.SigmaDsl
import sigmastate.interpreter.CryptoConstants.EcPointType
import sigmastate.interpreter.CryptoConstants
import sigmastate.serialization.{OpCodes, GroupElementSerializer}
import sigmastate.serialization.OpCodes.OpCode
import special.sigma.SigmaProp
trait DiffieHellmanTupleProtocol extends SigmaProtocol[DiffieHellmanTupleProtocol] {
override type A = FirstDiffieHellmanTupleProverMessage
override type Z = SecondDiffieHellmanTupleProverMessage
}
case class DiffieHellmanTupleProverInput(w: BigInteger, commonInput: ProveDHTuple)
extends SigmaProtocolPrivateInput[DiffieHellmanTupleProtocol, ProveDHTuple] {
override lazy val publicImage: ProveDHTuple = commonInput
}
object DiffieHellmanTupleProverInput {
import sigmastate.interpreter.CryptoConstants.dlogGroup
def random(): DiffieHellmanTupleProverInput = {
val g = dlogGroup.generator
val h = dlogGroup.createRandomGenerator()
val qMinusOne = dlogGroup.order.subtract(BigInteger.ONE)
val w = BigIntegers.createRandomInRange(BigInteger.ZERO, qMinusOne, dlogGroup.secureRandom)
val u = dlogGroup.exponentiate(g, w)
val v = dlogGroup.exponentiate(h, w)
val ci = ProveDHTuple(g, h, u, v)
DiffieHellmanTupleProverInput(w, ci)
}
}
//a = g^r, b = h^r
case class FirstDiffieHellmanTupleProverMessage(a: CryptoConstants.EcPointType, b: CryptoConstants.EcPointType)
extends FirstProverMessage {
override type SP = DiffieHellmanTupleProtocol
override def bytes: Array[Byte] = {
GroupElementSerializer.toBytes(a) ++ GroupElementSerializer.toBytes(b)
}
}
//z = r + ew mod q
case class SecondDiffieHellmanTupleProverMessage(z: BigInteger) extends SecondProverMessage {
override type SP = DiffieHellmanTupleProtocol
}
/** Construct a new SigmaProp value representing public key of Diffie Hellman signature protocol.
* Common input: (g,h,u,v) */
case class ProveDHTuple(gv: EcPointType, hv: EcPointType, uv: EcPointType, vv: EcPointType)
extends SigmaProofOfKnowledgeLeaf[DiffieHellmanTupleProtocol, DiffieHellmanTupleProverInput] {
override val opCode: OpCode = OpCodes.ProveDiffieHellmanTupleCode
override def size: Int = 4 // one node for each EcPoint
lazy val g = gv
lazy val h = hv
lazy val u = uv
lazy val v = vv
}
object ProveDHTuple {
val Code: PropositionCode = 103: Byte
}
/** Helper extractor to match SigmaProp values and extract ProveDHTuple out of it. */
object ProveDHTupleProp {
def unapply(p: SigmaProp): Option[ProveDHTuple] = SigmaDsl.toSigmaBoolean(p) match {
case d: ProveDHTuple => Some(d)
case _ => None
}
}
object DiffieHellmanTupleInteractiveProver {
import sigmastate.interpreter.CryptoConstants.dlogGroup
def firstMessage(publicInput: ProveDHTuple): (BigInteger, FirstDiffieHellmanTupleProverMessage) = {
val qMinusOne = dlogGroup.order.subtract(BigInteger.ONE)
val r = BigIntegers.createRandomInRange(BigInteger.ZERO, qMinusOne, dlogGroup.secureRandom)
val a = dlogGroup.exponentiate(publicInput.g, r)
val b = dlogGroup.exponentiate(publicInput.h, r)
r -> FirstDiffieHellmanTupleProverMessage(a, b)
}
def secondMessage(privateInput: DiffieHellmanTupleProverInput,
rnd: BigInteger,
challenge: Challenge): SecondDiffieHellmanTupleProverMessage = {
val q: BigInteger = dlogGroup.order
val e: BigInteger = new BigInteger(1, challenge)
val ew: BigInteger = e.multiply(privateInput.w).mod(q)
val z: BigInteger = rnd.add(ew).mod(q)
SecondDiffieHellmanTupleProverMessage(z)
}
def simulate(publicInput: ProveDHTuple, challenge: Challenge):
(FirstDiffieHellmanTupleProverMessage, SecondDiffieHellmanTupleProverMessage) = {
val qMinusOne = dlogGroup.order.subtract(BigInteger.ONE)
//SAMPLE a random z <- Zq
val z = BigIntegers.createRandomInRange(BigInteger.ZERO, qMinusOne, dlogGroup.secureRandom)
// COMPUTE a = g^z*u^(-e) and b = h^z*v^{-e} (where -e here means -e mod q)
val e: BigInteger = new BigInteger(1, challenge)
val minusE = dlogGroup.order.subtract(e)
val hToZ = dlogGroup.exponentiate(publicInput.h, z)
val gToZ = dlogGroup.exponentiate(publicInput.g, z)
val uToMinusE = dlogGroup.exponentiate(publicInput.u, minusE)
val vToMinusE = dlogGroup.exponentiate(publicInput.v, minusE)
val a = dlogGroup.multiplyGroupElements(gToZ, uToMinusE)
val b = dlogGroup.multiplyGroupElements(hToZ, vToMinusE)
FirstDiffieHellmanTupleProverMessage(a, b) -> SecondDiffieHellmanTupleProverMessage(z)
}
  /**
    * The function computes the prover's initial commitment to randomness
    * (the "a" message of the sigma-protocol, which in this case has two parts "a" and "b")
    * based on the verifier's challenge ("e")
    * and the prover's response ("z"):
    *
    * g^z = a*u^e, h^z = b*v^e => a = g^z/u^e, b = h^z/v^e
    *
    * @param proposition the Diffie-Hellman tuple (g, h, u, v) being proven
    * @param challenge the verifier's challenge "e"
    * @param secondMessage the prover's response carrying "z"
    * @return the reconstructed commitment pair (a, b)
    */
def computeCommitment(proposition: ProveDHTuple,
challenge: Challenge,
secondMessage: SecondDiffieHellmanTupleProverMessage): (EcPointType, EcPointType) = {
val g = proposition.g
val h = proposition.h
val u = proposition.u
val v = proposition.v
val z = secondMessage.z
val e = new BigInteger(1, challenge)
val gToZ = dlogGroup.exponentiate(g, z)
val hToZ = dlogGroup.exponentiate(h, z)
val uToE = dlogGroup.exponentiate(u, e)
val vToE = dlogGroup.exponentiate(v, e)
val a = dlogGroup.multiplyGroupElements(gToZ, dlogGroup.inverseOf(uToE))
val b = dlogGroup.multiplyGroupElements(hToZ, dlogGroup.inverseOf(vToE))
a -> b
}
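  // Sanity check of the algebra above: an honest prover sends z = r + e*w,
  // so g^z / u^e = g^(r + e*w) / g^(w*e) = g^r = a, and likewise
  // h^z / v^e = h^r = b, i.e. the verifier recomputes the first message.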
}
|
ScorexFoundation/sigmastate-interpreter
|
sigmastate/src/main/scala/sigmastate/basics/DiffieHellmanTupleProtocol.scala
|
Scala
|
mit
| 5,907 |
package org.scalajs.openui5.sap.ui.core
import org.scalajs.dom
import org.scalajs.openui5.sap.ui.base.{ManagedObject, ManagedObjectSetters, ManagedObjectSettings}
import org.scalajs.openui5.util.{SettingsMap, Settings, noSettings}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
import scala.scalajs.js.|
@ScalaJSDefined
trait ElementSettings extends ManagedObjectSettings
object ElementSettings extends ElementSettingsBuilder(noSettings)
class ElementSettingsBuilder(val dict: SettingsMap)
extends Settings[ElementSettings, ElementSettingsBuilder](new ElementSettingsBuilder(_))
with ElementSetters[ElementSettings, ElementSettingsBuilder]
trait ElementSetters[T <: js.Object, B <: Settings[T,_]] extends
ManagedObjectSetters[T, B] {
def tooltip(v: String|TooltipBase) = setting("tooltip", v)
def customData(v: js.Array[CustomData]) = setting("customData", v)
def layoutData(v: LayoutData) = setting("layoutData", v)
def dependents(v: js.Array[Control]) = setting("dependents", v)
}
/** Base Class for Elements. */
@JSName("sap.ui.core.Element")
@js.native
class Element(id: js.UndefOr[String] = js.native,
settings: js.UndefOr[ElementSettings] = js.native)
extends ManagedObject {
def this(id: String) = this(id, js.undefined)
def this(settings: ElementSettings) = this(js.undefined, settings)
/** Sets a new tooltip for this object.
*
* The tooltip can either be a simple string (which in most cases will be
* rendered as the title attribute of this Element) or an instance of
* [[TooltipBase]].
*
* If a new tooltip is set, any previously set tooltip is deactivated.
*
* @param tooltip tooltip to set
*/
def setTooltip(tooltip: String | TooltipBase): this.type = js.native
/** Returns the tooltip for this element if any or an undefined value. The
* tooltip can either be a simple string or a subclass of
* sap.ui.core.TooltipBase.
*
* Callers that are only interested in tooltips of type string (e.g. to
* render them as a title attribute), should call the convenience method
* #getTooltip_AsString instead. If they want to get a tooltip text no
* matter where it comes from (be it a string tooltip or the text from a
* TooltipBase instance) then they could call #getTooltip_Text instead.
*
* @return tooltip for this [[Element]].
*/
def getTooltip(): String /*| TooltipBase*/ = js.native
/** Returns the best suitable DOM Element that represents this UI5 Element.
* By default the DOM Element with the same ID as this Element is
* returned. Subclasses should override this method if the lookup via id
* is not sufficient.
*
* Note that such a DOM Element does not necessarily exist in all cases.
* Some elements or controls might not have a DOM representation at all
* (e.g. a naive FlowLayout) while others might not have one due to their
* current state (e.g. an initial, not yet rendered control).
*
* If an ID suffix is given, the ID of this Element is concatenated with the
* suffix (separated by a single dash) and the DOM node with that compound
* ID will be returned. This matches the UI5 naming convention for named
* inner DOM nodes of a control.
*
* @param suffix ID suffix to get the DOMRef for
* @return The Element's DOM Element, sub DOM element, or null
*/
def getDomRef(suffix: js.UndefOr[String] = js.undefined): js.UndefOr[dom.Element] = js.native
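  // Illustrative usage, following the suffix convention described above
  // (hypothetical ids): for an element with ID "panel0",
  //   getDomRef()         looks up the DOM node with id "panel0"
  //   getDomRef("inner")  looks up the DOM node with id "panel0-inner"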
/** Returns the DOM Element that should get the focus.
*
* To be overwritten by the specific control method.
*
* @return Returns the DOM Element that should get the focus
*/
def getFocusDomRef(): dom.Element = js.native
/** Cleans up the resources associated with this element and all its children.
* After an element has been destroyed, it can no longer be used in the UI!
*
* Applications should call this method if they don't need the element any
* longer.
*
* @param suppressInvalidate if true, the UI element is not marked for redraw
*/
def destroy(suppressInvalidate: js.UndefOr[Boolean]): Unit = js.native
/** Adds some dependent to the aggregation dependents.
*
* @since 1.19
*
* @param dependent The dependent to add; if empty, nothing is inserted
* @return Reference to this in order to allow method chaining
*/
def addDependent(dependent: Control): this.type = js.native
}
|
lastsys/scalajs-openui5
|
src/main/scala/org/scalajs/openui5/sap/ui/core/Element.scala
|
Scala
|
mit
| 4,485 |
package edu.colorado.plv.cuanto.jsy
package string
import edu.colorado.plv.cuanto.CuantoSpec
import edu.colorado.plv.cuanto.jsy.common.ParserBehaviors
import edu.colorado.plv.cuanto.jsy.string.Parser.parse
/**
* @author Kyle Headley
*/
class StringParserSpec extends CuantoSpec with ParserBehaviors {
override lazy val positives = Table(
"concrete" -> "abstract",
"\\"hello\\"" -> S("hello"),
"\\"world\\"" -> S("world"),
"\\"one\\" + \\"one\\"" -> Binary(Concat,S("one"),S("one"))
)
"jsy.string.Parser" should behave like parser(parse)
}
|
cuplv/cuanto
|
src/test/scala/edu/colorado/plv/cuanto/jsy/string/StringParserSpec.scala
|
Scala
|
apache-2.0
| 562 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.pos
import cc.factorie.app.nlp._
import cc.factorie.variable._
abstract class PosTag(val token:Token, initialIndex:Int) extends CategoricalVariable[String](initialIndex)
/** Penn Treebank part-of-speech tag domain. */
object PennPosDomain extends CategoricalDomain[String] {
this ++= Vector(
"#", // In WSJ but not in Ontonotes
"$",
"''",
",",
"-LRB-",
"-RRB-",
".",
":",
"CC",
"CD",
"DT",
"EX",
"FW",
"IN",
"JJ",
"JJR",
"JJS",
"LS",
"MD",
"NN",
"NNP",
"NNPS",
"NNS",
"PDT",
"POS",
"PRP",
"PRP$",
"PUNC",
"RB",
"RBR",
"RBS",
"RP",
"SYM",
"TO",
"UH",
"VB",
"VBD",
"VBG",
"VBN",
"VBP",
"VBZ",
"WDT",
"WP",
"WP$",
"WRB",
"``",
"ADD", // in Ontonotes, but not WSJ
"AFX", // in Ontonotes, but not WSJ
"HYPH", // in Ontonotes, but not WSJ
"NFP", // in Ontonotes, but not WSJ
"XX" // in Ontonotes, but not WSJ
)
freeze()
// Short-cuts for a few commonly-queried tags
val posIndex = index("POS")
val nnpIndex = index("NNP")
val nnpsIndex = index("NNPS")
val prpIndex = index("PRP")
val prpdIndex = index("PRP$")
val wpIndex = index("WP")
val wpdIndex = index("WP$")
val ccIndex = index("CC")
def isNoun(pos:String): Boolean = pos(0) == 'N'
def isProperNoun(pos:String) = { pos == "NNP" || pos == "NNPS" }
def isVerb(pos:String) = pos(0) == 'V'
def isAdjective(pos:String) = pos(0) == 'J'
def isPersonalPronoun(pos: String) = pos == "PRP"
}
/** A categorical variable, associated with a token, holding its Penn Treebank part-of-speech category. */
class PennPosTag(token:Token, initialIndex:Int)
extends PosTag(token, initialIndex) with Serializable {
def this(token:Token, initialCategory:String) = this(token, PennPosDomain.index(initialCategory))
final def domain = PennPosDomain
def isNoun = PennPosDomain.isNoun(categoryValue)
def isProperNoun = PennPosDomain.isProperNoun(categoryValue)
def isVerb = PennPosDomain.isVerb(categoryValue)
def isAdjective = PennPosDomain.isAdjective(categoryValue)
def isPersonalPronoun = PennPosDomain.isPersonalPronoun(categoryValue)
}
/** A categorical variable, associated with a token, holding its Penn Treebank part-of-speech category,
which also separately holds its desired correct "target" value. */
class LabeledPennPosTag(token:Token, targetValue:String)
extends PennPosTag(token, targetValue) with CategoricalLabeling[String] with Serializable
/** The "A Universal Part-of-Speech Tagset"
by Slav Petrov, Dipanjan Das and Ryan McDonald
http://arxiv.org/abs/1104.2086
http://code.google.com/p/universal-pos-tags
VERB - verbs (all tenses and modes)
NOUN - nouns (common and proper)
PRON - pronouns
ADJ - adjectives
ADV - adverbs
ADP - adpositions (prepositions and postpositions)
CONJ - conjunctions
DET - determiners
NUM - cardinal numbers
PRT - particles or other function words
X - other: foreign words, typos, abbreviations
. - punctuation
*/
object UniversalPosDomain extends EnumDomain {
this ++= Vector("VERB", "NOUN", "PRON", "ADJ", "ADV", "ADP", "CONJ", "DET", "NUM", "PRT", "X", ".")
freeze()
private val Penn2universal = new scala.collection.mutable.HashMap[String,String] ++= Vector(
"!" -> ".",
"#" -> ".",
"$" -> ".",
"''" -> ".",
"(" -> ".",
")" -> ".",
"," -> ".",
"-LRB-" -> ".",
"-RRB-" -> ".",
"." -> ".",
":" -> ".",
"?" -> ".",
"CC" -> "CONJ",
"CD" -> "NUM",
"CD|RB" -> "X",
"DT" -> "DET",
"EX"-> "DET",
"FW" -> "X",
"IN" -> "ADP",
"IN|RP" -> "ADP",
"JJ" -> "ADJ",
"JJR" -> "ADJ",
"JJRJR" -> "ADJ",
"JJS" -> "ADJ",
"JJ|RB" -> "ADJ",
"JJ|VBG" -> "ADJ",
"LS" -> "X",
"MD" -> "VERB",
"NN" -> "NOUN",
"NNP" -> "NOUN",
"NNPS" -> "NOUN",
"NNS" -> "NOUN",
"NN|NNS" -> "NOUN",
"NN|SYM" -> "NOUN",
"NN|VBG" -> "NOUN",
"NP" -> "NOUN",
"PDT" -> "DET",
"POS" -> "PRT",
"PRP" -> "PRON",
"PRP$" -> "PRON",
"PRP|VBP" -> "PRON",
"PRT" -> "PRT",
"RB" -> "ADV",
"RBR" -> "ADV",
"RBS" -> "ADV",
"RB|RP" -> "ADV",
"RB|VBG" -> "ADV",
"RN" -> "X",
"RP" -> "PRT",
"SYM" -> "X",
"TO" -> "PRT",
"UH" -> "X",
"VB" -> "VERB",
"VBD" -> "VERB",
"VBD|VBN" -> "VERB",
"VBG" -> "VERB",
"VBG|NN" -> "VERB",
"VBN" -> "VERB",
"VBP" -> "VERB",
"VBP|TO" -> "VERB",
"VBZ" -> "VERB",
"VP" -> "VERB",
"WDT" -> "DET",
"WH" -> "X",
"WP" -> "PRON",
"WP$" -> "PRON",
"WRB" -> "ADV",
"``" -> ".")
def categoryFromPenn(PennPosCategory:String): String = Penn2universal(PennPosCategory)
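  // Examples read directly off the mapping above:
  //   categoryFromPenn("NNP") == "NOUN"
  //   categoryFromPenn("VBD") == "VERB"
  //   categoryFromPenn("``")  == "."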
}
/** A categorical variable, associated with a token, holding its Google Universal part-of-speech category. */
class UniversalPosTag(val token:Token, initialValue:String) extends CategoricalVariable(initialValue) {
def this(token:Token, other:PennPosTag) = this(token, UniversalPosDomain.categoryFromPenn(other.categoryValue))
def domain = UniversalPosDomain
}
/** A categorical variable, associated with a token, holding its Google Universal part-of-speech category,
which also separately holds its desired correct "target" value. */
class LabeledUniversalPosTag(token:Token, targetValue:String) extends UniversalPosTag(token, targetValue) with CategoricalLabeling[String]
/** Penn Treebank part-of-speech tag domain. */
object SpanishPosDomain extends CategoricalDomain[String] {
this ++= Vector(
"a", // adjective
"c", // conjunction
"d", // determiner
"f", // punctuation
"i", // interjection
"n", // noun
"p", // pronoun
"r", // adverb
"s", // preposition
"v", // verb
"w", // date
"z", // number
"_" // unknown
)
freeze()
def isNoun(pos:String): Boolean = pos(0) == 'n'
// def isProperNoun(pos:String) = { pos == "NNP" || pos == "NNPS" }
def isVerb(pos:String) = pos(0) == 'v'
def isAdjective(pos:String) = pos(0) == 'a'
// def isPersonalPronoun(pos: String) = pos == "PRP"
}
/** A categorical variable, associated with a token, holding its Penn Treebank part-of-speech category. */
class SpanishPosTag(token:Token, initialIndex:Int) extends PosTag(token, initialIndex) {
def this(token:Token, initialCategory:String) = this(token, SpanishPosDomain.index(initialCategory))
final def domain = SpanishPosDomain
def isNoun = SpanishPosDomain.isNoun(categoryValue)
// def isProperNoun = SpanishPosDomain.isProperNoun(categoryValue)
def isVerb = SpanishPosDomain.isVerb(categoryValue)
def isAdjective = SpanishPosDomain.isAdjective(categoryValue)
// def isPersonalPronoun = SpanishPosDomain.isPersonalPronoun(categoryValue)
}
/** A categorical variable, associated with a token, holding its Spanish Treebank part-of-speech category,
which also separately holds its desired correct "target" value. */
class LabeledSpanishPosTag(token:Token, targetValue:String) extends SpanishPosTag(token, targetValue) with CategoricalLabeling[String]
|
patverga/factorie
|
src/main/scala/cc/factorie/app/nlp/pos/PosTag.scala
|
Scala
|
apache-2.0
| 8,165 |
package vaadin.scala
import vaadin.scala.mixins.LinkMixin
package mixins {
trait LinkMixin extends AbstractComponentMixin
}
object Link {
object TargetBorder extends Enumeration {
import com.vaadin.ui.Link._
val None = Value(TARGET_BORDER_NONE)
val Minimal = Value(TARGET_BORDER_MINIMAL)
val Default = Value(TARGET_BORDER_DEFAULT)
}
}
class Link(override val p: com.vaadin.ui.Link with LinkMixin = new com.vaadin.ui.Link with LinkMixin) extends AbstractComponent(p) {
def targetName = Option(p.getTargetName)
  def targetName_=(targetName: Option[String]) = p.setTargetName(targetName.orNull)
def targetName_=(targetName: String) = p.setTargetName(targetName)
def targetBorder = Link.TargetBorder(p.getTargetBorder)
def targetBorder_=(targetBorder: Link.TargetBorder.Value) = p.setTargetBorder(targetBorder.id)
def targetWidth = p.getTargetWidth()
def targetWidth_=(targetWidth: Int) = p.setTargetWidth(targetWidth)
def targetHeight = p.getTargetHeight()
def targetHeight_=(targetHeight: Int) = p.setTargetHeight(targetHeight)
def resource: Option[Resource] = wrapperFor[Resource](p.getResource)
def resource_=(resource: Option[Resource]) = if (resource.isDefined) p.setResource(resource.get.p) else p.setResource(null)
def resource_=(resource: Resource) = if (resource == null) p.setResource(null) else p.setResource(resource.p)
}
|
CloudInABox/scalavaadinutils
|
src/main/scala/vaadin/scala/Link.scala
|
Scala
|
mit
| 1,390 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs/contributors
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.sexp
import java.lang.String
import org.scalatest._
import org.scalatest.Matchers._
class DerivedSexpReaderTest extends FlatSpec {
import SexpReader.ops._
import examples._
implicit class Helper(s: String) {
def parseAs[A: SexpReader]: A = SexpParser(s).as[A]
}
"DerivedSexpWriter" should "support anyval" in {
SexpString("hello").as[Optimal] shouldBe Optimal("hello")
}
it should "support generic products" in {
"""(:s "hello")""".parseAs[Foo] shouldBe Foo("hello")
"nil".parseAs[Baz.type] shouldBe Baz
"""(:o "hello")""".parseAs[Faz] shouldBe Faz(Some("hello"))
}
it should "support generic coproducts" in {
"""(:foo (:s "hello"))""".parseAs[SimpleTrait] shouldBe Foo("hello")
""":baz""".parseAs[SimpleTrait] shouldBe Baz
""":WIBBLE""".parseAs[AbstractThing] shouldBe Wibble
"""(:WOBBLE (:ID "hello"))""".parseAs[AbstractThing] shouldBe Wobble(
"hello"
)
}
it should "support generic recursive ADTs" in {
"""(:h "hello" :t (:h "goodbye"))""".parseAs[Recursive] shouldBe
Recursive("hello", Some(Recursive("goodbye")))
}
}
|
fommil/ensime-server
|
s-express/src/test/scala/org/ensime/sexp/DerivedSexpReaderTest.scala
|
Scala
|
gpl-3.0
| 1,277 |
package coursier.test
import java.util.concurrent.{ConcurrentHashMap, Executors}
import coursier.{Module, moduleString}
import coursier.core.ResolutionProcess
import coursier.util.Task
import utest._
import scala.jdk.CollectionConverters._
import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration.DurationInt
import scala.util.Try
object ResolutionProcessTests extends TestSuite {
val es = Executors.newFixedThreadPool(4) // ensure threads are daemon?
val ec = ExecutionContext.fromExecutorService(es)
override def utestAfterAll(): Unit =
es.shutdown()
val tests = Tests {
test("fetchAll") {
// check that tasks fetching different versions of the same module are spawned sequentially
// rather than all at once
def check(extra: Int): Unit = {
val mod = mod"org:name"
val modVers = (1 to (9 + extra))
.map(_.toString)
.map((mod, _))
val called = new ConcurrentHashMap[String, Unit]
val fetch: ResolutionProcess.Fetch[Task] = {
case Seq((`mod`, "9")) =>
val save = Task.delay {
called.put("9", ())
}
save.flatMap(_ => Task.never)
case Seq(mv @ (`mod`, v)) =>
val save = Task.delay {
called.put(v, ())
}
save.map(_ => Seq((mv, Left(Seq("w/e")))))
case _ => sys.error(s"Cannot possibly happen ($modVers)")
}
val f = ResolutionProcess.fetchAll(modVers, fetch)
.future()(ec)
val res = Try(Await.result(f, 1.second))
// must have timed out
assert {
res.failed.toOption.exists {
case _: java.util.concurrent.TimeoutException => true
case _ => false
}
}
val called0 = called.asScala.iterator.map(_._1).toSet
val expectedCalled = (0 to extra)
.map(9 + _)
.map(_.toString)
.toSet
assert(called0 == expectedCalled)
}
test - check(0)
test - check(1)
test - check(3)
}
}
}
|
alexarchambault/coursier
|
modules/tests/jvm/src/test/scala/coursier/test/ResolutionProcessTests.scala
|
Scala
|
apache-2.0
| 2,150 |
/*
* Copyright 2012-2013 Stephane Godbillon (@sgodbillon) and Zenexity
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactivemongo.core.protocol
import java.nio.ByteOrder
import akka.util.{ByteString, ByteStringBuilder}
import reactivemongo.bson.buffer.ReadableBuffer
private[core] object ByteStringBuilderHelper {
implicit val byteOrder = ByteOrder.LITTLE_ENDIAN
sealed trait TypeWriter[T] {
def apply(builder: ByteStringBuilder, t:T): Unit
}
implicit object IntWriter extends TypeWriter[Int] {
override def apply(builder: ByteStringBuilder, t:Int): Unit = builder.putInt(t)
}
implicit object LongWriter extends TypeWriter[Long] {
override def apply(builder: ByteStringBuilder, t: Long): Unit = builder.putLong(t)
}
implicit object StringWriter extends TypeWriter[String] {
override def apply(builder: ByteStringBuilder, t: String): Unit = {
val bytes = t.getBytes("utf-8")
builder.putBytes(bytes)
builder.putByte(0)
}
}
  def write[A](a: A)(builder: ByteStringBuilder)(implicit w1: TypeWriter[A]): Unit = w1(builder, a)
  def write[A, B](a: A, b: B)(builder: ByteStringBuilder)(implicit w1: TypeWriter[A], w2: TypeWriter[B]): Unit = {
    w1(builder, a)
    w2(builder, b)
  }
  def write[A, B, C](a: A, b: B, c: C)(builder: ByteStringBuilder)(implicit w1: TypeWriter[A], w2: TypeWriter[B], w3: TypeWriter[C]): Unit = {
    w1(builder, a)
    w2(builder, b)
    w3(builder, c)
  }
  def write[A, B, C, E](a: A, b: B, c: C, e: E)(builder: ByteStringBuilder)(implicit w1: TypeWriter[A], w2: TypeWriter[B], w3: TypeWriter[C], w4: TypeWriter[E]): Unit = {
    w1(builder, a)
    w2(builder, b)
    w3(builder, c)
    w4(builder, e)
  }
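  // Illustrative usage (hypothetical values): serialise an int32 op code
  // followed by a zero-terminated UTF-8 C-string, little-endian as above:
  //   val builder = ByteString.newBuilder
  //   write(2004, "db.collection")(builder)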
}
import ByteStringBuilderHelper._
/** A Mongo Wire Protocol operation */
sealed trait Op {
/** operation code */
val code: Int
}
/**
* A Mongo Wire Protocol request operation.
*
* Actually, all operations excepted Reply are requests.
*/
sealed trait RequestOp extends Op with ByteStringBuffer {
/** States if this request expects a response. */
val expectsResponse: Boolean = false
/** States if this request has to be run on a primary. */
val requiresPrimary: Boolean = false
}
/** A request that needs to know the full collection name. */
sealed trait CollectionAwareRequestOp extends RequestOp {
/** The full collection name (''<dbname.collectionname>'') */
val fullCollectionName: String
/** Database and collection name */
lazy val (db: String, collectionName: String) = fullCollectionName.span(_ != '.')
}
/** A request that will perform a write on the database */
sealed trait WriteRequestOp extends CollectionAwareRequestOp
/**
* Reply operation.
*
* @param flags The flags of this response.
* @param cursorID The cursor id. Strictly positive if a cursor has been created server side, 0 if none or exhausted.
* @param startingFrom The index the returned documents start from.
* @param numberReturned The number of documents that are present in this reply.
*/
case class Reply(
flags: Int,
cursorID: Long,
startingFrom: Int,
numberReturned: Int) extends Op {
override val code = 1
val size = 4 + 8 + 4 + 4
/** States whether the cursor given in the request was found */
lazy val cursorNotFound = (flags & 0x01) != 0
/** States if the request encountered an error */
lazy val queryFailure = (flags & 0x02) != 0
/** States if the answering server supports the AwaitData query option */
lazy val awaitCapable = (flags & 0x08) != 0
private def str(b: Boolean, s: String) = if (b) s else ""
/** States if this reply is in error */
lazy val inError = cursorNotFound || queryFailure
lazy val stringify = toString + " [" + str(cursorNotFound, "CursorNotFound;") + str(queryFailure, "QueryFailure;") + str(awaitCapable, "AwaitCapable") + "]"
}
object Reply extends BufferReadable[Reply] with ReadableFrom[ByteString, Reply] {
val size = 4 + 8 + 4 + 4
override def readFrom(buffer: ReadableBuffer) = Reply(
buffer.readInt,
buffer.readLong,
buffer.readInt,
buffer.readInt
)
override def readFrom(buffer: ByteString): Reply = {
val iterator = buffer.iterator
Reply(
iterator.getInt,
iterator.getLong,
iterator.getInt,
iterator.getInt
)
}
}
/**
* Update operation.
*
* @param flags Operation flags.
*/
case class Update(
fullCollectionName: String,
flags: Int) extends WriteRequestOp {
override val code = 2001
override def size = 4 /* int32 = ZERO */ + 4 + fullCollectionName.length + 1
override val requiresPrimary = true
override val append = write(0, fullCollectionName, flags) _
}
object UpdateFlags {
/** If set, the database will insert the supplied object into the collection if no matching document is found. */
val Upsert = 0x01
/** If set, the database will update all matching objects in the collection. Otherwise only updates first matching doc. */
val MultiUpdate = 0x02
}
/**
* Insert operation.
*
* @param flags Operation flags.
*/
case class Insert(
flags: Int,
fullCollectionName: String) extends WriteRequestOp {
override val code = 2002
override def size = 4 + fullCollectionName.length + 1
override val requiresPrimary = true
override def append = write(flags, fullCollectionName) _
}
/**
* Query operation.
*
* @param flags Operation flags.
* @param numberToSkip number of documents to skip in the response.
* @param numberToReturn number of documents to return in the response. 0 means the server will choose.
*/
case class Query(
flags: Int,
fullCollectionName: String,
numberToSkip: Int,
numberToReturn: Int) extends CollectionAwareRequestOp {
override val expectsResponse = true
override val code = 2004
override def size = 4 + fullCollectionName.length + 1 + 4 + 4
override val append = write(flags, fullCollectionName, numberToSkip, numberToReturn) _
}
/**
* Query flags.
*/
object QueryFlags {
  /** Keeps the cursor from closing after all the data is consumed. */
val TailableCursor = 0x02
  /** The query might be run on a secondary. */
val SlaveOk = 0x04
/** OplogReplay */
val OplogReplay = 0x08
/** The cursor will not expire automatically */
val NoCursorTimeout = 0x10
/**
   * Block briefly to wait for more data instead of returning immediately when none is available.
* Use along with TailableCursor.
*/
val AwaitData = 0x20
/** Exhaust */
val Exhaust = 0x40
/** The response can be partial - if a shard is down, no error will be thrown. */
val Partial = 0x80
}
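// The flags are bit masks and combine with bitwise OR. A sketch: a tailable
// cursor that blocks while awaiting more data would be requested with
//   val flags = QueryFlags.TailableCursor | QueryFlags.AwaitData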
/**
* GetMore operation.
*
 * Allows getting more data from an existing cursor.
* @param numberToReturn number of documents to return in the response. 0 means the server will choose.
* @param cursorId id of the cursor.
*/
case class GetMore(
fullCollectionName: String,
numberToReturn: Int,
cursorID: Long) extends CollectionAwareRequestOp {
override val expectsResponse = true
override val code = 2005
override def size = 4 /* int32 ZERO */ + fullCollectionName.length + 1 + 4 + 8
override def append = write(0, fullCollectionName, numberToReturn, cursorID)
}
/**
* Delete operation.
*
* @param flags operation flags.
*/
case class Delete(
fullCollectionName: String,
flags: Int) extends WriteRequestOp {
override val code = 2006
override def size = 4 /* int32 ZERO */ + fullCollectionName.length + 1 + 4
override val requiresPrimary = true
override def append = write(0, fullCollectionName, flags)
}
/**
* KillCursors operation.
*
* @param cursorIDs ids of the cursors to kill. Should not be empty.
*/
case class KillCursors(
cursorIDs: Set[Long]) extends RequestOp {
override val code = 2007
override def size = 4 /* int32 ZERO */ + 4 + cursorIDs.size * 8
override def append = { builder: ByteStringBuilder =>
write(0, cursorIDs.size)(builder)
for (cursorID <- cursorIDs)
write(cursorID)(builder)
}
}
|
sh1ng/ReactiveMongo
|
driver/src/main/scala/core/protocol/operations.scala
|
Scala
|
apache-2.0
| 8,464 |
package org.storrent
import akka.actor.{ Actor, ActorSystem, Props}
import akka.testkit.{ TestActorRef, TestKit, TestLatch, ImplicitSender, TestProbe }
import scala.concurrent.duration._
import scala.concurrent.Await
import org.scalatest.{ WordSpec, BeforeAndAfterAll}
import org.scalatest.matchers.MustMatchers
import akka.util.ByteString
class BTProtocolSpec extends TestKit(ActorSystem("BTProtocolSpec"))
with ImplicitSender
with WordSpec
with MustMatchers
with BeforeAndAfterAll {
import BTProtocol._
import Frame._
object fakeTCPClient {
}
trait fakeTCPClient extends TCPClientProvider {
  def receive = Actor.emptyBehavior
}
val fakePeerConnect = TestProbe()
  def slicedBTProtocol = new BTProtocol("", 0, fakePeerConnect.ref, Array.fill(20){0}, 16384 * 10, 16384) with fakeTCPClient
  // FIXME: this code is here because the client only supports receiving, for now
def createChokeFrame(): ByteString = {
val headerLenB = intToByte(1, 4)
val headerIdB = ByteString(0)
headerLenB ++ headerIdB
}
def createUnchokeFrame(): ByteString = {
val headerLenB = intToByte(1, 4)
val headerIdB = ByteString(1)
headerLenB ++ headerIdB
}
def createHaveFrame(piece: Int): ByteString = {
val headerLenB = intToByte(5, 4)
val headerIdB = ByteString(4)
val pieceB = intToByte(piece, 4)
headerLenB ++ headerIdB ++ pieceB
}
def createHandshakeFrame(): ByteString = {
ByteString(Array.fill(68){0.toByte})
}
"BTProtocol" should {
"choke" in {
val a = TestActorRef[BTProtocol](Props(slicedBTProtocol))
a ! TCPClient.DataReceived(createHandshakeFrame())
a ! TCPClient.DataReceived(createChokeFrame())
fakePeerConnect.expectMsg(Choke())
}
"unchoke" in {
val a = TestActorRef[BTProtocol](Props(slicedBTProtocol))
a ! TCPClient.DataReceived(createHandshakeFrame())
a ! TCPClient.DataReceived(createUnchokeFrame())
fakePeerConnect.expectMsg(Unchoke())
}
"have" in {
val a = TestActorRef[BTProtocol](Props(slicedBTProtocol))
a ! TCPClient.DataReceived(createHandshakeFrame())
a ! TCPClient.DataReceived(createHaveFrame(1))
fakePeerConnect.expectMsg(Have(1))
}
}
}
|
bowlofstew/storrent
|
src/test/scala/BTProtocolSpec.scala
|
Scala
|
apache-2.0
| 2,231 |
package pl.newicom.dddd.utils
import java.util.UUID
object UUIDSupport {
def uuid: String = uuidObj.toString.replaceAllLiterally("-", "")
  def uuid7: String = uuid.substring(0, 7)
  def uuid10: String = uuid.substring(0, 10)
def uuidObj: UUID = UUID.randomUUID()
}
trait UUIDSupport {
def uuid = UUIDSupport.uuid
def uuid7 = UUIDSupport.uuid7
def uuid10 = UUIDSupport.uuid10
def uuidObj = UUIDSupport.uuidObj
}
|
AndreyLadniy/akka-ddd
|
akka-ddd-protocol/src/main/scala/pl/newicom/dddd/utils/UUIDSupport.scala
|
Scala
|
mit
| 426 |
package hello
object HelloWorld {
def main(args: Array[String]): Unit =
println(erased(5))
def erased(x: Int): Any = x
}
|
som-snytt/dotty
|
tests/pos/i7944.scala
|
Scala
|
apache-2.0
| 131 |
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.deeplearning4j.scalnet.layers.convolutional
import org.deeplearning4j.nn.conf.layers.{ Upsampling1D => JUpsampling1D }
import org.deeplearning4j.scalnet.layers.core.Layer
/**
* 1D upsampling layer
*
* @author Max Pumperla
*/
class Upsampling1D(size: List[Int], nChannels: Int = 0, nIn: Option[List[Int]] = None, override val name: String = "")
extends Upsampling(dimension = 1, size, nChannels, nIn, name)
with Layer {
if (size.length != 1) {
throw new IllegalArgumentException("Size must be length 1.")
}
override def reshapeInput(nIn: List[Int]): Upsampling1D =
new Upsampling1D(size, nChannels, Some(nIn), name)
override def compile: org.deeplearning4j.nn.conf.layers.Layer =
new JUpsampling1D.Builder()
.size(size.toArray)
.name(name)
.build()
}
object Upsampling1D {
def apply(size: List[Int], nChannels: Int = 0, nIn: Option[List[Int]] = None): Upsampling1D =
new Upsampling1D(size, nChannels, nIn)
}
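// A usage sketch (the channel count here is an arbitrary example value):
//   val layer = Upsampling1D(size = List(2), nChannels = 16)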
|
deeplearning4j/deeplearning4j
|
scalnet/src/main/scala/org/deeplearning4j/scalnet/layers/convolutional/Upsampling1D.scala
|
Scala
|
apache-2.0
| 1,759 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval.internal
/** A mapping function type that is also able to handle errors.
*
* Used in the `Task` and `Coeval` implementations to specify
* error handlers in their respective `FlatMap` internal states.
*/
private[eval] abstract class StackFrame[-A, +R] extends (A => R) { self =>
def apply(a: A): R
def recover(e: Throwable): R
}
private[eval] object StackFrame {
/** [[StackFrame]] used in the implementation of `redeemWith`. */
final class RedeemWith[-A, +R](fe: Throwable => R, fa: A => R) extends StackFrame[A, R] {
def apply(a: A): R = fa(a)
def recover(e: Throwable): R = fe(e)
}
/** [[StackFrame]] reference that only handles errors,
* useful for quick filtering of `onErrorHandleWith` frames.
*/
final class ErrorHandler[-A, +R](fe: Throwable => R, fa: A => R) extends StackFrame[A, R] {
def apply(a: A): R = fa(a)
def recover(e: Throwable): R = fe(e)
}
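  // A behaviour sketch (illustrative values only):
  //   val frame = new StackFrame.RedeemWith[Int, Int](_ => -1, _ + 1)
  //   frame(1)                     // 2, the success path
  //   frame.recover(new Exception) // -1, the error path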
}
|
alexandru/monifu
|
monix-eval/shared/src/main/scala/monix/eval/internal/StackFrame.scala
|
Scala
|
apache-2.0
| 1,610 |
package probability_monad
import java.util.concurrent.ThreadLocalRandom
import scala.annotation.tailrec
import scala.collection.parallel.immutable.ParSeq
import scala.math.BigDecimal
import scala.util.Random
trait Distribution[A] {
self =>
protected def get: A
override def toString = "<distribution>"
def map[B](f: A => B): Distribution[B] = new Distribution[B] {
override def get = f(self.get)
}
def flatMap[B](f: A => Distribution[B]): Distribution[B] = new Distribution[B] {
override def get = f(self.get).get
}
def filter(pred: A => Boolean): Distribution[A] = new Distribution[A] {
@tailrec
override def get = {
val s = self.get
if (pred(s)) s else this.get
}
}
def withFilter(pred: A => Boolean): Distribution[A] = filter(pred)
def given(pred: A => Boolean): Distribution[A] = filter(pred)
def until(pred: List[A] => Boolean): Distribution[List[A]] = new Distribution[List[A]] {
override def get = {
@tailrec
def helper(sofar: List[A]): List[A] = {
if (pred(sofar)) sofar
else helper(self.get :: sofar)
}
helper(Nil)
}
}
def repeat(n: Int): Distribution[List[A]] = new Distribution[List[A]] {
override def get = self.sample(n)
}
/**
* Using this distribution as a prior, compute the posterior distribution after running an experiment
* and observing some outcomes and not others.
*/
def posterior[B](experiment: A => Distribution[B])(observed: B => Boolean): Distribution[A] = {
case class Trial(p: A, evidence: B)
val d = for {
p <- this
e <- experiment(p)
} yield Trial(p, e)
d.filter(t => observed(t.evidence)).map(_.p)
}
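  // A sketch: infer a coin's bias after seeing 8 heads in 10 flips, starting
  // from a uniform prior over the bias (`uniform` and `binomial` are defined
  // in the companion object below):
  //   val bias = Distribution.uniform.posterior(p => Distribution.binomial(p, 10))(_ == 8)
  //   bias.mean // close to 9.0 / 12 = 0.75, by the rule of succession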
/**
* Markov chains
*/
@tailrec
final def markov(n: Int)(f: A => Distribution[A]): Distribution[A] = {
if (n == 0) this
else this.flatMap(f).markov(n-1)(f)
}
def markov(pred: A => Boolean)(f: A => Distribution[A]): Distribution[A] = new Distribution[A] {
override def get = {
@tailrec
def helper(a: A): A = {
if (pred(a)) a
else helper(f(a).get)
}
helper(self.get)
}
}
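  // A sketch: a ten-step random walk starting at 0 (`always` and
  // `discreteUniform` are defined in the companion object below):
  //   Distribution.always(0).markov(10)(x => Distribution.discreteUniform(List(x - 1, x + 1)))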
private val N = 10000
def pr(pred: A => Boolean, given: A => Boolean = (a: A) => true, samples: Int = N): Double = {
1.0 * this.filter(given).samplePar(samples).count(pred) / samples
}
// NB: Expected value only makes sense for real-valued distributions. If you want to find the expected
// value of a die roll, for example, you have to do die.map(_.toDouble).ev.
def ev(implicit toDouble: A <:< Double): Double = {
(0 until N).par.map(_ => toDouble(self.get)).aggregate(0d)(_ + _ / N, _ + _)
}
def mean(implicit toDouble: A <:< Double): Double = ev
private def square(x: Double) = x * x
private def cube(x: Double) = x * x * x
def variance(implicit toDouble: A <:< Double): Double = {
val mean = this.mean
this.map(x => {
square(toDouble(x) - mean)
}).ev
}
def stdev(implicit toDouble: A <:< Double): Double = {
math.sqrt(this.variance)
}
def skewness(implicit toDouble: A <:< Double): Double = {
val mean = this.mean
val stdev = this.stdev
this.map(x => {
cube((toDouble(x) - mean) / stdev)
}).ev
}
def kurtosis(implicit toDouble: A <:< Double): Double = {
val mean = this.mean
val variance = this.variance
this.map(x => {
square(square(toDouble(x) - mean))
}).ev / square(variance)
}
def sample(n: Int = N): List[A] = List.fill(n)(self.get)
  def samplePar(n: Int = N): ParSeq[A] = (0 until n).par.map(_ => self.get)
/**
* "Freeze" a distribution by taking a sample and serving values out of that sample at random.
* Useful for when a distribution is expensive to compute and is being sampled from repeatedly.
*/
def freeze: Distribution[A] = {
Distribution.discreteUniform(sample(N*10))
}
def zip[B](d: Distribution[B]): Distribution[(A, B)] = new Distribution[(A, B)] {
override def get = (self.get, d.get)
}
def zipWith[B, C](d: Distribution[B])(f: (A, B) => C): Distribution[C] = new Distribution[C] {
override def get = f(self.get, d.get)
}
def +(d: Distribution[A])(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.plus(self.get, d.get)
}
def +(x: A)(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.plus(self.get, x)
}
def -(d: Distribution[A])(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.minus(self.get, d.get)
}
def -(x: A)(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.minus(self.get, x)
}
def *(d: Distribution[A])(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.times(self.get, d.get)
}
def *(x: A)(implicit n: Numeric[A]): Distribution[A] = new Distribution[A] {
override def get = n.times(self.get, x)
}
def /(d: Distribution[A])(implicit toDouble: A <:< Double): Distribution[Double] = new Distribution[Double] {
override def get = toDouble(self.get) / toDouble(d.get)
}
def /(x: A)(implicit toDouble: A <:< Double): Distribution[Double] = new Distribution[Double] {
override def get = toDouble(self.get) / toDouble(x)
}
def hist(implicit ord: Ordering[A] = null, d: A <:< Double = null) = {
if (d == null) {
plotHist(ord)
} else {
bucketedHist(20)(ord, d)
}
}
def histData: Map[A, Double] = {
this.sample(N).groupBy(x=>x).mapValues(_.length.toDouble / N)
}
private def plotHist(implicit ord: Ordering[A] = null) {
val histogram = this.histData.toList
val sorted = if (ord == null) histogram else histogram.sortBy(_._1)(ord)
doPlot(sorted)
}
private def findBucketWidth(min: Double, max: Double, buckets: Int): (BigDecimal, BigDecimal, BigDecimal, Int) = {
// Use BigDecimal to avoid annoying rounding errors.
val widths = List(0.1, 0.2, 0.25, 0.5, 1.0, 2.0, 2.5, 5.0, 10.0).map(BigDecimal.apply)
val span = max - min
val p = (math.log(span) / math.log(10)).toInt - 1
val scale = BigDecimal(10).pow(p)
val scaledWidths = widths.map(_ * scale)
val bestWidth = scaledWidths.minBy(w => (span / w - buckets).abs)
val outerMin = (min / bestWidth).toInt * bestWidth
val outerMax = ((max / bestWidth).toInt + 1) * bestWidth
val actualBuckets = ((outerMax - outerMin) / bestWidth).toInt
(outerMin, outerMax, bestWidth, actualBuckets)
}
def bucketedHist(buckets: Int)(implicit ord: Ordering[A], toDouble: A <:< Double) {
val data = this.sample(N).toList.sorted
val min = data.head
val max = data.last
val (outerMin, outerMax, width, nbuckets) = findBucketWidth(toDouble(min), toDouble(max), buckets)
bucketedHistHelper(outerMin, outerMax, nbuckets, data, roundDown = false)(ord, toDouble)
}
def bucketedHist(min: Double, max: Double, nbuckets: Int, roundDown: Boolean = false)
(implicit ord: Ordering[A], toDouble: A <:< Double) {
val data = this.sample(N).filter(a => {
val x = toDouble(a)
min <= x && x <= max
}).sorted
bucketedHistHelper(BigDecimal(min), BigDecimal(max), nbuckets, data, roundDown)(ord, toDouble)
}
private def bucketedHistHelper(min: BigDecimal, max: BigDecimal, nbuckets: Int, data: List[A], roundDown: Boolean)
(implicit ord: Ordering[A], toDouble: A <:< Double) {
val rm = if (roundDown) BigDecimal.RoundingMode.DOWN else BigDecimal.RoundingMode.HALF_UP
val width = (max - min) / nbuckets
def toBucket(a: A): BigDecimal = ((toDouble(a) - min) / width).setScale(0, rm) * width + min
val n = data.size
val bucketToProb = data
.groupBy(toBucket)
.mapValues(_.size.toDouble / n)
val bucketed = (min to max by width).map(a => a -> bucketToProb.getOrElse(a, 0.0))
doPlot(bucketed)
}
private def doPlot[B](data: Iterable[(B, Double)]) = {
val scale = 100
val maxWidth = data.map(_._1.toString.length).max
val fmt = "%"+maxWidth+"s %5.2f%% %s"
data.foreach{ case (a, p) => {
val hashes = (p * scale).toInt
println(fmt.format(a.toString, p*100, "#" * hashes))
}}
}
}
object Distribution {
private val rand = ThreadLocalRandom.current()
def always[A](value: A) = new Distribution[A] {
override def get = value
}
/**
* Discrete distributions
*/
sealed abstract class Coin
case object H extends Coin
case object T extends Coin
def coin: Distribution[Coin] = discreteUniform(List(H, T))
def biasedCoin(p: Double): Distribution[Coin] = discrete(H -> p, T -> (1-p))
def d(n: Int) = discreteUniform(1 to n)
def die = d(6)
def dice(n: Int) = die.repeat(n)
def tf(p: Double = 0.5) = discrete(true -> p, false -> (1-p))
def bernoulli(p: Double = 0.5) = discrete(1 -> p, 0 -> (1-p))
def discreteUniform[A](values: Iterable[A]): Distribution[A] = new Distribution[A] {
private val vec = Vector() ++ values
override def get = vec(rand.nextInt(vec.length))
}
def discrete[A](weightedValues: (A, Double)*): Distribution[A] = new Distribution[A] {
val len = weightedValues.size
val scale = len / weightedValues.map(_._2).sum
val scaled = weightedValues.map{ case (a, p) => (a, p * scale) }.toList
val (smaller, bigger) = scaled.partition(_._2 < 1.0)
// The alias method: http://www.keithschwarz.com/darts-dice-coins/
@tailrec
private def alias(smaller: List[(A, Double)], bigger: List[(A, Double)], rest: List[(A, Double, Option[A])]): List[(A, Double, Option[A])] = {
(smaller, bigger) match {
case ((s, sp) :: ss, (b, pb) :: bb) =>
val remainder = (b, pb - (1.0 - sp))
val newRest = (s, sp, Some(b)) :: rest
if (remainder._2 < 1)
alias(remainder :: ss, bb, newRest)
else
alias(ss, remainder :: bb, newRest)
case (_, (b, pb) :: bb) =>
alias(smaller, bb, (b, 1.0, None) :: rest)
case ((s, sp) :: ss, _) =>
alias(ss, bigger, (s, 1.0, None) :: rest)
case _ =>
rest
}
}
val table = Vector() ++ alias(smaller, bigger, Nil)
private def select(p1: Double, p2: Double, table: Vector[(A, Double, Option[A])]): A = {
table((p1 * len).toInt) match {
case (a, _, None) => a
case (a, p, Some(b)) => if (p2 <= p) a else b
}
}
override def get = {
select(uniform.get, uniform.get, table)
}
}
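  // A sketch: a loaded die that shows 6 half of the time. Weights need not
  // sum to 1; they are rescaled internally:
  //   val loaded = discrete(1 -> 0.1, 2 -> 0.1, 3 -> 0.1, 4 -> 0.1, 5 -> 0.1, 6 -> 0.5)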
def geometric(p: Double): Distribution[Int] = {
tf(p).until(_.headOption == Some(true)).map(_.size - 1)
}
def binomial(p: Double, n: Int): Distribution[Int] = {
bernoulli(p).repeat(n).map(_.sum)
}
def negativeBinomial(p: Double, r: Int): Distribution[Int] = {
tf(p).until(_.count(_ == false) == r).map(_.size - r)
}
def poisson(lambda: Double): Distribution[Int] = {
exponential(1).until(_.sum > lambda).map(_.size - 1)
}
def zipf(s: Double, n: Int): Distribution[Int] = {
discrete((1 to n).map(k => k -> 1.0 / math.pow(k, s)): _*)
}
/**
* Continuous distributions
*/
object uniform extends Distribution[Double] {
override def get = rand.nextDouble()
}
object normal extends Distribution[Double] {
override def get = rand.nextGaussian()
}
def chi2(n: Int): Distribution[Double] = {
normal.map(x => x*x).repeat(n).map(_.sum)
}
def students_t(df: Int): Distribution[Double] = {
for {
z <- normal
v <- chi2(df)
} yield z * math.sqrt(df / v)
}
def pareto(a: Double, xm: Double = 1.0): Distribution[Double] = {
for {
x <- uniform
} yield xm * math.pow(x, -1/a)
}
def exponential(l: Double): Distribution[Double] = {
for {
x <- uniform
} yield math.log(x) / (-l)
}
def laplace(b: Double): Distribution[Double] = {
val d = exponential(1/b)
d - d
}
def F(d1: Int, d2: Int): Distribution[Double] = {
chi2(d1) / chi2(d2)
}
def lognormal: Distribution[Double] = {
for {
z <- normal
} yield math.exp(z)
}
def cauchy: Distribution[Double] = {
normal / normal
}
def weibull(l: Double, k: Double): Distribution[Double] = {
for {
y <- exponential(1)
} yield l * math.pow(y, 1/k)
}
def gamma(k: Double, theta: Double): Distribution[Double] = {
val n = k.toInt
val gammaInt = uniform.repeat(n).map(_.map(x => -math.log(x)).sum)
val gammaFrac = {
val delta = k - n
// From https://en.wikipedia.org/wiki/Gamma_distribution#Generating_gamma-distributed_random_variables
def helper(): Distribution[Double] = {
for {
u1 <- uniform
u2 <- uniform
u3 <- uniform
(zeta, eta) = {
val v0 = math.E / (math.E + delta)
if (u1 <= v0) {
val zeta = math.pow(u2, 1/delta)
val eta = u3 * math.pow(zeta, delta - 1)
(zeta, eta)
} else {
val zeta = 1 - math.log(u2)
val eta = u3 * math.exp(-zeta)
(zeta, eta)
}
}
r <- if (eta > math.pow(zeta, delta - 1) * math.exp(-zeta)) helper() else always(zeta)
} yield r
}
helper()
}
(gammaInt + gammaFrac) * theta
}
def beta(a: Double, b: Double): Distribution[Double] = {
for {
x <- gamma(a, 1)
y <- gamma(b, 1)
} yield x / (x + y)
}
def sequence[T](ds: List[Distribution[T]]): Distribution[List[T]] = new Distribution[List[T]] {
override def get = ds.map(_.get)
}
def dirichlet(alphas: List[Double]): Distribution[List[Double]] = {
sequence(alphas.map(a => gamma(a, 1))).map(ys => {
val sum = ys.sum
ys.map(_ / sum)
})
}
/**
* Tests if two probability distributions are the same using the Kolmogorov-Smirnov test.
* The distributions are unlikely to be the same (p < 0.05) if the value is greater than 1.35
* and very unlikely (p < 0.001) if the value is greater than 1.95.
*/
def ksTest[A](d1: Distribution[A], d2: Distribution[A])(implicit ord: Ordering[A]): Double = {
val n = 100000
val d1s = d1.sample(n).sorted.zipWithIndex
val d2s = d2.sample(n).sorted.zipWithIndex
val all = (d1s ++ d2s).sorted.zipWithIndex
// 2i is the expected index in the combined list and j is the actual index.
val worstOffset = all.map{ case ((x, i), j) => math.abs(2 * i - j) }.max / 2
val ksStatistic = worstOffset.toDouble / n
ksStatistic / math.sqrt(2.0 * n / (n * n))
}
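  // A sketch: identical distributions stay well below 1.35, while a clearly
  // shifted one does not:
  //   ksTest(normal, normal)              // small
  //   ksTest(normal, normal.map(_ + 1.0)) // well above 1.95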
/**
* Determine if a joint probability distribution is composed of 2 independent events.
* Uses the G-test: http://en.wikipedia.org/wiki/G-test
*/
def chi2test[A, B](d: Distribution[(A, B)]): Double = {
val data = d.histData
val total = data.map(_._2).sum
val rowValues = data.map(_._1._1).toSet
val colValues = data.map(_._1._2).toSet
val rowTotals = (for {
row <- rowValues
} yield row -> colValues.map(col => data.getOrElse((row, col), 0.0)).sum).toMap
val colTotals = (for {
col <- colValues
} yield col -> rowValues.map(row => data.getOrElse((row, col), 0.0)).sum).toMap
val chi2stat = (for {
row <- rowValues
col <- colValues
} yield {
val observed = data.getOrElse((row, col), 0.0)
val expected = {
val rowTotal = rowTotals.getOrElse(row, 0.0)
val colTotal = colTotals.getOrElse(col, 0.0)
rowTotal.toDouble * colTotal / total
}
observed * math.log(observed / expected)
}).sum * 2
val df = (rowValues.size - 1) * (colValues.size - 1)
chi2(df).pr(_ > chi2stat)
}
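  // A sketch: two independent flips give a large p-value, a perfectly
  // dependent pair gives one near zero:
  //   chi2test(coin.zip(coin))        // independent, p roughly uniform
  //   chi2test(coin.map(c => (c, c))) // dependent, p near 0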
}
|
codeaudit/probability-monad
|
src/main/scala/probability-monad/Distribution.scala
|
Scala
|
apache-2.0
| 15,676 |
package memnets.ml
import memnets.model._
/**
 * Naive Bayes classifier.
 *
 * Uses m-estimates (Laplace-style smoothing) for probability estimation.
 * Category A is treated as positive, category B as negative.
*/
class NaiveBayes(val features: Features) extends Learner with Logging {
val name = "Naive Bayes"
val probs: Array[Prob] = features.map {
case df: DiscFeature =>
if (!df.isOutput)
DiscProb(df)
else
OutputProb(df)
case cf: ContFeature => ContProb(cf)
}
def train(data: Data): Unit = {
val output = data.output.get
val posCount = data.count(output.isPos)
val negCount = data.length - posCount
for (p <- probs) {
p.train(data, posCount, negCount)
// logger.trace(""+p)
}
}
/**
   * Uses Shavlik's "+ vs - odds" classification formulation,
   * computed with log probabilities.
*/
def classify(pt: Row): Boolean = {
import Math._
var positive = 0.0
var negative = 0.0
for (p <- probs) {
// ADD since using logs
positive += log(p.pos(pt))
negative += log(p.neg(pt))
}
// take DIFF since logs
(positive - negative) >= 0.0
}
}
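// A usage sketch (feature and data construction elided; `features`,
// `trainingData` and `row` are illustrative names):
//   val nb = new NaiveBayes(features)
//   nb.train(trainingData)
//   val isCategoryA: Boolean = nb.classify(row)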
object NaiveBayes {
val MITCHELL_LAPLACE = 30.0
}
trait Prob {
def feature: Feature
def pos(pt: Row): Double
def neg(pt: Row): Double
def train(data: Data, posCount: Int, negCount: Int): Unit
}
case class DiscProb(feature: DiscFeature) extends Prob {
import NaiveBayes._
val size = feature.values.length // don't use dim b/c output.dim = 0
val laplaceSmooth = MITCHELL_LAPLACE / size
// create "+" and "-" for each feature
  // init with Laplace smoothing
val positives = Array.fill(size) { laplaceSmooth }
val negatives = Array.fill(size) { laplaceSmooth }
def pos(pt: Row) = positives(feature.valueIndex(pt))
def neg(pt: Row) = negatives(feature.valueIndex(pt))
def train(data: Data, posCount: Int, negCount: Int): Unit = {
val outF = data.output.get
for (pt <- data) {
if (outF.isPos(pt))
positives(feature.valueIndex(pt)) += 1.0
else
negatives(feature.valueIndex(pt)) += 1.0
}
    // Laplace smoothing: add the number of categories
val laplacePos = posCount + MITCHELL_LAPLACE
val laplaceNeg = negCount + MITCHELL_LAPLACE
var i = 0
while (i < size) {
positives(i) /= laplacePos
negatives(i) /= laplaceNeg
i += 1
}
}
override def toString = s"${feature}, pos = ${positives.mkString(",")}, neg = ${negatives.mkString(",")}"
}
/**
 * Calculates the prior distribution; it does NOT look at the point being classified.
*/
case class OutputProb(feature: DiscFeature) extends Prob {
import NaiveBayes._
  // init with Laplace smoothing (binary, so 2 categories)
var positive = MITCHELL_LAPLACE / 2.0
var negative = MITCHELL_LAPLACE / 2.0
def pos(pt: Row) = positive
def neg(pt: Row) = negative
def train(data: Data, posCount: Int, negCount: Int) = {
val outF = data.output.get
for (pt <- data) {
if (outF.isPos(pt))
positive += 1.0
else
negative += 1.0
}
val total = posCount + negCount + MITCHELL_LAPLACE
positive /= total
negative /= total
}
}
/**
 * NOTE: could be much more efficient; this is a naive but easy-to-follow
 * implementation. Uses Shavlik's method #3 to put the same number of
 * examples in each bin.
*/
case class ContProb(feature: ContFeature, numBins: Int = 5) extends Prob {
import NaiveBayes._
val laplaceSmooth = MITCHELL_LAPLACE / 2.0
// create "+" and "-" for each feature
  // init with Laplace smoothing
val positives = Array.fill(numBins) { laplaceSmooth }
val negatives = Array.fill(numBins) { laplaceSmooth }
val limits = Array.ofDim[Double](numBins) // define boundaries of bins
/*
* find bin based on feature value of given data point
*/
private[memnets] def binIndex(pt: Row): Int = {
val value = feature(pt)
// resolves ties by placing value in lower bin
val bin = limits.indexWhere(value <= _)
if (bin < 0)
limits.length - 1
// special case where many repeat values, don't want to take lower bin
else if (bin < limits.length - 2 && limits(bin) == limits(bin + 1))
bin + 1
else
bin
}
def pos(pt: Row) = positives(binIndex(pt))
def neg(pt: Row) = negatives(binIndex(pt))
def train(data: Data, posCount: Int, negCount: Int) = {
val outF = data.output.get
data.sort((a: Row, b: Row) => feature(a) < feature(b))
val binSize = data.size / numBins
var row: Row = null.asInstanceOf[Row]
for (i <- 0 until numBins) {
val binOffset = i * binSize
for (j <- 0 until binSize) {
row = data(binOffset + j)
if (outF.isPos(row))
positives(i) += 1
else
negatives(i) += 1
}
// use last point as limit for bin boundary
limits(i) = feature(row)
}
    // Laplace smoothing: add the number of categories
var i = 0
while (i < numBins) {
val total = positives(i) + negatives(i) + MITCHELL_LAPLACE
positives(i) /= total
negatives(i) /= total
i += 1
}
}
override def toString = s"${feature}, limits= ${limits.pretty}, pos= ${positives.pretty}, neg= ${negatives.pretty}"
}
|
MemoryNetworks/memnets
|
api/src/main/scala/memnets/ml/NaiveBayes.scala
|
Scala
|
apache-2.0
| 5,085 |
package courses.parsing
import java.io.File
import scala.io.Source.fromFile
import scala.util.parsing.json.JSON
import scala.collection.mutable.{Map => mMap}
import com.mongodb.casbah.Imports._
import util.Implicits._
import util.Data.using
import util.Debug._
/**
* Thrown when FileHandler is given a file of an unknown format.
*/
class UnknownFileTypeException(message: String) extends Exception(message)
/**
* Thrown when a FileHandler subclass is given a file of a type that it cannot handle.
*/
class UnsupportedFileTypeException(message: String) extends Exception(message)
abstract class FileHandler(val fileName: String, collectionName: String, fieldsToRemove: List[String] = List()) {
def processFile(path: String, courseID: String)
/**
* Removes fields (keys) from the given mutable map.
*/
def removeFields(map: mMap[String, _ <: Any]) {
fieldsToRemove foreach { field => map -= field }
}
/**
* Get the extension of fileName.
*/
private def getExtension(): String = {
val i = fileName.lastIndexOf('.')
if (i >= 0) fileName.substring(i + 1) else ""
}
/**
* Convert a file extension (String) into a FileFormat.
*/
def getFileFormat: FileFormat = {
getExtension match {
case "sql" => SQLFile
case "xml" => XMLFile
case "mongo" => MongoFile
case _ => UnknownFile
}
}
}
class BasicFileHandler(fileName: String, collectionName: String, fieldsToRemove: List[String] = List())
extends FileHandler(fileName, collectionName, fieldsToRemove) {
def processFile(path: String, courseID: String) {
val fullFileName = new File(path, courseID + fileName).getAbsolutePath()
val source = fromFile(fullFileName)
try {
using(courseID >> collectionName) { (mongoCollection) =>
getFileFormat match {
case SQLFile =>
SQLParser.parse(source) foreach { mapOption =>
mapOption match {
case Some(map) =>
/* remove fields that we don't need to store */
removeFields(map)
/* add the document to the collection */
mongoCollection += map
case None =>
}
}
case XMLFile =>
throw new UnsupportedFileTypeException("XML file handling is not yet supported.")
case MongoFile =>
MongoParser.parse(source) foreach { map =>
// TODO make removeFields(map) work; MongoParser currently returns an immutable map
mongoCollection += map
}
case UnknownFile =>
throw new UnknownFileTypeException("Unknown file type!")
}
}
} finally {
source.close()
}
}
}
class StudentModuleFileHandler(fileName: String, collectionName: String, fieldsToRemove: List[String] = List())
extends FileHandler(fileName, collectionName, fieldsToRemove) {
def processFile(path: String, courseID: String) {
val fullFileName = new File(path, courseID + fileName).getAbsolutePath()
val source = fromFile(fullFileName)
val database = MongoClient()(courseID)
    val collections = mMap[String, MongoCollection]()
try {
getFileFormat match {
case SQLFile =>
SQLParser.parse(source) foreach { mapOption =>
mapOption match {
case Some(map) =>
val moduleType = map("module_type").toString
if (!collections.contains(moduleType)) {
collections += moduleType -> database(collectionName + "_" + moduleType)
}
/* remove fields that we don't need to store */
removeFields(map)
/* add the document to the collection */
collections(moduleType) += map
case None =>
}
}
case XMLFile =>
throw new UnsupportedFileTypeException("XML file handling is not yet supported.")
case MongoFile =>
throw new UnsupportedFileTypeException("StudentModuleFileHandler doesn't support .mongo files.")
case UnknownFile =>
throw new UnknownFileTypeException("Unknown file type!")
}
} finally {
source.close()
}
}
}
|
jimwaldo/HarvardX-Tools
|
src/main/scala/courses/parsing/FileHandler.scala
|
Scala
|
bsd-3-clause
| 5,027 |
/*
* Copyright 2015 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend.oo.behavioral
import com.github.dnvriend.TestSpec
class CommandPatternTest extends TestSpec {
"" should "" in {
}
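  // A minimal sketch of the pattern under test, using only plain Scala and
  // assuming the TestSpec base mixes in ScalaTest matchers (the study's
  // production classes are not referenced here):
  "a command object" should "encapsulate a request for later execution" in {
    trait Command { def execute(): String }
    case class Greet(name: String) extends Command {
      override def execute(): String = s"hello $name"
    }
    val queue: List[Command] = List(Greet("world"), Greet("scala"))
    queue.map(_.execute()) shouldBe List("hello world", "hello scala")
  }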
}
|
dnvriend/design-patterns-study
|
src/test/scala/com/github/dnvriend/oo/behavioral/CommandPatternTest.scala
|
Scala
|
apache-2.0
| 749 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.calculations
import uk.gov.hmrc.ct.box.CtTypeConverters
import uk.gov.hmrc.ct.computations.{CP295, HmrcAccountingPeriod}
import uk.gov.hmrc.ct.ct600.NumberRounding
import uk.gov.hmrc.ct.ct600.calculations.AccountingPeriodHelper._
import uk.gov.hmrc.ct.ct600.calculations.Ct600AnnualConstants._
object CorporationTaxHelper extends CtTypeConverters with NumberRounding {
def calculateApportionedProfitsChargeableFy1(params: CorporationTaxCalculatorParameters): Int = {
validateAccountingPeriod(params.accountingPeriod)
val fy1: Int = fallsInFinancialYear(params.accountingPeriod.start.value)
val fy1Result = calculateApportionedProfitsChargeableForYear(fy1, params, constantsForTaxYear(TaxYear(fy1)))
roundedToIntHalfUp(fy1Result)
}
def calculateApportionedProfitsChargeableFy2(params: CorporationTaxCalculatorParameters): Int = {
validateAccountingPeriod(params.accountingPeriod)
if (accountingPeriodSpansTwoFinancialYears(params.accountingPeriod)) {
params.profitsChargeableToCT - calculateApportionedProfitsChargeableFy1(params)
} else {
0
}
}
private def calculateApportionedProfitsChargeableForYear(year: Int, params: CorporationTaxCalculatorParameters, constants: CtConstants): BigDecimal = {
val profitsChargeable = BigDecimal(params.profitsChargeableToCT.value)
val apDaysInFy = accountingPeriodDaysInFinancialYear(year, params.accountingPeriod)
val apFyRatio = apDaysInFy / daysInAccountingPeriod(params.accountingPeriod)
val proRataProfitsChargeable = profitsChargeable * apFyRatio
proRataProfitsChargeable
}
}
case class CorporationTaxCalculatorParameters(profitsChargeableToCT: CP295,
accountingPeriod: HmrcAccountingPeriod)
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/calculations/CorporationTaxHelper.scala
|
Scala
|
apache-2.0
| 2,404 |
package breeze.linalg
import breeze.generic.UFunc
import breeze.linalg.support.CanTraverseValues
import breeze.linalg.support.CanTraverseValues.ValuesVisitor
import breeze.macros.expand
import breeze.math.Semiring
/** Computes the product */
object product extends UFunc {
@expand
implicit def reduce[T, @expand.args(Int, Double, Float, Long) S](implicit iter: CanTraverseValues[T, S]): Impl[T, S] =
new Impl[T, S] {
def apply(v: T): S = {
class ProductVisitor extends ValuesVisitor[S] {
var product: S = 1
def visit(a: S): Unit = {
product *= a
}
def zeros(numZero: Int, zeroValue: S): Unit = {
if (numZero > 0)
product = 0
}
}
val visit = new ProductVisitor
iter.traverse(v, visit)
visit.product
}
}
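  // A sketch: product(DenseVector(1.0, 2.0, 3.0)) == 6.0, and any explicit
  // zero reported by the traversal short-circuits the result to 0.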
implicit def reduceSemiring[T, S](implicit iter: CanTraverseValues[T, S], semiring: Semiring[S]): Impl[T, S] =
new Impl[T, S] {
def apply(v: T): S = {
class ProductVisitor extends ValuesVisitor[S] {
var product: S = semiring.one
def visit(a: S): Unit = {
product = semiring.*(product, a)
}
def zeros(numZero: Int, zeroValue: S): Unit = {
if (numZero > 0)
product = semiring.zero
}
}
val visit = new ProductVisitor
iter.traverse(v, visit)
visit.product
}
}
}
|
scalanlp/breeze
|
math/src/main/codegen/breeze/linalg/product.scala
|
Scala
|
apache-2.0
| 1,471 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert.avro
import java.io.InputStream
import java.nio.charset.StandardCharsets
import com.typesafe.config.Config
import org.apache.avro.Schema
import org.apache.avro.file.DataFileStream
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.locationtech.geomesa.convert.avro.AvroConverter._
import org.locationtech.geomesa.convert.avro.AvroConverterFactory.AvroConfigConvert
import org.locationtech.geomesa.convert.{ErrorMode, ParseMode, SimpleFeatureValidator}
import org.locationtech.geomesa.convert2.AbstractConverter.{BasicField, BasicOptions}
import org.locationtech.geomesa.convert2.AbstractConverterFactory.{BasicFieldConvert, BasicOptionsConvert, ConverterConfigConvert, ConverterOptionsConvert, FieldConvert, OptionConvert}
import org.locationtech.geomesa.convert2.TypeInference.{FunctionTransform, InferredType}
import org.locationtech.geomesa.convert2.transforms.Expression
import org.locationtech.geomesa.convert2.{AbstractConverterFactory, TypeInference}
import org.locationtech.geomesa.features.avro._
import org.locationtech.geomesa.features.serialization.ObjectType
import org.locationtech.geomesa.utils.io.WithClose
import org.opengis.feature.simple.SimpleFeatureType
import pureconfig.ConfigObjectCursor
import pureconfig.error.{ConfigReaderFailures, FailureReason}
import scala.util.control.NonFatal
class AvroConverterFactory extends AbstractConverterFactory[AvroConverter, AvroConfig, BasicField, BasicOptions] {
import AvroSimpleFeatureUtils.{AVRO_SIMPLE_FEATURE_USERDATA, AVRO_SIMPLE_FEATURE_VERSION, FEATURE_ID_AVRO_FIELD_NAME}
import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichIterator
import scala.collection.JavaConverters._
override protected val typeToProcess: String = "avro"
override protected implicit def configConvert: ConverterConfigConvert[AvroConfig] = AvroConfigConvert
override protected implicit def fieldConvert: FieldConvert[BasicField] = BasicFieldConvert
override protected implicit def optsConvert: ConverterOptionsConvert[BasicOptions] = BasicOptionsConvert
/**
* Note: only works on Avro files with embedded schemas
*
* @param is input
* @param sft simple feature type, if known ahead of time
* @return
*/
override def infer(is: InputStream, sft: Option[SimpleFeatureType]): Option[(SimpleFeatureType, Config)] = {
try {
WithClose(new DataFileStream[GenericRecord](is, new GenericDatumReader[GenericRecord]())) { dfs =>
val (schema, id, fields, userData) = if (AvroDataFile.canParse(dfs)) {
// this is a file written in the geomesa avro format
val records = dfs.iterator.asScala.take(AbstractConverterFactory.inferSampleSize)
// get the version from the first record
val version =
records.headOption
.flatMap(r => Option(r.get(AVRO_SIMPLE_FEATURE_VERSION)).map(_.asInstanceOf[Int]))
.getOrElse(AvroSimpleFeatureUtils.VERSION)
val nameEncoder = new FieldNameEncoder(version)
val dataSft = AvroDataFile.getSft(dfs)
val fields = dataSft.getAttributeDescriptors.asScala.map { descriptor =>
// some types need a function applied to the underlying avro value
val fn = ObjectType.selectType(descriptor).head match {
case ObjectType.DATE => Some("millisToDate")
case ObjectType.UUID => Some("avroBinaryUuid")
case ObjectType.LIST => Some("avroBinaryList")
case ObjectType.MAP => Some("avroBinaryMap")
case ObjectType.GEOMETRY => Some("geometry") // note: handles both wkt (v1) and wkb (v2)
case _ => None
}
val path = s"avroPath($$1, '/${nameEncoder.encode(descriptor.getLocalName)}')"
val expression = fn.map(f => s"$f($path)").getOrElse(path)
BasicField(descriptor.getLocalName, Some(Expression(expression)))
}
val id = Expression(s"avroPath($$1, '/$FEATURE_ID_AVRO_FIELD_NAME')")
val userData: Map[String, Expression] =
if (dfs.getSchema.getField(AVRO_SIMPLE_FEATURE_USERDATA) == null) { Map.empty } else {
// avro user data is stored as an array of 'key', 'keyClass', 'value', and 'valueClass'
// our converters require global key->expression, so pull out the unique keys
val kvs = scala.collection.mutable.Map.empty[String, Expression]
records.foreach { record =>
val ud = record.get(AVRO_SIMPLE_FEATURE_USERDATA).asInstanceOf[java.util.Collection[GenericRecord]]
ud.asScala.foreach { rec =>
Option(rec.get("key")).map(_.toString).foreach { key =>
kvs.getOrElseUpdate(key, {
var expression = s"avroPath($$1, '/$AVRO_SIMPLE_FEATURE_USERDATA[$$key=$key]/value')"
if (Option(rec.get("valueClass")).map(_.toString).contains("java.util.Date")) {
// dates have to be converted from millis
expression = s"millisToDate($expression)"
}
Expression(expression)
})
}
}
}
kvs.toMap
}
(dataSft, id, fields, userData)
} else {
// this is an arbitrary avro file, create fields based on the schema
val uniqueNames = scala.collection.mutable.HashSet.empty[String]
val types = scala.collection.mutable.ArrayBuffer.empty[InferredType]
def mapField(field: Schema.Field, path: String = ""): Unit = {
// get a valid attribute name
val base = s"${field.name().replaceAll("[^A-Za-z0-9]+", "_")}"
var name = base
var i = 0
while (!uniqueNames.add(name)) {
name = s"${base}_$i"
i += 1
}
// checks for nested array/map types we can handle
def isSimple: Boolean = field.schema().getFields.asScala.map(_.schema().getType).forall {
case Schema.Type.STRING => true
case Schema.Type.INT => true
case Schema.Type.LONG => true
case Schema.Type.FLOAT => true
case Schema.Type.DOUBLE => true
case Schema.Type.BOOLEAN => true
case _ => false
}
val transform = FunctionTransform("avroPath(", s",'$path/${field.name}')")
field.schema().getType match {
case Schema.Type.STRING => types += InferredType(name, ObjectType.STRING, transform)
case Schema.Type.BYTES => types += InferredType(name, ObjectType.BYTES, transform)
case Schema.Type.INT => types += InferredType(name, ObjectType.INT, transform)
case Schema.Type.LONG => types += InferredType(name, ObjectType.LONG, transform)
case Schema.Type.FLOAT => types += InferredType(name, ObjectType.FLOAT, transform)
case Schema.Type.DOUBLE => types += InferredType(name, ObjectType.DOUBLE, transform)
case Schema.Type.BOOLEAN => types += InferredType(name, ObjectType.BOOLEAN, transform)
case Schema.Type.ARRAY => if (isSimple) { types += InferredType(name, ObjectType.LIST, transform) }
case Schema.Type.MAP => if (isSimple) { types += InferredType(name, ObjectType.MAP, transform) }
case Schema.Type.FIXED => types += InferredType(name, ObjectType.BYTES, transform)
case Schema.Type.ENUM => types += InferredType(name, ObjectType.STRING, transform.copy(suffix = transform.suffix + "::string"))
case Schema.Type.UNION => types += InferredType(name, ObjectType.STRING, transform.copy(suffix = transform.suffix + "::string"))
case Schema.Type.RECORD => field.schema().getFields.asScala.foreach(mapField(_, s"$path/${field.name}"))
case _ => // no-op
}
}
dfs.getSchema.getFields.asScala.foreach(mapField(_))
// check if we can derive a geometry field
TypeInference.deriveGeometry(types).foreach(g => types += g)
val dataSft = TypeInference.schema("inferred-avro", types)
// note: avro values are always stored at index 1
val id = Expression("md5(string2bytes($1::string))")
val fields = types.map(t => BasicField(t.name, Some(Expression(t.transform.apply(1)))))
(dataSft, id, fields, Map.empty[String, Expression])
}
// validate the existing schema, if any
if (sft.exists(_.getAttributeDescriptors.asScala != schema.getAttributeDescriptors.asScala)) {
throw new IllegalArgumentException("Inferred schema does not match existing schema")
}
val converterConfig = AvroConfig(typeToProcess, SchemaEmbedded, Some(id), Map.empty, userData)
val options = BasicOptions(SimpleFeatureValidator.default, ParseMode.Default, ErrorMode.Default,
StandardCharsets.UTF_8, verbose = true)
val config = configConvert.to(converterConfig)
.withFallback(fieldConvert.to(fields))
.withFallback(optsConvert.to(options))
.toConfig
Some((schema, config))
}
} catch {
case NonFatal(e) =>
logger.debug(s"Could not infer Avro converter from input:", e)
None
}
}
}
object AvroConverterFactory {
object AvroConfigConvert extends ConverterConfigConvert[AvroConfig] with OptionConvert {
override protected def decodeConfig(cur: ConfigObjectCursor,
`type`: String,
idField: Option[Expression],
caches: Map[String, Config],
userData: Map[String, Expression]): Either[ConfigReaderFailures, AvroConfig] = {
def schemaOrFile(schema: Option[String],
schemaFile: Option[String]): Either[ConfigReaderFailures, SchemaConfig] = {
(schema, schemaFile) match {
case (Some(s), None) if s.equalsIgnoreCase(SchemaEmbedded.name) => Right(SchemaEmbedded)
case (Some(s), None) => Right(SchemaString(s))
case (None, Some(s)) => Right(SchemaFile(s))
case _ =>
val reason = new FailureReason {
override val description: String = "Exactly one of 'schema' or 'schema-file' must be defined"
}
cur.failed(reason)
}
}
for {
schema <- optional(cur, "schema").right
schemaFile <- optional(cur, "schema-file").right
either <- schemaOrFile(schema, schemaFile).right
} yield {
AvroConfig(`type`, either, idField, caches, userData)
}
}
override protected def encodeConfig(config: AvroConfig, base: java.util.Map[String, AnyRef]): Unit = {
config.schema match {
case SchemaEmbedded => base.put("schema", SchemaEmbedded.name)
case SchemaString(s) => base.put("schema", s)
case SchemaFile(s) => base.put("schema-file", s)
}
}
}
}
|
ddseapy/geomesa
|
geomesa-convert/geomesa-convert-avro/src/main/scala/org/locationtech/geomesa/convert/avro/AvroConverterFactory.scala
|
Scala
|
apache-2.0
| 11,757 |
trait Monad[T]
class Foo
object Foo {
given Monad[Foo]
}
opaque type Bar = Foo
object Bar {
given Monad[Bar] = summon[Monad[Foo]]
}
object Test {
val mf = summon[Monad[Foo]]
val mb = summon[Monad[Bar]]
}
|
som-snytt/dotty
|
tests/pos/i6716.scala
|
Scala
|
apache-2.0
| 213 |
package reductions
import org.scalameter._
import common._
object LineOfSightRunner {
val standardConfig = config(
Key.exec.minWarmupRuns -> 40,
Key.exec.maxWarmupRuns -> 80,
Key.exec.benchRuns -> 100,
Key.verbose -> true
) withWarmer(new Warmer.Default)
def main(args: Array[String]) {
val length = 10000000
val input = (0 until length).map(_ % 100 * 1.0f).toArray
val output = new Array[Float](length + 1)
val seqtime = standardConfig measure {
LineOfSight.lineOfSight(input, output)
}
println(s"sequential time: $seqtime ms")
val partime = standardConfig measure {
LineOfSight.parLineOfSight(input, output, 10000)
}
println(s"parallel time: $partime ms")
println(s"speedup: ${seqtime / partime}")
}
}
object LineOfSight {
def max(a: Float, b: Float): Float = if (a > b) a else b
def lineOfSight(input: Array[Float], output: Array[Float]): Unit = {
downsweepSequential(input, output, 0, 0, input.length)
}
sealed abstract class Tree {
def maxPrevious: Float
}
case class Node(left: Tree, right: Tree) extends Tree {
val maxPrevious = max(left.maxPrevious, right.maxPrevious)
}
case class Leaf(from: Int, until: Int, maxPrevious: Float) extends Tree
/** Traverses the specified part of the array and returns the maximum angle.
*/
def upsweepSequential(input: Array[Float], from: Int, until: Int): Float = {
input.slice(from, until).zipWithIndex.map { case (height, idx) =>
height / (from + idx).toFloat
}.max
}
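  // A worked sketch: for input Array(0f, 7f, 5f, 33f, 48f),
  // upsweepSequential(input, 1, 5) == 12f, since the angles are
  // 7/1 = 7, 5/2 = 2.5, 33/3 = 11 and 48/4 = 12.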
/** Traverses the part of the array starting at `from` and until `end`, and
* returns the reduction tree for that part of the array.
*
* The reduction tree is a `Leaf` if the length of the specified part of the
* array is smaller or equal to `threshold`, and a `Node` otherwise.
* If the specified part of the array is longer than `threshold`, then the
* work is divided and done recursively in parallel.
*/
def upsweep(input: Array[Float], from: Int, end: Int,
threshold: Int): Tree = {
    if (end - from <= threshold) {
      Leaf(from, end, upsweepSequential(input, from, end))
    } else {
      val mid = (end + from) / 2
      val (l, r) = parallel(
        upsweep(input, from, mid, threshold),
        upsweep(input, mid, end, threshold)
      )
      Node(l, r)
    }
}
/** Traverses the part of the `input` array starting at `from` and until
* `until`, and computes the maximum angle for each entry of the output array,
* given the `startingAngle`.
*/
def downsweepSequential(input: Array[Float], output: Array[Float],
startingAngle: Float, from: Int, until: Int): Unit = {
if (from >= until) return
val currentAngle = max(input(from) / from.toFloat, startingAngle)
output(from) = currentAngle
downsweepSequential(
input, output,
currentAngle, from + 1, until
)
}
/** Pushes the maximum angle in the prefix of the array to each leaf of the
   * reduction `tree` in parallel, and then calls `downsweepSequential` to write
* the `output` angles.
*/
def downsweep(input: Array[Float], output: Array[Float], startingAngle: Float,
tree: Tree): Unit = {
tree match {
      case Leaf(from, until, _) =>
        downsweepSequential(input, output, startingAngle, from, until)
      case Node(left, right) =>
        parallel(
          downsweep(input, output, startingAngle, left),
          downsweep(input, output, left.maxPrevious, right)
        )
}
}
/** Compute the line-of-sight in parallel. */
def parLineOfSight(input: Array[Float], output: Array[Float],
threshold: Int): Unit = {
val t = upsweep(input, 0, input.length, threshold)
downsweep(input, output, 0, t)
}
}
|
shouya/thinking-dumps
|
parprog/reductions/src/main/scala/reductions/LineOfSight.scala
|
Scala
|
mit
| 3,774 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package knobs
/** A directive in a configuration file */
sealed abstract class Directive extends Product with Serializable
final case class Import(path: Path) extends Directive
final case class Bind(name: Name, value: CfgValue) extends Directive
final case class Group(name: Name, directives: List[Directive]) extends Directive
|
oncue/knobs
|
core/src/main/scala/knobs/Directive.scala
|
Scala
|
apache-2.0
| 1,138 |
package org.nexbook.performance.app
import com.typesafe.config.ConfigFactory
import org.nexbook.tags.Performance
import org.slf4j.LoggerFactory
/**
* Created by milczu on 1/2/16.
*/
class OrderBookAppScenarioB05PerformanceTest extends OrderBookAppPerformanceTest {
val logger = LoggerFactory.getLogger(classOf[OrderBookAppScenarioB05PerformanceTest])
val scenarioName = "scenario_B_05"
System.setProperty("config.name", s"scenarios/$scenarioName")
val config = ConfigFactory.load(s"config/scenarios/$scenarioName").withFallback(ConfigFactory.load("config/general"))
override val benchmarkConfig = config.getConfig("benchmark")
override val testDataPath = s"src/test/resources/data/${benchmarkConfig.getString("testDataFile")}"
override val resultLog = s"$appRoot/logs/test/$scenarioName.log"
override val expectedTotalOrdersCount = benchmarkConfig.getInt("expectedOrderCount")
import org.scalatest.time.SpanSugar._
s"OrderBook: $scenarioName" should {
"work fast!" taggedAs Performance in {
failAfter(600 seconds) {
executeTest()
}
}
}
}
|
milczarekIT/nexbook
|
src/test/scala/org/nexbook/performance/app/OrderBookAppScenarioB05PerformanceTest.scala
|
Scala
|
apache-2.0
| 1,081 |
package net.mentalarray.doozie
/**
* Created by kdivincenzo on 9/30/14.
*/
case class WorkflowStep(action: WorkflowTask, onError: Option[TasksSequence], always: Option[TasksSequence])
// Value object for WorkflowStep (entry in TaskGraph or TaskSequence)
object WorkflowStep {
def apply(action: WorkflowTask) : WorkflowStep = {
WorkflowStep(action, None, None)
}
def apply(action: WorkflowTask, onError: TasksSequence) : WorkflowStep = {
WorkflowStep(action, Some(onError), None)
}
def apply(action: WorkflowTask, onError: TasksSequence, always: TasksSequence) : WorkflowStep = {
WorkflowStep(action, Some(onError), Some(always))
}
}
|
antagonist112358/tomahawk
|
workflow-engine/src/net/mentalarray/doozie/WorkflowStep.scala
|
Scala
|
apache-2.0
| 664 |
/*
* Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.forclift.bugs
import edu.ucla.cs.starai.forclift.examples.models._
class Bug1Model(
domainSize: Int,
knownElements: Seq[String] = Nil,
evidence: Seq[String] = Nil) extends WeightedCNFModel {
def theoryString = (
"domain Person " + domainSize + knownElements.mkString(" {", ",", "}") + "\n" +
"""
predicate f4(Person,Person) 4.0551999668446745 1.0
predicate smokes1(Person,Person) 1.0 1.0
predicate friends1(Person,Person) 1.0 1.0
predicate friends(Person,Person) 1.0 1.0
predicate smokes2(Person,Person) 1.0 1.0
predicate theta2(Person,Person) 0.022639774517592998 0.977360225482407
friends(X,Y) ∨ ¬friends1(Y,X), Y≠X
smokes1(X,Y) ∨ ¬theta2(X,Y), Y≠X
smokes1(X,Y) ∨ ¬f4(X,Y) ∨ ¬smokes2(X,Y) ∨ ¬friends1(X,Y), Y≠X
"""
+ evidence.mkString("\n"))
/*
"""
predicate f4(Person,Person) 4.0551999668446745 1.0
predicate smokes(Person) 1.0 1.0
predicate smokes1(Person,Person) 1.0 1.0
predicate theta3(Person,Person) 0.024201427434700842 0.9757985725652991
predicate theta2(Person,Person) 0.022639774517592998 0.977360225482407
predicate friends1(Person,Person) 1.0 1.0
predicate friends(Person,Person) 1.0 1.0
predicate theta1(Person,Person) 0.003827671862274425 0.9961723281377256
predicate smokes2(Person,Person) 1.0 1.0
friends(X,Y) ∨ ¬friends1(Y,X), Y≠X
friends(X,Y) ∨ ¬theta1(X,Y), Y≠X
friends1(X,Y) ∨ ¬friends(Y,X), Y≠X
smokes1(X,Y) ∨ ¬theta2(X,Y), Y≠X
smokes1(X,Y) ∨ ¬f4(X,Y) ∨ ¬smokes2(X,Y) ∨ ¬friends1(X,Y), Y≠X
smokes2(X,Y) ∨ ¬theta3(X,Y), Y≠X
theta1(X,Y) ∨ ¬friends(X,Y), Y≠X
theta2(X,Y) ∨ ¬smokes1(X,Y), Y≠X
theta3(X,Y) ∨ ¬smokes2(X,Y), Y≠X
f4(X,Y) ∨ friends1(X,Y), X≠Y
f4(X,Y) ∨ smokes2(X,Y), Y≠X
f4(X,Y) ∨ ¬smokes1(X,Y), X≠Y
"""
*/
}
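A brief sketch of how this reproduction model might be exercised; it assumes only the theoryString member shown above:
import edu.ucla.cs.starai.forclift.bugs.Bug1Model
object Bug1ModelSketch extends App {
  val model = new Bug1Model(domainSize = 3, knownElements = Seq("alice"))
  println(model.theoryString) // prints the weighted CNF over a 3-element Person domain
}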
|
UCLA-StarAI/Forclift
|
src/test/scala/edu/ucla/cs/starai/forclift/bugs/Bug1Model.scala
|
Scala
|
apache-2.0
| 2,427 |
package org.alitouka.spark.dbscan.spatial.rdd
import org.apache.spark.Partitioner
import org.alitouka.spark.dbscan.spatial.{PointSortKey, Point, Box}
import org.alitouka.spark.dbscan.BoxId
/** A partitioner which assigns each entry in a dataset to a [[org.alitouka.spark.dbscan.spatial.Box]]
*
* @param boxes A collection of [[org.alitouka.spark.dbscan.spatial.Box]]es
*/
private [dbscan] class BoxPartitioner (val boxes: Iterable[Box]) extends Partitioner {
assert (boxes.forall(_.partitionId >= 0))
private val boxIdsToPartitions = generateBoxIdsToPartitionsMap(boxes)
override def numPartitions: Int = boxes.size
def getPartition(key: Any): Int = {
key match {
case k: PointSortKey => boxIdsToPartitions(k.boxId)
case boxId: BoxId => boxIdsToPartitions(boxId)
case pt: Point => boxIdsToPartitions(pt.boxId)
case _ => 0 // throw an exception?
}
}
private def generateBoxIdsToPartitionsMap (boxes: Iterable[Box]): Map[BoxId, Int] = {
boxes.map ( x => (x.boxId, x.partitionId)).toMap
}
}
private [dbscan] object BoxPartitioner {
def assignPartitionIdsToBoxes (boxes: Iterable[Box]): Iterable[Box] = {
boxes.zip (0 until boxes.size).map ( x => x._1.withPartitionId(x._2) )
}
}
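An illustrative sketch of the intended wiring; since BoxPartitioner is private[dbscan], this would have to live under the org.alitouka.spark.dbscan package, and the RDD is assumed to be keyed by PointSortKey:
package org.alitouka.spark.dbscan
import org.apache.spark.rdd.RDD
import org.alitouka.spark.dbscan.spatial.{Box, Point, PointSortKey}
import org.alitouka.spark.dbscan.spatial.rdd.BoxPartitioner
private[dbscan] object BoxPartitionerSketch {
  def repartitionByBox(rdd: RDD[(PointSortKey, Point)], rawBoxes: Iterable[Box]): RDD[(PointSortKey, Point)] = {
    val boxes = BoxPartitioner.assignPartitionIdsToBoxes(rawBoxes) // give each box a partition id first
    rdd.partitionBy(new BoxPartitioner(boxes))
  }
}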
|
isaacboucinha/CardioStream
|
web-app/src/main/scala/org/alitouka/spark/dbscan/spatial/rdd/BoxPartitioner.scala
|
Scala
|
apache-2.0
| 1,251 |
package spire
package math
import org.scalatest._
import spire.algebra._
import spire.implicits._
class JetTest extends FunSuite with Matchers {
// Default test with 3-dimensional Jet's
implicit val dim = JetDim(3)
val maxError = 1.0e-12
test("JetDim") {
dim.dimension should equal (3)
}
test("Jet(scalar, array) constructor") {
val a = Array[Double](2.3, 3.4, 4.5)
val j = new Jet[Double](8.9, a)
j.dimension should equal (dim.dimension)
j.jetDimension.dimension should equal (dim.dimension)
j.infinitesimal.size should equal (dim.dimension)
j.real should equal (8.9)
j.infinitesimal.toArray should equal (a.toArray)
}
test("Jet() constructor yields a zero jet") {
val jz = Jet[Double]()
jz.real should equal (0)
jz.isReal should be (true)
jz.infinitesimal.size should equal (dim.dimension)
jz.isInfinitesimal should be (false)
jz.isZero should be (true)
}
test("Jet.zero yields a zero jet") {
val jzz = Jet.zero[Double]
jzz.real should equal (0)
jzz.isReal should be (true)
jzz.infinitesimal.size should equal (dim.dimension)
jzz.isInfinitesimal should be (false)
jzz.isZero should be (true)
}
test("Jet.one yields a unitary jet") {
val jo = Jet.one[Double]
jo.real should equal (1.0)
jo.isReal should be (true)
jo.infinitesimal.size should equal (dim.dimension)
jo.isInfinitesimal should be (false)
}
test("Jet.h yields an infinitesimal jet") {
val jk = Jet.h[Double](k = 1)
jk.real should equal (0.0)
jk.isReal should be (false)
jk.isInfinitesimal should be (true)
jk.infinitesimal.toArray should equal (Array(0.0, 1.0, 0.0))
}
test("Jet(x, k) yields a jet for evaluating a function and its k-th partial derivative") {
val jk = Jet(2.3, k = 2)
jk.real should equal (2.3)
jk.isReal should be (false)
jk.isInfinitesimal should be (false)
jk.infinitesimal.toArray should equal (Array(0.0, 0.0, 1.0))
}
test("Jet(x) constructors from scalars") {
val jf = Jet(2.0f)
jf.real should equal (2.0f)
jf.isReal should be (true)
jf.infinitesimal.size should equal (dim.dimension)
jf.isInfinitesimal should be (false)
val jd = Jet(2.6)
jd.real should equal (2.6)
jd.isReal should be (true)
jd.infinitesimal.size should equal (dim.dimension)
jd.isInfinitesimal should be (false)
val jbd = Jet(BigDecimal(2847.694984))
jbd.real should equal (BigDecimal(2847.694984))
jbd.isReal should be (true)
jbd.infinitesimal.size should equal (dim.dimension)
jbd.isInfinitesimal should be (false)
val jfi = Jet.fromInt[Float](2)
jfi.real should equal (2.0)
jfi.isReal should be (true)
jfi.infinitesimal.size should equal (dim.dimension)
jfi.isInfinitesimal should be (false)
}
test("Conversions from scalars") {
val jfi = Jet.intToJet(2)
jfi.real should equal (2.0)
jfi.isReal should be (true)
jfi.infinitesimal.size should equal (dim.dimension)
jfi.isInfinitesimal should be (false)
val jfl = Jet.longToJet(2L)
jfl.real should equal (2.0)
jfl.isReal should be (true)
jfl.infinitesimal.size should equal (dim.dimension)
jfl.isInfinitesimal should be (false)
val jff = Jet.floatToJet(2.47f)
jff.real should equal (2.47f)
jff.isReal should be (true)
jff.infinitesimal.size should equal (dim.dimension)
jff.isInfinitesimal should be (false)
val jfd = Jet.doubleToJet(2.47)
jfd.real should equal (2.47)
jfd.isReal should be (true)
jfd.infinitesimal.size should equal (dim.dimension)
jfd.isInfinitesimal should be (false)
val jfbi = Jet.bigIntToJet(BigInt(247847))
jfbi.real should equal (BigDecimal(247847))
jfbi.isReal should be (true)
jfbi.infinitesimal.size should equal (dim.dimension)
jfbi.isInfinitesimal should be (false)
val jfbd = Jet.bigDecimalToJet(BigDecimal(247847.28375))
jfbd.real should equal (BigDecimal(247847.28375))
jfbd.isReal should be (true)
jfbd.infinitesimal.size should equal (dim.dimension)
jfbd.isInfinitesimal should be (false)
}
test("Conversions to scalars") {
val j = Jet(8.7, Array(7.97, 9.31, 0.0))
j.doubleValue should equal (8.7)
j.floatValue should equal (8.7f)
j.longValue should equal (8L)
j.intValue should equal (8)
j.shortValue should equal (8.toShort)
j.byteValue should equal (8.toByte)
}
test("Conversion to tuple") {
val j = Jet(-3.1, Array(1.0, 2.1, 3.3))
val tj = j.asTuple
tj._1 should be(j.real)
tj._2 should be(j.infinitesimal)
}
test("Signed") {
Jet(9.1).signum should equal (1)
Jet(-3.1, Array(1.0, 2.1, 3.3)).signum should equal (-1)
}
test("isWhole iff real and integer") {
Jet(4.0f).isWhole should be (true)
Jet(9.2).isWhole should be (false)
Jet(3.0f, Array(1.0f, 2.0f, 3.0f)).isWhole should be (false)
}
test("isValidInt iff real, whole and in range") {
Jet(-4.0).isValidInt should be (true)
Jet(4.1).isValidInt should be (false)
Jet(3.0f, Array(1.0f, 2.0f, 3.0f)).isValidInt should be (false)
Jet(Int.MinValue.toDouble - 1000.0).isValidInt should be (false)
Jet(Int.MaxValue.toDouble + 1000.0).isValidInt should be (false)
}
test("Equality-comparable and hashable") {
val r = 13.0f
val i = Array(1.0f, 2.0f, 3.0f)
val a = Jet(r, i)
val b = Jet(r.toDouble, i.map(_.toDouble))
val c = Jet(b.real.toFloat, b.infinitesimal.map(_.toFloat))
// Value-based, symmetric, reflexive, transitive
a should equal (a)
a should equal (b)
b should equal (a)
b should equal (c)
a should equal (c)
// Same for eqv
val bf: Jet[Float] = a.copy()
val cf: Jet[Float] = a.copy()
a eqv a should be (true)
a eqv bf should be (true)
bf eqv a should be (true)
bf eqv cf should be (true)
a eqv cf should be (true)
// Reverse for neqv
a neqv a should be (false)
a neqv bf should be (false)
bf neqv a should be (false)
bf neqv cf should be (false)
a neqv cf should be (false)
// Likewise for hashes
a.hashCode should equal (a.hashCode)
a.hashCode should equal (b.hashCode)
b.hashCode should equal (a.hashCode)
b.hashCode should equal (c.hashCode)
a.hashCode should equal (c.hashCode)
}
test("Prints into a human-readable string") {
Jet(2.4, k = 2).toString should equal ("(2.4 + [0.0, 0.0, 1.0]h)")
}
test("Unary minus operator") {
val r = -13.0f
val i = Array(1.0f, 2.0f, 3.0f)
val a = Jet(r, i)
-a should equal (new Jet(-r, -i))
-(-a) should equal (a)
}
test("Arithmetic combinations with scalars") {
val r = 13.0f
val i = Array(1.0f, 2.0f, 3.0f)
val a = Jet(r, i)
val b = 97.0f
(a + b) should equal (a.copy(real = r + b))
(a - b) should equal (a.copy(real = r - b))
(a * b) should equal (a.copy(real = r * b, infinitesimal = i :* b))
(a / b) should equal (a.copy(real = r / b, infinitesimal = i :/ b))
}
test("Arithmetic combinations with scalar and non-scalar Jets") {
val a = Jet(1.0, Array(2.0, 3.0, 4.0))
val b = Jet(2.0)
(a + b) should equal (Jet(a.real + b.real, a.infinitesimal))
(a - b) should equal (Jet(a.real - b.real, a.infinitesimal))
(a * b) should equal (Jet(a.real * b.real, a.infinitesimal :* b.real))
(a / b) should equal (Jet(a.real / b.real, a.infinitesimal :/ b.real))
}
test("Evaluation and differentiation of arithmetic operations") {
def a[@sp(Double) T : Field : Trig](x: T): T = 2.0 * x + spire.math.cos[T](x)
def b[@sp(Double) T : Field : Trig](x: T): T = spire.math.sin(x) - spire.math.log[T](x) + 7.3
def da(x: Double) = 2.0 - scala.math.sin(x)
def db(x: Double) = scala.math.cos(x) - 1/x
val x = 894.3287562
val jx = x + Jet.h[Double](0)
def a_plus_b[@sp(Double) T : Field : Trig](x: T): T = a(x) + b(x)
a_plus_b(jx).real should be(a(x) + b(x))
a_plus_b(jx).infinitesimal(0) should be(da(x) + db(x) +- maxError)
def a_minus_b[@sp(Double) T : Field : Trig](x: T): T = a(x) - b(x)
a_minus_b(jx).real should be(a(x) - b(x))
a_minus_b(jx).infinitesimal(0) should be(da(x) - db(x) +- maxError)
def a_times_b[@sp(Double) T : Field : Trig](x: T): T = a(x) * b(x)
a_times_b(jx).real should be(a(x) * b(x))
a_times_b(jx).infinitesimal(0) should be(da(x) * b(x) + a(x) * db(x) +- maxError)
def a_div_b[@sp(Double) T : Field : Trig](x: T): T = a(x) / b(x)
a_div_b(jx).real should be(a(x) / b(x))
a_div_b(jx).infinitesimal(0) should be((da(x) * b(x) - a(x) * db(x)) / (b(x) * b(x)) +- maxError)
}
test("Evaluation and differentiation of exponential and logarithm") {
val x = 27.98847750
val jx = x + Jet.h[Double](0)
val lx = spire.math.log(jx)
lx.real should be(scala.math.log(x))
lx.infinitesimal(0) should be(1.0 / x +- maxError)
val ex = spire.math.exp(jx)
ex.real should be(scala.math.exp(x))
ex.infinitesimal(0) should be (scala.math.exp(x) +- maxError)
}
test("Evaluation and differentiation of powers and roots") {
// Constant integer exponent: x ** n
val x = 9.3874983
val jx = x + Jet.h[Double](0)
val n = 5
val jxn = jx.pow(n)
jxn.real should be(scala.math.pow(x, n) +- maxError)
jxn.infinitesimal(0) should be(n * scala.math.pow(x, n - 1) +- maxError)
// Constant double exponent: x ** d
val d = 0.387
val jxd = jx.pow(d)
jxd.real should be(scala.math.pow(x, d))
jxd.infinitesimal(0) should be(scala.math.exp(d * scala.math.log(x)) * d / x +- maxError)
// Variable base and exponent: sin(x) ** x
val jex = spire.math.sin(jx)
val jp = jx.pow(jex)
jp.real should be(scala.math.pow(x, scala.math.sin(x)) +- maxError)
jp.infinitesimal(0) should be(scala.math.exp(scala.math.sin(x) * scala.math.log(x)) *
(scala.math.cos(x) * scala.math.log(x) + scala.math.sin(x) / x) +- maxError)
// Square root
val sq = spire.math.sqrt(jx)
sq.real should be(scala.math.sqrt(x))
sq.infinitesimal(0) should be(0.5 / scala.math.sqrt(x))
}
test("Evaluation and differentiation of trigonometric functions") {
val x = 0.8377469
val jx = x + Jet.h[Double](0)
spire.math.cos(jx).real should be(scala.math.cos(x))
spire.math.cos(jx).infinitesimal(0) should be(-scala.math.sin(x) +- maxError)
spire.math.sin(jx).real should be(scala.math.sin(x))
spire.math.sin(jx).infinitesimal(0) should be(scala.math.cos(x) +- maxError)
spire.math.tan(jx).real should be(scala.math.tan(x))
spire.math.tan(jx).infinitesimal(0) should be(
1.0 + scala.math.tan(x) * scala.math.tan(x) +- maxError)
}
test("Evaluation and differentiation of inverse trigonometric functions") {
val x = 0.133645
val jx = x + Jet.h[Double](0)
spire.math.acos(jx).real should be(scala.math.acos(x))
spire.math.acos(jx).infinitesimal(0) should be(-1.0 / scala.math.sqrt(1.0 - x * x) +- maxError)
spire.math.asin(jx).real should be(scala.math.asin(x))
spire.math.asin(jx).infinitesimal(0) should be(1.0 / scala.math.sqrt(1.0 - x * x) +- maxError)
spire.math.atan(jx).real should be(scala.math.atan(x))
spire.math.atan(jx).infinitesimal(0) should be(1.0 / (1.0 + x * x) +- maxError)
val y = 0.857264
val jy = y + Jet.h[Double](1)
val axy2 = spire.math.atan2(jy, jx)
spire.math.atan2(jy, jx).real should be(scala.math.atan2(y, x))
axy2.infinitesimal(0) should be(-y / (x * x + y * y) +- maxError)
axy2.infinitesimal(1) should be(x / (x * x + y * y) +- maxError)
}
test("Evaluation and differentiation of hyperbolic functions") {
val x = 0.9472836
val jx = x + Jet.h[Double](0)
spire.math.cosh(jx).real should be(scala.math.cosh(x))
spire.math.cosh(jx).infinitesimal(0) should be(scala.math.sinh(x) +- maxError)
spire.math.sinh(jx).real should be(scala.math.sinh(x))
spire.math.sinh(jx).infinitesimal(0) should be(scala.math.cosh(x) +- maxError)
spire.math.tanh(jx).real should be(scala.math.tanh(x))
spire.math.tanh(jx).infinitesimal(0) should be(
1.0 - scala.math.tanh(x) * scala.math.tanh(x) +- maxError)
}
test("Chain-rule differentiation") {
def a[@sp(Double) T : Field : Trig](x: T): T = 2.0 * x * x - 3.14 * x + 2.71
def b[@sp(Double) T : Field : Trig](x: T): T = 3.14 * x * x - spire.math.tan(x)
def c[@sp(Double) T : Field : Trig](x: T): T = spire.math.acos(x) * spire.math.sin(x) + x
def abc[@sp(Double) T : Field : Trig](x: T): T = a(b(c(x)))
def da(x: Double) = 4.0 * x - 3.14
def db(x: Double) = 3.14 * 2.0 * x - (1.0 + scala.math.tan(x) * scala.math.tan(x))
def dc(x: Double) = 1.0 + (-1.0 / scala.math.sqrt(1.0 - x * x)) * scala.math.sin(x) +
scala.math.acos(x) * scala.math.cos(x)
def dabc(x: Double) = da(b(c(x))) * db(c(x)) * dc(x)
val x = 0.293745
val jx = x + Jet.h[Double](0)
a(jx).infinitesimal(0) should be(da(x) +- maxError)
b(jx).infinitesimal(0) should be(db(x) +- maxError)
c(jx).infinitesimal(0) should be(dc(x) +- maxError)
abc(jx).infinitesimal(0) should be(dabc(x) +- maxError)
abc(jx).real should be(abc(x))
}
test("Evaluation and differentiation of a generic function") {
def func[@sp(Double) T : Field : Trig](x: T): T = 3.14 * x * x - spire.math.tan(x)
def dfunc(x: Double) = 3.14 * 2.0 * x - (1.0 + scala.math.tan(x) * scala.math.tan(x))
val x = 0.293745
val jx = x + Jet.h[Double](0)
val jfunc = func(jx)
jfunc.real should be(func(x))
jfunc.infinitesimal(0) should be(dfunc(x) +- maxError)
}
test("Evaluation and differentiation of a generic function of two variables") {
def func[@sp(Double) T : Field : Trig](x: T, y: T): T = 3.14 * x * y - spire.math.tan(x - y)
def dfuncX(x: Double, y: Double) =
3.14 * y - (1.0 + scala.math.tan(x - y) * scala.math.tan(x - y))
def dfuncY(x: Double, y: Double) =
3.14 * x + (1.0 + scala.math.tan(x - y) * scala.math.tan(x - y))
val x = 0.293745
val y = 1.2983764
val jx = x + Jet.h[Double](0)
val jy = y + Jet.h[Double](1)
val jfunc = func(jx, jy)
jfunc.real should be(func(x, y))
jfunc.infinitesimal(0) should be(dfuncX(x, y) +- maxError)
jfunc.infinitesimal(1) should be(dfuncY(x, y) +- maxError)
}
}
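The suite above exercises spire's forward-mode automatic differentiation; a compact standalone sketch of the same mechanism, using only APIs already seen in the tests:
import spire.implicits._
import spire.math.{Jet, JetDim}
object JetSketch extends App {
  implicit val dim: JetDim = JetDim(1)
  val jx = 2.0 + Jet.h[Double](0) // x = 2, carrying an infinitesimal in slot 0
  val f = jx * jx * jx            // f(x) = x^3
  println(f.real)                 // 8.0  = f(2)
  println(f.infinitesimal(0))     // 12.0 = f'(2) = 3 * 2^2
}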
|
adampingel/spire
|
tests/src/test/scala/spire/math/JetTest.scala
|
Scala
|
mit
| 14,288 |
package com.github.ldaniels528.trifecta.io
import scala.concurrent.{ExecutionContext, Future}
/**
 * This class acts as a wrapper for an asynchronous job that processes
 * an input, an output, or both.
* @author [email protected]
*/
case class AsyncIO(task: Future[_], counter: IOCounter) {
val startTime: Long = counter.startTimeMillis
val endTime: Long = counter.lastUpdateMillis
def getCount: Seq[IOCount] = Seq(counter.get)
}
/**
* Asynchronous I/O
* @author [email protected]
*/
object AsyncIO {
/**
* Syntactic sugar for executing asynchronous I/O as an executable block
* @param block the given code block
* @return the asynchronous I/O instance
*/
def apply(block: IOCounter => Unit)(implicit ec: ExecutionContext): AsyncIO = {
val counter = IOCounter(System.currentTimeMillis())
    AsyncIO(Future(block(counter)), counter)
}
}
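A minimal call-site sketch; the block body is elided because IOCounter's update API is not shown here:
object AsyncIOSketch extends App {
  import scala.concurrent.ExecutionContext.Implicits.global
  import com.github.ldaniels528.trifecta.io.AsyncIO
  val io = AsyncIO { counter =>
    // perform the read/write work here, updating `counter` as records flow
    ()
  }
  io.task.foreach(_ => println(io.getCount))
}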
|
ldaniels528/trifecta
|
app-modules/core/src/main/scala/com/github/ldaniels528/trifecta/io/AsyncIO.scala
|
Scala
|
apache-2.0
| 878 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.util.Timeout
import org.scalatest._
import org.scalatest.concurrent.{ Eventually, ScalaFutures }
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import play.api.inject.BindingKey
import play.api.libs.json.{ JsValue, Json, Writes }
import play.api.test.WsTestClient
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.ClassTag
import scala.util.Try
object Person {
implicit val format = Json.format[Person]
implicit class ValueObjectOps(val self: Person) {
def toJson: JsValue = Json.toJson(self)
}
implicit class IterableOps(val self: Iterable[Person]) {
def toJson: JsValue = Json.toJson(self)
}
}
final case class Person(firstName: String, age: Int)
class TestSpec extends FlatSpec
with Matchers
with GivenWhenThen
with OptionValues
with TryValues
with ScalaFutures
with WsTestClient
with BeforeAndAfterAll
with BeforeAndAfterEach
with Eventually
with GuiceOneServerPerSuite {
def getComponent[A: ClassTag] = app.injector.instanceOf[A]
def getNamedComponent[A](name: String)(implicit ct: ClassTag[A]): A =
app.injector.instanceOf[A](BindingKey(ct.runtimeClass.asInstanceOf[Class[A]]).qualifiedWith(name))
// set the port number of the HTTP server
override lazy val port: Int = getNamedComponent[Int]("test.port")
implicit val timeout: Timeout = getComponent[Timeout]
implicit val pc: PatienceConfig = PatienceConfig(timeout = 30.seconds, interval = 300.millis)
implicit val system: ActorSystem = getComponent[ActorSystem]
implicit val ec: ExecutionContext = getComponent[ExecutionContext]
implicit val mat: Materializer = getComponent[Materializer]
// ================================== Supporting Operations ====================================
def id: String = java.util.UUID.randomUUID().toString
implicit class PimpedFuture[T](self: Future[T]) {
def toTry: Try[T] = Try(self.futureValue)
}
/**
   * Final vals get inlined in bytecode, just like Java's 'static final' fields.
   * Leaving `final` off the val means the constant (right-hand side) is not inlined in bytecode.
* see: http://stackoverflow.com/questions/13412386/why-are-private-val-and-private-final-val-different
*
* To be a constant, the first letter needs to be uppercase:
* see: http://www.artima.com/pins1ed/functional-objects.html#6.10
*/
final val FirstName: String = "John"
final val LastName: String = "Doe"
override protected def beforeEach(): Unit = {
}
}
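As an aside, the Person JSON helpers defined above compose like this (a sketch, not part of the spec):
import play.api.libs.json.Json
import com.github.dnvriend.Person
object PersonJsonSketch extends App {
  println(Json.stringify(Person("John", 42).toJson))   // {"firstName":"John","age":42}
  println(List(Person("A", 1), Person("B", 2)).toJson) // JSON array via IterableOps
}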
|
dnvriend/study-category-theory
|
scalaz-test/src/test/com/github/dnvriend/TestSpec.scala
|
Scala
|
apache-2.0
| 3,235 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.message
import org.apache.kafka.common.record.{AbstractLegacyRecordBatch, Record, RecordBatch}
object MessageAndOffset {
def fromRecordBatch(batch: RecordBatch): MessageAndOffset = {
batch match {
case legacyBatch: AbstractLegacyRecordBatch =>
MessageAndOffset(Message.fromRecord(legacyBatch.outerRecord), legacyBatch.lastOffset)
case _ =>
throw new IllegalArgumentException(s"Illegal batch type ${batch.getClass}. The older message format classes " +
s"only support conversion from ${classOf[AbstractLegacyRecordBatch]}, which is used for magic v0 and v1")
}
}
def fromRecord(record: Record): MessageAndOffset = {
record match {
case legacyBatch: AbstractLegacyRecordBatch =>
MessageAndOffset(Message.fromRecord(legacyBatch.outerRecord), legacyBatch.lastOffset)
case _ =>
throw new IllegalArgumentException(s"Illegal record type ${record.getClass}. The older message format classes " +
s"only support conversion from ${classOf[AbstractLegacyRecordBatch]}, which is used for magic v0 and v1")
}
}
}
case class MessageAndOffset(message: Message, offset: Long) {
/**
* Compute the offset of the next message in the log
*/
def nextOffset: Long = offset + 1
}
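A small sketch of the offset bookkeeping; constructing a Message is elided, so msg is just a placeholder parameter:
import kafka.message.{Message, MessageAndOffset}
object OffsetSketch {
  def nextFetchOffset(msg: Message, offset: Long): Long =
    MessageAndOffset(msg, offset).nextOffset // offset + 1, e.g. 41L => 42L
}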
|
wangcy6/storm_app
|
frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/main/scala/kafka/message/MessageAndOffset.scala
|
Scala
|
apache-2.0
| 2,094 |
package chandu0101.scalajs.react.components
package semanticui
import chandu0101.macros.tojs.JSMacro
import japgolly.scalajs.react._
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.JSName
/**
* This file is generated - submit issues instead of PR against it
*/
@js.native @JSName("semanticui")
object Sui extends js.Object {
val Button: js.Dynamic = js.native
val Icon: js.Dynamic = js.native
val IconGroup: js.Dynamic = js.native
val Container: js.Dynamic = js.native
val Divider: js.Dynamic = js.native
val Header: js.Dynamic = js.native
val HeaderContent: js.Dynamic = js.native
val HeaderSubHeader: js.Dynamic = js.native
val Image: js.Dynamic = js.native
val ImageGroup: js.Dynamic = js.native
val Input: js.Dynamic = js.native
val Label: js.Dynamic = js.native
val LabelDetail: js.Dynamic = js.native
val LabelGroup: js.Dynamic = js.native
val List: js.Dynamic = js.native
val ListContent: js.Dynamic = js.native
val ListDescription: js.Dynamic = js.native
val ListHeader: js.Dynamic = js.native
val ListIcon: js.Dynamic = js.native
val ListItem: js.Dynamic = js.native
val ListList: js.Dynamic = js.native
val Form: js.Dynamic = js.native
val FormField: js.Dynamic = js.native
val FormButton: js.Dynamic = js.native
val FormCheckbox: js.Dynamic = js.native
val FormDropdown: js.Dynamic = js.native
val FormGroup: js.Dynamic = js.native
val FormInput: js.Dynamic = js.native
val FormRadio: js.Dynamic = js.native
val FormSelect: js.Dynamic = js.native
val FormTextArea: js.Dynamic = js.native
val Grid: js.Dynamic = js.native
val GridColumn: js.Dynamic = js.native
val GridRow: js.Dynamic = js.native
val Flag: js.Dynamic = js.native
val Segment: js.Dynamic = js.native
val SegmentGroup: js.Dynamic = js.native
}
|
rleibman/scalajs-react-components
|
core/src/main/scala/chandu0101/scalajs/react/components/semanticui/Sui.scala
|
Scala
|
apache-2.0
| 2,073 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.util.Date
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Cast, ExprId, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeUtils}
import org.apache.spark.sql.catalyst.util.quoteIdentifier
import org.apache.spark.sql.types._
/**
* A function defined in the catalog.
*
* @param identifier name of the function
* @param className fully qualified class name, e.g. "org.apache.spark.util.MyFunc"
* @param resources resource types and Uris used by the function
*/
case class CatalogFunction(
identifier: FunctionIdentifier,
className: String,
resources: Seq[FunctionResource])
/**
* Storage format, used to describe how a partition or a table is stored.
*/
case class CatalogStorageFormat(
locationUri: Option[URI],
inputFormat: Option[String],
outputFormat: Option[String],
serde: Option[String],
compressed: Boolean,
properties: Map[String, String]) {
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("Storage(", ", ", ")")
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
locationUri.foreach(l => map.put("Location", l.toString))
serde.foreach(map.put("Serde Library", _))
inputFormat.foreach(map.put("InputFormat", _))
outputFormat.foreach(map.put("OutputFormat", _))
if (compressed) map.put("Compressed", "")
CatalogUtils.maskCredentials(properties) match {
case props if props.isEmpty => // No-op
case props =>
map.put("Storage Properties", props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]"))
}
map
}
}
object CatalogStorageFormat {
/** Empty storage format for default values and copies. */
val empty = CatalogStorageFormat(locationUri = None, inputFormat = None,
outputFormat = None, serde = None, compressed = false, properties = Map.empty)
}
/**
* A partition (Hive style) defined in the catalog.
*
* @param spec partition spec values indexed by column name
* @param storage storage format of the partition
* @param parameters some parameters for the partition
* @param createTime creation time of the partition, in milliseconds
* @param lastAccessTime last access time, in milliseconds
* @param stats optional statistics (number of rows, total size, etc.)
*/
case class CatalogTablePartition(
spec: CatalogTypes.TablePartitionSpec,
storage: CatalogStorageFormat,
parameters: Map[String, String] = Map.empty,
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
stats: Option[CatalogStatistics] = None) {
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
map.put("Partition Values", s"[$specString]")
map ++= storage.toLinkedHashMap
if (parameters.nonEmpty) {
map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
}
map.put("Created Time", new Date(createTime).toString)
val lastAccess = {
if (-1 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
}
map.put("Last Access", lastAccess)
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
map
}
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
    }.mkString("CatalogPartition(\n\t", "\n\t", ")")
}
/** Readable string representation for the CatalogTablePartition. */
def simpleString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
    }.mkString("", "\n", "")
}
/** Return the partition location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
throw new AnalysisException(s"Partition [$specString] did not specify locationUri")
}
/**
* Given the partition schema, returns a row with that schema holding the partition values.
*/
  def toRow(partitionSchema: StructType, defaultTimeZoneId: String): InternalRow = {
    val caseInsensitiveProperties = CaseInsensitiveMap(storage.properties)
    val timeZoneId = caseInsensitiveProperties.getOrElse(
      DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId)
InternalRow.fromSeq(partitionSchema.map { field =>
val partValue = if (spec(field.name) == ExternalCatalogUtils.DEFAULT_PARTITION_NAME) {
null
} else {
spec(field.name)
}
Cast(Literal(partValue), field.dataType, Option(timeZoneId)).eval()
})
}
}
/**
* A container for bucketing information.
* Bucketing is a technology for decomposing data sets into more manageable parts, and the number
* of buckets is fixed so it does not fluctuate with data.
*
* @param numBuckets number of buckets.
* @param bucketColumnNames the names of the columns that used to generate the bucket id.
* @param sortColumnNames the names of the columns that used to sort data in each bucket.
*/
case class BucketSpec(
numBuckets: Int,
bucketColumnNames: Seq[String],
sortColumnNames: Seq[String]) {
if (numBuckets <= 0 || numBuckets >= 100000) {
throw new AnalysisException(
s"Number of buckets should be greater than 0 but less than 100000. Got `$numBuckets`")
}
override def toString: String = {
val bucketString = s"bucket columns: [${bucketColumnNames.mkString(", ")}]"
val sortString = if (sortColumnNames.nonEmpty) {
s", sort columns: [${sortColumnNames.mkString(", ")}]"
} else {
""
}
s"$numBuckets buckets, $bucketString$sortString"
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
mutable.LinkedHashMap[String, String](
"Num Buckets" -> numBuckets.toString,
"Bucket Columns" -> bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]"),
"Sort Columns" -> sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
)
}
}
/**
* A table defined in the catalog.
*
* Note that Hive's metastore also tracks skewed columns. We should consider adding that in the
* future once we have a better understanding of how we want to handle skewed columns.
*
* @param provider the name of the data source provider for this table, e.g. parquet, json, etc.
* Can be None if this table is a View, should be "hive" for hive serde tables.
* @param unsupportedFeatures is a list of string descriptions of features that are used by the
* underlying table but not supported by Spark SQL yet.
* @param tracksPartitionsInCatalog whether this table's partition metadata is stored in the
* catalog. If false, it is inferred automatically based on file
* structure.
* @param schemaPreservesCase Whether or not the schema resolved for this table is case-sensitive.
* When using a Hive Metastore, this flag is set to false if a case-
* sensitive schema was unable to be read from the table properties.
* Used to trigger case-sensitive schema inference at query time, when
* configured.
* @param ignoredProperties is a list of table properties that are used by the underlying table
* but ignored by Spark SQL yet.
* @param createVersion records the version of Spark that created this table metadata. The default
* is an empty string. We expect it will be read from the catalog or filled by
* ExternalCatalog.createTable. For temporary views, the value will be empty.
*/
case class CatalogTable(
identifier: TableIdentifier,
tableType: CatalogTableType,
storage: CatalogStorageFormat,
schema: StructType,
provider: Option[String] = None,
partitionColumnNames: Seq[String] = Seq.empty,
bucketSpec: Option[BucketSpec] = None,
owner: String = "",
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
createVersion: String = "",
properties: Map[String, String] = Map.empty,
stats: Option[CatalogStatistics] = None,
viewText: Option[String] = None,
comment: Option[String] = None,
unsupportedFeatures: Seq[String] = Seq.empty,
tracksPartitionsInCatalog: Boolean = false,
schemaPreservesCase: Boolean = true,
ignoredProperties: Map[String, String] = Map.empty) {
import CatalogTable._
/**
* schema of this table's partition columns
*/
def partitionSchema: StructType = {
val partitionFields = schema.takeRight(partitionColumnNames.length)
assert(partitionFields.map(_.name) == partitionColumnNames)
StructType(partitionFields)
}
/**
* schema of this table's data columns
*/
def dataSchema: StructType = {
val dataFields = schema.dropRight(partitionColumnNames.length)
StructType(dataFields)
}
/** Return the database this table was specified to belong to, assuming it exists. */
def database: String = identifier.database.getOrElse {
throw new AnalysisException(s"table $identifier did not specify database")
}
/** Return the table location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
throw new AnalysisException(s"table $identifier did not specify locationUri")
}
/** Return the fully qualified name of this table, assuming the database was specified. */
def qualifiedName: String = identifier.unquotedString
/**
* Return the default database name we use to resolve a view, should be None if the CatalogTable
* is not a View or created by older versions of Spark(before 2.2.0).
*/
def viewDefaultDatabase: Option[String] = properties.get(VIEW_DEFAULT_DATABASE)
/**
* Return the output column names of the query that creates a view, the column names are used to
* resolve a view, should be empty if the CatalogTable is not a View or created by older versions
* of Spark(before 2.2.0).
*/
def viewQueryColumnNames: Seq[String] = {
for {
numCols <- properties.get(VIEW_QUERY_OUTPUT_NUM_COLUMNS).toSeq
index <- 0 until numCols.toInt
} yield properties.getOrElse(
s"$VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX$index",
throw new AnalysisException("Corrupted view query output column names in catalog: " +
s"$numCols parts expected, but part $index is missing.")
)
}
/** Syntactic sugar to update a field in `storage`. */
def withNewStorage(
locationUri: Option[URI] = storage.locationUri,
inputFormat: Option[String] = storage.inputFormat,
outputFormat: Option[String] = storage.outputFormat,
compressed: Boolean = false,
serde: Option[String] = storage.serde,
properties: Map[String, String] = storage.properties): CatalogTable = {
copy(storage = CatalogStorageFormat(
locationUri, inputFormat, outputFormat, serde, compressed, properties))
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
identifier.database.foreach(map.put("Database", _))
map.put("Table", identifier.table)
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
map.put("Created Time", new Date(createTime).toString)
map.put("Last Access", new Date(lastAccessTime).toString)
map.put("Created By", "Spark " + createVersion)
map.put("Type", tableType.name)
provider.foreach(map.put("Provider", _))
bucketSpec.foreach(map ++= _.toLinkedHashMap)
comment.foreach(map.put("Comment", _))
if (tableType == CatalogTableType.VIEW) {
viewText.foreach(map.put("View Text", _))
viewDefaultDatabase.foreach(map.put("View Default Database", _))
if (viewQueryColumnNames.nonEmpty) {
map.put("View Query Output Columns", viewQueryColumnNames.mkString("[", ", ", "]"))
}
}
if (properties.nonEmpty) map.put("Table Properties", tableProperties)
stats.foreach(s => map.put("Statistics", s.simpleString))
map ++= storage.toLinkedHashMap
if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
if (partitionColumnNames.nonEmpty) map.put("Partition Columns", partitionColumns)
if (schema.nonEmpty) map.put("Schema", schema.treeString)
map
}
override def toString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
    }.mkString("CatalogTable(\n", "\n", ")")
}
/** Readable string representation for the CatalogTable. */
def simpleString: String = {
toLinkedHashMap.map { case ((key, value)) =>
if (value.isEmpty) key else s"$key: $value"
    }.mkString("", "\n", "")
}
}
object CatalogTable {
val VIEW_DEFAULT_DATABASE = "view.default.database"
val VIEW_QUERY_OUTPUT_PREFIX = "view.query.out."
val VIEW_QUERY_OUTPUT_NUM_COLUMNS = VIEW_QUERY_OUTPUT_PREFIX + "numCols"
val VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX = VIEW_QUERY_OUTPUT_PREFIX + "col."
}
/**
* This class of statistics is used in [[CatalogTable]] to interact with metastore.
* We define this new class instead of directly using [[Statistics]] here because there are no
* concepts of attributes or broadcast hint in catalog.
*/
case class CatalogStatistics(
sizeInBytes: BigInt,
rowCount: Option[BigInt] = None,
colStats: Map[String, CatalogColumnStat] = Map.empty) {
/**
* Convert [[CatalogStatistics]] to [[Statistics]], and match column stats to attributes based
* on column names.
*/
def toPlanStats(planOutput: Seq[Attribute], cboEnabled: Boolean): Statistics = {
if (cboEnabled && rowCount.isDefined) {
val attrStats = AttributeMap(planOutput
.flatMap(a => colStats.get(a.name).map(a -> _.toPlanStat(a.name, a.dataType))))
// Estimate size as number of rows * row size.
val size = EstimationUtils.getOutputSize(planOutput, rowCount.get, attrStats)
Statistics(sizeInBytes = size, rowCount = rowCount, attributeStats = attrStats)
} else {
// When CBO is disabled or the table doesn't have other statistics, we apply the size-only
// estimation strategy and only propagate sizeInBytes in statistics.
Statistics(sizeInBytes = sizeInBytes)
}
}
/** Readable string representation for the CatalogStatistics. */
def simpleString: String = {
val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
s"$sizeInBytes bytes$rowCountString"
}
}
/**
* This class of statistics for a column is used in [[CatalogTable]] to interact with metastore.
*/
case class CatalogColumnStat(
distinctCount: Option[BigInt] = None,
min: Option[String] = None,
max: Option[String] = None,
nullCount: Option[BigInt] = None,
avgLen: Option[Long] = None,
maxLen: Option[Long] = None,
histogram: Option[Histogram] = None) {
/**
* Returns a map from string to string that can be used to serialize the column stats.
* The key is the name of the column and name of the field (e.g. "colName.distinctCount"),
* and the value is the string representation for the value.
* min/max values are stored as Strings. They can be deserialized using
* [[CatalogColumnStat.fromExternalString]].
*
* As part of the protocol, the returned map always contains a key called "version".
* Any of the fields that are null (None) won't appear in the map.
*/
def toMap(colName: String): Map[String, String] = {
val map = new scala.collection.mutable.HashMap[String, String]
map.put(s"${colName}.${CatalogColumnStat.KEY_VERSION}", "1")
distinctCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_DISTINCT_COUNT}", v.toString)
}
nullCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_NULL_COUNT}", v.toString)
}
avgLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_AVG_LEN}", v.toString) }
maxLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_LEN}", v.toString) }
min.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MIN_VALUE}", v) }
max.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_VALUE}", v) }
histogram.foreach { h =>
map.put(s"${colName}.${CatalogColumnStat.KEY_HISTOGRAM}", HistogramSerializer.serialize(h))
}
map.toMap
}
/** Convert [[CatalogColumnStat]] to [[ColumnStat]]. */
def toPlanStat(
colName: String,
dataType: DataType): ColumnStat =
ColumnStat(
distinctCount = distinctCount,
min = min.map(CatalogColumnStat.fromExternalString(_, colName, dataType)),
max = max.map(CatalogColumnStat.fromExternalString(_, colName, dataType)),
nullCount = nullCount,
avgLen = avgLen,
maxLen = maxLen,
histogram = histogram)
}
object CatalogColumnStat extends Logging {
// List of string keys used to serialize CatalogColumnStat
val KEY_VERSION = "version"
private val KEY_DISTINCT_COUNT = "distinctCount"
private val KEY_MIN_VALUE = "min"
private val KEY_MAX_VALUE = "max"
private val KEY_NULL_COUNT = "nullCount"
private val KEY_AVG_LEN = "avgLen"
private val KEY_MAX_LEN = "maxLen"
private val KEY_HISTOGRAM = "histogram"
  /**
   * Converts the external string representation of a statistic value into the corresponding
   * Catalyst value for the given data type.
   */
def fromExternalString(s: String, name: String, dataType: DataType): Any = {
dataType match {
case BooleanType => s.toBoolean
case DateType => DateTimeUtils.fromJavaDate(java.sql.Date.valueOf(s))
case TimestampType => DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf(s))
case ByteType => s.toByte
case ShortType => s.toShort
case IntegerType => s.toInt
case LongType => s.toLong
case FloatType => s.toFloat
case DoubleType => s.toDouble
case _: DecimalType => Decimal(s)
// This version of Spark does not use min/max for binary/string types so we ignore it.
case BinaryType | StringType => null
case _ =>
throw new AnalysisException("Column statistics deserialization is not supported for " +
s"column $name of data type: $dataType.")
}
}
  /**
   * Converts the given Catalyst value into the string representation of the corresponding
   * external data type.
   */
def toExternalString(v: Any, colName: String, dataType: DataType): String = {
val externalValue = dataType match {
case DateType => DateTimeUtils.toJavaDate(v.asInstanceOf[Int])
case TimestampType => DateTimeUtils.toJavaTimestamp(v.asInstanceOf[Long])
case BooleanType | _: IntegralType | FloatType | DoubleType => v
case _: DecimalType => v.asInstanceOf[Decimal].toJavaBigDecimal
// This version of Spark does not use min/max for binary/string types so we ignore it.
case _ =>
throw new AnalysisException("Column statistics serialization is not supported for " +
s"column $colName of data type: $dataType.")
}
externalValue.toString
}
/**
* Creates a [[CatalogColumnStat]] object from the given map.
* This is used to deserialize column stats from some external storage.
* The serialization side is defined in [[CatalogColumnStat.toMap]].
*/
def fromMap(
table: String,
colName: String,
map: Map[String, String]): Option[CatalogColumnStat] = {
try {
Some(CatalogColumnStat(
distinctCount = map.get(s"${colName}.${KEY_DISTINCT_COUNT}").map(v => BigInt(v.toLong)),
min = map.get(s"${colName}.${KEY_MIN_VALUE}"),
max = map.get(s"${colName}.${KEY_MAX_VALUE}"),
nullCount = map.get(s"${colName}.${KEY_NULL_COUNT}").map(v => BigInt(v.toLong)),
avgLen = map.get(s"${colName}.${KEY_AVG_LEN}").map(_.toLong),
maxLen = map.get(s"${colName}.${KEY_MAX_LEN}").map(_.toLong),
histogram = map.get(s"${colName}.${KEY_HISTOGRAM}").map(HistogramSerializer.deserialize)
))
} catch {
case NonFatal(e) =>
logWarning(s"Failed to parse column statistics for column ${colName} in table $table", e)
None
}
}
}
case class CatalogTableType private(name: String)
object CatalogTableType {
val EXTERNAL = new CatalogTableType("EXTERNAL")
val MANAGED = new CatalogTableType("MANAGED")
val VIEW = new CatalogTableType("VIEW")
}
/**
* A database defined in the catalog.
*/
case class CatalogDatabase(
name: String,
description: String,
locationUri: URI,
properties: Map[String, String])
object CatalogTypes {
/**
* Specifications of a table partition. Mapping column name to column value.
*/
type TablePartitionSpec = Map[String, String]
/**
* Initialize an empty spec.
*/
lazy val emptyTablePartitionSpec: TablePartitionSpec = Map.empty[String, String]
}
/**
* A placeholder for a table relation, which will be replaced by concrete relation like
* `LogicalRelation` or `HiveTableRelation`, during analysis.
*/
case class UnresolvedCatalogRelation(tableMeta: CatalogTable) extends LeafNode {
assert(tableMeta.identifier.database.isDefined)
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* A `LogicalPlan` that represents a hive table.
*
* TODO: remove this after we completely make hive as a data source.
*/
case class HiveTableRelation(
tableMeta: CatalogTable,
dataCols: Seq[AttributeReference],
partitionCols: Seq[AttributeReference]) extends LeafNode with MultiInstanceRelation {
assert(tableMeta.identifier.database.isDefined)
assert(tableMeta.partitionSchema.sameType(partitionCols.toStructType))
assert(tableMeta.dataSchema.sameType(dataCols.toStructType))
// The partition column should always appear after data columns.
override def output: Seq[AttributeReference] = dataCols ++ partitionCols
def isPartitioned: Boolean = partitionCols.nonEmpty
override def doCanonicalize(): HiveTableRelation = copy(
tableMeta = tableMeta.copy(
storage = CatalogStorageFormat.empty,
createTime = -1
),
dataCols = dataCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index))
},
partitionCols = partitionCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index + dataCols.length))
}
)
override def computeStats(): Statistics = {
tableMeta.stats.map(_.toPlanStats(output, conf.cboEnabled)).getOrElse {
throw new IllegalStateException("table stats must be specified.")
}
}
override def newInstance(): HiveTableRelation = copy(
dataCols = dataCols.map(_.newInstance()),
partitionCols = partitionCols.map(_.newInstance()))
}
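As one concrete data point for the classes above, BucketSpec renders its human-readable forms as follows (a sketch, not Spark test code):
import org.apache.spark.sql.catalyst.catalog.BucketSpec
object BucketSpecSketch extends App {
  val spec = BucketSpec(4, Seq("user_id"), Seq("ts"))
  println(spec) // 4 buckets, bucket columns: [user_id], sort columns: [ts]
  spec.toLinkedHashMap.foreach { case (k, v) => println(s"$k: $v") }
}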
|
rikima/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala
|
Scala
|
apache-2.0
| 24,443 |
package utils
import play.api.libs.json.JsArray
import utils.JsValueWrapper.improveJsValue
import scala.language.implicitConversions
/**
* Created by valtechuk on 22/01/2015.
*/
object SortBy {
def claimTypeDateTime(data: Option[JsArray]): Option[JsArray] = {
data match {
case Some(data) =>
Some(JsArray(
data.value.seq.sortWith(_.p.claimDateTime.asLong < _.p.claimDateTime.asLong)
.sortWith(_.p.claimType.asType < _.p.claimType.asType)
))
case _ => data
}
}
def dateTime(d: Option[JsArray]): Option[JsArray] = {
d match {
case Some(data) =>
Some(JsArray(
data.value.seq.sortWith(_.p.claimDateTime.asLong < _.p.claimDateTime.asLong)
))
case _ => d
}
}
def surname(d: Option[JsArray]): Option[JsArray] = {
d match {
case Some(data) =>
Some(JsArray(
data.value.seq.sortWith((p1,p2)=>p1.p.surname.toString().compareToIgnoreCase(p2.p.surname.toString()) < 0)
))
case _ => d
}
}
def name(d: Option[JsArray]): Option[JsArray] = {
d match {
case Some(data) =>
Some(JsArray(
data.value.seq.sortWith((p1,p2)=>p1.p.forename.toString().compareToIgnoreCase(p2.p.forename.toString()) < 0)
))
case _ => d
}
}
}
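An illustrative call, assuming JsValueWrapper's .p accessor can resolve the surname field on each element at runtime:
import play.api.libs.json.{JsArray, Json}
object SortBySketch extends App {
  val people: JsArray = Json.arr(
    Json.obj("surname" -> "Zeta"),
    Json.obj("surname" -> "Abel")
  )
  println(utils.SortBy.surname(Some(people))) // Abel sorts before Zeta
}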
|
Department-for-Work-and-Pensions/CarersAllowanceStaffAccess
|
casa/app/utils/SortBy.scala
|
Scala
|
mit
| 1,322 |
package epam.bdcc_app.util
import com.epam.common.TimeUtils
object ProfilingUtil {
def profileTime[R](blockName: String)(block: => R): R = {
val start = System.nanoTime()
val result = block
println(s"PROFILING: '$blockName' last: ${TimeUtils.toString((System.nanoTime() - start) / 1000000L)}")
result
}
}
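Typical call-site usage of the curried signature (a sketch):
import epam.bdcc_app.util.ProfilingUtil.profileTime
object ProfileTimeSketch extends App {
  val sorted = profileTime("sort 1M ints") {
    (1 to 1000000).toArray.sorted
  }
  // prints: PROFILING: 'sort 1M ints' last: <elapsed>
}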
|
mkasatkin/bdcc_app.vk_samza
|
src/main/scala/epam/bdcc_app/util/ProfilingUtil.scala
|
Scala
|
apache-2.0
| 348 |
package cephui.models
import json._
object JsFormats {
implicit val locationAccessor = ObjectAccessor.create[Location]
implicit val jobAccessor = ObjectAccessor.create[Job]
implicit val danglingReservationAccessor = ObjectAccessor.create[DanglingReservation]
implicit val errorReservationAccessor = ObjectAccessor.create[ErrorResponse]
}
|
vivint-smarthome/ceph-on-mesos
|
ui/src/main/scala/cephui/models/JsFormats.scala
|
Scala
|
apache-2.0
| 348 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
/**
* Represents the data to be sent using the Producer send API
* @param topic the topic under which the message is to be published
* @param key the key used by the partitioner to pick a broker partition
* @param data variable length data to be published as Kafka messages under topic
*/
class ProducerData[K, V](private val topic: String,
private val key: K,
private val data: Seq[V]) {
def this(t: String, d: Seq[V]) = this(topic = t, key = null.asInstanceOf[K], data = d)
def this(t: String, d: V) = this(topic = t, key = null.asInstanceOf[K], data = List(d))
def getTopic: String = topic
def getKey: K = key
def getData: Seq[V] = data
}
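The auxiliary constructors make the key optional; a short sketch of the three shapes:
import kafka.producer.ProducerData
object ProducerDataSketch {
  val keyed   = new ProducerData[String, String]("events", "user-42", Seq("a", "b"))
  val keyless = new ProducerData[String, String]("events", Seq("a", "b"))
  val single  = new ProducerData[String, String]("events", "hello")
}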
|
quipo/kafka
|
core/src/main/scala/kafka/producer/ProducerData.scala
|
Scala
|
apache-2.0
| 1,326 |
case class MyClass(number: Int, char: Char, b: Boolean) {
  override val toString: String = s"$char$number"
}
object MyClass {
MyClass(2, '1', true)
new MyClass(2, '1', true)
}
|
ilinum/intellij-scala
|
testdata/changeSignature/fromScala/CaseClass_after.scala
|
Scala
|
apache-2.0
| 171 |
package com.sksamuel.scapegoat.inspections.unnecessary
import com.sksamuel.scapegoat.{ Warning, PluginRunner }
import com.sksamuel.scapegoat.inspections.unneccesary.UnusedMethodParameter
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
/** @author Stephen Samuel */
class UnusedMethodParameterTest
extends FreeSpec
with Matchers with PluginRunner with OneInstancePerTest {
override val inspections = Seq(new UnusedMethodParameter)
"UnusedMethodParameter" - {
"should report warning" - {
"for unused parameters in concrete methods" in {
val code = """class Test {
val initstuff = "sammy"
def foo(a:String, b:Int, c:Int) {
println(b)
foo(a,b,b)
}
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
compiler.scapegoat.feedback.warns.size shouldBe 1
}
}
"should ignore @SuppressWarnings" in {
val code = """class Test {
@SuppressWarnings(Array("all"))
def foo(a:String, b:Int, c:Int) {
println(b)
foo(a,b,b)
}
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"should not report warning" - {
"for abstract methods" in {
val code = """abstract class Test {
def foo(name:String) : String
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for methods not returning" in {
val code = """class Test {
| def foo(name:String) = throw new RuntimeException
|}""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for overriden method" in {
val code = """package com.sam
trait Foo {
def foo(name:String):String
}
object Fool extends Foo {
override def foo(name:String) : String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for overriden method without override keyword" in {
val code = """package com.sam
trait Foo {
def foo(name:String):String
}
object Fool extends Foo {
def foo(name:String) : String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for implemented method" in {
val code = """package com.sam
trait Foo {
def foo(name:String): String
}
case class Fool() extends Foo {
def foo(name:String): String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
"should handle constructor params" - {
"ignore unused case class primary param" in {
assertNoWarnings("""case class Foo(x: Int)""")
}
"warn on unused case class secondary params" in {
val code = """case class Foo(x: Int)(y: Int)"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings match {
case Seq(warning: Warning) =>
warning.snippet.get should include("y")
}
}
"not warn on case class secondary params used as fields" in {
assertNoWarnings(
"""case class Foo(x: Int)(y: Int) {
| def example: String = {
| s"x = $x, y = $y"
| }
|}
""".stripMargin)
}
"not warn on case class secondary params used as params" in {
assertNoWarnings(
"""case class Foo(x: Int)(y: Int) {
| println(s"x = $x, y = $y")
|
| def example: String = "irrelevant"
|}
""".stripMargin)
}
"warn on unused non-case class primary params" in {
val code = """class Foo(x: Int)"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings match {
case Seq(warning: Warning) =>
warning.snippet.get should include("x")
}
}
"not warn on non-case class primary params used as fields" in {
assertNoWarnings(
"""class Foo(x: Int) {
| def example: String = {
| s"x = $x"
| }
|}
""".stripMargin)
}
"not warn on non-case class primary params used as params" in {
assertNoWarnings(
"""class Foo(x: Int) {
| println(s"x = $x")
|
| def example: String = "irrelevant"
|}
""".stripMargin)
}
"not warn on non-case class primary params marked val" in {
assertNoWarnings("""class Foo(val x: Int)""")
}
}
}
private def assertNoWarnings(code: String) = {
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
|
pwwpche/scalac-scapegoat-plugin
|
src/test/scala/com/sksamuel/scapegoat/inspections/unnecessary/UnusedMethodParameterTest.scala
|
Scala
|
apache-2.0
| 5,528 |
package io.sphere.util
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.must.Matchers
import scala.language.postfixOps
class LangTagSpec extends AnyFunSpec with Matchers {
describe("LangTag") {
it("should accept valid language tags") {
LangTag.unapply("de").isEmpty must be(false)
LangTag.unapply("fr").isEmpty must be(false)
LangTag.unapply("de-DE").isEmpty must be(false)
LangTag.unapply("de-AT").isEmpty must be(false)
LangTag.unapply("de-CH").isEmpty must be(false)
LangTag.unapply("fr-FR").isEmpty must be(false)
LangTag.unapply("fr-CA").isEmpty must be(false)
LangTag.unapply("he-IL-u-ca-hebrew-tz-jeruslm").isEmpty must be(false)
}
it("should not accept invalid language tags") {
LangTag.unapply(" de").isEmpty must be(true)
LangTag.unapply("de_DE").isEmpty must be(true)
LangTag.unapply("e-DE").isEmpty must be(true)
}
}
}
|
sphereio/sphere-scala-libs
|
util/src/test/scala/LangTagSpec.scala
|
Scala
|
apache-2.0
| 942 |