code (string, lengths 5-1M) | repo_name (string, lengths 5-109) | path (string, lengths 6-208) | language (1 class) | license (15 classes) | size (int64, 5-1M)
---|---|---|---|---|---|
package org.apache.spark.util
import java.io.File
import java.util.UUID
import org.apache.spark.SparkConf
import org.apache.spark.ml.linalg.SQLDataTypes.VectorType
import org.apache.spark.ml.linalg.{Vector, VectorUDT}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.UnsafeProjection
/**
* @author debasish83 on 12/15/16.
*/
object DalUtils {
def getLocalDir(conf: SparkConf) : String = {
Utils.getLocalDir(conf)
}
def getTempFile(identifier: String, path: File): File = {
new File(path.getAbsolutePath + s".${identifier}." + UUID.randomUUID())
}
def deserializeVector(row: InternalRow): Vector = {
VectorType.asInstanceOf[VectorUDT].deserialize(row)
}
def serializeVector(value: Vector): InternalRow = {
VectorType.asInstanceOf[VectorUDT].serialize(value)
}
def projectVector(): UnsafeProjection = {
UnsafeProjection.create(VectorType.asInstanceOf[VectorUDT].sqlType)
}
}
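// Usage sketch (editorial addition, not part of the original file): round-tripping an
// ml Vector through its Catalyst representation with the helpers above. The values are
// arbitrary.
object DalUtilsUsageSketch {
  import org.apache.spark.ml.linalg.Vectors

  def roundTrip(): Vector = {
    val v = Vectors.dense(1.0, 2.0, 3.0)
    val row = DalUtils.serializeVector(v) // InternalRow in VectorUDT's sqlType layout
    DalUtils.deserializeVector(row)       // equal to v
  }
}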
| Verizon/trapezium | dal/src/main/scala/org/apache/spark/util/DalUtils.scala | Scala | apache-2.0 | 973 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.log.cassandra
import java.lang.{ Long => JLong }
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
private[eventuate] class CassandraDeletedToStore(cassandra: Cassandra, logId: String) {
def writeDeletedTo(deletedTo: Long): Unit =
cassandra.execute(cassandra.preparedWriteDeletedToStatement.bind(logId, deletedTo: JLong), cassandra.settings.writeTimeout)
def readDeletedToAsync(implicit executor: ExecutionContext): Future[Long] = {
cassandra.session.executeAsync(cassandra.preparedReadDeletedToStatement.bind(logId)).map { resultSet =>
if (resultSet.isExhausted) 0L else resultSet.one().getLong("deleted_to")
}
}
}
| ianclegg/eventuate | eventuate-log-cassandra/src/main/scala/com/rbmhtechnology/eventuate/log/cassandra/CassandraDeletedToStore.scala | Scala | apache-2.0 | 1,379 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import monix.execution.{Cancelable, ChannelType}
import scala.util.control.NonFatal
import monix.reactive.observers.{BufferedSubscriber, Subscriber}
import monix.reactive.{Observable, OverflowStrategy}
/** Implementation for [[monix.reactive.Observable.create]]. */
private[reactive] final class CreateObservable[+A](
overflowStrategy: OverflowStrategy.Synchronous[A],
producerType: ChannelType.ProducerSide,
f: Subscriber.Sync[A] => Cancelable)
extends Observable[A] {
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
val out = BufferedSubscriber.synchronous(subscriber, overflowStrategy, producerType)
try f(out)
catch {
case ex if NonFatal(ex) =>
subscriber.scheduler.reportFailure(ex)
Cancelable.empty
}
}
}
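// Usage sketch (editorial addition): Observable.create is the public entry point that
// instantiates this class. A well-behaved producer would stop once onNext signals
// Ack.Stop; this toy example ignores the returned Ack for brevity.
private[builders] object CreateObservableUsageSketch {
  val numbers: Observable[Int] =
    Observable.create[Int](OverflowStrategy.Unbounded) { sub =>
      sub.onNext(1)
      sub.onNext(2)
      sub.onComplete()
      Cancelable.empty // nothing to release when the subscriber cancels
    }
}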
| monix/monix | monix-reactive/shared/src/main/scala/monix/reactive/internal/builders/CreateObservable.scala | Scala | apache-2.0 | 1,497 |
package edu.gemini.phase2.template.factory.impl.nici
import edu.gemini.spModel.gemini.nici.blueprint.SpNiciBlueprintCoronagraphic
import edu.gemini.spModel.gemini.nici.NICIParams.{Channel2FW, Channel1FW}
import scala.collection.JavaConverters._
case class NiciCoronographic(blueprint:SpNiciBlueprintCoronagraphic) extends NiciBase[SpNiciBlueprintCoronagraphic] {
import blueprint._
// **** IF MODE == Coronagraphic ****
// INCLUDE {3},{4} in target-specific Scheduling Group
// SET DICHROIC FROM PI
// SET RED CHANNEL FILTER FROM PI if defined ELSE SET to "Block"
// SET BLUE CHANNEL FILTER FROM PI if defined ELSE SET to "Block"
// SET FPM FROM PI
include(3, 4) in TargetGroup
forGroup(TargetGroup)(
setDichroic(dichroic),
setRedChannelFilter(firstRedFilterOrBlock),
setBlueChannelFilter(firstBlueFilterOrBlock),
setFPM(fpm))
}
| arturog8m/ocs | bundle/edu.gemini.phase2.skeleton.servlet/src/main/scala/edu/gemini/phase2/template/factory/impl/nici/NiciCoronographic.scala | Scala | bsd-3-clause | 902 |
/*
* Copyright (c) 2017 Uber Technologies, Inc. ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package com.uber.hoodie
import com.uber.hoodie.DataSourceReadOptions._
import com.uber.hoodie.exception.HoodieException
import org.apache.log4j.LogManager
import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext}
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.execution.streaming.Sink
import org.apache.spark.sql.sources._
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.StructType
/**
* Hoodie Spark Datasource, for reading and writing hoodie datasets
*
*/
class DefaultSource extends RelationProvider
with SchemaRelationProvider
with CreatableRelationProvider
with DataSourceRegister
with StreamSinkProvider
with Serializable {
private val log = LogManager.getLogger(classOf[DefaultSource])
override def createRelation(sqlContext: SQLContext,
parameters: Map[String, String]): BaseRelation = {
createRelation(sqlContext, parameters, null)
}
override def createRelation(sqlContext: SQLContext,
optParams: Map[String, String],
schema: StructType): BaseRelation = {
// Add default options for unspecified read options keys.
val parameters = Map(VIEW_TYPE_OPT_KEY -> DEFAULT_VIEW_TYPE_OPT_VAL) ++: optParams
val path = parameters.get("path")
if (path.isEmpty) {
throw new HoodieException("'path' must be specified.")
}
if (parameters(VIEW_TYPE_OPT_KEY).equals(VIEW_TYPE_REALTIME_OPT_VAL)) {
throw new HoodieException("Realtime view not supported yet via data source. Please use HiveContext route.")
}
if (parameters(VIEW_TYPE_OPT_KEY).equals(VIEW_TYPE_INCREMENTAL_OPT_VAL)) {
new IncrementalRelation(sqlContext, path.get, optParams, schema)
} else {
// this is effectively the read-optimized view only; `path` can contain a mix of
// non-hoodie/hoodie files, so set up the path filter
sqlContext.sparkContext.hadoopConfiguration.setClass(
"mapreduce.input.pathFilter.class",
classOf[com.uber.hoodie.hadoop.HoodieROTablePathFilter],
classOf[org.apache.hadoop.fs.PathFilter]);
log.info("Constructing hoodie (as parquet) data source with options :" + parameters)
// simply return as a regular parquet relation
DataSource.apply(
sparkSession = sqlContext.sparkSession,
userSpecifiedSchema = Option(schema),
className = "parquet",
options = parameters)
.resolveRelation()
}
}
override def createRelation(sqlContext: SQLContext,
mode: SaveMode,
optParams: Map[String, String],
df: DataFrame): BaseRelation = {
val parameters = HoodieSparkSqlWriter.parametersWithWriteDefaults(optParams)
HoodieSparkSqlWriter.write(sqlContext, mode, parameters, df)
createRelation(sqlContext, parameters, df.schema)
}
override def createSink(sqlContext: SQLContext,
optParams: Map[String, String],
partitionColumns: Seq[String],
outputMode: OutputMode): Sink = {
val parameters = HoodieSparkSqlWriter.parametersWithWriteDefaults(optParams)
new HoodieStreamingSink(
sqlContext,
parameters,
partitionColumns,
outputMode)
}
override def shortName(): String = "hoodie"
}
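// Usage sketch (editorial addition): thanks to DataSourceRegister the source resolves
// by its short name as well as by the full class name. The path below is illustrative.
//   val df = sqlContext.read
//     .format("com.uber.hoodie") // or simply "hoodie"
//     .option(VIEW_TYPE_OPT_KEY, VIEW_TYPE_INCREMENTAL_OPT_VAL) // omit for the read-optimized default
//     .load("/path/to/hoodie/dataset")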
| vinothchandar/hoodie | hoodie-spark/src/main/scala/com/uber/hoodie/DefaultSource.scala | Scala | apache-2.0 | 4,088 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.examples.kafka_hdfs_pipeline
import org.apache.gearpump.Message
import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.examples.kafka_hdfs_pipeline.ParquetWriterTask._
import org.apache.gearpump.streaming.task.{Task, TaskContext}
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.parquet.avro.AvroParquetWriter
import org.apache.hadoop.yarn.conf.YarnConfiguration
import scala.util.{Failure, Success, Try}
class ParquetWriterTask(taskContext : TaskContext, config: UserConfig) extends Task(taskContext, config) {
val outputFileName = taskContext.appName + ".parquet"
val absolutePath = Option(config.getString(PARQUET_OUTPUT_DIRECTORY).get + "/" + outputFileName).map(name => {
val file = new java.io.File(name.stripPrefix("file://"))
if (file.exists) {
LOG.info(s"deleting $name")
if (!file.delete) {
LOG.info(s"could not delete $name")
}
}
name
}).get
val outputPath = new Path(absolutePath)
val parquetWriter = new AvroParquetWriter[SpaceShuttleRecord](outputPath, SpaceShuttleRecord.SCHEMA$)
def getYarnConf = new YarnConfiguration
def getFs = FileSystem.get(getYarnConf)
def getHdfs = new Path(getFs.getHomeDirectory, "/user/gearpump")
var count = 0
override def onNext(msg: Message): Unit = {
Try({
LOG.info("ParquetWriter")
parquetWriter.write(msg.msg.asInstanceOf[SpaceShuttleRecord])
if(count % 50 == 0) {
getFs.copyFromLocalFile(false, true, new Path(config.getString(PARQUET_OUTPUT_DIRECTORY).get, outputFileName), getHdfs)
}
count = count + 1
}) match {
case Success(ok) =>
case Failure(throwable) =>
LOG.error(s"failed ${throwable.getMessage}")
}
}
override def onStop(): Unit = {
LOG.info("ParquetWriter.onStop")
parquetWriter.close()
getFs.copyFromLocalFile(false, true, new Path(config.getString(PARQUET_OUTPUT_DIRECTORY).get, outputFileName), getHdfs)
}
}
object ParquetWriterTask {
val PARQUET_OUTPUT_DIRECTORY = "parquet.output.directory"
}
| skw1992/gearpump-examples | kafka-hdfs-pipeline/src/main/scala/org/apache/gearpump/examples/kafka_hdfs_pipeline/ParquetWriterTask.scala | Scala | apache-2.0 | 2,902 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util
import org.geotools.data.DataUtilities
import org.geotools.data.simple.{SimpleFeatureCollection, SimpleFeatureIterator}
import org.geotools.factory.Hints
import org.geotools.feature.collection.AbstractFeatureCollection
import org.geotools.geometry.jts.ReferencedEnvelope
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
class TypeUpdatingFeatureCollection(collection: SimpleFeatureCollection, newType: SimpleFeatureType) extends AbstractFeatureCollection(newType) {
val delegate = new TypeUpdatingFeatureIterator(collection.features, newType)
def openIterator(): util.Iterator[SimpleFeature] = delegate
def closeIterator(close: util.Iterator[SimpleFeature]) {delegate.remove()}
def size(): Int = collection.size
def getBounds: ReferencedEnvelope = collection.getBounds
}
class TypeUpdatingFeatureIterator(delegate: SimpleFeatureIterator, newType: SimpleFeatureType) extends util.Iterator[SimpleFeature] {
def hasNext: Boolean = delegate.hasNext
def next(): SimpleFeature = {
val delegateNext = delegate.next
val newFeature = DataUtilities.reType(newType, delegateNext)
newFeature.setDefaultGeometry(delegateNext.getDefaultGeometry)
newFeature.getUserData.put(Hints.USE_PROVIDED_FID, Boolean.box(x = true))
newFeature
}
def remove() {delegate.close()}
}
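// Usage sketch (editorial addition): wrap an existing collection so that each feature
// is re-typed lazily while keeping its provided feature ID. `source` and `targetType`
// are assumed to come from the caller.
//   val retyped: SimpleFeatureCollection =
//     new TypeUpdatingFeatureCollection(source, targetType)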
| giserh/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/TypeUpdatingFeatureCollection.scala | Scala | apache-2.0 | 1,847 |
package breeze.stats.hypothesis
import org.scalatest._
import org.scalatest.funsuite._
import matchers.should.Matchers._
class TTestTest extends AnyFunSuite {
val threshold = 0.01
test("T Test two sample") {
tTest(List(1.0, 1, 2, 3), List(9.0, 9, 8, 9)) should be(4.29E-5 +- threshold)
}
test("T Test one sample") {
tTest(Array(1.0, 1, 2, 3)) should be(0.0336 +- threshold)
}
test("T Test one sample for Traversable") {
//This test is designed to detect this bug, just in case a refactoring re-introduces it: https://github.com/scalanlp/breeze/issues/486
tTest(List(1.0, 1, 2, 3)) should be(0.0336 +- threshold)
}
test("T Test one sample should throw error when given vector of length 1") {
intercept[IllegalArgumentException] {
tTest(Array(1.0))
}
}
test("T Test two sample should throw error when given vector of length 1") {
intercept[IllegalArgumentException] {
tTest(Array(1.0, 2), Array(9.0))
}
}
}
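// Usage sketch (editorial addition): the same API outside the test harness. The
// two-sample form returns a two-sided p-value; the one-sample form tests the sample
// mean against zero.
//   tTest(List(1.0, 1, 2, 3), List(9.0, 9, 8, 9)) // ~= 4.29e-5
//   tTest(List(1.0, 1, 2, 3))                     // ~= 0.0336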
| scalanlp/breeze | math/src/test/scala/breeze/stats/hypothesis/TTestTest.scala | Scala | apache-2.0 | 973 |
package pages
import org.scalajs.dom._
import scala.scalajs.js.annotation.JSExportTopLevel
import API.Models
import scala.scalajs.js
import scala.util.{Failure, Success}
import scala.concurrent._
import ExecutionContext.Implicits.global
import scala.scalajs.js.JSON
class Budgets {
Budgets.list()
}
object Budgets {
var `type`: String = "Income"
var name: String = ""
var amount: Double = 0.0
var persistent: Int = 0
var reported: Boolean = false
var color: String = "chocolate"
var takesFrom: js.Array[Models.BudgetRef] = js.Array()
var editedID: Int = 0
private val budgetsWrapper = document.getElementById("budgets-wrapper").asInstanceOf[html.Div]
private val defaultBudgetWrapper = document.getElementById("budgets-defaults-wrapper").asInstanceOf[html.Div]
private val takesFromWrapper = document.getElementById("takes-from-wrapper").asInstanceOf[html.Div]
private val takesFromSelectorWrapper = document.getElementById("takes-from-selector-wrapper").asInstanceOf[html.Div]
private val incomesStats = document.getElementById("stats-incomes").asInstanceOf[html.Div]
private val outcomesStats = document.getElementById("stats-outcomes").asInstanceOf[html.Div]
private def getValues(): Unit = {
name = document.getElementById("budget-name").asInstanceOf[html.Input].value
amount = document.getElementById("budget-amount").asInstanceOf[html.Input].value.toDouble
if (
document.getElementById("income-tab-link").asInstanceOf[html.Div].className.contains("active")
) `type` = "Income"
else `type` = "Outcome"
}
def list(): Unit = {
var incomesSum = 0.0
var outcomesSum = 0.0
budgetsWrapper.innerHTML = ""
takesFromSelectorWrapper.innerHTML = ""
outcomesStats.innerHTML = ""
incomesStats.innerHTML = ""
API.getBudgets.onComplete {
case Success(resp) =>
val budgets = js.JSON.parse(resp.responseText).asInstanceOf[js.Array[Models.Budget]]
budgets.sortBy(_.`type`).reverse.foreach(budget => {
addBudget(budget)
if (budget.`type` == "Income") {
addTakesFromSelector(budget)
incomesSum += budget.left + budget.used + budget.exceeding
} else {
outcomesSum += budget.left + budget.used + budget.exceeding
}
})
budgets.foreach(addBudgetStats(_, incomesSum, outcomesSum))
document.getElementById("stats-incomes-sum").innerHTML = incomesSum.toString
document.getElementById("stats-outcomes-sum").innerHTML = outcomesSum.toString
API.getExchanges.onComplete {
case Success(respIn) =>
val exchanges = js.JSON.parse(respIn.responseText).asInstanceOf[js.Array[Models.Exchange]]
var borrowSum = 0.0
var lendSum = 0.0
exchanges.foreach(exchange =>
exchange.`type` match {
case "Borrow" =>
borrowSum += exchange.amount
case _ =>
lendSum += exchange.amount
})
addDefaultBudgets(borrowSum, lendSum, incomesSum, outcomesSum, 0)
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
}
def addDefaultBudgets(borrowSum: Double, lendSum: Double, incomeSum: Double, outcomeSum: Double, debtSum: Double): Unit = {
val borrowPercent: Int = (borrowSum / (borrowSum + lendSum) * 100).toInt
val exchanges = "<div class=\\"budget default\\">" +
"<h4 style=\\"padding-left: 15px\\">" +
"Exchanges" +
"<span class=\\"float-right\\">" +
"<i class=\\"fa fa-refresh\\" aria-hidden=\\"true\\"></i>" +
"</span>" +
"</h4>" +
"<div class=\\"progress\\">" +
"<div class=\\"progress-bar bg-danger\\" role=\\"progressbar\\" style=\\"width: " + borrowPercent + "%;\\">" + borrowSum + " CHF</div>" +
"<div class=\\"progress-bar bg-success\\" role=\\"progressbar\\" style=\\"width: " + (100 - borrowPercent) + "%;\\">" + lendSum + " CHF</div>" +
"</div>" +
"<div class=\\"details row\\">" +
"<div class=\\"col-3 text-left\\">" +
"| 0 CHF" +
"</div>" +
"<div class=\\"col-6 text-center\\">" +
"Borrow " + borrowPercent + "% - " + (100 - borrowPercent) + "% Lend" +
"</div>" +
"<div class=\\"col-3 text-right\\">" +
(borrowSum + lendSum) + " CHF |" +
"</div>" +
"</div>" +
"</div>"
val outcomePercent: Int = (outcomeSum / (incomeSum + outcomeSum) * 100).toInt
val balance = "<div class=\\"budget default\\">" +
"<h4 style=\\"padding-left: 15px\\">" +
"Balance" +
"<span class=\\"float-right\\">" +
"<i class=\\"fa fa-balance-scale\\" aria-hidden=\\"true\\"></i>" +
"</span>" +
"</h4>" +
"<div class=\\"progress\\">" +
"<div class=\\"progress-bar bg-danger\\" role=\\"progressbar\\" style=\\"width: " + outcomePercent + "%;\\">" + outcomeSum + " CHF</div>" +
"<div class=\\"progress-bar bg-success\\" role=\\"progressbar\\" style=\\"width: " + (100 -outcomePercent) + "%;\\">" + incomeSum + " CHF</div>" +
"</div>" +
"<div class=\\"details row\\">" +
"<div class=\\"col-3 text-left\\">" +
"| 0 CHF" +
"</div>" +
"<div class=\\"col-6 text-center\\">" +
"Outcome " + outcomePercent + "% - " + (100 -outcomePercent) + "% Income" +
"</div>" +
"<div class=\\"col-3 text-right\\">" +
(outcomeSum + incomeSum) +" CHF |" +
"</div>" +
"</div>" +
"</div>"
val debt = "<div class=\\"budget default\\">" +
"<h4 style=\\"padding-left: 15px\\">" +
"Debt" +
"<span class=\\"float-right\\">" +
"<i class=\\"fa fa-asterisk\\" aria-hidden=\\"true\\"></i>" +
"</span>" +
"</h4>" +
"<div class=\\"progress\\">" +
"<div class=\\"progress-bar bg-debt\\" role=\\"progressbar\\" style=\\"width: 100%;\\">" + debtSum + " CHF</div>" +
"</div>" +
"<div class=\\"details row\\">" +
"<div class=\\"col-3 text-left\\">" +
"| 0 CHF" +
"</div>" +
"<div class=\\"col-6 text-center\\">" +
"When all incomes are used, the outcomes uses debt" +
"</div>" +
"<div class=\\"col-3 text-right\\">" +
debtSum + " CHF |" +
"</div>" +
"</div>" +
"</div>"
defaultBudgetWrapper.innerHTML = exchanges + balance + debt
}
def addBudget(budget: Models.Budget): Unit = {
var usedPercent = 0
var leftPercent = 0
var exceedingPercent = 0
if (budget.used == 0 && budget.left == 0) {
usedPercent = 50
leftPercent = 50
} else {
leftPercent = (budget.left / (budget.left + budget.used + budget.exceeding) * 100).toInt
usedPercent = (budget.used / (budget.left + budget.used + budget.exceeding) * 100).toInt
exceedingPercent = (budget.exceeding / (budget.left + budget.used + budget.exceeding) * 100).toInt
if (budget.`type` == "Income")
leftPercent = 100 - exceedingPercent - usedPercent
else
usedPercent = 100 - exceedingPercent - leftPercent
}
val row = "<h4>" +
"<button onclick=\\"budgetsSetEditedID(" + budget.id + ")\\" class=\\"btn edit\\" data-toggle=\\"modal\\" data-target=\\"#budgets-modal\\"><i class=\\"fa fa-ellipsis-v\\" aria-hidden=\\"true\\"></i></button>" +
budget.name +
"<span class=\\"float-right\\">" +
(if (budget.reported)
"<i class=\\"fa fa-retweet\\" aria-hidden=\\"true\\"></i>" else "") +
(if (budget.persistent != 0)
"<i class=\\"fa fa-calendar-check-o\\" aria-hidden=\\"true\\"></i>" else "") +
(if (budget.`type` == "Income")
"<i class=\\"fa fa-arrow-down\\" aria-hidden=\\"true\\"></i>"
else
"<i class=\\"fa fa-arrow-up\\" aria-hidden=\\"true\\"></i>") +
"</span>" +
"</h4>" +
"<div class=\\"progress\\">" +
(if (budget.`type` == "Income")
"<div class=\\"progress-bar\\" role=\\"progressbar\\" style=\\"width: " + exceedingPercent + "%; background: green\\">" + budget.exceeding + " CHF</div>" +
"<div class=\\"progress-bar bg-faded left\\" role=\\"progressbar\\" style=\\"width: " + usedPercent + "%;\\">" + budget.used + " CHF</div>" +
"<div class=\\"progress-bar\\" role=\\"progressbar\\" style=\\"width: " + leftPercent + "%; background: " + budget.color + "\\">" + budget.left + " CHF</div>"
else
"<div class=\\"progress-bar\\" role=\\"progressbar\\" style=\\"width: " + usedPercent + "%; background: " + budget.color + "\\">" + budget.used + " CHF</div>" +
"<div class=\\"progress-bar bg-faded left\\" role=\\"progressbar\\" style=\\"width: " + leftPercent + "%;\\">" + budget.left + " CHF</div>" +
"<div class=\\"progress-bar\\" role=\\"progressbar\\" style=\\"width: " + exceedingPercent + "%; background: red\\">" + budget.exceeding + " CHF</div>") +
"</div>" +
"<div class=\\"details row\\">" +
"<div class=\\"col-3 text-left\\">" +
"| 0 CHF" +
"</div>" +
"<div class=\\"col-6 text-center\\">" +
(if (budget.`type` == "Income")
"Used " + usedPercent + "% - " + (leftPercent + 2*exceedingPercent) + "% Left"
else
"Used " + (usedPercent + 2*exceedingPercent) + "% - " + leftPercent + "% Left") +
"</div>" +
"<div class=\\"col-3 text-right\\">" +
(budget.used + budget.left + budget.exceeding) + " CHF |" +
"</div>" +
"</div>"
val div = document.createElement("div").asInstanceOf[html.Div]
div.className = "budget"
div.innerHTML = row
budgetsWrapper.appendChild(div)
}
def addBudgetStats(budget: Models.Budget, incomesSum: Double, outcomesSum: Double): Unit = {
val div = document.createElement("div").asInstanceOf[html.Div]
div.className = "progress-bar"
div.style.background = budget.color
div.innerHTML = (budget.used + budget.left + budget.exceeding) + "CHF"
if (budget.`type` == "Income") {
div.style.width = ((budget.used + budget.left + budget.exceeding) / incomesSum * 100) + "%"
incomesStats.appendChild(div)
} else {
div.style.width = ((budget.used + budget.left + budget.exceeding) / outcomesSum * 100) + "%"
outcomesStats.appendChild(div)
}
}
def addTakesFromSelector(income: Models.Budget): Unit = {
val a = "<a onclick=\\"budgetsAddTakesFrom(" + income.id + ", '" + income.name + "', true)\\" class=\\"dropdown-item\\" href=\\"#\\">" + income.name + "</a>"
val div = document.createElement("div").asInstanceOf[html.Div]
div.innerHTML = a
takesFromSelectorWrapper.appendChild(div)
}
@JSExportTopLevel("budgetsAddTakesFrom")
def addTakesFrom(id: Int, name: String, checkDuplicates: Boolean = true): Unit = {
if (!checkDuplicates || takesFrom.indexWhere(_.budgetId == id) == -1) {
takesFrom.append(new Models.BudgetRef(
if (takesFrom.length == 0)
0
else
takesFrom.last.order + 1,
id
))
val row = "<span class=\\"order\\">" + takesFrom.last.order + "</span> " + name + " <a onclick=\\"budgetsRemoveTakesFrom(this.parentNode, " + id + ")\\" href=\\"#\\" class=\\"remove\\"><i class=\\"fa fa-times\\" aria-hidden=\\"true\\"></i></a>"
val div = document.createElement("div").asInstanceOf[html.Div]
div.className = "related-income"
div.innerHTML = row
takesFromWrapper.appendChild(div)
}
}
@JSExportTopLevel("budgetsRemoveTakesFrom")
def removeTakesFrom(elem: html.Div, id: Int): Unit = {
takesFromWrapper.removeChild(elem)
takesFrom.remove(takesFrom.indexWhere(_.budgetId == id))
}
@JSExportTopLevel("budgetsSetEditedID")
def setEditedID(id: Int): Unit = {
editedID = id
id match {
case 0 =>
name = ""
amount = 0.0
document.getElementById("budget-name").asInstanceOf[html.Input].value = ""
document.getElementById("budget-amount").asInstanceOf[html.Input].value = ""
case _ =>
API.getBudget(id).onComplete {
case Success(resp) =>
val budget = JSON.parse(resp.responseText).asInstanceOf[Models.Budget]
`type` = budget.`type`
name = budget.name
amount = budget.used + budget.left + budget.exceeding
persistent = budget.persistent
reported = budget.reported
color = budget.color
takesFrom = budget.takesFrom
document.getElementById("budget-name").asInstanceOf[html.Input].value = name
document.getElementById("budget-amount").asInstanceOf[html.Input].value = amount.toString
document.getElementById("persistent-selector").asInstanceOf[html.Link].innerHTML = persistent match {
case 1 => "Every day"
case 7 => "Every week"
case 30 => "Every month"
case 365 => "Every year"
case _ => "None"
}
document.getElementById("reported-selector").asInstanceOf[html.Link].innerHTML = if (reported) {
"Yes"
} else {
"No"
}
document.getElementById("color-selector").asInstanceOf[html.Link].innerHTML =
"<span class=\\"preview-color\\" style=\\"background: " + color + "\\"></span> " + color match {
case "aquamarine" => "Aquamarine"
case "blueviolet" => "Blue violet"
case "brown" => "Brown"
case "cadetblue" => "CadetBlue"
case "chartreuse" => "Chartreuse"
case "chocolate" => "Chocolate"
case "coral" => "Coral"
case "cornflowerblue" => "Cornflower blue"
case "darkblue" => "Dark blue"
case "darkcyan" => "Dark cyan"
case "darkgoldenrod" => "Dark golden rod"
case "darkgreen" => "Dark green"
case "darkkhaki" => "Dark khaki"
case "darkmagenta" => "Dark magenta"
case "darkorange" => "Dark orange"
case "darksalmon" => "Dark salmon"
case "darkturquoise" => "Dark turquoise"
case "deeppink" => "Deep pink"
case "deepskyblue" => "Deep sky blue"
case "goldenrod" => "Golden rod"
case "green" => "Green"
case "indianred" => "Indian red"
case "indigo" => "Indigo"
case "lightslategray" => "Light slate gray"
case "limegreen" => "Lime green"
case "mediumblue" => "Medium blue"
case "olive" => "Olive"
case "orange" => "Orange"
case other => other
})
takesFromWrapper.innerHTML = ""
takesFrom.foreach(el => {
API.getBudget(el.budgetId).onComplete {
case Success(respIn) =>
val budgetIn = JSON.parse(respIn.responseText).asInstanceOf[Models.Budget]
addTakesFrom(el.budgetId, budgetIn.name, checkDuplicates = false)
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
})
`type` match {
case "Outcome" =>
document.getElementById("outcome-tab-link").asInstanceOf[html.Link].click()
case _ =>
document.getElementById("income-tab-link").asInstanceOf[html.Link].click()
}
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
}
}
@JSExportTopLevel("setPersistent")
def setPersistent(elem: html.Link): Unit = {
persistent = elem.innerHTML match {
case "Every day" => 1
case "Every week" => 7
case "Every month" => 30
case "Every year" => 365
case _ => 0
}
document.getElementById("persistent-selector").innerHTML = elem.innerHTML
}
@JSExportTopLevel("setReported")
def setReported(elem: html.Link): Unit = {
reported = elem.innerHTML match {
case "Yes" => true
case _ => false
}
document.getElementById("reported-selector").innerHTML = elem.innerHTML
}
@JSExportTopLevel("setColor")
def setColor(elem: html.Link): Unit = {
val pattern = "(<span.*background: )(.*)(\\"><.*)".r
val pattern(_, colorName, _) = elem.innerHTML
color = colorName
document.getElementById("color-selector").innerHTML = elem.innerHTML
}
@JSExportTopLevel("budgetsDelete")
def delete(): Unit = {
if(editedID != 0) {
API.deleteBudget(editedID).onComplete {
case Success(resp) =>
Utils.addAlert("success", js.JSON.parse(resp.responseText).selectDynamic("message").asInstanceOf[String])
list()
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
}
}
@JSExportTopLevel("budgetsSave")
def save(): Unit = {
getValues()
val budget = new Models.Budget(
editedID,
name,
`type`,
0,
amount,
0,
persistent,
reported,
color,
takesFrom
)
editedID match {
case 0 =>
API.postBudget(budget).onComplete {
case Success(resp) =>
Utils.addAlert("success", js.JSON.parse(resp.responseText).selectDynamic("message").asInstanceOf[String])
list()
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
case _ =>
API.patchBudget(budget).onComplete {
case Success(resp) =>
Utils.addAlert("success", js.JSON.parse(resp.responseText).selectDynamic("message").asInstanceOf[String])
list()
case Failure(e: ext.AjaxException) =>
Utils.addAlert("danger", js.JSON.parse(e.xhr.responseText).selectDynamic("message").asInstanceOf[String])
}
}
}
}
| MathieuUrstein/HEIG.SCALA.Projet | client/src/main/scala/pages/Budgets.scala | Scala | apache-2.0 | 18,294 |
/*
* This file is part of the ToolXiT project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolxit.bibtex
package test
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class NameFormattingTest extends FlatSpec with Matchers {
"The formatter" should "format correctly" in {
val formatter = new NameFormatter("{f. }{vv }{ll}")
formatter(Author("Lucas", "", "Satabin", "")) should equal("L. Satabin")
formatter(Author("Jean-Baptiste", "", "Poquelin", "")) should equal("J.-B. Poquelin")
}
}
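// Editorial note: the pattern string follows BibTeX name-formatting conventions:
// {f. } abbreviates first names, {vv } is the "von" particle, {ll} is the last name.
// Another pattern, assuming the same rules hold (illustrative only):
//   new NameFormatter("{ll}, {f.}")(Author("Lucas", "", "Satabin", "")) // "Satabin, L."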
| jopasserat/toolxit-bibtex | core/src/test/scala/toolxit/bibtex/test/NameFormattingTest.scala | Scala | apache-2.0 | 1,129 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.mv.plans
import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.aggregate.Count
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.{LeftOuter, RightOuter, _}
import org.apache.carbondata.mv.dsl.Plans._
import org.apache.carbondata.mv.plans.modular.Flags._
import org.apache.carbondata.mv.plans.modular.{JoinEdge, ModularRelation}
import org.apache.carbondata.mv.testutil.ModularPlanTest
class LogicalToModularPlanSuite extends ModularPlanTest {
val testRelation0 = LocalRelation('a.int, 'b.int, 'c.int)
val testRelation1 = LocalRelation('d.int)
val testRelation2 = LocalRelation('c.int, 'd.int)
test("select only") {
val originalQuery =
testRelation0
.select('a.attr)
.analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Project(proj,MatchLocalRelation(tbl,_)) =>
ModularRelation(null,null,tbl,NoFlags,Seq.empty).select(proj:_*)(tbl:_*)()(Map.empty)()
}
comparePlans(modularized, correctAnswer)
}
test("select-project-groupby grouping without aggregate function") {
val originalQuery =
testRelation0
.select('a)
.groupBy('a)('a)
.select('a).analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Project(proj1,logical.Aggregate(grp,agg,logical.Project(proj2,MatchLocalRelation(tbl,_)))) =>
ModularRelation(null,null,tbl,NoFlags,Seq.empty).select(proj2:_*)(tbl:_*)()(Map.empty)().groupBy(agg:_*)(proj2:_*)(grp:_*).select(proj1:_*)(proj1:_*)()(Map.empty)()
}
comparePlans(modularized, correctAnswer)
}
test("select-project with filter") {
val originalQuery =
testRelation0
.where('a + 'b === 1)
.select('a + 'b as 'e)
.analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Project(proj,logical.Filter(cond,MatchLocalRelation(tbl,_))) =>
ModularRelation(null,null,tbl,NoFlags,Seq.empty).select(proj:_*)(tbl:_*)(cond)(Map.empty)()
}
comparePlans(modularized, correctAnswer)
}
test("join") {
val left = testRelation0.where('a === 1)
val right = testRelation1
val originalQuery =
left.join(right, condition = Some("d".attr === "b".attr || "d".attr === "c".attr)).analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Join(logical.Filter(cond1,MatchLocalRelation(tbl1,_)),MatchLocalRelation(tbl2,_),Inner,Some(cond2)) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty)).select(tbl1++tbl2:_*)(tbl1++tbl2:_*)(Seq(cond1,cond2):_*)(Map.empty)(JoinEdge(0,1,Inner))
}
comparePlans(modularized, correctAnswer)
}
test("left outer join") {
val left = testRelation0.where('a === 1)
val right = testRelation1
val originalQuery =
left.join(right, LeftOuter, condition = Some("d".attr === "b".attr || "d".attr === "c".attr)).analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Join(logical.Filter(cond1,MatchLocalRelation(tbl1,_)),MatchLocalRelation(tbl2,_),LeftOuter,Some(cond2)) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty)).select(tbl1++tbl2:_*)(tbl1++tbl2:_*)(Seq(cond1,cond2):_*)(Map.empty)(JoinEdge(0,1,LeftOuter))
}
comparePlans(modularized, correctAnswer)
}
test("right outer join") {
val left = testRelation0.where('a === 1)
val right = testRelation1
val originalQuery =
left.join(right, RightOuter, condition = Some("d".attr === "b".attr || "d".attr === "c".attr)).analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Join(logical.Filter(cond1,MatchLocalRelation(tbl1,_)),MatchLocalRelation(tbl2,_),RightOuter,Some(cond2)) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty)).select(tbl1++tbl2:_*)(tbl1++tbl2:_*)(Seq(cond1,cond2):_*)(Map.empty)(JoinEdge(0,1,RightOuter))
}
comparePlans(modularized, correctAnswer)
}
ignore("joins: conjunctive predicates #1 with alias") {
val left = testRelation0.where('a === 1).subquery('x)
val right = testRelation1.subquery('y)
val originalQuery =
left.join(right, condition = Some("x.b".attr === "y.d".attr)).analyze
val modularized = analysis.EliminateSubqueryAliases(originalQuery).modularize
val correctAnswer = originalQuery match {
case logical.Join(logical.Filter(cond1,MatchLocalRelation(tbl1,_)),MatchLocalRelation(tbl2,_),Inner,Some(cond2)) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty)).select(tbl1++tbl2:_*)(tbl1++tbl2:_*)(Seq(cond1,cond2):_*)(Map.empty)(JoinEdge(0,1,Inner))
}
comparePlans(modularized, correctAnswer)
}
ignore("joins: conjunctive predicates #2 with alias") {
val lleft = testRelation0.where('a >= 3).subquery('z)
val left = testRelation0.where('a === 1).subquery('x)
val right = testRelation0.subquery('y)
val originalQuery =
lleft.join(
left.join(right, condition = Some("x.b".attr === "y.b".attr)),
condition = Some("z.a".attr === "x.b".attr))
.analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Join(logical.Filter(cond1,MatchLocalRelation(tbl1,_)),logical.Join(logical.Filter(cond2,MatchLocalRelation(tbl2,_)),MatchLocalRelation(tbl3,_),Inner,Some(cond3)),Inner,Some(cond4)) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty),ModularRelation(null,null,tbl3,NoFlags,Seq.empty)).select(tbl1++tbl2++tbl3:_*)(tbl1++tbl2++tbl3:_*)(Seq(cond1,cond2,cond3,cond4):_*)(Map.empty)(JoinEdge(0,1,Inner),JoinEdge(1,2,Inner))
}
comparePlans(modularized, correctAnswer)
}
ignore("SPJGH query") {
val left = testRelation0.where('b >= 1).subquery('x)
val right = testRelation2.where('d >= 2).subquery('y)
val originalQuery =
left.join(right, Inner, Option("x.c".attr ==="y.c".attr))
.groupBy("x.a".attr)("x.a".attr as 'f, Count("x.b") as 'g)
.select('f)
.where('g > 1).analyze
val modularized = originalQuery.modularize
val correctAnswer = originalQuery match {
case logical.Project(proj0, logical.Filter(cond1, logical.Project(proj1, logical.Aggregate(grp,agg,logical.Join(logical.Filter(cond2,MatchLocalRelation(tbl1,_)),logical.Filter(cond3,MatchLocalRelation(tbl2,_)),Inner,Some(cond4)))))) =>
Seq(ModularRelation(null,null,tbl1,NoFlags,Seq.empty),ModularRelation(null,null,tbl2,NoFlags,Seq.empty)).select(tbl1++tbl2:_*)(tbl1++tbl2:_*)(Seq(cond2,cond3,cond4):_*)(Map.empty)(JoinEdge(0,1,Inner)).groupBy(agg:_*)(tbl1++tbl2:_*)(grp:_*).select(proj0:_*)(proj1:_*)(cond1)(Map.empty)()
}
comparePlans(modularized, correctAnswer)
}
ignore("non-SPJGH query") {
val mqoAnswer = try testRelation0.where('b > 2).select('a).orderBy('a.asc).analyze.modularize catch {
case e: Exception =>
s"""
|Exception thrown while modularizing query:
|== Exception ==
|$e
""".stripMargin.trim
}
val correctAnswer =
s"""
|Exception thrown while modularizing query:
|== Exception ==
|java.lang.UnsupportedOperationException: unsupported operation: No modular plan for
|Sort [a#0 ASC NULLS FIRST], true
|+- Project [a#0]
| +- Filter (b#1 > 2)
| +- LocalRelation <empty>, [a#0, b#1, c#2]
""".stripMargin.trim
compareMessages(mqoAnswer.toString,correctAnswer)
}
}
| sgururajshetty/carbondata | datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/LogicalToModularPlanSuite.scala | Scala | apache-2.0 | 9,005 |
package extruder.tests
import cats.Monad
import cats.data.{OptionT, ValidatedNel}
import cats.instances.either._
import cats.instances.int._
import cats.instances.list._
import cats.instances.map.catsKernelStdEqForMap
import cats.instances.option._
import cats.instances.string._
import cats.instances.tuple._
import cats.instances.stream._
import cats.kernel.laws.discipline.MonoidTests
import extruder.core.{MultiShow, Settings, Show}
import extruder.data.Validation
import extruder.laws.EncoderDecoderGenericTests
import extruder.map._
import org.scalatest.FunSuite
import org.typelevel.discipline.scalatest.Discipline
class MapSuite extends FunSuite with Discipline {
checkAll("Map Monoid", MonoidTests[Map[String, String]].monoid)
checkAll(
"Map",
EncoderDecoderGenericTests[Validation, Settings, Map[String, String], Map[String, String], Map[String, String]](
defaultSettings
).genericEncodeDecode[Int, Int, String]
)
}
| janstenpickle/extruder | tests/src/test/scala/extruder/tests/MapSuite.scala | Scala | mit | 954 |
package chapter.six
object ExerciseSix extends App {
// todo: is there a cleaner way to define this? (one alternative is sketched below)
object Suit extends Enumeration {
type Suit = Value
val Clubs = Value("\u2663") // ♣
val Diamonds = Value("\u2666") // ♦
val Hearts = Value("\u2665") // ♥
val Spades = Value("\u2660") // ♠
}
}
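// One possible answer to the todo above (an editorial sketch, not the book's solution):
// a sealed trait hierarchy with case objects gives exhaustive pattern matching and
// keeps the suit symbol next to each value.
object SuitAlternativeSketch {
  sealed abstract class Suit(val symbol: Char)
  case object Clubs extends Suit('\u2663') // ♣
  case object Diamonds extends Suit('\u2666') // ♦
  case object Hearts extends Suit('\u2665') // ♥
  case object Spades extends Suit('\u2660') // ♠
  val all: List[Suit] = List(Clubs, Diamonds, Hearts, Spades)
}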
| deekim/impatient-scala | src/main/scala/chapter/six/ExerciseSix.scala | Scala | apache-2.0 | 340 |
// Copyright (c) 2016 PSForever.net to present
package net.psforever.packet.game
import net.psforever.packet.{GamePacketOpcode, Marshallable, PacketHelpers, PlanetSideGamePacket}
import scodec.Codec
import scodec.codecs._
final case class ReloadMessage(item_guid : PlanetSideGUID,
ammo_clip : Long,
unk1 : Int)
extends PlanetSideGamePacket {
type Packet = ReloadMessage
def opcode = GamePacketOpcode.ReloadMessage
def encode = ReloadMessage.encode(this)
}
object ReloadMessage extends Marshallable[ReloadMessage] {
implicit val codec : Codec[ReloadMessage] = (
("item_guid" | PlanetSideGUID.codec) ::
("ammo_clip" | uint32L) ::
("unk1" | int32L)
).as[ReloadMessage]
}
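// Round-trip sketch (editorial addition): exercising the codec directly. The
// PlanetSideGUID(75) constructor call and the field values are assumptions for
// illustration only.
object ReloadMessageRoundTripSketch {
  val sample = ReloadMessage(PlanetSideGUID(75), ammo_clip = 123L, unk1 = 0)
  val bits = ReloadMessage.codec.encode(sample).require     // BitVector
  val back = ReloadMessage.codec.decode(bits).require.value // == sample
}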
| Fate-JH/PSF-Server | common/src/main/scala/net/psforever/packet/game/ReloadMessage.scala | Scala | gpl-3.0 | 769 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.runtime
package graph
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import java.io.File
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf
import com.asakusafw.runtime.directio.hadoop.HadoopDataSource
@RunWith(classOf[JUnitRunner])
class DirectOutputSetupSpecTest extends DirectOutputSetupSpec
class DirectOutputSetupSpec
extends FlatSpec
with SparkForAll
with JobContextSugar
with RoundContextSugar
with TempDirForAll {
import DirectOutputSetupSpec._
behavior of classOf[DirectOutputSetup].getSimpleName
private var root: File = _
override def configure(conf: SparkConf): SparkConf = {
root = createTempDirectoryForAll("directio-").toFile()
conf.setHadoopConf("com.asakusafw.directio.test", classOf[HadoopDataSource].getName)
conf.setHadoopConf("com.asakusafw.directio.test.path", "test")
conf.setHadoopConf("com.asakusafw.directio.test.fs.path", root.getAbsolutePath)
}
it should "delete simple" in {
implicit val jobContext = newJobContext(sc)
val file = new File(root, "out1/testing.bin")
file.getParentFile.mkdirs()
file.createNewFile()
val setup = new Setup(Set(("id", "test/out1", Seq("*.bin"))))
val rc = newRoundContext()
Await.result(setup.perform(rc), Duration.Inf)
assert(file.exists() === false)
}
it should "not delete out of scope" in {
implicit val jobContext = newJobContext(sc)
val file = new File(root, "out2/testing.bin")
file.getParentFile.mkdirs()
file.createNewFile()
val setup = new Setup(Set(("id", "test/out2", Seq("*.txt"))))
val rc = newRoundContext()
Await.result(setup.perform(rc), Duration.Inf)
assert(file.exists() === true)
}
}
object DirectOutputSetupSpec {
class Setup(
val specs: Set[(String, String, Seq[String])])(
implicit jobContext: JobContext)
extends DirectOutputSetup with CacheOnce[RoundContext, Future[Unit]]
}
| ueshin/asakusafw-spark | runtime/src/test/scala/com/asakusafw/spark/runtime/graph/DirectOutputSetupSpec.scala | Scala | apache-2.0 | 2,768 |
import scala.pickling._
import scala.pickling.Defaults._
import scala.pickling.binary._
object ListIntBench extends scala.pickling.testing.PicklingBenchmark {
val lst = (1 to size).toList
override def run() {
val pickle = lst.pickle
pickle.unpickle[List[Int]]
}
}
| beni55/pickling | benchmark/ListIntBench.scala | Scala | bsd-3-clause | 280 |
package org.jetbrains.plugins.scala.macroAnnotations
import scala.annotation.StaticAnnotation
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
/**
* This annotation makes the compiler generate code that calls CachesUtil.get(...)
*
* NOTE: Annotated function should preferably be top-level or in a static object for better performance.
* The field for Key is generated and it is more efficient to just keep it stored in this field, rather than get
* it from CachesUtil every time
*
* Author: Svyatoslav Ilinskiy
* Date: 9/25/15.
*/
class CachedInsidePsiElement(psiElement: Any, dependencyItem: Object, useOptionalProvider: Boolean = false) extends StaticAnnotation {
def macroTransform(annottees: Any*) = macro CachedInsidePsiElement.cachedInsidePsiElementImpl
}
object CachedInsidePsiElement {
def cachedInsidePsiElementImpl(c: whitebox.Context)(annottees: c.Tree*): c.Expr[Any] = {
import CachedMacroUtil._
import c.universe._
implicit val x: c.type = c
def parameters: (Tree, Tree, Boolean) = {
c.prefix.tree match {
case q"new CachedInsidePsiElement(..$params)" if params.length == 2 =>
(params.head, modCountParamToModTracker(c)(params(1), params.head), false)
case q"new CachedInsidePsiElement(..$params)" if params.length == 3 =>
val optional = params.last match {
case q"useOptionalProvider = $v" => c.eval[Boolean](c.Expr(v))
case q"$v" => c.eval[Boolean](c.Expr(v))
}
(params.head, modCountParamToModTracker(c)(params(1), params.head), optional)
case _ => abort("Wrong annotation parameters!")
}
}
//annotation parameters
val (elem, dependencyItem, useOptionalProvider) = parameters
annottees.toList match {
case DefDef(mods, name, tpParams, paramss, retTp, rhs) :: Nil =>
if (retTp.isEmpty) {
abort("You must specify return type")
}
//generated names
val cachedFunName = generateTermName("cachedFun")
val keyId = c.freshName(name.toString + "cacheKey")
val key = generateTermName(name + "Key")
val cacheStatsName = generateTermName("cacheStats")
val defdefFQN = thisFunctionFQN(name.toString)
val analyzeCaches = analyzeCachesEnabled(c)
val provider =
if (useOptionalProvider) TypeName("MyOptionalProvider")
else TypeName("MyProvider")
val actualCalculation = transformRhsToAnalyzeCaches(c)(cacheStatsName, retTp, rhs)
val analyzeCachesEnterCacheArea =
if (analyzeCaches) q"$cacheStatsName.aboutToEnterCachedArea()"
else EmptyTree
val updatedRhs = q"""
def $cachedFunName(): $retTp = $actualCalculation
..$analyzeCachesEnterCacheArea
$cachesUtilFQN.get($elem, $key, new $cachesUtilFQN.$provider[Any, $retTp]($elem, _ => $cachedFunName())($dependencyItem))
"""
val updatedDef = DefDef(mods, name, tpParams, paramss, retTp, updatedRhs)
val res = q"""
private val $key = $cachesUtilFQN.getOrCreateKey[$keyTypeFQN[$cachedValueTypeFQN[$retTp]]]($keyId)
${if (analyzeCaches) q"private val $cacheStatsName = $cacheStatisticsFQN($keyId, $defdefFQN)" else EmptyTree}
..$updatedDef
"""
println(res)
c.Expr(res)
case _ => abort("You can only annotate one function!")
}
}
}
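// Usage sketch (editorial addition; the modification tracker constant is an assumption,
// not taken from this file): the annotation is applied to a method of a PsiElement-backed
// class, and the macro rewrites the body to go through CachesUtil.get with a generated Key.
//   @CachedInsidePsiElement(this, PsiModificationTracker.MODIFICATION_COUNT)
//   def cachedMembers: Seq[PsiMember] = computeMembers()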
| JetBrains/intellij-scala-historical | macroAnnotations/src/org/jetbrains/plugins/scala/macroAnnotations/CachedInsidePsiElement.scala | Scala | apache-2.0 | 3,451 |
package test
import org.specs2.mutable._
import play.api.test._
import play.api.test.Helpers._
class AuthenticationBrowser extends Specification {
"Authentication Browser" should {
"try invalid login" in {
running(TestServer(3333), HTMLUNIT) { browser =>
browser.goTo("http://localhost:3333/")
browser.$("#email").text("[email protected]")
browser.$("#password").text("secret111")
browser.$("#loginbutton").click()
browser.url must equalTo("http://localhost:3333/login")
/*browser.pageSource must contain("Invalid email or password")*/
}
}
"log in" in {
running(TestServer(3333), HTMLUNIT) { browser =>
browser.goTo("http://localhost:3333/")
browser.$("#email").text("[email protected]")
browser.$("#password").text("secret")
browser.$("#loginbutton").click()
browser.pageSource must contain("logout")
browser.getCookies().size must equalTo(1)
browser.goTo("http://localhost:3333/")
browser.url must equalTo("http://localhost:3333/")
browser.goTo("http://localhost:3333/sandra")
browser.pageSource must contain("michele")
browser.pageSource must contain("sandra")
}
}
"create double user" in {
running(TestServer(3333), HTMLUNIT) { browser =>
// test repeat email
browser.goTo("http://localhost:3333/createUser")
browser.$("#email").text("[email protected]")
browser.$("#name").text("newName!!")
browser.$("#passw1").text("secret111")
browser.$("#passw1").text("secret111")
browser.$("#createbutton").click()
browser.url must equalTo("http://localhost:3333/createUser")
// test repeat name
browser.$("#email").text("[email protected]")
browser.$("#name").text("michele")
browser.$("#passw1").text("secret111")
browser.$("#passw1").text("secret111")
browser.$("#createbutton").click()
browser.url must equalTo("http://localhost:3333/createUser")
}
}
}
}
| mresposito/flipFeed | test/AuthenticationBrowser.scala | Scala | apache-2.0 | 2,084 |
package org.precompiler.spark101.env
import com.typesafe.config.ConfigFactory
import org.scalatest.FunSuite
/**
*
* @author Richard Li
*/
class TypesafeConfigTest extends FunSuite {
test("loadConfigFile") {
val conf = ConfigFactory.load("application-test.properties")
assert("test1" == conf.getString("key"))
val conf2 = ConfigFactory.load("conf/application-test.properties")
assert("test2" == conf2.getString("key"))
}
}
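// Editorial note: for these assertions to hold, the test classpath is assumed to carry
//   src/test/resources/application-test.properties       with key=test1
//   src/test/resources/conf/application-test.properties  with key=test2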
| precompiler/spark-101 | learning-spark/src/test/scala/org/precompiler/spark101/env/TypesafeConfigTest.scala | Scala | apache-2.0 | 452 |
package preprocess
import scala.collection.mutable.{HashMap, ArrayBuilder}
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkContext, HashPartitioner, storage}
import org.apache.spark.SparkContext._
import org.apache.spark.serializer.KryoRegistrator
object KDDCup2012 {
val dataDir = "/home/xz60/data/KDDCup2012/"
val trainFile = dataDir + "training.txt"
val despTokenFile = dataDir + "descriptionid_tokensid.txt"
val keywordTokenFile = dataDir + "purchasedkeywordid_tokensid.txt"
val queryTokenFile = dataDir + "queryid_tokensid.txt"
val titleTokenFile = dataDir + "titleid_tokensid.txt"
val userProfileFile = dataDir + "userid_profile.txt"
val testFeatureFile = dataDir + "test_features"
val testLabelFile = dataDir + "test_labels"
val alpha = 0.05f
val beta = 75
val outputDir = "output/KDDCup2012/"
val tmpDir = "/tmp/spark"
val numCores = 16
val numReducers = 16*numCores
val mode = "local[16]"
val jars = Seq("examples/target/scala-2.9.3/" +
"spark-examples-assembly-0.8.1-incubating.jar")
def main(args : Array[String]) {
System.setProperty("spark.local.dir", tmpDir)
System.setProperty("spark.default.parallelism", numReducers.toString)
System.setProperty("spark.storage.memoryFraction", "0.3")
System.setProperty("spark.akka.frameSize", "1110") //for large .collect() objects
// System.setProperty("spark.serializer",
// "org.apache.spark.serializer.KryoSerializer")
// System.setProperty("spark.kryo.registrator", "utilities.Registrator")
// System.setProperty("spark.kryoserializer.buffer.mb", "16")
// System.setProperty("spark.kryo.referenceTracking", "false")
val jobName = "Preprocess_KDDCup2012"
val logPath = outputDir + jobName + ".txt"
val storageLevel = storage.StorageLevel.MEMORY_AND_DISK_SER
val sc = new SparkContext(mode, jobName, System.getenv("SPARK_HOME"), jars)
// Read in the descriptionid_tokensid.txt file
//num unique descriptions = 3171830, max descriptionID = 3171829
//if despTokenSize = 20000, each token appears at least 81 times
val numDesps = 3171829+1
val despTokenSize = 20000
val despToken = new Array[Array[Int]](numDesps)
val tokensMap = new HashMap[Int, Int]
val despTokenRDD = sc.textFile(despTokenFile).map(line => {
val array = line.split("\\t")
assert(array.length == 2)
(array(0).toInt, array(1).split("\\|").map(_.toInt))
}).persist(storageLevel)
despTokenRDD.flatMap(_._2).map((_, 1)).reduceByKey(_+_)
.map(pair => (pair._2, pair._1)).sortByKey(false).map(_._2)
.take(despTokenSize).zipWithIndex.foreach(pair => tokensMap(pair._1) = pair._2)
despTokenRDD.collect.par.foreach(pair => {
despToken(pair._1) = pair._2.map(tokensMap.getOrElse(_, -1)).filter(_>=0)
})
despTokenRDD.unpersist(false)
// Read in the purchasedkeywordid_tokensid.txt file
//num unique keywords = 1249785, max keywordID = 1249784
//if keywordTokenSize = 20000, each token appears at least 11 times
val numKeywords = 1249784 + 1
val keywordTokenFreqTh = 20
val keywordToken = new Array[Array[Int]](numKeywords)
tokensMap.clear
val keywordTokenRDD = sc.textFile(keywordTokenFile).map(line => {
val array = line.split("\\t")
assert(array.length == 2)
(array(0).toInt, array(1).split("\\|").map(_.toInt))
}).persist(storageLevel)
keywordTokenRDD.flatMap(_._2).map((_, 1)).reduceByKey(_+_)
.filter(_._2 > keywordTokenFreqTh).map(_._1)
.collect.zipWithIndex.foreach(pair => tokensMap(pair._1) = pair._2)
val keywordTokenSize = tokensMap.size
keywordTokenRDD.collect.par.foreach(pair => {
keywordToken(pair._1) = pair._2.map(tokensMap.getOrElse(_, -1)).filter(_>=0)
})
keywordTokenRDD.unpersist(false)
// Read in the queryid_tokensid.txt file
//num unique queries: 26243606, max queryID = 26243605
val numQueries = 26243605 + 1
val queryTokenFreqTh = 20
val queryToken = new Array[Array[Int]](numQueries)
tokensMap.clear
val queryTokenRDD = sc.textFile(queryTokenFile).map(line => {
val array = line.split("\\t")
assert(array.length == 2)
(array(0).toInt, array(1).split("\\|").map(_.toInt))
}).persist(storageLevel)
queryTokenRDD.flatMap(_._2).map((_, 1)).reduceByKey(_+_)
.filter(_._2>queryTokenFreqTh).map(_._1)
.collect.zipWithIndex.foreach(pair => tokensMap(pair._1) = pair._2)
val queryTokenSize = tokensMap.size
queryTokenRDD.collect.par.foreach(pair => {
queryToken(pair._1) = pair._2.map(tokensMap.getOrElse(_, -1)).filter(_>=0)
})
queryTokenRDD.unpersist(false)
// Read in the titleid_tokensid.txt file
//num of unique titles = 4051441, max titleID = 4051440 (246.6 MB)
//if titleTokenSize = 20000, each token appears at least 49 times
val numTitles = 4051440 + 1
val titleTokenFreqTh = 50
val titleToken = new Array[Array[Int]](numTitles)
tokensMap.clear
val titleTokenRDD = sc.textFile(titleTokenFile).map(line => {
val array = line.split("\\t")
assert(array.length == 2)
(array(0).toInt, array(1).split("\\|").map(_.toInt))
}).persist(storageLevel)
titleTokenRDD.flatMap(_._2).map((_, 1)).reduceByKey(_+_)
.filter(_._2 > titleTokenFreqTh).map(_._1)
.collect.zipWithIndex.foreach(pair => tokensMap(pair._1) = pair._2)
val titleTokenSize = tokensMap.size
titleTokenRDD.collect.par.foreach(pair => {
titleToken(pair._1) = pair._2.map(tokensMap.getOrElse(_, -1)).filter(_>=0)
})
titleTokenRDD.unpersist(false)
// Read in the userid_profile.txt file
//num of unique users = 23669283, max userID = 23907634
val numUsers = 23907634 + 1
val userProfile = new Array[Array[Int]](numUsers)
sc.textFile(userProfileFile).map(line => {
val array = line.split("\\t")
assert(array.length == 3)
(array(0).toInt, Array(array(1).toInt, array(2).toInt))
}).collect.par.foreach(pair => userProfile(pair._1) = pair._2)
// Read in the training.txt file
val train = sc.textFile(trainFile).mapPartitions(_.map(_.split("\\t")).map(arr =>
Array(arr(0).toInt, arr(1).toInt, arr(3).toInt, arr(4).toInt, arr(5).toInt,
arr(6).toInt, arr(7).toInt, arr(8).toInt, arr(9).toInt, arr(10).toInt,
arr(11).toInt))
).persist(storageLevel)
// after preprocessing, trainFile line format:
// 0. Click, 1. Impression, 2. AdID, 3. AdvertiserID, 4. Depth, 5. Position,
// 6. QueryID, 7. KeywordID, 8. TitleID, 9. DescriptionID, 10. UserID
// Number of appearances of the same user
// Average click-through-rate for user
// num of unique users = 23669283, max userID = 23907634
val userStats = train.map(tokens => (tokens(10), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2)).collect.par
val userFreq = new Array[Int](numUsers)
userStats.map(pair => (pair._1, pair._2._1))
.foreach(pair => userFreq(pair._1) = pair._2)
val userWithClicks = new Array[Boolean](numUsers)
userStats.filter(_._2._1>0).map(_._1).foreach(i => userWithClicks(i) = true)
val userCtr = new Array[Float](numUsers)
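    // Smoothed CTR: (click + alpha*beta) / (impression + beta) looks like a Beta-prior
    // shrinkage toward alpha, presumably to stabilize estimates for rarely seen ids.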
userStats.map{
case(id, (click, impression)) => (id, (click+alpha*beta)/(impression + beta))
}.foreach(pair => userCtr(pair._1) = pair._2)
// Number of occurrences of the same query
// Average click-through-rate for query
//num unique queries: 26243606, max queryID = 26243605
val queryStats = train.map(tokens => (tokens(6), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2)).collect.par
val queryFreq = new Array[Int](numQueries)
queryStats.map(pair => (pair._1, pair._2._1))
.foreach(pair => queryFreq(pair._1) = pair._2)
val queryWithClicks = new Array[Boolean](numQueries)
queryStats.filter(_._2._1>0).map(_._1).foreach(i => queryWithClicks(i) = true)
val queryCtr = new Array[Float](numQueries)
queryStats.map{
case(id, (click, impression)) => (id, (click+alpha*beta)/(impression + beta))
}.foreach(pair => queryCtr(pair._1) = pair._2)
// Number of occurrences of the same ad
// Average click-through-rate for ads' id
//num of unique Ads 641707, max AdsID 22238277
val adsStats = train.map(tokens => (tokens(2), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2)).collect
val adIDMap = new HashMap[Int, Int]
adsStats.map(_._1).zipWithIndex.foreach(pair => adIDMap(pair._1) = pair._2)
val numAds = adIDMap.size
val adFreq = new Array[Int](numAds)
adsStats.map(pair => (pair._1, pair._2._1))
.foreach(pair => adFreq(adIDMap(pair._1)) = pair._2)
val adWithClicks = new Array[Boolean](numAds)
adsStats.filter(_._2._1 > 0).map(_._1)
.foreach(i => adWithClicks(adIDMap(i)) = true)
val adCtr = new Array[Float](numAds)
adsStats.map{
case(id, (click, impression)) => (id, (click+alpha*beta)/(impression + beta))
}.foreach(pair => adCtr(adIDMap(pair._1)) = pair._2)
// Average click-through-rate for advertiser
//num of unique advertisers 14847, max advertiserID 39191
val numAdvrs = 39191+1
val advrCtr = new Array[Float](numAdvrs)
train.map(tokens => (tokens(3), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2))
.mapValues{case(click, impression) => (click+alpha*beta)/(impression + beta)}
.collect.foreach(pair => advrCtr(pair._1) = pair._2)
// Average click-through-rate for keyword advertised
val keywordStats = train.map(tokens => (tokens(7), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2)).collect.par
val keywordWithClicks = new Array[Boolean](numKeywords)
keywordStats.filter(_._2._1>0).map(_._1).foreach(i => keywordWithClicks(i) = true)
val keywordCtr = new Array[Float](numKeywords)
keywordStats.map{
case(id, (click, impression)) => (id, (click+alpha*beta)/(impression + beta))
}.foreach(pair => keywordCtr(pair._1) = pair._2)
    // Average click-through-rate for title id
val titleCtr = new Array[Float](numTitles)
train.map(tokens => (tokens(8), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2))
.mapValues{case(click, impression) => (click+alpha*beta)/(impression + beta)}
.collect.foreach(pair => titleCtr(pair._1) = pair._2)
// Average click-through-rate for description id
val despCtr = new Array[Float](numDesps)
train.map(tokens => (tokens(9), (tokens(0), tokens(1))))
.reduceByKey((p1, p2) => (p1._1+p2._1, p1._2+p2._2))
.mapValues{case(click, impression) => (click+alpha*beta)/(impression + beta)}
.collect.foreach(pair => despCtr(pair._1) = pair._2)
val queryTokenBC = sc.broadcast(queryToken)
val queryFreqBC = sc.broadcast(queryFreq)
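    // Continuous statistics (frequencies, CTRs) are bucketized into fixed-width bins so
    // each value maps to a single one-hot index; bin widths are derived from the observed maxima.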
val queryFreqDim = 25
val queryFreqBinSize = math.max(queryFreq.reduce(math.max(_,_))/queryFreqDim, 1)
val queryCtrBC = sc.broadcast(queryCtr)
val queryCtrBinDim = 100
val queryCtrBinSize = queryCtr.reduce(math.max(_,_))/queryCtrBinDim
val queryIDBinDim = 10000
val queryIDBinSize = numQueries/queryIDBinDim
val queryWithClicksBC = sc.broadcast(queryWithClicks)
val despTokenBC = sc.broadcast(despToken)
val despCtrBC = sc.broadcast(despCtr)
val despCtrBinDim = 100
val despCtrBinSize = despCtr.reduce(math.max(_,_))/despCtrBinDim
val keywordTokenBC = sc.broadcast(keywordToken)
val keywordCtrBC = sc.broadcast(keywordCtr)
val keywordCtrBinDim = 100
val keywordCtrBinSize = keywordCtr.reduce(math.max(_,_))/keywordCtrBinDim
val keywordWithClicksBC = sc.broadcast(keywordWithClicks)
val titleTokenBC = sc.broadcast(titleToken)
val titleCtrBC = sc.broadcast(titleCtr)
val titleCtrBinDim = 100
val titleCtrBinSize = titleCtr.reduce(math.max(_,_))/titleCtrBinDim
val userProfileBC = sc.broadcast(userProfile)
val userFreqBC = sc.broadcast(userFreq)
val userFreqDim = 25
val userFreqBinSize = math.max(userFreq.reduce(math.max(_,_))/userFreqDim, 1)
val userCtrBC = sc.broadcast(userCtr)
val userCtrBinDim = 100
val userCtrBinSize = userCtr.reduce(math.max(_,_))/userCtrBinDim
val userIDBinDim = 10000
val userIDBinSize = numUsers/userIDBinDim
val userWithClicksBC = sc.broadcast(userWithClicks)
val adFreqBC = sc.broadcast(adFreq)
val adFreqDim = 25
val adFreqBinSize = math.max(adFreq.reduce(math.max(_,_))/adFreqDim, 1)
val adCtrBC = sc.broadcast(adCtr)
val adCtrBinDim = 100
val adCtrBinSize = math.max(adCtr.reduce(math.max(_,_))/adCtrBinDim, 1)
val adIDMapBC = sc.broadcast(adIDMap)
val adIDBinDim = 10000
    val adIDBinSize = numAds/adIDBinDim
val adWithClicksBC = sc.broadcast(adWithClicks)
val advrCtrBC = sc.broadcast(advrCtr)
val advrCtrBinDim = 100
val advrCtrBinSize = advrCtr.reduce(math.max(_,_))/advrCtrBinDim
// Form features from raw data:
train.flatMap(arr => {
val queryToken = queryTokenBC.value
val queryFreq = queryFreqBC.value
val queryCtr = queryCtrBC.value
val queryWithClicks = queryWithClicksBC.value
val despToken = despTokenBC.value
val despCtr= despCtrBC.value
val keywordToken = keywordTokenBC.value
val keywordCtr = keywordCtrBC.value
val keywordWithClicks = keywordWithClicksBC.value
val titleToken = titleTokenBC.value
val titleCtr = titleCtrBC.value
val userProfile = userProfileBC.value
val userFreq = userFreqBC.value
val userCtr = userCtrBC.value
val userWithClicks = userWithClicksBC.value
val adFreq = adFreqBC.value
val adCtr = adCtrBC.value
val adWithClicks = adWithClicksBC.value
val adIDMap = adIDMapBC.value
val advrCtr = advrCtrBC.value
var click = arr(0)
var impression = arr(1)
val feature = new ArrayBuilder.ofInt
feature.sizeHint(25)
//intercept
feature += 0
var offset = 1
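      // offset marks the start of the next feature group: every group (CTR bins, raw ids,
      // token lengths, ...) gets its own disjoint slice of one global sparse index space.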
//AdCtr, D=adCtrBinDim
feature +=
math.min((adCtr(adIDMap(arr(2)))/adCtrBinSize).toInt, adCtrBinDim-1) + offset
offset += adCtrBinDim
//AdvrCtr, D=advrCtrBinDim
feature +=
math.min((advrCtr(arr(3))/advrCtrBinSize).toInt, advrCtrBinDim-1) + offset
offset += advrCtrBinDim
//QueryCtr, D=queryCtrBinDim
feature +=
math.min((queryCtr(arr(6))/queryCtrBinSize).toInt, queryCtrBinDim-1) + offset
offset += queryCtrBinDim
//UserCtr, D=userCtrBinDim
feature +=
math.min((userCtr(arr(10))/userCtrBinSize).toInt, userCtrBinDim-1) + offset
offset += userCtrBinDim
//WordCtr, D=keywordCtrBinDim
feature += math.min((keywordCtr(arr(7))/keywordCtrBinSize).toInt,
keywordCtrBinDim-1) + offset
offset += keywordCtrBinDim
//binary User ID, Ad ID, query ID for records with clicks
if (userWithClicks(arr(10))) feature += arr(10) + offset
offset += numUsers
if (adWithClicks(adIDMap(arr(2)))) feature += adIDMap(arr(2)) + offset
offset += numAds
if (queryWithClicks(arr(6))) feature += arr(6) + offset
offset += numQueries
//value-User, value-Query
feature += math.min(arr(10)/userIDBinSize, userIDBinDim-1) + offset
offset += userIDBinDim
feature += math.min(arr(6)/queryIDBinSize, queryIDBinDim-1) + offset
offset += queryIDBinDim
//Number of tokens in query/title/description/keyword
//Query token's length, D=20
feature += math.min(queryToken(arr(6)).length, 19) + offset
offset += 20
//Title token's length, D=30
feature += math.min(titleToken(arr(8)).length, 29) + offset
offset += 30
//Desp token's length, D=50
feature += math.min(despToken(arr(9)).length, 49) + offset
offset += 50
//Keyword token's length, D=10
feature += math.min(keywordToken(arr(7)).length, 9) + offset
offset += 10
//binary-Gender, binary-Age, binary-PositionDepth, binary-QueryTokens
//Gender, D=3
if (userProfile(arr(10)) != null) feature += userProfile(arr(10))(0) + offset
offset += 3
//Age, D=6
if (userProfile(arr(10)) != null) feature += userProfile(arr(10))(1)-1 + offset
offset += 6
//binary Position-Depth
feature += 6*arr(5)/arr(4) + offset
offset += 6
//binary query tokens, D=queryTokenSize
if (queryWithClicks(arr(6))) feature ++= queryToken(arr(6)).map(_+offset)
offset += queryTokenSize
if (keywordWithClicks(arr(7))) feature ++= keywordToken(arr(7)).map(_+offset)
offset += keywordTokenSize
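      // Expand the aggregated (click, impression) row into `impression` individual examples:
      // the first `click` copies are labeled +1 (clicked), the remainder -1.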
val records = new Array[Array[Int]](impression)
if (click >= 1) feature += 1
else feature += -1
records(0) = feature.result
val length = records(0).length
impression -= 1
click -= 1
var count = 1
while (impression > 0) {
val record = new Array[Int](length)
Array.copy(records(0), 0, record, 0, length)
record(length-1) = if (click >= 1) 1 else -1
impression -= 1
click -= 1
records(count) = record
count += 1
}
records
}).saveAsObjectFile(outputDir + "train_seq")
// .map(arr => arr(arr.length-1) + "\\t" + arr.take(arr.length-1).mkString(" "))
// .saveAsTextFile(outputDir + "train_text")
val test_feature = sc.textFile(testFeatureFile).map(_.split("\\t"))
.map(arr => (arr(0).trim.toInt, arr.drop(2).map(_.toInt)))
val test_label = sc.textFile(testLabelFile).map(_.split("\\t"))
.map(arr => (arr(0).trim.toInt, arr(1).split(",").take(2).map(_.toInt)))
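    // Join test labels (click, impression) with the raw feature file by instance id so each
    // test row ends up with the same 11-column layout as the training data.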
val test = test_label.join(test_feature).map{
case(id, (arr1, arr2)) =>
val builder = new ArrayBuilder.ofInt
val length = arr1.length+arr2.length
assert(length == 11)
builder.sizeHint(length)
builder ++= arr1
builder ++= arr2
builder.result
}.flatMap( arr => {
val queryToken = queryTokenBC.value
val queryFreq = queryFreqBC.value
val queryCtr = queryCtrBC.value
val queryWithClicks = queryWithClicksBC.value
val despToken = despTokenBC.value
val despCtr= despCtrBC.value
val keywordToken = keywordTokenBC.value
val keywordCtr = keywordCtrBC.value
val keywordWithClicks = keywordWithClicksBC.value
val titleToken = titleTokenBC.value
val titleCtr = titleCtrBC.value
val userProfile = userProfileBC.value
val userFreq = userFreqBC.value
val userCtr = userCtrBC.value
val userWithClicks = userWithClicksBC.value
val adFreq = adFreqBC.value
val adCtr = adCtrBC.value
val adWithClicks = adWithClicksBC.value
val adIDMap = adIDMapBC.value
val advrCtr = advrCtrBC.value
var click = arr(0)
var impression = arr(1)
val feature = new ArrayBuilder.ofInt
feature.sizeHint(25)
//intercept
feature += 0
var offset = 1
//AdCtr, D=adCtrBinDim
if (adIDMap.contains(arr(2))) {
feature += math.min((adCtr(adIDMap(arr(2)))/adCtrBinSize).toInt,
adCtrBinDim-1) + offset
}
offset += adCtrBinDim
//AdvrCtr, D=advrCtrBinDim
if (arr(3) < advrCtr.length) {
feature +=
math.min((advrCtr(arr(3))/advrCtrBinSize).toInt, advrCtrBinDim-1) + offset
}
offset += advrCtrBinDim
//QueryCtr, D=queryCtrBinDim
feature +=
math.min((queryCtr(arr(6))/queryCtrBinSize).toInt, queryCtrBinDim-1) + offset
offset += queryCtrBinDim
//UserCtr, D=userCtrBinDim
feature +=
math.min((userCtr(arr(10))/userCtrBinSize).toInt, userCtrBinDim-1) + offset
offset += userCtrBinDim
//WordCtr, D=keywordCtrBinDim
feature += math.min((keywordCtr(arr(7))/keywordCtrBinSize).toInt,
keywordCtrBinDim-1) + offset
offset += keywordCtrBinDim
//binary User ID, Ad ID, query ID for records with clicks
if (userWithClicks(arr(10))) feature += arr(10) + offset
offset += numUsers
if (adIDMap.contains(arr(2)) && adWithClicks(adIDMap(arr(2)))) {
feature += adIDMap(arr(2)) + offset
}
offset += numAds
if (queryWithClicks(arr(6))) feature += arr(6) + offset
offset += numQueries
//value-User, value-Query
feature += math.min(arr(10)/userIDBinSize, userIDBinDim-1) + offset
offset += userIDBinDim
feature += math.min(arr(6)/queryIDBinSize, queryIDBinDim-1) + offset
offset += queryIDBinDim
//Number of tokens in query/title/description/keyword
//Query token's length, D=20
feature += math.min(queryToken(arr(6)).length, 19) + offset
offset += 20
//Title token's length, D=30
feature += math.min(titleToken(arr(8)).length, 29) + offset
offset += 30
//Desp token's length, D=50
feature += math.min(despToken(arr(9)).length, 49) + offset
offset += 50
//Keyword token's length, D=10
feature += math.min(keywordToken(arr(7)).length, 9) + offset
offset += 10
//binary-Gender, binary-Age, binary-PositionDepth, binary-QueryTokens
//Gender, D=3
if (userProfile(arr(10)) != null) feature += userProfile(arr(10))(0) + offset
offset += 3
//Age, D=6
if (userProfile(arr(10)) != null) feature += userProfile(arr(10))(1)-1 + offset
offset += 6
//binary Position-Depth
feature += 6*arr(5)/arr(4) + offset
offset += 6
//binary query tokens, D=queryTokenSize
if (queryWithClicks(arr(6))) feature ++= queryToken(arr(6)).map(_+offset)
offset += queryTokenSize
if (keywordWithClicks(arr(7))) feature ++= keywordToken(arr(7)).map(_+offset)
offset += keywordTokenSize
val records = new Array[Array[Int]](impression)
if (click >= 1) feature += 1
else feature += -1
records(0) = feature.result
val length = records(0).length
impression -= 1
click -= 1
var count = 1
while (impression > 0) {
val record = new Array[Int](length)
Array.copy(records(0), 0, record, 0, length)
record(length-1) = if (click >= 1) 1 else -1
impression -= 1
click -= 1
records(count) = record
count += 1
}
records
}).saveAsObjectFile(outputDir + "test_seq")
// .map(arr => arr(arr.length-1) + "\\t" + arr.take(arr.length-1).mkString(" "))
// .saveAsTextFile(outputDir + "test_text")
System.exit(0)
}
}
|
XianXing/bdl
|
src/main/scala/bdl/preprocess/KDDCup2012.scala
|
Scala
|
apache-2.0
| 22,718 |
package com.datastax.spark.connector.cql
import java.io.IOException
import org.apache.spark.SparkEnv
import org.mockito.Mockito
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.FlatSpec
import org.scalatestplus.mockito.MockitoSugar
class DefaultConnectionFactoryTest extends FlatSpec with MockitoSugar {
/** DefaultConnectionFactory relies on a non-null SparkEnv */
private def mockedSparkEnv[T](code: => T): T = {
val original = SparkEnv.get
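    // Mock SparkEnv whose default answer is None, so option-valued conf lookups made by
    // DefaultConnectionFactory appear unset.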
val sparkEnv = Mockito.mock(classOf[SparkEnv], new Answer[Option[String]] {
override def answer(invocation: InvocationOnMock): Option[String] = None
})
SparkEnv.set(sparkEnv)
try {
code
} finally {
SparkEnv.set(original)
}
}
it should "complain when a malformed URL is provided" in mockedSparkEnv {
intercept[IOException] {
DefaultConnectionFactory.maybeGetLocalFile("secure-bundle.zip")
}
}
  it should "complain when a URL with unrecognized scheme is provided" in mockedSparkEnv {
intercept[IOException] {
DefaultConnectionFactory.maybeGetLocalFile("hdfs:///secure-bundle.zip")
}
}
}
|
datastax/spark-cassandra-connector
|
connector/src/test/scala/com/datastax/spark/connector/cql/DefaultConnectionFactoryTest.scala
|
Scala
|
apache-2.0
| 1,177 |
package org.contourweb.client.view
import java.util.logging.Logger
import org.contourweb.common.model.model
trait location { this: view =>
private[location] val log = Logger.getLogger("location")
trait LocatedView {
def uri: List[String] = Nil
}
}
|
kazachonak/contour
|
contour-client/src/main/scala/org/contourweb/client/view/location.scala
|
Scala
|
lgpl-3.0
| 259 |
package idbase
import play.api.data.Forms._
import play.api.data.validation._
import play.api.data._
package object models {
type Markdown = String
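  // A repeated form mapping that fails the "constraint.required" check when the submitted
  // list is empty. A hypothetical usage sketch:
  //   val tagsForm = Form(single("tags" -> nonEmptyList(nonEmptyText)))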
def nonEmptyList[A](mapping: Mapping[A]): Mapping[List[A]] =
RepeatedMapping(mapping) verifying Constraint[List[_]]("constraint.required") { o ⇒
if (o.isEmpty) Invalid(ValidationError("error.required")) else Valid
}
}
|
ornicar/idbase
|
app/models/package.scala
|
Scala
|
mit
| 390 |
// Copyright 2014-2016 Leonardo Schwarz (leoschwarz.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import com.leoschwarz.quest_on._
import org.scalatra._
import javax.servlet.ServletContext
class ScalatraBootstrap extends LifeCycle {
override def init(context: ServletContext) {
context.mount(new QuestOnServlet, "/*")
}
}
|
evotopid/quest_on
|
src/main/scala/ScalatraBootstrap.scala
|
Scala
|
apache-2.0
| 845 |
package com.socrata.curator
import org.slf4j.LoggerFactory
import com.socrata.http.client.{RequestBuilder, Response, SimpleHttpRequest}
import ServerProvider.Complete
/**
* Manages connections and requests to the provided service.
* @param provider Service discovery object.
* @param config The configuration for this client.
*/
case class CuratedServiceClient(provider: ServerProvider,
config: CuratedClientConfig) {
private val logger = LoggerFactory.getLogger(getClass)
private val connectTimeout = config.connectTimeout
private val maxRetries = config.maxRetries
/**
* Sends a get request to the provided service.
* @return HTTP response code and body
*/
def execute[T](request: RequestBuilder => SimpleHttpRequest,
callback: Response => T): T = {
    // Not using a default parameter to avoid breaking binary compatibility.
execute(request, callback, ServerProvider.standardRetryOn)
}
/**
* Sends a get request to the provided service.
* @return HTTP response code and body
*/
def execute[T](request: RequestBuilder => SimpleHttpRequest,
callback: Response => T,
retryWhen: ServerProvider.RetryWhen): T = {
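    // Apply the configured connect timeout only when the caller has not already set one.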
val requestWithTimeout = { base: RequestBuilder =>
val req = base.connectTimeoutMS match {
case Some(timeout) => base
case None => base.connectTimeoutMS(connectTimeout)
}
request(req)
}
provider.withRetries(maxRetries,
requestWithTimeout,
retryWhen) {
case Some(response) =>
Complete(callback(response))
case None =>
throw ServiceDiscoveryException(s"Failed to discover service: ${config.serviceName}")
}
}
}
|
socrata-platform/socrata-curator-utils
|
core/src/main/scala/com.socrata.curator/CuratedServiceClient.scala
|
Scala
|
apache-2.0
| 1,793 |
package com.lkroll.ep.mapviewer
import org.denigma.threejs._
import org.scalajs.dom
import org.scalajs.dom.raw.HTMLElement
import scala.scalajs.js
import js.JSConverters._
trait IntersectionControls {
def camera: Camera
def scene: Object3D
def element: HTMLElement
def sceneObjects: Array[Object3D]
def overlayObjects: Array[Object3D]
val rayLength = 1e16;
val rayOffset = new Vector3(0.0, 0.0, 0.0);
// lazy val (raycaster, ray) = {
// val rc = new Raycaster();
// //rc.setFromCamera(mouse, camera);
// val r = buildRay(rc.ray.origin, rc.ray.direction);
// r.matrixAutoUpdate = true;
// //scene.add(r);
// (rc, r)
// }
lazy val raycaster = new Raycaster();
lazy val screenT = {
val rect = element.getBoundingClientRect();
// val width = if (rect.width == 0.0) {
// dom.window.innerWidth
// } else {
// rect.width
// }
// val height = if (rect.height == 0.0) {
// dom.window.innerHeight
// } else {
// rect.height
// }
val (width, height) = (dom.window.innerWidth, dom.window.innerHeight);
    //println(s"Bounding rectangle: left=${rect.left}, top=${rect.top}, width=${width}, height=${height}")
new graphics.ScreenTransform(width, height, rect.left, rect.top)
}
var intersections = List.empty[Intersection]
var underMouse = Map.empty[Object3D, List[Intersection]]
var last = Map.empty[Object3D, List[Intersection]]
var exit = Map.empty[Object3D, List[Intersection]]
var enter = Map.empty[Object3D, List[Intersection]]
def findIntersections(mouse: Vector2): List[Intersection] = {
//println(s"Mouse position: ${mouse.toArray().mkString(",")}");
raycaster.setFromCamera(mouse, camera);
//updateRay(raycaster.ray.origin, raycaster.ray.direction)
//println(s"Raycaster: ${raycaster.ray.origin.toArray().mkString(",")} -> ${raycaster.ray.direction.toArray().mkString(",")}, near=${raycaster.near},far=${raycaster.far}");
val sceneIntersections = raycaster.intersectObjects(sceneObjects.toJSArray); //.sortWith((a, b) => a.point.distanceTo(raycaster.ray.origin) < b.point.distanceTo(raycaster.ray.origin)).toList
val overlayIntersections = graphics.TacticalOverlay.intersectObjects(mouse, camera, screenT, overlayObjects);
val intersectionsB = List.newBuilder[Intersection];
intersectionsB ++= sceneIntersections;
intersectionsB ++= overlayIntersections;
intersectionsB.result()
}
val coords = new Vector2();
def onCursorMove(cordX: Double, cordY: Double): Unit = {
coords.set(cordX, cordY);
val ncs = screenT.toNormalizedCameraSpace(coords);
// val ss = screenT.toScreenSpace(ncs);
// println(s"coords=${coords.pretty}, ncs=${ncs.pretty}, ss=${ss.pretty}, screenT=${screenT}");
intersections = findIntersections(ncs);
//println(s"Intersections: ${intersections.map { x => x.`object`.name }.mkString(",")}")
underMouse = intersections.groupBy(_.`object`)
    val l = last // if I do not do this assignment and use last instead of l I get into trouble
this.exit = l.filterKeys(!underMouse.contains(_))
this.enter = underMouse.filterKeys(!l.contains(_))
// if(exit.exists{case (key,value)=>enter.contains(key)}) dom.console.error("same enterexit")
//val s = enter.size
last = underMouse
//if (s != enter.size) dom.console.error("ScalaJS error with immutable collections")
}
}
|
Bathtor/ep-explorer
|
src/main/scala/com/lkroll/ep/mapviewer/IntersectionControls.scala
|
Scala
|
mit
| 3,528 |
package sssg.renderer
import java.io.{File, FileWriter}
import java.util.Locale
import com.typesafe.scalalogging.LazyLogging
import nz.net.ultraq.thymeleaf.LayoutDialect
import org.thymeleaf.TemplateEngine
import org.thymeleaf.context.Context
import org.thymeleaf.templateresolver.FileTemplateResolver
import sssg.Configuration
/**
* sssg.renderer
* User: nk
* Date: 2016-03-17 00:54
*/
trait ThymeleafRenderer extends Renderer with LazyLogging {
this: Configuration =>
lazy val engine: TemplateEngine = new TemplateEngine
private val templateResolver: FileTemplateResolver = new FileTemplateResolver()
templateResolver.setTemplateMode("HTML5")
templateResolver.setSuffix(".html")
templateResolver.setPrefix(TEMPLATE_PATH)
templateResolver.setCacheable(false)
engine.setTemplateResolver(templateResolver)
engine.addDialect(new LayoutDialect())
override def render(template: String, file: File, context: scala.collection.mutable.Map[String, AnyRef]): Unit = {
import scala.collection.JavaConversions._
if(file.exists()) {
file.delete()
}
val writer: FileWriter = new FileWriter(file)
val c: Context = new Context(Locale.getDefault, context)
engine.process(template, c, writer)
logger.debug(s"Rendered ${file.getCanonicalPath}")
writer.flush()
writer.close()
}
}
|
nikosk/sssg
|
src/main/scala/sssg/renderer/ThymeleafRenderer.scala
|
Scala
|
mit
| 1,343 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.dataset
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.Calc
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rex._
import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.api.java.DataSet
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.internal.BatchTableEnvImpl
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.codegen.FunctionCodeGenerator
import org.apache.flink.table.plan.schema.RowSchema
import org.apache.flink.table.runtime.FlatMapRunner
import org.apache.flink.types.Row
import scala.collection.JavaConverters._
/**
 * Flink RelNode corresponding to a LogicalCalc.
*/
class DataSetCalc(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
rowRelDataType: RelDataType,
calcProgram: RexProgram,
ruleDescription: String)
extends DataSetCalcBase(
cluster,
traitSet,
input,
rowRelDataType,
calcProgram,
ruleDescription) {
override def copy(traitSet: RelTraitSet, child: RelNode, program: RexProgram): Calc = {
new DataSetCalc(cluster, traitSet, child, getRowType, program, ruleDescription)
}
override def translateToPlan(tableEnv: BatchTableEnvImpl): DataSet[Row] = {
val config = tableEnv.getConfig
val inputDS = getInput.asInstanceOf[DataSetRel].translateToPlan(tableEnv)
val generator = new FunctionCodeGenerator(config, false, inputDS.getType)
val returnType = FlinkTypeFactory.toInternalRowTypeInfo(getRowType).asInstanceOf[RowTypeInfo]
val projection = calcProgram.getProjectList.asScala.map(calcProgram.expandLocalRef)
val condition = if (calcProgram.getCondition != null) {
Some(calcProgram.expandLocalRef(calcProgram.getCondition))
} else {
None
}
val genFunction = generateFunction(
generator,
ruleDescription,
new RowSchema(getRowType),
projection,
condition,
config,
classOf[FlatMapFunction[Row, Row]])
val runner = new FlatMapRunner(genFunction.name, genFunction.code, returnType)
inputDS.flatMap(runner).name(calcOpName(calcProgram, getExpressionString))
}
}
|
hequn8128/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetCalc.scala
|
Scala
|
apache-2.0
| 3,137 |
package mesosphere.mesos.protos
case class ExecutorID(value: String)
|
gsantovena/marathon
|
src/main/scala/mesosphere/mesos/protos/ExecutorID.scala
|
Scala
|
apache-2.0
| 70 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.ScMethodLike
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeParametersOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScTemplateDefinition}
import org.jetbrains.plugins.scala.lang.psi.types.Compatibility.Expression
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{ScTypePolymorphicType, TypeParameter}
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypeResult}
import org.jetbrains.plugins.scala.lang.resolve.StdKinds
import org.jetbrains.plugins.scala.lang.resolve.processor.MethodResolveProcessor
import scala.collection.Seq
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
*/
class ScSelfInvocationImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScSelfInvocation {
override def toString: String = "SelfInvocation"
def bind: Option[PsiElement] = bindInternal(shapeResolve = false)
private def bindInternal(shapeResolve: Boolean): Option[PsiElement] = {
val seq = bindMultiInternal(shapeResolve)
if (seq.length == 1) Some(seq(0))
else None
}
private def bindMultiInternal(shapeResolve: Boolean): Seq[PsiElement] = {
val psiClass = PsiTreeUtil.getContextOfType(this, classOf[PsiClass])
if (psiClass == null) return Seq.empty
if (!psiClass.isInstanceOf[ScClass]) return Seq.empty
val clazz = psiClass.asInstanceOf[ScClass]
val method = PsiTreeUtil.getContextOfType(this, classOf[ScFunction])
if (method == null) return Seq.empty
val expressions: Seq[Expression] = args match {
case Some(arguments) => arguments.exprs.map(new Expression(_))
case None => Seq.empty
}
val proc = new MethodResolveProcessor(this, "this", List(expressions), Seq.empty,
Seq.empty /*todo: ? */, StdKinds.methodsOnly, constructorResolve = true, isShapeResolve = shapeResolve,
enableTupling = true, selfConstructorResolve = true)
    for (constr <- clazz.secondaryConstructors if constr != method) {
proc.execute(constr, ResolveState.initial)
}
clazz.constructor match {
case Some(constr) => proc.execute(constr, ResolveState.initial())
case _ =>
}
proc.candidates.toSeq.map(_.element)
}
private def workWithBindInternal(bindInternal: Option[PsiElement], i: Int): TypeResult[ScType] = {
val (res: ScType, clazz: ScTemplateDefinition) = bindInternal match {
case Some(c: ScMethodLike) =>
val methodType = ScType.nested(c.methodType, i).getOrElse(return Failure("Not enough parameter sections", Some(this)))
(methodType, c.containingClass)
case _ => return Failure("Cannot shape resolve self invocation", Some(this))
}
clazz match {
case tp: ScTypeParametersOwner if tp.typeParameters.length > 0 =>
val params: Seq[TypeParameter] = tp.typeParameters.map(new TypeParameter(_))
Success(ScTypePolymorphicType(res, params), Some(this))
case _ => Success(res, Some(this))
}
}
def shapeType(i: Int): TypeResult[ScType] = {
val option = bindInternal(shapeResolve = true)
workWithBindInternal(option, i)
}
def shapeMultiType(i: Int): Seq[TypeResult[ScType]] = {
bindMultiInternal(shapeResolve = true).map(pe => workWithBindInternal(Some(pe), i))
}
def multiType(i: Int): Seq[TypeResult[ScType]] = {
bindMultiInternal(shapeResolve = false).map(pe => workWithBindInternal(Some(pe), i))
}
override def accept(visitor: ScalaElementVisitor) {
visitor.visitSelfInvocation(this)
}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case s: ScalaElementVisitor => s.visitSelfInvocation(this)
case _ => super.accept(visitor)
}
}
}
|
igrocki/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScSelfInvocationImpl.scala
|
Scala
|
apache-2.0
| 4,182 |
package microtools.hateoas
import play.api.libs.json._
object JsonTransformers {
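  // addHAL injects a HAL-style "_links" object built from the given actions; if the JSON
  // transform fails, the original value is returned unchanged. A minimal usage sketch,
  // assuming an implicit LinkBuilder `links` and a BusinessAction `someAction` are in scope:
  //   val out = JsonTransformers.addHAL(Json.obj("id" -> 1), Seq(someAction))(links)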
def addHAL(json: JsValue, actions: Seq[BusinessAction])(
implicit linkBuilder: LinkBuilder
): JsValue = {
if (actions.isEmpty) json
else
json.transform(jsonHAL(actions)) match {
case JsSuccess(transformed, _) => transformed
case _ => json
}
}
def jsonHAL(actions: Seq[BusinessAction])(implicit linkBuilder: LinkBuilder): Reads[JsObject] = {
val links = JsObject(actions.map { action =>
action.rel -> Json.toJson(linkBuilder.actionLink(action))
})
__.read[JsObject].map(o => o ++ JsObject(Seq("_links" -> links)))
}
}
|
21re/play-error-handling
|
src/main/scala/microtools/hateoas/JsonTransformers.scala
|
Scala
|
mit
| 695 |
package com.sksamuel.elastic4s.source
/** @author Stephen Samuel */
trait DocumentSource {
def json: String
}
@deprecated("prefer JsonDocumentSource instead; same semantics just different name", "1.5.0")
case class StringDocumentSource(str: String) extends DocumentSource {
override def json = str
}
/** An instance of DocumentSource that just provides json as is
*/
case class JsonDocumentSource(j: String) extends DocumentSource {
override def json = j
}
trait DocumentMap {
def map: Map[String, Any]
}
|
alexander-svendsen/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/source/Source.scala
|
Scala
|
apache-2.0
| 520 |
package com.github.mostroverkhov.adventofcode16
import org.scalatest.{FlatSpec, Matchers}
/**
* Created with IntelliJ IDEA.
* Author: mostroverkhov
*/
class SecurityThroughObscurity4Spec extends FlatSpec with Matchers {
"Parse RoomCode row" should "be consistent" in {
val input = "aaaaa-bbb-z-y-x-123[abxyz]"
val row = SecurityThroughObscurity4.parseRoomCodeRow(input)
row should be(RoomCode("aaaaabbbzyx", 123, "abxyz"))
}
"Parse RoomCode sum for 1 element" should "be consistent" in {
val input = "aaaaa-bbb-z-y-x-123[abxyz]"
val sum = SecurityThroughObscurity4.countSectorsSum(input)
sum should be(123)
}
"Parse RoomCode sum for input" should "be 245102" in {
    val input = readResourceFile("/security_through_obscurity4.txt")
    val sum = SecurityThroughObscurity4.countSectorsSum(input)
sum should be(245102)
}
def readResourceFile(p: String): String =
Option(getClass.getResourceAsStream(p)).map(scala.io.Source.fromInputStream)
.map(_.mkString).getOrElse(throw new IllegalArgumentException("Cant open file " + p))
}
|
mostroverkhov/adventofcode16
|
src/test/scala/com.github.mostroverkhov.adventofcode16/SecurityThroughObscurity4Spec.scala
|
Scala
|
apache-2.0
| 1,106 |
package org.geneontology.jena
import scala.collection.JavaConverters._
import org.apache.jena.datatypes.TypeMapper
import org.apache.jena.rdf.model.AnonId
import org.apache.jena.rdf.model.ResourceFactory
import org.apache.jena.rdf.model.Statement
import org.apache.jena.rdf.model.impl.ResourceImpl
import org.openrdf.model.BNode
import org.openrdf.model.Literal
import org.openrdf.model.{ Statement => SesameStatement }
import org.openrdf.model.URI
import org.openrdf.rio.helpers.StatementCollector
import org.semanticweb.owlapi.model.OWLOntology
import org.semanticweb.owlapi.rio.RioRenderer
object SesameJena {
def ontologyAsTriples(ontology: OWLOntology): Set[Statement] = {
val collector = new StatementCollector();
new RioRenderer(ontology, collector, null).render();
collector.getStatements.asScala.map(sesameTripleToJena).toSet
}
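  // Convert a Sesame (OpenRDF) statement into the corresponding Jena statement, preserving
  // blank-node identity and literal language/datatype where present.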
def sesameTripleToJena(triple: SesameStatement): Statement = {
val subject = triple.getSubject match {
case bnode: BNode => new ResourceImpl(new AnonId(bnode.getID))
case uri: URI => ResourceFactory.createResource(uri.stringValue)
}
val predicate = ResourceFactory.createProperty(triple.getPredicate.stringValue)
val obj = triple.getObject match {
case bnode: BNode => new ResourceImpl(new AnonId(bnode.getID))
case uri: URI => ResourceFactory.createResource(uri.stringValue)
case literal: Literal if literal.getLanguage != null => ResourceFactory.createLangLiteral(literal.getLabel, literal.getLanguage)
case literal: Literal if literal.getDatatype != null => ResourceFactory.createTypedLiteral(literal.getLabel,
TypeMapper.getInstance.getSafeTypeByName(literal.getDatatype.stringValue))
case literal: Literal => ResourceFactory.createStringLiteral(literal.getLabel)
}
ResourceFactory.createStatement(subject, predicate, obj)
}
}
|
balhoff/owl-to-rules
|
src/main/scala/org/geneontology/jena/SesameJena.scala
|
Scala
|
bsd-3-clause
| 1,945 |
package ildl
package benchmark
package aos2soa
import org.scalameter.CurveData
import org.scalameter.api._
import org.scalameter.Key
import org.scalameter.DSL._
//
// You can read about this benchmark on the following wiki page:
// https://github.com/miniboxing/ildl-plugin/wiki/Sample-~-Array-of-Struct
//
/** The benchmark object */
object BenchmarkRunner extends PerformanceTest.Microbenchmark {
//
// The benchmark object. This object is the entry point into the current
// benchmark and customizes the ScalaMeter configuration.
//
// **Note:** In the ScalaIDE, some of the benchmarked methods will appear
// as not found. This is expected, and occurs since the presentation compiler
// (the fast one which performs syntax highlighting and quick error checking)
// is a stripped-down version of the Scala compiler and does not allow the
// ildl-plugin to transform the program before the typer phase (in the
//`post-parser` phase). Nevertheless, compiling and running occurs correctly.
//
// make sure we're running on the correct setup:
Platform.checkCompatibility()
import AverageTemperature._
var aosData: Array[(Long, Long, Double)] = null
adrt(ArrayOfStructToStructOfArray) {
var soaData: Array[(Long, Long, Double)] = null
}
val bench = Gen.enumeration("bench")("direct", "adrt__")
val sizes = Gen.single("size")(5000000)// Gen.range("size")(1000000, 5000000, 1000000)
val pred = Gen.enumeration("predictable")(false, true)
override def aggregator = Aggregator.average
measure method "readingsAverage" in {
using(Gen.tupled(sizes, pred, bench)) config (
exec.independentSamples -> 1,
exec.benchRuns -> 20,
exec.jvmflags -> ("-Xmx2g -Xms2g " /* + "-verbose:gc " */)
) setUp {
// Note: It is expected that "soaData" appears as "not found" in the IDE:
case (size, pred, "direct") => aosData = createDataDirect(size, pred); soaData = null
case (size, pred, "adrt__") => soaData = createDataSoA(size, pred); aosData = null
case (_, _, _) => soaData = null; aosData = null
System.gc()
} tearDown {
_ =>
// Note: It is expected that "soaData" appears as "not found" in the IDE:
aosData = null
soaData = null
System.gc()
} in {
case (size, pred, bench) =>
// print("starting ")
// print(bench)
// print(" ")
// println(pred)
bench match {
// Note: It is expected that "getAverageSoA" and "soaData" appear as "not found" in the IDE:
case "direct" => getAverageDirect(aosData, 0)
case "adrt__" => getAverageSoA(soaData, 0)
}
// print("stopping ")
// print(bench)
// print(" ")
// println(pred)
}
}
}
|
miniboxing/ildl-plugin
|
tests/benchmarks/src/ildl/benchmark/aos2soa/Benchmark.scala
|
Scala
|
bsd-3-clause
| 2,791 |
/*
* Copyright 2018 Han van Venrooij
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.irundaia.sass
import java.io.File
import java.nio.file.{Path, Paths}
import org.irundaia.sass.jna.SassLibrary
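/** Thin Scala facade over the native libsass options struct: each accessor below delegates
  * to the JNA binding on SassCompiler.libraryInstance. */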
case class Options(nativeOptions: SassLibrary.Sass_Options) {
def precision: Int = SassCompiler.libraryInstance.sass_option_get_precision(this.nativeOptions)
def precision_=(precision: Int): Unit = SassCompiler.libraryInstance.sass_option_set_precision(this.nativeOptions, precision)
def outputStyle: CssStyle = {
SassCompiler.libraryInstance.sass_option_get_output_style(this.nativeOptions) match {
case SassLibrary.Sass_Output_Style.SASS_STYLE_NESTED => Sassy
case SassLibrary.Sass_Output_Style.SASS_STYLE_EXPANDED => Maxified
case SassLibrary.Sass_Output_Style.SASS_STYLE_COMPRESSED => Minified
case style =>
throw new IllegalStateException(s"Unknown Sass output style: $style")
}
}
def outputStyle_=(outputStyle: CssStyle): Unit = SassCompiler.libraryInstance.sass_option_set_output_style(this.nativeOptions, outputStyle.intValue)
def sourceComments: Boolean = SassCompiler.libraryInstance.sass_option_get_source_comments(this.nativeOptions)
def sourceComments_=(b: Boolean): Unit =
SassCompiler.libraryInstance.sass_option_set_source_comments(this.nativeOptions, b)
def sourceMapEmbed: Boolean = SassCompiler.libraryInstance.sass_option_get_source_map_embed(this.nativeOptions)
def sourceMapEmbed_=(sourceMapEmbed: Boolean): Unit =
SassCompiler.libraryInstance.sass_option_set_source_map_embed(this.nativeOptions, sourceMapEmbed)
def sourceMapContents: Boolean = SassCompiler.libraryInstance.sass_option_get_source_map_embed(this.nativeOptions)
def sourceMapContents_=(b: Boolean): Unit = SassCompiler.libraryInstance.sass_option_set_source_map_contents(this.nativeOptions, b)
def omitSourceMapUrl: Boolean = SassCompiler.libraryInstance.sass_option_get_omit_source_map_url(this.nativeOptions)
def omitSourceMapUrl_=(b: Boolean): Unit = SassCompiler.libraryInstance.sass_option_set_omit_source_map_url(this.nativeOptions, b)
def indentedSyntaxSrc: Boolean = SassCompiler.libraryInstance.sass_option_get_is_indented_syntax_src(this.nativeOptions)
def indentedSyntaxSrc_=(b: Boolean): Unit = SassCompiler.libraryInstance.sass_option_set_is_indented_syntax_src(this.nativeOptions, b)
def inputPath: Path = Paths.get(SassCompiler.libraryInstance.sass_option_get_input_path(this.nativeOptions))
def inputPath_=(p: Path): Unit = inputPath_=(p.toFile.getAbsolutePath)
def inputPath_=(p: String): Unit = SassCompiler.libraryInstance.sass_option_set_input_path(this.nativeOptions, p)
def outputPath: Path = Paths.get(SassCompiler.libraryInstance.sass_option_get_output_path(this.nativeOptions))
def outputPath_=(p: Path): Unit = outputPath_=(p.toFile.getAbsolutePath)
def outputPath_=(p: String): Unit = SassCompiler.libraryInstance.sass_option_set_output_path(this.nativeOptions, p)
def sourceMapPath: String = SassCompiler.libraryInstance.sass_option_get_source_map_file(this.nativeOptions)
def sourceMapPath_=(file: Path): Unit = sourceMapPath_=(file.toFile.getAbsolutePath)
def sourceMapPath_=(file: String): Unit = SassCompiler.libraryInstance.sass_option_set_source_map_file(this.nativeOptions, file)
def includePaths: Iterable[Path] = {
val includePathSize = SassCompiler.libraryInstance.sass_option_get_include_path_size(this.nativeOptions)
if (includePathSize.longValue() == 0) {
Seq()
} else {
Option(SassCompiler.libraryInstance.sass_option_get_include_path(this.nativeOptions, includePathSize)) match {
case None => Seq()
case Some(includePathsString) => includePathsString.split(File.pathSeparator).map(Paths.get(_))
}
}
}
def includePaths_=(paths: String): Unit = SassCompiler.libraryInstance.sass_option_set_include_path(this.nativeOptions, paths)
def includePaths_=(paths: Path*): Unit = includePaths_=(paths.map(_.toFile.getAbsolutePath).mkString(File.pathSeparator))
def includePaths_=(paths: Iterable[Path]): Unit = includePaths_=(paths.map(_.toFile.getAbsolutePath).mkString(File.pathSeparator))
def includePaths_+=(path: String): Unit = includePaths_+=(Paths.get(path))
def includePaths_+=(path: Path): Unit = includePaths_++=(path)
def includePaths_++=(paths: Path*): Unit = paths.map(_.toString).foreach(SassCompiler.libraryInstance.sass_option_push_include_path(nativeOptions, _))
def sourceMapRoot: String = SassCompiler.libraryInstance.sass_option_get_source_map_root(this.nativeOptions)
def sourceMapRoot_=(path: Path): Unit = sourceMapRoot_=(path.toFile.getAbsolutePath)
def sourceMapRoot_=(path: String): Unit = SassCompiler.libraryInstance.sass_option_set_source_map_root(this.nativeOptions, path)
def indent_=(indent: Int):Unit = SassCompiler.libraryInstance.sass_option_set_indent(nativeOptions, " " * indent)
}
object Options {
def apply(context: Context): Options = new Options(SassCompiler.libraryInstance.sass_file_context_get_options(context.nativeContext))
}
|
irundaia/sbt-sassify
|
src/main/scala/org/irundaia/sass/Options.scala
|
Scala
|
apache-2.0
| 5,605 |
package io.buoyant.interpreter.k8s.istio
import com.twitter.finagle._
import com.twitter.finagle.naming.NameInterpreter
import io.buoyant.k8s.SingleNsNamer
import io.buoyant.k8s.istio.RouteCache
import io.buoyant.namer.ConfiguredDtabNamer
import istio.proxy.v1.config.RouteRule
object IstioInterpreter {
private val istioPfx = "/#/io.l5d.k8s.istio"
private val k8sPfx = "/#/io.l5d.k8s.ns"
private val defaultRouteDtab = Dtab.read(s"""
|/egress => $k8sPfx/incoming/istio-egress ;
|/svc/ext => /egress ;
|/svc/dest => /egress ;
|/svc/dest => $istioPfx ;
""".stripMargin)
/* A typical delegation for the default route
/svc/dest/reviews.default.svc.cluster.local/::/http
/#/io.l5d.k8s.istio/reviews.default.svc.cluster.local/::/http
A typical delegation for a matching route
/svc/route/my-cool-route/http
/#/io.l5d.k8s.istio/reviews.default.svc.cluster.local/version:v1/http
A typical delegation for no matching cluster
/svc/ext/google.com/80
/$/inet/google.com/80
*/
private[this] def mkDentry(name: String, route: RouteRule): Option[Dentry] =
route.destination.flatMap { cluster =>
val branches = route.route.map { weightedDest =>
val labels = weightedDest.tags.toSeq
.sortBy(_._1)
.map { case (k, v) => s"$k:$v" }
.mkString("::")
val labelSegment = if (labels.isEmpty) "::" else labels
val clusterSegment = weightedDest.`destination`.getOrElse(cluster)
NameTree.Weighted(
weightedDest.weight.getOrElse(0).toDouble,
NameTree.Leaf(Path.read(s"/#/io.l5d.k8s.istio/$clusterSegment/$labelSegment"))
)
}
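      // A single destination collapses to one leaf; multiple weighted destinations become
      // a weighted union over the per-tag istio paths built above.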
val dst = if (branches.isEmpty)
NameTree.Leaf(Path.read(s"/#/io.l5d.k8s.istio/$cluster/::"))
else
NameTree.Union(branches: _*)
val prefix = Dentry.Prefix.read(s"/svc/route/$name")
Some(Dentry(prefix, dst))
}
def apply(routeManager: RouteCache, istioNamer: Namer, k8sNamer: SingleNsNamer): NameInterpreter = {
val routes = routeManager.routeRules
val routesDtab = routes.map { routeTable =>
val dentries = routeTable.toIndexedSeq.flatMap {
case (name, route) =>
mkDentry(name, route)
}
Dtab(dentries)
}
val dtab = routesDtab.map(defaultRouteDtab ++ _)
ConfiguredDtabNamer(dtab, Seq(
Path.read(istioPfx) -> istioNamer,
Path.read(k8sPfx) -> k8sNamer
))
}
}
|
linkerd/linkerd
|
interpreter/istio/src/main/scala/io/buoyant/interpreter/k8s/istio/IstioInterpreter.scala
|
Scala
|
apache-2.0
| 2,442 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
 * ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
 * ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
 * (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.wiki.model
import com.mongodb.casbah.Imports._
import com.novus.salat._
import org.joda.time.DateTime
import razie.audit.Audit
import razie.{AA, Log, cdebug}
import razie.db.RazSalatContext._
import razie.db._
import razie.diesel.dom.WikiDTemplate
import razie.tconf.parser.SState
import razie.tconf.{DSpec, DTemplate, SpecPath}
import razie.wiki.Services
import razie.wiki.model.features.{FieldDef, FormStatus, WikiCount, WikiForm}
import razie.wiki.parser.WAST
import scala.collection.mutable
/**
* simple trait for a wiki
*/
trait WikiPage {
def category: String
def name: String
def label: String
def markup: String
def content: String
def by: ObjectId
def tags: Seq[String]
def realm:String
def ver: Int
def parent: Option[ObjectId]
def props: Map[String, String]
def crDtm: DateTime
def updDtm: DateTime
def _id: ObjectId
def included: String
def wid : WID
def uwid : UWID
def section (stype: String, name: String) : Option[WikiSection]
def contentProps : Map[String,String]
def isReserved : Boolean
def isDraft : Boolean
def isPrivate : Boolean
def isOwner(id: String) : Boolean
def owner : Option[WikiUser]
def ownerId : Option[ObjectId]
def getLabel : String
def getDescription : String
def getFirstParagraph : Option[String]
def wordCount : Int
def visibility : String
def wvis : String
/** attributes are props perhaps overriden in content */
def attr(name:String) : Option[String]
def linksFrom : Iterator[WikiLink]
def linksTo : Iterator[WikiLink]
}
/** a simple wiki-style entry: language (markdown, mediawiki wikidot etc) and the actual source
*
* There is an "owner" property - owner is supposed to have special privileges
*/
@RTable
case class WikiEntry(
category: String,
name: String,
label: String,
markup: String,
content: String,
by: ObjectId,
tags: Seq[String] = Seq(),
realm:String = Wikis.RK,
ver: Int = 1,
parent: Option[ObjectId] = None,
props: Map[String, String] = Map.empty, // properties - can be supplemented in the content
likes: List[String]=List.empty, // list of usernames that liked it
  dislikes: List[String]=List.empty, // list of usernames that disliked it
  likeCount: Int=0, // number of likes
  dislikeCount: Int=0, // number of dislikes
crDtm: DateTime = DateTime.now,
updDtm: DateTime = DateTime.now,
_id: ObjectId = new ObjectId()) extends WikiPage with DSpec {
import WikiEntry._
// from DSpec
override def specPath = new SpecPath("local", this.wid.wpath, this.realm) {
override def ahref: Option[String] = Some(wid.ahref)
}
/**
* find a template for that name (e-a) or implementing that URL
*
* from DSpec
*/
override def findTemplate(name: String, direction:String=""): Option[DTemplate] =
this
.templateSections
.filter(t=> t.name == name && (direction=="" || t.signature.startsWith(direction)))
.headOption
.map {t=> new WikiDTemplate (t) }
/** find template with predicate */
override def findTemplate (p : DTemplate => Boolean) : Option[DTemplate] = {
this
.templateSections
.map {t=> new WikiDTemplate (t) }
.find(p)
}
/** is this just an alias?
*
* an alias is a topic that starts with the alias markup: [[alias:xxx]]
*/
def alias: Option[WID] = {
    val wikip2 = """(?s)\[\[alias:([^\]]*)\]\].*"""
val wikip2r = wikip2.r
if (content.matches(wikip2)) {
val wikip2r(wpath) = content
WID.fromPath(wpath)
} else None
}
/** is this just a redirect?
*/
def redirect: Option[String] = {
    val wikip2 = """(?s)\{\{redirect[ :]([^\]]*)\}\}.*"""
val wikip2r = wikip2.r
if (content.matches(wikip2)) {
val wikip2r(url) = content
Some(url)
} else None
}
// what other pages I depend on, collected while parsing
var depys: List[UWID] = Nil
/** todo should use this version instead of content - this resolves includes */
// todo optimize - why isn't this a lazy?
def included : String = {
val x = Wikis.preprocessIncludes(wid, markup, content, Some(this))
x
}
def wid = WID(category, name, parent, None, if(realm == Wikis.RK) None else Some(realm))
def uwid = UWID(category, _id, if(realm == Wikis.RK) None else Some(realm))
/** i should be conservative and default to rk. Note this doesn't check Config.urlcanon */
def canonicalUrl =
if (realm != Wikis.RK) {
Wikis(realm).navTagFor(tags).map(x =>
s"http://rk.dieselapps.com/wiki/${wid.wpath}") getOrElse
s"http://rk.dieselapps.com/wiki/${wid.wpath}"
    } else s"http://rk.dieselapps.com/wiki/${wid.wpath}"
def cloneContent(newcontent: String) = copy(content = newcontent)
def cloneNewVer(label: String, markup: String, content: String, by: ObjectId, props: Map[String, String] = this.props) =
copy(label=label, markup=markup, content=content, by=by, ver=ver + 1, props=props, updDtm=DateTime.now)
def cloneProps(m: Map[String, String], sby: ObjectId) = copy(props = this.props ++ m)
def withTags(s: Seq[String], sby: ObjectId) = copy(tags=s)
def findParent = parent flatMap (p => Wikis(realm).find(p))
def isReserved = props.get(PROP_RESERVED).exists(_ == "yes")
def isDraft = props.contains("draft")
def isPrivate = "User" == category || (props.exists(e => PROP_OWNER == e._1))
def isOwner(id: String) = ("User" == category && name == id) || (props.exists(e => PROP_OWNER == e._1 && id == e._2))
def owner = props.get(PROP_OWNER).flatMap(s => WikiUsers.impl.findUserById(new ObjectId(s)))
def ownerId = props.get(PROP_OWNER).map(s=> new ObjectId(s))
// todo trying to avoid parsing it just to get the label
def getLabel = if(content contains "label") contentProps.getOrElse("label", label) else label
def getDescription = contentProps.getOrElse("meta.description", getFirstParagraph.mkString)
def getFirstParagraph = content.lines.find(s => !s.trim.isEmpty && !".{".contains(s.trim.charAt(0)))
def wordCount = content.count(_ == ' ')
def visibility = props.get(PROP_VISIBILITY).getOrElse(Visibility.PUBLIC)
def wvis = props.get(PROP_WVIS).getOrElse(visibility)
def create = {
// TODO optimize exists
if (Wikis.find(wid).exists(_.realm == this.realm)) {
Log.error("ERR_WIKI page exists " + wid)
throw new IllegalStateException(s"page already exists: $category/$name")
}
Audit.logdbWithLink(
if(wid.cat=="Note") AUDIT_NOTE_CREATED else AUDIT_WIKI_CREATED,
wid.urlRelative,
"BY " + (WikiUsers.impl.findUserById(this.by).map(_.userName).getOrElse(this.by.toString)) +
" " + category + ":" + name)
// add form section if new wiki
if(this.ipreprocessed.isEmpty) {
this.preprocess(None)
}
var neww = this
if(fields.nonEmpty && !content.contains("{{.section:formData}}")) {
      neww = this.copy(content=content + "\n" + Wikis.mkFormData(this))
}
Wikis(realm).weTable(wid.cat) += grater[WikiEntry].asDBObject(Audit.createnoaudit(neww))
Wikis.shouldFlag(name, label, content).map(auditFlagged(_))
Wikis(realm).index.create(neww)
}
/** backup old version and update entry, update index */
def update(newVer: WikiEntry, reason:Option[String] = None)(implicit txn:Txn=tx.auto) = {
val uname = WikiUsers.impl.findUserById(newVer.by).map(_.userName).getOrElse(newVer.by.toString)
if(uname != "Razie")
Audit.logdbWithLink(
if(wid.cat=="Note") AUDIT_NOTE_UPDATED else AUDIT_WIKI_UPDATED,
newVer.wid.urlRelative,
s"""BY $uname - $category : $name ver ${newVer.ver}""")
if(!isDraft || !newVer.isDraft) WikiEntryOld(this, reason).create
// force unix style - some patterns go weird with \\r
val safeVer = newVer.copy(content = newVer.content.replaceAll("\\r", ""))
RUpdate.noAudit[WikiEntry](Wikis(realm).weTables(wid.cat), Map("_id" -> newVer._id), safeVer)
Wikis.shouldFlag(name, label, content).map(auditFlagged(_))
// this is done async from WikiEvent. if sync here it will cause problems
// Wikis(realm).index.update(this, newVer)
}
/** backup old version and update entry, update index */
def delete(sby: String) (implicit txn:Txn) = {
    Audit.logdb(AUDIT_WIKI_DELETED, "BY " + sby + " " + category + ":" + name, "\nCONTENT:\n" + this)
WikiEntryOld(this, Some ("deleted")).create
WikiTrash("WikiEntry", this.grated, sby, txn.id).create
val key = Map("realm" -> realm, "category" -> category, "name" -> name, "parent" -> parent)
RDelete.apply (Wikis(realm).weTables(wid.cat), key)
}
def auditFlagged(f: String) { Log.audit(Audit.logdb(f, category + ":" + name)) }
/** reparsing the content - wiki sections are delimited by {{section:name}} */
/** these are normal - all sections after include */
lazy val sections = findSections(included, PATT_SEC) ::: findSections(included, PATT_TEM)
def sectionsNoInclude = findSections(content, PATT_SEC) ::: findSections(content, PATT_TEM)
/** these are when used as a template - template sections do not resolve include */
lazy val templateSections = findSections(included, PATT_TEM) ::: findSections(content, PATT_TEM)
  // this ((?>.*?(?=\{\{/))) means non-greedy lookahead
//?s means DOTALL - multiline
// format: {{stype[ :]name:signature}}content
  private final val PATT_SEC =
    """(?s)\{\{\.*(section|def|lambda|inline|dfiddle|dsl\.\w*)([: ])?([^:}]*)?(:)?([^}]*)?\}\}((?>.*?(?=\{\{/[^`])))\{\{/\.*(section|def|lambda|inline|dfiddle|dsl\.\w*)?\}\}""".r
  private final val PATT_TEM =
    """(?s)\{\{\.*(template)([: ])?([^ :}]*)?([: ])?([^}]*)?\}\}((?>.*?(?=\{\{/[^`])))\{\{/\.*(template)?\}\}""".r
// 1 2 3 4 5 6
  /** find the sections - used because for templating I don't want to resolve the includes */
private def findSections (c:String, pat:scala.util.matching.Regex) = {
// todo use the wiki parser later modifiers to load the sections, not a separate parser here
// todo IMPORTANT - can't quite do that: these are used WHILE parsing other elements... see WikiDomParser.pmsg
val PATT2 = pat
val x = pat replaceSomeIn (c, { m =>
None
})
(for (m <- pat.findAllIn(c).matchData) yield {
val mm = PATT2.findFirstMatchIn(m.matched).get
val g5 = mm.group(5)
val signargs = g5.split("[: ]",2)
// sometimes space divides an arg list, so we bring it back
// val args = if(signargs.length>1 && !signargs(0).contains(",")) AA(signargs(1)).toMap else if(signargs(0).contains(",")) AA(signargs(0)+","+signargs(1)).toMap else Map.empty[String,String]
// val sign = if(signargs.length > 1) signargs(0)+","+ signargs(1) else signargs(0)
var args = if(signargs.length>1) AA(signargs(1)).toMap else Map.empty[String,String]
args = args.map(t=>
(t._1, (
        if(t._2.startsWith("\"") && t._2.endsWith("\"")) t._2.substring(1, t._2.length-1)
else t._2
))
)
val sign = signargs(0)
val ws = WikiSection(mm.source.toString, this, mm.group(1), mm.group(3), sign, mm.group(6), args)
val ss = c.substring(0, m.start)
val t = ss.lines.toList
if(t.size > 0) {
ws.line = t.size+1
ws.col = t.apply(t.size-1).length
}
ws
}).toList
}
/** find a section */
  def section (stype: String, name: String) =
    sections.find(x => x.stype == stype && x.name == name)
/** scripts are just a special section */
lazy val scripts = sections.filter(x => "def" == x.stype || "lambda" == x.stype || "inline" == x.stype)
// when signing an edited page, we don't look at includes - big boom
def scriptsNoInclude = sectionsNoInclude.filter(x => "def" == x.stype || "lambda" == x.stype || "inline" == x.stype)
/** pre processed form - parsed and graphed. No context is used when parsing - only when folding this AST, so you can reuse the AST */
lazy val ast = Wikis.preprocess(this.wid, this.markup, Wikis.noBadWords(this.content), Some(this))
/** AST folded with a context */
var ipreprocessed : Option[(SState, Option[WikiUser])] = None;
//todo don't hold the actual user, but someone that can get the user... prevents caching?
override def parsed = preprocessed.s
// smart preprocess with user and stuff
def preprocess(au:Option[WikiUser]) = {
val t1 = System.currentTimeMillis
val s = ast.fold(WAST.context(Some(this), au)) // fold the AST
    // add hardcoded attributes - these can be overridden by tags in content
val res = SState(s.s,
Map("category" -> category, "name" -> name, "label" -> label, "url" -> (wid.urlRelative),
"id" -> _id.toString, "tags" -> tags.mkString(",")) ++ s.props,
s.ilinks)
ipreprocessed = Some(res, au)
val t2 = System.currentTimeMillis
cdebug << s"wikis.folded ${t2 - t1} millis for ${wid.name}"
res
}
def preprocessed = ipreprocessed.map(_._1).getOrElse(preprocess(None))
def grated = grater[WikiEntry].asDBObject(this)
override def toString: String =
grater[WikiEntry].asDBObject(this).toString
/** tags collected during parsing of the content, with some static tags like url,label etc */
def contentProps = preprocessed.props
/** all properties contained in this spec, in various forms */
def allProps : Map[String,String] = {
contentProps ++ props ++ (
if(fields.size > 0) {
this.form // parse form and populate fields
fields.map (t=> (t._1, t._2.value))
}
else Map.empty
)
}
  /** attributes are props perhaps overridden in content */
def attr(name:String) : Option[String] =
// optimized to not parse if it's not in content
if(ipreprocessed.isDefined) contentProps.get(name).orElse(props.get(name))
else if(content contains name) contentProps.get(name).orElse(props.get(name))
else props.get(name)
/** either from me, parent or reactor */
def findAttr(name:String) : Option[String] =
// look at parent if child did not overwrite the setting
// todo fallback onto reactor
attr(name) orElse findParent.flatMap(_.attr(name))
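  // illustrative lookup order (assumed attribute name): attr("layout") prefers a value
  // parsed from this page's content over its stored props, while findAttr("layout")
  // additionally falls back to the parent page when this page does not define it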
/** all the links from this page to others, based on parsed content */
def ilinks = preprocessed.ilinks.filter(_.isInstanceOf[ILink]).asInstanceOf[List[ILink]]
final val AUDIT_WIKI_CREATED = "WIKI_CREATED"
final val AUDIT_WIKI_UPDATED = "WIKI_UPDATED"
final val AUDIT_WIKI_DELETED = "WIKI_DELETED"
final val AUDIT_NOTE_CREATED = "NOTE_CREATED"
final val AUDIT_NOTE_UPDATED = "NOTE_UPDATED"
/** field definitions as parsed
* fields are rendered in WForm
*/
var fields : mutable.Map[String,FieldDef] = new mutable.HashMap[String, FieldDef]()
lazy val form = new WikiForm(this)
def formRole = this.props.get(FormStatus.FORM_ROLE)
def formState = this.props.get(FormStatus.FORM_STATE).orElse(form.formState)
/** other parsing artifacts to be used by knowledgeable modules.
* Parsers can put stuff in here. */
//todo move the fields and form stuff here
val collector = new scala.collection.mutable.HashMap[String, Any]()
def linksFrom = RMany[WikiLink] ("from.id" -> this.uwid.id)
def linksTo = RMany[WikiLink] ("to.id" -> this.uwid.id)
def viewCount = ROne[WikiCount] ("pid" -> this.uwid.id).map(_.count)
}
/** a section inside a wiki page
* {{stype name:signature args}}content{{/stype}}
*
* Note that the content will start with a \\n if you use separate lines...
*/
case class WikiSection(original:String, parent: WikiEntry, stype: String, name: String, signature: String, content: String, args:Map[String,String] = Map.empty) {
var line : Int = -1
var col : Int = -1
def sign = Services.auth.sign(content)
def checkSignature(au:Option[WikiUser]) =
if(signature startsWith "SIG")
Services.auth.checkSignature(sign, signature.substring(3), au)
else
Services.auth.checkSignature(sign, signature, au)
def wid = parent.wid.copy(section=Some(name))
override def toString = s"WikiSection(stype=$stype, name=$name, signature=$signature, args=$args, line=$line)"
}
object WikiEntry {
final val PROP_VISIBILITY = "visibility"
final val PROP_WVIS = "wvis"
final val PROP_RESERVED = "reserved"
final val PROP_OWNER: String = "owner"
def grated(o: DBObject) = grater[WikiEntry].asObject(o)
}
/** old wiki entries - a copy of each older version is archived when updated or deleted */
@RTable
case class WikiEntryOld(entry: WikiEntry, reason:Option[String], crDtm: Option[DateTime] = Some(DateTime.now), _id: ObjectId = new ObjectId()) {
def create (implicit txn:Txn) = RCreate.noAudit[WikiEntryOld](this)
}
|
razie/wikireactor
|
common/app/razie/wiki/model/WikiModel.scala
|
Scala
|
apache-2.0
| 17,125 |
package org.scalarules.finance.nl
import java.text.NumberFormat
import java.util.Locale
import scala.math.BigDecimal.RoundingMode._
// scalastyle:off method.name
/**
 * Represents an amount in euros.
 */
case class Bedrag private[finance] (waarde: BigDecimal) {
  /** Returns the sum of this amount and n. */
def + (n: Bedrag): Bedrag = Bedrag(waarde + n.waarde)
  /** Returns the difference between this amount and n. */
def - (n: Bedrag): Bedrag = Bedrag(waarde - n.waarde)
  /** Returns the product of this amount and n. */
def * (n: BigDecimal): Bedrag = Bedrag(waarde * n)
  /** Returns the quotient of this amount and n. */
def / (n: BigDecimal): Bedrag = Bedrag(waarde / n)
  /** Returns the quotient of this amount and n. */
def / (n: Bedrag): BigDecimal = waarde / n.waarde
  /** Truncates this amount (rounding down) to a round hundred euros. */
def afgekaptOp100Euro: Bedrag = afgekaptOp(-2) // scalastyle:ignore magic.number
  /** Truncates this amount (rounding down) to whole euros. */
def afgekaptOpEuros: Bedrag = afgekaptOp(0)
  /** Truncates this amount (rounding down) to whole cents. */
def afgekaptOpCenten: Bedrag = afgekaptOp(2)
  /** Rounds this amount to whole cents, using BigDecimal.RoundingMode.HALF_EVEN. */
def afgerondOpCenten: Bedrag = afgerondOp(2, BigDecimal.RoundingMode.HALF_EVEN)
def afgerondOp(aantalDecimalen: Integer, afrondingsWijze: RoundingMode): Bedrag =
Bedrag(waarde.setScale(aantalDecimalen, afrondingsWijze))
  /** Truncates this amount (rounding down) to the given number of decimal places. */
private def afgekaptOp(decimalen: Int): Bedrag = afgerondOp(decimalen, BigDecimal.RoundingMode.FLOOR)
override def toString = NumberFormat.getCurrencyInstance(Bedrag.nederland).format(waarde)
}
object Bedrag {
private val nederland = new Locale("nl", "NL")
private[nl] val centNaarEuroFactor = BigDecimal(0.01)
}
trait BedragImplicits {
abstract class ToBedrag(value: BigDecimal) {
    /** Creates a Bedrag. */
def euro: Bedrag = Bedrag(value)
    /** Creates a Bedrag. */
def cent: Bedrag = Bedrag(value * Bedrag.centNaarEuroFactor)
    /** Returns the product of this BigDecimal and Bedrag b. */
def *(b: Bedrag): Bedrag = b * value
}
implicit class BigDecimalToBedrag(value: BigDecimal) extends ToBedrag(value)
implicit class IntToBedrag(value: Int) extends ToBedrag(value)
implicit class LongToBedrag(value: Long) extends ToBedrag(value)
  /** It is not possible to multiply a String by a Bedrag:
   * this would conflict with String's own * function and is therefore not implemented. */
implicit class StringToBedrag(value: String){
    /** Creates a Bedrag. */
def euro: Bedrag = Bedrag(BigDecimal(value))
def cent: Bedrag = Bedrag(BigDecimal(value) * Bedrag.centNaarEuroFactor)
}
  /** Makes it easy to compute things like "sum" over collections of Bedrag. */
implicit object NumericBedrag extends Numeric[Bedrag] {
override def plus(x: Bedrag, y: Bedrag): Bedrag = x + y
override def minus(x: Bedrag, y: Bedrag): Bedrag = x - y
override def times(x: Bedrag, y: Bedrag): Bedrag =
throw new UnsupportedOperationException("Vermenigvuldiging van bedrag*bedrag zou een bedrag^2 geven, wat niets betekent.")
override def negate(x: Bedrag): Bedrag = Bedrag(-x.waarde)
override def fromInt(x: Int): Bedrag = x.euro
override def toInt(x: Bedrag): Int = throw new UnsupportedOperationException("toInt zou leiden tot een verlies van precisie.")
override def toLong(x: Bedrag): Long = throw new UnsupportedOperationException("toLong zou leiden tot een verlies van precisie.")
override def toFloat(x: Bedrag): Float = throw new UnsupportedOperationException("toFloat zou leiden tot een verlies van precisie.")
override def toDouble(x: Bedrag): Double = throw new UnsupportedOperationException("toDouble zou leiden tot een verlies van precisie.")
override def compare(x: Bedrag, y: Bedrag): Int = x.waarde compare y.waarde
}
}
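/** Hypothetical usage sketch (not part of the original source): mixes in
 * BedragImplicits so the conversions and NumericBedrag above are in scope. */
object BedragUsageSketch extends BedragImplicits {
  val amounts: List[Bedrag] = List(10.euro, 250.cent, "3.75".euro) // EUR 10.00, 2.50, 3.75
  val total: Bedrag = amounts.sum // EUR 16.25, summed via the implicit NumericBedrag
  val perPerson: Bedrag = (total / 3).afgerondOpCenten // EUR 5.42 after HALF_EVEN rounding
}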
|
scala-rules/finance-dsl
|
src/main/scala/org/scalarules/finance/nl/Bedrag.scala
|
Scala
|
mit
| 4,023 |
package actions
import play.api._
import play.api.mvc._
import play.core.j.JavaHelpers
import play.core.j.JavaHelpers._
import scala.collection.concurrent.TrieMap
import scala.concurrent._
import scala.concurrent.duration._
import WithJContextSupportAction._
import play.mvc.Http.RequestBody
case class WithJContextSupportAction[A](block: JContext => Action[A])(implicit config: Configuration, env: Environment,
bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Action[A] {
def apply(request: Request[A]): Future[Result] = {
val components = JavaHelpers.createContextComponents(config, env)
val jContext = createJavaContext(request.asInstanceOf[Request[RequestBody]], components)
try {
store += (request.id -> jContext)
// need to wait for the enclosed actions to complete
Await.ready(block(jContext)(request), 60 seconds)
} finally {
store -= request.id
}
}
override def executionContext = ec
override def parser = bodyParsers.anyContent.asInstanceOf[BodyParser[A]]
}
object WithJContextSupportAction {
type JContext = play.mvc.Http.Context
type JComponents = play.core.j.JavaContextComponents
private lazy val store = TrieMap[Long, JContext]()
/**
* Extracts the Java context given a request
* @param request The request
* @tparam A The request body type
*/
implicit class RequestToContext[A](request: Request[A]) {
def jContextOption : Option[JContext] = store.get(request.id)
def jContext : JContext = store(request.id)
}
def apply[A](action: Action[A])(implicit config: Configuration, env: Environment, bodyParsers: PlayBodyParsers,
ec: ExecutionContext): WithJContextSupportAction[A] = WithJContextSupportAction(_ => action)
}
|
bravegag/play-authenticate-usage-scala
|
app/actions/WithJContextSupportAction.scala
|
Scala
|
apache-2.0
| 1,843 |
package com.codechef.problems.fctrl2
object Main {
def main(args: Array[String]): Unit = {
import java.io._
val reader = new BufferedReader(new InputStreamReader(System.in))
val tests = reader.readLine.toInt
for(i <- 1 to tests) {
println(fac(BigInt(reader.readLine)))
}
}
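  // computes n! exactly with BigInt; the nested helper accumulates the product tail-recursively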
def fac(n: BigInt): BigInt = {
def helper(a: BigInt, pro: BigInt): BigInt = if(a > n) pro else helper(a + 1, pro * a)
helper(1, 1)
}
}
|
pamu/CodeChef
|
src/main/scala/com/codechef/problems/fctrl2/Main.scala
|
Scala
|
apache-2.0
| 450 |
// Copyright (C) Maxime MORGE 2017
package org.scaia.asia
import org.scaia.hedonic.Player
/**
* Coalition of individuals around an activity
* @constructor create a new coalition
* @param activity the activity practiced by the individuals
* @param group the group of individuals formed
*/
class Coalition(val activity: Activity, val group: Group) {
override def toString: String = activity + ": " + group
/**
* Returns the size of the coalition
*/
def size() = group.size
/**
* Returns the capacity of the coalition
*/
def capacity() = if (activity.equals(Activity.VOID)) 1 else activity.c
/**
* Returns true if the coalition is empty
*/
def isEmpty() = group.isEmpty
/**
* Returns true if the coalition is sound
*/
def isSound() = this.size() <= this.capacity()
/**
* Returns true if the coalition is individually rational for i
*/
def isIndividuallyRational(i: Individual) = i.u(group.names(), activity.name) >= 0
/**
* Returns true if the coalition strongly blocks the matching
*/
@throws(classOf[RuntimeException])
def stronglyBlock(matching: Matching): Boolean = {
if (this.isEmpty()) throw new RuntimeException(this+" is an empty coalition ")
if (!this.isSound()) throw new RuntimeException(this+" is not a sound coalition")
group.forall(i => i.sprefC(this,new Coalition(matching.a(i),matching.g(i))))
}
/**
* Returns true if the coalition weakly blocks the matching
*/
@throws(classOf[RuntimeException])
def weaklyBlock(matching: Matching): Boolean = {
if (this.isEmpty()) throw new RuntimeException(this+" is an empty coalition ")
if (!this.isSound()) throw new RuntimeException(this+" is not a sound coalition")
(group.forall(i => i.prefC(this,new Coalition(matching.a(i),matching.g(i)))) &&
group.exists(i => i.sprefC(this,new Coalition(matching.a(i),matching.g(i)))))
}
}
/**
  * Factory for [[Coalition]] instances
  */
object Coalition{
val debug = false
  /**
    * Returns the result of converting a [[org.scaia.hedonic.Coalition]] into a [[Coalition]]
    * @param hedonicCoalition the hedonic coalition to convert
    * @param pb the problem providing the individuals and activities
    * @return the corresponding coalition of individuals around an activity
    */
def apply(hedonicCoalition: org.scaia.hedonic.Coalition, pb: IAProblem): Coalition = {
var a: Activity = Activity.VOID
var g : Group = new Group()
//Find the player for this activity
hedonicCoalition.find(p=> pb.activities.exists(a => a.name.equals(p.name))) match {
case Some(playerActivity) => { // If one player represents an activity
a=pb.getActivity(playerActivity.name)
val playersIndividuals =hedonicCoalition-playerActivity
playersIndividuals.foreach( playerIndividual => g+=pb.getIndividual(playerIndividual.name))
val c= new Coalition(a,g)
if (debug) println("Coalition: "+c)
return c
}
case None => // Otherwise the activity is void and the group a single individual
g+=pb.getIndividual(hedonicCoalition.head.name)
val c= new Coalition(a,g)
if (debug) println("Coalition: "+c)
return c
}
}
}
|
maximemorge/ScaIA
|
src/main/scala/org/scaia/asia/Coalition.scala
|
Scala
|
gpl-3.0
| 3,105 |
/*
* Copyright (c) 2017. Yuriy Stul
*/
package com.stulsoft.kafka3
import java.util.Properties
import java.util.concurrent.TimeUnit
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import scala.concurrent.Future
import scala.util.Random
/**
* @author Yuriy Stul
*/
final class Producer(val topic: String, val interval: Int) extends LazyLogging {
private var continueExecuting = false
import scala.concurrent.ExecutionContext.Implicits.global
def start(): Future[Unit] = Future {
logger.info("Started Producer")
continueExecuting = true
val props: Properties = new Properties()
props.put("bootstrap.servers", KAFKA_HOSTS)
props.put("acks", "all")
props.put("retries", Int.box(0))
props.put("batch.size", Int.box(16384))
props.put("linger.ms", Int.box(1))
props.put("buffer.memory", Int.box(33554432))
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
val producer = new KafkaProducer[String, String](props)
while (continueExecuting) {
sendMessage(producer)
Thread.sleep(interval)
}
producer.close()
logger.info("Stopped Producer")
}
private def sendMessage(producer: KafkaProducer[String, String]): Unit = {
try {
val future = producer.send(new ProducerRecord[String, String](topic, "theKey_" + Random.nextInt(), "the value_" + Random.nextInt()))
logger.info("Sent message")
try {
val result = future.get(10, TimeUnit.SECONDS)
val resultText = s"Succeeded send message. Offset is ${result.offset()}, partition is ${result.partition()}, topic is ${result.topic()}"
logger.info(resultText)
}
catch {
case e: Exception => logger.error("Failed send message. Error: {}", e.getMessage)
}
}
catch {
case e: Throwable => logger.error("Failed sending message with error {}", e.getMessage)
}
}
def stop(): Unit = continueExecuting = false
}
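/** Hypothetical usage sketch (not part of the original source; the topic name and
 * timings are assumptions): produce for ten seconds, then signal the loop to stop. */
object ProducerDemo extends App {
  val producer = new Producer("test-topic", interval = 1000)
  producer.start() // runs the send loop on the global ExecutionContext
  Thread.sleep(10000)
  producer.stop()
}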
|
ysden123/poc
|
pkafka/kafka3/src/main/scala/com/stulsoft/kafka3/Producer.scala
|
Scala
|
mit
| 2,122 |
package uk.gov.homeoffice.io
import java.io.IOException
import java.net.URL
import scala.io.Codec
import scala.util.{Failure, Success}
import org.specs2.mutable.Specification
class URLToStringSpec extends Specification {
"URL resource" should {
"give a string" in {
Resource(new URL("file:./src/test/resources/test.json")).to[String] mustEqual Success {
"""{
| "blah": "whatever"
|}""".stripMargin
}
}
"give a string for a specified encoding" in {
Resource(new URL("file:./src/test/resources/test.json"), Codec.ISO8859).to[String] mustEqual Success {
"""{
| "blah": "whatever"
|}""".stripMargin
}
}
}
"URL" should {
"fail to be read" in {
new URL("file:./src/test/resources/non-existing.json").to[String] must beLike {
case Failure(e: IOException) => e.getMessage mustEqual "Could not read URL for given: file:./src/test/resources/non-existing.json"
}
}
"give a string" in {
new URL("file:./src/test/resources/test.json").to[String] mustEqual Success {
"""{
| "blah": "whatever"
|}""".stripMargin
}
}
}
}
|
UKHomeOffice/rtp-io-lib
|
src/test/scala/uk/gov/homeoffice/io/URLToStringSpec.scala
|
Scala
|
mit
| 1,198 |
package com.truman.modules.json.alias
import javax.inject.{Inject, Singleton}
import scala.util.Try
import scala.concurrent.Future
import com.google.inject.AbstractModule
import play.api.Play
import play.api.libs.json._
import play.api.inject.ApplicationLifecycle
import com.truman.utils.Base62Encoder
@Singleton
class JsonAliasRedis @Inject()(lifecycle: ApplicationLifecycle) extends JsonAlias {
// release storage manager for alias store
lifecycle.addStopHook { () =>
    Future.successful(())
}
  private def validate(raw: JsValue): Boolean = {
    // placeholder validation: any JsValue that can be inspected without throwing is valid
    Try(raw).map(_ => true).getOrElse(false)
  }
override def encode(jsObject: JsObject): Future[JsObject] = {
if (!validate(jsObject)) {
Future.failed(new IllegalArgumentException("Raw JSON is invalid."))
} else {
// alias start
// iterate all attributes of the JSON
Future.successful(jsObject)
}
}
override def encode(jsArray: JsArray): Future[JsArray] = {
if (!validate(jsArray)) {
Future.failed(new IllegalArgumentException("Raw JSON is invalid."))
} else {
// alias start
      // iterate all JsObject within JsArray
Future.successful(jsArray)
}
}
override def decode(aliasObject: JsObject): Future[JsObject] = {
if (!validate(aliasObject)) {
Future.failed(new IllegalArgumentException("Raw JSON is invalid."))
} else {
// alias start
      // iterate all attributes of the JSON
Future.successful(aliasObject)
}
}
override def decode(aliasArray: JsArray): Future[JsArray] = {
if (!validate(aliasArray)) {
Future.failed(new IllegalArgumentException("Raw JSON is invalid."))
} else {
// alias start
// iterate all JsObject within JsArray
Future.successful(aliasArray)
}
}
}
|
truman-misfit/play-json-alias
|
src/main/scala/com/truman/modules/json/alias/JsonAliasRedisModule.scala
|
Scala
|
apache-2.0
| 1,830 |
// Copyright (C) 2019 MapRoulette contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
package org.maproulette.services.osm.objects
import javax.inject.{Inject, Singleton}
import org.joda.time.DateTime
import org.maproulette.Config
import org.maproulette.cache.BasicCache
import org.maproulette.services.osm.OSMType
import play.api.libs.ws.WSClient
import scala.concurrent.Future
import scala.xml.Node
/**
 * Provider extending ObjectProvider that retrieves and caches relations
*
* @author mcuthbert
*/
@Singleton
class RelationProvider @Inject() (override val ws: WSClient, override val config: Config)
extends ObjectProvider[VersionedRelation] {
val cache = new BasicCache[Long, VersionedObjects[VersionedRelation]](config)
def get(ids: List[Long]): Future[List[VersionedRelation]] = getFromType(ids, OSMType.RELATION)
override protected def createVersionedObjectFromXML(
elem: Node,
id: Long,
visible: Boolean,
version: Int,
changeset: Int,
timestamp: DateTime,
user: String,
uid: Long,
tags: Map[String, String]
): VersionedRelation = {
VersionedRelation(
s"Node_$id",
id,
visible,
version,
changeset,
timestamp,
user,
uid,
tags,
(elem \\ "member")
.map(elem => {
RelationMember((elem \\ "@type").text, (elem \\ "@ref").text.toLong, (elem \\ "@role").text)
})
.toList
)
}
}
|
Crashfreak/maproulette2
|
app/org/maproulette/provider/osm/objects/RelationProvider.scala
|
Scala
|
apache-2.0
| 1,512 |
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.store
import scala.language.implicitConversions
import com.google.common.primitives.UnsignedLongs
import com.treode.pickle.Picklers
class CatalogId (val id: Long) extends AnyVal with Ordered [CatalogId] {
def compare (that: CatalogId): Int =
UnsignedLongs.compare (this.id, that.id)
override def toString =
if (id < 256) f"Catalog:$id%02X" else f"Catalog:$id%016X"
}
object CatalogId extends Ordering [CatalogId] {
val MinValue = CatalogId (0)
val MaxValue = CatalogId (-1)
implicit def apply (id: Long): CatalogId =
new CatalogId (id)
def compare (x: CatalogId, y: CatalogId): Int =
x compare y
val pickler = {
import Picklers._
wrap (fixedLong) build (apply _) inspect (_.id)
}}
|
Treode/store
|
store/src/com/treode/store/CatalogId.scala
|
Scala
|
apache-2.0
| 1,348 |
// Copyright 2011-2012 James Michael Callahan
// See LICENSE-2.0 file for licensing information.
package org.scalagfx.math
import scala.math.{abs,sin,cos}
import java.nio._
//--------------------------------------------------------------------------------------------------
// M A T R I X 3 3 D
//--------------------------------------------------------------------------------------------------
//--------------------------------------------------------------------------------------------------
// | basisX.x basisY.x basisZ.x |
// | basisX.y basisY.y basisZ.y |
// | basisX.z basisY.z basisZ.z |
//--------------------------------------------------------------------------------------------------
/** Companion object for Matrix33d. */
object Matrix33d
{
//------------------------------------------------------------------------------------------------
// C R E A T I O N
//------------------------------------------------------------------------------------------------
/** Create an identity matrix. */
def apply() =
new Matrix33d(Vector3d.unitX, Vector3d.unitY, Vector3d.unitZ)
/** Create an arbitrary matrix from basis vectors. */
def apply(basisX: Vector3d, basisY: Vector3d, basisZ: Vector3d) =
new Matrix33d(basisX, basisY, basisZ)
/** Create an arbitrary 3x3 matrix from a nested list of elements (basis vectors XYZ). */
def apply(mx: List[List[Double]]) =
mx match {
case List(List(bxx, bxy, bxz),
List(byx, byy, byz),
List(bzx, bzy, bzz)) => new Matrix33d(Vector3d(bxx, bxy, bxz),
Vector3d(byx, byy, byz),
Vector3d(bzx, bzy, bzz))
case _ => throw new IllegalArgumentException(
"The given nested list of values did not correspond to a legal 3x3 matrix!")
}
/** Create an arbitrary 3x3 matrix from a nested array of elements (basis vectors XYZ). */
def apply(mx: Array[Array[Double]]) =
mx match {
case Array(Array(bxx, bxy, bxz),
Array(byx, byy, byz),
Array(bzx, bzy, bzz)) => new Matrix33d(Vector3d(bxx, bxy, bxz),
Vector3d(byx, byy, byz),
Vector3d(bzx, bzy, bzz))
case _ => throw new IllegalArgumentException(
"The given nested array of values did not correspond to a legal 3x3 matrix!")
}
/** Create an arbitrary 3x3 matrix from a native array of elements (basis vectors XYZ). */
def apply(mx: DoubleBuffer) = {
if(mx.capacity != 9)
throw new IllegalArgumentException(
"The given native array did not contain (9) values!")
mx.rewind
(mx.get, mx.get, mx.get,
mx.get, mx.get, mx.get,
mx.get, mx.get, mx.get) match {
case (bxx, bxy, bxz,
byx, byy, byz,
bzx, bzy, bzz) => new Matrix33d(Vector3d(bxx, bxy, bxz),
Vector3d(byx, byy, byz),
Vector3d(bzx, bzy, bzz))
case _ => throw new IllegalArgumentException(
"The given native array of values did not correspond to a legal 3x3 matrix!")
}
}
/** Create an arbitrary 3x3 matrix from a native array of float elements (basis vectors XYZ). */
def apply(mx: FloatBuffer) = {
if(mx.capacity != 9)
throw new IllegalArgumentException(
"The given native array did not contain (9) values!")
mx.rewind
(mx.get, mx.get, mx.get,
mx.get, mx.get, mx.get,
mx.get, mx.get, mx.get) match {
case (bxx, bxy, bxz,
byx, byy, byz,
bzx, bzy, bzz) => new Matrix33d(Vector3d(bxx.toDouble, bxy.toDouble, bxz.toDouble),
Vector3d(byx.toDouble, byy.toDouble, byz.toDouble),
Vector3d(bzx.toDouble, bzy.toDouble, bzz.toDouble))
case _ => throw new IllegalArgumentException(
"The given native array of values did not correspond to a legal 3x3 matrix!")
}
}
/** Create a new diagonal matrix.
*
* | v.x 0.0 0.0 |
* | 0.0 v.y 0.0 |
* | 0.0 0.0 v.z | */
def diagonal(v: Vector3d): Matrix33d =
Matrix33d(Vector3d.unitX*v.x, Vector3d.unitY*v.y, Vector3d.unitZ*v.z)
/** Create a new diagonal matrix.
*
* | x 0.0 0.0 |
* | 0.0 y 0.0 |
* | 0.0 0.0 z | */
def diagonal(x: Double, y: Double, z: Double): Matrix33d =
diagonal(Vector3d(x, y, z))
//------------------------------------------------------------------------------------------------
// C O M P A R I S O N
//------------------------------------------------------------------------------------------------
  /** The component-wise comparison of whether two matrices are within a given
* epsilon. */
def equiv(a: Matrix33d, b: Matrix33d, epsilon: Double): Boolean =
a.equiv(b, epsilon)
  /** The component-wise comparison of whether two matrices are within a type
* specific epsilon. */
def equiv(a: Matrix33d, b: Matrix33d): Boolean =
(a equiv b)
/** The component-wise minimum of two matrices. */
def min(a: Matrix33d, b: Matrix33d): Matrix33d =
compwise(a, b, scala.math.min(_, _))
/** The component-wise maximum of two matrices. */
def max(a: Matrix33d, b: Matrix33d): Matrix33d =
compwise(a, b, scala.math.max(_, _))
//------------------------------------------------------------------------------------------------
// U T I L I T Y
//------------------------------------------------------------------------------------------------
  /** Create a matrix whose components are generated by applying the given binary operator
* to each of the corresponding components of the given two matrices. */
def compwise(a: Matrix33d, b: Matrix33d, f: (Double, Double) => Double): Matrix33d =
Matrix33d(Vector3d.compwise(a.basisX, b.basisX, f),
Vector3d.compwise(a.basisY, b.basisY, f),
Vector3d.compwise(a.basisZ, b.basisZ, f))
}
/** An arbitrary 3x3 matrix of Double values.
*
* @constructor Create a new matrix.
* @param basisX The basis vector for the X-dimension.
* @param basisY The basis vector for the Y-dimension.
* @param basisZ The basis vector for the Z-dimension. */
class Matrix33d(val basisX: Vector3d, val basisY: Vector3d, val basisZ: Vector3d)
extends MatrixLike
{
//------------------------------------------------------------------------------------------------
// U N A R Y O P S
//------------------------------------------------------------------------------------------------
/** The number of dimensions. */
val dimens = 3
//------------------------------------------------------------------------------------------------
// O P E R A T O R S
//------------------------------------------------------------------------------------------------
/** Post-multiplying a column vector by this matrix. */
def xform(v: Vector3d) =
basisX*v.x + basisY*v.y + basisZ*v.z
/** Post-multiplying a column vector by this matrix. */
def * (v: Vector3d) = xform(v)
/** Concatenate (multiply) a matrix (on the right) with this matrix. */
def concat(that: Matrix33d) =
Matrix33d(xform(that.basisX), xform(that.basisY), xform(that.basisZ))
/** Concatenate (multiply) a matrix (on the right) with this matrix. */
def * (that: Matrix33d) = concat(that)
//------------------------------------------------------------------------------------------------
/** The addition of a scalar value to all components of this matrix. */
def + (scalar: Double): Matrix33d =
Matrix33d(basisX+scalar, basisY+scalar, basisZ+scalar)
/** The subtraction of a scalar value from all components of this matrix. */
def - (scalar: Double): Matrix33d =
Matrix33d(basisX-scalar, basisY-scalar, basisZ-scalar)
/** The product of a scalar value with all components of this matrix. */
def * (scalar: Double): Matrix33d =
Matrix33d(basisX*scalar, basisY*scalar, basisZ*scalar)
/** The quotient of dividing all components of this matrix by a scalar value. */
def / (scalar: Double): Matrix33d =
Matrix33d(basisX/scalar, basisY/scalar, basisZ/scalar)
//------------------------------------------------------------------------------------------------
/** Find the transpose of this matrix. */
def transpose: Matrix33d =
Matrix33d(Vector3d(basisX.x, basisY.x, basisZ.x),
Vector3d(basisX.y, basisY.y, basisZ.y),
Vector3d(basisX.z, basisY.z, basisZ.z))
/** Find the 2x2 sub-matrix obtained by deleting the given column and row. */
def submatrix(col: Int, row: Int): Matrix22d =
(col, row) match {
case (0, 0) => Matrix22d(Vector2d(basisY.y, basisY.z),
Vector2d(basisZ.y, basisZ.z))
case (0, 1) => Matrix22d(Vector2d(basisY.x, basisY.z),
Vector2d(basisZ.x, basisZ.z))
case (0, 2) => Matrix22d(Vector2d(basisY.x, basisY.y),
Vector2d(basisZ.x, basisZ.y))
case (1, 0) => Matrix22d(Vector2d(basisX.y, basisX.z),
Vector2d(basisZ.y, basisZ.z))
case (1, 1) => Matrix22d(Vector2d(basisX.x, basisX.z),
Vector2d(basisZ.x, basisZ.z))
case (1, 2) => Matrix22d(Vector2d(basisX.x, basisX.y),
Vector2d(basisZ.x, basisZ.y))
case (2, 0) => Matrix22d(Vector2d(basisX.y, basisX.z),
Vector2d(basisY.y, basisY.z))
case (2, 1) => Matrix22d(Vector2d(basisX.x, basisX.z),
Vector2d(basisY.x, basisY.z))
case (2, 2) => Matrix22d(Vector2d(basisX.x, basisX.y),
Vector2d(basisY.x, basisY.y))
case _ => throw new IllegalArgumentException(
"Invalid column (" + col + ") or row (" + row + ")!")
}
/** Find the minor of the given cell (column, row) of this matrix.
*
* The minor is the determinant of the 2x2 matrix which remains when the row and column of the
* given cell are removed from the original 3x3 matrix. */
def minor(col: Int, row: Int): Double =
submatrix(col, row).determinant
/** Find the matrix of minors of this matrix. */
def minors: Matrix33d =
Matrix33d(Vector3d(minor(0, 0), minor(0, 1), minor(0, 2)),
Vector3d(minor(1, 0), minor(1, 1), minor(1, 2)),
Vector3d(minor(2, 0), minor(2, 1), minor(2, 2)))
/** Find the cofactor of the given cell (column, row) of this matrix.
*
* The cofactor of a cell is the minor in which the sign of the result is determined by
* whether the sum of the column and row indices in the original matrix is even (unchanged)
* or odd (flipped). */
def cofactor(col: Int, row: Int): Double = {
val mm = minor(col, row)
if((col+row)%2 == 0) mm else -mm
}
/** Find the matrix of cofactors of this matrix. */
def cofactors: Matrix33d = {
val even = Vector3d(1.0, -1.0, 1.0)
val odd = even * -1.0
val mm = minors
Matrix33d(mm.basisX*even, mm.basisY*odd, mm.basisZ*even)
}
/** Find the adjoint of this matrix.
*
* The adjoint is the transpose of the cofactors matrix. */
def adjoint: Matrix33d =
cofactors.transpose
/** Find the determinant of this matrix. */
def determinant: Double =
basisX.x*cofactor(0, 0) + basisX.y*cofactor(0, 1) + basisX.z*cofactor(0, 2)
/** Whether the matrix has an inverse (is non-singular). */
def isInvertible =
!Scalar.equiv(determinant, 0.0)
/** Find the inverse (if possible) of this matrix. */
def inverse: Option[Matrix33d] = {
val det = determinant
if(Scalar.equiv(det, 0.0)) None
else Some(adjoint/det)
}
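  /** Illustrative check (not in the original source): for an invertible matrix m,
    * m.inverse.map(inv => (inv * m) equiv Matrix33d()) should yield Some(true),
    * up to numerical precision. */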
//------------------------------------------------------------------------------------------------
// C O M P A R I S O N
//------------------------------------------------------------------------------------------------
  /** Compares this matrix to the specified value for equality. */
override def equals(that: Any): Boolean =
that match {
case that: Matrix33d =>
(that canEqual this) &&
(basisX == that.basisX) && (basisY == that.basisY) && (basisZ == that.basisZ)
case _ => false
}
/** A method that should be called from every well-designed equals method that is open
* to be overridden in a subclass. */
def canEqual(that: Any): Boolean =
that.isInstanceOf[Matrix33d]
/** Returns a hash code value for the object. */
override def hashCode: Int =
47 * (43 * (41 + basisX.hashCode) + basisY.hashCode) + basisZ.hashCode
//------------------------------------------------------------------------------------------------
  /** The component-wise comparison of whether the given matrix is within a given
* epsilon of this matrix. */
def equiv(that: Matrix33d, epsilon: Double): Boolean =
forall(that)(Scalar.equiv(_, _, epsilon))
/** The component-wise comparison of whether the given matrix is within a type
* specific epsilon of this matrix. */
def equiv(that: Matrix33d): Boolean =
forall(that)(Scalar.equiv(_, _))
//------------------------------------------------------------------------------------------------
// U T I L I T Y
//------------------------------------------------------------------------------------------------
  /** Look up the value of a given cell (column, row) of this matrix. */
def apply(col: Int, row: Int) = {
val b = col match {
case 0 => basisX
case 1 => basisY
case 2 => basisZ
case _ => throw new IllegalArgumentException("Invalid column (" + col + ")!")
}
row match {
case 0|1|2 => b(row)
case _ => throw new IllegalArgumentException("Invalid row (" + row + ")!")
}
}
  /** Tests whether the given predicate holds true for all components of this matrix. */
def forall(p: (Double) => Boolean): Boolean =
basisX.forall(p) && basisY.forall(p) && basisZ.forall(p)
/** Tests whether the given predicate holds true for all of the corresponding components
* of this and the given matrix. */
def forall(that: Matrix33d)(p: (Double, Double) => Boolean): Boolean =
basisX.forall(that.basisX)(p) && basisY.forall(that.basisY)(p) &&
basisZ.forall(that.basisZ)(p)
  /** Tests whether the given predicate holds true for any component of this matrix. */
def forany(p: (Double) => Boolean): Boolean =
    basisX.forany(p) || basisY.forany(p) || basisZ.forany(p)
/** Tests whether the given predicate holds true for any of the corresponding components
* of this and the given matrix. */
def forany(that: Matrix33d)(p: (Double, Double) => Boolean): Boolean =
    basisX.forany(that.basisX)(p) || basisY.forany(that.basisY)(p) ||
    basisZ.forany(that.basisZ)(p)
/** Applies a function to all components of this matrix.
*
* @param f The function that is applied for its side-effect to every component. */
def foreach(f: (Double) => Unit): Unit = {
basisX.foreach(f); basisY.foreach(f); basisZ.foreach(f)
}
/** Builds a new matrix by applying a function to each component of this
* matrix. */
def map(f: (Double) => Double): Matrix33d =
Matrix33d(basisX.map(f), basisY.map(f), basisZ.map(f))
//------------------------------------------------------------------------------------------------
// C O N V E R S I O N
//------------------------------------------------------------------------------------------------
/** Convert to a nested list of elements (basis vectors XYZ). */
def toList: List[List[Double]] =
List(List(basisX.x, basisX.y, basisX.z),
List(basisY.x, basisY.y, basisY.z),
List(basisZ.x, basisZ.y, basisZ.z))
/** Convert to a nested array of elements (basis vectors XYZ). */
def toArray: Array[Array[Double]] =
Array(Array(basisX.x, basisX.y, basisX.z),
Array(basisY.x, basisY.y, basisY.z),
Array(basisZ.x, basisZ.y, basisZ.z))
/** Add the component values (basis vectors XYZ) starting at the current position to given
* native array. */
def putNative(buf: DoubleBuffer) {
buf.put(basisX.x); buf.put(basisX.y); buf.put(basisX.z)
buf.put(basisY.x); buf.put(basisY.y); buf.put(basisY.z)
buf.put(basisZ.x); buf.put(basisZ.y); buf.put(basisZ.z)
}
/** Add the component values (basis vectors XYZ) starting at the current position to given
* native array of floats. */
def putNativeFloats(buf: FloatBuffer) {
buf.put(basisX.x.toFloat); buf.put(basisX.y.toFloat); buf.put(basisX.z.toFloat)
buf.put(basisY.x.toFloat); buf.put(basisY.y.toFloat); buf.put(basisY.z.toFloat)
buf.put(basisZ.x.toFloat); buf.put(basisZ.y.toFloat); buf.put(basisZ.z.toFloat)
}
/** Convert to a string representation. */
override def toString() =
"Matrix33d(" + basisX + ", " + basisY + ", " + basisZ + ")"
}
|
JimCallahan/Graphics
|
src/org/scalagfx/math/Matrix33d.scala
|
Scala
|
apache-2.0
| 18,027 |
package net.resonious.sburb.entities
import net.resonious.sburb.game.After
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.resonious.sburb.abstracts.Vector3
import net.resonious.sburb.Sburb
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.entity.Entity
import cpw.mods.fml.common.registry.IEntityAdditionalSpawnData
import net.minecraft.client.renderer.entity.Render
import net.minecraft.client.renderer.Tessellator
import net.minecraft.world.World
import org.lwjgl.opengl.GL11
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.util.ResourceLocation
import scala.math._
import scala.util.Random
import net.resonious.sburb.game.SburbProperties
import net.minecraft.util.ResourceLocation
import net.minecraft.client.Minecraft
import net.minecraft.client.renderer.RenderHelper
import net.resonious.sburb.packets.ActivePacket
import net.resonious.sburb.abstracts.PacketPipeline
import com.xcompwiz.mystcraft.api.impl.InternalAPI
import com.xcompwiz.mystcraft.world.agedata.AgeData
import net.minecraftforge.common._
import cpw.mods.fml.common.registry._
import io.netty.buffer.ByteBuf
object ReturnNode {
// Radius of the entire epicycloid
final val radius: Double = 1.3
// kN (k numerator) is a constant prime number so that we don't get any insane dips in
// number of cusps. (completely stupid it happens anyway because of fractions dummy)
final val kN: Double = 11.0
// k = 11/7 looks a lot like the Sburb portals, so that's where we start.
final val initial_kD: Double = 7.0
// Some algebra on the kD formula down there, so that we can start out at k = 11/7.
final val initial_r: Double = radius / (1.0 + kN/initial_kD)
  // Calculates the denominator of k for a given r. If you pass in initial_r, you
// should get 7.0!
def kD(r: Double) = (kN * r) / (radius - r)
// def k(r: Double) = kN / kD(r)
var er: Double = 0.9
var eR: Double = 1.7
var ed: Double = 9
var es: Double = 0.85
}
object ReturnNodeRenderer extends Render {
import net.resonious.sburb.entities.HousePortalRenderer.VecTup
import ReturnNode._
lazy val t = Tessellator.instance
lazy val mc = Minecraft.getMinecraft
val tex = new ResourceLocation("sburb", "textures/tile_entities/houseportal.png")
final val thetaMax = 20*Pi
var printedShit = false
override def doRender(entity: Entity, x: Double, y: Double, z: Double, yaw: Float, pitch: Float) = {
val portal = entity.asInstanceOf[ReturnNode]
val r = portal.r
val color = portal.color
GL11.glPushMatrix()
GL11.glTranslated(x, y, z)
bindTexture(tex)
GL11.glColor3f(color.r, color.g, color.b)
GL11.glDisable(GL11.GL_BLEND)
mc.entityRenderer.disableLightmap(0)
RenderHelper.disableStandardItemLighting()
t.startDrawingQuads()
def epicycloid(theta: Double) = {
val k = kN / kD(r)
(
r*(k + 1)*cos(theta) - r*cos((k + 1)*theta),
r*(k + 1)*sin(theta) - r*sin((k + 1)*theta)
)
}
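    // standard epicycloid parametrization: with k = R/r, r*(k + 1) = R + r, R being the fixed-circle radius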
def otherthing(theta: Double) = {
(
es*(eR*cos(theta) + er*sin(ed*theta)),
es*(eR*sin(theta) + er*cos(ed*theta))
)
}
var pointFunc = otherthing(_)
var drawCycloid = false
var theta = 0.0
var point1: (Double, Double) = null
var point2: (Double, Double) = null
while (theta <= thetaMax) {
if (point1 == null) {
point1 = pointFunc(theta)
} else {
point2 = pointFunc(theta)
// Angle Between Points
val abp = (point2 - point1) match {
case (x12, y12) => atan2(y12, x12)
}
// Rotate 90 degrees
val pCos = cos(abp + Pi/2)
val pSin = sin(abp + Pi/2)
// Rectangle size
val s = 0.025
val topLeft = point1 match { case (x, y) => (x + s*pCos, y + s*pSin) }
val bottomLeft = point1 match { case (x, y) => (x - s*pCos, y - s*pSin) }
val topRight = point2 match { case (x, y) => (x + s*pCos, y + s*pSin) }
val bottomRight = point2 match { case (x, y) => (x - s*pCos, y - s*pSin) }
def vert(y: Double, points: (Double, Double)*) =
points foreach { _ match { case (x, z) => { t.addVertex(x, y, z) } } }
// Render both sides of rectangles
vert(0.5, topLeft, topRight, bottomRight, bottomLeft)
vert(0.5, bottomLeft, bottomRight, topRight, topLeft)
point1 = point2
}
theta += Pi/60
if (!drawCycloid && theta > thetaMax / 2) {
drawCycloid = true
theta = 0
pointFunc = epicycloid(_)
point1 = null
}
}
t.draw()
GL11.glPopMatrix()
mc.entityRenderer.enableLightmap(0)
RenderHelper.enableStandardItemLighting()
}
override def getEntityTexture(entity: Entity): ResourceLocation = {
return null
}
}
class ReturnNode(world: World) extends Portal(world) {
def r = ReturnNode.initial_r
override def entityInit(): Unit = {
}
override def setColorFromWorld(): Portal = {
val result = super.setColorFromWorld()
color = new Vector3[Float](color.r * 0.9f, color.g * 0.9f, color.b * 0.9f)
result
}
override def onCollideWithPlayer(player: EntityPlayer): Unit = {
if (Sburb.isServer) {
(posX - player.posX, posZ - player.posZ) match {
case (x, y) => if (sqrt(x*x+y*y) <= warpRadius) {
val dim = if (targetDim == 0) world.provider.dimensionId
else targetDim
val props = SburbProperties of player
if (!props.serverMode.activated)
Sburb.warpPlayer(player, dim, targetPos)
}
}
}
}
}
|
Resonious/mcsburb
|
src/main/scala/net/resonious/sburb/entities/ReturnNode.scala
|
Scala
|
mit
| 5,632 |
package coursier.cli.resolve
import java.io.PrintStream
import coursier.cli.params.OutputParams
import coursier.core.{Dependency, Resolution}
import coursier.graph.Conflict
import coursier.params.ResolutionParams
import coursier.parse.{JavaOrScalaDependency, JavaOrScalaModule}
import coursier.util.{ModuleMatcher, Print}
object Output {
private val nl = sys.props("line.separator")
def errPrintln(s: String) = Console.err.println(s)
def printDependencies(
outputParams: OutputParams,
resolutionParams: ResolutionParams,
deps: Seq[Dependency],
stdout: PrintStream,
stderr: PrintStream
): Unit =
if (outputParams.verbosity >= 1) {
stderr.println(
s" Dependencies:$nl" +
Print.dependenciesUnknownConfigs(
deps,
Map.empty,
printExclusions = outputParams.verbosity >= 2
)
)
if (resolutionParams.forceVersion.nonEmpty) {
stderr.println(" Force versions:")
val ordered = resolutionParams.forceVersion
.toVector
.sortBy { case (mod, _) =>
mod.toString
}
for ((mod, ver) <- ordered)
stderr.println(s"$mod:$ver")
}
}
def printResolutionResult(
printResultStdout: Boolean,
params: ResolveParams,
scalaVersionOpt: Option[String],
platformOpt: Option[String],
res: Resolution,
stdout: PrintStream,
stderr: PrintStream,
colors: Boolean
): Unit =
if (printResultStdout || params.output.verbosity >= 1 || params.anyTree || params.conflicts) {
val printHeader = (printResultStdout && params.output.verbosity >= 1) ||
params.output.verbosity >= 2 ||
params.anyTree
if (printHeader)
stderr.println(s" Result:")
val withExclusions = params.output.verbosity >= 1
val depsStr =
if (params.whatDependsOn.nonEmpty) {
val matchers = params.whatDependsOn
.map(_.module(
JavaOrScalaModule.scalaBinaryVersion(scalaVersionOpt.getOrElse("")),
scalaVersionOpt.getOrElse("")
))
.map(ModuleMatcher(_))
Print.dependencyTree(
res,
roots = res.minDependencies
.filter(f => matchers.exists(m => m.matches(f.module)))
.toSeq,
printExclusions = withExclusions,
reverse = true,
colors = colors
)
}
else if (params.reverseTree || params.tree)
Print.dependencyTree(
res,
printExclusions = withExclusions,
reverse = params.reverseTree,
colors = colors
)
else if (params.conflicts) {
val conflicts = Conflict(res)
val messages = Print.conflicts(conflicts)
if (messages.isEmpty) {
if ((printResultStdout && params.output.verbosity >= 1) || params.output.verbosity >= 2)
stderr.println("No conflict found.")
""
}
else
messages.mkString(nl)
}
else if (params.candidateUrls) {
val classpathOrder = params.classpathOrder.getOrElse(true)
// TODO Allow to filter on classifiers / artifact types
val urls = res.dependencyArtifacts(None, classpathOrder).map(_._3.url)
urls.mkString(nl)
}
else {
val classpathOrder = params.classpathOrder.getOrElse(false)
Print.dependenciesUnknownConfigs(
if (classpathOrder) res.orderedDependencies else res.minDependencies.toVector,
res.projectCache.mapValues { case (_, p) => p },
printExclusions = withExclusions,
reorder = !classpathOrder
)
}
if (depsStr.nonEmpty)
if (printResultStdout)
stdout.println(depsStr)
else
stderr.println(depsStr)
}
}
|
coursier/coursier
|
modules/cli/src/main/scala/coursier/cli/resolve/Output.scala
|
Scala
|
apache-2.0
| 3,921 |
package de.tototec.sbuild
import java.io.File
import scala.reflect.ClassTag
/**
* Path can be used to produce absolute [[File]] instances which are relative to the current SBuild project directory
* or the directory containing an included and explicit requested project resource.
*/
object Path {
// since SBuild 0.4.0.9002
def apply[T: ClassTag](path: String, paths: String*)(implicit project: Project): File =
Path[T](new File(path), paths: _*)
def apply(path: String, paths: String*)(implicit project: Project): File =
Path(new File(path), paths: _*)
// since SBuild 0.5.0.9004
def apply[T: ClassTag](path: File, paths: String*)(implicit project: Project): File = {
val baseDir = project.includeDirOf[T]
val file = normalize(path, baseDir)
if (paths.isEmpty) {
file
} else {
paths.foldLeft(file)((f, e) => new File(f, e))
}
}
// since SBuild 0.5.0.9004
def apply(path: File, paths: String*)(implicit project: Project): File = {
val file = normalize(path, project.projectDirectory)
if (paths.isEmpty) {
file
} else {
paths.foldLeft(file)((f, e) => new File(f, e))
}
}
def normalize(path: File, baseDir: File = new File(".")): File = {
val absFile = if (path.isAbsolute) path else new File(baseDir, path.getPath)
new File(absFile.toURI.normalize)
}
}
// since SBuild 0.3.1.9000
@deprecated("Use Paths instead.", "0.4.0.9002")
object Pathes {
def apply(paths: Seq[String])(implicit project: Project): Seq[File] =
paths.map(path => Path(path))
}
// since SBuild 0.4.0.9002
object Paths {
def apply(paths: Seq[String])(implicit project: Project): Seq[File] =
paths.map(path => Path(path))
}
|
SBuild-org/sbuild
|
de.tototec.sbuild/src/main/scala/de/tototec/sbuild/Path.scala
|
Scala
|
apache-2.0
| 1,713 |
package hu.frankdavid.diss.network
import hu.frankdavid.diss.expression.Expression
case class Calculate(expression: Expression)
|
frankdavid/diss
|
src/main/scala/hu/frankdavid/diss/network/Calculate.scala
|
Scala
|
apache-2.0
| 129 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.util
import scala.reflect.Manifest
import scala.collection.mutable.{HashMap,HashSet,ArrayBuffer}
import cc.factorie._
/** Concrete version is implemented as an inner class of @see CmdOptions.
@author Andrew McCallum */
trait CmdOption[T] {
def name: String
def shortName: Char
def helpString: String
def valueClass: Class[_]
def valueName: String
def defaultValue: T
def value: T
def setValue(v: T): Unit
def hasValue: Boolean
def invokedCount: Int
def wasInvoked = invokedCount > 0
def required: Boolean
def parse(args:Seq[String], index:Int): Int
def unParse: Seq[String] = {
if (hasValue)
value match {
case a: Seq[_] => Seq(f"--$name%s") ++ a.map(_.toString)
case "" => Seq()
case a: Any => Seq(f"--$name%s=${value.toString}%s")
}
else
Seq()
}
override def hashCode = name.hashCode
override def equals(other:Any) = name.equals(other)
}
/** A simple command-line option parsing tool.
Example usage:
<code>
def main(args:Array[String]): Unit = {
object opts extends CmdOptions {
val train = new CmdOption("train", "eng.train", "CoNLL-format file from which to get training data.")
val temperature = new CmdOption("temperature", 1.0, "Temperature for the sampler.")
val iterations = new CmdOption("iterations", 15, "Number of iterations of training.")
}
opts.parse(args)
// then later
for (i <- 0 until opts.iterations.value) ...
}
</code>
@author Andrew McCallum
*/
class CmdOptions {
private val opts = new HashMap[String,cc.factorie.util.CmdOption[_]]
def apply(key: String) = opts(key)
def get(key:String) = opts.get(key)
def size = opts.size
//def iterator = opts.iterator
var strict = true
def values = opts.values
def +=(c:cc.factorie.util.CmdOption[_]): this.type = {
if (opts.contains(c.name)) throw new Error("CmdOption "+c.name+" already exists.")
opts(c.name) = c
this
}
def -=(c:cc.factorie.util.CmdOption[_]): this.type = {
opts -= c.name
this
}
def error(msg:String): Unit = {
System.err.println(msg)
System.err.println(usageString)
System.exit(-1)
}
def usageString: String = {
val sb = new StringBuffer
sb append "Usage: "
    opts.values.foreach(o => { if (o.hasValue) sb.append("--" + o.name+"="+o.valueName) else sb.append("--" + o.name); sb.append(" ") })
sb.toString
}
/** The arguments that were unqualified by dashed options. */
private val _remaining = new ArrayBuffer[String]
def remaining: Seq[String] = _remaining
/** Parse sequence of command-line arguments. */
def parse(args:Seq[String]): Unit = {
//opts.values.foreach(o => o.invokedCount = 0) // Reset for each parse? No, might want to parse multiple times.
var index = 0
while (index < args.length) {
val origIndex = index
var invoked = false
val optsIter = opts.valuesIterator
while (optsIter.hasNext && !invoked) {
val opt = optsIter.next()
index = opt.parse(args, index)
invoked = index != origIndex
assert(invoked || index == origIndex)
}
if (!invoked) {
// Handle options not recognized by any CmdOption parse
if (strict && args(index).startsWith("-")) error("Unrecognized option "+args(index))
_remaining += args(index)
index += 1
}
}
opts.values.find(o => o.required && o.invokedCount == 0) match {
case Some(o) => error("Required CmdOption --"+o.name+" was not provided.")
case None =>
}
}
/*object CmdOption {
def apply[T](name:String, defaultValue:T, valueName:String, helpMsg:String)(implicit m:Manifest[T]): CmdOption[T] =
new CmdOption[T](name, defaultValue, valueName, helpMsg)
def apply[T](name:String, shortName:Char, defaultValue:T, valueName:String, helpMsg:String)(implicit m:Manifest[T]): CmdOption[T] =
new CmdOption[T](name, shortName, defaultValue, valueName, helpMsg)
def apply(name:String, helpMsg:String): CmdOption[Any] =
new CmdOption[Any](name, helpMsg)
}*/
class CmdOption[T](val name:String, val helpMsg:String, val required:Boolean = false)(implicit m:Manifest[T]) extends cc.factorie.util.CmdOption[T] {
def this(name:String, defaultValue:T, valueName:String, helpMsg:String, required:Boolean)(implicit m:Manifest[T]) = {
this(name, helpMsg, required)
this.valueName = valueName
value = defaultValue
this.defaultValue = defaultValue
}
def this(name:String, defaultValue:T, valueName:String, helpMsg:String)(implicit m:Manifest[T]) = this(name, defaultValue, valueName, helpMsg, false)
/*def this(name:String, defaultValue:T, helpMsg:String)(implicit m:Manifest[T]) = {
this(name, defaultValue, { val fields = m.runtimeClass.getName.split("[^A-Za-z]+"); if (fields.length > 1) fields.last else fields.head }, helpMsg)
}*/
def this(name:String, shortName:Char, defaultValue:T, valueName:String, helpMsg:String, required:Boolean)(implicit m:Manifest[T]) = {
this(name, defaultValue, valueName, helpMsg, required)
this.shortName = shortName
}
def this(name:String, shortName:Char, defaultValue:T, valueName:String, helpMsg:String)(implicit m:Manifest[T]) = this(name, shortName, defaultValue, valueName, helpMsg, false)
/*def this(name:String, shortName:Char, defaultValue:T, helpMsg:String)(implicit m:Manifest[T]) = {
this(name, defaultValue, helpMsg)
this.shortName = shortName
}*/
CmdOptions.this += this
// TODO When we have Scala 2.8 default args, add a "shortName" one-char alternative here
var shortName: Char = ' ' // space char indicates no shortName
val valueManifest: Manifest[T] = m
def valueClass: Class[_] = m.runtimeClass
var valueName: String = null
var defaultValue: T = _
var value: T = _
var invokedCount = 0
def setValue(v: T) { value = v }
def hasValue = valueClass != noClass
def noClass = classOf[Any] // This is the value of m.runtimeClass if no type is specified for T in CmdOption[T].
/** Attempt to match and process command-line option at position 'index' in 'args'.
Return the index of the next position to be processed. */
def parse(args:Seq[String], index:Int): Int = {
      if (valueClass == noClass && (args(index) == "--"+name || args(index) == "-"+shortName)) {
// support options like --help or -h (i.e. no arguments to option)
invoke
invokedCount += 1
index + 1
} else if (args(index) == "--"+name || args(index) == "-"+shortName) {
// support options like --file foo, or -f foo (or just --file or -f, in which case value is the defaultValue)
var newIndex = index + 1
      // If the next arg does not begin with regex "-.+", assume it is the value of this argument and parse it...
if (newIndex < args.length && !(args(newIndex).startsWith("-") && args(newIndex).length > 1)) newIndex = parseValue(args, newIndex)
else if (valueClass == classOf[Boolean]) this.asInstanceOf[CmdOption[Boolean]].value = true // for CmdOption[Boolean], invoking with no value arg defaults to true
// ...otherwise the value will just remain the defaultValue
invoke
invokedCount += 1
newIndex
} else if (args(index).startsWith("--"+name+"=")) {
// support --file=foo
// modified on 1/21/2012 to support --file=foo=bar --brian
val rightOfEq = args(index).drop(name.size + 3)
parseValue(List(rightOfEq), 0)
invoke
invokedCount += 1
index + 1
} else index
}
/** Called after this CmdOption has been matched and value has been parsed. */
def invoke(): Unit = {}
/** After we have found a match, request that argument(s) to command-line option be parsed.
Return the index position that should be processed next.
This method allows one option to possibly consume multiple args, (in contrast with parseValue(String).) */
protected def parseValue(args:Seq[String], index:Int): Int = {
//println("CmdOption "+valueManifest)
//val listIntManifest = Manifest.classType[List[Int]](classOf[List[Int]], Manifest.classType[Int](classOf[Int]))
//println("Manifest "+listIntManifest)
//println("CmdOption == "+(valueManifest == listIntManifest))
//println("typeArgs "+(valueManifest.typeArguments))
//println("typeArgs1 == "+((valueClass eq classOf[List[_]])))
//if (valueManifest.typeArguments.size > 0) println("typeArgs2 == "+((valueManifest.typeArguments(0).runtimeClass eq classOf[Int])))
//println("typeArgs == "+((valueClass eq classOf[List[_]]) && (valueManifest.typeArguments(0).runtimeClass eq classOf[Int])))
if ((valueClass eq classOf[List[_]]) && (valueManifest.typeArguments(0).runtimeClass eq classOf[String])) {
// Handle CmdOpt whose value is a List[String]
if (args(index).contains(',')) {
// Handle the case in which the list is comma-separated
value = args(index).split(",").toList.asInstanceOf[T]
index + 1
} else {
// Handle the case in which the list is space-separated
var i = index
val listValue = new scala.collection.mutable.ListBuffer[String]
        // Read arguments until we find another CmdOption, which must begin either with regex "--" or "-.+"
while (i < args.length && !args(i).startsWith("--") && !(args(i).startsWith("-") && args(i).length > 1)) {
listValue += args(i)
i += 1
}
value = listValue.toList.asInstanceOf[T]
i
}
} else if ((valueClass eq classOf[List[_]]) && (valueManifest.typeArguments(0).runtimeClass eq classOf[Int])) {
      // Handle CmdOpt whose value is a List[Int]
if (args(index).contains(',')) {
// Handle the case in which the list is comma-separated
value = args(index).split(",").toList.map(_.toInt).asInstanceOf[T]
index + 1
} else {
// Handle the case in which the list is space-separated
var i = index
val listValue = new scala.collection.mutable.ListBuffer[Int]
        // Read arguments until we find another CmdOption, which must begin either with regex "--" or "-.+"
while (i < args.length && !args(i).startsWith("--") && !(args(i).startsWith("-") && args(i).length > 1)) {
listValue += args(i).toInt
i += 1
}
value = listValue.toList.asInstanceOf[T]
i
}
} else if ((valueClass eq classOf[List[_]]) && (valueManifest.typeArguments(0).runtimeClass eq classOf[Double])) {
      // Handle CmdOpt whose value is a List[Double]
if (args(index).contains(',')) {
// Handle the case in which the list is comma-separated
value = args(index).split(",").toList.map(_.toDouble).asInstanceOf[T]
index + 1
} else {
// Handle the case in which the list is space-separated
var i = index
val listValue = new scala.collection.mutable.ListBuffer[Double]
          // Read arguments until we find another CmdOption, which must begin with "--" or match the regex "-.+"
while (i < args.length && !args(i).startsWith("--") && !(args(i).startsWith("-") && args(i).length > 1)) {
listValue += args(i).toDouble
i += 1
}
value = listValue.toList.asInstanceOf[T]
i
}
} else {
parseValue(args(index))
index + 1
}
}
/** Parse a value from a single arg */
protected def parseValue(valueStr:String): Unit = {
// TODO Is there a better way to do this?
if (valueClass eq classOf[Int]) value = Integer.parseInt(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Float]) value = java.lang.Float.parseFloat(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Double]) value = java.lang.Double.parseDouble(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Short]) value = java.lang.Short.parseShort(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Long]) value = java.lang.Long.parseLong(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Boolean]) value = java.lang.Boolean.parseBoolean(valueStr).asInstanceOf[T]
else if (valueClass eq classOf[Char]) value = valueStr.apply(0).asInstanceOf[T]
else if (valueClass eq classOf[String]) value = valueStr.asInstanceOf[T]
// Support comma-separated multiple values, e.g. --train=eng.train,eng.testa
//else if (valueManifest <:< Manifest.classType[List[String]](classOf[List[String]], Manifest.classType[String](classOf[String]))) value = valueStr.split(",").toList.asInstanceOf[T] // Now handled above.
//else if (valueManifest <:< Manifest.classType[List[Int]](classOf[List[Int]], Manifest.classType[Int](classOf[Int]))) value = valueStr.split(',').map(Integer.parseInt(_)).toList.asInstanceOf[T]
else throw new Error("CmdOption does not handle value of type "+valueManifest)
// TODO Add an option that will run the interpreter on some code
}
// TODO Format long help messages more nicely.
def helpString: String = {
val defaultValueString = if(defaultValue == null) "null" else defaultValue match { case d:Seq[_] => d.mkString(","); case _ => defaultValue }
if (valueClass != noClass) "--%-15s %s\n".format(name+"="+valueName, helpMsg+" Default="+defaultValueString)
else "--%-15s %s\n".format(name, helpMsg)
}
}
}
/** Default CmdOption collection that should be included in most CmdOptions. */
trait DefaultCmdOptions extends CmdOptions {
new CmdOption("help", "", "STRING", "Print this help message.") {
override def invoke = {
DefaultCmdOptions.this.values.foreach(o => println(o.helpString))
System.exit(0)
}
}
new CmdOption("version", "", "STRING", "Print version numbers.") {
    override def invoke = {
throw new Error("Not yet implemented.") // TODO How to manage version strings?
//println("FACTORIE version "+factorieVersionString)
// TODO How do I print the Scala and JVM version numbers?
System.exit(0)
}
}
new CmdOption("config", "", "FILE", "Read command option values from a file") {
    override def invoke = {
if (this.value != "") {
import scala.io.Source
val contents = Source.fromFile(new java.io.File(this.value)).mkString
val args = contents.split("\\s+")
DefaultCmdOptions.this.parse(args)
}
}
}
}
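// A minimal usage sketch (hypothetical option names; it relies only on the
// CmdOption constructor and the parse() method already used by --config above).
object CmdOptionUsageExample {
  object MyOpts extends DefaultCmdOptions {
    val file    = new CmdOption("file", "input.txt", "FILE", "Input file to process.")
    val workers = new CmdOption("workers", 4, "INT", "Number of worker threads.")
    val tags    = new CmdOption("tags", List("a", "b"), "TAG[,TAG...]", "Tags, comma- or space-separated.")
  }
  def main(args: Array[String]): Unit = {
    MyOpts.parse(args) // e.g. --file=data.txt --workers 8 --tags x,y,z
    println(MyOpts.file.value + " " + MyOpts.workers.value + " " + MyOpts.tags.value)
  }
}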
|
iesl/fuse_ttl
|
src/factorie-factorie_2.11-1.1/src/main/scala/cc/factorie/util/CmdOption.scala
|
Scala
|
apache-2.0
| 15,432 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.runtime.allocation
import cogx.cogmath.collection.{IdentityHashSet, IdentityHashMap}
import cogx.compiler.codegenerator.KernelCircuit
import cogx.parameters.Cog
import cogx.platform.cpumemory.BufferType
import cogx.platform.opencl.{OpenCLDeviceKernel, OpenCLAbstractKernel, OpenCLCpuKernel, OpenCLDevice}
import cogx.platform.types.{AbstractKernel, FieldType, VirtualFieldRegister}
import scala.collection.mutable.ArrayBuffer
/**
* An allocator of shared latches that are guaranteed to produce correct results under the assumption
* that kernels are executed in the order they are enqueued with no overlap.
*
* @author Dick Carter
*/
class InOrderSharedLatchAllocator(kernelEnqueueOrder: Seq[OpenCLAbstractKernel]) extends SharedLatchAllocator {
/** Process the kernel circuit from inputs to outputs, looking for latch
* sharing opportunities.
*/
def calcSharedLatches(circuit: KernelCircuit, device: OpenCLDevice, bufferType: BufferType,
requiresLatch: (VirtualFieldRegister) => Boolean) = {
/** Existing SharedLatches in a collection that permits intelligent sharing choices. */
val latches = new SharedLatchesByUtilization
/** Map from a kernel to the set of kernels guaranteed to have executed before it. */
val precursors =
new IdentityHashMap[AbstractKernel, IdentityHashSet[AbstractKernel]]()
/** Queue of kernels ready to have their outputs assigned to a latch. */
val readyKernels = new IdentityHashSet[AbstractKernel]()
/** Map from a kernel to the number of input kernels yet to be processed. */
val numPendingInputKernels = new IdentityHashMap[AbstractKernel, Int]()
/** Tally of buffers optimized away through sharing */
var buffersRemoved = 0
    /** Top-of-KernelCircuit kernels. These 'seal' the latch they share.
      * Kernels whose outputs drive recurrences have no apparent sinks. */
val rootKernels = new IdentityHashSet[AbstractKernel]() {
circuit.roots.foreach(this.add(_))
}
    /** Device kernels that have already executed. */
val executedDeviceKernels = new IdentityHashSet[AbstractKernel]()
    /** All kernels that have already executed (tracked only for the legacy buffer-sharing check). */
lazy val executedKernels = new IdentityHashSet[AbstractKernel]()
/** Leaves of the kernel DAG- the Constant and Recurrence kernels should not be part of the
* returned map- they will be given flip-flops or unshared latches by the caller of this routine.
* Any device kernels (as is possible with the 0-input GPUOperators favored by the Profiler) become
* "ready kernels".
*/
val leaves = new IdentityHashSet[AbstractKernel]()
circuit.leaves.foreach( _ match {
case zeroInputDeviceKernel: OpenCLDeviceKernel =>
precursors(zeroInputDeviceKernel) = new IdentityHashSet[AbstractKernel]()
readyKernels += zeroInputDeviceKernel
case otherLeaf => leaves.add(otherLeaf)
})
/** Are all the kernels that use the latch (the `lastConsumers`) guaranteed
* to have executed (are they in the precursors set of this kernel)?
*/
def latchOKToUse(latch: SharedLatch, kernel: AbstractKernel) = {
if (latch.isSealed)
false
else {
val kernelPrecursors = precursors(kernel)
val properOKToUse =
if (kernel.isInstanceOf[OpenCLDeviceKernel]) {
latch.lastConsumers.find(kernelUse =>
!kernelPrecursors.contains(kernelUse) && !executedDeviceKernels.contains(kernelUse)) match {
case Some(unexecutedKernel) => false
case None => true
}
}
else {
latch.lastConsumers.find(kernelUse =>
!kernelPrecursors.contains(kernelUse)) match {
case Some(unexecutedKernel) => false
case None => true
}
}
if (Cog.checkLegacyBufferSharing) {
val problematicOKToUse =
latch.lastConsumers.find(kernelUse => !executedKernels.contains(kernelUse)) match {
case Some(unexecutedKernel) => false
case None => true
}
if (problematicOKToUse && !properOKToUse)
println("Warning: earlier Cog versions 4.1.47 thru 4.3.5 may have improperly run kernel " + kernel)
}
properOKToUse
}
}
    /** Propagate some kernel info to its sinks, then remove it from the maps */
def postProcessKernel(kernel: AbstractKernel) = {
// In prep for updating sinks' precursors, add this kernel to its set
val precursorsPlusMe =
precursors.getOrElseUpdate(kernel, new IdentityHashSet[AbstractKernel]())
precursorsPlusMe += kernel
var myPrecursorsPassedOnward = false
kernel.outputs.foreach(output => {
output.sinks.foreach(sink => {
// Update the precursor sets for this kernel's sinks
        // The following is a performance optimization: the precursor set can simply be
        // moved to a single sink without copying.
precursors.get(sink) match {
case Some(sinkPrecursors) =>
sinkPrecursors.putAll(precursorsPlusMe)
case None =>
if (!myPrecursorsPassedOnward) {
precursors(sink) = precursorsPlusMe
myPrecursorsPassedOnward = true
}
else
precursors(sink) = precursorsPlusMe.copy
}
// decrease the number of the sinks' pending inputs and maybe mark ready
val numSinkInputs = sink.inputs.length
var numPendingInputs =
numPendingInputKernels.getOrElseUpdate(sink, numSinkInputs)
require(numPendingInputs > 0, "InOrderSharedLatchAllocator: compiler internal error.")
// Optimizers should have consolidated all uses of a given input to one, but just in case...
var inputIndex = 0
while (numPendingInputs > 0 && inputIndex < numSinkInputs) {
if (output eq sink.inputs(inputIndex))
numPendingInputs -= 1
inputIndex += 1
}
numPendingInputKernels(sink) = numPendingInputs
if (numPendingInputs == 0)
readyKernels += sink // add sink to readyKernels
})
})
// Remove this kernel from the precursor map since it's no longer needed
precursors.remove(kernel)
if (kernel.isInstanceOf[OpenCLDeviceKernel])
executedDeviceKernels.add(kernel)
// Only maintain executedKernels if we care about warning the user of past misbehaviors
if (Cog.checkLegacyBufferSharing)
executedKernels.add(kernel)
}
/** Since GPU kernels currently do not mark the cpu-portion of their output
* buffer invalid when run, any buffer looked at by a cpu kernel cannot be
* further shared by a downstream kernel. The problem is that when the
* cpu kernel runs, it will do a read() of the input, which will mark the
* buffer's cpu portion valid. If that buffer is later written by a device
* kernel that shares the buffer, the updated value will never be seen by
* another cpu kernel or probe (the read of the new GPU data will never
* be triggered, because the buffer's cpu data is already marked valid).
*
* We need to further study whether the asynchronous probe mechanism
* can force a premature validation of the CPU portion of buffers, which
* would present false data to CPU kernel readers (this is not a buffer
* sharing issue). XXX -RJC
* */
def drivesCPUKernels(virtualRegister: VirtualFieldRegister) =
      virtualRegister.sinks.exists(_.isInstanceOf[OpenCLCpuKernel])
    /** Once this kernel is permitted to share a latch, should it prohibit
      * other kernels from being added to that latch's kernel pool?
      */
def sealLatch(virtualRegister: VirtualFieldRegister) =
rootKernels.contains(virtualRegister.source) ||
virtualRegister.sinks.length == 0 ||
virtualRegister.probed ||
drivesCPUKernels(virtualRegister) ||
virtualRegister.source.isInstanceOf[OpenCLCpuKernel]
// First process inputs without allocating any latches
leaves.foreach(input => postProcessKernel(input))
    // Main loop: kernels are processed in the order in which they will be enqueued (skipping
    // leaf kernels); each kernel is reached only after all of its inputs have been processed,
    // at which point its precursor set is known to be complete.
kernelEnqueueOrder.filter(!leaves.contains(_)).foreach( kernel => {
require(readyKernels.contains(kernel),
"InOrderSharedLatchAllocator: kernel launch order doesn't respect kernel DAG dependencies:" + kernel)
readyKernels -= kernel
// Kernels driving no other kernels are dangerous for this approach, since
// that suggests the output buffer assigned to it can be reused immediately.
// These kernels are generally the top-of-circuit root kernels that we
// wouldn't want to have sharing buffers
kernel.outputs.foreach( virtualRegister => {
if (requiresLatch(virtualRegister)) {
// Any latch of the proper size?
val candidateLatches = latches.get(virtualRegister.fieldType)
candidateLatches.find(latchOKToUse(_,kernel)) match {
case Some(latch) => // Found a latch that can be used by this kernel
latch.addVirtualRegister(virtualRegister, sealLatch(virtualRegister))
buffersRemoved += 1
case None => // No latch of proper size can be used; make a new one
val newLatch = new SharedLatch(device, virtualRegister, sealLatch(virtualRegister), bufferType)
latches.addLatch(virtualRegister.fieldType, newLatch)
}
}
})
postProcessKernel(kernel)
})
if (Cog.verboseOptimizer) {
latches.values.foreach(sharedLatch => {
print("Field Register(Latch) (" + sharedLatch.register.master + ", " +
sharedLatch.register.slave + ") ")
println(sharedLatch.virtualRegisters.mkString("; "))
})
}
if (Cog.verboseOptimizer)
println(" *** BufferSharing: " + buffersRemoved + " buffers removed.")
// Convert the variously grouped SharedLatches into a map of Kernel -> SharedLatch
val answer = new IdentityHashMap[VirtualFieldRegister, SharedLatch]() {
latches.values.foreach(latch =>
latch.virtualRegisters.foreach(register => this.put(register, latch))
)
}
answer
}
}
|
hpe-cct/cct-core
|
src/main/scala/cogx/runtime/allocation/InOrderSharedLatchAllocator.scala
|
Scala
|
apache-2.0
| 11,269 |
/*
* Copyright (c) 2018-2019 Brian Scully
*
*/
package akka.contrib.persistence.mongodb
import akka.NotUsed
import akka.persistence.{AtomicWrite, PersistentRepr}
import akka.stream.scaladsl._
import com.mongodb.ErrorCategory
import org.mongodb.scala._
import org.mongodb.scala.bson.{BsonDocument, BsonValue}
import org.mongodb.scala.model.Aggregates.{`match`, `group`}
import org.mongodb.scala.model.Filters._
import org.mongodb.scala.model.Projections._
import org.mongodb.scala.model.Sorts._
import org.mongodb.scala.model.Updates._
import org.mongodb.scala.model.{Accumulators, BulkWriteOptions, InsertOneModel, UpdateOptions}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConverters._
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
class ScalaDriverPersistenceJournaller(val driver: ScalaMongoDriver) extends MongoPersistenceJournallingApi {
import RxStreamsInterop._
import driver.ScalaSerializers._
import driver.{materializer, pluginDispatcher}
protected val logger: Logger = LoggerFactory.getLogger(getClass)
private[this] val writeConcern = driver.journalWriteConcern
private[this] def journal: Future[driver.C] = driver.journal.map(_.withWriteConcern(driver.journalWriteConcern))
private[this] def realtime: Future[driver.C] = driver.realtime
private[this] def metadata: Future[driver.C] = driver.metadata.map(_.withWriteConcern(driver.metadataWriteConcern))
private[this] def journalRangeQuery(pid: String, from: Long, to: Long) =
and(
equal(PROCESSOR_ID, pid),
gte(FROM, from),
lte(TO, to)
)
def journalRange(pid: String, from: Long, to: Long, max: Int): Source[Event, NotUsed] = {
val journal = driver.getJournal(pid)
val source =
Source
.future(journal)
.flatMapConcat(
_.find(journalRangeQuery(pid, from, to))
.sort(ascending(TO))
.projection(include(EVENTS))
.asAkka
.take(max.toLong)
)
val flow = Flow[BsonDocument]
.mapConcat[Event](e =>
Option(e.get(EVENTS)).filter(_.isArray).map(_.asArray).map(_.getValues.asScala.toList.collect {
case d: BsonDocument => driver.deserializeJournal(d)
}).getOrElse(Seq.empty[Event])
)
.filter(_.sn >= from)
.filter(_.sn <= to)
source.via(flow)
}
private[this] def buildBatch(writes: Seq[AtomicWrite]): Seq[Try[BsonDocument]] =
writes.map(aw => Try(driver.serializeJournal(Atom[BsonValue](aw, driver.useLegacySerialization))))
private[this] def doBatchAppend(batch: Seq[Try[BsonDocument]], collection: Future[driver.C]): Future[Seq[Try[BsonDocument]]] = {
if (batch.forall(_.isSuccess)) {
val collected: Seq[InsertOneModel[driver.D]] = batch.collect { case Success(doc) => InsertOneModel(doc) }
collection.flatMap(_.withWriteConcern(writeConcern).bulkWrite(collected, new BulkWriteOptions().ordered(true))
.toFuture()
.map(_ => batch))
} else {
Future.sequence(batch.map {
case Success(document: BsonDocument) =>
collection.flatMap(_.withWriteConcern(writeConcern).insertOne(document).toFuture().map(_ => Success(document)))
case f: Failure[_] =>
Future.successful(Failure[BsonDocument](f.exception))
})
}
}
override def batchAppend(writes: Seq[AtomicWrite]): Future[Seq[Try[Unit]]] = {
val batchFuture = if (driver.useSuffixedCollectionNames) {
val fZero = Future.successful(Seq.empty[Try[BsonDocument]])
// this should guarantee that futures are performed sequentially...
writes
.groupBy(write => driver.getJournalCollectionName(write.persistenceId))
.foldLeft(fZero) { case (future, (_, hunk)) =>
for {
prev <- future
batch = buildBatch(hunk)
next <- doBatchAppend(batch, driver.journal(hunk.head.persistenceId))
} yield prev ++ next
}
} else {
val batch = buildBatch(writes)
doBatchAppend(batch, journal)
}
if (driver.realtimeEnablePersistence)
batchFuture.andThen {
case Success(batch) =>
val f = doBatchAppend(batch, realtime)
f.onComplete {
case scala.util.Failure(t) =>
logger.error("Error during write to realtime collection", t)
case _ => ()
}
f
}.map(squashToUnit)
else
batchFuture.map(squashToUnit)
}
private[this] def setMaxSequenceMetadata(persistenceId: String, maxSequenceNr: Long): Future[Unit] = {
for {
md <- metadata
_ <- md.updateOne(
equal(PROCESSOR_ID, persistenceId),
combine(
setOnInsert(PROCESSOR_ID, persistenceId),
setOnInsert(MAX_SN, maxSequenceNr)
),
new UpdateOptions().upsert(true)
).toFuture()
_ <- md.updateOne(
and(equal(PROCESSOR_ID, persistenceId), lte(MAX_SN, maxSequenceNr)),
set(MAX_SN, maxSequenceNr),
new UpdateOptions().upsert(false)
).toFuture()
} yield ()
}
private[this] def findMaxSequence(persistenceId: String, maxSequenceNr: Long): Future[Option[Long]] = {
def performAggregation(j: MongoCollection[BsonDocument]): Future[Option[Long]] = {
j.aggregate(
`match`(and(equal(PROCESSOR_ID,persistenceId), lte(TO, maxSequenceNr))) ::
group(s"$$$PROCESSOR_ID", Accumulators.max("max", s"$$$TO")) ::
Nil
).toFuture()
.map(_.headOption)
.map(_.flatMap(l => Option(l.asDocument().get("max")).filter(_.isInt64).map(_.asInt64).map(_.getValue)))
}
for {
j <- driver.getJournal(persistenceId)
rez <- performAggregation(j)
} yield rez
}
override def deleteFrom(persistenceId: String, toSequenceNr: Long): Future[Unit] = {
for {
journal <- driver.getJournal(persistenceId)
ms <- findMaxSequence(persistenceId, toSequenceNr)
_ <- ms.fold(Future.successful(()))(setMaxSequenceMetadata(persistenceId, _))
      //first remove the docs whose events are all in range; this avoids leaving docs with from > to or setting the same 'from' on several docs
docWithAllEventsToRemove = and(equal(PROCESSOR_ID, persistenceId), lte(TO, toSequenceNr))
removed <- journal.deleteMany(docWithAllEventsToRemove).toFuture()
      //then update the (potential) doc that should have only some (not all) of its events removed
      //note the query: we exclude documents with to < toSequenceNr -- they should have been deleted just above --
      // which avoids a potential race condition that would leave from > to and several documents with the same 'from'
query = journalRangeQuery(persistenceId, toSequenceNr, toSequenceNr)
update = combine(
pull(EVENTS,
and(
equal(PROCESSOR_ID, persistenceId),
lte(SEQUENCE_NUMBER, toSequenceNr)
)
),
set(FROM, toSequenceNr + 1)
)
_ <- journal.withWriteConcern(writeConcern).updateMany(query, update, new UpdateOptions().upsert(false)).toFuture().recover {
case we : MongoWriteException if we.getError.getCategory == ErrorCategory.DUPLICATE_KEY =>
// Duplicate key error:
          // this is fine (the work is already done): it can only occur if another thread ran the same deleteFrom() with the same args and completed just before this one
          // (dup key => same (pid, from, to) => same targeted "from" in the mongo document => same toSequenceNr value)
}
} yield {
if (driver.useSuffixedCollectionNames && driver.suffixDropEmpty && removed.wasAcknowledged())
driver.removeEmptyJournal(journal)
.map(_ => driver.removeJournalInCache(persistenceId))
()
}
}
private[this] def maxSequenceFromMetadata(pid: String)(previous: Option[Long]): Future[Option[Long]] = {
previous.fold(
metadata.flatMap(_.find(BsonDocument(PROCESSOR_ID -> pid))
.projection(BsonDocument(MAX_SN -> 1))
.first()
.toFutureOption()
.map(d => d.flatMap(l => Option(l.asDocument().get(MAX_SN)).filter(_.isInt64).map(_.asInt64).map(_.getValue)))))(l => Future.successful(Option(l)))
}
override def maxSequenceNr(pid: String, from: Long): Future[Long] = {
val journal = driver.getJournal(pid)
journal.flatMap(_.find(BsonDocument(PROCESSOR_ID -> pid))
.projection(BsonDocument(TO -> 1))
.sort(BsonDocument(
          // sorting by PROCESSOR_ID first is a workaround for DocumentDB, which would otherwise sort on the compound index due to different optimizations; this has no negative effect on MongoDB
PROCESSOR_ID -> 1,
TO -> -1
))
.first()
.toFutureOption()
.map(d => d.flatMap(a => Option(a.asDocument().get(TO)).filter(_.isInt64).map(_.asInt64).map(_.getValue)))
.flatMap(maxSequenceFromMetadata(pid)(_))
.map(_.getOrElse(0L)))
}
override def replayJournal(pid: String, from: Long, to: Long, max: Long)(replayCallback: PersistentRepr => Unit): Future[Unit] =
if (max == 0L) Future.successful(())
else {
val maxInt = max.toIntWithoutWrapping
journalRange(pid, from, to, maxInt).map(_.toRepr).runWith(Sink.foreach[PersistentRepr](replayCallback)).map(_ => ())
}
}
|
scullxbones/akka-persistence-mongo
|
scala/src/main/scala/akka/contrib/persistence/mongodb/ScalaDriverPersistenceJournaller.scala
|
Scala
|
apache-2.0
| 9,318 |
package cn.gridx.scala.spray.routing
import akka.actor.{Actor, ActorLogging}
import spray.can.Http
import spray.http.{HttpEntity, HttpRequest, HttpResponse}
import spray.http.HttpMethods.GET
import spray.http.MediaTypes._
/**
* Created by tao on 9/5/16.
*/
class HttpRequestService extends Actor with ActorLogging {
def actorRefFactory = context
override def receive = {
// when a new connection comes in we register ourselves as the connection handler
      // this registration step is required
case _: Http.Connected =>
log.info("收到 `Http.Connected` ")
sender() ! Http.Register(self)
    // construct an HttpResponse for each incoming request
case HttpRequest(GET, path, headers, entity, protocol) =>
val msg = s"收到GET请求, \\n\\theaders = ${headers}, entity = ${entity}, protocol = ${protocol}"
log.info(msg)
sender() ! GenHttpResp(msg)
}
def GenHttpResp(msg: String) = HttpResponse(
entity = HttpEntity(`text/html`,
<html>
<body>
<h1>Header 1</h1>
          <h2>{msg}</h2>
</body>
</html>
.toString()
)
)
}
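// A minimal boot sketch (hypothetical system name and port; it uses the
// standard spray-can binding pattern rather than anything defined above).
object HttpRequestServiceBoot extends App {
  import akka.actor.{ActorSystem, Props}
  import akka.io.IO
  implicit val system = ActorSystem("http-demo")
  // The service actor receives Http.Connected and HttpRequest messages directly.
  val service = system.actorOf(Props[HttpRequestService], "request-service")
  IO(Http) ! Http.Bind(service, interface = "localhost", port = 8080)
}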
|
TaoXiao/Scala
|
spray/src/main/scala/cn/gridx/scala/spray/routing/HttpRequestService.scala
|
Scala
|
apache-2.0
| 1,108 |
package com.github.agaro1121.models.leaguerules
case class LeagueRule(
id: Int,
name: String,
description: String
)
case class LeagueRules(leagueRules: List[LeagueRule])
|
agaro1121/PathOfExileApiClient
|
src/main/scala/com/github/agaro1121/models/leaguerules/LeagueRules.scala
|
Scala
|
mit
| 178 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable.{GREATER_THAN, GREATER_THAN_OR_EQUAL, IF}
import org.apache.flink.table.planner.plan.utils.SetOpRewriteUtil.replicateRows
import org.apache.calcite.plan.RelOptRule.{any, operand}
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall}
import org.apache.calcite.rel.core.{Intersect, RelFactories}
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.util.Util
import scala.collection.JavaConversions._
/**
* Replaces logical [[Intersect]] operator using a combination of union all, aggregate
* and table function.
*
* Original Query :
* {{{
* SELECT c1 FROM ut1 INTERSECT ALL SELECT c1 FROM ut2
* }}}
*
* Rewritten Query:
* {{{
* SELECT c1
* FROM (
* SELECT c1, If (vcol_left_cnt > vcol_right_cnt, vcol_right_cnt, vcol_left_cnt) AS min_count
* FROM (
* SELECT
* c1,
* count(vcol_left_marker) as vcol_left_cnt,
* count(vcol_right_marker) as vcol_right_cnt
* FROM (
* SELECT c1, true as vcol_left_marker, null as vcol_right_marker FROM ut1
* UNION ALL
* SELECT c1, null as vcol_left_marker, true as vcol_right_marker FROM ut2
* ) AS union_all
* GROUP BY c1
* )
* WHERE vcol_left_cnt >= 1 AND vcol_right_cnt >= 1
* )
* )
* LATERAL TABLE(replicate_row(min_count, c1)) AS T(c1)
* }}}
*
* Only handle the case of input size 2.
*/
class RewriteIntersectAllRule extends RelOptRule(
operand(classOf[Intersect], any),
RelFactories.LOGICAL_BUILDER,
"RewriteIntersectAllRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val intersect: Intersect = call.rel(0)
intersect.all && intersect.getInputs.size() == 2
}
override def onMatch(call: RelOptRuleCall): Unit = {
val intersect: Intersect = call.rel(0)
val left = intersect.getInput(0)
val right = intersect.getInput(1)
val fields = Util.range(intersect.getRowType.getFieldCount)
// 1. add marker to left rel node
val leftBuilder = call.builder
val boolType = leftBuilder.getTypeFactory.createSqlType(SqlTypeName.BOOLEAN)
val leftWithMarker = leftBuilder
.push(left)
.project(
leftBuilder.fields(fields) ++ Seq(
leftBuilder.alias(leftBuilder.literal(true), "vcol_left_marker"),
leftBuilder.alias(
leftBuilder.getRexBuilder.makeNullLiteral(boolType), "vcol_right_marker")))
.build()
// 2. add marker to right rel node
val rightBuilder = call.builder
val rightWithMarker = rightBuilder
.push(right)
.project(
rightBuilder.fields(fields) ++ Seq(
rightBuilder.alias(
rightBuilder.getRexBuilder.makeNullLiteral(boolType), "vcol_left_marker"),
rightBuilder.alias(rightBuilder.literal(true), "vcol_right_marker")))
.build()
// 3. union and aggregate
val builder = call.builder
builder
.push(leftWithMarker)
.push(rightWithMarker)
.union(true)
.aggregate(
builder.groupKey(builder.fields(fields)),
builder.count(false, "vcol_left_cnt", builder.field("vcol_left_marker")),
builder.count(false, "vcol_right_cnt", builder.field("vcol_right_marker")))
.filter(builder.and(
builder.call(
GREATER_THAN_OR_EQUAL,
builder.field("vcol_left_cnt"),
builder.literal(1)),
builder.call(
GREATER_THAN_OR_EQUAL,
builder.field("vcol_right_cnt"),
builder.literal(1))))
.project(Seq(builder.call(
IF,
builder.call(
GREATER_THAN,
builder.field("vcol_left_cnt"),
builder.field("vcol_right_cnt")),
builder.field("vcol_right_cnt"),
builder.field("vcol_left_cnt"))) ++ builder.fields(fields))
// 4. add table function to replicate rows
val output = replicateRows(builder, intersect.getRowType, fields)
call.transformTo(output)
}
}
object RewriteIntersectAllRule {
val INSTANCE: RelOptRule = new RewriteIntersectAllRule
}
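// A minimal sketch of how such a rule is typically made available to the
// planner (assumes the standard Calcite RuleSets helper; in Flink this
// normally happens inside the planner's predefined logical rule sets rather
// than in user code).
object RewriteIntersectAllRuleUsage {
  import org.apache.calcite.tools.{RuleSet, RuleSets}
  val logicalRewriteRules: RuleSet = RuleSets.ofList(RewriteIntersectAllRule.INSTANCE)
}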
|
apache/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/logical/RewriteIntersectAllRule.scala
|
Scala
|
apache-2.0
| 5,069 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.sample
import org.apache.spark.rdd._
import scala.reflect.ClassTag
import scala.util.Random
/**
* Class that represents the entry content and label of a data point.
*
* @param label for this data point.
* @param entry content for this data point.
*/
case class LabeledLine[L: ClassTag, T: ClassTag](label: L, entry: T)
/**
* Data Splitter for ML algorithms. It randomly labels an input RDD with user
* specified percentage for each category.
*
* TODO: this class doesn't really belong in the Engine but it is shared code that both frame-plugins and graph-plugins need access to
*
* @param percentages A double array stores percentages.
* @param seed Random seed for random number generator.
*/
class MLDataSplitter(percentages: Array[Double], labels: Array[String], seed: Int) extends Serializable {
require(percentages.forall(p => p > 0d), "MLDataSplitter: Some percentage numbers are negative or zero.")
require(Math.abs(percentages.sum - 1.0d) < 0.000000001d, "MLDataSplitter: Sum of percentages does not equal 1.")
require(labels.length == percentages.length, "Number of class labels differs from number of percentages given.")
var cdf: Array[Double] = percentages.scanLeft(0.0d)(_ + _)
cdf = cdf.drop(1)
// clamp the final value to 1.0d so that we cannot get rare (but in big data, still possible!)
// occurrences where the sample value falls between the gap of the summed input probabilities and 1.0d
cdf(cdf.length - 1) = 1.0d
/**
* Randomly label each entry of an input RDD according to user specified percentage
* for each category.
*
* @param inputRDD RDD of type T.
*/
def randomlyLabelRDD[T: ClassTag](inputRDD: RDD[T]): RDD[LabeledLine[String, T]] = {
// generate auxiliary (sample) RDD
val auxiliaryRDD: RDD[(T, Double)] = inputRDD.mapPartitionsWithIndex({ case (i, p) => addRandomValues(seed, i, p) })
val labeledRDD = auxiliaryRDD.map { p =>
val (line, sampleValue) = p
val label = labels.apply(cdf.indexWhere(_ >= sampleValue))
LabeledLine(label, line)
}
labeledRDD
}
private def addRandomValues[T: ClassTag](seed: Int, index: Int, it: Iterator[T]): Iterator[(T, Double)] = {
val pseudoRandomGenerator = new Random(seed + index)
it.map(x => (x, pseudoRandomGenerator.nextDouble()))
}
}
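// A minimal usage sketch (hypothetical SparkContext and input path; only the
// MLDataSplitter API defined above is assumed).
object MLDataSplitterExample {
  import org.apache.spark.SparkContext
  def split(sc: SparkContext): Unit = {
    val lines: RDD[String] = sc.textFile("data.txt") // hypothetical input
    // 80/20 train/test split, reproducible via the fixed seed.
    val splitter = new MLDataSplitter(Array(0.8, 0.2), Array("train", "test"), seed = 42)
    val labeled: RDD[LabeledLine[String, String]] = splitter.randomlyLabelRDD(lines)
    val trainSet = labeled.filter(_.label == "train").map(_.entry)
    println(s"training examples: ${trainSet.count()}")
  }
}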
|
dmsuehir/spark-tk
|
sparktk-core/src/main/scala/org/trustedanalytics/sparktk/frame/internal/ops/sample/MLDataSplitter.scala
|
Scala
|
apache-2.0
| 3,066 |
package com.rasterfoundry.backsplash.server
import com.rasterfoundry.backsplash._
import com.rasterfoundry.backsplash.RenderableStore.ToRenderableStoreOps
import com.rasterfoundry.backsplash.error._
import com.rasterfoundry.database.{SceneDao, SceneToLayerDao}
import com.rasterfoundry.datamodel.{BandOverride, SingleBandOptions}
import com.rasterfoundry.common._
import com.rasterfoundry.common.color.ColorCorrect
import com.rasterfoundry.backsplash.{
BacksplashGeotiff,
BacksplashImage,
RenderableStore
}
import cats.data.{NonEmptyList => NEL}
import cats.effect.{ContextShift, IO}
import cats.implicits._
import com.typesafe.scalalogging.LazyLogging
import doobie._
import doobie.implicits._
import geotrellis.vector.{Polygon, Projected}
import java.net.URI
import java.util.UUID
import com.colisweb.tracing.TracingContext
class RenderableStoreImplicits(xa: Transactor[IO])(
implicit contextShift: ContextShift[IO])
extends ToRenderableStoreOps
with LazyLogging {
implicit val sceneCache = Cache.caffeineSceneCache
implicit val projectLayerCache = Cache.caffeineProjectLayerCache
private def prefixFromHttpsS3Path(url: String): String = {
val path = URI.create(url).getPath
val tail = path.split("/").drop(1)
tail.take(tail.length - 1).mkString("/")
}
@SuppressWarnings(Array("OptionGet"))
private def mosaicDefinitionToImage(mosaicDefinition: MosaicDefinition,
bandOverride: Option[BandOverride],
projId: UUID): BacksplashImage[IO] = {
val singleBandOptions =
mosaicDefinition.singleBandOptions flatMap {
_.as[SingleBandOptions.Params].toOption
}
val sceneId = mosaicDefinition.sceneId
val footprint = mosaicDefinition.footprint getOrElse {
throw NoFootprintException
}
val subsetBands = if (mosaicDefinition.isSingleBand) {
singleBandOptions map { sbo =>
List(sbo.band)
} getOrElse {
throw SingleBandOptionsException(
"Single band options must be specified for single band projects"
)
}
} else {
bandOverride map { ovr =>
List(ovr.redBand, ovr.greenBand, ovr.blueBand)
} getOrElse {
List(
mosaicDefinition.colorCorrections.redBand,
mosaicDefinition.colorCorrections.greenBand,
mosaicDefinition.colorCorrections.blueBand
)
}
}
val colorCorrectParameters = ColorCorrect.Params(
0, // red
1, // green
2, // blue
mosaicDefinition.colorCorrections.gamma,
mosaicDefinition.colorCorrections.bandClipping,
mosaicDefinition.colorCorrections.tileClipping,
mosaicDefinition.colorCorrections.sigmoidalContrast,
mosaicDefinition.colorCorrections.saturation
)
mosaicDefinition.datasource match {
case Config.publicData.landsat8DatasourceId
if Config.publicData.enableMultiTiff =>
Landsat8MultiTiffImage(
sceneId,
footprint,
subsetBands,
colorCorrectParameters,
singleBandOptions,
mosaicDefinition.projectId,
projId,
mosaicDefinition.mask,
mosaicDefinition.metadataFiles.headOption map { uri =>
s"s3://landsat-pds/${prefixFromHttpsS3Path(uri)}"
} getOrElse { "" }
)
case Config.publicData.sentinel2DatasourceId
if Config.publicData.enableMultiTiff =>
Sentinel2MultiTiffImage(
sceneId,
footprint,
subsetBands,
colorCorrectParameters,
singleBandOptions,
mosaicDefinition.projectId,
projId,
mosaicDefinition.mask,
mosaicDefinition.metadataFiles.headOption map { uri =>
s"s3://sentinel-s2-l1c/${prefixFromHttpsS3Path(uri)}"
} getOrElse { "" }
)
case Config.publicData.landsat45ThematicMapperDatasourceId |
Config.publicData.landsat7ETMDatasourceId
if Config.publicData.enableMultiTiff =>
LandsatHistoricalMultiTiffImage(
sceneId,
footprint,
subsetBands,
colorCorrectParameters,
singleBandOptions,
mosaicDefinition.projectId,
projId,
mosaicDefinition.mask,
mosaicDefinition.sceneName
)
case _ =>
val ingestLocation = mosaicDefinition.ingestLocation getOrElse {
throw UningestedScenesException(
s"Scene ${sceneId} does not have an ingest location"
)
}
BacksplashGeotiff(
sceneId,
mosaicDefinition.projectId,
projId, // actually the layer ID
ingestLocation,
subsetBands,
colorCorrectParameters,
singleBandOptions,
mosaicDefinition.mask,
footprint,
mosaicDefinition.sceneMetadataFields
)
}
}
implicit def sceneStore: RenderableStore[SceneDao] =
new RenderableStore[SceneDao] {
def read(
self: SceneDao,
projId: UUID, // actually a scene id, but argument names have to match
window: Option[Projected[Polygon]],
bandOverride: Option[BandOverride],
imageSubset: Option[NEL[UUID]],
tracingContext: TracingContext[IO]): BacksplashMosaic = {
val tags = Map("sceneId" -> projId.toString)
tracingContext.childSpan("sceneStore.read", tags) use { childContext =>
Cacheable.getSceneById(projId, window, xa, childContext) map {
scene =>
// We don't actually have a project, so just make something up
val randomProjectId = UUID.randomUUID
val ingestLocation = scene.ingestLocation getOrElse {
throw UningestedScenesException(
s"Scene ${scene.id} does not have an ingest location")
}
val footprint = scene.dataFootprint getOrElse {
throw NoFootprintException
}
val imageBandOverride = bandOverride map { ovr =>
List(ovr.redBand, ovr.greenBand, ovr.blueBand)
} getOrElse { List(0, 1, 2) }
val colorCorrectParams =
ColorCorrect.paramsFromBandSpecOnly(0, 1, 2)
logger.debug(s"Chosen color correction: ${colorCorrectParams}")
(tracingContext,
List(
BacksplashGeotiff(
scene.id,
randomProjectId,
randomProjectId,
ingestLocation,
imageBandOverride,
colorCorrectParams,
None, // no single band options ever
None, // not adding the mask here, since out of functional scope for md to image
footprint,
scene.metadataFields
)))
}
}
}
def getOverviewConfig(self: SceneDao,
renderableId: UUID,
tracingContext: TracingContext[IO]) = IO.pure {
OverviewConfig.empty
}
}
implicit def layerStore: RenderableStore[SceneToLayerDao] =
new RenderableStore[SceneToLayerDao] {
// projId here actually refers to a layer -- but the argument names have to
// match the typeclass we're providing evidence for
def read(self: SceneToLayerDao,
projId: UUID,
window: Option[Projected[Polygon]],
bandOverride: Option[BandOverride],
imageSubset: Option[NEL[UUID]],
tracingContext: TracingContext[IO]): BacksplashMosaic = {
val tags = Map("projectId" -> projId.toString)
tracingContext.childSpan("layerStore.read", tags) use { child =>
for {
mosaicDefinitions <- child.childSpan("getMosaicDefinitions", tags) use {
_ =>
SceneToLayerDao
.getMosaicDefinition(projId, window, bandOverride map {
_.redBand
}, bandOverride map {
_.greenBand
}, bandOverride map {
_.blueBand
}, imageSubset map {
_.toList
} getOrElse List.empty)
.transact(xa)
}
} yield {
(tracingContext, mosaicDefinitions map { md =>
mosaicDefinitionToImage(md, bandOverride, projId)
})
}
}
}
def getOverviewConfig(
self: SceneToLayerDao,
projId: UUID,
tracingContext: TracingContext[IO]): IO[OverviewConfig] =
Cacheable.getProjectLayerById(projId, xa, tracingContext) map {
projectLayer =>
(projectLayer.overviewsLocation, projectLayer.minZoomLevel).tupled map {
case (overviews, minZoom) =>
OverviewConfig(Some(overviews), Some(minZoom))
} getOrElse { OverviewConfig.empty }
}
}
}
|
aaronxsu/raster-foundry
|
app-backend/backsplash-server/src/main/scala/com/rasterfoundry/backsplash/implicits/RenderableStoreImplicits.scala
|
Scala
|
apache-2.0
| 9,092 |
/* Copyright 2009-2016 EPFL, Lausanne */
import leon.annotation._
import leon.io.{ FileOutputStream => FOS, FileInputStream => FIS }
import leon.io.{ StdIn, StdOut }
import leon.lang._
object IO {
def filename = "test.txt"
def printX(x: Int, c: Char, sep: String): Unit = {
val out = FOS.open(filename)
if (out.isOpen) {
out.write(x)
out.write(sep)
out.write(c)
out.close
} else {
StdOut.print("CANNOT PRINT ")
StdOut.print(x)
StdOut.print(sep)
StdOut.print(c)
StdOut.print(" TO FILE ")
StdOut.print(filename)
StdOut.print("\\n")
}
}
def echo(): Unit = {
implicit val state = leon.io.newState
StdOut.print("ECHOING...")
StdIn.tryReadInt() match {
case Some(x) => StdOut.print(x)
case None() => StdOut.print("Nothing to echo")
}
StdOut.print("\\n")
}
def slowEcho(): Unit = {
implicit val state = leon.io.newState
val message = 58
val out = FOS.open("echo.txt")
if (out.isOpen) {
out.write(message)
out.close()
()
} else {
StdOut.print("Couldn't write message\\n")
}
val in = FIS.open("echo.txt")
if (in.isOpen) {
val x = in.tryReadInt()
in.close()
if (x.isDefined && x.get == message) {
StdOut.print("The echo was slow but successful!\\n")
} else
StdOut.print("The echo was slow and buggy! :-(\\n")
} else {
StdOut.print("Couldn't read message\\n")
}
}
def _main() = {
StdOut.print(42)
// Testing escaped characters support
StdOut.print('\\n')
StdOut.print('\\t')
StdOut.print('\\"')
StdOut.print('\\\\')
StdOut.print('\\'')
StdOut.print("\\"ab'&\\n\\t\\\\\\\\")
StdOut.print('\\n')
printX(42, '*', " <--> ")
echo()
slowEcho()
0
}
@extern
def main(args: Array[String]): Unit = _main()
}
|
regb/leon
|
src/test/resources/regression/genc/unverified/IO.scala
|
Scala
|
gpl-3.0
| 1,883 |
package io.github.meshelton.secs
import java.util.UUID
import scala.collection.mutable.{ArrayBuffer, HashMap}
/**
* A Component
*
* Components should only store mutable data and only have getter and setter methods
*/
trait Component
/**
* Manages a type of componenet
*
* Manages the components in the system of [[io.github.meshelton.secs.ComponentManager.ComponentType ComponentType]]
* Should be used when attempting to retrieve entities that have a specific component
* Ideally should not be subclassed
*
* @constructor Creates a new ComponentManager registered to a EntityManager
  * @param entityManager The [[io.github.meshelton.secs.EntityManager EntityManager]] that this component manager will be registered to. It is passed implicitly, so an EntityManager must be available in implicit scope.
*/
class ComponentManager[ComponentType <: Component](implicit val entityManager: EntityManager){
entityManager.registerComponentManager(this)
private val components = HashMap[Entity, ComponentType]()
/**
* Gets all the entities that have components managed by this ComponentManager
*
* @return all the entities that have components managed by this ComponentManager
*/
def apply() = getEntities()
/**
* Gets the component that is attached to entity
*
* @param entity the entity that may or may not have an attached component
* @return an option containing the component attached to the entity
*/
def apply(entity: Entity) = getComponent(entity)
/**
   * Adds a component to the entity, overriding the old one if present.
   *
   * @param entity the entity that the new component will be attached to
   * @param component the component to attach
   * @return the component that was attached
*/
def addComponent(entity: Entity, component: ComponentType): ComponentType = {
components(entity) = component
component
}
def update(entity: Entity, component: ComponentType): Unit = {
addComponent(entity, component)
}
/**
* Removes the component from the entity if present
*
* @param entity the entity from which to remove the component
   * @return an option containing the removed component, if one was attached
   */
  def removeComponent(entity: Entity): Option[ComponentType] = {
val removed = components.get(entity)
components -= entity
removed
}
/**
* Gets the component that is attached to entity
*
* @param entity the entity that may or may not have an attached component
* @return an option containing the component attached to the entity
*/
def getComponent(entity: Entity): Option[ComponentType] = {
components.get(entity)
}
/**
* Gets all the components managed by this ComponentManager
*
* @return all the components managed by this ComponentManager
*/
def getAllComponents(): List[ComponentType] = {
components.values.toList
}
/**
* Gets all the entities that have components managed by this ComponentManager
*
* @return all the entities that have components managed by this ComponentManager
*/
def getEntities(): List[Entity] = {
components.keys.toList
}
}
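// A minimal usage sketch (assumes an EntityManager in implicit scope and an
// Entity obtained from it; neither construction is shown in this file).
object ComponentManagerExample {
  case class Position(var x: Double, var y: Double) extends Component
  def example(player: Entity)(implicit entityManager: EntityManager): Unit = {
    val positions = new ComponentManager[Position] // registers itself with the EntityManager
    positions.addComponent(player, Position(0.0, 0.0))
    positions(player).foreach(p => p.x += 1.0) // read back via apply(entity)
    println(positions.getEntities())
  }
}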
|
meshelton/SECS
|
core/src/main/scala/io/github/meshelton/secs/ComponentyManager.scala
|
Scala
|
gpl-2.0
| 3,118 |
package unit
import org.scalatest.{Matchers, WordSpecLike}
import akka.actor.{ActorRef, Props, ActorSystem}
import akka.testkit.{TestProbe, TestActorRef, ImplicitSender, TestKit}
import io.neilord.serial.models.Messages
import Messages.{CommandFailed, PortOpened, OpenPort}
import org.mockito.Mockito
import Mockito._
import io.neilord.serial.{SerialPortSubscriptionManager, SerialPortManager}
import scala.concurrent.duration._
import util.TestPropsProvider
class SerialPortManagerSpec extends TestKit(ActorSystem("test-manager-spec"))
with WordSpecLike with Matchers with ImplicitSender {
trait TestCase extends SerialPortManagerContext {
val testProbe = new TestProbe(system)
val serialPortManager = TestActorRef(
Props(new SerialPortManager[SerialPortSubscriptionManager](mockFactory) with TestPropsProvider {
val testProbeRef = testProbe.ref
})
)
}
"A SerialPortManager" should {
"open and set the parameters on a SerialPort, and return a PortOpened msg" when {
"receiving an OpenPort message" in new TestCase {
when(mockFactory.newInstance(portName)).thenReturn(mockSerialPort)
when(mockSerialPort.open()).thenReturn(true)
when(mockSerialPort.name).thenReturn(portName)
serialPortManager ! OpenPort(settings)
expectMsgPF() {
case PortOpened(handler: ActorRef) =>
verify(mockSerialPort).open()
verify(mockSerialPort).setParameters(settings)
handler ! testMessage
testProbe.expectMsg(1.second, testMessage)
}
}
}
"send a CommandFailed message" when {
"an exception is thrown during the command" in new TestCase {
when(mockFactory.newInstance(portName)).thenReturn(mockSerialPort)
when(mockSerialPort.open()).thenThrow(testException)
when(mockSerialPort.name).thenReturn(portName)
val command = OpenPort(settings)
serialPortManager ! OpenPort(settings)
expectMsgPF() {
case CommandFailed(`command`, `testException`) => true
}
}
}
"correctly escape port names" in {
SerialPortManager.escapePort("/slash/slash/") should equal("-slash-slash-")
SerialPortManager.escapePort("//slash//slash//") should equal("--slash--slash--")
SerialPortManager.escapePort("none") should equal("none")
SerialPortManager.escapePort("-/none") should equal("--none")
}
}
}
|
NoOrdInaryGuy/akka-cereal
|
src/test/scala/unit/SerialPortManagerSpec.scala
|
Scala
|
apache-2.0
| 2,448 |
package booflow.example
import java.nio.ByteBuffer
import akka.actor.ActorSystem
import akka.http.scaladsl.model.ws.{BinaryMessage, Message, TextMessage}
import akka.stream.stage._
import akka.util.ByteString
import booflow.FlowController
import scala.concurrent.duration._
import akka.http.scaladsl.server.Directives
import akka.stream.FlowMaterializer
import akka.stream.scaladsl._
class Webservice(implicit fm: FlowMaterializer, system: ActorSystem) extends Directives {
def route =
get {
pathSingleSlash {
getFromResource("web/index.html")
} ~
// Scala-JS puts them in the root of the resource directory per default,
// so that's where we pick them up
path("example-fastopt.js")(getFromResource("example-fastopt.js")) ~
path("example-opt.js")(getFromResource("example-opt.js")) ~
path("ws") {
handleWebsocketMessages(webSocketFlow)
}
}
def webSocketFlow: Flow[Message, Message, Unit] = {
val wsTransport = new WSTransport
val fc = new FlowController(wsTransport, null)
Flow() { implicit b =>
import FlowGraph.Implicits._
val collect = b.add(Flow[Message].collect( {
case BinaryMessage.Strict(bs) => bs.asByteBuffer
}))
val filter = b.add(Flow[ByteBuffer].map(bb => wsTransport.receiveData(bb)).filter( _ => false))
val merge = b.add(Merge[ByteBuffer](2))
val out = b.add(Flow[ByteBuffer].map(data => BinaryMessage.Strict(ByteString(data))))
val r = b.add(Source(wsTransport.asPublisher))
collect ~> filter ~> merge
r ~> merge ~> out
(collect.inlet, out.outlet)
}
}
}
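// A minimal boot sketch (hypothetical host/port; assumes the akka-http 1.x
// binding API and materializer that match the imports used above).
object WebserviceBoot extends App {
  import akka.http.scaladsl.Http
  import akka.stream.ActorFlowMaterializer
  implicit val system = ActorSystem("booflow-example")
  implicit val materializer = ActorFlowMaterializer()
  Http().bindAndHandle(new Webservice().route, interface = "localhost", port = 8080)
}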
|
ochrons/booflow
|
example/jvm/src/main/scala/booflow/example/Webservice.scala
|
Scala
|
mit
| 1,655 |
package com.rajiv;
import com.rajiv.client._
import com.rajiv.server._
object Demo {
val DEFAULT_PORT = 9090
}
object Server extends Application {
val server = new NioServer(Demo.DEFAULT_PORT)
server.start()
}
object Client extends Application {
val client = new Client(Demo.DEFAULT_PORT)
client.start()
}
|
RajivKurian/Java-NIO-example
|
src/main/scala/Main.scala
|
Scala
|
apache-2.0
| 318 |
package ru.dgolubets.jsmoduleloader.api.readers
import java.io.File
import java.net.URI
import java.nio.charset.Charset
/**
* File reader with shim config.
*/
class TestFileModuleReader(baseDir: File, shim: Map[String, String]) extends FileModuleReader(baseDir, charset = Charset.defaultCharset()) {
override protected def getModuleFile(uri: URI): File = {
val uriStr = uri.toString
val absolute = !uriStr.startsWith(".")
if(absolute && shim.contains(uriStr)){
new File(shim(uriStr))
}
else super.getModuleFile(uri)
}
}
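// A minimal usage sketch (hypothetical paths; it shows how the shim map
// redirects absolute module ids to concrete files, while relative ids fall
// back to normal resolution against the base directory).
object TestFileModuleReaderExample {
  def reader(): TestFileModuleReader =
    new TestFileModuleReader(
      new File("src/test/js"),
      shim = Map("jquery" -> "src/test/js/lib/jquery.js"))
}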
|
DGolubets/js-module-loader
|
src/test/scala/ru/dgolubets/jsmoduleloader/api/readers/TestFileModuleReader.scala
|
Scala
|
mit
| 554 |
import play.api.mvc.Results._
import play.api.mvc.{Filter, RequestHeader, Result, WithFilters}
import play.api.{Application, GlobalSettings, Logger}
import play.filters.gzip.GzipFilter
import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.Future
/**
* Global Settings
* @author [email protected]
*/
object Global extends WithFilters(LoggingFilter, new GzipFilter()) with GlobalSettings {
override def onStart(app: Application) = Logger.info("Application has started")
override def onStop(app: Application) = Logger.info("Application shutdown...")
override def onBadRequest(request: RequestHeader, error: String) = Future.successful(BadRequest(s"Bad Request: $error"))
//override def onHandlerNotFound(request: RequestHeader)= NotFound(views.html.notFoundPage(request.path))
//override def onError(request: RequestHeader, t: Throwable) = InternalServerError(views.html.errorPage(t))
}
/**
* Logging Filter
* @author [email protected]
* http://www.playframework.com/documentation/2.2.3/ScalaHttpFilters
*/
object LoggingFilter extends Filter {
def apply(nextFilter: (RequestHeader) => Future[Result])(requestHeader: RequestHeader): Future[Result] = {
val startTime = System.nanoTime
nextFilter(requestHeader).map { result =>
val endTime = System.nanoTime
val requestTime = (endTime - startTime) / 1e+6f
if (requestHeader.uri.startsWith("/api")) {
Logger.info(f"${requestHeader.method} ${requestHeader.uri} ~> ${result.header.status} [$requestTime%.1f ms]")
}
result.withHeaders("Request-Time" -> requestTime.toString)
}
}
}
|
ldaniels528/broadway
|
app-play/app/Global.scala
|
Scala
|
apache-2.0
| 1,659 |
package com.anakiou.modbus.net
import java.net.InetAddress
import com.anakiou.modbus.Modbus
import com.anakiou.modbus.ModbusCoupler
import com.anakiou.modbus.ModbusIOException
import com.anakiou.modbus.io.ModbusUDPTransport
import com.anakiou.modbus.msg.ModbusResponse
class ModbusUDPListener {
private var m_Terminal: UDPSlaveTerminal = _
private var m_Handler: ModbusUDPHandler = _
private var m_HandlerThread: Thread = _
private var m_Port: Int = Modbus.DEFAULT_PORT
private var m_Listening: Boolean = _
private var m_Interface: InetAddress = _
def this(ifc: InetAddress) {
this()
m_Interface = ifc
}
def getPort(): Int = m_Port
def setPort(port: Int) {
m_Port = (if ((port > 0)) port else Modbus.DEFAULT_PORT)
}
def start() {
try {
m_Terminal = if (m_Interface == null) new UDPSlaveTerminal(InetAddress.getLocalHost) else new UDPSlaveTerminal(m_Interface)
m_Terminal.setLocalPort(m_Port)
m_Terminal.activate()
m_Handler = new ModbusUDPHandler(m_Terminal.getModbusTransport)
m_HandlerThread = new Thread(m_Handler)
      m_HandlerThread.start()
      // Only report listening once the terminal and handler have actually started.
      m_Listening = true
    } catch {
      case e: Exception => e.printStackTrace()
    }
}
def stop() {
m_Terminal.deactivate()
m_Handler.stop()
m_Listening = false
}
def isListening(): Boolean = m_Listening
class ModbusUDPHandler(private var m_Transport: ModbusUDPTransport) extends Runnable {
private var m_Continue: Boolean = true
def run() {
try {
do {
val request = m_Transport.readRequest()
var response: ModbusResponse = null
response = if (ModbusCoupler.getReference.getProcessImage == null) request.createExceptionResponse(Modbus.ILLEGAL_FUNCTION_EXCEPTION) else request.createResponse()
if (Modbus.debug) println("Request:" + request.getHexMessage)
if (Modbus.debug) println("Response:" + response.getHexMessage)
m_Transport.writeMessage(response)
} while (m_Continue);
} catch {
case ex: ModbusIOException => if (!ex.isEOF) {
ex.printStackTrace()
}
} finally {
try {
m_Terminal.deactivate()
} catch {
case ex: Exception =>
}
}
}
def stop() {
m_Continue = false
}
}
}
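// A minimal usage sketch (default interface and port; only the listener API
// defined above is used).
object ModbusUDPListenerExample {
  def main(args: Array[String]): Unit = {
    val listener = new ModbusUDPListener()
    listener.setPort(Modbus.DEFAULT_PORT)
    listener.start()
    println("listening: " + listener.isListening())
    // ... serve requests, then shut down:
    listener.stop()
  }
}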
|
anakiou/scamod
|
src/com/anakiou/modbus/net/ModbusUDPListener.scala
|
Scala
|
apache-2.0
| 2,316 |
/**
* Copyright (C) 2014 Reactibility Inc. <http://www.reactibility.com>
*/
package garde.security
case class User(id: String, name: String, login: String, password: String, roles: List[String], tenants: List[String], version: Long)
class UserService {
private var users: List[User] = Nil
def findLogin(login: String, password: String): Option[User] = {
users.find(u => u.login == login && u.password == password)
}
}
|
reactibility/garde
|
garde-security/src/main/scala/garde/security/User.scala
|
Scala
|
apache-2.0
| 437 |
package models
case class User(
username: String,
passwordHash: String,
email: String
)
|
sne11ius/scail
|
app/models/User.scala
|
Scala
|
gpl-3.0
| 95 |
package com.rockymadden.stringmetric.phonetic
import com.rockymadden.stringmetric.{StringFilter, StringMetric}
import com.rockymadden.stringmetric.Alphabet.Alpha
/** An implementation of the Soundex metric. */
class SoundexMetric extends StringMetric[DummyImplicit, Boolean] { this: StringFilter =>
final override def compare(charArray1: Array[Char], charArray2: Array[Char])
(implicit di: DummyImplicit): Option[Boolean] = {
val fca1 = filter(charArray1)
lazy val fca2 = filter(charArray2)
if (fca1.length == 0 || !(Alpha isSuperset fca1.head) || fca2.length == 0 || !(Alpha isSuperset fca2.head)) None
else if (fca1.head.toLower != fca2.head.toLower) Some(false)
else SoundexAlgorithm.compute(fca1).filter(_.length > 0).flatMap(se1 =>
SoundexAlgorithm.compute(fca2).filter(_.length > 0).map(se1.sameElements(_))
)
}
final override def compare(string1: String, string2: String)(implicit di: DummyImplicit): Option[Boolean] =
compare(string1.toCharArray, string2.toCharArray)
}
object SoundexMetric {
private lazy val self = apply()
def apply(): SoundexMetric = new SoundexMetric with StringFilter
def compare(charArray1: Array[Char], charArray2: Array[Char]) = self.compare(charArray1, charArray2)
def compare(string1: String, string2: String) = self.compare(string1, string2)
}
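// A minimal usage sketch: the companion compares two strings by their Soundex
// encodings, yielding None when an input is empty or starts non-alphabetically.
object SoundexMetricExample {
  def main(args: Array[String]): Unit = {
    println(SoundexMetric.compare("Robert", "Rupert")) // Some(true): both encode to R163
    println(SoundexMetric.compare("Robert", "Alice"))  // Some(false): first letters differ
    println(SoundexMetric.compare("", "Robert"))       // None: empty input
  }
}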
|
cocoxu/multip
|
src/main/scala/com/rockymadden/stringmetric/phonetic/SoundexMetric.scala
|
Scala
|
gpl-3.0
| 1,315 |
package scato
package benchmarks
import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Mode}
import scato.Prelude._
import scato.benchmarks.Data._
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
class Instantiations {
def l[F[_]](fi: F[Int])(implicit F: Light[F]): Foldable[F] = F.foldable
def h[F[_]](fi: F[Int])(implicit F: Heavy[F]): Foldable[F] = F.foo0.foo1.foldable
def hs[F[_]](fi: F[Int])(implicit F: HeavySubtyping[F]): Foldable[F] = F.foo0.foo1.foldable
@Benchmark def light = xs.map(i => (l(List(i))))
@Benchmark def heavy = xs.map(i => (h(List(i))))
@Benchmark def heavySubtyping = xs.map(i => (hs(List(i))))
}
object Hierarchy {
implicit def lightFoldable[F[_]](implicit F: Light[F]): Foldable[F] = F.foldable
implicit def heavyFoldable[F[_]](implicit F: Heavy[F]): Foldable[F] = F.foo0.foo1.foldable
implicit def heavySubtypingFoldable[F[_]](implicit F: HeavySubtyping[F]): Foldable[F] = F.foo0.foo1.foldable
}
abstract class Light[F[_]] {
def foldable: Foldable[F]
def functor: Functor[F]
}
object Light {
implicit def light[F[_]](implicit F: Functor[F]): Light[F] = new Light[F] { def foldable = null; val functor = F }
}
abstract class Heavy[F[_]] {
def foo0: Heavy.Foo0[F]
def foo3: Heavy.Foo3[F]
}
object Heavy {
abstract class Foo0[F[_]] {
def foo1: Foo1[F]
def foo4: Foo4[F]
}
abstract class Foo1[F[_]] {
def foldable: Foldable[F]
def foo2: Foo2[F]
}
abstract class Foo2[F[_]] {
def functor: Functor[F]
}
abstract class Foo3[F[_]] {
def functor: Functor[F]
def foo4: Foo4[F]
}
abstract class Foo4[F[_]] {
def foldable: Foldable[F]
}
implicit def heavy[F[_]](implicit F: Functor[F]): Heavy[F] = new Heavy[F] {
override val foo3 = new Foo3[F] {
override val functor = F
override val foo4 = new Foo4[F] {
override val foldable = null;
}
}
override val foo0 = new Foo0[F] {
override val foo4: Foo4[F] = foo3.foo4
override val foo1 = new Foo1[F] {
override val foo2 = new Foo2[F] {
override val functor = foo3.functor
}
override val foldable = foo4.foldable
}
}
}
}
trait HeavySubtyping[F[_]] {
def foo0: HeavySubtyping.Foo0[F]
def foo3: HeavySubtyping.Foo3[F]
}
object HeavySubtyping {
trait HeavySubtypingClass[F[_]] extends HeavySubtyping[F] with Foo0Class[F] with Foo3Class[F] {
}
trait Foo0[F[_]] {
def foo1: Foo1[F]
def foo4: Foo4[F]
}
trait Foo0Class[F[_]] extends Foo0[F] with Foo1Class[F] with Foo4Class[F] {
final def foo0: Foo0[F] = this
}
trait Foo1[F[_]] {
def foldable: Foldable[F]
def foo2: Foo2[F]
}
trait Foo1Class[F[_]] extends Foo1[F] with Foo2Class[F] {
def functor: Functor[F]
final def foo1: Foo1[F] = this
}
trait Foo2[F[_]] {
def functor: Functor[F]
}
trait Foo2Class[F[_]] extends Foo2[F] {
def functor: Functor[F]
final def foo2: Foo2[F] = this
}
trait Foo3[F[_]] {
def functor: Functor[F]
def foo4: Foo4[F]
}
trait Foo3Class[F[_]] extends Foo3[F] with Foo4Class[F]{
def functor: Functor[F]
final def foo3: Foo3[F] = this
}
trait Foo4[F[_]] {
def foldable: Foldable[F]
}
trait Foo4Class[F[_]] extends Foo4[F] {
def foldable: Foldable[F]
final def foo4: Foo4[F] = this
}
implicit def heavySubtyping[F[_]](implicit F: Functor[F]): HeavySubtyping[F] = new HeavySubtypingClass[F] {
override val functor = F
override val foldable = null;
}
}
|
aloiscochard/scato
|
benchmarks/src/main/scala/Instantiations.scala
|
Scala
|
apache-2.0
| 3,538 |
package com.stulsoft.ysps.preduce
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
/**
* Created by Yuriy Stul on 9/24/2016.
*/
class PersonTest extends AnyFunSpec with Matchers {
describe("Person") {
describe("#toString") {
it("should return text representation") {
val p = Person(1, 150, "test")
p.toString should equal("name: test, age = 1, height = 150")
}
}
}
}
|
ysden123/ysps
|
src/test/scala/com/stulsoft/ysps/preduce/PersonTest.scala
|
Scala
|
mit
| 451 |
package de.htwg.zeta.server.controller.restApi
import java.util.UUID
import javax.inject.Inject
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import com.mohiva.play.silhouette.api.actions.SecuredRequest
import com.softwaremill.quicklens.ModifyPimp
import de.htwg.zeta.common.format.ProjectShortInfo
import de.htwg.zeta.common.format.project.ClassFormat
import de.htwg.zeta.common.format.project.ConceptFormat
import de.htwg.zeta.common.format.project.GdslProjectFormat
import de.htwg.zeta.common.format.project.ReferenceFormat
import de.htwg.zeta.common.models.project.GdslProject
import de.htwg.zeta.common.models.project.concept.elements.MReference
import de.htwg.zeta.persistence.accessRestricted.AccessRestrictedGdslProjectRepository
import de.htwg.zeta.persistence.general.{AccessAuthorisationRepository, GraphicalDslInstanceRepository, UserRepository}
import de.htwg.zeta.server.model.modelValidator.generator.ValidatorGenerator
import de.htwg.zeta.server.model.modelValidator.generator.ValidatorGeneratorResult
import de.htwg.zeta.server.routing.routes
import de.htwg.zeta.server.silhouette.ZetaEnv
import grizzled.slf4j.Logging
import play.api.libs.json.JsArray
import play.api.libs.json.JsError
import play.api.libs.json.JsValue
import play.api.libs.json.Writes
import play.api.mvc.AnyContent
import play.api.mvc.InjectedController
import play.api.mvc.Result
/**
* REST-ful API for GraphicalDsl definitions
*/
class GraphicalDslRestApi @Inject()(
graphicalDslRepo: AccessRestrictedGdslProjectRepository,
instanceRepo: GraphicalDslInstanceRepository,
userRepo: UserRepository,
accessAuthorisationRepo: AccessAuthorisationRepository,
conceptFormat: ConceptFormat,
graphicalDslFormat: GdslProjectFormat,
classFormat: ClassFormat,
referenceFormat: ReferenceFormat,
implicit val ec: ExecutionContext
) extends InjectedController with Logging {
val sGdslProject = "GdslProject"
/** Lists all MetaModels for the requesting user, provides HATEOAS links.
*
* @param request The request
* @return The result
*/
def showForUser(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
val repo = graphicalDslRepo.restrictedTo(request.identity.id)
repo.readAllIds().flatMap(ids => {
Future.sequence(ids.map(repo.read)).map(_.map { mm =>
ProjectShortInfo(id = mm.id, name = mm.name)
})
}).map((set: Set[ProjectShortInfo]) => Ok(JsArray(set.toList.map(ProjectShortInfo.writes.writes)))).recover {
case e: Exception => BadRequest(e.getMessage)
}
}
/** inserts whole MetaModel structure (MetaModel itself, DSLs...)
*
* @param request The request
* @return The result
*/
def insert(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
request.body.validate(graphicalDslFormat.empty).fold(
faulty => {
faulty.foreach(error(_))
Future.successful(BadRequest(JsError.toJson(faulty)))
},
graphicalDsl => {
graphicalDslRepo.restrictedTo(request.identity.id).create(graphicalDsl).map { metaModelEntity =>
Created(graphicalDslFormat.writes(metaModelEntity))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
)
}
/** Updates whole MetaModel structure (MetaModel itself, DSLs...)
*
* @param id MetaModel-Id
* @param request request
* @return result
*/
def update(id: UUID)(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
info("updating concept: " + request.body.toString)
request.body.validate(conceptFormat).fold(
faulty => {
faulty.foreach(error(_))
Future.successful(BadRequest(JsError.toJson(faulty)))
},
concept => {
val repo = graphicalDslRepo.restrictedTo(request.identity.id)
repo.update(id, _.copy(concept = concept)).map { _ =>
Ok(conceptFormat.writes(concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
)
}
/** Deletes whole MetaModel incl. dsl definitions
*
* @param id MetaModel-Id
* @param request request
* @return result
*/
def delete(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
accessAuthorisationRepo.update(request.identity.id, _.revokeEntityAccess(sGdslProject, id)).map { _ =>
Ok("")
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
  /** returns the whole MetaModel incl. dsl definitions and HATEOAS links */
def get(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, metaModelEntity =>
Ok(graphicalDslFormat.writes(metaModelEntity))
)
}
/** returns pure MetaModel without dsl definitions */
def getMetaModelDefinition(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, metaModelEntity => {
Ok(conceptFormat.writes(metaModelEntity.concept))
})
}
/** returns all MClasses of a specific MetaModel as Json Array */
def getMClasses(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, metaModelEntity => {
Ok(Writes.seq(classFormat).writes(metaModelEntity.concept.classes))
})
}
/** returns all MReferences of a specific MetaModel as Json Array */
def getMReferences(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, graphicalDsl => {
Ok(Writes.seq(referenceFormat).writes(graphicalDsl.concept.references))
})
}
/** returns specific MClass of a specific MetaModel as Json Object */
def getMClass(id: UUID, name: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, graphicalDsl => {
graphicalDsl.concept.classMap.get(name).map(clazz =>
Ok(classFormat.writes(clazz))
).getOrElse(NotFound)
})
}
/** returns specific MReference of a specific MetaModel as Json Object */
def getMReference(id: UUID, name: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, metaModelEntity => {
metaModelEntity.concept.referenceMap.get(name).map((reference: MReference) =>
Ok(referenceFormat.writes(reference))
).getOrElse(NotFound)
})
}
/** returns style definition */
def getStyle(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, graphicalDsl => Ok(graphicalDsl.style))
}
/** returns shape definition */
def getShape(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, graphicalDsl => Ok(graphicalDsl.shape))
}
/** returns diagram definition */
def getDiagram(id: UUID)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, graphicalDsl => Ok(graphicalDsl.diagram))
}
/** A helper method for less verbose reads from the database */
private def protectedRead[A](id: UUID, request: SecuredRequest[ZetaEnv, A], trans: GdslProject => Result): Future[Result] = {
graphicalDslRepo.restrictedTo(request.identity.id).read(id).map { graphicalDsl =>
trans(graphicalDsl)
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
/** updates shape definition */
def updateShape(id: UUID)(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
request.body.validate[String].fold(
faulty => {
faulty.foreach(error(_))
Future.successful(BadRequest(JsError.toJson(faulty)))
},
shape => {
graphicalDslRepo.restrictedTo(request.identity.id).update(id, _.modify(_.shape).setTo(shape)).map { graphicalDsl =>
Ok(conceptFormat.writes(graphicalDsl.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
)
}
/** updates style definition */
def updateStyle(id: UUID)(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
request.body.validate[String].fold(
faulty => {
faulty.foreach(error(_))
Future.successful(BadRequest(JsError.toJson(faulty)))
},
style => {
graphicalDslRepo.restrictedTo(request.identity.id).update(id, _.modify(_.style).setTo(style)).map { graphicalDsl =>
Ok(conceptFormat.writes(graphicalDsl.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
)
}
/** updates diagram definition */
def updateDiagram(id: UUID)(request: SecuredRequest[ZetaEnv, JsValue]): Future[Result] = {
request.body.validate[String].fold(
faulty => {
faulty.foreach(error(_))
Future.successful(BadRequest(JsError.toJson(faulty)))
},
diagram => {
graphicalDslRepo.restrictedTo(request.identity.id).update(id, _.modify(_.diagram).setTo(diagram)).map { graphicalDsl =>
Ok(conceptFormat.writes(graphicalDsl.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
)
}
/** updates method code */
def updateClassMethodCode(metaModelId: UUID, className: String, methodName: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
request.body.asText.fold(
Future.successful(BadRequest("ClassMethodError"))
) { code =>
graphicalDslRepo.restrictedTo(request.identity.id).update(metaModelId, _.modify(_.concept.classes).using { classes =>
val clazz = classes.find(_.name == className).get
val method = clazz.methods.find(_.name == methodName).get
val updatedMethods = method.copy(code = code) +: clazz.methods.filter(_ != method)
clazz.copy(methods = updatedMethods) +: classes.filter(_ != clazz)
}).map { graphicalDsl =>
Ok(conceptFormat.writes(graphicalDsl.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
}
/** updates method code */
def updateReferenceMethodCode(metaModelId: UUID, referenceName: String, methodName: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
request.body.asText.fold(
Future.successful(BadRequest("ReferenceMethodError"))
) { code =>
graphicalDslRepo.restrictedTo(request.identity.id).update(metaModelId, _.modify(_.concept.references).using { references =>
val reference = references.find(_.name == referenceName).get
val method = reference.methods.find(_.name == methodName).get
val updatedMethods = method.copy(code = code) +: reference.methods.filter(_ != method)
reference.copy(methods = updatedMethods) +: references.filter(_ != reference)
}).map { metaModelEntity =>
Ok(conceptFormat.writes(metaModelEntity.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
}
/** updates method code */
def updateCommonMethodCode(metaModelId: UUID, methodName: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
request.body.asText.fold(
Future.successful(BadRequest("CommonMethodError"))
) { code =>
graphicalDslRepo.restrictedTo(request.identity.id).update(metaModelId, _.modify(_.concept.methods).using { methods =>
val method = methods.find(_.name == methodName).get
method.copy(code = code) +: methods.filter(_ != method)
}).map { metaModelEntity =>
Ok(conceptFormat.writes(metaModelEntity.concept))
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
}
/**
* Loads or generates the validator for a given meta model.
*
* The following HTTP status codes can be returned:
* * 200 OK - The validator was loaded from memory and is contained in the response.
* * 201 CREATED - The validator has been generated or regenerated and is contained in the response.
* * 409 CONFLICT - A validator was not yet generated, or could not be generated.
*
* @param id ID of the meta model to load or generate the validator.
* @param generateOpt Force a (re)generation.
* @param get Return a result body.
* @param request The HTTP-Request.
* @return The validator.
*/
def getValidator(id: UUID, generateOpt: Option[Boolean], get: Boolean)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
protectedRead(id, request, (metaModelEntity: GdslProject) => {
val generate = generateOpt.getOrElse(false)
if (generate) {
new ValidatorGenerator(metaModelEntity).generateValidator() match {
case ValidatorGeneratorResult(false, msg) => if (get) Conflict(msg) else Conflict
case ValidatorGeneratorResult(_, validator) =>
graphicalDslRepo.restrictedTo(request.identity.id).update(id, _.copy(validator = Some(validator)))
if (get) Created(validator) else Created
}
} else {
metaModelEntity.validator match {
case Some(validatorText) => if (get) Ok(validatorText) else Ok
case None =>
val url = routes.ScalaRoutes.getMetamodelsValidator(id, Some(true)).absoluteURL()(request)
if (get) {
Conflict(
s"""No validator generated yet. Try calling $url first.""")
} else {
Conflict
}
}
}
})
}
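  // Illustrative client flow for getValidator (URLs hypothetical; the real paths
  // come from the routes file via ScalaRoutes.getMetamodelsValidator):
  //   GET .../metamodels/<id>/validator               -> 200 cached validator, or 409
  //   GET .../metamodels/<id>/validator?generate=true -> 201 freshly generated validator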
/** Duplicate a project.
*
* @param id MetaModel-Id
* @param name name of the duplicated project
* @param request request
* @return result
*/
def duplicate(id: UUID, name: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
val repo = graphicalDslRepo.restrictedTo(request.identity.id)
val duplicatedId = UUID.randomUUID
(for {
existing <- repo.read(id)
_ <- repo.create(existing.copy(id = duplicatedId, name = name))
allInstanceIds <- instanceRepo.readAllIds()
allInstances <- Future.sequence(allInstanceIds.map(instanceRepo.read))
_ <- Future.sequence(allInstances.filter(_.graphicalDslId == id).map(instance =>
instanceRepo.create(instance.copy(id = UUID.randomUUID, graphicalDslId = duplicatedId))
))
} yield {
Ok("")
}).recover {
case e: Exception => BadRequest(e.getMessage)
}
}
/** Invite a user to a project.
*
* @param id MetaModel-Id
* @param email email of the user to invite
* @param request request
* @return result
*/
def inviteUser(id: UUID, email: String)(request: SecuredRequest[ZetaEnv, AnyContent]): Future[Result] = {
userRepo.readByEmail(email).map { user =>
accessAuthorisationRepo.update(user.id, _.grantEntityAccess(sGdslProject, id))
Ok("")
}.recover {
case e: Exception => BadRequest(e.getMessage)
}
}
}
|
Zeta-Project/zeta
|
api/server/app/de/htwg/zeta/server/controller/restApi/GraphicalDslRestApi.scala
|
Scala
|
bsd-2-clause
| 14,942 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import monix.execution.Ack.{Continue, Stop}
import monix.execution.cancelables.BooleanCancelable
import monix.execution.{Ack, Cancelable}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import scala.annotation.tailrec
import scala.util.control.NonFatal
import scala.util.{Failure, Try}
private[reactive] final class UnfoldObservable[S, A](seed: => S, f: S => Option[(A, S)]) extends Observable[A] {
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
var streamErrors = true
try {
val init = seed
val cancelable = BooleanCancelable()
streamErrors = false
new StateRunLoop(subscriber, cancelable, init, f).run()
cancelable
} catch {
case ex if NonFatal(ex) =>
if (streamErrors) subscriber.onError(ex)
else subscriber.scheduler.reportFailure(ex)
Cancelable.empty
}
}
private[this] final class StateRunLoop(o: Subscriber[A], c: BooleanCancelable, initialSeed: S, f: S => Option[(A, S)])
extends Runnable {
self =>
import o.{scheduler => s}
private[this] var seed = initialSeed
private[this] val em = s.executionModel
private[this] val asyncReschedule: Try[Ack] => Unit = {
case Continue.AsSuccess =>
self.run()
case Failure(ex) =>
o.onError(ex)
case _ =>
() // do nothing else
}
@tailrec
def fastLoop(syncIndex: Int): Unit = {
val ack =
try {
f(seed) match {
case Some((nextA, newState)) =>
this.seed = newState
o.onNext(nextA)
case None =>
o.onComplete()
Stop
}
} catch {
case ex if NonFatal(ex) =>
o.onError(ex)
Stop
}
val nextIndex =
if (ack == Continue) em.nextFrameIndex(syncIndex)
else if (ack == Stop) -1
else 0
if (nextIndex > 0)
fastLoop(nextIndex)
else if (nextIndex == 0 && !c.isCanceled)
ack.onComplete(asyncReschedule)
}
def run(): Unit =
try fastLoop(0)
catch {
case ex if NonFatal(ex) =>
s.reportFailure(ex)
}
}
}
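// Usage sketch (illustrative; this internal builder presumably backs the public
// Observable.unfold, seed/step values are hypothetical):
//   Observable.unfold(0)(s => if (s < 3) Some((s, s + 1)) else None)
//     .toListL // -> Task of List(0, 1, 2)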
|
monixio/monix
|
monix-reactive/shared/src/main/scala/monix/reactive/internal/builders/UnfoldObservable.scala
|
Scala
|
apache-2.0
| 2,924 |
package gg.uhc.hosts.endpoints.matches
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.server.directives.RouteDirectives.{complete, reject}
import gg.uhc.hosts.CustomJsonCodec._
import gg.uhc.hosts.Instrumented
import gg.uhc.hosts.endpoints.matches.websocket.MatchesWebsocket
import gg.uhc.hosts.endpoints.{BasicCache, DatabaseErrorRejection, EndpointRejectionHandler}
import scala.util.{Failure, Success}
class ListUpcomingMatches(cache: BasicCache, websocket: MatchesWebsocket) extends Instrumented {
private[this] val upcomingMatchesTimer = metrics.timer("upcoming-matches-request-time")
private[this] val upcomingMatchesCounter = metrics.counter("upcoming-matches-request-count")
def apply(): Route =
handleRejections(EndpointRejectionHandler()) {
concat(
pathEndOrSingleSlash {
(timed(upcomingMatchesTimer) & counting(upcomingMatchesCounter)) {
onComplete(cache.getUpcomingMatches) {
case Success(value) => complete(value)
case Failure(t) => reject(DatabaseErrorRejection(t))
}
}
},
path("listen")(websocket.route)
)
}
}
|
Eluinhost/hosts.uhc.gg
|
src/main/scala/gg/uhc/hosts/endpoints/matches/ListUpcomingMatches.scala
|
Scala
|
mit
| 1,217 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
package patterns
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import lexer.ScalaTokenTypes
import types.ScSequenceArg
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
trait ScPatternArgumentList extends ScArguments {
def patterns: Seq[ScPattern]
def missedLastExpr: Boolean = {
var child = getLastChild
while (child != null && child.getNode.getElementType != ScalaTokenTypes.tCOMMA) {
if (child.isInstanceOf[ScPattern] || child.isInstanceOf[ScSequenceArg]) return false
child = child.getPrevSibling
}
    child != null && child.getNode.getElementType == ScalaTokenTypes.tCOMMA
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScPatternArgumentList.scala
|
Scala
|
apache-2.0
| 741 |
/**
* Copyright 2013, 2016 Jandom Team
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.targets.cfg
import scala.collection.mutable.HashMap
import scala.collection.mutable.Queue
import scala.collection.JavaConverters._
import it.unich.jandom.targets.Annotation
import it.unich.jandom.targets.Target
import soot.toolkits.graph.DirectedGraph
/**
* This is a generic analyzer for control flow graphs. It uses the `Soot` library and exploits F-bounded
* polymorphism to ensure type safety.
* @tparam Node the type of the nodes for the control flow graph.
* @tparam Tgt the real class we are endowing with the ControlFlowGraph quality.
* @author Gianluca Amato <[email protected]>
* @author Francesca Scozzari <[email protected]>
*/
abstract class ControlFlowGraph[Tgt <: ControlFlowGraph[Tgt, Node], Node] extends Target[Tgt] {
/**
* The `ProgramPoint` type is defined as an alias for the `Node`. We are wondering whether to make
* `ProgramPoint` an alias for `Edge`.
*/
type ProgramPoint = Node
/**
* The analyzer needs a representation of the CFG. Here we use the class `DirectedGraph` of `Soot`.
*/
val graph: DirectedGraph[Node]
/**
* In order to determine widening points, we need an ordering on nodes.
*/
val ordering: Ordering[Node]
/**
* The dimension of the environment space. It is used to get the initial values of the abstract domains.
* It should be removed once we have methods `empty` and `full` which do not depend on a given dimension.
*/
val size: Int
private type Edge = (Node, Node)
/**
   * Returns the output property embedded in an annotation. It essentially considers the union of the results of
   * analyzing the tail nodes of the directed graph, starting from their annotations.
* @param params the parameters used to generate the annotation
* @param ann the annotation
   * @note The implementation is not very robust, since we are assuming a lot of things about the result of analyzing
* a tail block. In particular, we are assuming that the last result of analyzeBlock is the "output"
* of the CFG, so be careful to preserve this property.
*/
def extractOutput(params: Parameters)(ann: Annotation[ProgramPoint, params.Property]): params.Property = {
graph.getTails.asScala map { (node) => analyzeBlock(params)(node, ann(node)).last } reduce { _ union _ }
}
/**
   * This method adapts an input property (typically expressed only in terms of the input
   * parameters) into a new property with the additional information needed to carry on the analysis.
*/
protected def expandPropertyWithLocalVariables(params: Parameters)(input: params.Property): params.Property
/**
* This method returns the top property for a given node in the CFG
* @param node the node for which the top element should be determined
* @param params parameters of the analysis
*/
protected def topProperty(node: Node, params: Parameters): params.Property
/**
* This method analyzes a single `Node`.
* @param params the parameters of the analysis
* @param node the node to analyze
* @param prop the ingoing property to the node
* @return a sequence of properties, one for each outgoing edge. The order of these properties should
* correspond to the order of edges in `graph`. For the tails, the last element should be the value
* returned as the result of the CFG.
*/
protected def analyzeBlock(params: Parameters)(node: Node, prop: params.Property): Seq[params.Property]
/**
* Analyzes the target, starting from a given property.
* @param params the parameters which drive the analyzer
* @param input the starting property
* @return the resulting annotation
* @note this should be moved in the Target class.
*/
def analyzeFromInput(params: Parameters)(input: params.Property): Annotation[ProgramPoint, params.Property] = {
val ann = getAnnotation[params.Property]
for (node <- graph.getHeads().asScala) ann(node) = expandPropertyWithLocalVariables(params)(input)
analyzeFromAnnotation(params)(ann)
}
/**
* Perform a static analysis over the target, from a standard initial annotation
* @param params the parameters which drive the analyzer
* @return an annotation for the program
*/
def analyze(params: Parameters): Annotation[ProgramPoint, params.Property] = {
val ann = getAnnotation[params.Property]
for (node <- graph.getHeads.asScala) ann(node) = topProperty(node, params)
analyzeFromAnnotation(params)(ann)
}
/**
* The analyzer. At the moment, it implements a work-list based analysis.
*/
def analyzeFromAnnotation(params: Parameters)(ann: Annotation[ProgramPoint, params.Property]): Annotation[ProgramPoint, params.Property] = {
val annEdge = HashMap[Edge, params.Property]()
val taskList = Queue[ProgramPoint](graph.getHeads.asScala: _*)
// ASCENDING phase
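    // Widening is applied only on back-edges (successors not greater than the
    // current node w.r.t. `ordering`), which ensures the ascending iteration terminates.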
params.log("Ascening Phase\n")
while (!taskList.isEmpty) {
val node = taskList.dequeue
params.log(s"node ${node}input ${ann(node)}\n")
val result = analyzeBlock(params)(node, ann(node))
params.log("result " + result.mkString(",") + "\n")
for ((succ, out) <- graph.getSuccsOf(node).asScala zip result) {
annEdge((node, succ)) = out
if (graph.getPredsOf(succ).size() > 1 && (ann contains succ)) {
params.log(s"join $succ : ${ann(succ)} with $out")
val succval: params.Property = if (ordering.lteq(succ, node)) {
params.log(s" widening")
params.widening(node)(ann(succ), out)
} else
ann(succ) union out
if (succval > ann(succ)) {
params.log(s" update with $succval\n")
ann(succ) = succval
taskList.enqueue(succ)
} else {
params.log(s" not updated\n")
}
} else {
ann(succ) = out
taskList.enqueue(succ)
}
}
}
// DESCENDING phase
taskList.enqueue(graph.asScala.toSeq: _*)
params.log("Descending Phase\n")
while (!taskList.isEmpty) {
val node = taskList.dequeue
params.log(s"node ${node} input ${ann(node)} ")
val result = analyzeBlock(params)(node, ann(node))
params.log("result " + (graph.getSuccsOf(node).asScala zip result).mkString(" ; ") + "\n")
for ((succ, out) <- graph.getSuccsOf(node).asScala zip result) {
annEdge((node, succ)) = out
val newinput = ann(succ) intersection (graph.getPredsOf(succ).asScala map { e => annEdge((e, succ)) } reduce { _ union _ })
params.log(s"narrow $succ : ${ann(succ)} with $newinput ")
        // this may cause an infinite loop
val succval = if (ordering.lteq(succ, node)) {
params.narrowing(node)(ann(succ), newinput)
} else
newinput
params.log(s"result $succval\n")
if (succval < ann(succ)) {
ann(succ) = succval
taskList.enqueue(succ)
}
}
}
ann
}
}
|
amato-gianluca/Jandom
|
core/src/main/scala/it/unich/jandom/targets/cfg/ControlFlowGraph.scala
|
Scala
|
lgpl-3.0
| 7,690 |
package chrome.windows
import bindings._
import chrome.events.EventSource
import chrome.events.EventSourceImplicits._
import utils.ErrorHandling.lastErrorOrValue
import scala.concurrent.{Promise, Future}
import scala.scalajs.js
object Windows {
val WINDOW_ID_NONE: Window.Id = bindings.Windows.WINDOW_ID_NONE
val WINDOW_ID_CURRENT: Window.Id = bindings.Windows.WINDOW_ID_CURRENT
val onCreated: EventSource[Window] = bindings.Windows.onCreated
val onRemoved: EventSource[Window.Id] = bindings.Windows.onRemoved
val onFocusChanged: EventSource[Window.Id] = bindings.Windows.onFocusChanged
def get(windowId: Window.Id, getInfo: js.UndefOr[GetOptions] = js.undefined): Future[Window] = {
val promise = Promise[Window]()
bindings.Windows.get(windowId, getInfo, (window: Window) => {
promise.complete(lastErrorOrValue(window))
})
promise.future
}
def getCurrent(getInfo: js.UndefOr[GetOptions] = js.undefined): Future[Window] = {
val promise = Promise[Window]()
bindings.Windows.getCurrent(getInfo, (window: Window) => {
promise.complete(lastErrorOrValue(window))
})
promise.future
}
def getLastFocused(getInfo: js.UndefOr[GetOptions] = js.undefined): Future[Window] = {
val promise = Promise[Window]()
bindings.Windows.getLastFocused(getInfo, (window: Window) => {
promise.complete(lastErrorOrValue(window))
})
promise.future
}
def getAll(getInfo: js.UndefOr[GetOptions] = js.undefined): Future[List[Window]] = {
val promise = Promise[List[Window]]()
bindings.Windows.getAll(getInfo, (windows: js.Array[Window]) => {
promise.complete(lastErrorOrValue(windows.toList))
})
promise.future
}
def create(createData: js.UndefOr[CreateOptions]): Future[Option[Window]] = {
val promise = Promise[Option[Window]]()
bindings.Windows.create(createData, js.Any.fromFunction1((window: js.UndefOr[Window]) => {
promise.complete(lastErrorOrValue(window.toOption))
}))
promise.future
}
def update(windowId: Window.Id, updateInfo: UpdateOptions): Future[Window] = {
val promise = Promise[Window]()
bindings.Windows.update(windowId, updateInfo, js.Any.fromFunction1((window: Window) => {
promise.complete(lastErrorOrValue(window))
}))
promise.future
}
def remove(windowId: Window.Id): Future[Unit] = {
val promise = Promise[Unit]()
bindings.Windows.remove(windowId, js.Any.fromFunction0(() => {
promise.complete(lastErrorOrValue(()))
}))
promise.future
}
}
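// Usage sketch (illustrative; assumes an ExecutionContext in scope, e.g.
// scala.scalajs.concurrent.JSExecutionContext.Implicits.queue):
//   Windows.getAll().foreach(ws => println(s"${ws.size} windows open"))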
|
amsayk/scala-js-chrome
|
bindings/src/main/scala/chrome/windows/Windows.scala
|
Scala
|
mit
| 2,538 |
package org.bitcoins.spvnode.networking
import java.net.InetSocketAddress
import akka.actor.{Actor, ActorContext, ActorRef, ActorRefFactory, Props}
import akka.event.LoggingReceive
import akka.io.Tcp
import org.bitcoins.core.crypto.{DoubleSha256Digest, Sha256Hash160Digest}
import org.bitcoins.core.number.UInt32
import org.bitcoins.core.protocol.Address
import org.bitcoins.core.util.{BitcoinSLogger, BitcoinSUtil}
import org.bitcoins.spvnode.NetworkMessage
import org.bitcoins.spvnode.block.MerkleBlock
import org.bitcoins.spvnode.bloom.{BloomFilter, BloomUpdateNone}
import org.bitcoins.spvnode.constant.Constants
import org.bitcoins.spvnode.messages.data.{GetDataMessage, Inventory, InventoryMessage}
import org.bitcoins.spvnode.messages._
import org.bitcoins.spvnode.messages.control.FilterLoadMessage
import org.bitcoins.spvnode.util.BitcoinSpvNodeUtil
/**
* Created by chris on 8/30/16.
 * Responsible for checking if a payment to an address was made,
 * and for verifying that the transaction that made the payment was included
 * in a block on the blockchain
*
* 1.) Creates a bloom filter
* 2.) Sends the bloom filter to a node on the network
 * 3.) A node matches the bloom filter and sends a txid that matched the filter back to us
* 4.) We request the full transaction using a [[GetDataMessage]]
* 5.) We verify the transaction given to us has an output that matches the address we expected a payment to
* 6.) When another block is announced on the network, we send a MsgMerkleBlock
 * to our peer on the network to see if the tx was included in that block
 * 7.) If it was, send a [[PaymentActor.SuccessfulPayment]] message back to the actor that requested this
*/
sealed trait PaymentActor extends Actor with BitcoinSLogger {
def receive = LoggingReceive {
case hash: Sha256Hash160Digest =>
paymentToHash(hash)
case address: Address =>
self.forward(address.hash)
}
/** Constructs a bloom filter that matches the given hash,
* then sends that bloom filter to a peer on the network */
def paymentToHash(hash: Sha256Hash160Digest) = {
val bloomFilter = BloomFilter(10,0.0001,UInt32.zero,BloomUpdateNone).insert(hash)
val filterLoadMsg = FilterLoadMessage(bloomFilter)
val peerMsgHandler = PeerMessageHandler(context)
val bloomFilterNetworkMsg = NetworkMessage(Constants.networkParameters,filterLoadMsg)
peerMsgHandler ! bloomFilterNetworkMsg
logger.debug("Switching to awaitTransactionInventoryMessage")
context.become(awaitTransactionInventoryMessage(hash, peerMsgHandler))
}
  /** Waits for a transaction inventory message on the p2p network;
    * once we receive one we switch to the awaitTransactionGetDataMessage context */
def awaitTransactionInventoryMessage(hash: Sha256Hash160Digest, peerMessageHandler: ActorRef): Receive = LoggingReceive {
case invMsg: InventoryMessage =>
//txs are broadcast by nodes on the network when they are seen by a node
//filter out the txs we do not care about
val txInventories = invMsg.inventories.filter(_.typeIdentifier == MsgTx)
handleTransactionInventoryMessages(txInventories,peerMessageHandler)
context.become(awaitTransactionGetDataMessage(hash,peerMessageHandler))
}
  /** Awaits the transaction requested by a [[GetDataMessage]]. We can also fire off more [[GetDataMessage]]s inside of this context */
def awaitTransactionGetDataMessage(hash: Sha256Hash160Digest, peerMessageHandler: ActorRef): Receive = LoggingReceive {
case txMsg : TransactionMessage =>
//check to see if any of the outputs on this tx match our hash
      val outputs = txMsg.transaction.outputs.filter(o => o.scriptPubKey.asm.exists(_.bytes == hash.bytes))
if (outputs.nonEmpty) {
logger.debug("matched transaction inside of awaitTransactionGetDataMsg: " + txMsg.transaction.hex)
logger.debug("Matched txid: " + txMsg.transaction.txId.hex)
logger.debug("Switching to awaitBlockAnnouncement")
context.become(awaitBlockAnnouncement(hash,txMsg.transaction.txId, peerMessageHandler))
}
//otherwise we do nothing and wait for another transaction message
case invMsg: InventoryMessage =>
//txs are broadcast by nodes on the network when they are seen by a node
//filter out the txs we do not care about
val txInventories = invMsg.inventories.filter(_.typeIdentifier == MsgTx)
handleTransactionInventoryMessages(txInventories,peerMessageHandler)
}
/** Sends a [[GetDataMessage]] to get the full transaction for a transaction inventory message */
private def handleTransactionInventoryMessages(inventory: Seq[Inventory], peerMessageHandler: ActorRef) = for {
txInv <- inventory
inventory = GetDataMessage(txInv)
} yield peerMessageHandler ! inventory
/** This context waits for a block announcement on the network,
* then constructs a [[MerkleBlockMessage]] to check
* if the txid was included in that block */
def awaitBlockAnnouncement(hash: Sha256Hash160Digest, txId: DoubleSha256Digest, peerMessageHandler: ActorRef): Receive = LoggingReceive {
case invMsg: InventoryMessage =>
val blockHashes = invMsg.inventories.filter(_.typeIdentifier == MsgBlock).map(_.hash)
if (blockHashes.nonEmpty) {
//construct a merkle block message to verify that the txIds was in the block
val merkleBlockInventory = Inventory(MsgFilteredBlock,blockHashes.head)
val getDataMsg = GetDataMessage(merkleBlockInventory)
val getDataNetworkMessage = NetworkMessage(Constants.networkParameters,getDataMsg)
peerMessageHandler ! getDataNetworkMessage
logger.debug("Switching to awaitMerkleBlockMessage")
context.become(awaitMerkleBlockMessage(hash,txId,blockHashes, peerMessageHandler))
}
//else do nothing and wait for another block announcement
}
/** This context waits for a [[MerkleBlockMessage]] from our peer on the network, then checks
* if the given txid is contained inside of the block. If it is included, send a [[PaymentActor.SuccessfulPayment]]
* message back to the actor that created this actor, else send a [[PaymentActor.FailedPayment]] message back to
* the actor that created this actor
* @param hash
* @param txId
* @param blockHashes
* @param peerMessageHandler
* @return
*/
def awaitMerkleBlockMessage(hash: Sha256Hash160Digest, txId: DoubleSha256Digest, blockHashes: Seq[DoubleSha256Digest],
peerMessageHandler: ActorRef): Receive = LoggingReceive {
case merkleBlockMsg: MerkleBlockMessage =>
val result = merkleBlockMsg.merkleBlock.partialMerkleTree.extractMatches.contains(txId)
if (result) {
val successfulPayment = PaymentActor.SuccessfulPayment(hash,txId,blockHashes,merkleBlockMsg.merkleBlock)
logger.info("Received successful payment: " + successfulPayment)
context.parent ! successfulPayment
} else context.parent ! PaymentActor.FailedPayment(hash)
peerMessageHandler ! Tcp.Close
context.stop(self)
}
}
object PaymentActor {
private case class PaymentActorImpl() extends PaymentActor
def props = Props(classOf[PaymentActorImpl])
def apply(context: ActorRefFactory): ActorRef = context.actorOf(props, BitcoinSpvNodeUtil.createActorName(this.getClass))
sealed trait PaymentActorMessage
case class SuccessfulPayment(hash:Sha256Hash160Digest, txId: DoubleSha256Digest,
blockHash: Seq[DoubleSha256Digest], merkleBlock: MerkleBlock) extends PaymentActorMessage
case class FailedPayment(hash: Sha256Hash160Digest) extends PaymentActorMessage
}
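// Illustrative wiring (sketch): the actor replies to context.parent, so create it
// from inside a supervising actor and send it an Address or Sha256Hash160Digest:
//   val payment = PaymentActor(context)
//   payment ! address // runs steps 1-7 above; expect SuccessfulPayment/FailedPayment back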
|
bitcoin-s/bitcoin-s-spv-node
|
src/main/scala/org/bitcoins/spvnode/networking/PaymentActor.scala
|
Scala
|
mit
| 7,688 |
package phenan.prj.body
import phenan.prj._
import phenan.prj.ir._
import scala.util.Try
trait BodyCompiler {
this: BodyParser with StatementParsersModule with ExpressionParsersModule with ContextSensitiveParsersModule with JTypeLoader with Environments with IRStatements with IRExpressions with JModules =>
def parseMethodBody (code: String, expected: JType, env: ProcedureEnvironment): Try[IRMethodBody] = {
new BodyParsers(env).getStatementParsers(expected).methodBody(code, env)
}
def parseConstructorBody (code: String, env: ProcedureEnvironment): Try[IRConstructorBody] = {
new BodyParsers(env).getStatementParsers(voidType).constructorBody(code, env)
}
def parseInitializerBody (code: String, env: ProcedureEnvironment): Try[IRInitializerBody] = {
new BodyParsers(env).getStatementParsers(voidType).initializerBody(code, env)
}
def parseExpression (code: String, expected: JType, env: ModuleEnvironment): Try[IRExpression] = {
new BodyParsers(env).getExpressionParser(expected)(code, env)
}
}
|
csg-tokyo/proteaj2
|
src/main/scala/phenan/prj/body/BodyCompiler.scala
|
Scala
|
mit
| 1,041 |
package org.sgine.ui
import com.badlogic.gdx.graphics.Texture
import render.{Vertex, TextureCoordinates}
import org.powerscala._
/**
*
*
* @author Matt Hicks <[email protected]>
*/
object ShapeComponentExample extends UI {
val texture = new Texture(Resource("sgine.png"))
val shape = new ShapeComponent()
shape._texture := texture
shape._textureCoordinates := TextureCoordinates.rectCoords(0.0, 0.0, 400.0, 96.0, texture.getWidth, texture.getHeight)
shape._vertices := Vertex.rect(400.0, 96.0)
contents += shape
}
|
Axiometry/sgine
|
ui/src/test/scala/org/sgine/ui/ShapeComponentExample.scala
|
Scala
|
bsd-3-clause
| 532 |
object Test extends App {
trait A
trait B extends A
class C {
type D
trait E { type T >: B <: A; val x : T }
// This is currently correctly disallowed
// val y : (D with E)#T = y
val y : D with E = y
var sneak = { () => y.x }
sneak = { () => new B { } }
}
class F extends C {
trait G
trait H { type T = G }
type D = H
def frob(arg : G) : G = arg
frob(sneak())
}
new F
}
|
yusuke2255/dotty
|
tests/untried/neg/volatile.scala
|
Scala
|
bsd-3-clause
| 416 |
package grammarcomp
package parsing
import grammar._
import utils._
import Logging._
import CFGrammar._
import CNFConverter._
/**
 * A converter to Greibach Normal Form (GNF)
* Requires grammars to be in CNF
*/
object GNFUtilities {
/**
* This method will also handle epsilons
*/
def indirectLeftRecursiveNonterms[T](g: Grammar[T]) = {
val nullables = GrammarUtils.nullables(g)
(new GraphUtil.DirectedGraph[Nonterminal] {
def start = g.start
def vertices = g.nonTerminals
def successors(v: Nonterminal) = {
g.nontermToRules(v).flatMap { rl =>
var foundNonnullable = false
rl.rightSide.foldLeft(List[Nonterminal]()) { (acc, sym) =>
if (!foundNonnullable) {
sym match {
case nt: Nonterminal if nullables(nt) =>
acc :+ nt //here nt is nullable, so we need to continue choosing the next non-terminal if any
case nt: Nonterminal =>
foundNonnullable = true
acc :+ nt
case _ =>
foundNonnullable = true
acc
}
} else acc
}
}
}
}).sccs
}
def hasIndirectLeftRecursion[T](g: Grammar[T]) = {
indirectLeftRecursiveNonterms(g).exists(_.size >= 2)
}
/**
   * Computes the terminals that can start a production of `nt`, assuming the grammar is in GNF form
*/
def firstNT[T](nt: Nonterminal, g: Grammar[T]): List[Terminal[T]] = {
g.nontermToRules(nt).collect {
case Rule(_, (t: Terminal[T]) :: tail) => t
case r @ Rule(_, (nt: Nonterminal) :: tail) =>
throw new IllegalStateException("Rule: " + r + " is not in GNF form")
}.distinct
}
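  // e.g. for GNF rules  S -> a B C | b,  firstNT(S, g) == List(a, b) (illustrative)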
/**
* Checks if a grammar in GNF form is LL(2)
*/
def isGNFGrammarLL2[T](g: Grammar[T]): Boolean = {
var break = false
g.nontermToRules.foreach {
case (nt, ntrules) if !break =>
val rules = ntrules.filter {
case Rule(_, head :: tail) => true
case Rule(l, List()) if l == g.start => false
case rl @ _ =>
throw new IllegalStateException("Found epsilon rule for non-start symbol: " + rl)
}
val headToRules = rules.groupBy {
case Rule(_, (head: Terminal[T]) :: tail) => head
case rl @ _ =>
throw new IllegalStateException("Rule not in GNF: " + rl)
}
headToRules.foreach {
case (frst, rules) if !break =>
val seconds = rules.collect {
case Rule(_, _ :: s :: tail) => s
}
//the first set of each of the seconds should be disjoint
var firsts = Set[Terminal[T]]()
seconds.foreach {sym =>
if (!break)
sym match {
case t: Terminal[T] if firsts(t) =>
break = true
case t: Terminal[T] =>
firsts += t
case nt: Nonterminal =>
val ntfirsts = firstNT(nt, g).toSet
if (ntfirsts.intersect(firsts).isEmpty)
firsts ++= ntfirsts
else
break = true
}
}
        case _ => // do nothing
}
      case _ => // do nothing
}
!break
}
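  // e.g. S -> a A | a b passes (the second symbols' first-sets are disjoint,
  // assuming b is not in first(A)), while S -> a A | a A b fails (illustrative)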
/**
* Performs left factorization, not really used anywhere
*/
def leftFactor[T](g: Grammar[T]): Grammar[T] = {
def factorOnce(rules: List[Rule[T]]) = {
var modRules = List[Rule[T]]()
var newRules = List[Rule[T]]()
rules.groupBy(_.leftSide).foreach {
case (nt, rules) =>
val headToRules = rules.groupBy {
case Rule(_, List()) => List()
case Rule(_, head :: tail) => head
}
headToRules.foreach {
case (h: Symbol[T], rules) =>
val (rulesToFactor, rest) = rules.partition(_.rightSide.size >= 2)
if (rulesToFactor.size >= 2) {
//create a new non-terminal to produce the suffix
val sufNT = CFGrammar.freshNonterminal(Some("Suf"))
val sufRHS = rulesToFactor.map(_.rightSide.drop(1)) //drop the head from the rightsides
modRules ++= (Rule(nt, List(h, sufNT)) +: rest)
newRules ++= sufRHS.map(Rule(sufNT, _))
} else
modRules ++= rules
case _ =>
modRules ++= rules
}
}
(modRules, newRules)
}
var rulesToCheck = g.rules
var rulesTransformed = List[Rule[T]]()
while (!rulesToCheck.isEmpty) {
val (modrules, newrules) = factorOnce(rulesToCheck)
rulesToCheck = newrules
rulesTransformed ++= modrules
}
Grammar[T](g.start, rulesTransformed)
}
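  // e.g. A -> a B | a C left-factors into A -> a Suf with Suf -> B and Suf -> C,
  // where Suf is a fresh nonterminal (illustrative)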
/**
* This also will handle epsilons
*/
/*def removeLeftRecursionOptimized(ntrules: List[Rule], nt: Nonterminal): List[Rule] = {
def isLeftRecursive(rule: Rule): Boolean = rule match {
case Rule(lhs, head :: _) if lhs == head => true
case _ => false
}
//substitute for epsilons
//val afterEpsilons = CNFConverter.removeEpsilonProductions(Grammar[T](nt, ntrules)).rules
//collect all left recursive rules
val (leftRecur, rest) = ntrules.partition(isLeftRecursive)
if (leftRecur.isEmpty)
ntrules
else {
val alphas = leftRecur.map(_.rightSide.tail)
val betas = rest.map(_.rightSide)
//create a new non-terminal for alpha if it has many productions
val alphaNT = Nonterminal(Util.freshName(Some(nt.name)))
val alphaRules = alphas.map(Rule(alphaNT, _))
val betaNT = Nonterminal(Util.freshName(Some(nt.name)))
val betaRules = betas.map(Rule(betaNT, _))
val Z = Nonterminal(Util.freshName(Some(nt.name)))
List(Rule(nt, List(betaNT)), Rule(nt, List(betaNT, Z))) ++
List(Rule(Z, List(alphaNT, Z)), Rule(Z, List(alphaNT))) ++
alphaRules ++ betaRules
}
}
*/
/**
* The following method is not really used by the GNFConverter itself,
   * but by other clients, esp. by the ANTLR parser
*/ /*
def removeIndirectLeftRecursion(g: Grammar[T])(implicit opctx: OperationContext): Grammar[T] = {
val indirectLeftRecurs = (new GraphUtil.DirectedGraph[Nonterminal] {
def start = g.start
def vertices = g.nonTerminals
def successors(v: Nonterminal) = {
g.nontermToRules(v).collect {
case Rule(_, (firstNt: Nonterminal) :: rest) =>
firstNt
}
}
}).sccs.filter(_.size >= 2)
val nonterms = g.nonTerminals.toSet
val newRuls = indirectLeftRecurs.flatMap { scc =>
println("SCC: " + scc)
val sccRules = scc.flatMap(g.nontermToRules.apply _)
println("#old rules : " + sccRules.size)
println("Old Rules for Scc: " + CFGrammar.rulesToStr(sccRules))
//order the nonterminals as A1 ... An, arbitrarily
val ntIndexMap = scc.zipWithIndex.toMap
val leftiesSet = scc.toSet
//convert every rule A_i -> A_j \\alpha so that j >= i
//assuming that the lhs of every rule in ntrules is nt
def orderNonterm(nt: Nonterminal, ntrules: List[Rule], ntToRules: Map[Nonterminal, List[Rule]]): (List[Rule], List[Rule]) = {
//println("normalizing nt: " + nt + " Index: " + ntIndexMap(nt))
//println("#Rules: "+ntrules.size)
val firstSymToRules = (ntrules.groupBy {
case rl @ Rule(_, (fnt: Nonterminal) :: rest) =>
Some(fnt)
case rl @ _ =>
None
})
val i = ntIndexMap(nt)
var ntRules = List[Rule]()
var newRules = List[Rule]()
firstSymToRules.foreach {
//if 'fnt' has a smaller index or if it was created newly
case (Some(fnt), rls) if (ntIndexMap.contains(fnt) && ntIndexMap(fnt) < i)
|| (!nonterms.contains(fnt)) =>
//println("RecursiveFirstNT: " + fnt)
val suffs = rls.map { _.rightSide.tail }
val Z = Nonterminal(Util.freshName(Some(nt.name)))
val newrls = suffs.map(Rule(Z, _))
val inlinedRules = ntToRules(fnt).map {
case Rule(l, r) =>
Rule(nt, r :+ Z)
}
ntRules ++= inlinedRules
newRules ++= newrls
case (_, rls) =>
ntRules ++= rls
}
if (!newRules.isEmpty) {
//println("ntRules: " + ntRules.mkString("\\n"))
//println("newRules: " + newRules.mkString("\\n"))
//recurse
val (ntrs, nwrs) = orderNonterm(nt, ntRules, ntToRules)
(ntrs, nwrs ++ newRules)
} else
(ntRules, List())
}
val orderedRules = scc.foldLeft(Map[Nonterminal, List[Rule]]()) {
case (acc, nt) =>
val ntrules = g.nontermToRules(nt)
//println("#old NTRules: "+ntrules.size)
val (newNtRules, rest) = orderNonterm(nt, ntrules, acc)
val nonLeftRecRules = removeLeftRecursionOptimized(newNtRules, nt)
//println("New rules: "+CFGrammar.rulesToStr(nonLeftRecRules))
(acc ++ (nonLeftRecRules ++ rest).groupBy(_.leftSide))
}.flatMap(_._2).toList
println("#new rules : " + orderedRules.size)
//println("New rules for Scc: " + CFGrammar.rulesToStr(orderedRules))
orderedRules
}
//add the new rules for lefties
val lefties = indirectLeftRecurs.flatten.toSet
val transRules = g.rules.filterNot(rl => lefties.contains(rl.leftSide)) ++ newRuls
//simplify the transformed grammar
val simplifications = {
removeUnreachableRules _ andThen
removeUnproductiveRules
}
val transGrammar = Grammar[T](g.start, transRules)
//require(verifyNoIndirectRecursion(transGrammar))
transGrammar
}
*/ }
|
epfl-lara/GrammarComparison
|
src/main/scala/grammarcomp/parsing/GNFUtilities.scala
|
Scala
|
mit
| 9,827 |
object Test {
val builder: Builder_1[Int] = ???
builder.build(): Option[Int]
}
|
som-snytt/dotty
|
tests/pos-java-interop-separate/i8435/Test_2.scala
|
Scala
|
apache-2.0
| 83 |
package chess
import format.Uci
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
final case class Actor(
piece: Piece,
pos: Pos,
board: Board
) {
import Actor._
lazy val moves: List[Move] = kingSafetyMoveFilter(trustedMoves(board.variant.allowsCastling))
  /** The moves without taking king safety into account */
def trustedMoves(withCastle: Boolean): List[Move] = {
val moves = piece.role match {
case Pawn =>
pawnDir(pos) map { next =>
val fwd = Option(next) filterNot board.pieces.contains
def capture(horizontal: Direction): Option[Move] = {
for {
p <- horizontal(next)
if board.pieces.get(p).exists { _.color != color }
b <- board.taking(pos, p)
} yield move(p, b, Option(p))
} flatMap maybePromote
def enpassant(horizontal: Direction): Option[Move] =
for {
victimPos <- horizontal(pos).filter(_ => pos.rank == color.passablePawnRank)
_ <- board(victimPos).filter(v => v == !color - Pawn)
targetPos <- horizontal(next)
_ <- pawnDir(victimPos) flatMap pawnDir filter { vf =>
history.lastMove.exists {
case Uci.Move(orig, dest, _) => orig == vf && dest == victimPos
case _ => false
}
}
b <- board.taking(pos, targetPos, Option(victimPos))
} yield move(targetPos, b, Option(victimPos), enpassant = true)
def forward(p: Pos): Option[Move] =
board.move(pos, p) map { move(p, _) } flatMap maybePromote
def maybePromote(m: Move): Option[Move] =
if (m.dest.rank == m.color.promotablePawnRank)
(m.after promote m.dest) map { b2 =>
m.copy(after = b2, promotion = Option(Queen))
}
else Option(m)
List(
fwd flatMap forward,
for {
p <- fwd.filter(_ => board.variant.isUnmovedPawn(color, pos))
p2 <- pawnDir(p)
if !(board.pieces contains p2)
b <- board.move(pos, p2)
} yield move(p2, b),
capture(_.left),
capture(_.right),
enpassant(_.left),
enpassant(_.right)
).flatten
} getOrElse Nil
case Bishop => longRange(Bishop.dirs)
case Knight => shortRange(Knight.dirs)
case Rook => longRange(Rook.dirs)
case Queen => longRange(Queen.dirs)
case King if withCastle => shortRange(King.dirs) ::: castle
case King => shortRange(King.dirs)
}
// We apply the current game variant's effects if there are any so that we can accurately decide if the king would
// be in danger after the move was made.
if (board.variant.hasMoveEffects) moves map (_.applyVariantEffect) else moves
}
lazy val destinations: List[Pos] = moves map (_.dest)
def color = piece.color
def is(c: Color) = c == piece.color
def is(r: Role) = r == piece.role
def is(p: Piece) = p == piece
/*
* Filters out moves that would put the king in check.
*
* critical function. optimize for performance.
*/
def kingSafetyMoveFilter(ms: List[Move]): List[Move] = {
val filter: Piece => Boolean =
if ((piece is King) || check) _ => true else _.role.projection
val stableKingPos = if (piece is King) None else board kingPosOf color
ms filter { m =>
board.variant.kingSafety(m, filter, stableKingPos orElse (m.after kingPosOf color))
}
}
lazy val check: Boolean = board check color
private def castle: List[Move] = castleOn(KingSide) ::: castleOn(QueenSide)
def castleOn(side: Side): List[Move] =
(for {
// Check castling rights.
kingPos <- board kingPosOf color filter (_ => history canCastle color on side)
rookPos <- side.tripToRook(kingPos, board).lastOption
if board(rookPos) contains color.rook
if history.unmovedRooks.pos.contains(rookPos)
// Check impeded castling.
newKingPos = Pos(side.castledKingFile, kingPos.rank)
newRookPos = Pos(side.castledRookFile, rookPos.rank)
kingPath = kingPos <-> newKingPos
rookPath = rookPos <-> newRookPos
mustBeUnoccupied = (kingPath ++ rookPath).filter(_ != kingPos).filter(_ != rookPos)
if !mustBeUnoccupied.exists(board.pieces.contains)
// Check the king is not currently attacked, and none of the squares it
// passes *through* are attacked. We do this after removing the old king,
// to ensure the old king does not shield attacks. This is important in
// Atomic chess, where touching kings can shield attacks without being in
// check.
b1 <- board take kingPos
mustNotBeAttacked = kingPath.filter(_ != newKingPos || kingPos == newKingPos)
if !mustNotBeAttacked.exists(p => board.variant.kingThreatened(b1, !color, p))
      // Test the final king position separately, after the rook has been moved.
b2 <- b1 take rookPos
b3 <- b2.place(color.king, newKingPos)
b4 <- b3.place(color.rook, newRookPos)
if !board.variant.kingThreatened(b4, !color, newKingPos)
b5 = b4 updateHistory (_ withoutCastles color)
castle = Option((kingPos -> newKingPos, rookPos -> newRookPos))
} yield {
if (board.variant == chess.variant.Chess960) List(rookPos)
else List(rookPos, newKingPos).distinct
} map { move(_, b5, castle = castle) }) getOrElse Nil
private def shortRange(dirs: Directions): List[Move] =
dirs flatMap { _(pos) } flatMap { to =>
board.pieces.get(to) match {
case None => board.move(pos, to) map { move(to, _) }
case Some(piece) =>
if (piece is color) Nil
else board.taking(pos, to) map { move(to, _, Option(to)) }
}
}
private def longRange(dirs: Directions): List[Move] = {
val buf = new ArrayBuffer[Move]
@tailrec
def addAll(p: Pos, dir: Direction): Unit = {
dir(p) match {
case None => ()
case s @ Some(to) =>
board.pieces.get(to) match {
case None =>
board.move(pos, to).foreach { buf += move(to, _) }
addAll(to, dir)
case Some(piece) =>
if (piece.color != color) board.taking(pos, to) foreach {
buf += move(to, _, s)
}
}
}
}
dirs foreach { addAll(pos, _) }
buf.toList
}
private def pawnDir = pawnDirOf(color)
private def move(
dest: Pos,
after: Board,
capture: Option[Pos] = None,
castle: Option[((Pos, Pos), (Pos, Pos))] = None,
promotion: Option[PromotableRole] = None,
enpassant: Boolean = false
) =
Move(
piece = piece,
orig = pos,
dest = dest,
situationBefore = Situation(board, piece.color),
after = after,
capture = capture,
castle = castle,
promotion = promotion,
enpassant = enpassant
)
private def history = board.history
}
object Actor {
def pawnDirOf(color: Color): Direction = color.fold(_.up, _.down)
/**
* Determines the position one ahead of a pawn based on the color of the piece.
* White pawns move up and black pawns move down.
*/
def posAheadOfPawn(pos: Pos, color: Color): Option[Pos] = pawnDirOf(color)(pos)
/**
* Determines the squares that a pawn attacks based on the colour of the pawn.
*/
def pawnAttacks(pos: Pos, color: Color): List[Pos] =
color
.fold(
List(pos.upLeft, pos.upRight),
List(pos.downLeft, pos.downRight)
)
.flatten
}
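// e.g. pawnAttacks(Pos.E4, White) == List(Pos.D5, Pos.F5) (illustrative; assumes
// the square constants defined on this library's Pos object)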
|
niklasf/scalachess
|
src/main/scala/Actor.scala
|
Scala
|
mit
| 7,779 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.stacklang
class DupSuite extends BaseWordSuite {
def interpreter: Interpreter = Interpreter(StandardVocabulary.allWords)
def word: Word = StandardVocabulary.Dup
def shouldMatch: List[(String, List[Any])] = List(
"a" -> List("a", "a"),
"a,b" -> List("b", "b", "a")
)
def shouldNotMatch: List[String] = List("")
}
|
Netflix/atlas
|
atlas-core/src/test/scala/com/netflix/atlas/core/stacklang/DupSuite.scala
|
Scala
|
apache-2.0
| 970 |
package com.seanshubin.todo.application.console
object EntryPoint extends App {
new EntryPointWiring {
override def commandLineArguments: Seq[String] = args
}.runner.run()
}
|
SeanShubin/todo-application
|
console/src/main/scala/com/seanshubin/todo/application/console/EntryPoint.scala
|
Scala
|
unlicense
| 183 |
package sgwlpr
import com.eaio.uuid.UUID
import scala.collection.mutable.ListBuffer
import events._
import packets._
trait GameServerTrait[T <: Session] extends ServerTrait[T] {
val sessionsInTransit : ListBuffer[Session] = ListBuffer.empty
def migrateSession(oldSession: Session, newSession: T) : Boolean
override def handlePacket(event: Event) = event match {
case evt: unenc.ClientVerificationPacketEvent => {
val session = evt.session.asInstanceOf[T]
val packet = evt.packet
val providedKeys = List(packet.securityKey1, packet.securityKey2)
// XXX - what if we can't find an old session
val oldSession = sessionsInTransit.find(_.securityKeys == providedKeys)
if(oldSession == None || !migrateSession(oldSession.get, session))
session.drop
}
case e => super.handlePacket(e)
}
override def receive = {
case SessionTransit(session) => sessionsInTransit += session
case m => super.receive(m)
}
}
|
th0br0/sgwlpr
|
framework/src/main/scala/sgwlpr/GameServerTrait.scala
|
Scala
|
agpl-3.0
| 986 |
package skinny.oauth2.client.facebook
import skinny.oauth2.client._
import skinny.logging.LoggerProvider
import skinny.json.JSONStringOps
import scala.util.control.NonFatal
/**
* Facebook Graph API client.
*/
trait FacebookGraphAPI extends LoggerProvider {
def me(token: OAuth2Token): Option[FacebookUser] = {
try {
val response = OAuth2Client.resource {
BearerRequest("https://graph.facebook.com/v2.1/me").accessToken(token.accessToken)
}
logger.debug(s"Facebook authorized user: ${response.body}")
JSONStringOps.fromJSONString[FacebookUser](response.body).toOption
} catch {
case NonFatal(e) =>
logger.error(s"Failed to get current Facebook user information because ${e.getMessage}", e)
None
}
}
}
object FacebookGraphAPI extends FacebookGraphAPI
|
Kuchitama/skinny-framework
|
oauth2/src/main/scala/skinny/oauth2/client/facebook/FacebookGraphAPI.scala
|
Scala
|
mit
| 826 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze.modules.{Tanh, TanhBuilder}
abstract class Tanh_JVM
extends Tanh
with MapLayer_JVM[TanhBuilder] {
// ---------------------------------------------------------------------------
// Forward propagation related.
// ---------------------------------------------------------------------------
final override protected def doPredict(inPlaceAllowed: Boolean, input: Tensor)
: RealArrayTensor = {
val out = {
if (inPlaceAllowed) {
input.asOrToRealArrayTensor
}
else {
input.toRealArrayTensor
}
}
doPredict(out)
out
}
protected def doPredict(output: RealArrayTensor): Unit
final override protected def doPredictInv(output: Tensor)
: RealArrayTensor = {
val inp = output.toRealArrayTensor
doPredictInv(inp)
inp
}
protected def doPredictInv(input: RealArrayTensor): Unit
// ---------------------------------------------------------------------------
// Back propagation related.
// ---------------------------------------------------------------------------
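  // The input error below uses d tanh(x)/dx = 1 - tanh(x)^2; since the forward
  // output is y = tanh(x), the gradient is computed as dE/dx = dE/dy * (1 - y * y).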
final override protected def doDeriveInputError(output: Tensor,
error: Tensor)
: Tensor = {
val out = output.asOrToRealArrayTensor
val err = error.asOrToRealArrayTensor
err.transform(out,
(err, out) => err * (Real.one - out * out)
)
if (out ne output) {
out.close()
}
err
}
}
|
bashimao/ltudl
|
blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/Tanh_JVM.scala
|
Scala
|
apache-2.0
| 2,203 |
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.equalities
import org.scalactic.Equality
import scala.language.{higherKinds, implicitConversions}
/**
* An [[Equality]] that allows the comparison of values nested in [[java.util.List]]s using whatever Equality is
* in scope for the contained type.
*/
trait RecursiveJavaListEquality {
implicit def recursiveJavaListEquality[E, JLIST[e] <: java.util.List[e]](implicit eqE: Equality[E]): Equality[JLIST[E]] =
new Equality[JLIST[E]] {
import collection.JavaConverters._
val scalaEq = RecursiveSeqEquality.recursiveSeqEquality[E, Seq](eqE)
def areEqual(seqA: JLIST[E], other: Any): Boolean = (seqA, other) match {
case (jlistA: java.util.List[E], jlistB: java.util.List[_]) => scalaEq.areEqual(jlistA.asScala.toSeq, jlistB.asScala.toSeq)
case _ => false
}
}
}
object RecursiveJavaListEquality extends RecursiveJavaListEquality
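// Hedged usage sketch, not part of the original file: java.util.Lists compare
// element-wise via whatever Equality[E] is in scope. The tolerant Equality[Double]
// below is made up for the demonstration.
object RecursiveJavaListEqualityDemo extends App {
  import java.util.Arrays.asList
  import org.scalactic.Equality
  implicit val tolerantDoubleEq: Equality[Double] = new Equality[Double] {
    def areEqual(a: Double, b: Any): Boolean = b match {
      case d: Double => math.abs(a - d) < 0.01 // element equality with tolerance
      case _         => false
    }
  }
  val listEq = RecursiveJavaListEquality.recursiveJavaListEquality[Double, java.util.List]
  println(listEq.areEqual(asList(1.0, 2.0), asList(1.001, 2.001))) // true under tolerance
}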
|
SRGOM/scalatest
|
scalactic/src/main/scala/org/scalactic/equalities/RecursiveJavaListEquality.scala
|
Scala
|
apache-2.0
| 1,506 |
/**
* Copyright 2015 Otto (GmbH & Co KG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package schedoscope.example.osm.datahub
import org.schedoscope.dsl.Parameter.p
import org.schedoscope.dsl.View
import org.schedoscope.dsl.storageformats.Parquet
import org.schedoscope.dsl.transformations.HiveTransformation
import org.schedoscope.dsl.transformations.HiveTransformation.{ insertInto, queryFromResource }
import org.schedoscope.dsl.views.DateParameterizationUtils.allMonths
import org.schedoscope.dsl.views.{ Id, JobMetadata }
import schedoscope.example.osm.Globals._
import schedoscope.example.osm.processed.Nodes
case class Restaurants() extends View
with Id
with JobMetadata {
val restaurantName = fieldOf[String]("The name of a restaurant")
val restaurantType = fieldOf[String]("The cuisine of a restaurant, as given by OSM.")
val area = fieldOf[String]("A geoencoded area string")
dependsOn { () =>
for ((year, month) <- allMonths())
yield Nodes(p(year), p(month))
}
transformVia { () =>
HiveTransformation(
insertInto(
this,
queryFromResource("hiveql/datahub/insert_restaurants.sql"),
settings = Map("parquet.compression" -> "GZIP")))
.configureWith(defaultHiveQlParameters(this))
}
comment("View of restaurants")
storedAs(Parquet())
}
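// Hedged usage sketch, not part of the original file: Schedoscope views are case
// classes, so they can be instantiated directly for inspection or tests; only the
// fields declared above are referenced here, anything else would be an assumption.
object RestaurantsDemo {
  val view = Restaurants()
  // view.restaurantName, view.restaurantType and view.area are the declared fields;
  // dependencies and the Hive transformation are wired by the constructor body.
}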
|
hpzorn/schedoscope
|
schedoscope-tutorial/src/main/scala/schedoscope/example/osm/datahub/Restaurants.scala
|
Scala
|
apache-2.0
| 1,838 |
object Test extends App {
class C(val s: Array[Int]) extends AnyVal {
override def equals(that: Any) = that match {
case that: C => s.toList == that.s.toList
case _ => false
}
}
def test1() = {
val c = new C(Array(1, 2, 3))
assert(c `equals` new C(Array(1, 2, 3)))
assert(c == (new C(Array(1, 2, 3)): Any))
assert(c == new C(Array(1, 2, 3)))
assert(new C(Array(1, 2, 3)) == c)
assert((new C(Array(1, 2, 3)): Any) == c)
assert(new C(Array(1, 2, 3)) == c)
}
trait Eql extends Any {
def deep: Any
override def equals(that: Any) = that match {
case that: D => deep == that.s.toList
case _ => false
}
}
class D(val s: Array[Int]) extends AnyVal with Eql {
def deep = s.toList
}
def test2() = {
val c = new D(Array(1, 2, 3))
assert(c `equals` new D(Array(1, 2, 3)))
assert(c == (new D(Array(1, 2, 3)): Any))
assert(c == new D(Array(1, 2, 3)))
assert(new D(Array(1, 2, 3)) == c)
assert((new D(Array(1, 2, 3)): Any) == c)
assert(new D(Array(1, 2, 3)) == c)
}
test1()
test2()
}
|
som-snytt/dotty
|
tests/run/vc-equals.scala
|
Scala
|
apache-2.0
| 1,108 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalString, Input}
case class B156(value: Option[String]) extends CtBoxIdentifier("Full Name of Nominee") with CtOptionalString with Input
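// Hedged usage sketch, not part of the original file: the box carries an
// Option[String], so populating and clearing the nominee name looks like this
// (the name itself is made up):
object B156Demo {
  val named = B156(Some("Jane Doe")) // nominee supplied
  val empty = B156(None)             // no nominee
}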
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/v2/B156.scala
|
Scala
|
apache-2.0
| 828 |
package rewriting.rules
import ir.{ArrayType, Type}
import ir.ast._
import opencl.ir.pattern.ReduceSeq
import rewriting.utils.Utils
object InterchangeRules {
val mapMapTransposeZipOutside = Rule("Map(fun(x => Map(fun(a => )) $ Get(n, ) $ Zip(..., A, ...) => " +
"Transpose() o Map(fun(a => Map() $ Zip(..., a, ...)) o Transpose() $ A", {
case FunCall(Map(Lambda(outerLambdaParam,
FunCall(Map(Lambda(innerLambdaParam,
expr
)), FunCall(Get(n), getParam))
)), FunCall(Zip(_), zipArgs@_*))
if getParam eq outerLambdaParam.head
=>
// Find all Get patterns that refer to an element from the zipped array
// and have to be replaced in expr
val gets = Utils.findGets(expr, outerLambdaParam.head)
// Find 'A'
val newArg = zipArgs(n)
// Construct new Get patterns and replace them for the old ones
val newParam = Param()
val newGets = gets zip gets.map(get => Get(newParam, get.f.asInstanceOf[Get].n))
val newExpr = newGets.foldRight(expr)((get, e) => Expr.replace(e, get._1, get._2))
// Create a get pattern for an element 'a'
val finalNewExpr = Expr.replace(newExpr, innerLambdaParam.head, Get(n)(newParam))
// Replace 'a' with a new parameter to create the new arguments for zip
val secondNewParam = Param()
val newZipArgs = zipArgs.updated(n, secondNewParam)
TransposeW() o Map(Lambda(Array(secondNewParam),
Map(Lambda(Array(newParam), finalNewExpr)) $ Zip(newZipArgs: _*)
)) o Transpose() $ newArg
})
val transposeBothSides = Rule("Map(fun(a => Map(f) $ a)) $ A => " +
"Transpose() o Map(fun(a =>Map(f) $ a)) o Transpose() $ A ", {
case FunCall(Map(f@Lambda(param, FunCall(Map(_), a))), arg)
if param.head eq a
=>
TransposeW() o Map(f) o Transpose() $ arg
})
val mapMapTransposeZipInside = Rule("Map(fun(a => Map() $ Zip(..., ... $ a, ...)) $ A => " +
"Transpose() o Map(Map(fun(a => ))) $ Zip(..., Transpose() o Map(...) $ A, ...) ", {
case FunCall(Map(Lambda(Array(outerLambdaParam),
FunCall(Map(Lambda(Array(innerLambdaParam),
expr
)), FunCall(Zip(_), zipArgs@_*))
)), arg)
if zipArgs.count(Utils.getFinalArg(_) eq outerLambdaParam) >= 1
=>
// Find all Get patterns that refer to an element from the zipped array
// and have to be replaced in expr
val gets = Utils.findGets(expr, innerLambdaParam)
// Find which Get pattern corresponds to the component containing an element of 'a'
val zipToReplace = zipArgs.zipWithIndex.filter(e =>
Utils.getFinalArg(e._1) eq outerLambdaParam
)
// Create the new Get patterns with a new parameter
val newParam = Param()
val getPairs = gets zip gets.map(get => Get(newParam, get.f.asInstanceOf[Get].n))
// Separate the Get pattern containing an element of 'a', as it will now refer
// to the component containing an element of 'A'
val (one, two) = getPairs.partition(x =>
zipToReplace.map(_._2).contains(x._1.f.asInstanceOf[Get].n)
)
// Replace most of the old Get patterns with new ones
val newExpr = two.foldRight(expr)((get, e) => Expr.replace(e, get._1, get._2))
// Create a new parameter for an element of 'a' and replace for the Get referring
// an element of 'a'
val secondNewParam = Param()
val finalNewExpr = one.zipWithIndex.foldLeft(newExpr)((expr, p) => {
val id = p._2 //zipToReplace.find(_._1 eq p._1).get._2
val newThing = if (one.size == 1) secondNewParam else Get(secondNewParam, id)
Expr.replace(expr, p._1._1, newThing)
})
val elems = zipToReplace.map(pair =>
if (!(pair._1 eq outerLambdaParam)) {
// Isolate any splits/joins/transposes/computation inside the zip
val thirdNewParam = Param()
val replace = Expr.replace(pair._1, outerLambdaParam, thirdNewParam)
// And include them before the transpose
val map = Map(Lambda(Array(thirdNewParam), replace))
Transpose() o map $ arg
} else {
Transpose() $ arg
})
// Create the arguments for the zip, replacing '... o a' with 'Transpose() o Map(...) $ A'
val newZipArgs = (zipToReplace, elems).zipped.foldLeft(zipArgs)((arguments, y) => {
val elem = y._2
val id = y._1._2
arguments.updated(id, elem)
})
val newArgs = if (one.size == 1) one.map(_._2).head else Zip(one.map(_._2):_*)
// Construct the final expression
val lambda = Lambda(Array(secondNewParam), finalNewExpr)
TransposeW() o Map(Lambda(Array(newParam), Map(lambda) $ newArgs)) $ Zip(newZipArgs: _*)
})
val mapMapInterchange = Rule("Map(fun(a => Map(fun( b => ... ) $ B) $ A => " +
"Transpose() o Map(fun(b => Map(fun( a => ... ) $ A) $ B", {
case FunCall(Map(Lambda(a, FunCall(Map(Lambda(b, expr)), bArg))), aArg)
if !bArg.contains({ case e if e eq a.head => })
=>
TransposeW() o Map(Lambda(b, FunCall(Map(Lambda(a, expr)), aArg))) $ bArg
})
val mapReduceInterchange = Rule("Map(Reduce(f)) => Transpose() o Reduce(Map(f)) o Transpose()", {
case FunCall(Map(Lambda(lambdaParams,
FunCall(r@AbstractPartRed(Lambda(innerParams, expr)), init: Value, arg)
)), mapArg)
if lambdaParams.head eq arg
=>
val newInit = Value(init.value, ArrayType(init.t, Type.getLength(mapArg.t)))
val newMapParam = Param()
val newExpr = innerParams.zipWithIndex.foldLeft(expr)((e, pair) =>
Expr.replace(e, pair._1, Get(pair._2)(newMapParam)))
val lambda = fun((acc, c) => Map(Lambda(Array(newMapParam), newExpr)) $ Zip(acc, c))
TransposeW()( r.copy(lambda)(newInit, Transpose() $ mapArg))
})
val mapReduceInterchangeWithZipOutside =
Rule("Map(fun(x => Reduce(f, Get(x, 0)) $ Get(x, 1) ) $ Zip(a, b) => " +
"Transpose() o Reduce(fun((acc, y) => Map(f) $ Zip(acc, y) ), a ) o Transpose() $ b", {
case FunCall(Map(Lambda(lambdaParams,
FunCall(r@AbstractPartRed(Lambda(innerParams, expr)), FunCall(Get(i), a1), FunCall(Get(j), a2))
)), FunCall(Zip(2), zipArgs@_*))
if (lambdaParams.head eq a1) && (lambdaParams.head eq a2)
=>
val newInit = zipArgs(i)
val newArg = zipArgs(j)
val acc = Param()
val next = Param()
val mapParam = Param()
val interimExpr = Expr.replace(expr, innerParams(i), Get(i)(mapParam))
val finalExpr = Expr.replace(interimExpr, innerParams(j), Get(j)(mapParam))
Transpose() ( r.copy(Lambda(Array(acc, next),
Map(Lambda(Array(mapParam), finalExpr)) $ Zip(acc, next)
))(newInit, Transpose() $ newArg))
})
// TODO: Should use Reduce instead of PartRed, as PartRed can return something that is
// TODO: not length one, and the output type can break. Will need to check some
// TODO: other way that both fs are the same.
val mapReducePartialReduce =
Rule("Map(Reduce(f, init) o Join() o Map(PartRed(f, init2)) ) => " +
"Transpose() o Reduce((acc, a) => Join() o Map(x => PartRed(f, Get(x, 0)) $ Get(x, 1)) $ Zip(acc, a) , Array(init)) o Transpose()", {
case FunCall(Map(Lambda(p1,
FunCall(ReduceSeq(f1), init1: Value, FunCall(Join(), FunCall(Map(Lambda(p2,
FunCall(PartRed(_), init2: Value, a2))), a1)))
)), arg)
if (p1.head eq a1) && (p2.head eq a2) && init1 == init2
=>
val newInit = Value(init2.value, ArrayType(init2.t, Type.getLength(arg.t)))
TransposeW() o ReduceSeq(fun((acc, a) =>
Join() o Map(fun(x =>
PartRed(f1, Get(x, 0)) $ Get(x,1)
)) $ Zip(acc, a)
), newInit) o Transpose() $ arg
})
}
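// Hedged illustration, not part of the repo: the algebraic identity behind
// mapMapInterchange above, checked on plain Scala collections. Swapping the two
// maps and transposing afterwards yields the same values.
object MapMapInterchangeDemo extends App {
  val as = Seq(1, 2, 3)
  val bs = Seq(10, 20)
  def f(a: Int, b: Int): Int = a * b
  val original     = as.map(a => bs.map(b => f(a, b)))            // Map(Map(f) $ B) $ A
  val interchanged = bs.map(b => as.map(a => f(a, b))).transpose  // Transpose o Map(Map(f) $ A) $ B
  assert(original == interchanged)
  println(interchanged)
}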
|
lift-project/lift
|
src/main/rewriting/rules/InterchangeRules.scala
|
Scala
|
mit
| 7,776 |
package mesosphere.marathon.upgrade
import java.net.URL
import java.util.UUID
import com.wix.accord._
import com.wix.accord.dsl._
import mesosphere.marathon.api.v2.Validation._
import mesosphere.marathon.storage.repository.legacy.store.{ CompressionConf, ZKData }
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.state._
import mesosphere.marathon.storage.TwitterZk
import mesosphere.marathon.{ MarathonConf, Protos }
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
import scala.collection.SortedMap
import scala.collection.immutable.Seq
sealed trait DeploymentAction {
def app: AppDefinition
}
// application has not been started before
case class StartApplication(app: AppDefinition, scaleTo: Int) extends DeploymentAction
// application is started, but the instance count should be changed
case class ScaleApplication(
app: AppDefinition,
scaleTo: Int,
sentencedToDeath: Option[Iterable[Task]] = None) extends DeploymentAction
// application is started, but shall be completely stopped
case class StopApplication(app: AppDefinition) extends DeploymentAction
// application is there but should be replaced
case class RestartApplication(app: AppDefinition) extends DeploymentAction
// resolve and store artifacts for given app
case class ResolveArtifacts(app: AppDefinition, url2Path: Map[URL, String]) extends DeploymentAction
/**
* One step in a deployment plan.
* The contained actions may be executed in parallel.
*
* @param actions the actions of this step that maybe executed in parallel
*/
case class DeploymentStep(actions: Seq[DeploymentAction]) {
def +(step: DeploymentStep): DeploymentStep = DeploymentStep(actions ++ step.actions)
def nonEmpty(): Boolean = actions.nonEmpty
}
/**
* A deployment plan consists of the [[mesosphere.marathon.upgrade.DeploymentStep]]s necessary to
* change the group state from original to target.
*
* The steps are executed sequentially after each other. The actions within a
* step maybe executed in parallel.
*
* See `mesosphere.marathon.upgrade.DeploymentPlan.appsGroupedByLongestPath` to
* understand how we can guarantee that all dependencies for a step are fulfilled
* by prior steps.
*/
case class DeploymentPlan(
id: String,
original: Group,
target: Group,
steps: Seq[DeploymentStep],
version: Timestamp) extends MarathonState[Protos.DeploymentPlanDefinition, DeploymentPlan] {
/**
* Reverts this plan by applying the reverse changes to the given Group.
*/
def revert(group: Group): Group = DeploymentPlanReverter.revert(original, target)(group)
lazy val isEmpty: Boolean = steps.isEmpty
lazy val nonEmpty: Boolean = !isEmpty
lazy val affectedApplications: Set[AppDefinition] = steps.flatMap(_.actions.map(_.app)).toSet
/** @return all ids of apps which are referenced in any deployment actions */
lazy val affectedApplicationIds: Set[PathId] = steps.flatMap(_.actions.map(_.app.id)).toSet
def isAffectedBy(other: DeploymentPlan): Boolean =
// FIXME: check for group change conflicts?
affectedApplicationIds.intersect(other.affectedApplicationIds).nonEmpty
lazy val createdOrUpdatedApps: Seq[AppDefinition] = {
target.transitiveApps.toIndexedSeq.filter(app => affectedApplicationIds(app.id))
}
lazy val deletedApps: Seq[PathId] = {
original.transitiveAppIds.diff(target.transitiveAppIds).toVector
}
override def toString: String = {
def appString(app: AppDefinition): String = {
val cmdString = app.cmd.fold("")(cmd => ", cmd=\"" + cmd + "\"")
val argsString = app.args.fold("")(args => ", args=\"" + args.mkString(" ") + "\"")
val maybeDockerImage: Option[String] = app.container.flatMap(_.docker().map(_.image))
val dockerImageString = maybeDockerImage.fold("")(image => ", image=\"" + image + "\"")
s"App(${app.id}$dockerImageString$cmdString$argsString))"
}
def actionString(a: DeploymentAction): String = a match {
case StartApplication(app, scale) => s"Start(${appString(app)}, instances=$scale)"
case StopApplication(app) => s"Stop(${appString(app)})"
case ScaleApplication(app, scale, toKill) =>
val killTasksString =
toKill.filter(_.nonEmpty).map(", killTasks=" + _.map(_.taskId.idString).mkString(",")).getOrElse("")
s"Scale(${appString(app)}, instances=$scale$killTasksString)"
case RestartApplication(app) => s"Restart(${appString(app)})"
case ResolveArtifacts(app, urls) => s"Resolve(${appString(app)}, $urls)"
}
val stepString =
if (steps.nonEmpty) {
steps
.map { _.actions.map(actionString).mkString(" * ", "\n * ", "") }
.zipWithIndex
.map { case (stepsString, index) => s"step ${index + 1}:\n$stepsString" }
.mkString("\n", "\n", "")
} else " NO STEPS"
s"DeploymentPlan $version$stepString\n"
}
override def mergeFromProto(bytes: Array[Byte]): DeploymentPlan =
mergeFromProto(Protos.DeploymentPlanDefinition.parseFrom(bytes))
override def mergeFromProto(msg: Protos.DeploymentPlanDefinition): DeploymentPlan = DeploymentPlan(
original = Group.fromProto(msg.getDeprecatedOriginal),
target = Group.fromProto(msg.getDeprecatedTarget),
version = Timestamp(msg.getTimestamp),
id = Some(msg.getId)
)
override def toProto: Protos.DeploymentPlanDefinition =
Protos.DeploymentPlanDefinition
.newBuilder
.setId(id)
.setDeprecatedOriginal(original.toProto)
.setDeprecatedTarget(target.toProto)
.setTimestamp(version.toString)
.build()
}
object DeploymentPlan {
private val log = LoggerFactory.getLogger(getClass)
def empty: DeploymentPlan =
DeploymentPlan(UUID.randomUUID().toString, Group.empty, Group.empty, Nil, Timestamp.now())
def fromProto(message: Protos.DeploymentPlanDefinition): DeploymentPlan = empty.mergeFromProto(message)
/**
* Returns a sorted map where each value is a subset of the supplied group's
* apps and for all members of each subset, the longest path in the group's
* dependency graph starting at that member is the same size. The result
* map is sorted by its keys, which are the lengths of the longest path
* starting at the value set's elements.
*
* Rationale:
*
* #: AppDefinition → ℤ is an equivalence relation on AppDefinition where
* the members of each equivalence class can be concurrently deployed.
*
* This follows naturally:
*
* The dependency graph is guaranteed to be free of cycles.
* By definition for all α, β in some class X, # α = # β.
* Choose any two apps α and β in a class X.
* Suppose α transitively depends on β.
* Then # α must be greater than # β.
* Which is absurd.
*
* Furthermore, for any two apps α in class X and β in a class Y, X ≠ Y
* where # α is less than # β: α does not transitively depend on β, by
* similar logic.
*/
private[upgrade] def appsGroupedByLongestPath(
group: Group): SortedMap[Int, Set[AppDefinition]] = {
import org.jgrapht.DirectedGraph
import org.jgrapht.graph.DefaultEdge
def longestPathFromVertex[V](g: DirectedGraph[V, DefaultEdge], vertex: V): Seq[V] = {
val outgoingEdges: Set[DefaultEdge] =
if (g.containsVertex(vertex)) g.outgoingEdgesOf(vertex).asScala.toSet
else Set.empty[DefaultEdge]
if (outgoingEdges.isEmpty)
Seq(vertex)
else
outgoingEdges.map { e =>
vertex +: longestPathFromVertex(g, g.getEdgeTarget(e))
}.maxBy(_.length)
}
val unsortedEquivalenceClasses = group.transitiveApps.groupBy { app =>
longestPathFromVertex(group.dependencyGraph, app).length
}
SortedMap(unsortedEquivalenceClasses.toSeq: _*)
}
/**
* Returns a sequence of deployment steps, the order of which is derived
* from the topology of the target group's dependency graph.
*/
def dependencyOrderedSteps(original: Group, target: Group,
toKill: Map[PathId, Iterable[Task]]): Seq[DeploymentStep] = {
val originalApps: Map[PathId, AppDefinition] = original.transitiveAppsById
val appsByLongestPath: SortedMap[Int, Set[AppDefinition]] = appsGroupedByLongestPath(target)
appsByLongestPath.valuesIterator.map { (equivalenceClass: Set[AppDefinition]) =>
val actions: Set[DeploymentAction] = equivalenceClass.flatMap { (newApp: AppDefinition) =>
originalApps.get(newApp.id) match {
// New app.
case None =>
Some(ScaleApplication(newApp, newApp.instances))
// Scale-only change.
case Some(oldApp) if oldApp.isOnlyScaleChange(newApp) =>
Some(ScaleApplication(newApp, newApp.instances, toKill.get(newApp.id)))
// Update or restart an existing app.
case Some(oldApp) if oldApp.needsRestart(newApp) =>
Some(RestartApplication(newApp))
// Other cases require no action.
case _ =>
None
}
}
DeploymentStep(actions.to[Seq])
}.to[Seq]
}
/**
* @param original the root group before the deployment
* @param target the root group after the deployment
* @param resolveArtifacts artifacts to resolve
* @param version the version to use for new AppDefinitions (should be very close to now)
* @param toKill specific tasks that should be killed
* @return The deployment plan containing the steps necessary to get from the original to the target group definition
*/
def apply(
original: Group,
target: Group,
resolveArtifacts: Seq[ResolveArtifacts] = Seq.empty,
version: Timestamp = Timestamp.now(),
toKill: Map[PathId, Iterable[Task]] = Map.empty,
id: Option[String] = None): DeploymentPlan = {
// Lookup maps for original and target apps.
val originalApps: Map[PathId, AppDefinition] = original.transitiveAppsById
val targetApps: Map[PathId, AppDefinition] = target.transitiveAppsById
// A collection of deployment steps for this plan.
val steps = Seq.newBuilder[DeploymentStep]
// 0. Resolve artifacts.
steps += DeploymentStep(resolveArtifacts)
// 1. Destroy apps that do not exist in the target.
steps += DeploymentStep(
(originalApps -- targetApps.keys).valuesIterator.map { oldApp =>
StopApplication(oldApp)
}.to[Seq]
)
// 2. Start apps that do not exist in the original, requiring only 0
// instances. These are scaled as needed in the dependency-ordered
// steps that follow.
steps += DeploymentStep(
(targetApps -- originalApps.keys).valuesIterator.map { newApp =>
StartApplication(newApp, 0)
}.to[Seq]
)
// 3. For each app in each dependency class,
//
// A. If this app is new, scale to the target number of instances.
//
// B. If this is a scale change only, scale to the target number of
// instances.
//
// C. Otherwise, if this is an app update:
// i. Scale down to the target minimumHealthCapacity fraction of
// the old app or the new app, whichever is less.
// ii. Restart the app, up to the new target number of instances.
//
steps ++= dependencyOrderedSteps(original, target, toKill)
// Build the result.
val result = DeploymentPlan(
id.getOrElse(UUID.randomUUID().toString),
original,
target,
steps.result().filter(_.actions.nonEmpty),
version
)
result
}
def deploymentPlanValidator(conf: MarathonConf): Validator[DeploymentPlan] = {
val maxSize = conf.zooKeeperMaxNodeSize()
val maxSizeError = s"""The way we persist data in ZooKeeper would exceed the maximum ZK node size ($maxSize bytes).
|You can adjust this value via --zk_max_node_size, but make sure this value is compatible with
|your ZooKeeper ensemble!
|See: http://zookeeper.apache.org/doc/r3.3.1/zookeeperAdmin.html#Unsafe+Options""".stripMargin
val notBeTooBig = isTrue[DeploymentPlan](maxSizeError) { plan =>
if (conf.internalStoreBackend() == TwitterZk.StoreName) {
val compressionConf = CompressionConf(conf.zooKeeperCompressionEnabled(), conf.zooKeeperCompressionThreshold())
val zkDataProto = ZKData(s"deployment-${plan.id}", UUID.fromString(plan.id), plan.toProto.toByteArray)
.toProto(compressionConf)
zkDataProto.toByteArray.length < maxSize
} else {
// we could try serializing the proto then gzip compressing it for the new ZK backend, but should we?
true
}
}
validator[DeploymentPlan] { plan =>
plan.createdOrUpdatedApps as "app" is every(valid(AppDefinition.updateIsValid(plan.original)))
plan should notBeTooBig
}
}
}
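// Hedged illustration, not part of the repo: the "group apps by longest dependency
// path" idea behind appsGroupedByLongestPath above, on a toy adjacency-list graph
// instead of jgrapht. Apps in the same bucket have no dependency on one another,
// so each bucket's actions can run concurrently.
object LongestPathGroupingDemo extends App {
  val deps: Map[String, Set[String]] = Map(
    "db" -> Set.empty, "cache" -> Set.empty, "api" -> Set("db"), "web" -> Set("api"))
  def longestPath(app: String): Int =
    if (deps(app).isEmpty) 1 else 1 + deps(app).map(longestPath).max
  val buckets = scala.collection.immutable.SortedMap(deps.keySet.groupBy(longestPath).toSeq: _*)
  println(buckets) // Map(1 -> Set(db, cache), 2 -> Set(api), 3 -> Set(web))
}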
|
timcharper/marathon
|
src/main/scala/mesosphere/marathon/upgrade/DeploymentPlan.scala
|
Scala
|
apache-2.0
| 12,888 |
package net.technowizardry.xmpp.messages
import net.technowizardry.XMLWriter
import net.technowizardry.Base64
import java.nio.ByteBuffer
class SaslPlainAuthMessage(username : String, password : String) extends SaslAuthMessage {
def WriteAuthBody(writer : XMLWriter) {
// RFC 4616: the PLAIN message is NUL authcid NUL passwd, UTF-8 encoded
val busername = username.getBytes("UTF-8")
val bpassword = password.getBytes("UTF-8")
val buffer = Array[Byte](0) ++ busername ++ Array[Byte](0) ++ bpassword
val base64d = Base64.Encode(buffer)
writer.WriteText(base64d)
}
def GetMechanismName() = "PLAIN"
}
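// Hedged illustration, not part of the repo: SASL PLAIN (RFC 4616) transmits
// base64("\0" + username + "\0" + password). The same payload built with the
// JDK's Base64 instead of this project's helper, runnable stand-alone:
object SaslPlainDemo extends App {
  val (username, password) = ("juliet", "s3cr3t") // made-up credentials
  val payload = Array[Byte](0) ++ username.getBytes("UTF-8") ++
    Array[Byte](0) ++ password.getBytes("UTF-8")
  println(java.util.Base64.getEncoder.encodeToString(payload)) // AGp1bGlldABzM2NyM3Q=
}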
|
ajacques/XmppClient
|
SeniorProject/src/net/technowizardry/xmpp/messages/SaslPlainAuthMessage.scala
|
Scala
|
mit
| 540 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controllers.services
import org.scalatestplus.play.{OneAppPerSuite, PlaySpec}
import play.api.test.FakeRequest
import play.api.test.Helpers._
class PriceListControllerSpec extends PlaySpec with OneAppPerSuite {
class Setup {
object TestController extends PriceListController
}
"Sending a GET to the PriceListController" should {
"return a 200" in new Setup {
val result = TestController.show()(FakeRequest())
status(result) mustBe OK
}
}
}
|
chrisjwwalker/cjww-diagnostics
|
test/controllers/services/PriceListControllerSpec.scala
|
Scala
|
apache-2.0
| 1,213 |
package org.scalaide.refactoring.internal
import scala.tools.refactoring.MultiStageRefactoring
import scala.tools.refactoring.analysis.GlobalIndexes
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.ltk.core.refactoring.RefactoringStatus
import org.scalaide.core.IScalaProject
import org.scalaide.core.internal.jdt.model.ScalaSourceFile
import org.scalaide.core.internal.statistics.Features.Feature
/**
* Helper trait that adds an index variable to a refactoring.
* Needed to be able to factor out common functionality of refactorings
* that need an index of the full project.
* @see IndexedIdeRefactoring
*/
trait Indexed {
this: GlobalIndexes =>
var index = EmptyIndex
}
/**
* Abstract ScalaIdeRefactoring for refactorings that need an index of the full project.
*/
abstract class IndexedIdeRefactoring(feature: Feature, refactoringName: String, start: Int, end: Int, sourcefile: ScalaSourceFile)
extends ScalaIdeRefactoring(feature, refactoringName, sourcefile, start, end) with FullProjectIndex {
val project: IScalaProject = sourcefile.scalaProject
val refactoring: MultiStageRefactoring with GlobalIndexes with Indexed
/**
* A cleanup handler, will later be set by the refactoring
* to remove all loaded compilation units from the compiler.
*/
var cleanup: () => Unit = () => ()
override def checkInitialConditions(pm: IProgressMonitor): RefactoringStatus = {
val status = super.checkInitialConditions(pm)
if (!status.hasError) {
val (index, cleanupIndex) = buildFullProjectIndex(pm, indexHints)
refactoring.index = index
// will be called after the refactoring has finished
cleanup = cleanupIndex
}
if (pm.isCanceled) {
status.addWarning("Indexing was cancelled, aborting refactoring.")
}
status
}
/**
* Provide hints for index building.
* If no hints are provided, the full project index is built,
* this can slow down the refactoring considerably.
*/
def indexHints(): List[String] = Nil
override def createChange(pm: IProgressMonitor) = {
val change = super.createChange(pm)
cleanup()
change
}
}
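// Hedged sketch, not part of the original file: a concrete refactoring can narrow
// index construction by overriding indexHints with identifiers it knows it touches;
// the hint values below are hypothetical.
//
//   override def indexHints(): List[String] = List("renamedMethod", "OwningClass")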
|
dragos/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/refactoring/internal/IndexedRefactorings.scala
|
Scala
|
bsd-3-clause
| 2,158 |