| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| string (lengths 5–1M) | string (lengths 5–109) | string (lengths 6–208) | string (1 class) | string (15 classes) | int64 (5–1M) |
/*
* Copyright (c) 2014 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 12/24/14 6:58 PM
*/
package base.entity.perm
import base.entity.test.EntityBaseSuite
/**
* Essentially a gold file for permissions groups -- asserts that every group has exactly and only the
* permissions explicitly defined in this file
* @author rconrad
*/
class PermSetGroupsTest extends EntityBaseSuite {
import base.entity.perm.PermSetGroups._
import base.entity.perm.Perms._
private def assertGroup(group: PermSetGroup, expected: Set[Perm]): Unit = {
expected.foreach { perm =>
assert(group.contains(perm), s"actual perms is missing expected perm $perm")
}
group.permSet.set.foreach { perm =>
assert(expected.contains(perm), s"expected perms is missing actual perm $perm")
}
}
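// Checking both directions (every expected perm is granted, every granted perm
// is expected) is equivalent to asserting group.permSet.set == expected, but it
// yields a precise failure message naming the offending perm.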
test("public") {
assertGroup(public, Set(
PROVIDER_READ))
}
test("siteUser") {
assertGroup(siteUser, Set(
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
IPN_SEND,
PROVIDER_READ,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH))
}
test("sitePublishableKey") {
assertGroup(sitePublishableKey, Set(
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
PROVIDER_READ))
}
test("siteSecretKey") {
assertGroup(siteSecretKey, Set(
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
IPN_SEND,
PROVIDER_READ,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH))
}
test("merchantUser") {
assertGroup(merchantUser, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_READ,
MERCHANT_UPDATE,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("merchantUserWithSiteContext") {
assertGroup(merchantUserWithSiteContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
IPN_SEND,
MERCHANT_READ,
MERCHANT_UPDATE,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("merchantPublishableKey") {
assertGroup(merchantPublishableKey, Set(
AUTH_READ,
PROVIDER_READ))
}
test("merchantPublishableKeyWithSiteContext") {
assertGroup(merchantPublishableKeyWithSiteContext, Set(
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
PROVIDER_READ))
}
test("merchantSecretKey") {
assertGroup(merchantSecretKey, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_READ,
MERCHANT_UPDATE,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ))
}
test("merchantSecretKeyWithSiteContext") {
assertGroup(merchantSecretKeyWithSiteContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
IPN_SEND,
MERCHANT_READ,
MERCHANT_UPDATE,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ))
}
test("providerUser") {
assertGroup(providerUser, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_CREATE,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("providerUserWithMerchantContext") {
assertGroup(providerUserWithMerchantContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("providerUserWithSiteContext") {
assertGroup(providerUserWithSiteContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
IPN_SEND,
PROVIDER_READ,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("providerPublishableKeyWithMerchantContext") {
assertGroup(providerPublishableKeyWithMerchantContext, Set(
AUTH_READ,
PROVIDER_READ))
}
test("providerPublishableKeyWithSiteContext") {
assertGroup(providerPublishableKeyWithSiteContext, Set(
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
PROVIDER_READ))
}
test("providerSecretKey") {
assertGroup(providerSecretKey, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_CREATE,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
PROVIDER_READ,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("providerSecretKeyWithMerchantContext") {
assertGroup(providerSecretKeyWithMerchantContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
SITE_CREATE,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
PROVIDER_READ,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
test("providerSecretKeyWithSiteContext") {
assertGroup(providerSecretKeyWithSiteContext, Set(
API_KEY_READ,
API_KEY_REFRESH,
AUTH_READ,
INVOICE_CREATE,
INVOICE_UPDATE,
INVOICE_READ,
INVOICE_SEARCH,
IPN_SEND,
MERCHANT_UPDATE,
MERCHANT_READ,
MERCHANT_SEARCH,
PROVIDER_READ,
SITE_READ,
SITE_UPDATE,
SITE_SEARCH,
USER_CREATE,
USER_UPDATE,
USER_READ,
USER_READ_ME))
}
}
| robconrad/base-api | project-entity/src/test/scala/base/entity/perm/PermSetGroupsTest.scala | Scala | mit | 6,909 |
package com.temportalist.chalked.client
import java.util
import com.temportalist.chalked.client.gui.GuiChalkDust
import com.temportalist.chalked.client.render.BlockChalkDustRenderer
import com.temportalist.chalked.common.tile.TEChalkDust
import com.temportalist.chalked.common.{Chalked, CommonProxy}
import cpw.mods.fml.client.IModGuiFactory
import cpw.mods.fml.client.IModGuiFactory.{RuntimeOptionCategoryElement, RuntimeOptionGuiHandler}
import cpw.mods.fml.client.registry.RenderingRegistry
import net.minecraft.client.Minecraft
import net.minecraft.client.gui.GuiScreen
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.tileentity.TileEntity
import net.minecraft.world.World
/**
*
*
* @author TheTemportalist
*/
class ClientProxy() extends CommonProxy with IModGuiFactory {
override def registerRender(): Unit = {
RenderingRegistry.registerBlockHandler(BlockChalkDustRenderer)
}
override def getClientElement(ID: Int, player: EntityPlayer, world: World, x: Int, y: Int,
z: Int, tileEntity: TileEntity): AnyRef = {
if (ID == Chalked.guiChalkDust && tileEntity.isInstanceOf[TEChalkDust]) {
return new GuiChalkDust(tileEntity.asInstanceOf[TEChalkDust])
}
null
}
override def getHandlerFor(element: RuntimeOptionCategoryElement): RuntimeOptionGuiHandler = {
null
}
override def runtimeGuiCategories(): util.Set[RuntimeOptionCategoryElement] = {
null
}
override def mainConfigGuiClass(): Class[_ <: GuiScreen] = {
null
}
override def initialize(minecraftInstance: Minecraft): Unit = {
}
}
|
TheTemportalist/Chalked
|
src/main/scala/com/temportalist/chalked/client/ClientProxy.scala
|
Scala
|
apache-2.0
| 1,554 |
package javaee6.web.cache
import javax.enterprise.context.ApplicationScoped
import javax.enterprise.inject.{Disposes, Produces}
import org.infinispan.manager.{DefaultCacheManager, EmbeddedCacheManager}
class EmbeddedCacheManagerProvider {
@Produces
@ApplicationScoped
def getDefaultEmbeddedCacheManager: EmbeddedCacheManager =
new DefaultCacheManager("infinispan.xml")
private def stopEmbeddedCacheManager(@Disposes cacheManager: EmbeddedCacheManager): Unit =
cacheManager.stop()
}
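// A hypothetical injection point (sketch, not part of this module): the container
// calls the @Disposes method above when the @ApplicationScoped bean is destroyed.
//
//   import javax.inject.Inject
//
//   class GreetingCache {
//     @Inject
//     private var manager: EmbeddedCacheManager = _
//     def greetings = manager.getCache[String, String]("greetings")
//   }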
| kazuhira-r/infinispan-examples | infinispan-cdi/src/main/scala/javaee6/web/cache/EmbeddedCacheManagerProvider.scala | Scala | mit | 502 |
package ch.fhnw.ima.saav
package model
import ch.fhnw.ima.saav.model.domain.{CriteriaId, IndicatorId}
import ch.fhnw.ima.saav.model.weight.{Weight, Weights}
import io.circe.Error
import io.circe.generic.auto._
import io.circe.parser._
object config {
/** Application defaults and potential mismatches between config and data. */
trait Config {
def title: String
def allowedValueRange: (Double, Double)
def defaultWeights: Weights
def nonAggregatableCriteria: Set[CriteriaId]
def mismatch: ConfigMismatch
}
trait ConfigMismatch {
def missingIndicators: Seq[IndicatorId]
def unexpectedIndicators: Seq[IndicatorId]
}
object ConfigMismatch {
val none = new ConfigMismatch {
val missingIndicators: Seq[IndicatorId] = Seq.empty
val unexpectedIndicators: Seq[IndicatorId] = Seq.empty
}
}
/** Entry point to JSON config (aka catalog) */
object AnalysisConfig {
val default = AnalysisConfig("", (Double.NegativeInfinity, Double.PositiveInfinity), Seq())
def fromJson(json: String): Either[Error, AnalysisConfig] = decode[AnalysisConfig](json)
}
final case class AnalysisConfig(title: String, allowedValueRange: (Double, Double), criteria: Seq[CriteriaConfig])
final case class CriteriaConfig(name: String, aggregatable: Boolean, subCriteria: Seq[SubCriteriaConfig])
final case class SubCriteriaConfig(name: String, weight: Weight, indicators: Seq[IndicatorConfig])
final case class IndicatorConfig(name: String, enabled: Boolean)
}
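// Decoding sketch: circe's generic derivation expects the JSON shape of the case
// classes above, with the (Double, Double) range as a two-element array. The
// literal below is illustrative, not taken from the project's catalogs.
//
//   val json = """{ "title": "Demo", "allowedValueRange": [0.0, 100.0], "criteria": [] }"""
//   AnalysisConfig.fromJson(json) match {
//     case Right(config) => println(config.title)
//     case Left(error) => println(s"Malformed config: $error")
//   }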
| fhnw-saav/saav | src/main/scala/ch/fhnw/ima/saav/model/config.scala | Scala | mit | 1,517 |
package fabricator
import java.awt.font.{FontRenderContext, TextLayout}
import java.awt.image.BufferedImage
import java.awt.{Color, Font, Graphics2D, Rectangle}
import fabricator.entities.CsvFileBuilder
import fabricator.enums.{FileType, MimeType}
import scala.util.Random
object FileGenerator {
def apply(): FileGenerator = {
new FileGenerator( Alphanumeric(), new Random(),
Contact(), Words(), Calendar(), Finance(),
Internet(), Location(), Mobile(), UserAgent(), UtilityService())
}
def apply(locale: String): FileGenerator = {
new FileGenerator(Alphanumeric(), new Random(),
Contact(locale), Words(locale), Calendar(locale), Finance(locale),
Internet(locale), Location(locale), Mobile(), UserAgent(), UtilityService())
}
}
class FileGenerator(private val alpha: Alphanumeric,
private val random: Random,
private val contact: Contact,
private val words: Words,
private val calendar: Calendar,
private val finance: Finance,
private val internet: Internet,
private val location: Location,
private val mobile: Mobile,
private val userAgent: UserAgent,
private val utility: UtilityService) {
def image(width: Int, height: Int, path: String) = {
if (width > 2560 || height > 2560) throw new IllegalArgumentException("Image cannot be more than 2560x2560")
val label: String = "" + width + "x" + height
val font: Font = new Font("Arial", Font.PLAIN, 32)
val frc: FontRenderContext = new FontRenderContext(null, true, true)
val layout: TextLayout = new TextLayout(label, font, frc)
val rectangle: Rectangle = layout.getPixelBounds(null, 0, 0)
val bufferedImage: BufferedImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB)
val graphics2D: Graphics2D = bufferedImage.getGraphics.asInstanceOf[Graphics2D]
// filling the background with black
graphics2D.setColor(Color.black)
graphics2D.fillRect(0, 0, width, height)
// drawing the image's "width x height" label in white
graphics2D.setColor(Color.white)
layout.draw(graphics2D, width / 2 - rectangle.getWidth.toInt / 2, height / 2)
//done with drawing
graphics2D.dispose()
// write image to a file
javax.imageio.ImageIO.write(bufferedImage, "png", new java.io.File(path))
}
def csvBuilder: CsvFileBuilder = new CsvFileBuilder(alpha, calendar, contact, finance, internet,
location, mobile, userAgent, words)
def fileName: String = fileName(FileType.getRandom)
def fileName(fileType: FileType): String = {
val fileExt = fileExtension(fileType)
val fileName = words.word
fileName + "." + fileExt
}
def fileExtension: String = fileExtension(FileType.getRandom)
def fileExtension(fileType: FileType): String = {
fileType match {
case FileType.AUDIO => utility.getValueFromArray("audio_file_extensions")
case FileType.IMAGE => utility.getValueFromArray("image_file_extensions")
case FileType.TEXT => utility.getValueFromArray("text_file_extensions")
case FileType.DOCUMENT => utility.getValueFromArray("document_file_extensions")
case FileType.VIDEO => utility.getValueFromArray("video_file_extensions")
}
}
def mime_type: String = {
mime_type(MimeType.getRandom)
}
def mime_type(mimeType: MimeType): String = {
mimeType match {
case MimeType.APPLICATION => utility.getValueFromArray("application_mime_types")
case MimeType.AUDIO => utility.getValueFromArray("audio_mime_types")
case MimeType.IMAGE => utility.getValueFromArray("image_mime_types")
case MimeType.MESSAGE => utility.getValueFromArray("message_mime_types")
case MimeType.MODEL => utility.getValueFromArray("model_mime_types")
case MimeType.MULTIPART => utility.getValueFromArray("multipart_mime_types")
case MimeType.TEXT => utility.getValueFromArray("text_mime_types")
case MimeType.VIDEO => utility.getValueFromArray("video_mime_types")
}
}
}
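// Usage sketch (output path is illustrative):
//
//   val generator = FileGenerator()
//   generator.image(200, 100, "/tmp/sample.png") // 200x100 black PNG labelled "200x100"
//   val name = generator.fileName(FileType.IMAGE) // random word + image extension
//   val mime = generator.mime_type(MimeType.TEXT)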
| edombowsky/fabricator | src/main/scala/fabricator/FileGenerator.scala | Scala | apache-2.0 | 4,191 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.language.existentials
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkException
import org.apache.spark.annotation.Since
import org.apache.spark.ml.{Estimator, Model, Transformer}
import org.apache.spark.ml.attribute.{Attribute, NominalAttribute}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared.{HasHandleInvalid, HasInputCol, HasOutputCol}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.util.collection.OpenHashMap
/**
* Base trait for [[StringIndexer]] and [[StringIndexerModel]].
*/
private[feature] trait StringIndexerBase extends Params with HasHandleInvalid with HasInputCol
with HasOutputCol {
/**
* Param for how to handle invalid data (unseen labels or NULL values).
* Options are 'skip' (filter out rows with invalid data),
* 'error' (throw an error), or 'keep' (put invalid data in a special additional
* bucket, at index numLabels).
* Default: "error"
* @group param
*/
@Since("1.6.0")
override val handleInvalid: Param[String] = new Param[String](this, "handleInvalid",
"How to handle invalid data (unseen labels or NULL values). " +
"Options are 'skip' (filter out rows with invalid data), error (throw an error), " +
"or 'keep' (put invalid data in a special additional bucket, at index numLabels).",
ParamValidators.inArray(StringIndexer.supportedHandleInvalids))
setDefault(handleInvalid, StringIndexer.ERROR_INVALID)
/**
* Param for how to order labels of string column. The first label after ordering is assigned
* an index of 0.
* Options are:
* - 'frequencyDesc': descending order by label frequency (most frequent label assigned 0)
* - 'frequencyAsc': ascending order by label frequency (least frequent label assigned 0)
* - 'alphabetDesc': descending alphabetical order
* - 'alphabetAsc': ascending alphabetical order
* Default is 'frequencyDesc'.
*
* @group param
*/
@Since("2.3.0")
final val stringOrderType: Param[String] = new Param(this, "stringOrderType",
"How to order labels of string column. " +
"The first label after ordering is assigned an index of 0. " +
s"Supported options: ${StringIndexer.supportedStringOrderType.mkString(", ")}.",
ParamValidators.inArray(StringIndexer.supportedStringOrderType))
/** @group getParam */
@Since("2.3.0")
def getStringOrderType: String = $(stringOrderType)
/** Validates and transforms the input schema. */
protected def validateAndTransformSchema(schema: StructType): StructType = {
val inputColName = $(inputCol)
val inputDataType = schema(inputColName).dataType
require(inputDataType == StringType || inputDataType.isInstanceOf[NumericType],
s"The input column $inputColName must be either string type or numeric type, " +
s"but got $inputDataType.")
val inputFields = schema.fields
val outputColName = $(outputCol)
require(inputFields.forall(_.name != outputColName),
s"Output column $outputColName already exists.")
val attr = NominalAttribute.defaultAttr.withName($(outputCol))
val outputFields = inputFields :+ attr.toStructField()
StructType(outputFields)
}
}
/**
* A label indexer that maps a string column of labels to an ML column of label indices.
* If the input column is numeric, we cast it to string and index the string values.
* The indices are in [0, numLabels). By default, this is ordered by label frequencies
* so the most frequent label gets index 0. The ordering behavior is controlled by
* setting `stringOrderType`.
*
* @see `IndexToString` for the inverse transformation
*/
@Since("1.4.0")
class StringIndexer @Since("1.4.0") (
@Since("1.4.0") override val uid: String) extends Estimator[StringIndexerModel]
with StringIndexerBase with DefaultParamsWritable {
@Since("1.4.0")
def this() = this(Identifiable.randomUID("strIdx"))
/** @group setParam */
@Since("1.6.0")
def setHandleInvalid(value: String): this.type = set(handleInvalid, value)
/** @group setParam */
@Since("2.3.0")
def setStringOrderType(value: String): this.type = set(stringOrderType, value)
setDefault(stringOrderType, StringIndexer.frequencyDesc)
/** @group setParam */
@Since("1.4.0")
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
@Since("1.4.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): StringIndexerModel = {
transformSchema(dataset.schema, logging = true)
val values = dataset.na.drop(Array($(inputCol)))
.select(col($(inputCol)).cast(StringType))
.rdd.map(_.getString(0))
val labels = $(stringOrderType) match {
case StringIndexer.frequencyDesc => values.countByValue().toSeq.sortBy(-_._2)
.map(_._1).toArray
case StringIndexer.frequencyAsc => values.countByValue().toSeq.sortBy(_._2)
.map(_._1).toArray
case StringIndexer.alphabetDesc => values.distinct.collect.sortWith(_ > _)
case StringIndexer.alphabetAsc => values.distinct.collect.sortWith(_ < _)
}
copyValues(new StringIndexerModel(uid, labels).setParent(this))
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
@Since("1.4.1")
override def copy(extra: ParamMap): StringIndexer = defaultCopy(extra)
}
@Since("1.6.0")
object StringIndexer extends DefaultParamsReadable[StringIndexer] {
private[feature] val SKIP_INVALID: String = "skip"
private[feature] val ERROR_INVALID: String = "error"
private[feature] val KEEP_INVALID: String = "keep"
private[feature] val supportedHandleInvalids: Array[String] =
Array(SKIP_INVALID, ERROR_INVALID, KEEP_INVALID)
private[feature] val frequencyDesc: String = "frequencyDesc"
private[feature] val frequencyAsc: String = "frequencyAsc"
private[feature] val alphabetDesc: String = "alphabetDesc"
private[feature] val alphabetAsc: String = "alphabetAsc"
private[feature] val supportedStringOrderType: Array[String] =
Array(frequencyDesc, frequencyAsc, alphabetDesc, alphabetAsc)
@Since("1.6.0")
override def load(path: String): StringIndexer = super.load(path)
}
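// Typical usage (a sketch; DataFrame names are illustrative, the setters are those defined above):
//
//   val indexer = new StringIndexer()
//     .setInputCol("category")
//     .setOutputCol("categoryIndex")
//     .setHandleInvalid("keep")            // unseen labels map to index numLabels
//     .setStringOrderType("frequencyDesc") // most frequent label gets index 0
//   val model = indexer.fit(trainingDF)
//   val indexed = model.transform(testDF)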
/**
* Model fitted by [[StringIndexer]].
*
* @param labels Ordered list of labels, corresponding to indices to be assigned.
*
* @note During transformation, if the input column does not exist,
* `StringIndexerModel.transform` would return the input dataset unmodified.
* This is a temporary fix for the case when target labels do not exist during prediction.
*/
@Since("1.4.0")
class StringIndexerModel (
@Since("1.4.0") override val uid: String,
@Since("1.5.0") val labels: Array[String])
extends Model[StringIndexerModel] with StringIndexerBase with MLWritable {
import StringIndexerModel._
@Since("1.5.0")
def this(labels: Array[String]) = this(Identifiable.randomUID("strIdx"), labels)
private val labelToIndex: OpenHashMap[String, Double] = {
val n = labels.length
val map = new OpenHashMap[String, Double](n)
var i = 0
while (i < n) {
map.update(labels(i), i)
i += 1
}
map
}
/** @group setParam */
@Since("1.6.0")
def setHandleInvalid(value: String): this.type = set(handleInvalid, value)
/** @group setParam */
@Since("1.4.0")
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
@Since("1.4.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
if (!dataset.schema.fieldNames.contains($(inputCol))) {
logInfo(s"Input column ${$(inputCol)} does not exist during transformation. " +
"Skip StringIndexerModel.")
return dataset.toDF
}
transformSchema(dataset.schema, logging = true)
val filteredLabels = getHandleInvalid match {
case StringIndexer.KEEP_INVALID => labels :+ "__unknown"
case _ => labels
}
val metadata = NominalAttribute.defaultAttr
.withName($(outputCol)).withValues(filteredLabels).toMetadata()
// If we are skipping invalid records, filter them out.
val (filteredDataset, keepInvalid) = getHandleInvalid match {
case StringIndexer.SKIP_INVALID =>
val filterer = udf { label: String =>
labelToIndex.contains(label)
}
(dataset.na.drop(Array($(inputCol))).where(filterer(dataset($(inputCol)))), false)
case _ => (dataset, getHandleInvalid == StringIndexer.KEEP_INVALID)
}
val indexer = udf { label: String =>
if (label == null) {
if (keepInvalid) {
labels.length
} else {
throw new SparkException("StringIndexer encountered NULL value. To handle or skip " +
"NULLS, try setting StringIndexer.handleInvalid.")
}
} else {
if (labelToIndex.contains(label)) {
labelToIndex(label)
} else if (keepInvalid) {
labels.length
} else {
throw new SparkException(s"Unseen label: $label. To handle unseen labels, " +
s"set Param handleInvalid to ${StringIndexer.KEEP_INVALID}.")
}
}
}
filteredDataset.select(col("*"),
indexer(dataset($(inputCol)).cast(StringType)).as($(outputCol), metadata))
}
@Since("1.4.0")
override def transformSchema(schema: StructType): StructType = {
if (schema.fieldNames.contains($(inputCol))) {
validateAndTransformSchema(schema)
} else {
// If the input column does not exist during transformation, we skip StringIndexerModel.
schema
}
}
@Since("1.4.1")
override def copy(extra: ParamMap): StringIndexerModel = {
val copied = new StringIndexerModel(uid, labels)
copyValues(copied, extra).setParent(parent)
}
@Since("1.6.0")
override def write: StringIndexModelWriter = new StringIndexModelWriter(this)
}
@Since("1.6.0")
object StringIndexerModel extends MLReadable[StringIndexerModel] {
private[StringIndexerModel]
class StringIndexModelWriter(instance: StringIndexerModel) extends MLWriter {
private case class Data(labels: Array[String])
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val data = Data(instance.labels)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class StringIndexerModelReader extends MLReader[StringIndexerModel] {
private val className = classOf[StringIndexerModel].getName
override def load(path: String): StringIndexerModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.parquet(dataPath)
.select("labels")
.head()
val labels = data.getAs[Seq[String]](0).toArray
val model = new StringIndexerModel(metadata.uid, labels)
DefaultParamsReader.getAndSetParams(model, metadata)
model
}
}
@Since("1.6.0")
override def read: MLReader[StringIndexerModel] = new StringIndexerModelReader
@Since("1.6.0")
override def load(path: String): StringIndexerModel = super.load(path)
}
/**
* A `Transformer` that maps a column of indices back to a new column of corresponding
* string values.
* The index-string mapping is either from the ML attributes of the input column,
* or from user-supplied labels (which take precedence over ML attributes).
*
* @see `StringIndexer` for converting strings into indices
*/
@Since("1.5.0")
class IndexToString @Since("2.2.0") (@Since("1.5.0") override val uid: String)
extends Transformer with HasInputCol with HasOutputCol with DefaultParamsWritable {
@Since("1.5.0")
def this() =
this(Identifiable.randomUID("idxToStr"))
/** @group setParam */
@Since("1.5.0")
def setInputCol(value: String): this.type = set(inputCol, value)
/** @group setParam */
@Since("1.5.0")
def setOutputCol(value: String): this.type = set(outputCol, value)
/** @group setParam */
@Since("1.5.0")
def setLabels(value: Array[String]): this.type = set(labels, value)
/**
* Optional param for array of labels specifying index-string mapping.
*
* Default: Not specified, in which case [[inputCol]] metadata is used for labels.
* @group param
*/
@Since("1.5.0")
final val labels: StringArrayParam = new StringArrayParam(this, "labels",
"Optional array of labels specifying index-string mapping." +
" If not provided or if empty, then metadata from inputCol is used instead.")
/** @group getParam */
@Since("1.5.0")
final def getLabels: Array[String] = $(labels)
@Since("1.5.0")
override def transformSchema(schema: StructType): StructType = {
val inputColName = $(inputCol)
val inputDataType = schema(inputColName).dataType
require(inputDataType.isInstanceOf[NumericType],
s"The input column $inputColName must be a numeric type, " +
s"but got $inputDataType.")
val inputFields = schema.fields
val outputColName = $(outputCol)
require(inputFields.forall(_.name != outputColName),
s"Output column $outputColName already exists.")
val outputFields = inputFields :+ StructField($(outputCol), StringType)
StructType(outputFields)
}
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
val inputColSchema = dataset.schema($(inputCol))
// If the labels array is empty use column metadata
val values = if (!isDefined(labels) || $(labels).isEmpty) {
Attribute.fromStructField(inputColSchema)
.asInstanceOf[NominalAttribute].values.get
} else {
$(labels)
}
val indexer = udf { index: Double =>
val idx = index.toInt
if (0 <= idx && idx < values.length) {
values(idx)
} else {
throw new SparkException(s"Unseen index: $index ??")
}
}
val outputColName = $(outputCol)
dataset.select(col("*"),
indexer(dataset($(inputCol)).cast(DoubleType)).as(outputColName))
}
@Since("1.5.0")
override def copy(extra: ParamMap): IndexToString = {
defaultCopy(extra)
}
}
@Since("1.6.0")
object IndexToString extends DefaultParamsReadable[IndexToString] {
@Since("1.6.0")
override def load(path: String): IndexToString = super.load(path)
}
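// Inverse-mapping sketch (labels array is illustrative; when labels is not set,
// the mapping is taken from the input column's ML attribute metadata):
//
//   val converter = new IndexToString()
//     .setInputCol("categoryIndex")
//     .setOutputCol("originalCategory")
//     .setLabels(Array("a", "b", "c"))
//   val restored = converter.transform(indexed)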
| mike0sv/spark | mllib/src/main/scala/org/apache/spark/ml/feature/StringIndexer.scala | Scala | apache-2.0 | 15,471 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.dnvriend
import scala.language.reflectiveCalls
trait Printer {
type PrintLineAble = {
def println(msg: String): Unit
}
def println(msg: String, printer: PrintLineAble): Unit
}
object MessagePrinter extends Printer {
override def println(msg: String, printer: PrintLineAble): Unit =
printer.println(msg)
}
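// Because PrintLineAble is a structural type, any value exposing
// println(String): Unit satisfies it via reflection, e.g. (sketch):
//
//   MessagePrinter.println("hello", Console.out) // java.io.PrintStream fits structurally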
| dnvriend/akka-template-project | src/main/scala/com/github/dnvriend/MessagePrinter.scala | Scala | apache-2.0 | 896 |
package io.taig.android.app.activity
import android.app
import android.app.TaskStackBuilder
import android.content.Intent
import android.content.res.Configuration
import android.graphics.{Bitmap, Canvas}
import android.os.Bundle
import android.view.ActionMode.Callback
import android.view.ContextMenu.ContextMenuInfo
import android.view.WindowManager.LayoutParams
import android.view._
import io.taig.android.log.Log
trait Lifecycle extends Activity {
override def onCreate(state: Bundle): Unit = {
Log.d(s"onCreate( $state )")
super.onCreate(state)
}
override def onCreateOptionsMenu(menu: Menu) = {
Log.d(s"onCreateOptionsMenu( $menu )")
super.onCreateOptionsMenu(menu)
}
override protected def onPostCreate(state: Bundle): Unit = {
super.onPostCreate(state)
Log.d(s"onPostCreate( $state )")
}
override def onConfigurationChanged(configuration: Configuration): Unit = {
Log.d(s"onConfigurationChanged( $configuration )")
super.onConfigurationChanged(configuration)
}
override def onStop(): Unit = {
Log.d(s"onStop()")
super.onStop()
}
override def onPostResume(): Unit = {
Log.d(s"onPostResume()")
super.onPostResume()
}
override def onCreatePanelView(id: Int) = {
Log.d(s"onCreatePanelView( $id )")
super.onCreatePanelView(id)
}
override def onDestroy(): Unit = {
Log.d(s"onDestroy()")
super.onDestroy()
}
override def onTitleChanged(title: CharSequence, color: Int): Unit = {
Log.d(s"onTitleChanged( $title, $color )")
super.onTitleChanged(title, color)
}
override def onCreatePanelMenu(id: Int, menu: Menu) = {
Log.d(s"onCreatePanelMenu( $id, $menu )")
super.onCreatePanelMenu(id, menu)
}
override def onPreparePanel(id: Int, view: View, menu: Menu) = {
Log.d(s"onPreparePanel( $id, $view, $menu )")
super.onPreparePanel(id, view, menu)
}
override def onPanelClosed(id: Int, menu: Menu): Unit = {
Log.d(s"onPanelClosed( $id, $menu )")
super.onPanelClosed(id, menu)
}
override def onMenuOpened(id: Int, menu: Menu) = {
Log.d(s"onMenuOpened( $id, $menu )")
super.onMenuOpened(id, menu)
}
override def onBackPressed(): Unit = {
Log.d(s"onBackPressed()")
super.onBackPressed()
}
override def onKeyShortcut(key: Int, event: KeyEvent) = {
Log.d(s"onKeyShortcut( $key, $event )")
super.onKeyShortcut(key, event)
}
override def onKeyDown(key: Int, event: KeyEvent) = {
Log.d(s"onKeyDown( $key, $event )")
super.onKeyDown(key, event)
}
override def onActivityResult(request: Int,
result: Int,
data: Intent): Unit = {
Log.d(s"onActivityResult( $request, $result, $data )")
super.onActivityResult(request, result, data)
}
override def onLowMemory(): Unit = {
Log.d(s"onLowMemory()")
super.onLowMemory()
}
override def onPause(): Unit = {
Log.d(s"onPause()")
super.onPause()
}
override def onNewIntent(intent: Intent): Unit = {
Log.d(s"onNewIntent( $intent )")
super.onNewIntent(intent)
}
override def onResume(): Unit = {
Log.d(s"onResume()")
super.onResume()
}
override def onSaveInstanceState(state: Bundle): Unit = {
Log.d(s"onSaveInstanceState( $state )")
super.onSaveInstanceState(state)
}
override def onStart(): Unit = {
Log.d(s"onStart()")
super.onStart()
}
override def onAttachFragment(fragment: android.app.Fragment): Unit = {
Log.d(s"onAttachFragment( $fragment )")
super.onAttachFragment(fragment)
}
override def onRestoreInstanceState(state: Bundle): Unit = {
Log.d(s"onRestoreInstanceState( $state )")
super.onRestoreInstanceState(state)
}
override def onRestart(): Unit = {
Log.d(s"onRestart()")
super.onRestart()
}
override def onUserLeaveHint(): Unit = {
Log.d(s"onUserLeaveHint()")
super.onUserLeaveHint()
}
override def onCreateThumbnail(bitmap: Bitmap, canvas: Canvas) = {
Log.d(s"onCreateThumbnail( $bitmap, $canvas )")
super.onCreateThumbnail(bitmap, canvas)
}
override def onCreateDescription() = {
Log.d(s"onCreateDescription()")
super.onCreateDescription()
}
override def onProvideAssistData(data: Bundle): Unit = {
Log.d(s"onProvideAssistData( $data )")
super.onProvideAssistData(data)
}
override def onKeyLongPress(key: Int, event: KeyEvent) = {
Log.d(s"onKeyLongPress( $key, $event )")
super.onKeyLongPress(key, event)
}
override def onKeyUp(key: Int, event: KeyEvent) = {
Log.d(s"onKeyUp( $key, $event )")
super.onKeyUp(key, event)
}
override def onKeyMultiple(key: Int, repeatCount: Int, event: KeyEvent) = {
Log.d(s"onKeyMultiple( $key, $repeatCount, $event )")
super.onKeyMultiple(key, repeatCount, event)
}
override def onTouchEvent(event: MotionEvent) = {
Log.d(s"onTouchEvent( $event )")
super.onTouchEvent(event)
}
override def onTrackballEvent(event: MotionEvent) = {
Log.d(s"onTrackballEvent( $event )")
super.onTrackballEvent(event)
}
override def onGenericMotionEvent(event: MotionEvent) = {
Log.d(s"onGenericMotionEvent( $event )")
super.onGenericMotionEvent(event)
}
override def onUserInteraction(): Unit = {
Log.d(s"onUserInteraction()")
super.onUserInteraction()
}
override def onWindowAttributesChanged(parameters: LayoutParams): Unit = {
Log.d(s"onWindowAttributesChanged( $parameters )")
super.onWindowAttributesChanged(parameters)
}
override def onWindowFocusChanged(hasFocus: Boolean): Unit = {
Log.d(s"onWindowFocusChanged( $hasFocus )")
super.onWindowFocusChanged(hasFocus)
}
override def onAttachedToWindow(): Unit = {
Log.d(s"onAttachedToWindow()")
super.onAttachedToWindow()
}
override def onDetachedFromWindow(): Unit = {
Log.d(s"onDetachedFromWindow()")
super.onDetachedFromWindow()
}
override def onOptionsItemSelected(item: MenuItem) = {
Log.d(s"onOptionsItemSelected( $item )")
super.onOptionsItemSelected(item)
}
override def onPrepareOptionsMenu(menu: Menu) = {
Log.d(s"onPrepareOptionsMenu( $menu )")
super.onPrepareOptionsMenu(menu)
}
override def onNavigateUp() = {
Log.d(s"onNavigateUp()")
super.onNavigateUp()
}
override def onNavigateUpFromChild(child: app.Activity) = {
Log.d(s"onNavigateUpFromChild( $child )")
super.onNavigateUpFromChild(child)
}
override def onCreateNavigateUpTaskStack(builder: TaskStackBuilder): Unit = {
Log.d(s"onCreateNavigateUpTaskStack( $builder )")
super.onCreateNavigateUpTaskStack(builder)
}
override def onPrepareNavigateUpTaskStack(builder: TaskStackBuilder): Unit = {
Log.d(s"onPrepareNavigateUpTaskStack( $builder )")
super.onPrepareNavigateUpTaskStack(builder)
}
override def onOptionsMenuClosed(menu: Menu): Unit = {
Log.d(s"onOptionsMenuClosed( $menu )")
super.onOptionsMenuClosed(menu)
}
override def onCreateContextMenu(menu: ContextMenu,
view: View,
info: ContextMenuInfo): Unit = {
Log.d(s"onCreateContextMenu( $menu, $view, $info )")
super.onCreateContextMenu(menu, view, info)
}
override def onContextItemSelected(item: MenuItem) = {
Log.d(s"onContextItemSelected( $item )")
super.onContextItemSelected(item)
}
override def onContextMenuClosed(menu: Menu): Unit = {
Log.d(s"onContextMenuClosed( $menu )")
super.onContextMenuClosed(menu)
}
override def onSearchRequested() = {
Log.d(s"onSearchRequested()")
super.onSearchRequested()
}
override def onActivityReenter(result: Int, data: Intent): Unit = {
Log.d(s"onActivityReenter( $result, $data )")
super.onActivityReenter(result, data)
}
override def onChildTitleChanged(child: app.Activity,
title: CharSequence): Unit = {
Log.d(s"onChildTitleChanged( $child, $title )")
super.onChildTitleChanged(child, title)
}
override def onVisibleBehindCanceled(): Unit = {
Log.d(s"onVisibleBehindCanceled()")
super.onVisibleBehindCanceled()
}
override def onEnterAnimationComplete(): Unit = {
Log.d(s"onEnterAnimationComplete()")
super.onEnterAnimationComplete()
}
override def onWindowStartingActionMode(callback: Callback) = {
Log.d(s"onWindowStartingActionMode( $callback )")
super.onWindowStartingActionMode(callback)
}
override def onActionModeStarted(mode: ActionMode): Unit = {
Log.d(s"onActionModeStarted( $mode )")
super.onActionModeStarted(mode)
}
override def onActionModeFinished(mode: ActionMode): Unit = {
Log.d(s"onActionModeFinished( $mode )")
super.onActionModeFinished(mode)
}
}
| Taig/Toolbelt | app/src/main/scala/io/taig/android/app/activity/Lifecycle.scala | Scala | mit | 8,839 |
package de.christofreichardt.scala.ellipticcurve
import de.christofreichardt.diagnosis.AbstractTracer
import de.christofreichardt.diagnosis.TracerFactory
import de.christofreichardt.scala.diagnosis.Tracing
import java.io.BufferedInputStream
import java.io.File
import java.io.FileInputStream
import java.util.Scanner
/**
* @author Christof Reichardt
*/
object PrimeBase extends Tracing {
val primeBaseFile: File = new File("." + File.separator + "data" + File.separator + "PrimeBase.txt")
require(primeBaseFile.exists())
val primes = init()
class BufferedPrimeScanner(bufferedInputStream : BufferedInputStream) extends Iterator[Int] {
val scanner = new Scanner(bufferedInputStream)
val tracer = getCurrentTracer
tracer.out().printfIndentln("scanner.radix() = %d", int2Integer(scanner.radix()))
def hasNext: Boolean = scanner.hasNextInt()
def next: Int = scanner.nextInt()
}
private def init(): IndexedSeq[Int] = {
withTracer("IndexedSeq[Int]", this, "init()") {
val buffer = new BufferedInputStream(new FileInputStream(primeBaseFile))
try {
val primeScanner = new BufferedPrimeScanner(buffer)
primeScanner.toIndexedSeq
}
finally {
buffer.close()
}
}
}
override def getCurrentTracer(): AbstractTracer = {
try {
TracerFactory.getInstance().getDefaultTracer
}
catch {
case ex: TracerFactory.Exception => TracerFactory.getInstance().getDefaultTracer
}
}
}
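// Usage sketch: the base is loaded eagerly when the object is initialised, so
// (assuming the data file lists primes from 2 upwards)
//
//   PrimeBase.primes.take(5) // Vector(2, 3, 5, 7, 11)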
| chr78rm/jca-provider | elliptic-curve-arithmetic/src/main/scala/de/christofreichardt/scala/ellipticcurve/PrimeBase.scala | Scala | gpl-3.0 | 1,494 |
package org.http4s
package headers
import cats.data.NonEmptyList
import cats.syntax.foldable._
import org.http4s.parser.HttpHeaderParser
import CharsetRange.Atom
object `Accept-Charset` extends HeaderKey.Internal[`Accept-Charset`] with HeaderKey.Recurring {
override def parse(s: String): ParseResult[`Accept-Charset`] =
HttpHeaderParser.ACCEPT_CHARSET(s)
}
final case class `Accept-Charset`(values: NonEmptyList[CharsetRange])
extends Header.RecurringRenderable {
def key: `Accept-Charset`.type = `Accept-Charset`
type Value = CharsetRange
def qValue(charset: Charset): QValue = {
def specific =
values.collectFirst { case cs: Atom if cs.matches(charset) => cs.qValue }
def splatted =
values.collectFirst { case cs: CharsetRange.`*` => cs.qValue }
specific.orElse(splatted).getOrElse(QValue.Zero)
}
@deprecated("Use satisfiedBy(charset)", "0.16.1")
def isSatisfiedBy(charset: Charset): Boolean = satisfiedBy(charset)
def satisfiedBy(charset: Charset): Boolean = qValue(charset) > QValue.Zero
def map(f: CharsetRange => CharsetRange): `Accept-Charset` = `Accept-Charset`(values.map(f))
}
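// q-value resolution sketch (the header literal is illustrative): a specific
// atom that matches the charset wins over the splatted range.
//
//   `Accept-Charset`.parse("utf-8;q=0.9, *;q=0.1").map { header =>
//     header.qValue(Charset.`UTF-8`) // QValue for q=0.9, from the matching atom
//   }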
| aeons/http4s | core/src/main/scala/org/http4s/headers/Accept-Charset.scala | Scala | apache-2.0 | 1,146 |
/*
* Copyright (c) 2015 Lucas Satabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolxit
/** @author Lucas Satabin
*
*/
package object dimen {
/** Special dimension, which is zero */
val ZeroDimen = Dimension(0)
implicit class IntDimen(val i: Int) extends AnyVal {
def sp = Dimension(i)
def pt = Dimension.ofPoint(i)
def pc = Dimension.ofPica(i)
def in = Dimension.ofInch(i)
def bp = Dimension.ofBigPoint(i)
def cm = Dimension.ofCentimeter(i)
def mm = Dimension.ofMillimeter(i)
def dd = Dimension.ofDidotPoint(i)
def cc = Dimension.ofCicero(i)
}
implicit class DoubleDimen(val f: Double) extends AnyVal {
def sp = Dimension(f.toInt)
def pt = Dimension.ofPoint(f)
def pc = Dimension.ofPica(f)
def in = Dimension.ofInch(f)
def bp = Dimension.ofBigPoint(f)
def cm = Dimension.ofCentimeter(f)
def mm = Dimension.ofMillimeter(f)
def dd = Dimension.ofDidotPoint(f)
def cc = Dimension.ofCicero(f)
}
}
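// Usage sketch: the value classes above let numeric literals carry TeX units.
//
//   import toolxit.dimen._
//   val margin = 2.5.cm // Dimension.ofCentimeter(2.5)
//   val indent = 12.pt  // Dimension.ofPoint(12)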
| satabin/toolxit-ng | core/src/main/scala/toolxit/dimen/package.scala | Scala | apache-2.0 | 1,515 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.File
import org.apache.spark.sql.catalyst.util._
/**
* A framework for running the query tests that are listed as a set of text files.
*
* TestSuites that derive from this class must provide a map of testCaseName to testCaseFiles
* that should be included. Additionally, there is support for including and excluding
* tests as development progresses.
*/
abstract class HiveQueryFileTest extends HiveComparisonTest {
/** A list of tests deemed out of scope and thus completely disregarded */
def excludeList: Seq[String] = Nil
/**
* The set of tests that are believed to be working in catalyst. Tests not in includeList or
* excludeList are implicitly marked as ignored.
*/
def includeList: Seq[String] = ".*" :: Nil
def testCases: Seq[(String, File)]
val runAll: Boolean =
!(System.getProperty("spark.hive.alltests") == null) ||
runOnlyDirectories.nonEmpty ||
skipDirectories.nonEmpty
val deprecatedIncludeListProperty: String = "spark.hive.whitelist"
val includeListProperty: String = "spark.hive.includelist"
if (System.getProperty(deprecatedIncludeListProperty) != null) {
logWarning(s"System property `$deprecatedIncludeListProperty` is deprecated; please update " +
s"to use new property: $includeListProperty")
}
// Allow the includeList to be overridden by a system property
val realIncludeList: Seq[String] =
Option(System.getProperty(includeListProperty))
.orElse(Option(System.getProperty(deprecatedIncludeListProperty)))
.map(_.split(",").toSeq)
.getOrElse(includeList)
// Go through all the test cases and add them to scala test.
testCases.sorted.foreach {
case (testCaseName, testCaseFile) =>
if (excludeList.exists(_.r.pattern.matcher(testCaseName).matches())) {
logDebug(s"Excluded test skipped $testCaseName")
} else if (
realIncludeList.exists(_.r.pattern.matcher(testCaseName).matches()) ||
runAll) {
// Build a test case and submit it to scala test framework...
val queriesString = fileToString(testCaseFile)
createQueryTest(testCaseName, queriesString, reset = true, tryWithoutResettingFirst = true)
} else {
// Only output warnings for the built in includeList as this clutters the output when the
// user is trying to execute a single test from the commandline.
if (System.getProperty(includeListProperty) == null && !runAll) {
ignore(testCaseName) {}
}
}
}
}
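// Subclass sketch (directory and patterns are hypothetical):
//
//   class MyHiveQuerySuite extends HiveQueryFileTest {
//     override def testCases: Seq[(String, File)] =
//       new File("src/test/resources/queries").listFiles.map(f => f.getName -> f).toSeq
//     override def includeList: Seq[String] = Seq("join.*", "groupby.*")
//   }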
| shaneknapp/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala | Scala | apache-2.0 | 3,394 |
package blended.security.scep.internal
import blended.domino.TypesafeConfigWatching
import blended.security.ssl.{CertificateProvider, CommonNameProvider}
import blended.util.config.Implicits._
import domino.DominoActivator
import blended.util.logging.Logger
class ScepActivator extends DominoActivator with TypesafeConfigWatching {
private[this] val log = Logger[ScepActivator]
whenBundleActive {
whenTypesafeConfigAvailable{ (cfg, _) =>
val scepUrl = cfg.getStringOption("scepUrl")
scepUrl.foreach { url =>
val profile = cfg.getStringOption("scepProfile")
val keyLength = cfg.getInt("keyLength", 2048)
val csrSignAlgorithm = cfg.getString("csrSignAlgorithm", "SHA1withRSA")
val scepChallenge = cfg.getString("scepChallenge")
val scepCfg = ScepConfig(
url = url,
profile = profile,
keyLength = keyLength,
csrSignAlgorithm = csrSignAlgorithm,
scepChallenge = scepChallenge
)
new ScepCertificateProvider(scepCfg).providesService[CertificateProvider]("provider" -> "scep")
}
}
}
}
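// The Typesafe config consumed above would look like this (a sketch: keys match
// the getString*/getInt calls, values are placeholders):
//
//   scepUrl = "http://scep.example.com/scep"
//   scepProfile = "default"          // optional
//   keyLength = 2048                 // default 2048
//   csrSignAlgorithm = "SHA1withRSA" // default
//   scepChallenge = "secret"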
| lefou/blended | blended.security.scep/src/main/scala/blended/security/scep/internal/ScepActivator.scala | Scala | apache-2.0 | 1,124 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv
import laws.discipline._, arbitrary._
class ByteCodecTests extends DisciplineSuite {
checkAll("CellCodec[Byte]", CellCodecTests[Byte].codec[String, Float])
checkAll("RowCodec[Byte]", RowCodecTests[Byte].codec[String, Float])
}
| nrinaudo/scala-csv | core/shared/src/test/scala/kantan/csv/ByteCodecTests.scala | Scala | mit | 854 |
package test_data.v20
import scala.xml.Elem
object XMLData extends utils.TestUtils {
def madeUpField(xml: Elem) = {
Seq(
"I am an invalid field that should never appear in the pdf"
)
}
def functionalTestCase1(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml)
}
def functionalTestCase2(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutYourPartner(xml)
}
def functionalTestCase3(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutYourPartner(xml) ++ careBreaks(fields) ++ sectionAboutEmployment(xml) ++ sectionEvidenceList(xml)
}
def functionalTestCase4(xml: Elem): Seq[String] = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ careBreaks(fields)
}
def functionalTestCase5(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutYourPartner(xml) ++ careBreaks(fields)
}
def functionalTestCase6(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutOtherMoney(xml)
}
def functionalTestCase7(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutYourPartner(xml) ++ careBreaks(fields)
}
def functionalTestCase8(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionAboutSelfEmployment(xml)
}
def functionalTestCase9(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml)
}
def functionalTestCase10(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml)
}
def functionalTestCase11(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionCustomerConsent(xml)
}
def functionalTestCase12(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionCustomerConsent(xml)
}
def functionalTestCase13(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionCustomerConsent(xml) ++ sectionAboutYourPayDetails(xml)
}
def functionalTestCase14(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionCustomerConsent(xml)
}
def functionalTestCase15(xml: Elem) = {
val fields = XMLDataFields(xml)
functionalTestCaseMandatoryFields(xml) ++ sectionCustomerConsent(xml)
}
def functionalTestCaseMandatoryFields(xml: Elem) = {
val fields = XMLDataFields(xml)
Seq(
"Date Received:"+" "+fields.dateClaimReceived.text+" "+"Transaction: " + fields.transactionPath.text,
fields.titleOtherAnswer.text + " " + fields.firstNameAnswer.text + " " +fields.surNameAnswer.text + " "+ fields.nationalInsuranceNumberAnswer.text,
"Summary",
"About the care you provide"
) ++ aboutYouTheCarer(xml) ++ sectionAboutTheCareYouProvide(xml) ++ claimDates(fields) ++ claimSummary(fields)
}
def aboutYouTheCarer(xml: Elem) = {
val fields = SectionPart1AboutYouTheCarer(xml)
Seq("About you - the carer",
buildQuestion(fields.nationalInsuranceNumberQuestion.text, fields.nationalInsuranceNumberAnswer.text.trim),
buildQuestion(fields.lastNameQuestion.text, fields.lastNameAnswer.text.trim),
buildQuestion(fields.firstNameQuestion.text, fields.firstNameAnswer.text.trim),
buildQuestion(fields.claimantMiddleNamesQuestion.text, fields.claimantMiddleNameAnswer.text.trim),
buildQuestion(fields.titleQuestion.text, fields.titleAnswer.text.trim),
buildQuestion(fields.dateOfBirthQuestion.text, fields.dateOfBirthAnswer.text.trim),
buildQuestion(fields.maritalStatusQuestion.text, fields.maritalStatusAnswer.text.trim),
buildQuestion(fields.dayTimeTelephoneNumberQuestion.text, fields.dayTimeTelephoneNumberAnswer.text.trim),
buildQuestion(fields.speechOrHearingDifficultyQuestion.text, fields.speechOrHearingDifficultyAnswer.text.trim),
buildQuestion(fields.nationalityQuestion.text, fields.nationalityAnswer.text.trim),
buildQuestion(fields.doYouLiveEnglandScotlandWalesQuestion.text, fields.doYouLiveEnglandScotlandWalesAnswer.text.trim),
buildQuestion(fields.countryNormallyLiveInQuestion.text, fields.countryNormallyLiveInAnswer.text.trim),
buildQuestion(fields.timeOutsideGBLast3YearsQuestion.text, fields.timeOutsideGBLast3YearsAnswer.text.trim),
buildQuestion(fields.havePartnerQuestion.text, fields.havePartnerAnswer.text.trim),
fields.addressCarerAnswer,
fields.postCodeCarer.text,
fields.reasonTimeAbroadOther.text,
buildQuestion(fields.cared35HoursBeforeQuestion.text, fields.cared35HoursBeforeAnswer.text),
buildQuestion(fields.dateStartedCaringQuestion.text, fields.dateStartedCaringAnswer.text)
)
}
def aboutTheCareYouProvide(fields: XMLDataFields) = {
Seq(
fields.careeLastNameQuestion.text + " " + fields.careeLastNameAnswer.text.trim,
fields.careeFirstNameQuestion.text + " " + fields.careeFirstNameAnswer.text.trim,
fields.careeMiddleNameQuestion.text + " " + fields.careeMiddleNameAnswer.text.trim,
fields.titleQuestion.text + " " + fields.titleAnswer.text,
fields.addressCareeAnswer,
fields.postCodeCaree.text
)
}
def claimDates(fields: XMLDataFields) = {
Seq("Claim Date",
buildQuestion(fields.dateOfClaimQuestion.text, fields.dateOfClaimAnswer.text)
)
}
def claimSummary(fields: XMLDataFields) = {
Seq("Claim Summary",
fields.nationalityAnswer.text,
fields.qualifyingBenefitAnswer.text,
buildQuestion(fields.receiveEEAPensionsBenefitsQuestion.text, fields.receiveEEAPensionsBenefitsAnswer.text),
buildQuestion(fields.timeOutsideGBLast3YearsQuestion.text, fields.timeOutsideGBLast3YearsAnswer.text),
buildQuestion(fields.otherInformationWelshCommunicationQuestion.text, fields.otherInformationWelshCommunicationAnswer.text),
buildQuestion(fields.otherInformationAddtionalInformationQuestion.text, fields.otherInformationAddtionalInformationAnswer.text)
)
}
def careBreaks(fields: XMLDataFields) = fields.careBreak
def sectionAboutYourPartner(xml: Elem) = {
val fields = SectionPart2AboutYourPartner(xml)
Seq (
"Part 2 - About your partner",
"Partner details",
buildQuestion(fields.dateOfBirthQuestion.text, fields.dateOfBirthAnswer.text),
buildQuestion(fields.nationalityPartnerQuestion.text, fields.nationalityPartnerAnswer.text),
buildQuestion(fields.seperatedFromPartnerQuestion.text, fields.seperatedFromPartnerAnswer.text),
buildQuestion(fields.isCareeQuestion.text, fields.isCareeAnswer.text),
buildQuestion(fields.parnerNINOQuestion.text, fields.parnerNINOAnswer.text),
buildQuestion(fields.partnerSurnameQuestion.text,fields.partnerSurnameAnswer.text),
buildQuestion(fields.partnerMiddleNamesQuestion.text,fields.partnerMiddleNamesAnswer.text),
buildQuestion(fields.partnerOtherNamesQuestion.text,fields.partnerOtherNamesAnswer.text),
buildQuestion(fields.partnerTitleQuestion.text,fields.partnerTitleAnswer.text),
buildQuestion(fields.partnerOtherSurnamesQuestion.text,fields.partnerOtherSurnamesAnswer.text)
)
}
def sectionAboutEmployment(xml:Elem) = {
val fields = SectionAboutEmployment(xml)
val employmentTitle =
if (serviceVersion(xml) == "0.2") "Part 5 - Employment"
else "Part 5 - About Your Employment"
Seq (employmentTitle,
fields.areYouEmployedQuestion.text+" "+fields.areYouEmployedAnswer.text,
buildEmpAdditionaInfo(xml)) ++ fields.employmentDetails
}
private def buildEmpAdditionaInfo (xml:Elem) = {
val xmlDatafields = XMLDataFields(xml)
buildOther(xmlDatafields.employmentAdditionaInfoQuestion.text,
xmlDatafields.employmentAdditionaInfoAnswer.text,
xmlDatafields.employmentAdditionaInfoOther.text)
}
def sectionAboutSelfEmployment(xml:Elem) = {
val fields = SectionAboutSelfEmployment(xml)
val yourAccounts: Seq[String] = if (fields.haveBeenSelfEmployedAnswer.text == "yes")
Seq("Your accounts",
buildQuestion(fields.doYouKnowYourTaxYearQuestion.text, fields.doYouKnowYourTaxYearAnswer.text),
buildQuestion(fields.tradingYearStartedQuestion.text, fields.tradingYearStartedAnswer.text),
buildQuestion(fields.tradingYearEndedQuestion.text, fields.tradingYearEndedAnswer.text),
buildQuestion(fields.sameIncomeOutgoingLevelsQuestion.text, fields.sameIncomeOutgoingLevelsAnswer.text),
buildQuestion(fields.whyWhenChangeQuestion.text, fields.whyWhenChangeAnswer.text))
else Seq()
Seq ("Part 6 - About Self Employment",
buildQuestion(fields.haveBeenSelfEmployedQuestion.text, fields.haveBeenSelfEmployedAnswer.text),
"Your job",
buildQuestion(fields.selfEmployedNowQuestion.text, fields.selfEmployedNowAnswer.text),
buildQuestion(fields.selfEmployedStartedQuestion.text, fields.selfEmployedStartedAnswer.text),
buildQuestion(fields.selfEmployedEndedQuestion.text, fields.selfEmployedEndedAnswer.text),
buildQuestion(fields.ceasedTradingQuestion.text, fields.ceasedTradingAnswer.text),
buildQuestion(fields.natureOfBusinessQuestion.text, fields.natureOfBusinessAnswer.text)) ++
yourAccounts ++
Seq ("Pension and expenses",
buildQuestion(fields.paidForPensionQuestion.text, fields.paidForPensionAnswer.text),
buildQuestion(fields.paidForPensionQuestion.text, fields.paidForPensionAnswer.text),
buildQuestion(fields.pensionExpensesQuestion.text, fields.pensionExpensesAnswer.text),
buildQuestion(fields.paidForJobExpensesQuestion.text, fields.paidForJobExpensesAnswer.text),
buildQuestion(fields.jobExpensesQuestion.text, fields.jobExpensesAnswer.text)
) ++ Seq(buildEmpAdditionaInfo(xml))
}
def sectionAboutTheCareYouProvide(xml:Elem) = {
val fields = SectionAboutTheCareYouProvide(xml)
Seq("Part 3 - About the care you provide",
"Details of the person you care for",
buildQuestion((fields.nationalInsuranceNumber \\ "QuestionLabel").text, (fields.nationalInsuranceNumber \\ "Answer").text),
buildQuestion((fields.dateOfBirth \\ "QuestionLabel").text, (fields.dateOfBirth \\ "Answer").text),
buildQuestion(fields.liveSameAddressQuestion.text, fields.liveSameAddressAnswer.text),
// "Contact details of the person you care for" is now a dynamic label, so it is omitted here
buildQuestion((fields.dayTimeTelephoneNumber \\ "QuestionLabel").text, (fields.dayTimeTelephoneNumber \\ "Answer").text),
buildQuestion(fields.relationToClaimantQuestion.text, fields.relationToClaimantAnswer.text),
"More about the care you provide",
buildQuestion(fields.cared35HoursQuestion.text, fields.cared35HoursAnswer.text)
) ++ aboutTheCareYouProvide(XMLDataFields(xml)) ++ careBreaks(XMLDataFields(xml))
}
def sectionAboutOtherMoney(xml:Elem) = {
val fields = SectionAboutOtherMoney(xml)
val otherMoneyTitle =
if (serviceVersion(xml) == "0.1") "Part 7 - About Other Money"
else "PART 7 - OTHER PAYMENTS"
Seq (otherMoneyTitle,
buildQuestion(fields.otherMoneyQuestion.text, fields.otherMoneyAnswer.text),
buildQuestion(fields.otherMoneyPaymentQuestion.text, fields.otherMoneyPaymentAnswer.text),
buildQuestion(fields.otherMoneyPaymentNameQuestion.text, fields.otherMoneyPaymentNameAnswer.text),
buildQuestion(fields.otherMoneyPaymentAmountQuestion.text, fields.otherMoneyPaymentAmountAmount.text),
buildOther(fields.otherMoneyPaymentFrequencyQuestion.text, fields.otherMoneyPaymentFrequencyAnswer.text, fields.otherMoneyPaymentFrequencyOther.text),
buildQuestion(fields.otherMoneySSPQuestion.text, fields.otherMoneySSPAnswer.text),
buildQuestion(fields.otherMoneySSPPaymentAmountQuestion.text, fields.otherMoneySSPPaymentAmountAmount.text),
buildOther(fields.otherMoneySSPPaymentFrequencyQuestion.text, fields.otherMoneySSPPaymentFrequencyAnswer.text, fields.otherMoneySSPPaymentFrequencyOther.text),
"Employer's Name" + " " + fields.otherMoneySSPEmployerName.text,
"Employer's Address " + fields.otherMoneySSPEmployerAddress,
"Employer's Postcode " + fields.otherMoneySSPEmployerPostcode.text,
buildQuestion(fields.otherMoneySPQuestion.text, fields.otherMoneySPAnswer.text),
buildQuestion(fields.otherMoneySPPaymentAmountQuestion.text, fields.otherMoneySPPaymentAmountAmount.text),
buildOther(fields.otherMoneySPPaymentFrequencyQuestion.text, fields.otherMoneySPPaymentFrequencyAnswer.text, fields.otherMoneySPPaymentFrequencyOther.text),
"Employer's Name" + " " + fields.otherMoneySPEmployerName.text,
"Employer's Address " + fields.otherMoneySPEmployerAddress,
"Employer's Postcode " + fields.otherMoneySPEmployerPostcode.text,
buildQuestion(fields.eeaGuardQuestion.text, fields.eeaGuardAnswer.text),
buildQuestion(fields.receiveEEAPensionsBenefitsQuestion.text, fields.receiveEEAPensionsBenefitsAnswer.text),
buildQuestion(fields.receiveEEAPensionsBenefitsDetailsQuestion.text, fields.receiveEEAPensionsBenefitsDetailsAnswer.text),
buildQuestion(fields.workingEEAInsuranceQuestion.text, fields.workingEEAInsuranceAnswer.text),
buildQuestion(fields.workingEEAInsuranceDetailsQuestion.text, fields.workingEEAInsuranceDetailsAnswer.text)
)
}
def sectionAboutYourPayDetails(xml:Elem) = {
val fields = SectionAboutYourPayDetails(xml)
val payDetailsTitle = "PART 8 - PAY DETAILS"
Seq (payDetailsTitle,
buildQuestion(fields.howToGetPaidQuestion.text, fields.howToGetPaidAnswer.text),
buildOther(fields.howOftenGetPaidQuestion.text, fields.howOftenGetPaidAnswer.text, fields.howOftenGetPaidOther.text),
"Bank/Building Society Details",
buildQuestion(fields.bankAccountHolderNameQuestion.text, fields.bankAccountHolderNameAnswer.text),
buildQuestion(fields.bankAccountBankNameQuestion.text, fields.bankAccountBankNameAnswer.text),
buildQuestion(fields.bankAccountSortCodeQuestion.text, fields.bankAccountSortCodeAnswer.text),
buildQuestion(fields.bankAccountNumberQuestion.text, fields.bankAccountNumberAnswer.text),
buildQuestion(fields.bankAccountReferenceNumberQuestion.text, fields.bankAccountReferenceNumberAnswer.text)
)
}
def sectionAboutYourEducation(xml:Elem) = {
val fields = SectionAboutYourEducation(xml)
Seq("Part 4 - About Your Education",
buildQuestion(fields.courseOfEducationQuestion.text, fields.courseOfEducationAnswer.text),
"Your course details",
"Course title "+fields.courseDetailsTitle.text,
buildQuestion(fields.courseDetailsDateStartedQuestion.text, fields.courseDetailsDateStartedAnswer.text),
buildQuestion(fields.courseDetailsExpectedEndDateQuestion.text, fields.courseDetailsExpectedEndDateAnswer.text),
"Your student reference number "+fields.studentReferenceNumber.text,
"Name of school, college or university "+fields.nameOfUniversity.text,
"Name of main Teacher or Tutor "+fields.nameOfTheTutor.text,
"Course contact number "+fields.phoneNumber.text
)
}
def sectionCustomerConsent(xml:Elem) = {
val fields = SectionConsentAndDeclaration(xml)
val consentXml: Seq[String] = fields.consent.flatMap { c =>
if ((c \\ "Answer").text.toLowerCase == "no")
Seq(buildQuestion((c \\ "QuestionLabel").text, (c \\ "Answer").text),
buildQuestion((c \\ "Why" \\ "QuestionLabel").text, (c \\ "Why" \\ "Answer").text))
else
Seq(buildQuestion((c \\ "QuestionLabel").text, (c \\ "Answer").text))
}
Seq ("Part 9 - Customer Consent And Declaration",
"Additional Information",
buildQuestion(fields.otherInformationAdditionalInformationQuestion.text,fields.otherInformationAdditionalInformationAnswer.text),
buildQuestion(fields.otherInformationWelshCommunicationQuestion.text,fields.otherInformationWelshCommunicationAnswer.text),
"Consent",
"Disclaimer",
"Declaration"
) ++ consentXml
}
def sectionCustomerDeclaration(xml:Elem) = {
val fields = SectionConsentAndDeclaration(xml)
val disclaimerQuestionXml = fields.disclaimerQuestion.map (d => buildQuestion((d \\ "QuestionLabel").text, (d \\ "Answer").text))
val disclaimerStatementXml: Seq[String] =
fields.disclaimerStatement.map(x =>
(x \\\\ "Content").map(_.text).reduce((total, cur) => total + " " + cur))
val declarationQuestionXml = fields.declarationQuestion.map (d => buildQuestion((d \\ "QuestionLabel").text, (d \\ "Answer").text))
val declarationStatementXml: Seq[String] =
fields.declarationStatement.map(x =>
(x \\\\ "Content").map(_.text).reduce((total, cur) => total + " " + cur))
disclaimerStatementXml ++ disclaimerQuestionXml ++ declarationStatementXml ++ declarationQuestionXml
}
def sectionEvidenceList(xml:Elem) = {
val fields = SectionEvidenceList(xml)
Seq("Part 10 - Customer Evidence List",
fields.postCode.text
) ++ fields.evidenceList ++ fields.address
}
def serviceVersion(xml:Elem) = {
XMLDataFields(xml).serviceVersion.text
}
}
|
Department-for-Work-and-Pensions/RenderingService
|
test/test_data/v20/XMLData.scala
|
Scala
|
mit
| 17,832 |
import scala.concurrent.duration.FiniteDuration
import reactivemongo.bson.{ BSONDocument, BSONString }
import reactivemongo.api.MongoConnection
import reactivemongo.api.commands.CollStatsResult
import reactivemongo.api.collections.bson.BSONCollection
import org.specs2.concurrent.{ ExecutionEnv => EE }
class CollectionSpec extends org.specs2.mutable.Specification {
import Common._
sequential
lazy val collection = db(s"collspec${System identityHashCode db}")
lazy val slowColl = slowDb(s"collspec${System identityHashCode slowDb}")
val cappedMaxSize: Long = 2 * 1024 * 1024
"ReactiveMongo" should {
"create a collection" in { implicit ee: EE =>
collection.create() must beEqualTo({}).await(1, timeout) and (
slowColl.create() must beEqualTo({}).await(1, slowTimeout)
)
}
"convert to capped" >> {
def cappedSpec(c: BSONCollection, timeout: FiniteDuration)(implicit ee: EE) = c.convertToCapped(cappedMaxSize, None) must beEqualTo({}).await(1, timeout)
"with the default collection" in { implicit ee: EE =>
cappedSpec(collection, timeout)
}
"with the default collection" in { implicit ee: EE =>
cappedSpec(slowColl, slowTimeout)
}
}
"check if it's capped (MongoDB <= 2.6)" in { implicit ee: EE =>
collection.stats must beLike[CollStatsResult] {
case stats => stats.capped must beTrue and (stats.maxSize must beNone)
}.await(1, timeout)
} tag "mongo2"
"check if it's capped (MongoDB >= 3.0)" >> {
def statSpec(con: MongoConnection, c: BSONCollection, timeout: FiniteDuration)(implicit ee: EE) = {
c.stats must beLike[CollStatsResult] {
case stats => stats.capped must beTrue and (
stats.maxSize must beSome(cappedMaxSize)
)
}.await(1, timeout)
}
"with the default connection" in { implicit ee: EE =>
statSpec(connection, collection, timeout)
} tag "not_mongo26"
"with the slow connection" in { implicit ee: EE =>
statSpec(slowConnection, slowColl, slowTimeout)
} tag "not_mongo26"
}
"insert some docs then test lastError result and finally count" in {
implicit ee: EE =>
collection.insert(BSONDocument("name" -> BSONString("Jack"))).
map(_.ok) must beTrue.await(1, timeout) and (
collection.count() must beEqualTo(1).await(1, timeout)
) and (
collection.count(skip = 1) must beEqualTo(0).await(1, timeout)
) and (
collection.count(selector = Some(BSONDocument("name" -> "Jack"))).
aka("matching count") must beEqualTo(1).await(1, timeout)
) and (
collection.count(selector = Some(BSONDocument("name" -> "Foo"))).
aka("not matching count") must beEqualTo(0).await(1, timeout)
)
}
// Empty capped need to be enabled with enableTestCommands
// see: http://docs.mongodb.org/manual/reference/command/emptycapped/#dbcmd.emptycapped
/*"empty the capped collection" in {
Await.result(collection.emptyCapped(), timeout) mustEqual true
Await.result(db.command(Count(collection.name)), timeout) mustEqual 0
} tag ("testCommands")*/
"drop it" in { implicit ee: EE =>
collection.drop(false) must beTrue.await(1, timeout)
}
}
}
|
maxime-gautre/ReactiveMongo
|
driver/src/test/scala/CollectionSpec.scala
|
Scala
|
apache-2.0
| 3,381 |
/*
* Copyright 2011 Hui Wen Han, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.huiwen.prefz
package conversions
import com.twitter.util.Time
import com.twitter.util.TimeConversions._
import me.huiwen.prefz
object Preference {
class RichPrefzPreference(pref: prefz.Preference) {
def toThrift = new thrift.Preference(pref.userId, pref.itemId,
pref.source,pref.action,pref.score,pref.updatedAt.inSeconds,
thrift.Status.findByValue(pref.status.id),thrift.CreateType.findByValue(pref.createType.id))
}
implicit def RichPrefzPreference(pref: prefz.Preference) = new RichPrefzPreference(pref)
class RichThriftPreference(pref: thrift.Preference) {
def fromThrift = new prefz.Preference(pref.user_id, pref.item_id,
pref.source,pref.action,Time.fromSeconds(pref.create_date),pref.score,prefz.Status(pref.status.getValue()),prefz.CreateType(pref.create_type.getValue()))
}
implicit def RichThriftPreference(pref: thrift.Preference) = new RichThriftPreference(pref)
}
|
huiwenhan/PrefStore
|
src/main/scala/me/huiwen/prefz/conversions/Preference.scala
|
Scala
|
apache-2.0
| 1,620 |
import scala.concurrent._
import java.util.concurrent.atomic.AtomicInteger
object Test {
def main(args: Array[String]): Unit = {
test()
}
def test() = {
def await(f: Future[Any]) =
Await.result(f, duration.Duration.Inf)
val ec = new TestExecutionContext(ExecutionContext.Implicits.global)
{
val p = Promise[Int]()
val fp = p.future
println("mapping")
val mapped = fp.map(x => x)(ec)
p.success(0)
await(mapped)
}
{
println("flatmapping")
val p = Promise[Int]()
val fp = p.future
val flatMapped = fp.flatMap({ (x: Int) =>
Future.successful(2 * x)
})(ec)
p.success(0)
await(flatMapped)
}
{
println("recovering")
val recovered = Future.failed(new Throwable()).recoverWith {
case _ => Future.successful(2)
}(ec)
await(recovered)
}
}
class TestExecutionContext(delegate: ExecutionContext) extends ExecutionContext {
def execute(runnable: Runnable): Unit = ???
def reportFailure(t: Throwable): Unit = ???
override def prepare(): ExecutionContext = {
val preparedDelegate = delegate.prepare()
new ExecutionContext {
def execute(runnable: Runnable): Unit = {
println("execute()")
preparedDelegate.execute(runnable)
}
def reportFailure(t: Throwable): Unit = ???
}
}
}
}
|
yusuke2255/dotty
|
tests/run/future-flatmap-exec-count.scala
|
Scala
|
bsd-3-clause
| 1,427 |
/**
* Copyright (C) 2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.agent.itest.support
import net.lshift.diffa.agent.client.DifferencesRestClient
import org.joda.time.DateTime
import net.lshift.diffa.kernel.differencing.{ExternalDifferenceEvent, DifferenceEvent}
import org.junit.Assert._
/**
* Helper class for retrieving differences.
*/
class DifferencesHelper(pairKey:String, diffClient:DifferencesRestClient) {
def pollForAllDifferences(from:DateTime, until:DateTime, n:Int = 20, wait:Int = 100, minLength:Int = 1) =
tryAgain((d:DifferencesRestClient) => d.getEvents(pairKey, from, until, 0, 100), n, wait, minLength)
def tryAgain(poll:DifferencesRestClient => Seq[ExternalDifferenceEvent], n:Int = 20, wait:Int = 100, minLength:Int = 1): Seq[ExternalDifferenceEvent] = {
var i = n
var diffs = poll(diffClient)
while(diffs.length < minLength && i > 0) {
Thread.sleep(wait)
diffs = poll(diffClient)
i-=1
}
assertNotNull(diffs)
diffs
}
def waitFor(from:DateTime, until:DateTime, conditions:DifferenceCondition*) = {
val n = 20
val wait = 100
def poll() = diffClient.getEvents(pairKey, from, until, 0, 100)
def satisfied(diffs:Seq[ExternalDifferenceEvent]) = conditions.forall(_.isSatisfiedBy(diffs))
var i = n
var diffs = poll()
while(!satisfied(diffs) && i > 0) {
Thread.sleep(wait)
diffs = poll()
i-=1
}
if (!satisfied(diffs)) {
val message = conditions.filter(!_.isSatisfiedBy(diffs)).map(_.describeIssuesWith(diffs)).mkString(";")
throw new Exception("Conditions weren't satisfied: " + message)
}
diffs
}
}
abstract class DifferenceCondition {
def isSatisfiedBy(diffs:Seq[ExternalDifferenceEvent]):Boolean
def describeIssuesWith(diffs:Seq[ExternalDifferenceEvent]):String
}
case class DiffCount(count:Int) extends DifferenceCondition {
def isSatisfiedBy(diffs: Seq[ExternalDifferenceEvent]) = diffs.length == count
def describeIssuesWith(diffs: Seq[ExternalDifferenceEvent]) =
"Didn't reach required diff count %s. Last attempt returned %s diffs (%s)".format(count, diffs.length, diffs)
}
case class DoesntIncludeObjId(id:String) extends DifferenceCondition {
def isSatisfiedBy(diffs: Seq[ExternalDifferenceEvent]) = diffs.find(e => e.entityId == id).isEmpty
def describeIssuesWith(diffs: Seq[ExternalDifferenceEvent]) =
"Difference ids (%s) shouldn't have included %s".format(diffs.map(e => e.entityId), id)
}
case class IncludesObjId(id:String) extends DifferenceCondition {
def isSatisfiedBy(diffs: Seq[ExternalDifferenceEvent]) = diffs.find(e => e.entityId == id).isDefined
def describeIssuesWith(diffs: Seq[ExternalDifferenceEvent]) =
"Difference ids (%s) should have included %s".format(diffs.map(e => e.entityId), id)
}
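// Usage sketch (values illustrative, not from the original tests): poll until the pair
// reports exactly two differences, one of which concerns entity "id-1".
//   val helper = new DifferencesHelper("some-pair", diffClient)
//   helper.waitFor(from, until, DiffCount(2), IncludesObjId("id-1"))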
|
0x6e6562/diffa
|
agent/src/test/scala/net/lshift/diffa/agent/itest/support/DifferencesHelper.scala
|
Scala
|
apache-2.0
| 3,364 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.dllib.nn.abstractnn.Activity
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
* Feature Pyramid Network.
* @param inChannels number of channels of feature maps
* @param outChannels number of channels of FPN output
* @param topBlocks Top Blocks option
* Extra operation to be performed on the smallest
* resolution FPN output, whose result is appended
* to the result list
* 0 for null,
* 1 for using max pooling on the last level
* 2 for extra layers P6 and P7 in RetinaNet
* @param inChannelsOfP6P7 number of input channels of P6 P7 in RetinaNet
* @param outChannelsOfP6P7 number of output channels of P6 P7 in RetinaNet
*/
class FPN[T : ClassTag](
val inChannels: Array[Int],
val outChannels: Int,
val topBlocks: Int = 0,
val inChannelsOfP6P7: Int = 0,
val outChannelsOfP6P7: Int = 0
)
(implicit ev: TensorNumeric[T])
extends BaseModule[T]{
override def buildModel(): Module[T] = {
val featureMapsNum = inChannels.length
val innerBlockModules = new Array[SpatialConvolution[T]](featureMapsNum)
val layerBlockModules = new Array[SpatialConvolution[T]](featureMapsNum)
for (i <- 0 to featureMapsNum - 1) {
if (inChannels(i) != 0) {
val innerBlockModule =
SpatialConvolution[T](inChannels(i), outChannels, 1, 1, 1, 1)
.setName(s"fpn_inner${i + 1}")
val layerBlockModule =
SpatialConvolution[T](outChannels, outChannels, 3, 3, 1, 1, 1, 1)
.setName(s"fpn_layer${i + 1}")
innerBlockModules(i) = innerBlockModule
layerBlockModules(i) = layerBlockModule
}
}
val inputs = new Array[ModuleNode[T]](featureMapsNum)
for (i <- 0 to featureMapsNum - 1) {
inputs(i) = Input[T]()
}
val innerBlocks = new Array[ModuleNode[T]](featureMapsNum)
for (i <- 0 to featureMapsNum - 1) {
innerBlocks(i) = innerBlockModules(i).inputs(inputs(i))
}
val results = new Array[ModuleNode[T]](featureMapsNum + topBlocks)
var count = results.length - 1 - topBlocks
var lastInner = innerBlocks(featureMapsNum - 1)
results(count) = layerBlockModules(featureMapsNum - 1).inputs(lastInner)
for(i <- featureMapsNum - 2 to 0 by -1) {
val layerBlock = layerBlockModules(i)
if (layerBlock != null) {
val innerTopDown = UpSampling2D[T](Array(2, 2)).inputs(lastInner)
val innerLateral = innerBlocks(i)
lastInner = CAddTable[T]().setName(s"number_${i}_${featureMapsNum}")
.inputs(innerLateral, innerTopDown)
count -= 1
results(count) = layerBlock.inputs(lastInner)
}
}
if (topBlocks == 1) {
results(results.length - 1) = SpatialMaxPooling(1, 1, 2, 2)
.inputs(results(featureMapsNum - 1))
}
if (topBlocks == 2) {
val p6_module = SpatialConvolution[T](inChannelsOfP6P7, outChannelsOfP6P7, 3, 3, 2, 2, 1, 1)
val p7_module = SpatialConvolution[T](outChannelsOfP6P7, outChannelsOfP6P7, 3, 3, 2, 2, 1, 1)
results(results.length - 2) = if (inChannelsOfP6P7 == outChannelsOfP6P7) {
p6_module.inputs(results(featureMapsNum - 1))
} else {
p6_module.inputs(inputs(featureMapsNum - 1))
}
results(results.length - 1) = p7_module.inputs(ReLU[T]().inputs(results(results.length - 2)))
}
Graph(inputs, results)
}
override def updateGradInput(input: Activity, gradOutput: Activity): Activity = {
throw new UnsupportedOperationException("Not support backward propagation")
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[FPN[T]]
override def equals(other: Any): Boolean = other match {
case that: FPN[T] =>
super.equals(that) &&
(that canEqual this) &&
inChannels.deep == that.inChannels.deep &&
outChannels == that.outChannels
case _ => false
}
override def hashCode(): Int = {
val state = Seq(super.hashCode(), inChannels, outChannels)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
override def reset(): Unit = {
super.reset()
model.reset()
}
override def toString: String = s"FPN($outChannels)"
}
object FPN {
def apply[@specialized(Float, Double) T: ClassTag](
inChannels: Array[Int],
outChannels: Int,
topBlocks: Int = 0,
inChannelsOfP6P7: Int = 0,
outChannelsOfP6P7: Int = 0
)(implicit ev: TensorNumeric[T]): FPN[T] = {
new FPN[T](inChannels, outChannels, topBlocks, inChannelsOfP6P7, outChannelsOfP6P7)
}
}
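// Usage sketch (channel sizes illustrative, not from the original file): an FPN over four
// backbone stages, with topBlocks = 1 appending a max-pooled extra level to the outputs.
//   val fpn = FPN[Float](inChannels = Array(256, 512, 1024, 2048), outChannels = 256, topBlocks = 1)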
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/FPN.scala
|
Scala
|
apache-2.0
| 5,482 |
package org.zouzias.spark.lucenerdd.examples.linkage
import org.apache.spark.sql.DataFrame
object LinkageUtils {
def matches(linkageResults: DataFrame, truthDF: DataFrame,
leftId: String, rightId: String)
: Long = {
linkageResults
.join(truthDF, truthDF.col(leftId).equalTo(linkageResults(leftId))
&& truthDF.col(rightId).equalTo(linkageResults(rightId)))
.count()
}
/**
* Naive white-space tokenizer for text, keep only alphanumerics
*
* @param text Input text to tokenize
* @param minThreshold Keep tokens with length more than minThreshold
* @return Array of tokens / words
*/
def tokenize(text: String, minThreshold: Int): Array[String] = {
text.split(" ")
.flatMap(_.replaceAll("[^a-zA-Z0-9]", " ").split(" "))
.filter(_.length > minThreshold)
.distinct
}
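// Example (illustrative): tokenize("Hello, world-42!", 2) yields Array("Hello", "world");
// "42" is dropped because its length is not greater than minThreshold.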
}
|
zouzias/spark-lucenerdd-examples
|
src/main/scala/org/zouzias/spark/lucenerdd/examples/linkage/LinkageUtils.scala
|
Scala
|
apache-2.0
| 840 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import org.apache.spark.sql.SparkSession
import org.apache.carbondata.common.logging.LogServiceFactory
/**
* This class contains all carbon hive metadata related utilities
*/
object CarbonHiveMetadataUtil {
@transient
val LOGGER = LogServiceFactory.getLogService(CarbonHiveMetadataUtil.getClass.getName)
/**
* This method invalidates the table from HiveMetastoreCatalog before dropping table
*
* @param databaseName
* @param tableName
* @param sparkSession
*/
def invalidateAndDropTable(databaseName: String,
tableName: String,
sparkSession: SparkSession): Unit = {
try {
sparkSession.sql(s"DROP TABLE IF EXISTS $databaseName.$tableName")
} catch {
case e: Exception =>
LOGGER.audit(
s"Error while deleting the table $databaseName.$tableName during drop carbon table: " +
e.getMessage)
}
}
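// Usage sketch (names hypothetical):
//   CarbonHiveMetadataUtil.invalidateAndDropTable("default", "t1", sparkSession)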
}
|
mayunSaicmotor/incubator-carbondata
|
integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetadataUtil.scala
|
Scala
|
apache-2.0
| 1,727 |
package collections
object Pack {
// Hint: takeWhile/dropWhile or span
def pack[T](list: List[T]): List[List[T]] = list match {
case Nil => Nil
case h :: _ =>
val (list1, rest) = list.span(_ == h)
list1 :: pack(rest)
}
def encode[T](list: List[T]): List[(T, Int)] =
pack(list).map(l => (l.head, l.size))
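// Examples (illustrative):
//   pack(List(1, 1, 2, 3, 3)) == List(List(1, 1), List(2), List(3, 3))
//   encode(List(1, 1, 2, 3, 3)) == List((1, 2), (2, 1), (3, 2))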
}
|
PiSurQuatre/fp-scala-public
|
exercises/src/main/scala/collections/Pack.scala
|
Scala
|
mit
| 418 |
package io.youi
import io.youi.net.{Path, Protocol, URL}
import org.scalajs.dom._
import reactify.{Channel, Val, Var}
import scala.scalajs.js
/**
* Convenience functionality for working with browser history.
*/
object History {
/**
* If set to true calling push and replace will always result in the same functionality as set. This is a toggle to
* be able to disable single-application mode with one switch.
*
* Defaults to false.
*/
val alwaysReload: Var[Boolean] = Var(false)
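// e.g. History.alwaysReload @= true // sketch: opts every push/replace into a full page load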
private val currentURL = Var[URL](URL(document.location.href))
val url: Val[URL] = Val(currentURL)
val stateChange: Channel[HistoryStateChange] = Channel[HistoryStateChange]
def isSecure: Boolean = url().protocol == Protocol.Https
window.addEventListener("popstate", (evt: PopStateEvent) => {
val urlString = document.location.href
val newURL = URL(urlString)
if (newURL != url()) {
currentURL @= newURL
stateChange @= HistoryStateChange(newURL, StateType.Pop, evt.state)
}
})
def update(change: HistoryStateChange): Unit = change.stateType match {
case StateType.Push => push(change.url, change.state)
case StateType.Replace => replace(change.url, change.state)
case StateType.Set => set(change.url)
case StateType.Pop => back()
}
def setPath(path: Path, keepParams: Boolean = false): Unit = {
set(url.withPath(path) match {
case u if keepParams => u
case u => u.clearParams()
})
}
def set(url: URL, target: String = "_self"): Unit = {
window.open(url.toString, target)
stateChange @= HistoryStateChange(url, StateType.Set, null)
}
def pushPath(path: Path, keepParams: Boolean = false, state: js.Any = null): Unit = {
push(url.withPath(path) match {
case u if keepParams => u
case u => u.clearParams()
}, state)
}
def push(url: URL, state: js.Any = null): Unit = if (alwaysReload()) {
set(url)
} else if (url != this.url()) {
val urlString = url.toString
window.history.pushState(state, urlString, urlString)
currentURL @= url
stateChange @= HistoryStateChange(url, StateType.Push, state)
}
def replacePath(path: Path, keepParams: Boolean = false, state: js.Any = null): Unit = {
replace(url.withPath(path) match {
case u if keepParams => u
case u => u.clearParams()
}, state)
}
def replace(url: URL, state: js.Any): Unit = if (alwaysReload()) {
set(url)
} else if (url != this.url()) {
val urlString = url.toString
window.history.replaceState(state, urlString, urlString)
currentURL @= url
stateChange @= HistoryStateChange(url, StateType.Replace, state)
}
def back(delta: Int = 1): Unit = window.history.go(-delta)
def forward(delta: Int = 1): Unit = window.history.go(delta)
def reload(force: Boolean): Unit = window.location.reload(force)
/**
* Updates all anchors on the page to internal links to push history instead of loading another page. Essentially
* converts all links to be single-page-app compliant. May be run multiple times and will only change new links.
*/
def fixAnchors(): Unit = dom.byTag[html.Anchor]("a").foreach(fixAnchor)
def fixAnchor(anchor: html.Anchor): html.Anchor = {
if (Option(anchor.onclick).isEmpty && linkType(anchor.href) == LinkType.Internal) {
this.anchor(anchor, anchor.href)
} else {
anchor
}
}
def anchor(anchor: html.Anchor, location: String): html.Anchor = {
anchor.href = location
anchor.onclick = (evt: Event) => {
evt.preventDefault()
evt.stopPropagation()
push(URL(anchor.href))
}
anchor
}
def linkType(href: String): LinkType = if (href.trim.isEmpty) {
LinkType.Empty
} else if (href.contains("#")) {
LinkType.Hash
} else if (href.startsWith("javascript:")) {
LinkType.JavaScript
} else if (href.startsWith(url().base) || href.startsWith("/")) {
LinkType.Internal
} else {
LinkType.External
}
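// Examples (sketch, assuming the current page is not hosted on example.com):
//   linkType("") == LinkType.Empty; linkType("#top") == LinkType.Hash
//   linkType("/docs") == LinkType.Internal; linkType("https://example.com") == LinkType.External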
}
|
outr/youi
|
dom/src/main/scala/io/youi/History.scala
|
Scala
|
mit
| 3,988 |
package unfiltered
import unfiltered.request.HttpRequest
import unfiltered.response.{ResponseFunction,HttpResponse,Pass}
object Cycle {
/** A roundtrip intent is a set of instructions for producing
* a complete response to a request. Plans that contain intents
* of this type can be run against a general set of tests. */
type Intent[A,B] = PartialFunction[HttpRequest[A], ResponseFunction[B]]
}
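// Sketch of an intent (response combinators assumed from unfiltered's DSL):
//   val echo: Cycle.Intent[Any, Any] = {
//     case req => unfiltered.response.Ok ~> unfiltered.response.ResponseString("hello")
//   }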
|
softprops/Unfiltered
|
library/src/main/scala/intents.scala
|
Scala
|
mit
| 408 |
package com.twitter.zipkin.web
import com.twitter.zipkin.common.{Annotation, BinaryAnnotation, Span}
import org.specs.Specification
import com.twitter.zipkin.common.json.ZipkinJson
class JsonSerializationSpec extends Specification {
"Jerkson" should {
"serialize" in {
"span with no annotations" in {
val s = Span(1L, "Unknown", 2L, None, List.empty[Annotation], List.empty[BinaryAnnotation], false)
ZipkinJson.generate(s) mustNot throwAnException
}
}
}
}
|
tangyang/zipkin
|
zipkin-web/src/test/scala/com/twitter/zipkin/web/JsonSerializationSpec.scala
|
Scala
|
apache-2.0
| 498 |
package class_instance_extractor
import com.hp.hpl.jena.rdf.model.ResourceFactory
import com.hp.hpl.jena.util.FileManager
import java.io._
import scala.collection.JavaConversions._
import scala.collection.mutable.Map
import com.hp.hpl.jena.vocabulary.OWL
/**
* 1. Extracting class-instance relationships
* 1-1. class_instance_extractor.ClassInstanceExtractor.scala
* - Input
* -- ontologies/wikipediaontology_instance_20101114ja.rdf
* - Outputs
* -- inputs_and_outputs/class-instance.txt
* -- inputs_and_outputs/class-instance-cnt.csv
*/
object ClassInstanceExtractor {
def main(args: Array[String]) {
val inputOntology = "ontologies/wikipediaontology_instance_20101114ja.rdf";
val outputText = "inputs_and_outputs/class-instance.txt"
val outputCSV = "inputs_and_outputs/class-instance-cnt.csv"
val writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputText), "UTF-8"))
val cntWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputCSV), "SJIS"))
val typeProperty = ResourceFactory.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")
val clsInstanceCntMap = Map[String, Int]()
val model = FileManager.get().loadModel(inputOntology)
for (stmt <- model.listStatements(null, typeProperty, null).toList()) {
println("subject: " + stmt.getSubject().getURI())
println("object: " + stmt.getObject())
if (!stmt.getObject().equals(OWL.Class)) {
val instance = stmt.getSubject().getURI().split("instance/")(1)
val cls = stmt.getObject().asResource().getURI().split("class/")(1)
clsInstanceCntMap.get(cls) match {
case Some(cnt) => clsInstanceCntMap.put(cls, cnt + 1)
case None => clsInstanceCntMap.put(cls, 1)
}
writer.write(instance + "\\t" + cls)
writer.newLine()
println(instance + "\\t" + cls)
}
}
writer.close
for (entry <- clsInstanceCntMap.entrySet()) {
cntWriter.write(entry.getKey() + "," + entry.getValue())
cntWriter.newLine()
println(entry.getKey() + "\\t" + entry.getValue())
}
cntWriter.close
println("Classes with instances: " + clsInstanceCntMap.keySet.size)
}
}
|
t-morita/JWO_Refinement_Tools
|
src/main/scala/class_instance_extractor/ClassInstanceExtractor.scala
|
Scala
|
apache-2.0
| 2,288 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.fixture
import org.ensime.vfs._
import org.scalatest.{ BeforeAndAfterAll, Suite }
trait EnsimeVFSFixture {
def withVFS[T](testCode: EnsimeVFS => T): T
}
trait IsolatedEnsimeVFSFixture extends Suite with EnsimeVFSFixture {
override def withVFS[T](testCode: EnsimeVFS => T): T = {
val vfs = EnsimeVFS()
try {
testCode(vfs)
} finally {
vfs.close()
}
}
}
/**
* Provides the basic building blocks to build custom fixtures around
* a project that is cloned once for the test suite.
*/
trait SharedEnsimeVFSFixture
extends Suite
with EnsimeVFSFixture
with BeforeAndAfterAll {
private[fixture] implicit var _vfs: EnsimeVFS = _
override def beforeAll(): Unit = {
super.beforeAll()
_vfs = EnsimeVFS()
}
override def afterAll(): Unit = {
super.afterAll()
_vfs.close()
}
override def withVFS[T](testCode: EnsimeVFS => T): T =
testCode(_vfs)
}
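// Usage sketch: a suite mixes in SharedEnsimeVFSFixture and calls withVFS { vfs => ... }
// in each test; the single VFS created in beforeAll is shared across the whole run.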
|
ensime/ensime-server
|
testutil/src/main/scala/org/ensime/fixture/EnsimeVFSFixture.scala
|
Scala
|
gpl-3.0
| 1,069 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{ByteArrayOutputStream, File, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.net.URI
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import java.util.jar.JarFile
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source
import scala.util.Try
import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.launcher.SparkSubmitArgumentsParser
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.util.Utils
/**
* Parses and encapsulates arguments from the spark-submit script.
* The env argument is used for testing.
*/
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser {
var master: String = null
var deployMode: String = null
var executorMemory: String = null
var executorCores: String = null
var totalExecutorCores: String = null
var propertiesFile: String = null
var driverMemory: String = null
var driverExtraClassPath: String = null
var driverExtraLibraryPath: String = null
var driverExtraJavaOptions: String = null
var queue: String = null
var numExecutors: String = null
var files: String = null
var archives: String = null
var mainClass: String = null
var primaryResource: String = null
var name: String = null
var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
var jars: String = null
var packages: String = null
var repositories: String = null
var ivyRepoPath: String = null
var packagesExclusions: String = null
var verbose: Boolean = false
var isPython: Boolean = false
var pyFiles: String = null
var isR: Boolean = false
var action: SparkSubmitAction = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
var proxyUser: String = null
var principal: String = null
var keytab: String = null
// Standalone cluster mode only
var supervise: Boolean = false
var driverCores: String = null
var submissionToKill: String = null
var submissionToRequestStatusFor: String = null
var useRest: Boolean = true // used internally
/** Default properties present in the currently defined defaults file. */
lazy val defaultSparkProperties: HashMap[String, String] = {
val defaultProperties = new HashMap[String, String]()
// scalastyle:off println
if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
Option(propertiesFile).foreach { filename =>
val properties = Utils.getPropertiesFromFile(filename)
properties.foreach { case (k, v) =>
defaultProperties(k) = v
}
// Property files may contain sensitive information, so redact before printing
if (verbose) {
Utils.redact(properties).foreach { case (k, v) =>
SparkSubmit.printStream.println(s"Adding default property: $k=$v")
}
}
}
// scalastyle:on println
defaultProperties
}
// Set parameters from command line arguments
try {
parse(args.asJava)
} catch {
case e: IllegalArgumentException =>
SparkSubmit.printErrorAndExit(e.getMessage())
}
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
// Remove keys that don't start with "spark." from `sparkProperties`.
ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
validateArguments()
/**
* Merge values from the default properties file with those specified through --conf.
* When this is called, `sparkProperties` is already filled with configs from the latter.
*/
private def mergeDefaultSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
// Honor --conf before the defaults file
defaultSparkProperties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
sparkProperties(k) = v
}
}
}
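// Illustration (hypothetical values): if spark-defaults.conf sets spark.master=yarn but the
// user passed --conf spark.master=local[2], sparkProperties already holds local[2] when this
// runs, so the defaults-file value is not copied in.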
/**
* Remove keys that don't start with "spark." from `sparkProperties`.
*/
private def ignoreNonSparkProperties(): Unit = {
sparkProperties.foreach { case (k, v) =>
if (!k.startsWith("spark.")) {
sparkProperties -= k
SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
}
}
}
/**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
master = Option(master)
.orElse(sparkProperties.get("spark.master"))
.orElse(env.get("MASTER"))
.orNull
driverExtraClassPath = Option(driverExtraClassPath)
.orElse(sparkProperties.get("spark.driver.extraClassPath"))
.orNull
driverExtraJavaOptions = Option(driverExtraJavaOptions)
.orElse(sparkProperties.get("spark.driver.extraJavaOptions"))
.orNull
driverExtraLibraryPath = Option(driverExtraLibraryPath)
.orElse(sparkProperties.get("spark.driver.extraLibraryPath"))
.orNull
driverMemory = Option(driverMemory)
.orElse(sparkProperties.get("spark.driver.memory"))
.orElse(env.get("SPARK_DRIVER_MEMORY"))
.orNull
driverCores = Option(driverCores)
.orElse(sparkProperties.get("spark.driver.cores"))
.orNull
executorMemory = Option(executorMemory)
.orElse(sparkProperties.get("spark.executor.memory"))
.orElse(env.get("SPARK_EXECUTOR_MEMORY"))
.orNull
executorCores = Option(executorCores)
.orElse(sparkProperties.get("spark.executor.cores"))
.orElse(env.get("SPARK_EXECUTOR_CORES"))
.orNull
totalExecutorCores = Option(totalExecutorCores)
.orElse(sparkProperties.get("spark.cores.max"))
.orNull
name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
packagesExclusions = Option(packagesExclusions)
.orElse(sparkProperties.get("spark.jars.excludes")).orNull
repositories = Option(repositories)
.orElse(sparkProperties.get("spark.jars.repositories")).orNull
deployMode = Option(deployMode)
.orElse(sparkProperties.get("spark.submit.deployMode"))
.orElse(env.get("DEPLOY_MODE"))
.orNull
numExecutors = Option(numExecutors)
.getOrElse(sparkProperties.get("spark.executor.instances").orNull)
queue = Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
// Try to set main class from JAR if no --class argument is given
if (mainClass == null && !isPython && !isR && primaryResource != null) {
val uri = new URI(primaryResource)
val uriScheme = uri.getScheme()
uriScheme match {
case "file" =>
try {
Utils.tryWithResource(new JarFile(uri.getPath)) { jar =>
// Note that this might still return null if no main-class is set; we catch that later
mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
}
} catch {
case _: Exception =>
SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
}
case _ =>
SparkSubmit.printErrorAndExit(
s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
"Please specify a class through --class.")
}
}
// Global defaults. These should be kept to a minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
// In YARN mode, app name can be set via SPARK_YARN_APP_NAME (see SPARK-5222)
if (master.startsWith("yarn")) {
name = Option(name).orElse(env.get("SPARK_YARN_APP_NAME")).orNull
}
// Set name from main class if not given
name = Option(name).orElse(Option(mainClass)).orNull
if (name == null && primaryResource != null) {
name = new File(primaryResource).getName()
}
// Action should be SUBMIT unless otherwise specified
action = Option(action).getOrElse(SUBMIT)
}
/** Ensure that required fields exists. Call this only once all defaults are loaded. */
private def validateArguments(): Unit = {
action match {
case SUBMIT => validateSubmitArguments()
case KILL => validateKillArguments()
case REQUEST_STATUS => validateStatusRequestArguments()
}
}
private def validateSubmitArguments(): Unit = {
if (args.length == 0) {
printUsageAndExit(-1)
}
if (primaryResource == null) {
SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
}
if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
}
if (driverMemory != null
&& Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) {
SparkSubmit.printErrorAndExit("Driver Memory must be a positive number")
}
if (executorMemory != null
&& Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) {
SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number")
}
if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Executor cores must be a positive number")
}
if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Total executor cores must be a positive number")
}
if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
SparkSubmit.printErrorAndExit("Number of executors must be a positive number")
}
if (pyFiles != null && !isPython) {
SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
}
if (master.startsWith("yarn")) {
val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
if (!hasHadoopEnv && !Utils.isTesting) {
throw new Exception(s"When running with master '$master' " +
"either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
}
}
if (proxyUser != null && principal != null) {
SparkSubmit.printErrorAndExit("Only one of --proxy-user or --principal can be provided.")
}
}
private def validateKillArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Killing submissions is only supported in standalone or Mesos mode!")
}
if (submissionToKill == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
}
}
private def validateStatusRequestArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Requesting submission statuses is only supported in standalone or Mesos mode!")
}
if (submissionToRequestStatusFor == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
}
}
def isStandaloneCluster: Boolean = {
master.startsWith("spark://") && deployMode == "cluster"
}
override def toString: String = {
s"""Parsed arguments:
| master $master
| deployMode $deployMode
| executorMemory $executorMemory
| executorCores $executorCores
| totalExecutorCores $totalExecutorCores
| propertiesFile $propertiesFile
| driverMemory $driverMemory
| driverCores $driverCores
| driverExtraClassPath $driverExtraClassPath
| driverExtraLibraryPath $driverExtraLibraryPath
| driverExtraJavaOptions $driverExtraJavaOptions
| supervise $supervise
| queue $queue
| numExecutors $numExecutors
| files $files
| pyFiles $pyFiles
| archives $archives
| mainClass $mainClass
| primaryResource $primaryResource
| name $name
| childArgs [${childArgs.mkString(" ")}]
| jars $jars
| packages $packages
| packagesExclusions $packagesExclusions
| repositories $repositories
| verbose $verbose
|
|Spark properties used, including those specified through
| --conf and those from the properties file $propertiesFile:
|${Utils.redact(sparkProperties).mkString(" ", "\\n ", "\\n")}
""".stripMargin
}
/** Fill in values by parsing user options. */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
if (value != "client" && value != "cluster") {
SparkSubmit.printErrorAndExit("--deploy-mode must be either \\"client\\" or \\"cluster\\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
sparkProperties(confName) = confValue
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
SparkSubmit.printVersionAndExit()
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
true
}
/**
* Handle unrecognized command line options.
*
* The first unrecognized option is treated as the "primary resource". Everything else is
* treated as application arguments.
*/
override protected def handleUnknown(opt: String): Boolean = {
if (opt.startsWith("-")) {
SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
}
primaryResource =
if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
Utils.resolveURI(opt).toString
} else {
opt
}
isPython = SparkSubmit.isPython(opt)
isR = SparkSubmit.isR(opt)
false
}
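// Illustration for handleUnknown above (hypothetical command line):
// `spark-submit --master local[2] app.jar arg1 arg2` -- "app.jar" is the first unrecognized
// option and becomes primaryResource; "arg1" and "arg2" reach handleExtraArgs below.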
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra.asScala
}
private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
// scalastyle:off println
val outStream = SparkSubmit.printStream
if (unknownParam != null) {
outStream.println("Unknown/unsupported param " + unknownParam)
}
val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
"""Usage: spark-submit [options] <app jar | python file | R file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]
|Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
outStream.println(command)
val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
outStream.println(
s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn,
| k8s://https://host:port, or local (Default: local[*]).
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
| on one of the worker machines inside the cluster ("cluster")
| (Default: client).
| --class CLASS_NAME Your application's main class (for Java / Scala apps).
| --name NAME A name of your application.
| --jars JARS Comma-separated list of jars to include on the driver
| and executor classpaths.
| --packages Comma-separated list of maven coordinates of jars to include
| on the driver and executor classpaths. Will search the local
| maven repo, then maven central and any additional remote
| repositories given by --repositories. The format for the
| coordinates should be groupId:artifactId:version.
| --exclude-packages Comma-separated list of groupId:artifactId, to exclude while
| resolving the dependencies provided in --packages to avoid
| dependency conflicts.
| --repositories Comma-separated list of additional remote repositories to
| search for the maven coordinates given with --packages.
| --py-files PY_FILES Comma-separated list of .zip, .egg, or .py files to place
| on the PYTHONPATH for Python apps.
| --files FILES Comma-separated list of files to be placed in the working
| directory of each executor. File paths of these files
| in executors can be accessed via SparkFiles.get(fileName).
|
| --conf PROP=VALUE Arbitrary Spark configuration property.
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
| jars added with --jars are automatically included in the
| classpath.
|
| --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G).
|
| --proxy-user NAME User to impersonate when submitting the application.
| This argument does not work with --principal / --keytab.
|
| --help, -h Show this help message and exit.
| --verbose, -v Print additional debug output.
| --version, Print the version of current Spark.
|
| Cluster deploy mode only:
| --driver-cores NUM Number of cores used by the driver, only in cluster mode
| (Default: 1).
|
| Spark standalone or Mesos with cluster deploy mode only:
| --supervise If given, restarts the driver on failure.
| --kill SUBMISSION_ID If given, kills the driver specified.
| --status SUBMISSION_ID If given, requests the status of the driver specified.
|
| Spark standalone and Mesos only:
| --total-executor-cores NUM Total cores for all executors.
|
| Spark standalone and YARN only:
| --executor-cores NUM Number of cores per executor. (Default: 1 in YARN mode,
| or all available cores on the worker in standalone mode)
|
| YARN-only:
| --queue QUEUE_NAME The YARN queue to submit to (Default: "default").
| --num-executors NUM Number of executors to launch (Default: 2).
| If dynamic allocation is enabled, the initial number of
| executors will be at least NUM.
| --archives ARCHIVES Comma separated list of archives to be extracted into the
| working directory of each executor.
| --principal PRINCIPAL Principal to be used to login to KDC, while running on
| secure HDFS.
| --keytab KEYTAB The full path to the file that contains the keytab for the
| principal specified above. This keytab will be copied to
| the node running the Application Master via the Secure
| Distributed Cache, for renewing the login tickets and the
| delegation tokens periodically.
""".stripMargin
)
if (SparkSubmit.isSqlShell(mainClass)) {
outStream.println("CLI options:")
outStream.println(getSqlShellOptions())
}
// scalastyle:on println
SparkSubmit.exitFn(exitCode)
}
/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)
val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}
override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)
try {
Utils.classForName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}
stream.flush()
// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}
}
|
ioana-delaney/spark
|
core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
|
Scala
|
apache-2.0
| 25,735 |
package frdomain.ch8
package cqrs.lib
import collection.concurrent.TrieMap
import scalaz._
import Scalaz._
import Common._
import spray.json._
trait EventStore[K] {
/**
* gets the list of events for an aggregate key `key`
*/
def get(key: K): List[Event[_]]
/**
* puts a `key` and its associated `event`
*/
def put(key: K, event: Event[_]): Error \\/ Event[_]
/**
* similar to `get` but returns an error if the `key` is not found
*/
def events(key: K): Error \\/ List[Event[_]]
/**
* get all ids from the event store
*/
def allEvents: Error \\/ List[Event[_]]
}
/**
* In memory store
*/
object InMemoryEventStore {
def apply[K] = new EventStore[K] {
val eventLog = TrieMap[K, List[Event[_]]]()
def get(key: K): List[Event[_]] = eventLog.get(key).getOrElse(List.empty[Event[_]])
def put(key: K, event: Event[_]): Error \\/ Event[_] = {
val currentList = eventLog.getOrElse(key, Nil)
eventLog += (key -> (event :: currentList))
event.right
}
def events(key: K): Error \\/ List[Event[_]] = {
val currentList = eventLog.getOrElse(key, Nil)
if (currentList.isEmpty) s"Aggregate $key does not exist".left
else currentList.right
}
def allEvents: Error \\/ List[Event[_]] = eventLog.values.toList.flatten.right
}
}
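// Usage sketch (someEvent is hypothetical):
//   val store = InMemoryEventStore[String]
//   store.put("account-1", someEvent) // appends to the aggregate's log, returns event.right
//   store.events("account-1") // \\/-(List(someEvent)), or an error .left for an unknown key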
/**
* In memory json store
*/
trait InMemoryJSONEventStore {
implicit val eventJsonFormat: RootJsonFormat[Event[_]]
def apply[K] = new EventStore[K] {
val eventLog = TrieMap[K, List[String]]()
def get(key: K): List[Event[_]] =
eventLog.get(key).map(ls => ls.map(_.parseJson.convertTo[Event[_]])).getOrElse(List.empty[Event[_]])
def put(key: K, event: Event[_]): Error \\/ Event[_] = {
val currentList = eventLog.getOrElse(key, Nil)
eventLog += (key -> (eventJsonFormat.write(event).toString :: currentList))
event.right
}
def events(key: K): Error \\/ List[Event[_]] = {
val currentList = eventLog.getOrElse(key, Nil)
if (currentList.isEmpty) s"Aggregate $key does not exist".left
else currentList.map(js => js.parseJson.convertTo[Event[_]]).right
}
def allEvents: Error \\/ List[Event[_]] = eventLog.values.toList.flatten.map(_.parseJson.convertTo[Event[_]]).right
}
}
|
debasishg/frdomain
|
src/main/scala/frdomain/ch8/cqrs/lib/EventStore.scala
|
Scala
|
apache-2.0
| 2,286 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.akkahttpjackson
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.server.Directives
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.scaladsl.Source
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.io.StdIn
object ExampleApp {
final case class Foo(bar: String)
def main(args: Array[String]): Unit = {
implicit val system = ActorSystem()
// provide an implicit ObjectMapper if you want serialization/deserialization to use it
// instead of a default ObjectMapper configured only with DefaultScalaModule provided
// by JacksonSupport
//
// for example:
//
// implicit val objectMapper = new ObjectMapper()
// .registerModule(DefaultScalaModule)
// .registerModule(new GuavaModule())
Http().newServerAt("127.0.0.1", 8000).bindFlow(route)
StdIn.readLine("Hit ENTER to exit")
Await.ready(system.terminate(), Duration.Inf)
}
def route(implicit sys: ActorSystem) = {
import Directives._
import JacksonSupport._
pathSingleSlash {
post {
entity(as[Foo]) { foo =>
complete {
foo
}
}
}
} ~ pathPrefix("stream") {
post {
entity(as[SourceOf[Foo]]) { fooSource: SourceOf[Foo] =>
complete(fooSource.throttle(1, 2.seconds))
}
} ~ get {
pathEndOrSingleSlash {
complete(
Source(0 to 5)
.throttle(1, 1.seconds)
.map(i => Foo(s"bar-$i"))
)
} ~ pathPrefix("remote") {
onSuccess(Http().singleRequest(HttpRequest(uri = "http://localhost:8000/stream"))) {
response => complete(Unmarshal(response).to[SourceOf[Foo]])
}
}
}
}
}
}
|
hseeberger/akka-http-json
|
akka-http-jackson/src/test/scala/de/heikoseeberger/akkahttpjackson/ExampleApp.scala
|
Scala
|
apache-2.0
| 2,476 |
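// Assumed context from earlier snippets in the chapter:
//   type Id[A] = A
//   trait Functor[F[_]] { def fmap[A, B](f: A => B)(x: F[A]): F[B] }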
implicit val identityFunctor = new Functor[Id] {
def fmap[A, B](f: A => B)(x: Id[A]): Id[B] =
f(x)
}
|
hmemcpy/milewski-ctfp-pdf
|
src/content/1.8/code/scala/snippet06.scala
|
Scala
|
gpl-3.0
| 106 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
import util.control.Exception._
import org.specs2.mutable.Specification
object ParserBugs extends Specification {
"Unicode ffff is a valid char in string literal" in {
parseOpt(""" {"x":"\\uffff"} """).isDefined mustEqual true
}
"Does not allow colon at start of array (1039)" in {
parseOpt("""[:"foo", "bar"]""") mustEqual None
}
"Does not allow colon instead of comma in array (1039)" in {
parseOpt("""["foo" : "bar"]""") mustEqual None
}
"Solo quote mark should fail cleanly (not StringIndexOutOfBoundsException) (1041)" in {
    JsonParser.parse("\"", discardParser) must throwA[JsonParser.ParseException].like {
case e => e.getMessage must startWith("unexpected eof")
}
}
"Field names must be quoted" in {
val json = JObject(List(JField("foo\\nbar", JInt(1))))
val s = compactRender(json)
(s mustEqual """{"foo\\nbar":1}""") and
(parse(s) mustEqual json)
}
"Double in scientific notation with + can be parsed" in {
val json = JObject(List(JField("t", JDouble(12.3))))
val s = """{"t" : 1.23e+1}"""
parse(s) mustEqual json
}
private val discardParser = (p : JsonParser.Parser) => {
var token: JsonParser.Token = null
do {
token = p.nextToken
} while (token != JsonParser.End)
}
}
|
lift/framework
|
core/json/src/test/scala/net/liftweb/json/ParserBugs.scala
|
Scala
|
apache-2.0
| 1,943 |
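The behaviour these regression tests pin down, at a glance (`parseOpt` is lift-json's non-throwing entry point; malformed input yields `None` instead of an exception):
import net.liftweb.json._

object ParserBugsDemo extends App {
  println(parseOpt("""{"a": 1}"""))        // Some(JObject(..))
  println(parseOpt("""[:"foo", "bar"]""")) // None — a colon cannot start an array
  println(parseOpt("\""))                  // None — a solo quote mark fails cleanly
}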
package com.airbnb.scheduler.api
import java.util.logging.{Level, Logger}
import javax.ws.rs._
import javax.ws.rs.core.{MediaType, Response}
import javax.ws.rs.core.Response.Status
import scala.Array
import com.airbnb.scheduler.config.SchedulerConfiguration
import scala.collection.mutable.ListBuffer
import com.airbnb.scheduler.jobs._
import com.airbnb.scheduler.graph.JobGraph
import com.google.inject.Inject
import com.yammer.metrics.annotation.Timed
import com.fasterxml.jackson.databind.ObjectMapper
import scala.collection.JavaConversions._
/**
 * The REST API for the performance/stats component.
 * Returns a list of jobs, sorted by percentile run times.
 * @author Matt Redmond ([email protected])
 */
@Path(PathConstants.allStatsPath)
@Produces(Array(MediaType.APPLICATION_JSON))
class StatsResource @Inject()(
val jobScheduler: JobScheduler,
val jobGraph: JobGraph,
val configuration: SchedulerConfiguration,
val jobMetrics: JobMetrics) {
private[this] val log = Logger.getLogger(getClass.getName)
@Timed
@GET
// Valid arguments are
// /scheduler/stats/99thPercentile
// /scheduler/stats/98thPercentile
// /scheduler/stats/95thPercentile
// /scheduler/stats/75thPercentile
// /scheduler/stats/median
// /scheduler/stats/mean
def getPerf(@PathParam("percentile") percentile: String): Response = {
try {
var output = ListBuffer[Map[String, Any]]()
var jobs = ListBuffer[(String, Double)]()
val mapper = new ObjectMapper()
for (jobNameString <- jobGraph.dag.vertexSet()) {
val node = mapper.readTree(jobMetrics.getJsonStats(jobNameString))
if (node.has(percentile) && node.get(percentile) != null) {
val time = node.get(percentile).asDouble()
jobs.append((jobNameString, time))
}
}
jobs = jobs.sortBy(_._2).reverse
for ( (jobNameString, time) <- jobs) {
val myMap = Map("jobNameLabel" -> jobNameString, "time" -> time / 1000.0)
output.append(myMap)
}
Response.ok(output).build
} catch {
case ex: Throwable => {
log.log(Level.WARNING, "Exception while serving request", ex)
throw new WebApplicationException(Status.INTERNAL_SERVER_ERROR)
}
}
}
}
|
snegi/chronos
|
src/main/scala/com/airbnb/scheduler/api/StatsResource.scala
|
Scala
|
apache-2.0
| 2,403 |
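The ranking core of `getPerf` is a descending sort of `(jobName, time)` pairs followed by a millisecond-to-second conversion; the same step in isolation (job names invented for illustration):
object PercentileRanking extends App {
  val stats = List(("etl-daily", 12345.0), ("backup", 98.0), ("report", 4200.0))

  // Equivalent to `jobs.sortBy(_._2).reverse` above: slowest job first.
  val ranked = stats.sortBy { case (_, millis) => -millis }

  // Times are reported in seconds, as in the `time / 1000.0` conversion above.
  ranked.foreach { case (job, millis) => println(f"$job%-10s ${millis / 1000.0}%.3f s") }
}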
package com.xantoria.flippy.serialization
import org.scalatest.Suites
class SerializationSuite extends Suites(
new SerializerSpec,
new StringSerializerSpec,
new NetworkingSerializerSpec,
new NumberSerializerSpec
)
|
giftig/flippy
|
core/src/test/scala/com/xantoria/flippy/serialization/SerializationSuite.scala
|
Scala
|
mit
| 224 |
package inloopio.indicator.function
import inloopio.math.timeseries.Null
import inloopio.math.timeseries.TBaseSer
/**
*
* @author Caoyuan Deng
*/
class DMFunction(_baseSer: TBaseSer) extends Function(_baseSer) {
val _dmPlus = TVar[Double]()
val _dmMinus = TVar[Double]()
override def set(args: Any*): Unit = {
}
protected def computeSpot(i: Int): Unit = {
if (i == 0) {
_dmPlus(i) = Null.Double
_dmMinus(i) = Null.Double
} else {
if (H(i) > H(i - 1) && L(i) > L(i - 1)) {
_dmPlus(i) = H(i) - H(i - 1)
_dmMinus(i) = 0f
} else if (H(i) < H(i - 1) && L(i) < L(i - 1)) {
_dmPlus(i) = 0f
_dmMinus(i) = L(i - 1) - L(i)
} else if (H(i) > H(i - 1) && L(i) < L(i - 1)) {
if (H(i) - H(i - 1) > L(i - 1) - L(i)) {
_dmPlus(i) = H(i) - H(i - 1)
_dmMinus(i) = 0f
} else {
_dmPlus(i) = 0f
_dmMinus(i) = L(i - 1) - L(i)
}
} else if (H(i) < H(i - 1) && L(i) > L(i - 1)) {
_dmPlus(i) = 0f
_dmMinus(i) = 0f
} else if (H(i) == H(i - 1) && L(i) == L(i - 1)) {
_dmPlus(i) = 0f
_dmMinus(i) = 0f
      } else if (L(i) > H(i - 1)) {
        // gap up: the whole move counts as upward directional movement
        _dmPlus(i) = H(i) - H(i - 1)
        _dmMinus(i) = 0f
} else if (H(i) < L(i - 1)) {
_dmPlus(i) = 0f
_dmMinus(i) = L(i - 1) - L(i)
} else {
_dmPlus(i) = 0f
_dmMinus(i) = 0f
}
}
}
def dmPlus(sessionId: Long, idx: Int): Double = {
computeTo(sessionId, idx)
_dmPlus(idx)
}
def dmMinus(sessionId: Long, idx: Int): Double = {
computeTo(sessionId, idx)
_dmMinus(idx)
}
}
|
dcaoyuan/inloopio-libs
|
inloopio-indicator/src/main/scala/inloopio/indicator/function/DMFunction.scala
|
Scala
|
bsd-3-clause
| 1,661 |
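The case analysis in `computeSpot` is Wilder's +DM/−DM rule: an up-move counts only if it exceeds the down-move, and inside or equal days contribute zero. A compact standalone version of the same rule over plain high/low arrays (the standard formulation, not the project's TVar-based API):
object DirectionalMovement extends App {
  // Returns (+DM, -DM) for bar i relative to bar i-1.
  def dm(h: Array[Double], l: Array[Double], i: Int): (Double, Double) = {
    val upMove = h(i) - h(i - 1)
    val downMove = l(i - 1) - l(i)
    if (upMove > downMove && upMove > 0) (upMove, 0.0)
    else if (downMove > upMove && downMove > 0) (0.0, downMove)
    else (0.0, 0.0)
  }

  val highs = Array(10.0, 11.5, 11.0)
  val lows = Array(9.0, 9.5, 8.0)
  println(dm(highs, lows, 1)) // (1.5, 0.0) — up day: +DM = H(1) - H(0)
  println(dm(highs, lows, 2)) // (0.0, 1.5) — down day: -DM = L(1) - L(2)
}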
package cromwell.engine.db
import cromwell.engine._
import cromwell.engine.backend._
import cromwell.engine.db.slick.{Execution, ExecutionInfo}
import wdl4s.values.WdlFile
case class ExecutionInfosByExecution(execution: Execution, executionInfos: Seq[ExecutionInfo]) {
import ExecutionInfosByExecution._
lazy val callLogs: Option[CallLogs] = {
executionInfos.foldLeft(Accumulator(None, None, Map.empty))(accumulateExecutionInfo) match {
case Accumulator(Some(stdout), None, _) =>
throw new IllegalArgumentException(s"stderr was not found for stdout $stdout")
case Accumulator(None, Some(stderr), _) =>
throw new IllegalArgumentException(s"stdout was not found for stderr $stderr")
case Accumulator(None, None, logs) if logs.nonEmpty =>
throw new IllegalArgumentException(s"stdout and stderr were empty logs $logs")
case acc =>
for {
stdout <- acc.stdout
stderr <- acc.stderr
} yield CallLogs(stdout, stderr, Option(acc.logs).filterNot(_.isEmpty))
}
}
}
object ExecutionInfosByExecution {
private val CallLogPrefix = "$log"
private val StdoutSuffix = "stdout"
private val StderrSuffix = "stderr"
private case class Accumulator(stdout: Option[WdlFile], stderr: Option[WdlFile], logs: Map[String, WdlFile])
private def accumulateExecutionInfo(acc: Accumulator, executionInfo: ExecutionInfo): Accumulator = {
executionInfo.key.split("_", 2) match {
case Array(CallLogPrefix, StdoutSuffix) => acc.copy(stdout = executionInfo.value map { WdlFile(_) })
case Array(CallLogPrefix, StderrSuffix) => acc.copy(stderr = executionInfo.value map { WdlFile(_) })
case Array(CallLogPrefix, callLogName) if executionInfo.value.isDefined =>
acc.copy(logs = acc.logs + (callLogName -> WdlFile(executionInfo.value.get)))
case _ => acc
}
}
def toCallLogMap(callLogs: CallLogs): Map[String, Option[String]] = {
val callLogMap = Map(StdoutSuffix -> callLogs.stdout, StderrSuffix -> callLogs.stderr) ++
callLogs.backendLogs.getOrElse(Map.empty)
callLogMap map {
case (suffix, value) => s"${CallLogPrefix}_$suffix" -> Option(value.valueString)
}
}
def toWorkflowLogs(executionInfosByExecutions: Traversable[ExecutionInfosByExecution]): WorkflowLogs = {
executionInfosByExecutions groupBy { _.execution.callFqn } mapValues toAttemptedCallLogs filterNot {
case (_, attemptedCallLogs) => attemptedCallLogs.isEmpty
}
}
private def toAttemptedCallLogs(executionInfosByExecutions: Traversable[ExecutionInfosByExecution]):
AttemptedCallLogs = {
toAttemptedCallLogs(executionInfosByExecutions.toIndexedSeq map toKeyCallLogs)
}
private def toKeyCallLogs(executionInfosByExecution: ExecutionInfosByExecution):
(ExecutionDatabaseKey, Option[CallLogs]) = {
import ExecutionIndex._
val execution = executionInfosByExecution.execution
val key = ExecutionDatabaseKey(execution.callFqn, execution.index.toIndex, execution.attempt)
val callLogs = executionInfosByExecution.callLogs
key -> callLogs
}
private def toAttemptedCallLogs(maybeLogs: Seq[(ExecutionDatabaseKey, Option[CallLogs])]): AttemptedCallLogs = {
val logs = maybeLogs collect { case (key, Some(callLog)) => key -> callLog }
val groupByIndex = logs groupBy { case (key, _) => key.index }
val sortedByIndex = groupByIndex.toIndexedSeq sortBy { case (index, _) => index }
val attemptedCallLogs = sortedByIndex map { case (_, callLogs) => mapSortByAttempt(callLogs) }
attemptedCallLogs map { _.toIndexedSeq }
}
private def mapSortByAttempt(logs: Seq[(ExecutionDatabaseKey, CallLogs)]): Seq[CallLogs] = {
val sortedByAttempt = logs sortBy { case (key, callLog) => key.attempt }
val mappedToLogs = sortedByAttempt map { case (_, callLog) => callLog }
mappedToLogs
}
/**
* Group by execution, then remove the execution keys of the execution -> execution info tuple.
* The net result is Execution -> Seq[ExecutionInfo].
*/
def fromRawTuples(rawTuples: Seq[(Execution, ExecutionInfo)]): Seq[ExecutionInfosByExecution] = {
val groupedTuples = rawTuples groupBy {
case (execution, _) => execution
} mapValues {
_ map { case (_, executionInfo) => executionInfo }
}
val infosByExecution = groupedTuples map (ExecutionInfosByExecution.apply _).tupled
infosByExecution.toSeq
}
}
|
cowmoo/cromwell
|
engine/src/main/scala/cromwell/engine/db/ExecutionInfosByExecution.scala
|
Scala
|
bsd-3-clause
| 4,422 |
/**
* Copyright (C) 2016 Verizon. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.verizon.bda.trapezium.framework.server
import akka.actor.ActorSystem
import akka.http.scaladsl.server
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.apache.spark.SparkContext
import org.apache.spark.util.TestUtils
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
/**
* Created by Jegan on 5/24/16.
*/
class AkkaRouteHandlerTest extends FlatSpec with Matchers with ScalatestRouteTest
with BeforeAndAfterAll {
var sc: SparkContext = _
var as: ActorSystem = _
override def beforeAll: Unit = {
val conf = TestUtils.getSparkConf()
sc = new SparkContext(conf)
as = ActorSystem("test")
}
override def afterAll: Unit = {
sc.stop()
as.shutdown()
}
val routeHandler = new AkkaRouteHandler(sc, as)
"defineRoute" should "throw exception on invalid endpoint implementation" in {
intercept[RuntimeException] {
val route = routeHandler.defineRoute("test",
"com.verizon.bda.trapezium.framework.server.InvalidEndPoint")
}
}
"defineRoute" should "throw exception on invalid endpoint configuration" in {
val config = Table (
("path", "className"),
(null, "com.verizon.bda.trapezium.framework.handler.TestEndPoint1"),
("", "com.verizon.bda.trapezium.framework.handler.TestEndPoint1"),
("test-path", null),
("test-path", "")
)
forAll (config) { (path: String, className: String) =>
intercept[IllegalArgumentException] {
val route = routeHandler.defineRoute(path, className)
}
}
}
}
// Endpoint without expected constructor
class InvalidEndPoint extends ServiceEndPoint {
override def route: server.Route = {
path("invalid") {
get {
complete("Hello from Invalid EndPoint")
}
}
}
}
|
Verizon/trapezium
|
framework/src/test/scala/com/verizon/bda/trapezium/framework/server/AkkaRouteHandlerTest.scala
|
Scala
|
apache-2.0
| 2,481 |
package main.java.piratebot.pirates
import main.java.piratebot._
class SpanishGovernor(game: Game, player: Player) extends Pirate(game, player) {
val rank = 30
val name = "Spanish Governor"
override def dayAction(round : Round): RetriableMethodResponse.Value = {
for (p <- player.pirates) {
if (p.state == PirateState.Den) {
p.state = PirateState.Discard
}
}
game.printer.print(Channel.Debug, tag + ": Discarded all characters in den")
RetriableMethodResponse.Complete
}
def getSubRank(player : Player) : Int = {
Array(3, 2, 5, 4, 6, 1)(player.playerId)
}
}
|
ItCouldHaveBeenGreat/Eyepatch
|
src/main/java/piratebot/pirates/SpanishGovernor.scala
|
Scala
|
gpl-3.0
| 670 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.dsl
import algolia.AlgoliaDsl._
import algolia.AlgoliaTest
import algolia.http.{GET, HttpPayload, POST}
class GetObjectTest extends AlgoliaTest {
describe("get object") {
it("should get object objectId before") {
get objectId "myId" from "test" attributesToRetrieve Seq("a", "b")
}
it("should get object from before") {
get from "test" objectId "myId"
}
it("should call API") {
val payload =
HttpPayload(
GET,
Seq("1", "indexes", "test", "myId"),
queryParameters = Some(Map("attributesToRetrieve" -> "a,b")),
isSearch = true,
requestOptions = None
)
(get objectId "myId" from "test" attributesToRetrieve Seq("a", "b"))
.build() should be(payload)
}
}
describe("get multiple objects") {
it("should get objects by ids") {
get from "test" objectIds Seq("myId1", "myId2")
}
it("should call API") {
val body = """
|{
| "requests":[
| {
| "indexName":"test",
| "objectID":"myId1"
| },{
| "indexName":"test",
| "objectID":"myId2"
| }
| ]
|}
""".stripMargin.split("\\n").map(_.trim).mkString
val payload =
HttpPayload(
POST,
Seq("1", "indexes", "*", "objects"),
body = Some(body),
isSearch = true,
requestOptions = None
)
(get from "test" objectIds Seq("myId1", "myId2")).build() should be(
payload
)
}
}
}
|
algolia/algoliasearch-client-scala
|
src/test/scala/algolia/dsl/GetObjectTest.scala
|
Scala
|
mit
| 2,786 |
package rocks.muki.graphql.codegen
import cats.kernel.laws.discipline.SemigroupTests
import cats.tests.CatsSuite
import org.scalacheck.Arbitrary
class FailureInstancesSpec extends CatsSuite {
implicit private val failureArb: Arbitrary[Failure] = Arbitrary(Arbitrary.arbString.arbitrary.map(Failure(_)))
checkAll("Semigroup[Failure]", SemigroupTests[Failure].semigroup)
}
|
muuki88/sbt-graphql
|
src/test/scala/rocks/muki/graphql/codegen/FailureInstancesSpec.scala
|
Scala
|
apache-2.0
| 378 |
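For the discipline check above to compile, a `Semigroup[Failure]` must be in implicit scope; a plausible shape for such an instance (hypothetical — the real one lives elsewhere in the plugin's sources) accumulates failure messages:
import cats.Semigroup

// Hypothetical stand-ins for the plugin's types: a message-carrying Failure
// whose semigroup concatenates messages. Associativity follows from string
// concatenation, which is exactly what SemigroupTests verifies via laws.
final case class Failure(message: String)

object Failure {
  implicit val failureSemigroup: Semigroup[Failure] =
    Semigroup.instance((a, b) => Failure(a.message + "\n" + b.message))
}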
package com.avsystem.commons
package serialization
/**
* Wrapper over [[ObjectInput]] that lets you peek next field name without advancing the input.
*/
final class PeekingObjectInput(original: ObjectInput) extends ObjectInput {
private[this] var peekedField: FieldInput = _
override def knownSize: Int = original.knownSize
def peekNextFieldName: Opt[String] = peekedField match {
case null if original.hasNext =>
peekedField = original.nextField()
peekedField.fieldName.opt
case null => Opt.Empty
case fi => fi.fieldName.opt
}
def nextField(): FieldInput =
peekedField match {
case null => original.nextField()
case fi =>
peekedField = null
fi
}
def hasNext: Boolean =
peekedField != null || original.hasNext
override def peekField(name: String): Opt[FieldInput] =
original.peekField(name)
}
|
AVSystem/scala-commons
|
commons-core/src/main/scala/com/avsystem/commons/serialization/PeekingObjectInput.scala
|
Scala
|
mit
| 885 |
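The single-slot buffer in `PeekingObjectInput` is the classic peekable-iterator pattern: hold at most one element so a peek never advances the source. The same shape over a plain `Iterator`, for comparison:
final class PeekingIterator[A](underlying: Iterator[A]) extends Iterator[A] {
  private var buffered: Option[A] = None

  // Returns the next element without consuming it.
  def peek: Option[A] = {
    if (buffered.isEmpty && underlying.hasNext) buffered = Some(underlying.next())
    buffered
  }

  def hasNext: Boolean = buffered.isDefined || underlying.hasNext

  def next(): A = buffered match {
    case Some(a) => buffered = None; a
    case None    => underlying.next()
  }
}

object PeekingIteratorDemo extends App {
  val it = new PeekingIterator(Iterator("a", "b"))
  println(it.peek)   // Some(a) — does not consume
  println(it.next()) // a
  println(it.next()) // b
}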
/**
* Copyright (C) 2012 Inria, University Lille 1.
*
* This file is part of PowerAPI.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: [email protected].
*/
package fr.inria.powerapi.sensor.mem.sigar
import org.hyperic.sigar.Sigar
import org.hyperic.sigar.SigarException
import org.hyperic.sigar.SigarProxyCache
import fr.inria.powerapi.core.Process
import fr.inria.powerapi.core.Tick
import fr.inria.powerapi.sensor.mem.api.MemSensorMessage
import fr.inria.powerapi.sensor.sigar.SigarSensor
/**
* Memory sensor component using the Hyperic SIGAR API to get hardware information.
*
* @see http://www.hyperic.com/products/sigar
*
* @author abourdon
*/
class MemSensor extends fr.inria.powerapi.sensor.mem.api.MemSensor with SigarSensor {
/**
* SIGAR's proxy instance.
*/
lazy val sigar = SigarProxyCache.newInstance(new Sigar(), 100)
def residentPerc(process: Process): Double =
try {
sigar.getProcMem(process.pid).getResident().doubleValue / sigar.getMem().getTotal()
} catch {
case se: SigarException => {
if (log.isWarningEnabled) log.warning(se.getMessage())
0
}
}
def process(tick: Tick) {
publish(MemSensorMessage(residentPerc = residentPerc(tick.subscription.process), tick))
}
}
|
abourdon/powerapi-akka
|
sensors/sensor-mem-sigar/src/main/scala/fr/inria/powerapi/sensor/mem/sigar/MemSensor.scala
|
Scala
|
agpl-3.0
| 1,908 |
package org.jetbrains.plugins.scala
package lang
package parameterInfo
import java.awt.Color
import _root_.org.jetbrains.plugins.scala.editor.documentationProvider.ScalaDocumentationProvider
import _root_.org.jetbrains.plugins.scala.lang.psi.types._
import _root_.org.jetbrains.plugins.scala.lang.resolve.{ResolveUtils, ScalaResolveResult}
import com.intellij.codeInsight.CodeInsightBundle
import com.intellij.codeInsight.completion.JavaCompletionUtil
import com.intellij.codeInsight.lookup.{LookupElement, LookupItem}
import com.intellij.lang.parameterInfo._
import com.intellij.psi._
import com.intellij.psi.tree.IElementType
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.ArrayUtil
import com.intellij.util.containers.hash.HashSet
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parameterInfo.ScalaFunctionParameterInfoHandler.AnnotationParameters
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScPrimaryConstructor}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScParameterClause}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScTypeParametersOwner, ScTypedDefinition}
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.Parameter
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import _root_.scala.collection.mutable.ArrayBuffer
import scala.annotation.tailrec
import scala.collection.Seq
/**
* User: Alexander Podkhalyuzin
* Date: 18.01.2009
*/
class ScalaFunctionParameterInfoHandler extends ParameterInfoHandlerWithTabActionSupport[PsiElement, Any, ScExpression] {
def getArgListStopSearchClasses: java.util.Set[_ <: Class[_]] = {
java.util.Collections.singleton(classOf[PsiMethod])
}
  def getParameterCloseChars: String = "{},);\n"
def couldShowInLookup: Boolean = true
def getActualParameterDelimiterType: IElementType = ScalaTokenTypes.tCOMMA
def getActualParameters(elem: PsiElement): Array[ScExpression] = {
elem match {
case argExprList: ScArgumentExprList =>
argExprList.exprs.toArray
case u: ScUnitExpr => Array.empty
case p: ScParenthesisedExpr => p.expr.toArray
case t: ScTuple => t.exprs.toArray
case e: ScExpression => Array(e)
case _ => Array.empty
}
}
def getArgumentListClass: Class[PsiElement] = classOf[PsiElement]
def getActualParametersRBraceType: IElementType = ScalaTokenTypes.tRBRACE
def getArgumentListAllowedParentClasses: java.util.Set[Class[_]] = {
val set = new HashSet[Class[_]]()
set.add(classOf[ScMethodCall])
set.add(classOf[ScConstructor])
set.add(classOf[ScSelfInvocation])
set.add(classOf[ScInfixExpr])
set
}
def findElementForParameterInfo(context: CreateParameterInfoContext): PsiElement = {
findCall(context)
}
def findElementForUpdatingParameterInfo(context: UpdateParameterInfoContext): PsiElement = {
findCall(context)
}
def getParametersForDocumentation(p: Any, context: ParameterInfoContext): Array[Object] = {
p match {
case x: ScFunction =>
x.parameters.toArray
case _ => ArrayUtil.EMPTY_OBJECT_ARRAY
}
}
def showParameterInfo(element: PsiElement, context: CreateParameterInfoContext) {
context.showHint(element, element.getTextRange.getStartOffset, this)
}
def getParametersForLookup(item: LookupElement, context: ParameterInfoContext): Array[Object] = {
if (!item.isInstanceOf[LookupItem[_]]) return null
val allElements = JavaCompletionUtil.getAllPsiElements(item.asInstanceOf[LookupItem[_]])
if (allElements != null &&
allElements.size > 0 &&
allElements.get(0).isInstanceOf[PsiMethod]) {
return allElements.toArray(new Array[Object](allElements.size))
}
null
}
def updateParameterInfo(o: PsiElement, context: UpdateParameterInfoContext) {
if (context.getParameterOwner != o) context.removeHint()
val offset = context.getOffset
var child = o.getNode.getFirstChildNode
var i = 0
while (child != null && child.getStartOffset < offset) {
if (child.getElementType == ScalaTokenTypes.tCOMMA) i = i + 1
child = child.getTreeNext
}
context.setCurrentParameter(i)
}
def updateUI(p: Any, context: ParameterInfoUIContext) {
if (context == null || context.getParameterOwner == null || !context.getParameterOwner.isValid) return
context.getParameterOwner match {
case args: PsiElement =>
val color: Color = context.getDefaultParameterColor
val index = context.getCurrentParameterIndex
val buffer: StringBuilder = new StringBuilder("")
var isGrey = false
//todo: var isGreen = true
var namedMode = false
def paramText(param: ScParameter, subst: ScSubstitutor) = {
ScalaDocumentationProvider.parseParameter(param,
(t: ScType) =>
ScType.presentableText(subst.subst(t)), escape = false)
}
def applyToParameters(parameters: Seq[(Parameter, String)], subst: ScSubstitutor, canBeNaming: Boolean,
isImplicit: Boolean = false) {
if (parameters.length > 0) {
var k = 0
val exprs: Seq[ScExpression] = getActualParameters(args)
if (isImplicit) buffer.append("implicit ")
val used = new Array[Boolean](parameters.length)
while (k < parameters.length) {
val namedPrefix = "["
val namedPostfix = "]"
def appendFirst(useGrey: Boolean = false) {
val getIt = used.indexOf(false)
used(getIt) = true
if (namedMode) buffer.append(namedPrefix)
val param: (Parameter, String) = parameters(getIt)
buffer.append(param._2)
if (namedMode) buffer.append(namedPostfix)
}
def doNoNamed(expr: ScExpression) {
if (namedMode) {
isGrey = true
appendFirst()
} else {
val exprType = expr.getType(TypingContext.empty).getOrNothing
val getIt = used.indexOf(false)
used(getIt) = true
val param: (Parameter, String) = parameters(getIt)
val paramType = param._1.paramType
if (!exprType.conforms(paramType)) isGrey = true
buffer.append(param._2)
}
}
if (k == index || (k == parameters.length - 1 && index >= parameters.length &&
parameters(parameters.length - 1)._1.isRepeated)) {
buffer.append("<b>")
}
if (k < index && !isGrey) {
//slow checking
if (k >= exprs.length) { //shouldn't be
appendFirst(useGrey = true)
isGrey = true
} else {
exprs(k) match {
case assign@NamedAssignStmt(name) =>
val ind = parameters.indexWhere(param => ScalaPsiUtil.memberNamesEquals(param._1.name, name))
if (ind == -1 || used(ind)) {
doNoNamed(assign)
} else {
if (k != ind) namedMode = true
used(ind) = true
val param: (Parameter, String) = parameters(ind)
if (namedMode) buffer.append(namedPrefix)
buffer.append(param._2)
if (namedMode) buffer.append(namedPostfix)
assign.getRExpression match {
case Some(expr: ScExpression) =>
for (exprType <- expr.getType(TypingContext.empty)) {
val paramType = param._1.paramType
if (!exprType.conforms(paramType)) isGrey = true
}
case _ => isGrey = true
}
}
case expr: ScExpression =>
doNoNamed(expr)
}
}
} else {
//fast checking
if (k >= exprs.length) {
appendFirst()
} else {
exprs(k) match {
case NamedAssignStmt(name) =>
val ind = parameters.indexWhere(param => ScalaPsiUtil.memberNamesEquals(param._1.name, name))
if (ind == -1 || used(ind)) {
appendFirst()
} else {
if (k != ind) namedMode = true
used(ind) = true
if (namedMode) buffer.append(namedPrefix)
buffer.append(parameters(ind)._2)
if (namedMode) buffer.append(namedPostfix)
}
case _ => appendFirst()
}
}
}
if (k == index || (k == parameters.length - 1 && index >= parameters.length &&
parameters(parameters.length - 1)._1.isRepeated)) {
buffer.append("</b>")
}
k = k + 1
if (k != parameters.length) buffer.append(", ")
}
if (!isGrey && exprs.length > parameters.length && index >= parameters.length) {
if (!namedMode && parameters(parameters.length - 1)._1.isRepeated) {
val paramType = parameters(parameters.length - 1)._1.paramType
while (!isGrey && k < exprs.length.min(index)) {
if (k < index) {
for (exprType <- exprs(k).getType(TypingContext.empty)) {
if (!exprType.conforms(paramType)) isGrey = true
}
}
k = k + 1
}
} else isGrey = true
}
} else buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
}
p match {
case x: String if x == "" =>
buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
case (a: AnnotationParameters, i: Int) =>
val seq = a.seq
if (seq.length == 0) buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
else {
val paramsSeq: Seq[(Parameter, String)] = seq.zipWithIndex.map {
case (t, paramIndex) =>
(new Parameter(t._1, None, t._2, t._3 != null, false, false, paramIndex),
t._1 + ": " + ScType.presentableText(t._2) + (
if (t._3 != null) " = " + t._3.getText else ""))
}
applyToParameters(paramsSeq, ScSubstitutor.empty, canBeNaming = true, isImplicit = false)
}
case (sign: PhysicalSignature, i: Int) => //i can be -1 (it's update method)
val subst = sign.substitutor
sign.method match {
case method: ScFunction =>
val clauses = method.effectiveParameterClauses
if (clauses.length <= i || (i == -1 && clauses.length == 0)) buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
else {
val clause: ScParameterClause = if (i >= 0) clauses(i) else clauses(0)
val length = clause.effectiveParameters.length
val parameters: Seq[ScParameter] = if (i != -1) clause.effectiveParameters else clause.effectiveParameters.take(length - 1)
applyToParameters(parameters.map(param =>
(new Parameter(param), paramText(param, subst))), subst, canBeNaming = true, isImplicit = clause.isImplicit)
}
case method: FakePsiMethod =>
if (method.params.length == 0) buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
else {
buffer.append(method.params.
map((param: Parameter) => {
val buffer: StringBuilder = new StringBuilder("")
val paramType = param.paramType
val name = param.name
if (name != "") {
buffer.append(name)
buffer.append(": ")
}
buffer.append(ScType.presentableText(paramType))
if (param.isRepeated) buffer.append("*")
if (param.isDefault) buffer.append(" = _")
val isBold = if (method.params.indexOf(param) == index || (param.isRepeated && method.params.indexOf(param) <= index)) true
else {
//todo: check type
false
}
val paramText = buffer.toString()
if (isBold) "<b>" + paramText + "</b>" else paramText
}).mkString(", "))
}
case method: PsiMethod =>
val p = method.getParameterList
if (p.getParameters.length == 0) buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
else {
buffer.append(p.getParameters.
map((param: PsiParameter) => {
val buffer: StringBuilder = new StringBuilder("")
val list = param.getModifierList
if (list == null) return
val lastSize = buffer.length
for (a <- list.getAnnotations) {
if (lastSize != buffer.length) buffer.append(" ")
val element = a.getNameReferenceElement
if (element != null) buffer.append("@").append(element.getText)
}
if (lastSize != buffer.length) buffer.append(" ")
val name = param.name
if (name != null) {
buffer.append(name)
}
buffer.append(": ")
buffer.append(ScType.presentableText(subst.subst(param.exactParamType())))
if (param.isVarArgs) buffer.append("*")
val isBold = if (p.getParameters.indexOf(param) == index || (param.isVarArgs && p.getParameters.indexOf(param) <= index)) true
else {
//todo: check type
false
}
val paramText = buffer.toString()
if (isBold) "<b>" + paramText + "</b>" else paramText
}).mkString(", "))
}
}
case (constructor: ScPrimaryConstructor, subst: ScSubstitutor, i: Int) if constructor.isValid =>
val clauses = constructor.effectiveParameterClauses
if (clauses.length <= i) buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"))
else {
val clause: ScParameterClause = clauses(i)
applyToParameters(clause.effectiveParameters.map(param =>
(new Parameter(param), paramText(param, subst))), subst, canBeNaming = true, isImplicit = clause.isImplicit)
}
case _ =>
}
val startOffset = buffer.indexOf("<b>")
if (startOffset != -1) buffer.replace(startOffset, startOffset + 3, "")
val endOffset = buffer.indexOf("</b>")
if (endOffset != -1) buffer.replace(endOffset, endOffset + 4, "")
if (buffer.toString != "")
context.setupUIComponentPresentation(buffer.toString(), startOffset, endOffset, isGrey, false, false, color)
else
context.setUIComponentEnabled(false)
case _ =>
}
}
def tracksParameterIndex: Boolean = true
trait Invocation {
def element: PsiElement
def parent: PsiElement = element.getParent
def invocationCount: Int
def callGeneric: Option[ScGenericCall] = None
def callReference: Option[ScReferenceExpression]
}
object Invocation {
private class CallInvocation(args: ScArgumentExprList) extends Invocation {
override def element: PsiElement = args
override def callGeneric: Option[ScGenericCall] = args.callGeneric
override def invocationCount: Int = args.invocationCount
override def callReference: Option[ScReferenceExpression] = args.callReference
}
private trait InfixInvocation extends Invocation {
override def invocationCount: Int = 1
override def callReference: Option[ScReferenceExpression] = {
element.getParent match {
case i: ScInfixExpr => Some(i.operation)
}
}
}
private class InfixExpressionInvocation(expr: ScExpression) extends InfixInvocation {
override def element: PsiElement = expr
}
private class InfixTupleInvocation(tuple: ScTuple) extends InfixInvocation {
override def element: PsiElement = tuple
}
private class InfixUnitInvocation(u: ScUnitExpr) extends InfixInvocation {
override def element: PsiElement = u
}
def getInvocation(elem: PsiElement): Option[Invocation] = {
def create[T <: PsiElement](elem: T)(f: T => Invocation): Option[Invocation] = {
elem.getParent match {
case i: ScInfixExpr if i.getArgExpr == elem => Some(f(elem))
case _ => None
}
}
elem match {
case args: ScArgumentExprList => Some(new CallInvocation(args))
case t: ScTuple => create(t)(new InfixTupleInvocation(_))
case u: ScUnitExpr => create(u)(new InfixUnitInvocation(_))
case e: ScExpression => create(e)(new InfixExpressionInvocation(_))
case _ => None
}
}
}
/**
* Returns context's argument psi and fill context items
* by appropriate PsiElements (in which we can resolve)
* @param context current context
* @return context's argument expression
*/
private def findCall(context: ParameterInfoContext): PsiElement = {
val file = context.getFile
val offset = context.getEditor.getCaretModel.getOffset
val element = file.findElementAt(offset)
    if (element == null) return null
@tailrec
def findArgs(elem: PsiElement): Option[Invocation] = {
if (elem == null) return None
val res = Invocation.getInvocation(elem)
if (res.isDefined) return res
findArgs(elem.getParent)
}
val argsOption: Option[Invocation] = findArgs(element)
if (argsOption.isEmpty) return null
val args = argsOption.get
context match {
case context: CreateParameterInfoContext =>
args.parent match {
case call: MethodInvocation =>
val res: ArrayBuffer[Object] = new ArrayBuffer[Object]
def collectResult() {
val canBeUpdate = call.getParent match {
case assignStmt: ScAssignStmt if call == assignStmt.getLExpression => true
case notExpr if !notExpr.isInstanceOf[ScExpression] || notExpr.isInstanceOf[ScBlockExpr] => true
case _ => false
}
val count = args.invocationCount
val gen = args.callGeneric.getOrElse(null: ScGenericCall)
def collectSubstitutor(element: PsiElement): ScSubstitutor = {
if (gen == null) return ScSubstitutor.empty
val tp: Array[(String, String)] = element match {
case tpo: ScTypeParametersOwner => tpo.typeParameters.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p))).toArray
case ptpo: PsiTypeParameterListOwner => ptpo.getTypeParameters.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p)))
case _ => return ScSubstitutor.empty
}
val typeArgs: Seq[ScTypeElement] = gen.arguments
val map = new collection.mutable.HashMap[(String, String), ScType]
for (i <- 0 to Math.min(tp.length, typeArgs.length) - 1) {
map += ((tp(i), typeArgs(i).calcType))
}
new ScSubstitutor(Map(map.toSeq: _*), Map.empty, None)
}
def collectForType(typez: ScType) {
ScType.extractClassType(typez, Some(file.getProject)) match {
case Some((clazz: PsiClass, subst: ScSubstitutor)) =>
for{
sign <- ScalaPsiUtil.getApplyMethods(clazz)
if ResolveUtils.isAccessible(sign.method, args.element)
} {
val subst1 = {
val signSubst = sign.substitutor
val collectSubst = collectSubstitutor(sign.method)
signSubst.followed(subst).followed(collectSubst)
}
res += ((new PhysicalSignature(sign.method, subst1), 0))
}
if (canBeUpdate) {
for{
sign <- ScalaPsiUtil.getUpdateMethods(clazz)
if ResolveUtils.isAccessible(sign.method, args.element)
} {
res += ((new PhysicalSignature(sign.method, subst.followed(sign.
substitutor).followed(collectSubstitutor(sign.method))), -1))
}
}
case _ =>
}
}
args.callReference match {
case Some(ref: ScReferenceExpression) =>
if (count > 1) {
//todo: missed case with last implicit call
ref.bind() match {
case Some(ScalaResolveResult(function: ScFunction, subst: ScSubstitutor)) if function.
effectiveParameterClauses.length >= count =>
res += ((new PhysicalSignature(function, subst.followed(collectSubstitutor(function))), count - 1))
return
case _ =>
for (typez <- call.getEffectiveInvokedExpr.getType(TypingContext.empty)) //todo: implicit conversions
{collectForType(typez)}
}
} else {
val variants: Array[ResolveResult] = ref.getSameNameVariants
for {
variant <- variants
if !variant.getElement.isInstanceOf[PsiMember] ||
ResolveUtils.isAccessible(variant.getElement.asInstanceOf[PsiMember], ref)
} {
variant match {
//todo: Synthetic function
case ScalaResolveResult(method: PsiMethod, subst: ScSubstitutor) =>
res += ((new PhysicalSignature(method, subst.followed(collectSubstitutor(method))), 0))
case ScalaResolveResult(typed: ScTypedDefinition, subst: ScSubstitutor) =>
val typez = subst.subst(typed.getType(TypingContext.empty).getOrNothing) //todo: implicit conversions
collectForType(typez)
case _ =>
}
}
}
case None =>
call match {
case call: ScMethodCall =>
for (typez <- call.getEffectiveInvokedExpr.getType(TypingContext.empty)) { //todo: implicit conversions
collectForType(typez)
}
}
}
}
collectResult()
context.setItemsToShow(res.toArray)
case constr: ScConstructor =>
val res: ArrayBuffer[Object] = new ArrayBuffer[Object]
val typeElement = constr.typeElement
val i = constr.arguments.indexOf(args.element)
ScType.extractClassType(typeElement.calcType, Some(file.getProject)) match {
case Some((clazz: PsiClass, subst: ScSubstitutor)) =>
clazz match {
case clazz: ScClass =>
clazz.constructor match {
case Some(constr: ScPrimaryConstructor) if i < constr.effectiveParameterClauses.length =>
typeElement match {
case gen: ScParameterizedTypeElement =>
val tp = clazz.typeParameters.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p)))
val typeArgs: Seq[ScTypeElement] = gen.typeArgList.typeArgs
val map = new collection.mutable.HashMap[(String, String), ScType]
for (i <- 0 to Math.min(tp.length, typeArgs.length) - 1) {
map += ((tp(i), typeArgs(i).calcType))
}
val substitutor = new ScSubstitutor(Map(map.toSeq: _*), Map.empty, None)
res += ((constr, substitutor.followed(subst), i))
case _ => res += ((constr, subst, i))
}
case Some(_) if i == 0 => res += ""
case None => res += ""
case _ =>
}
for (constr <- clazz.functions if !constr.isInstanceOf[ScPrimaryConstructor] &&
constr.isConstructor && ((constr.clauses match {
case Some(x) => x.clauses.length
case None => 1
}) > i))
res += ((new PhysicalSignature(constr, subst), i))
case clazz: PsiClass if clazz.isAnnotationType =>
val resulting: (AnnotationParameters, Int) =
(AnnotationParameters(clazz.getMethods.toSeq.filter(_.isInstanceOf[PsiAnnotationMethod]).map(meth => (meth.name,
ScType.create(meth.getReturnType, meth.getProject, meth.getResolveScope),
meth.asInstanceOf[PsiAnnotationMethod].getDefaultValue))), i)
res += resulting
case clazz: PsiClass if !clazz.isInstanceOf[ScTypeDefinition] =>
for (constructor <- clazz.getConstructors) {
typeElement match {
case gen: ScParameterizedTypeElement =>
val tp = clazz.getTypeParameters.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p)))
val typeArgs: Seq[ScTypeElement] = gen.typeArgList.typeArgs
val map = new collection.mutable.HashMap[(String, String), ScType]
for (i <- 0 to Math.min(tp.length, typeArgs.length) - 1) {
map += ((tp(i), typeArgs(i).calcType))
}
val substitutor = new ScSubstitutor(Map(map.toSeq: _*), Map.empty, None)
res += ((new PhysicalSignature(constructor, substitutor.followed(subst)), i))
case _ => res += ((new PhysicalSignature(constructor, subst), i))
}
}
case _ =>
}
case _ =>
}
context.setItemsToShow(res.toArray)
case self: ScSelfInvocation =>
val res: ArrayBuffer[Object] = new ArrayBuffer[Object]
val i = self.arguments.indexOf(args.element)
val clazz = PsiTreeUtil.getParentOfType(self, classOf[ScClass], true)
clazz match {
case clazz: ScClass =>
clazz.constructor match {
case Some(constr: ScPrimaryConstructor) if i < constr.effectiveParameterClauses.length =>
res += ((constr, ScSubstitutor.empty, i))
case Some(constr) if i == 0 => res += ""
case None => res += ""
case _ =>
}
for {
constr <- clazz.functions
if !constr.isInstanceOf[ScPrimaryConstructor] &&
constr.isConstructor &&
constr.clauses.map(_.clauses.length).getOrElse(1) > i
} {
if (!PsiTreeUtil.isAncestor(constr, self, true) &&
constr.getTextRange.getStartOffset < self.getTextRange.getStartOffset) {
res += ((new PhysicalSignature(constr, ScSubstitutor.empty), i))
}
}
case _ =>
}
context.setItemsToShow(res.toArray)
}
case context: UpdateParameterInfoContext =>
var el = element
while (el.getParent != args.element) el = el.getParent
var index = 1
for (expr <- getActualParameters(args.element) if expr != el) index += 1
context.setCurrentParameter(index)
context.setHighlightedParameter(el)
case _ =>
}
args.element
}
}
object ScalaFunctionParameterInfoHandler {
case class AnnotationParameters(seq: Seq[(String, ScType, PsiAnnotationMemberValue)])
}
object ParameterInfoUtil {
/**
* Light green colour. Used for current resolve context showing.
*/
val highlightedColor = new Color(231, 254, 234)
}
|
triggerNZ/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/parameterInfo/ScalaFunctionParameterInfoHandler.scala
|
Scala
|
apache-2.0
| 30,158 |
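The essence of `updateParameterInfo` above — walking the argument list's children and counting commas before the caret to find the current parameter index — in isolation, with PSI nodes replaced by a flat token list for illustration:
object CurrentParamIndex extends App {
  // Each token carries its text and start offset, standing in for AST nodes.
  final case class Token(text: String, startOffset: Int)

  // Same logic as the while-loop in updateParameterInfo: one comma passed
  // means the caret sits in the second parameter, and so on.
  def currentParameter(tokens: Seq[Token], caretOffset: Int): Int =
    tokens.takeWhile(_.startOffset < caretOffset).count(_.text == ",")

  // f(aa, bb, cc) with the caret inside `cc`:
  val tokens = Seq(Token("aa", 2), Token(",", 4), Token("bb", 6), Token(",", 8), Token("cc", 10))
  println(currentParameter(tokens, caretOffset = 11)) // 2 — the third parameter
}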
package com.twitter.finagle.netty4
import com.twitter.finagle.FinagleInit
import com.twitter.finagle.stats.FinagleStatsReceiver
import io.netty.util.ResourceLeakDetector
import io.netty.util.ResourceLeakDetectorFactory
/**
* Runs prior initialization of any client/server in order to set Netty 4 system properties
* as early as possible.
*/
private final class Netty4Init extends FinagleInit {
def label: String = "Initializing Netty 4 system properties"
// Make the counter lazy so that we don't pay for it unless we actually have a leak
private[this] lazy val referenceLeaks =
FinagleStatsReceiver.counter("netty4", "reference_leaks")
def apply(): Unit = {
    // We set a sane default and reject client-initiated TLS/SSL session
    // renegotiations (for security reasons).
//
// NOTE: This property affects both JDK SSL (Java 8+) and Netty 4 OpenSSL
// implementations.
if (System.getProperty("jdk.tls.rejectClientInitiatedRenegotiation") == null) {
System.setProperty("jdk.tls.rejectClientInitiatedRenegotiation", "true")
}
// We allocate one arena per a worker thread to reduce contention. By default
// this will be equal to the number of logical cores * 2.
//
// NOTE: Before overriding it, we check whether or not it was set before. This way users
// will have a chance to tune it. Also set the "-overridden" suffixed property in order to
// signal that the property was overridden here and not by the user.
//
// NOTE: Only applicable when pooling is enabled (see `UsePooling`).
if (System.getProperty("io.netty.allocator.numDirectArenas") == null) {
System.setProperty("io.netty.allocator.numDirectArenas", numWorkers().toString)
System.setProperty("io.netty.allocator.numDirectArenas-overridden", "true")
}
// Set the number of heap arenas the number of logical cores * 2. Also set the "-overridden"
// suffixed property in order to signal that the property was overridden here and not by the
// user.
if (System.getProperty("io.netty.allocator.numHeapArenas") == null) {
System.setProperty("io.netty.allocator.numHeapArenas", numWorkers().toString)
System.setProperty("io.netty.allocator.numHeapArenas-overridden", "true")
}
// This determines the size of the memory chunks we allocate in arenas. Netty's default
// is 16mb, we shrink it to 1mb.
//
// We make the trade-off between an initial memory footprint and the max buffer size
// that can still be pooled. Every allocation that exceeds 1mb will fall back
// to an unpooled allocator.
//
// The `io.netty.allocator.maxOrder` (default: 7) determines the number of left binary
// shifts we need to apply to the `io.netty.allocator.pageSize`
// (default: 8192): 8192 << 7 = 1mb.
//
// NOTE: Before overriding it, we check whether or not it was set before. This way users
// will have a chance to tune it.
if (System.getProperty("io.netty.allocator.maxOrder") == null) {
System.setProperty("io.netty.allocator.maxOrder", "7")
}
// We're disabling Netty 4 recyclers (lightweight object pools) as we found out they
// come at the non-trivial cost of CPU overhead.
//
// NOTE: Before overriding it, we check whether or not it was set before. This way users
// will have a chance to tune it.
if (System.getProperty("io.netty.recycler.maxCapacityPerThread") == null) {
System.setProperty("io.netty.recycler.maxCapacityPerThread", "0")
}
// Initialize N4 metrics.
exportNetty4MetricsAndRegistryEntries()
// Enable tracking of reference leaks.
if (trackReferenceLeaks()) {
if (ResourceLeakDetector.getLevel == ResourceLeakDetector.Level.DISABLED) {
ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.SIMPLE)
}
ResourceLeakDetectorFactory.setResourceLeakDetectorFactory(
new StatsLeakDetectorFactory(
ResourceLeakDetectorFactory.instance(),
{ () =>
referenceLeaks.incr()
referenceLeakLintRule.leakDetected()
})
)
} else {
// If our leak detection is disabled, disable Netty's leak detection as well
// so that users don't need to disable in two places.
ResourceLeakDetector.setLevel(ResourceLeakDetector.Level.DISABLED)
}
}
}
|
twitter/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4Init.scala
|
Scala
|
apache-2.0
| 4,367 |
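Every block in `apply` above repeats the same "respect a user override" dance; factored out, the recurring pattern is (helper name is ours, not Finagle's):
object SysProps {
  // Only set a system property when the user hasn't already set it;
  // optionally record that the override came from here rather than the user.
  def setIfUnset(key: String, value: String, markOverridden: Boolean = false): Unit =
    if (System.getProperty(key) == null) {
      System.setProperty(key, value)
      if (markOverridden) System.setProperty(key + "-overridden", "true")
    }
}

// e.g. SysProps.setIfUnset("io.netty.allocator.maxOrder", "7")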
package com.linterpreteur.taja
import org.scalatest.{FlatSpec, Matchers}
class TajaSpec extends FlatSpec with Matchers {
it should "compose individual letters into a block" in {
val letters = Array('ㅁ', 'ㅏ', 'ㅅ')
val block = "맛"
val composed = Taja.compose(letters(0), letters(1), letters(2))
composed.toString should equal(block)
}
it should "put arguments sequentially in a string" in {
val letters = Array('ㅁ', 'ㄱ', 'ㅅ')
val block = "ㅁㄱㅅ"
val composed = Taja.compose(letters(0), letters(1), letters(2))
composed.toString should equal(block)
}
it should "group keystrokes" in {
val keystrokes = "ㅁㅏㅅㅇㅣㅆㄴㅡㄴ ㄱㅗㅣㄱㅣ"
val grouped = Taja.group(keystrokes)
val string = "맛있는 괴기"
grouped.toString should equal(string)
}
it should "ungroup a string" in {
val string = "맛있는 고기"
val ungrouped = Taja.ungroup(string)
val keystrokes = Array("ㅁㅏㅅ", "ㅇㅣㅆ", "ㄴㅡㄴ", " ", "ㄱㅗ", "ㄱㅣ")
ungrouped.mkString("/") should equal(keystrokes.mkString("/"))
}
it should "detect keystroke pauses" in {
val keystrokes = Array("ㅁㅏ", "ㅅㅇㅣㅆㄴ", "ㅡㄴㄱㅗㄱ", "ㅣ")
val grouped = keystrokes.map(Taja.group(_)).mkString
val cascaded = "맛있는 고기"
val blocked = "마ㅅ있ㄴㅡㄴ곡ㅣ"
grouped should not equal cascaded
grouped should equal(blocked)
}
it should "be able to recover the original form" in {
val string = "ㅁㅏㅅ있는 고기"
val ungrouped = Taja.ungroup(string)
val regrouped = ungrouped.map(Taja.group(_))
string should equal(regrouped.mkString)
val keystrokes = "ㅁㅏㅅㅇㅣㅆㄴㅡㄴ ㄱㅗㄱㅣ"
val grouped = Taja.group(keystrokes)
val recovered = Taja.ungroup(grouped)
keystrokes should equal(recovered.mkString)
}
it should "classify characters correctly" in {
case class TypeData(hangul: Boolean, syllable: Boolean, consonant: Boolean, vowel: Boolean)
case class Letter(value: Char, typeData: TypeData)
val ka = Letter('가', TypeData(hangul = true, syllable = true, consonant = false, vowel = false))
val k = Letter('ㄱ', TypeData(hangul = true, syllable = false, consonant = true, vowel = false))
val a = Letter('ㅏ', TypeData(hangul = true, syllable = false, consonant = false, vowel = true))
val z = Letter('z', TypeData(hangul = false, syllable = false, consonant = false, vowel = false))
for (letter <- Array(ka, k, a, z)) {
letter.typeData.hangul should equal(Taja.isHangul(letter.value))
letter.typeData.syllable should equal(Taja.isSyllable(letter.value))
letter.typeData.consonant should equal(Taja.isConsonant(letter.value))
letter.typeData.vowel should equal(Taja.isVowel(letter.value))
}
}
"The documentation" should "be in accordance with the actual code" in {
Taja.compose('ㅎ', 'ㅏ', 'ㄴ') should equal("한")
Taja.decompose('많') should equal("ㅁㅏㄴㅎ")
Taja.decompose('ㅟ') should equal("ㅜㅣ")
Taja.group("ㄴㅏㄹㅏㅅㅁㅏㄹㅆㅏㅁㅣ") should equal("나랏말싸미")
Taja.ungroup("옽ㅏ").mkString("/") should equal("ㅇㅗㅌ/ㅏ")
}
}
|
linterpreteur/taja
|
jvm/src/test/scala/com/wooseopkim/taja/TajaSpec.scala
|
Scala
|
mit
| 3,251 |
package com.wavesplatform.consensus.nxt
import com.wavesplatform.account.{Address, KeyPair}
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.consensus.TransactionsOrdering
import com.wavesplatform.test.PropSpec
import com.wavesplatform.transaction.Asset
import com.wavesplatform.transaction.Asset.Waves
import com.wavesplatform.transaction.transfer._
import scala.util.Random
class TransactionsOrderingSpecification extends PropSpec {
private val kp: KeyPair = KeyPair(ByteStr(new Array[Byte](32)))
property("TransactionsOrdering.InBlock should sort correctly") {
val correctSeq = Seq(
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
125L,
ByteStr.empty,
1
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
124L,
ByteStr.empty,
2
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
124L,
ByteStr.empty,
1
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Asset.fromCompatId(Some(ByteStr.empty)),
124L,
ByteStr.empty,
2
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Asset.fromCompatId(Some(ByteStr.empty)),
124L,
ByteStr.empty,
1
)
.explicitGet()
)
val sorted = Random.shuffle(correctSeq).sorted(TransactionsOrdering.InBlock)
sorted shouldBe correctSeq
}
property("TransactionsOrdering.InUTXPool should sort correctly") {
val correctSeq = Seq(
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
124L,
ByteStr.empty,
1
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
123L,
ByteStr.empty,
1
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
123L,
ByteStr.empty,
2
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Asset.fromCompatId(Some(ByteStr.empty)),
124L,
ByteStr.empty,
1
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Asset.fromCompatId(Some(ByteStr.empty)),
124L,
ByteStr.empty,
2
)
.explicitGet()
)
val sorted = Random.shuffle(correctSeq).sorted(TransactionsOrdering.InUTXPool(Set.empty))
sorted shouldBe correctSeq
}
property("TransactionsOrdering.InBlock should sort txs by decreasing block timestamp") {
val correctSeq = Seq(
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
1,
ByteStr.empty,
124L
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
kp,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
1,
ByteStr.empty,
123L
)
.explicitGet()
)
Random.shuffle(correctSeq).sorted(TransactionsOrdering.InBlock) shouldBe correctSeq
}
property("TransactionsOrdering.InUTXPool should sort txs by ascending block timestamp taking into consideration whitelisted senders") {
val whitelisted = KeyPair(Array.fill(32)(1: Byte))
val correctSeq = Seq(
TransferTransaction
.selfSigned(
1.toByte,
whitelisted,
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
2,
ByteStr.empty,
123L
)
.explicitGet(),
TransferTransaction
.selfSigned(
1.toByte,
KeyPair(Array.fill(32)(0: Byte)),
Address.fromString("3MydsP4UeQdGwBq7yDbMvf9MzfB2pxFoUKU").explicitGet(),
Waves,
100000,
Waves,
2,
ByteStr.empty,
124L
)
.explicitGet()
)
Random.shuffle(correctSeq).sorted(TransactionsOrdering.InUTXPool(Set(whitelisted.toAddress.stringRepr))) shouldBe correctSeq
}
}
|
wavesplatform/Waves
|
node/src/test/scala/com/wavesplatform/consensus/nxt/TransactionsOrderingSpecification.scala
|
Scala
|
mit
| 6,017 |
import org.scalatest._
class AdWordsToGraphiteTests extends FlatSpec with Matchers {
"AdWrodsToGraphiteTests" should "have tests" in {
true should === (true)
}
}
|
stevesie/adwords-to-graphite
|
src/test/scala/AdWordsToGraphiteTests.scala
|
Scala
|
gpl-2.0
| 172 |
package com.atomist.rug.kind.java
import com.atomist.graph.GraphNode
import com.atomist.rug.kind.core.ProjectMutableView
import com.atomist.rug.kind.java.support._
import com.atomist.rug.spi._
class JavaProjectType
extends Type
with ReflectivelyTypedType {
override def description = "Java project"
override def runtimeClass = classOf[JavaProjectMutableView]
override def findAllIn(context: GraphNode): Option[Seq[MutableView[_]]] = {
context match {
case pv: ProjectMutableView if JavaAssertions.isJava(pv.currentBackingObject) =>
Some(Seq(new JavaProjectMutableView(pv)))
case _ => Some(Nil)
}
}
}
|
atomist/rug
|
src/main/scala/com/atomist/rug/kind/java/JavaProjectType.scala
|
Scala
|
gpl-3.0
| 646 |
package org.mauritania.photosync.starter
import java.io.{File, PrintWriter}
import org.mauritania.photosync.Constants
import org.mauritania.photosync.olympus.client.CameraClient
import org.mauritania.photosync.olympus.sync.FilesManager
import org.mauritania.photosync.starter.Os.Windows
import org.mauritania.photosync.starter.gui.GuiStarter
import org.slf4j.LoggerFactory
import scala.util.{Failure, Success, Try}
object Starter {
val ConfTemplateResource = "/application.conf"
val InitFileOutput = "application.conf"
val logger = LoggerFactory.getLogger(this.getClass)
def main(args: Array[String]): Unit = {
try {
startApp(args)
} catch {
case e: Exception =>
logger.error("Application failed", e)
throw e
}
}
def startApp(args: Array[String]): Unit = {
logger.info(s"Version: ${Constants.Version}")
val fileConfiguration = ArgumentsParserBuilder.loadConfigFile
logger.info(s"Loading file configuration: $fileConfiguration")
    val osIsWindows = Os.currentOs(Os.defaultOsPropertyValue) == Windows
ArgumentsParserBuilder.Parser.parse(args, fileConfiguration) match {
case Some(config) if config.initConfig => initConfig(args)
case Some(config) if config.guiMode && !config.commandLineMode => GuiStarter.main(args)
      case Some(config) if osIsWindows && !config.commandLineMode => GuiStarter.main(args)
case Some(config) => Starter.startSynchronization(config)
case None => throw new IllegalArgumentException("Bad command line arguments!")
}
}
private def initConfig(args: Array[String]): Unit = {
val targetFile = new File(InitFileOutput)
val lines = readResource(ConfTemplateResource)
val newArg = s"-Dconfig.file=${targetFile.getAbsolutePath}"
lines match {
case Success(l) => {
writeToFile(targetFile, l)
logger.info(s"File created at: ${targetFile.getAbsolutePath}")
logger.info(s"Use it as follows:")
logger.info(s" - Under Linux/MacOs execute:")
logger.info(s" - photosync $newArg")
logger.info(s" - Under Windows do as follows:")
logger.info(s" - Open bin\\photosync.bat with a text editor")
logger.info(s" - Replace the line:")
logger.info(s" set _JAVA_PARAMS=")
logger.info(s" with:")
logger.info(s" set _JAVA_PARAMS=$newArg")
logger.info(s" - Save the file")
logger.info(s" - Launch it")
}
case Failure(f) => logger.error("Unexpected error", f)
}
}
private def writeToFile(f: File, lines: List[String]): Unit = {
val NewLine = System.getProperty("line.separator")
val linesWithNewLine = lines.map(_ + NewLine)
new PrintWriter(f) {
linesWithNewLine.foreach(write)
close()
}
}
private def readResource(r: String): Try[List[String]] = {
val confTemplateStream = getClass.getResourceAsStream(r)
val templateConfigLines = Try(scala.io.Source.fromInputStream(confTemplateStream).getLines.toList)
confTemplateStream.close()
templateConfigLines
}
def startSynchronization(config: PhotosyncConfig): Unit = {
logger.info(s"Using configuration ($config)...")
val cameraClient = new CameraClient(config.client)
val managerConfig = FilesManager.Config(
outputDir = new File(config.outputDirectory),
mediaFilter = config.mediaFilter
)
val manager = new FilesManager(cameraClient, managerConfig)
logger.info("Synchronizing media from camera -> PC...")
manager.sync()
logger.info("Synchronized!")
if (config.shutDownAfterSync) {
cameraClient.shutDown()
}
}
}
|
mauriciojost/olympus-photosync
|
src/main/scala/org/mauritania/photosync/starter/Starter.scala
|
Scala
|
apache-2.0
| 3,697 |
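Note that `readResource` above calls `confTemplateStream.close()` unguarded: if the resource is missing the stream is null and the close itself throws, and if `fromInputStream` fails the stream is never closed. On Scala 2.13+, `scala.util.Using` expresses the same read with guaranteed cleanup (a sketch, not the project's code):
import scala.util.{Try, Using}

object SafeResources {
  // Using closes the Source even when reading throws; a missing resource
  // surfaces as a Failure instead of an unguarded NullPointerException.
  def readResourceLines(r: String): Try[List[String]] =
    Using(scala.io.Source.fromInputStream(getClass.getResourceAsStream(r))) {
      _.getLines().toList
    }
}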
package play.core.server
import scala.language.postfixOps
import play.api._
import play.core._
import play.api.mvc._
import play.api.libs.iteratee._
import play.api.libs.iteratee.Input._
import play.api.libs.concurrent._
import akka.actor._
import akka.actor.Actor._
import akka.routing._
import akka.pattern.Patterns.ask
import scala.concurrent.duration._
import akka.util.Timeout
import scala.util.control.NonFatal
trait WebSocketable {
def getHeader(header: String): String
def check: Boolean
}
/**
 * Provides generic server behaviour for Play applications.
*/
trait Server {
// First delete the default log file for a fresh start (only in Dev Mode)
try {
if (mode == Mode.Dev) scalax.file.Path(new java.io.File(applicationProvider.path, "logs/application.log")).delete()
} catch {
case NonFatal(_) =>
}
// Configure the logger for the first time
Logger.configure(
Map("application.home" -> applicationProvider.path.getAbsolutePath),
mode = mode)
val bodyParserTimeout = {
    // TODO: move this timeout into proper configuration
1 second
}
def mode: Mode.Mode
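  // Resolves the handler for a request: web commands are consulted first; otherwise the
  // application's router is asked, falling back to onHandlerNotFound. Any failure is
  // logged and converted into an error result.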
def getHandlerFor(request: RequestHeader): Either[Result, (Handler, Application)] = {
import scala.util.control.Exception
def sendHandler: Either[Throwable, (Handler, Application)] = {
try {
applicationProvider.get.right.map { application =>
val maybeAction = application.global.onRouteRequest(request)
(maybeAction.getOrElse(Action(BodyParsers.parse.empty)(_ => application.global.onHandlerNotFound(request))), application)
}
} catch {
case e: ThreadDeath => throw e
case e: VirtualMachineError => throw e
case e: Throwable => Left(e)
}
}
def logExceptionAndGetResult(e: Throwable) = {
Logger.error(
"""
|
|! %sInternal server error, for (%s) [%s] ->
|""".stripMargin.format(e match {
case p: PlayException => "@" + p.id + " - "
case _ => ""
}, request.method, request.uri),
e)
DefaultGlobal.onError(request, e)
}
Exception
.allCatch[Option[Result]]
.either(applicationProvider.handleWebCommand(request))
.left.map(logExceptionAndGetResult)
.right.flatMap(maybeResult => maybeResult.toLeft(())).right.flatMap { _ =>
sendHandler.left.map(logExceptionAndGetResult)
}
}
def applicationProvider: ApplicationProvider
def stop() {
Logger.shutdown()
}
}
|
noel-yap/setter-for-catan
|
play-2.1.1/framework/src/play/src/main/scala/play/core/server/Server.scala
|
Scala
|
apache-2.0
| 2,512 |
/*
* Part of NDLA learningpath-api.
* Copyright (C) 2016 NDLA
*
* See LICENSE
*
*/
package no.ndla.learningpathapi
import org.scalatra.ScalatraServlet
import org.scalatra.swagger._
class ResourcesApp(implicit val swagger: Swagger) extends ScalatraServlet with NativeSwaggerBase {
get("/") {
renderSwagger2(swagger.docs.toList)
}
}
object LearningpathApiInfo {
val contactInfo: ContactInfo = ContactInfo(
LearningpathApiProperties.ContactName,
LearningpathApiProperties.ContactUrl,
LearningpathApiProperties.ContactEmail
)
val licenseInfo: LicenseInfo = LicenseInfo(
"GPL v3.0",
"http://www.gnu.org/licenses/gpl-3.0.en.html"
)
val apiInfo: ApiInfo = ApiInfo(
"Learningpath API",
"Services for accessing learningpaths",
LearningpathApiProperties.TermsUrl,
contactInfo,
licenseInfo
)
}
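// Registers an OAuth2 implicit-grant flow with Swagger so clients of the generated
// docs can obtain an access_token via the configured Auth0 login endpoint.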
class LearningpathSwagger extends Swagger("2.0", "1.0", LearningpathApiInfo.apiInfo) {
addAuthorization(
OAuth(List(), List(ImplicitGrant(LoginEndpoint(LearningpathApiProperties.Auth0LoginEndpoint), "access_token"))))
}
|
NDLANO/learningpath-api
|
src/main/scala/no/ndla/learningpathapi/LearningpathSwagger.scala
|
Scala
|
gpl-3.0
| 1,083 |
package rexstream.rex.vector
import rexstream.{DependencyProvider, RexInfo, RexTypeNames, RexVector}
/**
* Created by GregRos on 09/04/2016.
*/
private[rexstream] class RexVectorLink[T](inner: RexVector[T]) extends RexMap[T, T](inner, _.link_>) {
override val info = new RexInfo {
val isLazy = true
val isFunctional = false
val rexType = RexTypeNames.vectorLink
}
override val depends = DependencyProvider.source(inner)
}
|
GregRos/Rexstream
|
ScalaFBL/src/rexstream/rex/vector/RexVectorLink.scala
|
Scala
|
mit
| 467 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.docs
import java.io.InputStream
import play.doc.{ FileHandle, FileRepository }
/**
* A file repository that aggregates multiple file repositories
*
* @param repos The repositories to aggregate
*/
class AggregateFileRepository(repos: Seq[FileRepository]) extends FileRepository {
def this(repos: Array[FileRepository]) = this(repos.toSeq)
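  // Applies `load` to each repository in order and returns the first defined result.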
private def fromFirstRepo[A](load: FileRepository => Option[A]) = repos.collectFirst(Function.unlift(load))
def loadFile[A](path: String)(loader: (InputStream) => A) = fromFirstRepo(_.loadFile(path)(loader))
def handleFile[A](path: String)(handler: (FileHandle) => A) = fromFirstRepo(_.handleFile(path)(handler))
def findFileWithName(name: String) = fromFirstRepo(_.findFileWithName(name))
}
|
wsargent/playframework
|
framework/src/play-docs/src/main/scala/play/docs/AggregateFileRepository.scala
|
Scala
|
apache-2.0
| 842 |
import java.net.InetSocketAddress
import akka.actor.ActorSystem
import akka.util.ByteString
import colossus.core.{BackoffMultiplier, BackoffPolicy, IOSystem}
import colossus.metrics.MetricAddress
import colossus.protocols.http.HttpMethod.Get
import colossus.protocols.http.Http
import colossus.protocols.http.UrlParsing.{Root, on}
import colossus.protocols.http.{HttpServer, Initializer, RequestHandler}
import colossus.protocols.redis.Redis
import colossus.service.ClientConfig
import colossus.service.GenRequestHandler.PartialHandler
import scala.concurrent.duration._
object RedisRetryClient extends App {
implicit val actorSystem = ActorSystem()
implicit val ioSystem = IOSystem()
// #example
HttpServer.start("example-server", 9000) { initContext =>
new Initializer(initContext) {
val config = ClientConfig(
address = Seq(new InetSocketAddress("localhost", 6379)),
requestTimeout = 1.second,
name = MetricAddress.Root / "redis",
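        // Retry failed requests immediately (constant 0 ms backoff), up to 3 attempts in total.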
requestRetry = BackoffPolicy(
baseBackoff = 0.milliseconds,
multiplier = BackoffMultiplier.Constant,
maxTries = Some(3)
)
)
val redisClient = Redis.client(config)
override def onConnect: RequestHandlerFactory =
serverContext =>
new RequestHandler(serverContext) {
override def handle: PartialHandler[Http] = {
case request @ Get on Root =>
redisClient.append(ByteString("key"), ByteString("VALUE")).map { result =>
request.ok(s"Length of key is $result")
}
}
}
}
}
// #example
}
|
tumblr/colossus
|
colossus-docs/src/main/scala/RedisRetryClient.scala
|
Scala
|
apache-2.0
| 1,652 |
package org.akka.essentials.zeromq.example1
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.util.duration._
import akka.zeromq.Bind
import akka.zeromq.SocketType
import akka.zeromq.ZeroMQExtension
import akka.zeromq._
import akka.actor.Cancellable
case object Tick
class PublisherActor extends Actor with ActorLogging {
val pubSocket = ZeroMQExtension(context.system).newSocket(SocketType.Pub, Bind("tcp://127.0.0.1:1234"))
var count = 0
  var cancellable: Cancellable = null
override def preStart() {
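    // Send ourselves a Tick every second, starting one second after startup.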
cancellable = context.system.scheduler.schedule(1 second, 1 second, self, Tick)
}
def receive: Receive = {
case Tick =>
count += 1
      val payload = "This is the workload " + count
pubSocket ! ZMQMessage(Seq(Frame("someTopic"), Frame(payload)))
if(count == 10){
cancellable.cancel()
}
}
}
|
rokumar7/trial
|
AkkaWithZeroMQ/src/main/scala/org/akka/essentials/zeromq/example1/PublisherActor.scala
|
Scala
|
unlicense
| 863 |
package net.nomadicalien.ch5.free.repository
import net.nomadicalien.ch5.free.free.AccountRepo
import net.nomadicalien.ch5.free.model.common.Amount
import net.nomadicalien.ch5.free.model.{Account, Balance}
import scalaz.Free
// Listing 5.6 The building blocks of account repository DSL
sealed trait AccountRepoF[+A]
case class QueryAccount(no: String) extends AccountRepoF[Account]
case class StoreAccount(account: Account) extends AccountRepoF[Unit]
case class DeleteAccount(no: String) extends AccountRepoF[Unit]
/**
* Listing 5.4 The AccountRepository module definition (page 169)
*/
trait AccountRepository {
// liftF lifts the Store operation into the context of the free monad AccountRepo.
def store(a: Account): AccountRepo[Unit] = Free.liftF(StoreAccount(a))
def query(no: String): AccountRepo[Account] = Free.liftF(QueryAccount(no))
def delete(no: String): AccountRepo[Unit] =
Free.liftF(DeleteAccount(no))
def update(no: String, f: Account => Account): AccountRepo[Unit] =
for {
a <- query(no)
_ <- store(f(a))
} yield ()
def updateBalance(no: String, amount: Amount, f: (Account, Amount) => Account): AccountRepo[Unit] =
for {
a <- query(no)
_ <- store(f(a, amount))
} yield ()
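  // Example (illustrative): repository programs are pure descriptions until an
  // interpreter for AccountRepoF runs them, e.g.
  //   for { a <- query("acc-1"); _ <- store(a) } yield ()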
}
|
BusyByte/functional-n-reactive-domain-modeling
|
chapter5/src/main/scala/free/repository/repositories.scala
|
Scala
|
apache-2.0
| 1,294 |
package com.esri
import com.esri.core.geometry.{Envelope2D, Geometry, MultiPath, OperatorClip}
import spire.implicits.cfor
import scala.collection.mutable.ArrayBuffer
case class FeatureMulti(geom: Geometry, attr: Array[String]) extends Serializable {
def asMultiPath() = geom.asInstanceOf[MultiPath]
def toRowCols(cellSize: Double, snapMaxDistance: Double) = {
val arr = new ArrayBuffer[(RowCol, FeatureMulti)]()
val envp = new Envelope2D()
geom.queryEnvelope2D(envp)
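    // Grow the envelope by the snap distance and convert it to an inclusive range of grid cells.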
val cmin = ((envp.xmin - snapMaxDistance) / cellSize).floor.toInt
val cmax = ((envp.xmax + snapMaxDistance) / cellSize).floor.toInt
val rmin = ((envp.ymin - snapMaxDistance) / cellSize).floor.toInt
val rmax = ((envp.ymax + snapMaxDistance) / cellSize).floor.toInt
    /*
    Naive (unclipped) equivalent using a for-comprehension:
      for (r <- rmin to rmax; c <- cmin to cmax) yield (RowCol(r, c), this)
    */
val clip = OperatorClip.local
cfor(rmin)(_ <= rmax, _ + 1)(r => {
cfor(cmin)(_ <= cmax, _ + 1)(c => {
val x = c * cellSize
val y = r * cellSize
envp.xmin = x - snapMaxDistance
envp.xmax = x + cellSize + snapMaxDistance
envp.ymin = y - snapMaxDistance
envp.ymax = y + cellSize + snapMaxDistance
val clipped = clip.execute(geom, envp, null, null)
if (!clipped.isEmpty) {
arr += RowCol(r, c) -> FeatureMulti(clipped, attr)
}
})
})
arr
}
}
|
mraad/spark-snap-points
|
src/main/scala/com/esri/FeatureMulti.scala
|
Scala
|
apache-2.0
| 1,439 |
package edu.gemini.p1monitor
import config.P1MonitorConfig
import javax.mail.internet.{MimeMessage, InternetAddress}
import java.util.logging.{Level, Logger}
import java.util.Properties
import javax.mail.{Transport, Message, Session}
import edu.gemini.model.p1.immutable._
import edu.gemini.p1monitor.P1Monitor._
import scalaz._
import Scalaz._
class P1MonitorMailer(cfg: P1MonitorConfig) {
val LOG = Logger.getLogger(this.getClass.getName)
val sender = new InternetAddress("[email protected]")
def notify(dirName: String, files: ProposalFileGroup) {
val proposal = files.xml.map(ProposalIo.read)
//construct email subject
val subject = proposal.map { prop =>
s"New ${getSiteString(prop.observations)} ${getTypeString(prop.proposalClass)} Proposal: ${getReferenceString(prop.proposalClass)}"
}.getOrElse("")
//construct email body
val preBody = proposal.map { prop =>
val proposalVariable = getReferenceString(prop.proposalClass).split("-").tail.mkString("_")
s"""
|A new ${getTypeString(prop.proposalClass)} proposal has been received (${getReferenceString(prop.proposalClass)})
|
| ${prop.title}
| ${prop.investigators.pi}
|
| ${getInstrumentsString(prop)}
| ${prop.proposalClass.requestedTime.format()} requested
|
|Review the PDF summary at
|https://${cfg.getHost}/fetch/${getTypeName(prop.proposalClass)}/${Semester.current.display}/fetch?dir=$dirName&type=${getTypeName(prop.proposalClass)}&proposal=$proposalVariable&format=pdf
|
|Download the proposal from:
|https://${cfg.getHost}/fetch/${getTypeName(prop.proposalClass)}/${Semester.current.display}/fetch?dir=$dirName&type=${getTypeName(prop.proposalClass)}&proposal=$proposalVariable&format=xml
|
|Download the proposal's attachment from:
|https://${cfg.getHost}/fetch/${getTypeName(prop.proposalClass)}/${Semester.current.display}/fetch?dir=$dirName&type=${getTypeName(prop.proposalClass)}&proposal=$proposalVariable&format=attachment
|
""".stripMargin
}
val body = files.xml.map { x =>
s"""
|Find it in the backend server at:
| ${x.getAbsolutePath}
|""".stripMargin
}
//send email
(body |+| preBody).foreach(sendMail(dirName, subject, _))
}
private def sendMail(dirName: String, subject: String, body: String) {
// Log the email we will send.
LOG.log(Level.INFO, s"Sending email:\\n\\nSubject:\\n$subject \\n\\nBody: \\n$body")
// Create and update the mime message.
val msg = createMessage()
msg.setFrom(sender)
msg.setSubject(subject)
msg.setText(body)
addAddresses(msg, Message.RecipientType.TO, cfg.getDirectory(dirName).to)
addAddresses(msg, Message.RecipientType.CC, cfg.getDirectory(dirName).cc)
addAddresses(msg, Message.RecipientType.BCC, cfg.getDirectory(dirName).bcc)
// Send it.
Transport.send(msg)
}
private def createMessage() = {
val sessionProps = new Properties()
sessionProps.put("mail.transport.protocol", "smtp")
sessionProps.put("mail.smtp.host", cfg.getSmtp)
val session = Session.getInstance(sessionProps, null)
new MimeMessage(session)
}
private def addAddresses(msg: MimeMessage, recType: Message.RecipientType, addrs: Traversable[InternetAddress]) {
for (addr <- addrs) {
msg.addRecipient(recType, addr)
}
}
private def getSiteString(observations: List[Observation]): String = observations.flatMap { obs =>
obs.blueprint.map(_.site)
}.distinct.mkString(", ")
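  // Note: scalaz's unary ~ below unwraps an Option, falling back to the monoid
  // zero (the empty string) when it is None.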
private def getReferenceString(propClass: ProposalClass): String = {
val string = propClass match {
case pc: SpecialProposalClass => pc.sub.response.map(_.receipt.id).mkString(" ")
case ft: FastTurnaroundProgramClass => ft.sub.response.map(_.receipt.id).mkString(" ")
case lp: LargeProgramClass => lp.sub.response.map(_.receipt.id).mkString(" ")
case q: GeminiNormalProposalClass => ~q.subs.left.getOrElse(Nil).flatMap(_.response.map(_.receipt.id)).headOption
case _ => ""
}
string.trim
}
private def getTypeString(propClass: ProposalClass): String = propClass match {
case pc: SpecialProposalClass => pc.sub.specialType.value()
case _: FastTurnaroundProgramClass => "Fast Turnaround"
case _: LargeProgramClass => "Large Program"
case _: QueueProposalClass => "Queue"
case _: ClassicalProposalClass => "Classical"
case _ => ""
}
private def getTypeName(propClass: ProposalClass): String = propClass match {
case pc: SpecialProposalClass => pc.sub.specialType
case ft: FastTurnaroundProgramClass => "FT"
case lp: LargeProgramClass => "LP"
case q: GeminiNormalProposalClass => ~q.subs.left.getOrElse(Nil).collect {
case s if cfg.map.keys.toList.contains(s.partner.value()) => s.partner.value().toUpperCase
}.headOption
case _ => ""
}
private def getInstrumentsString(prop: Proposal): String = prop.observations.map {
obs => obs.blueprint match {
case Some(bp: GeminiBlueprintBase) => s"${bp.instrument.site.name} (${bp.instrument.id})"
case _ => ""
}
}.distinct.mkString(", ")
}
|
arturog8m/ocs
|
bundle/edu.gemini.p1monitor/src/main/scala/edu/gemini/p1monitor/P1MonitorMailer.scala
|
Scala
|
bsd-3-clause
| 5,522 |
package net.devkat.ocm
case class Path(names: List[String], absolute: Boolean) {
  def /(name: String) = Path(names ::: name :: Nil, absolute)
  override def toString = (if (absolute) "" :: names else names) mkString "/"
}
object Path {
val root = Path(Nil, true)
  def parse(s: String) = Path("""\/""".r.split("""^\/""".r.replaceFirstIn(s, "")).toList, s.startsWith("/"))
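  // Example (illustrative):
  //   Path.parse("/a/b") / "c"  // == Path(List("a", "b", "c"), absolute = true), i.e. "/a/b/c"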
}
|
devkat/scala-ocm
|
core/src/main/scala/net/devkat/ocm/Path.scala
|
Scala
|
apache-2.0
| 430 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.launcher
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.util.Properties
/**
* Exposes methods from the launcher library that are used by the YARN backend.
*/
private[spark] object YarnCommandBuilderUtils {
def quoteForBatchScript(arg: String): String = {
CommandBuilderUtils.quoteForBatchScript(arg)
}
def findJarsDir(sparkHome: String): String = {
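    // Derive the Scala binary version (e.g. "2.11" from "2.11.8") used to locate the jars directory.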
val scalaVer = Properties.versionNumberString
      .split("\\.")
.take(2)
.mkString(".")
CommandBuilderUtils.findJarsDir(sparkHome, scalaVer, true)
}
}
|
akopich/spark
|
resource-managers/yarn/src/main/scala/org/apache/spark/launcher/YarnCommandBuilderUtils.scala
|
Scala
|
apache-2.0
| 1,418 |
package report.donut.transformers.cucumber
case class Tag(name: String)
case class Result(duration: Long = 0L, status: String = "", error_message: String = "")
case class Argument(offset: Int, `val`: String = "")
case class Match(location: String = "", arguments: Option[List[Argument]])
case class BeforeHook(result: Result, output: List[String], `match`: Match)
case class AfterHook(result: Result, output: List[String], `match`: Match)
case class Row(cells: List[String])
case class Embedding(mime_type: String = "", data: String = "", id: Int = 0)
// cucumber 1 backwards compatibility
case class Examples(id: String,
name: String,
keyword: String,
line: Int,
description: Option[String],
rows: List[Row])
case class Step(name: String,
keyword: String,
line: Int,
result: Result,
`match`: Match,
rows: List[Row],
matchedColumns: List[Int],
output: List[String],
embeddings: List[Embedding])
case class Element(id: String = "",
description: Option[String],
name: String,
keyword: String,
line: Int,
`type`: Option[String],
before: List[BeforeHook],
after: List[AfterHook],
tags: List[Tag],
steps: List[Step],
examples: List[Examples])
case class Feature(keyword: String,
name: String,
description: Option[String],
line: Int,
id: String,
uri: String,
elements: List[Element],
tags: List[Tag]) {
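  // All scenario elements of this feature, excluding the Background section.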
val scenariosExcludeBackground = {
elements.filterNot(e => e.keyword == "Background")
}
}
|
MagenTys/donut
|
src/main/scala/report/donut/transformers/cucumber/CucumberModel.scala
|
Scala
|
mit
| 1,958 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.postgresql.exceptions
class ColumnDecoderNotFoundException(kind: Int)
extends IllegalArgumentException("There is no decoder available for kind %s".format(kind))
|
ImLiar/postgresql-async
|
postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/exceptions/ColumnDecoderNotFoundException.scala
|
Scala
|
apache-2.0
| 839 |
package com.twitter.finagle.loadbalancer
import com.twitter.finagle._
import com.twitter.finagle.client.Transporter
import com.twitter.finagle.stats._
import com.twitter.finagle.util.DefaultMonitor
import com.twitter.util.{Activity, Var}
import java.util.logging.{Level, Logger}
import scala.util.control.NonFatal
/**
* Exposes a [[Stack.Module]] which composes load balancing into the respective
* [[Stack]]. This is mixed in by default into Finagle's [[com.twitter.finagle.client.StackClient]].
* The only necessary configuration is a [[LoadBalancerFactory.Dest]] which
* represents a changing collection of addresses that is load balanced over.
*/
object LoadBalancerFactory {
val role: Stack.Role = Stack.Role("LoadBalancer")
/**
* A class eligible for configuring a client's load balancer probation setting.
   * When enabled, the balancer treats removals as advisory and flags them. If a
   * flagged endpoint is also detected as unhealthy by Finagle's session
   * qualifiers (e.g. fail-fast, failure accrual, etc.) then the host is removed
* from the collection.
*/
case class EnableProbation(enable: Boolean)
implicit object EnableProbation extends Stack.Param[EnableProbation] {
val default = EnableProbation(false)
}
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]] per host
* [[com.twitter.finagle.stats.StatsReceiver]]. If the per-host StatsReceiver is
* not null, the load balancer will broadcast stats to it (scoped with the
* "host:port" pair) for each host in the destination. For clients with a
* large host sets in their destination, this can cause unmanageable
* memory pressure.
*/
case class HostStats(hostStatsReceiver: StatsReceiver) {
def mk(): (HostStats, Stack.Param[HostStats]) =
(this, HostStats.param)
}
object HostStats {
implicit val param = Stack.Param(HostStats(NullStatsReceiver))
}
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]] with a collection
* of addrs to load balance.
*/
case class Dest(va: Var[Addr]) {
def mk(): (Dest, Stack.Param[Dest]) =
(this, Dest.param)
}
object Dest {
implicit val param = Stack.Param(Dest(Var.value(Addr.Neg)))
}
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]] with a label
* for use in error messages.
*/
case class ErrorLabel(label: String) {
def mk(): (ErrorLabel, Stack.Param[ErrorLabel]) =
(this, ErrorLabel.param)
}
object ErrorLabel {
implicit val param = Stack.Param(ErrorLabel("unknown"))
}
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]].
*/
case class Param(loadBalancerFactory: LoadBalancerFactory) {
def mk(): (Param, Stack.Param[Param]) =
(this, Param.param)
}
object Param {
implicit val param = new Stack.Param[Param] {
def default: Param = Param(defaultBalancerFactory)
}
}
/**
* A class eligible for configuring a [[com.twitter.finagle.Stackable]]
* [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]] with a
* finagle [[Address]] ordering. The collection of endpoints in a load
* balancer are sorted by this ordering. Although it's generally not a
* good idea to have the same ordering across process boundaries, the
* final ordering decision is left to the load balancer implementations.
* This only provides a stable ordering before we hand off the collection
* of endpoints to the balancer.
*/
case class AddressOrdering(ordering: Ordering[Address]) {
def mk(): (AddressOrdering, Stack.Param[AddressOrdering]) =
(this, AddressOrdering.param)
}
object AddressOrdering {
implicit val param = new Stack.Param[AddressOrdering] {
def default: AddressOrdering = AddressOrdering(defaultAddressOrdering)
}
}
/**
* A class eligible for configuring the [[LoadBalancerFactory]] behavior
* when the balancer does not find a node with `Status.Open`.
*
* The default is to "fail open" and pick a node at random.
*
* @see [[WhenNoNodesOpen]]
*/
case class WhenNoNodesOpenParam(whenNoNodesOpen: WhenNoNodesOpen) {
def mk(): (WhenNoNodesOpenParam, Stack.Param[WhenNoNodesOpenParam]) =
(this, WhenNoNodesOpenParam.param)
}
object WhenNoNodesOpenParam {
implicit val param = new Stack.Param[WhenNoNodesOpenParam] {
def default: WhenNoNodesOpenParam = WhenNoNodesOpenParam(WhenNoNodesOpen.PickOne)
}
}
/**
* A class eligible for configuring the way endpoints are created for
* a load balancer. In particular, each endpoint that is resolved is replicated
* by the given parameter. This increases concurrency for each identical endpoint
* and allows them to be load balanced over. This is useful for pipelining or
* multiplexing protocols that may incur head-of-line blocking (e.g. from the
* server's processing threads or the network) without this replication.
*/
case class ReplicateAddresses(count: Int) {
require(count >= 1, s"count must be >= 1 but was $count")
def mk(): (ReplicateAddresses, Stack.Param[ReplicateAddresses]) =
(this, ReplicateAddresses.param)
}
object ReplicateAddresses {
implicit val param = Stack.Param(ReplicateAddresses(1))
// Note, we need to change the structure of each replicated address
// so that the load balancer doesn't dedup them by their inet address.
// We do this by injecting an id into the addresses metadata map.
private val ReplicaKey = "lb_replicated_address_id"
private[finagle] def replicateFunc(num: Int): Address => Set[Address] = {
case Address.Inet(ia, metadata) =>
for (i: Int <- 0.until(num).toSet) yield {
Address.Inet(ia, metadata + (ReplicaKey -> i))
}
case addr => Set(addr)
}
}
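  // Illustrative note: with ReplicateAddresses(2), each Address.Inet is expanded into
  // two addresses that differ only in their replica-id metadata, so the balancer
  // treats them as distinct endpoints instead of deduplicating them.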
/**
* Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]].
* The module creates a new `ServiceFactory` based on the module above it for each `Addr`
* in `LoadBalancerFactory.Dest`. Incoming requests are balanced using the load balancer
* defined by the `LoadBalancerFactory.Param` parameter.
*/
private[finagle] trait StackModule[Req, Rep] extends Stack.Module[ServiceFactory[Req, Rep]] {
val role: Stack.Role = LoadBalancerFactory.role
val parameters = Seq(
implicitly[Stack.Param[ErrorLabel]],
implicitly[Stack.Param[WhenNoNodesOpenParam]],
implicitly[Stack.Param[Dest]],
implicitly[Stack.Param[Param]],
implicitly[Stack.Param[HostStats]],
implicitly[Stack.Param[AddressOrdering]],
implicitly[Stack.Param[param.Stats]],
implicitly[Stack.Param[param.Logger]],
implicitly[Stack.Param[param.Monitor]],
implicitly[Stack.Param[param.Reporter]]
)
def make(
params: Stack.Params,
next: Stack[ServiceFactory[Req, Rep]]
): Stack[ServiceFactory[Req, Rep]] = {
val _dest = params[Dest].va
val count = params[ReplicateAddresses].count
val dest = if (count == 1) _dest else {
val f = ReplicateAddresses.replicateFunc(count)
_dest.map {
        case bound @ Addr.Bound(set, _) => bound.copy(addrs = set.flatMap(f))
case addr => addr
}
}
val Param(loadBalancerFactory) = params[Param]
val EnableProbation(probationEnabled) = params[EnableProbation]
val param.Stats(statsReceiver) = params[param.Stats]
val param.Logger(log) = params[param.Logger]
val param.Label(label) = params[param.Label]
val param.Monitor(monitor) = params[param.Monitor]
val param.Reporter(reporter) = params[param.Reporter]
val rawStatsReceiver = statsReceiver match {
case sr: RollupStatsReceiver => sr.underlying.head
case sr => sr
}
// Determine which stats receiver to use based on `perHostStats`
// flag and the configured `HostStats` param. Report per-host stats
// only when the flag is set.
val hostStatsReceiver =
if (!perHostStats()) NullStatsReceiver
else params[LoadBalancerFactory.HostStats].hostStatsReceiver
// Creates a ServiceFactory from the `next` in the stack and ensures
// that `sockaddr` is an available param for `next`.
def newEndpoint(addr: Address): ServiceFactory[Req, Rep] = {
val stats =
if (hostStatsReceiver.isNull) statsReceiver
else {
val scope = addr match {
case Address.Inet(ia, _) =>
"%s:%d".format(ia.getHostName, ia.getPort)
case other => other.toString
}
val host = hostStatsReceiver.scope(label).scope(scope)
BroadcastStatsReceiver(Seq(host, statsReceiver))
}
val composite = {
val ia = addr match {
case Address.Inet(isa, _) => Some(isa)
case _ => None
}
// We always install a `DefaultMonitor` that handles all the un-handled
// exceptions propagated from the user-defined monitor.
val defaultMonitor = DefaultMonitor(label, ia.map(_.toString).getOrElse("n/a"))
reporter(label, ia).andThen(monitor.orElse(defaultMonitor))
}
next.make(
params +
Transporter.EndpointAddr(addr) +
param.Stats(stats) +
param.Monitor(composite)
)
}
val balancerStats = rawStatsReceiver.scope("loadbalancer")
val balancerExc = new NoBrokersAvailableException(params[ErrorLabel].label)
def newBalancer(
endpoints: Activity[Set[EndpointFactory[Req, Rep]]]
): ServiceFactory[Req, Rep] = {
val ordering = params[AddressOrdering].ordering
val orderedEndpoints = endpoints.map { set =>
try set.toVector.sortBy(_.address)(ordering)
catch {
case NonFatal(exc) =>
log.log(Level.WARNING, "Unable to order endpoints via AddressOrdering", exc)
set.toVector
}
}
val underlying = loadBalancerFactory.newBalancer(
orderedEndpoints,
balancerExc,
params + param.Stats(balancerStats)
)
params[WhenNoNodesOpenParam].whenNoNodesOpen match {
case WhenNoNodesOpen.PickOne => underlying
case WhenNoNodesOpen.FailFast => new NoNodesOpenServiceFactory(underlying)
}
}
val destActivity: Activity[Set[Address]] = Activity(dest.map {
case Addr.Bound(set, _) =>
Activity.Ok(set)
case Addr.Neg =>
log.info(s"$label: name resolution is negative (local dtab: ${Dtab.local})")
Activity.Ok(Set.empty)
case Addr.Failed(e) =>
log.log(Level.INFO, s"$label: name resolution failed (local dtab: ${Dtab.local})", e)
Activity.Failed(e)
case Addr.Pending =>
if (log.isLoggable(Level.FINE)) {
log.fine(s"$label: name resolution is pending")
}
Activity.Pending
}: Var[Activity.State[Set[Address]]])
// Instead of simply creating a newBalancer here, we defer to the
// traffic distributor to interpret weighted `Addresses`.
Stack.Leaf(
role,
new TrafficDistributor[Req, Rep](
dest = destActivity,
newEndpoint = newEndpoint,
newBalancer = newBalancer,
eagerEviction = !probationEnabled,
statsReceiver = balancerStats
)
)
}
}
private[finagle] def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
new StackModule[Req, Rep] {
val description = "Balances requests across a collection of endpoints."
}
}
/**
* A thin interface around a Balancer's constructor that allows Finagle to pass in
* context from the stack to the balancers at construction time.
*
* @see [[Balancers]] for a collection of available balancers.
*
* @see The [[https://twitter.github.io/finagle/guide/Clients.html#load-balancing user guide]]
* for more details.
*/
abstract class LoadBalancerFactory {
/**
* Returns a new balancer which is represented by a [[com.twitter.finagle.ServiceFactory]].
*
* @param endpoints The load balancer's collection is usually populated concurrently.
* So the interface to build a balancer is wrapped in an [[com.twitter.util.Activity]]
* which allows us to observe this process for changes.
*
* @param emptyException The exception returned when a balancer's collection is empty.
*
* @param params A collection of parameters usually passed in from the client stack.
*
* @note `endpoints` are ordered by the [[LoadBalancerFactory.AddressOrdering]] param.
*/
def newBalancer[Req, Rep](
endpoints: Activity[IndexedSeq[EndpointFactory[Req, Rep]]],
emptyException: NoBrokersAvailableException,
params: Stack.Params
): ServiceFactory[Req, Rep]
}
/**
* A [[LoadBalancerFactory]] proxy which instantiates the underlying
* based on flags (see flags.scala for applicable flags).
*/
object FlagBalancerFactory extends LoadBalancerFactory {
private val log = Logger.getLogger(getClass.getName)
/**
* Java friendly getter.
*/
def get: LoadBalancerFactory = this
private def p2c(): LoadBalancerFactory =
exp.loadMetric() match {
case "ewma" => Balancers.p2cPeakEwma()
case _ => Balancers.p2c()
}
private def aperture(): LoadBalancerFactory =
exp.loadMetric() match {
case "ewma" => Balancers.aperturePeakEwma()
case _ => Balancers.aperture()
}
private val underlying: LoadBalancerFactory =
defaultBalancer() match {
case "heap" => Balancers.heap()
case "choice" => p2c()
case "aperture" => aperture()
case x =>
log.warning(s"""Invalid load balancer $x, using "choice" balancer.""")
p2c()
}
def newBalancer[Req, Rep](
endpoints: Activity[IndexedSeq[EndpointFactory[Req, Rep]]],
emptyException: NoBrokersAvailableException,
params: Stack.Params
): ServiceFactory[Req, Rep] = {
underlying.newBalancer(endpoints, emptyException, params)
}
}
@deprecated("Use com.twitter.finagle.loadbalancer.FlagBalancerFactory instead.", "2017-11-28")
object DefaultBalancerFactory extends LoadBalancerFactory {
def newBalancer[Req, Rep](
endpoints: Activity[IndexedSeq[EndpointFactory[Req, Rep]]],
emptyException: NoBrokersAvailableException,
params: Stack.Params
): ServiceFactory[Req, Rep] = {
FlagBalancerFactory.newBalancer(endpoints, emptyException, params)
}
}
|
mkhq/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/loadbalancer/LoadBalancerFactory.scala
|
Scala
|
apache-2.0
| 14,826 |
package config
import uk.gov.hmrc.http._
import uk.gov.hmrc.http.hooks.HttpHooks
import uk.gov.hmrc.play.audit.http.HttpAuditing
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.auth.microservice.connectors.AuthConnector
import uk.gov.hmrc.play.config.{AppName, RunMode, ServicesConfig}
import uk.gov.hmrc.play.http.ws._
import uk.gov.hmrc.http.hooks.HttpHook
import uk.gov.hmrc.play.microservice.config.LoadAuditingConfig
trait Hooks extends HttpHooks {
override val hooks = NoneRequired
}
trait WSHttp
  extends HttpGet with WSGet
  with HttpPut with WSPut
  with HttpPost with WSPost
  with HttpDelete with WSDelete
  with Hooks with AppName
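// Concrete HTTP client wiring every verb through Play WS; auditing hooks are disabled via Hooks above.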
object WSHttp extends WSHttp
object MicroserviceAuditConnector extends AuditConnector with RunMode {
override lazy val auditingConfig = LoadAuditingConfig(s"$env.auditing")
}
object MicroserviceAuthConnector extends AuthConnector with ServicesConfig with WSHttp {
override val authBaseUrl = baseUrl("auth")
}
|
andywhardy/address-reputation-ingester
|
app/config/microserviceWiring.scala
|
Scala
|
apache-2.0
| 991 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.action
import org.orbeon.dom.Element
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.util.CollectionUtils._
import org.orbeon.oxf.util.{Logging, XPathCache}
import org.orbeon.oxf.xforms.XFormsConstants._
import org.orbeon.oxf.xforms.analysis.VariableAnalysis
import org.orbeon.oxf.xforms.control.XFormsControl
import org.orbeon.oxf.xforms.xbl.Scope
import org.orbeon.oxf.xml.Dom4j
import org.orbeon.oxf.xml.dom4j.{Dom4jUtils, LocationData}
import org.orbeon.saxon.om.Item
import scala.collection.JavaConverters._
abstract class XFormsAction extends Logging {
// Execute the action with the given context
// By default, run the legacy execute()
def execute(actionContext: DynamicActionContext): Unit =
execute(
actionContext.interpreter,
actionContext.analysis.element,
actionContext.analysis.scope,
actionContext.overriddenContext.isDefined,
actionContext.overriddenContext.orNull
)
// Legacy execute()
def execute(
actionInterpreter: XFormsActionInterpreter,
actionElement: Element,
actionScope: Scope,
hasOverriddenContext: Boolean,
overriddenContext: Item): Unit = ()
// Resolve a control given the name of an AVT
def resolveControlAvt(attName: String, required: Boolean = true)(implicit context: DynamicActionContext): Option[XFormsControl] = {
val interpreter = context.interpreter
val element = context.element
resolveStringAVT(attName)(context) match {
case Some(resolvedAvt) ⇒
resolveControl(resolvedAvt) match {
case Some(control) ⇒ Some(control)
case _ ⇒
implicit val indentedLogger = interpreter.indentedLogger
debug(
"attribute does not refer to an existing control",
Seq(
"attribute" → attName,
"value" → element.attributeValue("control"),
"resolved value" → resolvedAvt
)
)
None
}
case None if required ⇒
// This can happen if the attribute is missing or if the AVT cannot be evaluated due to an empty context
throw new OXFException("Cannot resolve mandatory '" + attName + "' attribute on " + context.actionName + " action.")
case None if ! required ⇒
None
}
}
def resolveControl(controlId: String)(implicit context: DynamicActionContext): Option[XFormsControl] = {
val interpreter = context.interpreter
val element = context.element
collectByErasedType[XFormsControl](interpreter.resolveObject(element, controlId))
}
  // Resolve an optional string AVT
  // Return None if there is no attribute or if the AVT cannot be evaluated
def resolveStringAVT(att: String)(implicit context: DynamicActionContext) =
context.element.attributeValueOpt(att) flatMap
(avt ⇒ Option(context.interpreter.resolveAVTProvideValue(context.element, avt)))
// Resolve an optional boolean AVT
def resolveBooleanAVT(att: String, default: Boolean)(implicit context: DynamicActionContext) =
resolveStringAVT(att)(context) map (_ == "true") getOrElse default
def synchronizeAndRefreshIfNeeded(context: DynamicActionContext): Unit =
if (context.interpreter.isDeferredUpdates(context.element))
context.containingDocument.synchronizeAndRefresh()
}
object XFormsAction {
// Obtain context attributes based on nested xf:property elements.
def eventProperties(actionInterpreter: XFormsActionInterpreter, actionElement: Element) = {
val contextStack = actionInterpreter.actionXPathContext
// Iterate over context information if any
val tuples =
for {
element ← Dom4j.elements(actionElement)
if Set(XFORMS_PROPERTY_QNAME, XXFORMS_CONTEXT_QNAME)(element.getQName) // xf:property since XForms 2.0
// Get and check attributes
name =
Option(Dom4jUtils.qNameToExplodedQName(Dom4jUtils.extractAttributeValueQName(element, NAME_QNAME))) getOrElse
          (throw new OXFException(XXFORMS_CONTEXT_QNAME.qualifiedName + " element must have a \"name\" attribute."))
value = VariableAnalysis.valueOrSelectAttribute(element) match {
case Some(value) ⇒
// XPath expression
// Set context on context element
val currentActionScope = actionInterpreter.getActionScope(element)
contextStack.pushBinding(
element,
actionInterpreter.getSourceEffectiveId(element),
currentActionScope,
false
)
// Evaluate context parameter
val result =
XPathCache.normalizeSingletons(
XPathCache.evaluate(
contextItems = actionInterpreter.actionXPathContext.getCurrentBindingContext.nodeset,
contextPosition = actionInterpreter.actionXPathContext.getCurrentBindingContext.position,
xpathString = value,
namespaceMapping = actionInterpreter.getNamespaceMappings(element),
variableToValueMap = contextStack.getCurrentBindingContext.getInScopeVariables,
functionLibrary = actionInterpreter.containingDocument.getFunctionLibrary,
functionContext = contextStack.getFunctionContext(actionInterpreter.getSourceEffectiveId(element)),
baseURI = null,
locationData = element.getData.asInstanceOf[LocationData],
reporter = actionInterpreter.containingDocument.getRequestStats.addXPathStat
).asScala
)
contextStack.popBinding()
result
case None ⇒
// Literal text
element.getStringValue
}
} yield
(name, Option(value))
tuples.toMap
}
}
|
brunobuzzi/orbeon-forms
|
xforms/jvm/src/main/scala/org/orbeon/oxf/xforms/action/XFormsAction.scala
|
Scala
|
lgpl-2.1
| 6,592 |
package com.lightreporter.registration
import java.util.concurrent.locks.{Lock, ReentrantLock}
import akka.actor.ActorRef
/**
* Created by y28yang on 5/4/2016.
*/
class SlowReceiver(testRef: ActorRef, val lock: Lock = new ReentrantLock) extends StringReceiver {
override def receive(msg: String): Unit = {
Thread.sleep(100)
println(s"get: $msg,at:"+System.currentTimeMillis())
testRef ! msg
}
}
|
wjingyao2008/firsttry
|
lightreporter/src/test/scala/com/lightreporter/Registration/SlowReceiver.scala
|
Scala
|
apache-2.0
| 483 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package docs.scaladsl.cluster.pubsub
package example {
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
import play.api.libs.json.Format
import play.api.libs.json.Json
import scala.concurrent.Future
case class Temperature(value: Double)
object Temperature {
implicit val format: Format[Temperature] = Json.format
}
//#service-api
trait SensorService extends Service {
def registerTemperature(id: String): ServiceCall[Temperature, NotUsed]
def temperatureStream(id: String): ServiceCall[NotUsed, Source[Temperature, NotUsed]]
def descriptor = {
import Service._
named("/sensorservice").withCalls(
pathCall("/device/:id/temperature", registerTemperature _),
pathCall("/device/:id/temperature/stream", temperatureStream _)
)
}
}
//#service-api
//#service-impl
import com.lightbend.lagom.scaladsl.pubsub.PubSubRegistry
import com.lightbend.lagom.scaladsl.pubsub.TopicId
class SensorServiceImpl(pubSub: PubSubRegistry) extends SensorService {
def registerTemperature(id: String) = ServiceCall { temperature =>
val topic = pubSub.refFor(TopicId[Temperature](id))
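      // Note: pub-sub delivery is best-effort (at-most-once) to the subscribers
      // that are currently connected.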
topic.publish(temperature)
Future.successful(NotUsed.getInstance())
}
def temperatureStream(id: String) = ServiceCall { _ =>
val topic = pubSub.refFor(TopicId[Temperature](id))
Future.successful(topic.subscriber)
}
}
//#service-impl
}
package serviceimplstream {
import example.Temperature
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.lightbend.lagom.scaladsl.api.Service._
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.pubsub.PubSubRegistry
import com.lightbend.lagom.scaladsl.pubsub.TopicId
import scala.concurrent.Future
trait SensorService extends Service {
def registerTemperature(id: String): ServiceCall[Source[Temperature, NotUsed], NotUsed]
def temperatureStream(id: String): ServiceCall[NotUsed, Source[Temperature, NotUsed]]
def descriptor = {
import Service._
named("/sensorservice").withCalls(
pathCall("/device/:id/temperature", registerTemperature _),
pathCall("/device/:id/temperature/stream", temperatureStream _)
)
}
}
//#service-impl-stream
import akka.stream.Materializer
class SensorServiceImpl(pubSub: PubSubRegistry)(implicit materializer: Materializer) extends SensorService {
def registerTemperature(id: String) = ServiceCall { temperatures =>
val topic = pubSub.refFor(TopicId[Temperature](id))
temperatures.runWith(topic.publisher)
Future.successful(NotUsed.getInstance())
}
def temperatureStream(id: String) = ServiceCall { _ =>
val topic = pubSub.refFor(TopicId[Temperature](id))
Future.successful(topic.subscriber)
}
}
//#service-impl-stream
}
package persistententity {
import akka.Done
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.lightbend.lagom.scaladsl.api.transport.Method
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
import docs.home.scaladsl.persistence._
import play.api.libs.json.Format
import play.api.libs.json.Json
import scala.concurrent.Future
//#persistent-entity-inject
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity
import com.lightbend.lagom.scaladsl.pubsub.PubSubRegistry
import com.lightbend.lagom.scaladsl.pubsub.TopicId
final class Post(pubSubRegistry: PubSubRegistry) extends PersistentEntity {
private val publishedTopic = pubSubRegistry.refFor(TopicId[PostPublished])
//#persistent-entity-inject
override type Command = BlogCommand
override type Event = BlogEvent
override type State = BlogState
override def initialState: BlogState = BlogState.empty
override def behavior: Behavior = {
case state if state.isEmpty => initial
case state if !state.isEmpty => postAdded
}
private val initial: Actions = {
Actions()
.onCommand[AddPost, AddPostDone] {
case (AddPost(content), ctx, state) =>
ctx.thenPersist(PostAdded(entityId, content)) { evt =>
ctx.reply(AddPostDone(entityId))
}
}
.onCommand[AddPost, AddPostDone] {
case (AddPost(content), ctx, state) =>
if (content.title == null || content.title.equals("")) {
ctx.invalidCommand("Title must be defined")
ctx.done
} else {
ctx.thenPersist(PostAdded(entityId, content)) { evt =>
ctx.reply(AddPostDone(entityId))
}
}
}
.onEvent {
case (PostAdded(postId, content), state) =>
BlogState(Some(content), published = false)
}
}
private val postAdded: Actions = {
Actions()
.onCommand[ChangeBody, Done] {
case (ChangeBody(body), ctx, state) =>
ctx.thenPersist(BodyChanged(entityId, body))(_ => ctx.reply(Done))
}
//#persistent-entity-publish
.onCommand[Publish.type, Done] {
case (Publish, ctx, state) =>
ctx.thenPersist(PostPublished(entityId)) { evt =>
ctx.reply(Done)
publishedTopic.publish(evt)
}
}
//#persistent-entity-publish
.onEvent {
case (BodyChanged(_, body), state) =>
state.withBody(body)
}
.onReadOnlyCommand[GetPost.type, PostContent] {
case (GetPost, ctx, state) =>
ctx.reply(state.content.get)
}
}
}
trait BlogService extends Service {
def publishedStream: ServiceCall[NotUsed, Source[PostPublished, NotUsed]]
override def descriptor = {
import Service._
implicit val postPublishedFormat: Format[PostPublished] = Json.format
named("blogservice").withCalls(
restCall(Method.GET, "/blogs/published", publishedStream)
)
}
}
//#entity-service-impl
class BlogServiceImpl(pubSubRegistry: PubSubRegistry) extends BlogService {
private val publishedTopic = pubSubRegistry.refFor(TopicId[PostPublished])
override def publishedStream = ServiceCall { _ =>
Future.successful(publishedTopic.subscriber)
}
}
//#entity-service-impl
}
|
ignasi35/lagom
|
docs/manual/scala/guide/cluster/code/PubSub.scala
|
Scala
|
apache-2.0
| 6,603 |
/*
Copyright (c) 2017, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum.$internal
import scala.language.experimental.macros
object Macro {
  val templateString = "st\"...\""
def parMap[T, U](arg: scala.collection.Seq[T], f: T => U): scala.collection.IndexedSeq[U] = macro Macro.parMapImpl
def sync[T](o: AnyRef, arg: T): T = macro Macro.syncImpl
def isJs: Boolean = macro Macro.isJsImpl
def version: String = macro Macro.versionImpl
def eval[T](c: scala.reflect.macros.blackbox.Context)(
t: Any, n: Int = 6): T = { // HACK: eval may non-deterministically fail, so try n times!
val tree = t.asInstanceOf[c.Tree]
val expr = c.Expr(c.untypecheck(tree))
for (_ <- 0 until n) {
scala.util.Try(c.eval[T](expr)) match {
case scala.util.Success(x) => return x
case _ =>
}
synchronized { wait(100) }
}
c.eval[T](expr)
}
}
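// Illustrative note: parMap expands to a parallel map on the JVM but to a plain
// sequential map under Scala.js (detected via isJsCheck), where parallel
// collections are unavailable.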
import Macro._
class Macro(val c: scala.reflect.macros.blackbox.Context) {
val isJsCheck: Boolean = scala.util.Try(Class.forName("scala.scalajs.js.Any", false, getClass.getClassLoader)).isSuccess
import c.universe._
def l[T](args: c.Expr[Any]*): c.Expr[T] =
c.Expr[T]( q"""halt("Slang l\\"\\"\\"...\\"\\"\\" should have been erased by the Sireum Scala plugin.")""")
def lUnit(args: c.Expr[Any]*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit1(arg0: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit2(arg0: c.Tree, arg1: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit3(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit4(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree, arg3: c.Tree): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit0S(arg0: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit1S(arg0: c.Tree, arg1: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit2S(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lUnit3S(arg0: c.Tree, arg1: c.Tree, arg2: c.Tree, arg3: c.Tree*): c.Expr[Unit] = c.Expr[Unit](q"{}")
def lDef[T](args: c.Expr[Any]*): c.Expr[T] =
c.Expr[T]( q"""halt("Slang l\\"\\"\\"...\\"\\"\\" should have been erased by the Sireum Scala plugin.")""")
def $[T]: c.Expr[T] = c.Expr[T]( q"""halt("Slang '$$' should have been erased by the Sireum Scala compiler plugin.")""")
def extractParts: Seq[c.Tree] = (c.prefix.tree match {
case q"org.sireum.`package`.$$Slang(scala.StringContext.apply(..$ps)).$_" => ps
case q"sireum.this.`package`.$$Slang(scala.StringContext.apply(..$ps)).$_" => ps
case q"org.sireum.`package`.$$Slang(scala.StringContext.apply(..$ps))" => ps
case q"sireum.this.`package`.$$Slang(scala.StringContext.apply(..$ps))" => ps
}).asInstanceOf[Seq[c.Tree]]
def zApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang z\\"...\\" should not contain $$ arguments.")
q"_root_.org.sireum.Z.$$String(${parts.head})"
}
def cApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang c\\"...\\" should not contain $$ arguments.")
val s = Macro.eval[String](c)(parts.head)
if (s.codePointCount(0, s.length) != 1) c.abort(c.prefix.tree.pos, "Slang c\\"...\\" can only have a single character.")
q"_root_.org.sireum.C(${parts.head}.codePointAt(0))"
}
def f32Apply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang f32\\"...\\" should not contain $$ arguments.")
q"_root_.org.sireum.F32.$$String(${parts.head})"
}
def f64Apply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang f64\\"...\\" should not contain $$ arguments.")
q"_root_.org.sireum.F64.$$String(${parts.head})"
}
def rApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang r\\"...\\" should not contain $$ arguments.")
q"_root_.org.sireum.R.$$String(${parts.head})"
}
def stringApply(args: c.Tree*): c.Tree = {
val parts = extractParts
if (parts.size != 1) c.abort(c.prefix.tree.pos, "Slang string\\"...\\" should not contain $$ arguments.")
q"_root_.org.sireum.String(${parts.head})"
}
def $assign(arg: c.Tree): c.Tree = {
def args(n: Int): c.Tree = {
val l = (for (i <- 1 to n) yield
Apply(q"_root_.org.sireum.helper.assign", List(Select(Ident(TermName("x")), TermName(s"_$i"))))).toList
Block(List(q"val x = $arg"),
Apply(Select(Ident(TermName("scala")), TermName(s"Tuple$n")), l))
}
//println(showRaw(arg))
val mm = c.typeOf[MutableMarker]
val r = arg match {
case q"(..$args)" if args.size > 1 => arg
case _ =>
if (arg.tpe <:< mm) q"_root_.org.sireum.helper.assignMut($arg)"
else if (arg.tpe.typeSymbol.fullName.startsWith("scala.Tuple")) {
val n = arg.tpe.typeSymbol.fullName.substring("scala.Tuple".length).toInt
args(n)
}
else arg
}
//println(showRaw(r))
//println(showCode(r))
r
}
def $tmatch(arg: c.Tree): c.Tree = {
def args(n: Int): c.Tree = {
val l = (for (i <- 1 to n) yield
Apply(q"_root_.org.sireum.helper.assign", List(Select(Ident(TermName("x")), TermName(s"_$i"))))).toList
Block(List(q"val x = $arg"),
Apply(Select(Ident(TermName("scala")), TermName(s"Tuple$n")), l))
}
//println(showRaw(arg))
val r = arg match {
case q"(..$args)" if args.size > 1 => arg
case _ =>
if (arg.tpe.typeSymbol.fullName.startsWith("scala.Tuple")) {
val n = arg.tpe.typeSymbol.fullName.substring("scala.Tuple".length).toInt
args(n)
}
else arg
}
//println(showRaw(r))
//println(showCode(r))
r
}
def parMapImpl(arg: c.Tree, f: c.Tree): c.Tree =
if (isJsCheck) q"$arg.map($f).toIndexedSeq"
else q"$arg.par.map($f).toIndexedSeq"
def syncImpl(o: c.Tree, arg: c.Tree): c.Tree = if (isJsCheck) arg else q"$o.synchronized { $arg }"
def st(args: c.Tree*): c.Tree = {
def processArg(e: c.Tree, sep: c.Tree): c.Tree = {
val t = e.tpe.dealias
val templ = c.typeOf[org.sireum.$internal.STMarker]
val r =
if (t <:< templ) q"ST.Templ(scala.Seq($e), $sep)"
else if (t <:< c.typeOf[ISMarker] || t <:< c.typeOf[MSMarker]) {
t.typeArgs.length match {
case 1 if t.typeArgs.head <:< templ => q"ST.Templ($e.elements, $sep)"
case 2 if t.typeArgs(1) <:< templ => q"ST.Templ($e.elements, $sep)"
case _ => q"ST.Any($e.elements.map($$internal.Option.apply), $sep)"
}
} else if (t.erasure <:< c.typeOf[CollectionCompat.IterableOnce[Any]].erasure) {
if (t.typeArgs.head <:< templ) q"ST.Templ($e.toSeq, $sep)"
else q"ST.Any($e.toSeq.map($$internal.Option.apply), $sep)"
} else q"ST.Any(scala.Seq($$internal.Option($e)), $sep)"
//println(showCode(r))
r
}
//println(showRaw(c.prefix.tree))
//println(showCode(c.prefix.tree))
val pos = c.prefix.tree.pos
val isSingle =
if (pos.source.content.length >= pos.start + 5)
        pos.source.content.subSequence(pos.start, pos.start + 5).toString != "st\"\"\""
else true
val parts = {
val ps = extractParts
if (isSingle) ps.map(p => q"StringContext.processEscapes($p)") else ps
}
val stArgs = for (arg <- args) yield arg match {
case q"(..$exprs)" if exprs.size > 1 =>
if (exprs.size != 2) c.abort(arg.pos, s"Expecting a pair instead of a ${exprs.size}-tuple.")
val e = exprs(1).asInstanceOf[c.Tree]
val first = exprs.head.asInstanceOf[c.Tree]
val t = e.tpe
if (t <:< c.typeOf[Predef.String]) processArg(first, e)
else if (t.typeSymbol.fullName == "org.sireum.String") processArg(first, q"$e.value")
else c.abort(e.pos, s"Expecting a separator string instead of '${showCode(e)}'.")
case _ =>
processArg(arg, Literal(Constant("")))
}
val source = if (pos.isRange) {
val text = pos.source.content
val sb = new java.lang.StringBuilder
for (_ <- 0 until pos.column - 1) sb.append(' ')
for (i <- pos.start until pos.end) {
sb.append(text(i))
}
sb.toString
} else templateString
q"ST(scala.Seq(..$parts), scala.Seq(..$stArgs), ${Literal(Constant(source))})"
}
def isJsImpl: c.Tree = if (isJsCheck) q"true" else q"false"
def versionImpl: c.Tree = {
val p = Runtime.getRuntime.exec(Array("git", "log", "-n", "1", "--date=format:%Y%m%d", "--pretty=format:4.%cd.%h"))
val r = new java.io.LineNumberReader(new java.io.InputStreamReader(p.getInputStream))
val v = r.readLine()
r.close()
c.universe.Literal(c.universe.Constant(v))
}
}
|
sireum/v3-runtime
|
macros/shared/src/main/scala/org/sireum/$internal/Macro.scala
|
Scala
|
bsd-2-clause
| 10,153 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic
import TripleEqualsSupport._
/**
* Provides an implicit method that loosens the equality constraint defined by <code>TypeCheckedTripleEquals</code> or <code>ConversionCheckedTripleEquals</code>
* for Scala <code>Set</code>s to one that more closely matches Scala's approach to <code>Set</code> equality.
*
* <p>
* Scala's approach to <code>Set</code> equality is that if both objects being compared are <code>Set</code>s, the elements are compared to determine equality.
* This means you could compare an immutable <code>TreeSet</code> and a mutable <code>HashSet</code> for equality, for instance, and get true so long as the
* two <code>Set</code>s contained the same elements in the same order. Here's an example:
* </p>
*
* <pre class="stREPL">
* scala> import scala.collection.immutable.TreeSet
* import scala.collection.immutable.TreeSet
*
* scala> import scala.collection.mutable.HashSet
* import scala.collection.mutable.HashSet
*
* scala> TreeSet(1, 2) == HashSet(1, 2)
* res0: Boolean = true
* </pre>
*
* <p>
* Such a comparison would not, however, compile if you used <code>===</code> under either <code>TypeCheckedTripleEquals</code> or <code>ConversionCheckedTripleEquals</code>,
* because <code>TreeSet</code> and <code>HashSet</code> are not in a subtype/supertype relationship, nor does an implicit conversion by default exist between them:
* </p>
*
* <pre class="stREPL">
* scala> import org.scalactic._
* import org.scalactic._
*
* scala> import TypeCheckedTripleEquals._
* import TypeCheckedTripleEquals._
*
* scala> TreeSet(1, 2) === HashSet(1, 2)
* <console>:16: error: types scala.collection.immutable.TreeSet[Int] and
* scala.collection.mutable.HashSet[Int] do not adhere to the equality constraint selected for
* the === and !== operators; the missing implicit parameter is of type
* org.scalactic.EqualityConstraint[scala.collection.immutable.TreeSet[Int],
* scala.collection.mutable.HashSet[Int]]
* TreeSet(1, 2) === HashSet(1, 2)
* ^
* </pre>
*
* <p>
* If you mix or import the implicit conversion provided by <code>SetEqualityConstraint</code>, however, the comparison will be allowed:
* </p>
*
* <pre class="stREPL">
* scala> import SetEqualityConstraints._
* import SetEqualityConstraints._
*
* scala> TreeSet(1, 2) === HashSet(1, 2)
* res2: Boolean = true
* </pre>
*
* <p>
* The equality constraint provided by this trait requires that both left and right sides are subclasses of <code>scala.collection.GenSet</code> and that
* an <code>EqualityConstraint</code> can be found for the element types. In the example above, both the <code>TreeSet</code> and
* <code>HashSet</code> are subclasses of <code>scala.collection.GenSet</code>, and the regular <code>TypeCheckedTripleEquals</code> provides equality
* constraints for the element types, both of which are <code>Int</code>. By contrast, this
* trait would not allow a <code>TreeSet[Int]</code> to be compared against a <code>HashSet[java.util.Date]</code>, because no equality constraint
* will exist between the element types <code>Int</code> and <code>Date</code>:
* </p>
*
* <pre class="stREPL">
* scala> import java.util.Date
* import java.util.Date
*
* scala> TreeSet(1, 2) === HashSet(new Date, new Date)
* <console>:20: error: types scala.collection.immutable.TreeSet[Int] and
* scala.collection.mutable.HashSet[java.util.Date] do not adhere to the equality constraint selected for
* the === and !== operators; the missing implicit parameter is of type
* org.scalactic.EqualityConstraint[scala.collection.immutable.TreeSet[Int],
* scala.collection.mutable.HashSet[java.util.Date]]
* TreeSet(1, 2) === HashSet(new Date, new Date)
* ^
* </pre>
*
* @author Bill Venners
*/
trait SetEqualityConstraints {
import scala.language.higherKinds
/**
* Provides an equality constraint that allows two subtypes of <code>scala.collection.GenSet</code>s to be compared for equality with <code>===</code> so long
* as an <code>EqualityConstraint</code> is available for the element types.
*/
implicit def setEqualityConstraint[EA, CA[ea] <: collection.GenSet[ea], EB, CB[eb] <: collection.GenSet[eb]](implicit equalityOfA: Equality[CA[EA]], ev: EA CanEqual EB): CA[EA] CanEqual CB[EB] = new EqualityConstraint[CA[EA], CB[EB]](equalityOfA)
}
/**
* Companion object that facilitates the importing of <code>SetEqualityConstraints</code> members as
* an alternative to mixing it in. One use case is to import <code>SetEqualityConstraints</code> members so you can use
* them in the Scala interpreter.
*/
object SetEqualityConstraints extends SetEqualityConstraints
|
dotty-staging/scalatest
|
scalactic/src/main/scala/org/scalactic/SetEqualityConstraints.scala
|
Scala
|
apache-2.0
| 5,407 |
/*
* Copyright 2016 Carlo Micieli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hascalator
package data
import Prelude._
/** Arbitrary-precision rational numbers, represented as a ratio of two `Integer` values.
* A rational number may be constructed using the `%%` operator.
* @author Carlo Micieli
* @since 0.0.1
*/
final class Rational(n: Integer, d: Integer) extends Ratio[Integer](n, d)
object Rational {
def apply(n: Integer): Rational = {
new Rational(n, 1)
}
def apply(n: Integer, d: Integer): Rational = {
new Rational(n, d)
}
implicit class int2Rational(val n: Integer) extends AnyVal {
def %%(d: Integer): Rational = apply(n, d)
}
}
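// Minimal usage sketch (not part of the original file). Assumes the Prelude
// `Integer` alias is an arbitrary-precision integer to which Int literals
// can be lifted, per the scaladoc above:
//   import Rational._
//   val half      = Rational(1, 2)        // via apply
//   val twoThirds = (2: Integer) %% 3     // via the %% operator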
|
CarloMicieli/hascalator
|
core/src/main/scala/io/hascalator/data/Rational.scala
|
Scala
|
apache-2.0
| 1,209 |
// Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.hfile.reader.concrete.mmap
// hbase-0.94.6-cdh4.4.0-sources/org/apache/hadoop/hbase/util/ClassSize.java
object ClassSize {
val REFERENCE = 8
val ARRAY = align(3 * REFERENCE)
val OBJECT = 2 * REFERENCE
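// align rounds a size up to the JVM's 8-byte object alignment,
// e.g. align(1) == 8, align(8) == 8, align(13) == 16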
def align(num: Int): Int = {
align(num.toLong).toInt
}
def align(num: Long): Long = {
(((num + 7) >> 3) << 3)
}
def is32BitJVM(): Boolean = {
System.getProperty("sun.arch.data.model").equals("32");
}
}
|
foursquare/fsqio
|
src/jvm/io/fsq/hfile/reader/concrete/mmap/ClassSize.scala
|
Scala
|
apache-2.0
| 513 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.codec
import java.time.Instant
import java.util.Date
import wvlet.airframe.msgpack.io.ByteArrayBuffer
import wvlet.airframe.msgpack.spi._
import wvlet.log.LogSupport
import scala.util.{Failure, Success, Try}
/**
*/
object JavaInstantTimeCodec extends MessageCodec[Instant] {
override def pack(p: Packer, v: Instant): Unit = {
// TODO airframe-msgpack in Codec interface
// Use msgpack Timestamp type
val buf = ByteArrayBuffer.newBuffer(15)
val cursor = WriteCursor(buf, 0)
OffsetPacker.packTimestamp(cursor, v)
val extData = buf.readBytes(0, cursor.lastWrittenBytes)
p.writePayload(extData, 0, cursor.lastWrittenBytes)
}
override def unpack(u: Unpacker, v: MessageContext): Unit = {
Try {
u.getNextFormat.getValueType match {
case ValueType.STRING =>
// Use ISO instant formatter
val isoInstantFormat = u.unpackString
wvlet.airframe.codec.Compat
.parseInstant(isoInstantFormat)
.getOrElse(Instant.ofEpochMilli(isoInstantFormat.toLong))
case ValueType.INTEGER =>
val epochMillis = u.unpackLong
Instant.ofEpochMilli(epochMillis)
case ValueType.EXTENSION =>
u.unpackTimestamp
case other =>
v.setIncompatibleFormatException(this, s"Cannot create Instant from ${other} type")
}
} match {
case Success(x) => v.setObject(x)
case Failure(e) => v.setError(e)
}
}
}
object JavaUtilDateCodec extends MessageCodec[Date] with LogSupport {
override def pack(p: Packer, v: Date): Unit = {
// Use Instant for encoding
JavaInstantTimeCodec.pack(p, v.toInstant)
}
override def unpack(u: Unpacker, v: MessageContext): Unit = {
JavaInstantTimeCodec.unpack(u, v)
if (!v.isNull) {
v.setObject(Date.from(v.getLastValue.asInstanceOf[Instant]))
}
}
}
|
wvlet/airframe
|
airframe-codec/src/main/scala/wvlet/airframe/codec/TimeCodec.scala
|
Scala
|
apache-2.0
| 2,452 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.containerpool.logging
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import akka.stream.scaladsl.Flow
import akka.util.ByteString
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.containerpool.Container
import org.apache.openwhisk.core.entity.{ActivationLogs, ExecutableWhiskAction, Identity, WhiskActivation}
import org.apache.openwhisk.http.Messages
import org.apache.openwhisk.core.database.UserContext
import scala.concurrent.{ExecutionContext, Future}
import spray.json._
/**
* Represents a single log line as read from a docker log
*/
protected[core] case class LogLine(time: String, stream: String, log: String) {
def toFormattedString = f"$time%-30s $stream: ${log.trim}"
}
protected[core] object LogLine extends DefaultJsonProtocol {
implicit val serdes = jsonFormat3(LogLine.apply)
}
object DockerToActivationLogStore {
/** Transforms chunked JsObjects into formatted strings */
val toFormattedString: Flow[ByteString, String, NotUsed] =
Flow[ByteString].map(_.utf8String.parseJson.convertTo[LogLine].toFormattedString)
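// Illustrative example (made-up values): the json-file driver emits lines
// like {"time":"2019-01-01T00:00:00Z","stream":"stdout","log":"hi\n"},
// which this flow renders (time padded to 30 chars) roughly as
//   2019-01-01T00:00:00Z           stdout: hi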
}
/**
* Docker based implementation of a LogStore.
*
* Relies on docker's implementation details with regards to the JSON log-driver. When using the JSON log-driver
* docker writes stdout/stderr to a JSON formatted file which is read by this store. Logs are written in the
* activation record itself.
*/
class DockerToActivationLogStore(system: ActorSystem) extends LogStore {
implicit val ec: ExecutionContext = system.dispatcher
implicit val mat: ActorMaterializer = ActorMaterializer()(system)
/* "json-file" is the log-driver that writes out to file */
override val containerParameters = Map("--log-driver" -> Set("json-file"))
/* As logs are already part of the activation record, just return that bit of it */
override def fetchLogs(activation: WhiskActivation, context: UserContext): Future[ActivationLogs] =
Future.successful(activation.logs)
override def collectLogs(transid: TransactionId,
user: Identity,
activation: WhiskActivation,
container: Container,
action: ExecutableWhiskAction): Future[ActivationLogs] = {
container
.logs(action.limits.logs.asMegaBytes, action.exec.sentinelledLogs)(transid)
.via(DockerToActivationLogStore.toFormattedString)
.runWith(Sink.seq)
.flatMap { seq =>
val possibleErrors = Set(Messages.logFailure, Messages.truncateLogs(action.limits.logs.asMegaBytes))
val errored = seq.lastOption.exists(last => possibleErrors.exists(last.contains))
val logs = ActivationLogs(seq.toVector)
if (!errored) {
Future.successful(logs)
} else {
Future.failed(LogCollectingException(logs))
}
}
}
}
object DockerToActivationLogStoreProvider extends LogStoreProvider {
override def instance(actorSystem: ActorSystem): LogStore = new DockerToActivationLogStore(actorSystem)
}
|
starpit/openwhisk
|
common/scala/src/main/scala/org/apache/openwhisk/core/containerpool/logging/DockerToActivationLogStore.scala
|
Scala
|
apache-2.0
| 3,938 |
package com.octobeat
import com.mpatric.mp3agic.Mp3File
/**
* Created by sam on 23/10/14.
*/
object Mp3Manager {
def loadFile(path:String):Mp3File = {
new Mp3File(path)
}
val passBy: PartialFunction[Mp3File, Mp3File] = {
case mp3file: Mp3File => mp3file
}
val removeId3v1Tag: PartialFunction[Mp3File, Mp3File] = {
case mp3file: Mp3File if mp3file.hasId3v1Tag => mp3file.removeId3v1Tag();mp3file
}
val removeId3v2Tag: PartialFunction[Mp3File, Mp3File] = {
case mp3file: Mp3File if mp3file.hasId3v2Tag => mp3file.removeId3v2Tag();mp3file
}
val removeCustomTag: PartialFunction[Mp3File, Mp3File] = {
case mp3file: Mp3File if mp3file.hasCustomTag => mp3file.removeCustomTag();mp3file
}
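// Each remove* step above is a partial function, defined only when the
// corresponding tag is present; `orElse passBy` makes each step total so
// the steps can be chained with `andThen`.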
val _removeTags = ( removeId3v1Tag orElse passBy) andThen (removeId3v2Tag orElse passBy) andThen (removeCustomTag orElse passBy)
def removeTags(mp3file : Mp3File):Mp3File = {
_removeTags(mp3file)
}
val id3v1TagInfo : PartialFunction[Mp3File, Map[String,String]] = {
case mp3file: Mp3File if mp3file.hasId3v1Tag => Map("Title" -> mp3file.getId3v1Tag.getTitle ,"Artist" -> mp3file.getId3v1Tag.getArtist )
}
val id3v2TagInfo : PartialFunction[Mp3File, Map[String,String]] = {
case mp3file: Mp3File if mp3file.hasId3v2Tag => Map("Title" -> mp3file.getId3v2Tag.getTitle ,"Artist" -> mp3file.getId3v2Tag.getArtist )
}
val id3TagInfo = id3v1TagInfo orElse id3v2TagInfo
def extractInfo(mp3file : Mp3File):Map[String,String] = {
// use the combined lookup and fall back to an empty map when the file
// carries neither an ID3v1 nor an ID3v2 tag
id3TagInfo.applyOrElse(mp3file, (_: Mp3File) => Map.empty[String, String]) //+ ("bitrate" -> mp3file.getBitrate)
}
}
|
sammyrulez/flaming-octo-beat
|
src/main/scala/com/octobeat/Mp3Manager.scala
|
Scala
|
mit
| 1,573 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.hadoop
import com.twitter.scalding._
import sources.{DailyPreprocessedSpanSource}
import com.twitter.zipkin.gen.{SpanServiceName, Constants, Annotation}
/**
* Obtain the IDs and the durations of the one hundred service calls which take the longest per service
*/
class WorstRuntimesPerTrace(args: Args) extends Job(args) with DefaultDateRangeJob {
val clientAnnotations = Seq(Constants.CLIENT_RECV, Constants.CLIENT_SEND)
val preprocessed = DailyPreprocessedSpanSource()
.read
.mapTo(0 -> ('service, 'trace_id, 'annotations)) {
s : SpanServiceName => (s.service_name, s.trace_id, s.annotations.toList)
}
val result = preprocessed
// let's find those client annotations and convert into service name and duration
.flatMap('annotations -> 'duration) { annotations: List[Annotation] =>
var clientSend: Option[Annotation] = None
var clientReceived: Option[Annotation] = None
annotations.foreach { a =>
if (Constants.CLIENT_SEND.equals(a.getValue)) clientSend = Some(a)
if (Constants.CLIENT_RECV.equals(a.getValue)) clientReceived = Some(a)
}
// only return a value if we have both annotations
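// (the duration is then the client-observed latency, CLIENT_RECV minus
// CLIENT_SEND; Zipkin annotation timestamps are typically in microseconds)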
for (cs <- clientSend; cr <- clientReceived)
yield (cr.timestamp - cs.timestamp)
}.discard('annotations)
//sort by duration, find the 100 largest
.groupBy('service, 'trace_id) { _.sum('duration) }
.groupBy('service) { _.sortBy('duration).reverse.take(100)}
.write(Tsv(args("output")))
}
|
davidbernick/zipkin
|
zipkin-hadoop/src/main/scala/com/twitter/zipkin/hadoop/WorstRuntimesPerTrace.scala
|
Scala
|
apache-2.0
| 2,121 |
package scala.build
import org.pantsbuild.jarjar
import org.pantsbuild.jarjar._
import org.pantsbuild.jarjar.util._
import scala.collection.JavaConverters._
import java.util.jar._
import java.io._
import sbt._
object JarJar {
sealed abstract class JarJarConfig {
def toPatternElement: PatternElement
}
object JarJarConfig {
case class Rule(pattern: String, result: String) extends JarJarConfig {
def toPatternElement: PatternElement = {
val rule = new jarjar.Rule
rule.setPattern(pattern)
rule.setResult(result)
rule
}
}
case class Keep(pattern: String) extends JarJarConfig {
def toPatternElement: PatternElement = {
val keep = new jarjar.Keep
keep.setPattern(pattern)
keep
}
}
}
sealed abstract class Entry {
def name: String
def time: Long
def data: Array[Byte]
}
case class JarEntryInput(jarFile: JarFile, entry: JarEntry) extends Entry {
def name = entry.getName.replace('\\', '/')
def time = entry.getTime
def data = sbt.IO.readBytes(jarFile.getInputStream(entry))
}
case class FileInput(base: File, file: File) extends Entry {
def name = file.relativeTo(base).get.getPath.replace('\\', '/')
def time = file.lastModified
def data = sbt.IO.readBytes(file)
}
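// MainProcessor is instantiated reflectively below (note setAccessible),
// presumably because the jarjar class and its constructor are not public.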
private def newMainProcessor(patterns: java.util.List[PatternElement], verbose: Boolean, skipManifest: Boolean): JarProcessor = {
val cls = Class.forName("org.pantsbuild.jarjar.MainProcessor")
val constructor = cls.getConstructor(classOf[java.util.List[_]], java.lang.Boolean.TYPE, java.lang.Boolean.TYPE)
constructor.setAccessible(true)
constructor.newInstance(patterns, Boolean.box(verbose), Boolean.box(skipManifest)).asInstanceOf[JarProcessor]
}
def apply(in: Iterator[Entry], outdir: File,
config: Seq[JarJarConfig], verbose: Boolean = false): Seq[File] = {
val patterns = config.map(_.toPatternElement).asJava
val processor = newMainProcessor(patterns, verbose, false)
def process(e: Entry): Option[File] = {
val struct = new EntryStruct()
struct.name = e.name
struct.time = e.time
struct.data = e.data
if (processor.process(struct)) {
if (struct.name.endsWith("/")) None
else {
val f = outdir / struct.name
try {
f.getParentFile.mkdirs()
sbt.IO.write(f, struct.data)
} catch {
case ex: Exception =>
throw new IOException(s"Failed to write ${e.name} / ${f.getParentFile} / ${f.getParentFile.exists}", ex)
}
Some(f)
}
}
else None
}
val processed = in.flatMap(entry => process(entry)).toSet
val getter = processor.getClass.getDeclaredMethod("getExcludes")
getter.setAccessible(true)
val excludes = getter.invoke(processor).asInstanceOf[java.util.Set[String]].asScala
val excluded = excludes.map { name =>
val f: File = outdir / name
if(f.exists && !f.delete())
throw new IOException("Failed to delete excluded file $f")
f
}
(processed -- excluded).toSeq
}
}
|
shimib/scala
|
project/JarJar.scala
|
Scala
|
bsd-3-clause
| 3,159 |
package com.matianl.explore
import org.junit.runner.RunWith
import org.specs2.mutable._
import org.specs2.runner._
/**
* Sample specification.
*
* This specification can be executed with: scala -cp <your classpath> ${package}.SpecsTest
* Or using maven: mvn test
*
* For more information on how to write or run specifications, please visit:
* http://etorreborre.github.com/specs2/guide/org.specs2.guide.Runners.html
*
*/
@RunWith(classOf[JUnitRunner])
class MySpecTest extends Specification {
"The 'Hello world' string" should {
"contain 11 characters" in {
"Hello world" must have size(11)
}
"start with 'Hello'" in {
"Hello world" must startWith("Hello")
}
"end with 'world'" in {
"Hello world" must endWith("world")
}
}
}
|
matinalhope/explore
|
scala-base/src/test/scala/com/matianl/explore/specs.scala
|
Scala
|
apache-2.0
| 792 |
/**
* Copyright 2015 Frank Austin Nothaft
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fnothaft.gnocchi.cli
import net.fnothaft.gnocchi.models.{ GenotypeState, ReducedDimension }
import net.fnothaft.gnocchi.clustering.WideFlatPCA
import net.fnothaft.gnocchi.sql.GnocchiContext._
import org.apache.spark.SparkContext._
import org.apache.spark.{ Logging, SparkContext }
import org.apache.spark.sql.SQLContext
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.utils.misc.HadoopUtil
import org.bdgenomics.utils.cli._
import org.kohsuke.args4j.{ Argument, Option => Args4jOption }
object ReduceDimensions extends BDGCommandCompanion {
val commandName = "reduceDimensions"
val commandDescription = "Reduces the dimensionality of all genotypes."
def apply(cmdLine: Array[String]) = {
new ReduceDimensions(Args4j[ReduceDimensionsArgs](cmdLine))
}
}
class ReduceDimensionsArgs extends Args4jBase {
@Argument(required = true, metaVar = "INPUT", usage = "The genotypes to process.", index = 0)
var input: String = null
@Argument(required = true, metaVar = "OUTPUT", usage = "The reduced dimensions for all samples.", index = 1)
var output: String = null
@Argument(required = true, metaVar = "DIMENSIONS", usage = "The number of dimensions to reduce down to.", index = 2)
var dimensions: Int = -1
@Args4jOption(required = false, name = "-saveAsText", usage = "Chooses to save as text. If not selected, saves to Parquet.")
var saveAsText = false
@Args4jOption(required = false, name = "-ploidy", usage = "Ploidy to assume. Default value is 2 (diploid).")
var ploidy = 2
}
class ReduceDimensions(protected val args: ReduceDimensionsArgs) extends BDGSparkCommand[ReduceDimensionsArgs] {
val companion = ReduceDimensions
def run(sc: SparkContext) {
require(args.dimensions >= 1, "Dimensionality (%d) must be positive.".format(args.dimensions))
// load in genotype data
val sqlContext = SQLContext.getOrCreate(sc)
import sqlContext.implicits._
val genotypes = sqlContext.read
.format("parquet")
.load(args.input)
val genotypeStates = sqlContext.toGenotypeStateDataFrame(genotypes, args.ploidy, sparse = true)
.as[GenotypeState]
// compute similarity
val dimensions = WideFlatPCA(genotypeStates, args.dimensions)
// save to disk
val format = if (args.saveAsText) {
"json"
} else {
"parquet"
}
dimensions.toDF()
.write
.format(format)
.save(args.output)
}
}
|
fnothaft/gnocchi
|
gnocchi-cli/src/main/scala/net/fnothaft/gnocchi/cli/ReduceDimensions.scala
|
Scala
|
apache-2.0
| 3,041 |
import sbt._
import sbt.Keys._
object QuickfixsBuild extends Build {
lazy val quickfixs = Project(
id = "quickfixs",
base = file("."),
settings = Project.defaultSettings ++ Seq(
name := "quickfixs",
organization := "com.bostontechnologies",
version := "1.0.0",
scalaVersion := "2.9.1",
crossScalaVersions := Seq("2.9.1", "2.9.3", "2.10.2"),
libraryDependencies += "org.apache.servicemix.bundles" % "org.apache.servicemix.bundles.quickfix" % "1.5.2_1",
libraryDependencies += "org.slf4j" % "slf4j-api" % "1.6.4",
libraryDependencies += "org.scala-tools.testing" %% "specs" % "1.6.9" % "test",
libraryDependencies += "junit" % "junit" % "4.8.2" % "test"
)
)
}
|
Forexware/quickfixs
|
project/QuickfixsBuild.scala
|
Scala
|
apache-2.0
| 733 |
package examples
object HelloScala {
def main(args: Array[String]): Unit = {
println("Hello, world!")
}
}
|
mattmoor/bazel-glibc
|
java/examples/HelloScala.scala
|
Scala
|
apache-2.0
| 114 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v2
import uk.gov.hmrc.ct.box.CtBoxIdentifier
abstract class J4 extends CtBoxIdentifier(name = "Tax Avoidance 4 Reference Number")
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600j/v2/J4.scala
|
Scala
|
apache-2.0
| 768 |
/*
* Copyright 2014-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.render
/** Mapping of XSL-FO tag names to their supported properties.
* All properties not supported by Apache FOP are omitted.
*
* @author Jens Halm
*/
trait FOProperties {
private val border = Set(
"border",
"border-top",
"border-bottom",
"border-left",
"border-right",
"border-color",
"border-style",
"border-spacing",
"border-width",
"border-before-color",
"border-before-style",
"border-before-width",
"border-after-color",
"border-after-style",
"border-after-width",
"border-start-color",
"border-start-style",
"border-start-width",
"border-end-color",
"border-end-style",
"border-end-width",
"border-top-color",
"border-top-style",
"border-top-width",
"border-bottom-color",
"border-bottom-style",
"border-bottom-width",
"border-left-color",
"border-left-style",
"border-left-width",
"border-right-color",
"border-right-style",
"border-right-width")
private val padding = Set(
"padding",
"padding-before",
"padding-after",
"padding-start",
"padding-end",
"padding-top",
"padding-bottom",
"padding-left",
"padding-right")
private val background = Set(
"background-color",
"background-image",
"background-repeat",
"background-position",
"background-position-horizontal",
"background-position-vertical")
private val blockMargin = Set(
"margin",
"margin-top",
"margin-bottom",
"margin-left",
"margin-right",
"space-before",
"space-after",
"start-indent",
"end-indent")
private val absolutePosition = Set(
"position",
"absolute-position",
"top",
"bottom",
"left",
"right")
private val dimension = Set(
"block-progression-dimension",
"inline-progression-dimension",
"height",
"width")
private val areaAlign = Set(
"vertical-align",
"alignment-adjust",
"alignment-baseline",
"baseline-shift",
"dominant-baseline")
private val break = Set(
"break-after",
"break-before",
"page-break-after",
"page-break-before")
private val keep = Set(
"keep-with-next",
"keep-with-previous")
private val keepPlus = keep ++ Set(
"page-break-inside",
"keep-together")
private val font = Set(
"font",
"font-family",
"font-size",
"font-style",
"font-weight")
private val hyphenation = Set(
"country",
"language",
"hyphenate",
"hyphenation-character",
"hyphenation-push-character-count",
"hyphenation-remain-character-count")
private val region =
border ++
padding ++
background ++
Set(
"region-name",
"extent",
"precedence",
"display-align",
"overflow",
"reference-orientation",
"writing-mode"
)
private val pageNumber =
border ++
padding ++
background ++
font ++
areaAlign ++
keep ++
Set(
"id",
"letter-spacing",
"line-height",
"text-decoration",
"text-transform",
"word-spacing",
"wrap-option"
)
private val embedded =
border ++
padding ++
background ++
areaAlign ++
dimension ++
keep ++
Set(
"id",
"content-height",
"content-width",
"scaling",
"display-align",
"line-height",
"overflow",
"text-align"
)
private val tablePart: Set[String] = border ++ padding ++ background
private val map = Map[String, Set[String]](
"page-sequence" -> Set(
"id",
"country",
"flow-map-reference",
"format",
"language",
"letter-value",
"grouping-separator",
"grouping-size",
"initial-page-number",
"force-page-count",
"master-reference",
"reference-orientation",
"writing-mode"),
"layout-master-set" -> Set.empty,
"page-sequence-master" -> Set(
"master-name"),
"single-page-master-reference" -> Set(
"master-reference"),
"repeatable-master-reference" -> Set(
"master-reference",
"maximum-repeats"),
"repeatable-page-master-alternatives" -> Set(
"maximum-repeats"),
"conditional-page-master-reference" -> Set(
"master-reference",
"page-position",
"odd-or-even",
"blank-or-not-blank"),
"simple-page-master" -> (
blockMargin ++ Set(
"master-name",
"page-height",
"page-width",
"reference-orientation",
"writing-mode")),
"region-body" -> (
border ++
padding ++
background ++
blockMargin ++ Set(
"region-name",
"column-count",
"column-gap",
"display-align",
"overflow",
"reference-orientation",
"writing-mode")),
"region-before" -> region,
"region-after" -> region,
"region-start" -> region,
"region-end" -> region,
"flow" -> Set(
"id",
"flow-name"),
"static-content" -> Set(
"id",
"flow-name"),
"title" -> (
border ++
padding ++
background ++
font ++ Set(
"color",
"line-height")),
"block" -> (
border ++
padding ++
background ++
font ++
blockMargin ++
hyphenation ++
break ++
keepPlus ++ Set(
"id",
"orphans",
"widows",
"color",
"hyphenation-ladder-count",
"last-line-end-indent",
"line-height",
"line-height-shift-adjustment",
"line-stacking-strategy",
"span",
"text-align",
"text-align-last",
"text-indent",
"white-space",
"white-space-treatment",
"white-space-collapse",
"linefeed-treatment",
"wrap-option")),
"block-container" -> (
border ++
padding ++
background ++
blockMargin ++
absolutePosition ++
dimension ++
break ++
keepPlus ++ Set(
"id",
"display-align",
"reference-orientation",
"overflow",
"span",
"writing-mode")),
"inline" -> (
border ++
padding ++
background ++
font ++
dimension ++
keepPlus ++
areaAlign ++ Set(
"id",
"color",
"line-height",
"text-decoration",
"wrap-option")),
"leader" -> (
border ++
padding ++
background ++
font ++
keep ++
areaAlign ++ Set(
"id",
"color",
"leader-length",
"leader-pattern",
"leader-pattern-width",
"rule-style",
"rule-thickness",
"line-height",
"word-spacing")),
"page-number" -> pageNumber,
"page-number-citation" -> (pageNumber + "ref-id"),
"page-number-citation-last" -> (pageNumber + "ref-id"),
"character" -> (
border ++
padding ++
background ++
font ++
hyphenation ++
keep ++
areaAlign ++ Set(
"id",
"color",
"character",
"letter-spacing",
"line-height",
"text-decoration",
"text-transform",
"word-spacing")),
"basic-link" -> (
border ++
padding ++
background ++
keepPlus ++
areaAlign ++ Set(
"id",
"color",
"line-height",
"external-destination",
"internal-destination",
"show-destination")),
"external-graphic" -> (embedded + "src"),
"instream-foreign-object" -> embedded,
"bidi-override" -> (
font ++ Set(
"color",
"direction",
"letter-spacing",
"line-height",
"word-spacing")),
"table" -> (
border ++
padding ++
background ++
blockMargin ++
dimension ++
break ++
keepPlus ++ Set(
"id",
"border-separation",
"border-collapse",
"table-omit-footer-at-break",
"table-omit-header-at-break",
"writing-mode")),
"table-column" -> (
border ++
padding ++
background ++ Set(
"column-number",
"column-width",
"number-columns-repeated",
"number-columns-spanned")),
"table-header" -> tablePart,
"table-body" -> tablePart,
"table-footer" -> tablePart,
"table-row" -> (tablePart ++
dimension ++
break ++
keepPlus),
"table-cell" -> (tablePart ++
dimension ++
break ++
keepPlus ++ Set(
"id",
"display-align",
"column-number",
"starts-row",
"ends-row",
"number-columns-spanned",
"number-rows-spanned")),
"list-block" -> (
border ++
padding ++
background ++
blockMargin ++
break ++
keepPlus ++ Set(
"id",
"provisional-distance-between-starts",
"provisional-label-separation")),
"list-item" -> (
border ++
padding ++
background ++
blockMargin ++
break ++
keepPlus ++ Set(
"id")),
"list-item-label" -> Set("id","keep-together","end-indent"),
"list-item-body" -> Set("id","keep-together","start-indent"),
"footnote" -> Set("id"),
"footnote-body" -> Set("id"),
"marker" -> Set("marker-class-name"),
"retrieve-marker" -> Set(
"retrieve-class-name",
"retrieve-position",
"retrieve-boundary"),
"bookmark-tree" -> Set.empty,
"bookmark" -> Set(
"external-destination",
"internal-destination",
"starting-state"),
"bookmark-title" -> Set(
"color",
"font-style",
"font-weight"),
"wrapper" -> Set("id"),
"declarations" -> Set.empty,
"color-profile" -> Set("src")
).withDefaultValue(Set())
/** Filters out all unsupported attributes for the specified tagName
* and return a new sequence containing only attributes valid for that tag.
*
* @param tagName the name of tag to filter the attributes for
* @param attributes the attributes to filter as a sequence of key-name tuples
* @return a new sequence containing only attributes valid for that tag
*/
def filterAttributes (tagName: String, attributes: Seq[(String, Any)]): Seq[(String, Any)] = {
val supportedProps = map(tagName.drop(3)) // strip the "fo:" prefix before the lookup
attributes.filter(pair => supportedProps(pair._1))
}
}
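// Minimal usage sketch (hypothetical, not part of the original file):
//   object FO extends FOProperties
//   FO.filterAttributes("fo:block", Seq("id" -> "b1", "bogus" -> "x"))
//   // => Seq("id" -> "b1") -- unsupported attributes are dropped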
|
amuramatsu/Laika
|
core/src/main/scala/laika/render/FOProperties.scala
|
Scala
|
apache-2.0
| 11,458 |
// DisplayVector.scala
def show(n:Int):Unit = { println("> "+ n) }
val v = Vector(1, 2, 3, 4)
v.foreach(show)
|
P7h/ScalaPlayground
|
Atomic Scala/atomic-scala-examples/examples/27_FunctionsasObjects/DisplayVector.scala
|
Scala
|
apache-2.0
| 111 |
package peregin.gpv.gui.gauge
import java.awt._
import java.awt.geom.Arc2D
import peregin.gpv.model.{Sonda, MinMax, InputValue}
import peregin.gpv.util.Trigo._
import peregin.gpv.util.UnitConverter
trait RadialSpeedGauge extends GaugePainter {
lazy val dummy: InputValue = InputValue(27.81, MinMax.max(62))
override def defaultInput: InputValue = dummy
override def sample(sonda: Sonda): Unit = {input = sonda.speed}
override def paint(g: Graphics2D, w: Int, h: Int): Unit = {
super.paint(g, w, h)
val box = math.min(w, h)
val strokeWidth = box / 5
var dia = box - strokeWidth * 1.5
// draw a thick open arc
var x = (w - dia) / 2
var y = (h - dia) / 2
val start = -45
val extent = 270
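// Arc2D angles are in degrees, with 0 at the 3 o'clock position and
// positive values sweeping counter-clockwise; start = -45 with extent = 270
// leaves a 90-degree gap centred at the bottom of the gauge.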
var arc = new Arc2D.Double(x, y, dia, dia, start, extent, Arc2D.OPEN)
g.setStroke(new BasicStroke(strokeWidth, BasicStroke.CAP_ROUND, BasicStroke.JOIN_MITER, 10.0f, null, 0.0f))
g.setColor(Color.black)
g.draw(arc)
// draw the border
dia = box - strokeWidth / 2
x = (w - dia) / 2
y = (h - dia) / 2
arc = new Arc2D.Double(x, y, dia, dia, start, extent, Arc2D.OPEN)
g.setColor(Color.white)
val borderStroke = new BasicStroke(math.max(2, strokeWidth / 10))
g.setStroke(borderStroke)
g.draw(arc)
// draw the ticks and units
g.setColor(Color.white)
val r = dia / 2 // the radius of the circle
val cx = w / 2
val cy = h / 2
val ticks = input.boundary.tenths
val longTickLength = math.max(2, r / 10)
val smallTickLength = math.max(1, longTickLength / 2)
val tickStroke = new BasicStroke(math.max(1, strokeWidth / 20))
g.setFont(gaugeFont.deriveFont((longTickLength + 2).toFloat))
val ticksWithNumber = if (input.boundary.max > 180) 30 else if (input.boundary.max > 100) 20 else 10
for (t <- 0 to ticks) {
val angle = -start - t * extent / ticks
val tickLength = if (t % ticksWithNumber == 0) {
g.setStroke(borderStroke)
val text = f"${UnitConverter.speed(ticks - t, units)}%2.0f"
val tb = g.getFontMetrics.getStringBounds(text, g)
val tw = tb.getWidth / 2
val th = tb.getHeight / 2
val tp = r - longTickLength - tw - 2
g.drawString(text, polarX(cx, tp, angle) - tw.toInt, polarY(cy, tp, angle) + th.toInt)
longTickLength
} else {
g.setStroke(tickStroke)
smallTickLength
}
g.drawLine(polarX(cx, r, angle), polarY(cy, r, angle), polarX(cx, r - tickLength, angle), polarY(cy, r - tickLength, angle))
}
// draw the colored sections of the scale
dia = box - strokeWidth * 2.5
x = (w - dia) / 2
y = (h - dia) / 2
g.setColor(Color.red)
arc = new Arc2D.Double(x, y, dia, dia, start, 50, Arc2D.OPEN)
g.draw(arc)
g.setColor(Color.yellow)
arc = new Arc2D.Double(x, y, dia, dia, start + 50, 50, Arc2D.OPEN)
g.draw(arc)
g.setColor(Color.green)
arc = new Arc2D.Double(x, y, dia, dia, start + 100, 100, Arc2D.OPEN)
g.draw(arc)
g.setColor(Color.gray)
arc = new Arc2D.Double(x, y, dia, dia, start + 200, extent - 200, Arc2D.OPEN)
g.draw(arc)
// draw current speed
g.setFont(gaugeFont.deriveFont(Font.BOLD, (longTickLength * 4.7).toFloat))
val text = f"${UnitConverter.speed(input.current, units)}%2.1f"
val tb = g.getFontMetrics.getStringBounds(text, g)
textWidthShadow(g, text, (w - tb.getWidth) / 2, cy + box / 2 - tb.getHeight * 1.2)
// draw unit
g.setFont(gaugeFont.deriveFont(Font.BOLD, (longTickLength * 2).toFloat))
val utext = UnitConverter.speedUnits(units)
val utb = g.getFontMetrics.getStringBounds(utext, g)
textWidthShadow(g, utext, (w - utb.getWidth) / 2, cy + box / 2 - utb.getHeight * 1.5)
// draw pointer
g.setColor(Color.black)
var cr = (longTickLength / 2).toInt + 1
g.fillOval(cx - cr, cy - cr, 2 * cr, 2 * cr)
g.setColor(Color.yellow)
cr -= 1
g.fillOval(cx - cr, cy - cr, 2 * cr, 2 * cr)
val pointerAngle = - extent - start + input.current * extent / input.boundary.tenths
val pointer = r - strokeWidth / 1.2
val pointerStroke = new BasicStroke(math.max(2, strokeWidth / 5), BasicStroke.CAP_ROUND, BasicStroke.JOIN_MITER, 10.0f, null, 0.0f)
g.setStroke(pointerStroke)
val px = polarX(cx, pointer, pointerAngle)
val py = polarY(cy, pointer, pointerAngle)
g.setColor(Color.black)
g.drawLine(cx + 1, cy + 1, px + 1, py + 1)
g.setColor(Color.yellow)
g.drawLine(cx, cy, px, py)
}
}
|
peregin/gps-overlay-on-video
|
src/main/scala/peregin/gpv/gui/gauge/RadialSpeedGauge.scala
|
Scala
|
mit
| 4,503 |
package models.daos.slickdaos
import models.User
import play.api.db.slick._
import slick.driver.PostgresDriver.simple._
import DBTableDefinitions._
import com.mohiva.play.silhouette.api.LoginInfo
import scala.concurrent.Future
import java.util.UUID
import play.Logger
import models.daos.UserDAO
import play.api.db.DB
import javax.inject.Inject
import slick.profile.RelationalProfile
import scala.concurrent.ExecutionContext
/**
* Give access to the user object using Slick
*/
class UserDAOSlick @Inject() (dbConfigProvider : DatabaseConfigProvider, slickQueries : SlickQueries) extends UserDAO {
lazy val dbConfig = dbConfigProvider.get[RelationalProfile]
import dbConfig.driver.api._
val db = dbConfig.db
/**
* Finds a user by its login info.
*
* @param loginInfo The login info of the user to find.
* @return The found user or None if no user for the given login info could be found.
*/
def find(loginInfo: LoginInfo)(implicit ec : ExecutionContext) =
db.run (slickQueries.user.find(loginInfo))
/**
* Finds a user by its user ID.
*
* @param userID The ID of the user to find.
* @return The found user or None if no user for the given ID could be found.
*/
def find(userID: UUID)(implicit ec : ExecutionContext) = db.run (slickQueries.user.find(userID))
/**
* Saves a user.
*
* @param user The user to save.
* @return The saved user.
*/
def save(user: User)(implicit ec : ExecutionContext) = db.run (slickQueries.user.save(user))
}
|
joaoraf/tripspace
|
app/models/daos/slickdaos/UserDAOSlick.scala
|
Scala
|
apache-2.0
| 1,515 |
package uk.gov.hmrc.smartstub
import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
import org.scalatest.FlatSpec
class EnumerableSpec extends FlatSpec with Checkers {
"A Nino" should "convert back and from a Long unchanged" in {
import Enumerable.instances.ninoEnum
check{(a: Long) =>
val t = (a % ninoEnum.size).abs
t == ninoEnum.apply(t).asLong}
}
}
|
hmrclt/stub-data-generator
|
src/test/scala/EnumerableSpec.scala
|
Scala
|
apache-2.0
| 424 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart.graphics
import java.awt.Font
import java.awt.Graphics2D
import com.netflix.atlas.chart.model.PlotDef
/**
* Draws a legend for a given plot.
*
* @param plot
* Plot definition corresponding to the legend.
* @param label
* Overall label to show for this legend.
* @param showStats
* Whether or not to show basic line statistics for the legend entries.
* @param maxEntries
* Maximum number of entries to show in the legend.
*/
case class Legend(
styles: Styles,
plot: PlotDef,
label: Option[String],
showStats: Boolean,
maxEntries: Int
) extends Element
with VariableHeight {
private val numEntries = plot.data.size
private val header = HorizontalPadding(5) :: label.toList.map { str =>
val bold = ChartSettings.normalFont.deriveFont(Font.BOLD)
val headerColor = plot.getAxisColor(styles.text.color)
Text(str, font = bold, alignment = TextAlignment.LEFT, style = Style(headerColor))
}
private val entries = plot.data.take(maxEntries).flatMap { data =>
List(HorizontalPadding(2), LegendEntry(styles, plot, data, showStats))
}
private val footer =
if (numEntries <= maxEntries) Nil
else {
val remaining = numEntries - maxEntries
val txt =
Text(s"... $remaining suppressed ...", alignment = TextAlignment.LEFT, style = styles.text)
List(HorizontalPadding(2), txt)
}
private val block = Block(header ::: entries ::: footer)
override def minHeight: Int = block.minHeight
override def computeHeight(g: Graphics2D, width: Int): Int = block.computeHeight(g, width)
override def draw(g: Graphics2D, x1: Int, y1: Int, x2: Int, y2: Int): Unit = {
block.draw(g, x1, y1, x2, y2)
}
}
|
copperlight/atlas
|
atlas-chart/src/main/scala/com/netflix/atlas/chart/graphics/Legend.scala
|
Scala
|
apache-2.0
| 2,352 |
package carryx.amazon.api.stackable
/**
* @author alari ([email protected])
* @since 31.10.13 18:54
*
* @see http://docs.aws.amazon.com/AWSECommerceService/latest/DG/RG_Images.html
*/
trait ImagesRG extends RG{
self: AmazonItem =>
abstract override def rgName = buildName(super.rgName, "Images")
lazy val images = ImagesRG.readImages(node)
lazy val imageSets = node \\ "ImageSets" \\ "ImageSet" map {n =>
(n \\ "@Category").text.toString -> ImagesRG.readImages(n)
} toMap
}
object ImagesRG {
case class Image(url: String, height: Int, heightUnits: String, width: Int, widthUnits: String)
private def readImages(x: xml.Node): Map[String,Image] = {
x.child flatMap { i =>
// keep only *Image child elements, stripping the "Image" suffix from the
// label, e.g. "SmallImage" -> "Small"
if(i.label.endsWith("Image")) Some(
i.label.substring(0, i.label.length - 5) -> Image(
i \\ "URL" text,
(i \\ "Height" text).toString.toInt,
i \\ "Height" \\ "@Units" text,
(i \\ "Width" text).toString.toInt,
i \\ "Width" \\ "@Units" text
))
else None
} toMap
}
}
|
alari/amazon-scala-ecommerce
|
src/main/scala/carryx/amazon/api/stackable/ImagesRG.scala
|
Scala
|
apache-2.0
| 1,069 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest3_0_1
import org.jetbrains.plugins.scala.DependencyManagerBase._
import org.jetbrains.plugins.scala.base.libraryLoaders.{IvyManagedLoader, LibraryLoader}
import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestTestCase
/**
* @author Roman.Shein
* @since 10.03.2017
*/
abstract class Scalatest2_11_3_0_1_Base extends ScalaTestTestCase {
override protected def additionalLibraries: Seq[LibraryLoader] = IvyManagedLoader(
"org.scala-lang.modules" %% "scala-xml" % "1.0.6",
"org.scalatest" %% "scalatest" % "3.0.1",
"org.scalactic" %% "scalactic" % "3.0.1"
) :: Nil
}
|
jastice/intellij-scala
|
scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest3_0_1/Scalatest2_11_3_0_1_Base.scala
|
Scala
|
apache-2.0
| 688 |
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val buildOrganization = "io.github.chaosky"
val appName = "loom"
val appVersion = "1.0-SNAPSHOT"
val appDependencies = Seq(
// Add your project dependencies here,
// "org.seleniumhq.selenium" % "selenium-firefox-driver" % "2.33.0" % "test",
"com.googlecode.xmemcached" % "xmemcached" % "1.4.1",
"com.alibaba" % "fastjson" % "1.1.32",
jdbc,
anorm
)
val main = play.Project(appName, appVersion, appDependencies).settings(
//.settings(defaultScalaSettings: _*)
net.virtualvoid.sbt.graph.Plugin.graphSettings ++
Seq(
// Add your own project settings here
resolvers ++= Seq("snapshots" at "http://oss.sonatype.org/content/repositories/snapshots",
"releases" at "http://oss.sonatype.org/content/repositories/releases"),
organization := buildOrganization,
scalacOptions ++= Seq("-feature")
): _*
)
}
|
chaosky/loom
|
project/Build.scala
|
Scala
|
mit
| 989 |
package mesosphere.marathon
package state
import com.wix.accord._
import mesosphere.UnitTest
import mesosphere.marathon.api.serialization.VolumeSerializer
import mesosphere.marathon.api.v2.ValidationHelper
import org.apache.mesos.Protos.Resource.DiskInfo.Source
import org.apache.mesos.Protos.Volume.Mode
class VolumeTest extends UnitTest {
import mesosphere.marathon.test.MarathonTestHelper.constraint
def survivesProtobufSerializationRoundtrip(title: => String, volume: => Volume): Unit = {
s"$title survives protobuf serialization round-trip" in {
val protobuf = VolumeSerializer.toProto(volume)
val resurrected = Volume(protobuf)
resurrected should be(volume)
}
}
def persistent(info: PersistentVolumeInfo, containerPath: String = "cpath", mode: Mode = Mode.RW): PersistentVolume =
PersistentVolume(
containerPath = containerPath,
persistent = info,
mode = mode
)
def external(info: ExternalVolumeInfo, containerPath: String = "cpath", mode: Mode = Mode.RW): ExternalVolume =
ExternalVolume(
containerPath = containerPath,
external = info,
mode = mode
)
trait Fixture {
val rootVolNoConstraints = PersistentVolumeInfo(
1024,
constraints = Set.empty)
val pathVolWithConstraint = PersistentVolumeInfo(
1024,
`type` = DiskType.Path,
constraints = Set(constraint("path", "LIKE", Some("valid regex"))))
val mountVolWithMaxSize = PersistentVolumeInfo(
1024,
`type` = DiskType.Mount,
maxSize = Some(2048))
val extVolNoSize = ExternalVolumeInfo(
name = "volname",
provider = "provider",
options = Map("foo" -> "bar")
)
val extVolWithSize = ExternalVolumeInfo(
size = Option(1),
name = "volname",
provider = "provider",
options = Map("foo" -> "bar", "baz" -> "qaw")
)
val hostVol = DockerVolume(
containerPath = "cpath",
hostPath = "/host/path",
mode = Mode.RW
)
}
object Fixture extends Fixture
"Volume" should {
behave like survivesProtobufSerializationRoundtrip("root vol, no constraints", persistent(Fixture.rootVolNoConstraints))
behave like survivesProtobufSerializationRoundtrip("path vol w/ constraint", persistent(Fixture.pathVolWithConstraint))
behave like survivesProtobufSerializationRoundtrip("mount vol w/ maxSize", persistent(Fixture.mountVolWithMaxSize))
behave like survivesProtobufSerializationRoundtrip("ext vol w/o size", external(Fixture.extVolNoSize))
behave like survivesProtobufSerializationRoundtrip("ext vol w/ size", external(Fixture.extVolWithSize))
behave like survivesProtobufSerializationRoundtrip("host vol", Fixture.hostVol)
"validating PersistentVolumeInfo constraints accepts an empty constraint list" in new Fixture {
validate(rootVolNoConstraints).isSuccess shouldBe true
}
"validating PersistentVolumeInfo constraints rejects unsupported fields" in {
val pvi = PersistentVolumeInfo(
1024,
`type` = DiskType.Path,
constraints = Set(constraint("invalid", "LIKE", Some("regex"))))
val result = validate(pvi)
result.isSuccess shouldBe false
ValidationHelper.getAllRuleConstrains(result).map(_.message) shouldBe Set("Unsupported field")
}
"validating PersistentVolumeInfo constraints rejected for root resources" in {
val result = validate(
PersistentVolumeInfo(
1024,
`type` = DiskType.Root,
constraints = Set(constraint("path", "LIKE", Some("regex")))))
result.isSuccess shouldBe false
ValidationHelper.getAllRuleConstrains(result).map(_.message) shouldBe Set("Constraints on root volumes are not supported")
}
"validating PersistentVolumeInfo constraints rejects bad regex" in {
val pvi = PersistentVolumeInfo(
1024,
`type` = DiskType.Path,
constraints = Set(constraint("path", "LIKE", Some("(bad regex"))))
val result = validate(pvi)
result.isSuccess shouldBe false
ValidationHelper.getAllRuleConstrains(result).map(_.message) shouldBe Set("Invalid regular expression")
}
"validating PersistentVolumeInfo accepts a valid constraint" in new Fixture {
val result = validate(pathVolWithConstraint)
result.isSuccess shouldBe true
}
"validating PersistentVolumeInfo maxSize parameter wrt type" in new Fixture {
val resultRoot = validate(
PersistentVolumeInfo(1024, `type` = DiskType.Root, maxSize = Some(2048)))
resultRoot.isSuccess shouldBe false
ValidationHelper.getAllRuleConstrains(resultRoot).map(_.message) shouldBe Set("Only mount volumes can have maxSize")
val resultPath = validate(
PersistentVolumeInfo(1024, `type` = DiskType.Path, maxSize = Some(2048)))
resultPath.isSuccess shouldBe false
ValidationHelper.getAllRuleConstrains(resultPath).map(_.message) shouldBe Set("Only mount volumes can have maxSize")
validate(mountVolWithMaxSize).isSuccess shouldBe true
}
"validating that DiskSource asMesos converts to an Option Mesos Protobuffer" in {
DiskSource(DiskType.Root, None).asMesos shouldBe None
val Some(pathDisk) = DiskSource(DiskType.Path, Some("/path/to/folder")).asMesos
pathDisk.getPath.getRoot shouldBe "/path/to/folder"
pathDisk.getType shouldBe Source.Type.PATH
val Some(mountDisk) = DiskSource(DiskType.Mount, Some("/path/to/mount")).asMesos
mountDisk.getMount.getRoot shouldBe "/path/to/mount"
mountDisk.getType shouldBe Source.Type.MOUNT
a[IllegalArgumentException] shouldBe thrownBy {
DiskSource(DiskType.Root, Some("/path")).asMesos
}
a[IllegalArgumentException] shouldBe thrownBy {
DiskSource(DiskType.Path, None).asMesos
}
a[IllegalArgumentException] shouldBe thrownBy {
DiskSource(DiskType.Mount, None).asMesos
}
}
}
}
|
Caerostris/marathon
|
src/test/scala/mesosphere/marathon/state/VolumeTest.scala
|
Scala
|
apache-2.0
| 5,950 |
/*
* Copyright (C) 2012 Lalit Pant <[email protected]>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo
package xscala
// Do not format source. It messes up help code formatting.
object Help {
implicit def elem2str(e: xml.Elem) = e.toString
val CommonContent = Map[String, String](
"repeat" ->
<div>
<strong>repeat</strong>(n){{ }} - Repeats the commands within braces n number of times.<br/>
<br/>
<em>Example:</em> <br/><br/>
<pre>
clear()
// make a square with the help of the repeat command
repeat (4) {{
forward(100)
right()
}}
</pre>
</div>
,
"repeati" -> "repeati(n) {i => } - Repeats the commands within braces n number of times. The current repeat index is available within the braces.",
"repeatWhile" -> "repeatWhile(cond) {} - Repeats the commands within braces while the given condition is true.",
"repeatUntil" -> "repeatUntil(cond) {} - Repeats the commands within braces until the given condition is true.",
"zoom" ->
"""zoom(factor) - Zooms in by the given factor, leaving the center point unchanged.<br/>
<br/>
zoom(factor, cx, cy) - Zooms in by the given factor, and positions (cx, cy) at the center of the turtle canvas.
""",
"gridOn" -> "gridOn() - Shows a grid on the turtle canvas.",
"gridOff" -> "gridOff() - Hides the grid on the turtle canvas.",
"axesOn" -> "axesOn() - Shows the X and Y axes on the turtle canvas.",
"axesOff" -> "axesOff() - Hides the X and Y axes on the turtle canvas.",
"showScriptInOutput" -> "showScriptInOutput() - Enables the display of scripts in the output window when they run.",
"hideScriptInOutput" -> "hideScriptInOutput() - Stops the display of scripts in the output window.",
"showVerboseOutput" -> "showVerboseOutput() - Enables the display of output from the Scala interpreter. By default, output from the interpreter is shown only for single line scripts.",
"hideVerboseOutput" -> "hideVerboseOutput() - Stops the display of output from the Scala interpreter.",
"retainSingleLineCode" -> "retainSingleLineCode() - Makes Kojo retain a single line of code after running it. By default, single lines of code are cleared after running.",
"clearSingleLineCode" -> "clearSingleLineCode() - Makes Kojo clear a single line of code after running it. This is the default behavior.",
"version" -> "version - Displays the version of Scala being used.",
"println" -> "println(obj) - Displays the given object as a string in the output window, with a newline at the end.",
"print" -> "print(obj) - Displays the given object as a string in the output window, without a newline at the end.",
"readln" -> "readln(promptString) - Displays the given prompt in the output window and reads a line that the user enters.",
"readInt" -> "readInt(promptString) - Displays the given prompt in the output window and reads an Integer value that the user enters.",
"readDouble" -> "readDouble(promptString) - Displays the given prompt in the output window and reads a Double-precision Real value that the user enters.",
"random" -> "random(upperBound) - Returns a random Integer between 0 (inclusive) and upperBound (exclusive).",
"randomDouble" -> "randomDouble(upperBound) - Returns a random Double-precision Real between 0 (inclusive) and upperBound (exclusive).",
"inspect" -> "inspect(obj) - Opens up a window showing the internal fields of the given object",
"playMusic" -> "playMusic(score) - Plays the specified melody, rhythm, or score.",
"playMusicUntilDone" -> "playMusicUntilDone(score) - Plays the specified melody, rhythm, or score, and waits till the music finishes.",
"playMusicLoop" -> "playMusicLoop(score) - Plays the specified melody, rhythm, or score in the background - in a loop.",
"textExtent" -> "textExtent(text, fontSize) - Determines the size/extent of the given text fragment for the given font size.",
"runInBackground" -> "runInBackground(command) - Runs the given code in the background, concurrently with other code that follows right after this command.",
"playMp3" -> "playMp3(fileName) - Plays the specified MP3 file.",
"playMp3Loop" -> "playMp3Loop(fileName) - Plays the specified MP3 file in the background.",
"ColorHSB" -> "ColorHSB(h, s, b) - Creates a color with the given Hue (0-360), Saturation (0-100), and Brighness (0-100) values.",
"Color" -> "Color(r, g, b, opac) - Creates a color with the given red, green, blue, and opacity (optional) values.",
"ColorG" -> "ColorG(x1, y1, color1, x2, y2, color2, cyclic) - Creates a color gradient for filling shapes. The cyclic value is optional.",
"setBackground" -> "setBackground(color) - Sets the canvas background to the specified color. You can use predefined colors for setting the background, or you can create your own colors using the Color, ColorHSB, and ColorG functions.",
"setBackgroundH" -> "setBackgroundH(color1, color2) - Sets the canvas background to a horizontal color gradient defined by the two specified colors.",
"setBackgroundV" -> "setBackgroundV(color1, color2) - Sets the canvas background to a vertical color gradient defined by the two specified colors."
)
val TwContent = Map[String, String](
"forward" ->
<div>
<strong>forward</strong>(numSteps) - Moves the turtle forward by the given number of steps. <br/>
<br/>
<em>Example:</em> <br/><br/>
<pre>
clear()
// move forward by 100 steps
forward(100)
// move forward by 200 steps
forward(200)
</pre>
</div>
,
"back" ->
<div>
<strong>back</strong>(numSteps) - Moves the turtle back by the given number of steps. <br/>
<br/>
<em>Example:</em> <br/><br/>
<pre>
clear()
// move back by 100 steps
back(100)
// move back by 200 steps
back(200)
</pre>
</div>
,
"home" ->
<div>
<strong>home</strong>() - Moves the turtle to its original location, and makes it point north. <br/>
<br/>
<em>Example:</em> <br/><br/>
<pre>
clear()
// move the turtle out
forward(100)
right()
forward(50)
// now take it back home
home()
</pre>
</div>
,
"setPosition" ->
<div>
<strong>setPosition</strong>(x, y) - Sends the turtle to the point (x, y) without drawing a line. The turtle's heading is not changed. <br/>
<br/>
<em>Examples:</em> <br/><br/>
<pre>
setPosition(100, 50)
setPosition(80, 150)
</pre>
</div>
,
"position" ->
<div>
<strong>position</strong> - Tells you the turtle's current position. <br/>
<br/>
<em>Example:</em> <br/><br/>
<pre>
clear()
// move the turtle out
forward(100)
right()
forward(50)
// now report its position
print(position) // Point(50.00, 100.00)
</pre>
</div>
,
"style" -> "style - Tells you the turtle's current style. See the help for saveStyle() for more information on styles.",
"moveTo" -> "moveTo(x, y) - Turns the turtle towards (x, y) and moves the turtle to that point. ",
"turn" ->
<div>
<strong>turn</strong>(angle) - Turns the turtle through the specified angle.<br/>
Positive angles are in the anti-clockwise direction. Negative angles are in the clockwise direction. <br/>
<br/>
<em>Note: </em>It's easier to use <strong>left</strong>(angle) or <strong>right</strong>(angle) to turn the turtle.
</div>
,
"right" ->
<div>
<strong>right</strong>() - Turns the turtle 90 degrees right (clockwise). <br/>
<strong>right</strong>(angle) - Turns the turtle right (clockwise) through the given angle in degrees.<br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
// turn right by 90 degrees
right()
// turn right by 30 degrees
right(30)
</pre>
</div>
,
"left" ->
<div>
<strong>left</strong>() - Turns the turtle 90 degrees left (anti-clockwise). <br/>
<strong>left</strong>(angle) - Turns the turtle left (anti-clockwise) through the given angle in degrees.<br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
// turn left by 90 degrees
left()
// turn left by 30 degrees
left(30)
</pre>
</div>
,
"towards" -> "towards(x, y) - Turns the turtle towards the point (x, y).",
"setHeading" -> "setHeading(angle) - Sets the turtle's heading to angle (0 is towards the right side of the screen ('east'), 90 is up ('north')).",
"heading" -> "heading - Queries the turtle's heading (0 is towards the right side of the screen ('east'), 90 is up ('north')).",
"penDown" ->
<div>
<strong>penDown</strong>() - Pushes the turtle's pen down, and makes it draw lines as it moves. <br/>
The turtle's pen is down by default. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
// pull the turtle's pen up
penUp()
// the turtle moves forward without drawing a line
forward(100)
// push the turtle's pen down
penDown()
// now the turtle draws a line as it moves forward
forward(100)
</pre>
</div>
,
"penUp" ->
<div>
<strong>penUp</strong>() - Pulls the turtle's pen up, and prevents it from drawing lines as it moves. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
// pull the turtle's pen up
penUp()
// the turtle moves forward without drawing a line
forward(100)
// push the turtle's pen down
penDown()
// now the turtle draws a line as it moves forward
forward(100)
</pre>
</div>
,
"setPenColor" ->
<div>
<strong>setPenColor</strong>(color) - Specifies the color of the pen that the turtle draws with. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
setPenColor(blue)
// makes a blue line
forward(100)
setPenColor(green)
// makes a green line
forward(100)
</pre>
</div>
,
"setFillColor" ->
<div>
<strong>setFillColor</strong>(color) - Specifies the fill color of the figures drawn by the turtle. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
setFillColor(blue)
// make a circle filled with blue
circle(50)
setFillColor(green)
// make a circle filled with green
circle(50)
</pre>
</div>
,
"setPenThickness" ->
<div>
<strong>setPenThickness</strong>(thickness) - Specifies the width of the pen that the turtle draws with. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
setPenThickness(10)
// make a line that is 10 units thick
forward(100)
setPenThickness(15)
// make a line that is 15 units thick
forward(100)
</pre>
</div>
,
"setPenFontSize" ->
<div>
<strong>setPenFontSize</strong>(n) - Specifies the font size of the pen that the turtle writes with. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
setPenFontSize(15)
// write with a font size of 15
write("Hi There")
setPenFontSize(20)
// write with a font size of 20
write("Hi There")
</pre>
</div>
,
"savePosHe" ->
<div>
<strong>savePosHe</strong>() - Saves the turtle's current position and heading, so that they can
easily be restored later with a <tt>restorePosHe()</tt>.<br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
// save the turtle's position and heading
savePosHe()
// move wherever
forward(100)
right(45)
forward(60)
// now restore the saved position and heading,
          // so that the turtle gets back to
// exactly where it started out from
restorePosHe()
</pre>
</div>
,
"restorePosHe" ->
<div>
        <strong>restorePosHe</strong>() - Restores the turtle's position and heading
based on an earlier <tt>savePosHe()</tt>.<br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
// save the turtle's position and heading
savePosHe()
// move wherever
forward(100)
right(45)
forward(60)
// now restore the saved position and heading,
          // so that the turtle gets back to
// exactly where it started out from
restorePosHe()
</pre>
</div>
,
"saveStyle" ->
<div>
<strong>saveStyle</strong>() - Saves the turtle's current style, so that it can
        easily be restored later with <tt>restoreStyle()</tt>.<br/>
<p>
The turtle's style includes:
<ul>
<li>Pen Color</li>
<li>Pen Thickness</li>
<li>Fill color</li>
<li>Pen Font Size</li>
</ul>
</p>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def tick(n: Int) {{
// save current style, position and heading
saveStyle()
savePosHe()
setPenColor(gray)
right()
forward(n)
back(n * 2)
restorePosHe()
restoreStyle()
// restore caller's style, position and heading
}}
clear()
setPenColor(green)
right()
// green line
forward(100)
// grey tick
tick(10)
// green line
forward(100)
</pre>
</div>
,
"restoreStyle" ->
<div>
<strong>restoreStyle</strong>() - Restores the turtle's style
based on an earlier <tt>saveStyle()</tt>.
<br/>
<p>
The turtle's style includes:
<ul>
<li>Pen Color</li>
<li>Pen Thickness</li>
<li>Fill color</li>
<li>Pen Font Size</li>
</ul>
</p>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def tick(n: Int) {{
// save current style, position and heading
saveStyle()
savePosHe()
setPenColor(gray)
right()
forward(n)
back(n * 2)
restorePosHe()
restoreStyle()
// restore caller's style, position and heading
}}
clear()
setPenColor(green)
right()
// green line
forward(100)
// grey tick
tick(10)
// green line
forward(100)
</pre>
</div>
,
"beamsOn" -> "beamsOn() - Shows crossbeams centered on the turtle - to help with thinking about the turtle's heading/orientation.",
"beamsOff" -> "beamsOff() - Hides the turtle crossbeams that are turned on by beamsOn().",
"invisible" -> "invisible() - Hides the turtle.",
"visible" -> "visible() - Makes the hidden turtle visible again.",
"write" -> "write(obj) - Makes the turtle write the specified object as a string at its current location.",
"setAnimationDelay" ->
<div>
<strong>setAnimationDelay</strong>(delay) - Sets the turtle's speed. The specified delay
is the amount of time (in milliseconds) taken by the turtle to move through a distance of one hundred steps.<br/>
The default delay is 1000 milliseconds (or 1 second).<br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
// default animation delay
// drawing the line takes 1 second
forward(100)
setAnimationDelay(500)
          // drawing the line takes 1/2 second
forward(100)
setAnimationDelay(100)
          // drawing the line takes 1/10 second
forward(100)
</pre>
</div>
,
"animationDelay" -> "animationDelay - Queries the turtle's delay setting.",
"clear" -> "clear() - Clears the turtle canvas, and brings the turtle to the center of the canvas.",
"wipe" -> "wipe() - Wipes the turtle canvas by earsing all pictures. Meant to be used during an animation.",
"clearOutput" -> "clearOutput() - Clears the output window.",
"clearWithUL" -> "clearWithUL(unit) - Clears the turtle canvas, sets the given unit length (Pixel, Cm, or Inch), and brings the turtle to the center of the canvas.",
"arc" ->
<div>
<strong>arc</strong>(radius, angle) - Gets the turtle to make an arc with the given
radius and angle.<br/>
        Positive angles make the turtle go left (anti-clockwise). Negative angles make the turtle go right (clockwise). <br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
// a simple arc
clear()
arc(100, 45)
// a pattern of arcs
clear()
right(135)
repeat (5) {{
arc(50, 90)
arc(50, -90)
}}
</pre>
</div>
,
"circle" ->
<div>
<strong>circle</strong>(radius) - Gets the turtle to make a circle with the given
radius. <br/>
A circle(50) command is equivalent to an arc(50, 360) command.<br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
circle(50)
</pre>
</div>
,
"def" ->
<div>
        <strong>def</strong> - Lets you define a new command or function.<br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
          // A user-defined command named square
// Takes one input
def square(side: Int) {{
repeat(4) {{
forward(side)
right()
}}
}}
clear()
// two different calls to square command
square(100)
square(200)
          // A user-defined function named sum
// Takes two inputs, and returns a result
def sum(n1: Int, n2: Int) = {{
n1 + n2
}}
clearOutput()
// call to the sum function within print command
print(sum(3, 5))
// another call to the sum function
print(sum(20, 7))
</pre>
</div>
,
"if" ->
<div>
        <strong>if</strong> or <strong>if-else</strong> - Lets you do conditional execution.<br/>
<br/>
<em>Examples:</em> <br/>
<br/>
<pre>
clear()
val size = 50
// conditionally run a command
// the else part is optional
if (size > 100) {{
setFillColor(blue)
}}
else {{
setFillColor(green)
}}
circle(size)
val n = 100
// conditionally evaluate an expression
val big = if (n > 50) true else false
clearOutput()
println(big)
</pre>
</div>
,
"val" ->
<div>
        <strong>val</strong> - Lets you create a named value (thus letting you
associate a name with a value). This makes your programs easier to modify
and easier to understand.<br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
clear()
val size = 50
circle(size)
repeat (4) {{
forward(size)
right()
}}
</pre>
</div>
,
"pict" -> "pict { t => } is obsolete. Use the PictureT (preferred) or Picture function instead.",
"Picture" ->
<div>
<strong>Picture</strong>{{ drawingCode }} - Makes a picture out of the given turtle drawing code. <br/>
The picture needs to be drawn for it to become visible in the turtle canvas. <br/><br/>
<em>Note - every picture has its own turtle. For pictures created with the <tt>Picture</tt> function,
          Kojo's default turtle is set to the picture's turtle while the picture is being drawn.
Your drawing code can then continue to use the default turtle for drawing. Contrast this with
          pictures created using the <tt>PictureT</tt> function. For those, a turtle is explicitly supplied to
your drawing code, and your code needs to draw using that turtle. Kojo's default turtle is left
alone in that case.</em><br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
// create a function for making a picture with a circle in it
def p = Picture {{
circle(50)
}}
clear()
invisible()
// draw the picture
draw(p)
</pre>
</div>
,
"PictureT" ->
<div>
<strong>PictureT</strong>{{ t => drawingCode }} - Makes a picture out of the given turtle drawing code,
which needs to draw using the supplied turtle <tt>t</tt>.<br/>
The picture needs to be drawn for it to become visible in the turtle canvas. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
// create a function for making a picture with a circle in it
def p = PictureT {{ t =>
import t._
circle(50)
}}
clear()
invisible()
// draw the picture
draw(p)
</pre>
</div>
,
"HPics" ->
<div>
<strong>HPics</strong>(pictures) <br/>
A container for pictures that lays out the given pictures horizontally. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = HPics(
p,
p,
p
)
draw(pic)
</pre>
</div>
,
"VPics" ->
<div>
<strong>VPics</strong>(pictures) <br/>
A container for pictures that lays out the given pictures vertically. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = VPics(
p,
p,
p
)
draw(pic)
</pre>
</div>
,
"GPics" ->
<div>
<strong>GPics</strong>(pictures) <br/>
A container for pictures that lays out the given pictures one on top of the other. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = GPics(
p,
rot(30) -> p,
rot(60) -> p
)
draw(pic)
</pre>
</div>
,
"rot" ->
<div>
<strong>rot</strong>(angle) -> picture <br/>
Rotates the given picture by the given angle. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = rot(30) -> p
draw(pic)
</pre>
</div>
,
"trans" ->
<div>
<strong>trans</strong>(x, y) -> picture <br/>
Translates the given picture by the given x and y values. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = trans(10, 5) -> p
draw(pic)
</pre>
</div>,
"offset" ->
<div>
<strong>offset</strong>(x, y) -> picture <br/>
Offsets the given picture by the given x and y values, with respect to the
global (canvas) coordinate system. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
axesOn()
val pic = rot(60) * offset(100, 0) -> p
draw(pic)
</pre>
</div>,
"scale" ->
<div>
<strong>scale</strong>(factor) -> picture <br/>
<strong>scale</strong>(xf, yf) -> picture <br/>
Scales the given picture by the given scaling factor(s). <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = scale(2) -> p
draw(pic)
</pre>
</div>,
"fillColor" ->
<div>
<strong>fillColor</strong>(color) -> picture <br/>
Fills the given picture with the given color. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = fillColor(green) -> p
draw(pic)
</pre>
</div>,
"penColor" ->
<div>
<strong>penColor</strong>(color) -> picture <br/>
Sets the pen color for the given picture to the given color. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = penColor(blue) -> p
draw(pic)
</pre>
</div>,
"penWidth" ->
<div>
<strong>penWidth</strong>(thickness) -> picture <br/>
Sets the pen width for the given picture to the given thickness. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = penWidth(10) -> p
draw(pic)
</pre>
</div>,
"hue" ->
<div>
<strong>hue</strong>(factor) -> picture <br/>
Changes the hue of the given picture's fill color by the given factor. <br/>
The factor needs to be between -1 and 1. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = hue(0.5) * fillColor(blue) -> p
// val pic = hue(-0.5) * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"sat" ->
<div>
<strong>sat</strong>(factor) -> picture <br/>
Changes the saturation of the given picture's fill color by the given factor. <br/>
The factor needs to be between -1 and 1. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = sat(-0.5) * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"brit" ->
<div>
<strong>brit</strong>(factor) -> picture <br/>
Changes the brightness of the given picture's fill color by the given factor.<br/>
The factor needs to be between -1 and 1. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = brit(-0.5) * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"opac" ->
<div>
<strong>opac</strong>(factor) -> picture <br/>
Changes the opacity of the given picture by the given factor.<br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = opac(-0.5) * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"axes" ->
<div>
<strong>axes</strong> -> picture <br/>
Turns on local axes for the picture (to help during picture construction). <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
val pic = axes * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"flipY" ->
<div>
<strong>flipY</strong> -> picture <br/>
Flips the given picture around the local Y axis. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
axesOn()
val pic = trans(100, 0) * flipY * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"flipX" ->
<div>
<strong>flipX</strong> -> picture <br/>
Flips the given picture around the local X axis. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
axesOn()
val pic = trans(100, 0) * flipX * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"flip" ->
<div>
<strong>flip</strong> -> picture <br/>
The same thing as flipY. <br/>
Flips the given picture around the local Y axis. <br/>
<br/>
<em>Example:</em> <br/>
<br/>
<pre>
def p = Picture {{
repeat (4) {{
forward(50)
right()
}}
}}
clear()
invisible()
axesOn()
val pic = trans(100, 0) * flip * fillColor(blue) -> p
draw(pic)
</pre>
</div>,
"stClear" -> "stClear() - Clears the Story Teller Window.",
"stPlayStory" -> "stPlayStory(story) - Plays the given story.",
"stFormula" -> "stFormula(latex) - Converts the supplied latex string into html that can be displayed in the Story Teller Window.",
"stPlayMp3" -> "stPlayMp3(fileName) - Plays the specified MP3 file.",
"stPlayMp3Loop" -> "stPlayMp3Loop(fileName) - Plays the specified MP3 file in the background.",
"stAddButton" -> "stAddButton(label) {code} - Adds a button with the given label to the Story Teller Window, and runs the supplied code when the button is clicked.",
"stAddField" -> "stAddField(label, default) - Adds an input field with the supplied label and default value to the Story Teller Window.",
"stFieldValue" -> "stFieldValue(label, default) - Gets the value of the specified field.",
"stShowStatusMsg" -> "stShowStatusMsg(msg) - Shows the specified message in the Story Teller status bar.",
"stSetScript" -> "stSetScript(code) - Copies the supplied code to the script editor.",
"stRunCode" -> "stRunCode(code) - Runs the supplied code (without copying it to the script editor).",
"stClickRunButton" -> "stClickRunButton() - Simulates a click of the run button.",
"stShowStatusError" -> "stShowStatusError(msg) - Shows the specified error message in the Story Teller status bar.",
"stNext" -> "stNext() - Moves the story to the next page/view."
)
val StagingContent = Map[String, String](
"clear" -> "clear() - Clears the canvas.",
"clearWithUL" -> "clearWithUL(unit) - Clears the canvas, and sets the given unit length (Pixel, Cm, or Inch)."
)
val MwContent = Map[String, String]()
@volatile var modeSpecificContent: Map[String, String] = TwContent
def activateTw() {
modeSpecificContent = TwContent
clearLangContent()
}
def activateMw() {
modeSpecificContent = MwContent
clearLangContent()
}
def activateStaging() {
modeSpecificContent = StagingContent
clearLangContent()
}
val langContent: collection.mutable.Map[String, Map[String, String]] = collection.mutable.Map()
def addContent(lang: String, content: Map[String, String]) {
// import util.Typeclasses._
// langContent += (lang -> (langContent.getOrElse(lang, Map()) |+| content))
langContent += (lang -> (langContent.getOrElse(lang, Map()) ++ content))
}
def clearLangContent() {
langContent.clear()
}
def langHelp(name: String, lang: String): Option[String] = {
langContent.get(lang) match {
case Some(content) => content.get(name)
case None => None
}
}
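  // Help lookup order: common content first, then the content of the currently active
  // mode (Tw, Staging, or Mw), and finally any language-specific content registered for
  // the user's locale; returns null when no entry exists for the topic.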
def apply(topic: String) = {
CommonContent.getOrElse(
topic,
modeSpecificContent.getOrElse(
topic,
langHelp(topic, System.getProperty("user.language")).getOrElse(null)
)
)
}
}
|
vnkmr7620/kojo
|
KojoEnv/src/net/kogics/kojo/xscala/Help.scala
|
Scala
|
gpl-3.0
| 34,357 |
package dundertext.editor.cmd
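/**
 * Cursor-movement commands for the subtitling editor. Each command's `applies` guard
 * checks that the move is legal from the current cursor position before `execute`
 * performs it: Left/Right step within a row, Up/Down move between rows, and
 * RowBegin/RowEnd jump to the boundaries of the current row.
 */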
object MoveCursor {
object Left extends CommandDescription {
def apply() = new Left
}
class Left extends SubtitlingCommand {
override def applies: Boolean =
!cursor.isAtBeginningOfRow
override def execute(): Unit = {
cursor.moveLeft(1)
}
}
object Right extends CommandDescription {
def apply() = new Right
}
class Right extends SubtitlingCommand {
override def applies: Boolean =
!cursor.isAtEndOfRow
override def execute(): Unit = {
cursor.moveRight(1)
}
}
object Up extends CommandDescription {
def apply() = new Up
}
class Up extends SubtitlingCommand {
override def applies = cursor.row.prev != null
override def execute(): Unit = {
cursor.moveTo(cursor.row.prev)
}
}
object Down extends CommandDescription {
def apply() = new Down
}
class Down extends SubtitlingCommand {
override def applies = cursor.row.next != null
override def execute(): Unit = {
cursor.moveTo(cursor.row.next)
}
}
object RowBegin extends CommandDescription {
def apply() = new RowBegin
}
class RowBegin extends SubtitlingCommand {
override def applies: Boolean =
!cursor.isAtBeginningOfRow
override def execute(): Unit = {
cursor.moveTo(cursor.row)
}
}
object RowEnd extends CommandDescription {
def apply() = new RowEnd
}
class RowEnd extends SubtitlingCommand {
override def applies: Boolean =
!cursor.isAtEndOfRow
override def execute(): Unit = {
cursor.moveRowEnd()
}
}
}
|
dundertext/dundertext
|
editor/src/main/scala/dundertext/editor/cmd/MoveCursor.scala
|
Scala
|
gpl-3.0
| 1,609 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.upload.fileupload {
import scala.concurrent.ExecutionContext
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test._
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import controllers._
import play.api.libs.Files.SingletonTemporaryFileCreator
import java.io.File
import java.nio.file.attribute.PosixFilePermission._
import java.nio.file.attribute.PosixFilePermissions
import java.nio.file.{Files => JFiles, Path, Paths}
import akka.stream.IOResult
import akka.stream.scaladsl._
import akka.util.ByteString
import play.api._
import play.api.libs.streams._
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc._
import play.core.parsers.Multipart.FileInfo
@RunWith(classOf[JUnitRunner])
class ScalaFileUploadSpec extends AbstractController(Helpers.stubControllerComponents()) with PlaySpecification {
import scala.concurrent.ExecutionContext.Implicits.global
"A scala file upload" should {
"upload file" in new WithApplication {
val tmpFile = JFiles.createTempFile(null, null)
writeFile(tmpFile, "hello")
new File("/tmp/picture").mkdirs()
val uploaded = new File("/tmp/picture/formuploaded")
uploaded.delete()
val parse = app.injector.instanceOf[PlayBodyParsers]
val Action = app.injector.instanceOf[DefaultActionBuilder]
//#upload-file-action
def upload = Action(parse.multipartFormData) { request =>
request.body.file("picture").map { picture =>
// only get the last part of the filename
// otherwise someone can send a path like ../../home/foo/bar.txt to write to other files on the system
val filename = Paths.get(picture.filename).getFileName
picture.ref.moveTo(Paths.get(s"/tmp/picture/$filename"), replace = true)
Ok("File uploaded")
}.getOrElse {
Redirect(routes.ScalaFileUploadController.index).flashing(
"error" -> "Missing file")
}
}
//#upload-file-action
val temporaryFileCreator = SingletonTemporaryFileCreator
val tf = temporaryFileCreator.create(tmpFile)
val request = FakeRequest().withBody(
MultipartFormData(Map.empty, Seq(FilePart("picture", "formuploaded", None, tf)), Nil)
)
testAction(upload, request)
uploaded.delete()
success
}
"upload file directly" in new WithApplication {
val tmpFile = Paths.get("/tmp/picture/tmpuploaded")
writeFile(tmpFile, "hello")
new File("/tmp/picture").mkdirs()
val uploaded = new File("/tmp/picture/uploaded")
uploaded.delete()
val temporaryFileCreator = SingletonTemporaryFileCreator
val tf = temporaryFileCreator.create(tmpFile)
val request = FakeRequest().withBody(tf)
val controllerComponents = app.injector.instanceOf[ControllerComponents]
testAction(new controllers.ScalaFileUploadController(controllerComponents).upload, request)
uploaded.delete()
success
}
}
private def testAction[A](action: Action[A], request: => Request[A] = FakeRequest(), expectedResponse: Int = OK)(implicit app: Application) = {
val result = action(request)
status(result) must_== expectedResponse
}
def writeFile(file: File, content: String): Path = {
writeFile(file.toPath, content)
}
def writeFile(path: Path, content: String): Path = {
JFiles.write(path, content.getBytes)
}
}
package controllers {
class ScalaFileUploadController(controllerComponents: ControllerComponents)(implicit ec: ExecutionContext) extends AbstractController(controllerComponents) {
//#upload-file-directly-action
def upload = Action(parse.temporaryFile) { request =>
request.body.moveTo(Paths.get("/tmp/picture/uploaded"), replace = true)
Ok("File uploaded")
}
//#upload-file-directly-action
def index = Action { request =>
Ok("Upload failed")
}
//#upload-file-customparser
type FilePartHandler[A] = FileInfo => Accumulator[ByteString, FilePart[A]]
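      // A FilePartHandler maps each incoming part's metadata (FileInfo) to an Accumulator
      // that consumes the part's ByteString chunks; the handler below streams them into a
      // freshly created owner-read/write temp file and yields a FilePart pointing at it.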
def handleFilePartAsFile: FilePartHandler[File] = {
case FileInfo(partName, filename, contentType) =>
val perms = java.util.EnumSet.of(OWNER_READ, OWNER_WRITE)
val attr = PosixFilePermissions.asFileAttribute(perms)
val path = JFiles.createTempFile("multipartBody", "tempFile", attr)
val file = path.toFile
val fileSink = FileIO.toPath(path)
val accumulator = Accumulator(fileSink)
accumulator.map { case IOResult(count, status) =>
FilePart(partName, filename, contentType, file)
}(ec)
}
def uploadCustom = Action(parse.multipartFormData(handleFilePartAsFile)) { request =>
val fileOption = request.body.file("name").map {
case FilePart(key, filename, contentType, file) =>
file.toPath
}
Ok(s"File uploaded: $fileOption")
}
//#upload-file-customparser
}
}
}
|
Shenker93/playframework
|
documentation/manual/working/scalaGuide/main/upload/code/ScalaFileUpload.scala
|
Scala
|
apache-2.0
| 5,278 |
package vanadis.modules.examples.scalacalc.calculator
class Expression(expr: String) {
  // Tokenize once: lower-case the expression, drop its last character, and
  // split on whitespace. E.g. "Add 1 2?" yields op = "add", args = Array(1, 2).
  val split: Array[String] = expr.toLowerCase.substring(0, expr.length - 1).split("\\s+")
  val op: String = split(0)
  val args: Array[Int] = split.tail.map(_.toInt)
}
|
kjetilv/vanadis
|
modules/examples/scalacalc/calculator/src/main/scala/vanadis/modules/examples/scalacalc/calculator/Expression.scala
|
Scala
|
apache-2.0
| 344 |
package cn.edu.suda.ada.spatialspark.core
import scala.math._
/**
* The class Trajectory is an abstract representation of a trajectory, which basically contains a list
* of GPS points and an ID that uniquely identifies it. This class provides some basic operations that
* can be performed to answer queries on a particular trajectory, such as average speed, average sampling
 * frequency, and similarity between trajectories, etc.
* @author Graberial
*/
case class Trajectory(trajectoryID: String, carID: String, var GPSPoints: List[GPSPoint]) extends Serializable {
  var travelDistance: Float = -1 // total travel distance; -1 means not yet computed
  var range: Range = _ // minimal bounding rectangle covering this trajectory; computed lazily by getRange
/** @return the timestamp of the first GPSPoint in the trajectory */
def getStarTime = GPSPoints.head.timestamp
def getEndTime = GPSPoints.last.timestamp
/**
* @return Trajectory travel time
*/
def getDuration = getEndTime - getStarTime
/** @return the first GPSPoint in the trajectory */
def getStartPoint: GPSPoint = GPSPoints.head
def getEndPoint: GPSPoint = GPSPoints.last
/**
* @return travel distance of a trajectory
   * @note Only sums the straight-line distances between consecutive coordinates, not the true path length; needs further refinement
*/
def getTravelDistance: Float = {
if (travelDistance != -1)
travelDistance
else {
var sum: Double = 0
for(index <- 0 to GPSPoints.length - 2){
// sum += hypot(GPSPoints(index).latitude - GPSPoints(index+1).latitude,GPSPoints(index).longitude - GPSPoints(index+1).longitude)
sum += GPSPoints(index).getDistance(GPSPoints(index+1))
}
travelDistance = sum.toFloat
travelDistance
}
}
def getAverageSpeed: Float = getTravelDistance / getDuration
def getAverageSampleInterval: Float = (getEndTime - getStarTime) / GPSPoints.length
/**
* Return how many sample points in this trajectory
* @return Number of sample points
*/
def length = GPSPoints.length
/**
   * Get the sub-trajectory that lies within the given rectangular area. This method creates a new Trajectory object
   * while the original one remains unchanged.
   * @param rect the rectangular area used to filter the trajectory's sample points
   * @return the sub-trajectory containing only the points that fall within the given area
*/
def getSubTrajectory(rect: Range): Trajectory = {
// var subGPSPoints: List[GPSPoint] = Nil
// for (point <- GPSPoints if rect.contains(point.latitude, point.longitude)) {
// subGPSPoints = point :: subGPSPoints
// }
val subTraj = GPSPoints.filter(p => rect.contains(p.getPoint()))
new Trajectory(trajectoryID, carID, subTraj)
}
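  /**
   * Keeps only the sample points whose speed exceeds the given threshold. Note that the
   * parameter, although named `interval`, is compared against each point's speed.
   */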
def getSubTrajectory(interval: Int): Trajectory = {
val subGPSPoints : List[GPSPoint] = GPSPoints.filter(_.speed > interval)
new Trajectory(trajectoryID,carID,subGPSPoints)
}
/**
* @return return the rectangle area that merely covers this trajectory
*/
def getRange: Range = {
if (range != null)
range
else {
var top, bottom = GPSPoints.head.latitude
var left, right = GPSPoints.head.longitude
for (point <- GPSPoints.tail) {
if (point.latitude > top) top = point.latitude
else if (point.latitude < bottom) bottom = point.latitude
if (point.longitude < left) left = point.longitude
else if (point.longitude > right) right = point.longitude
}
range = new Range(left, top, right, bottom)
range
}
}
  /**
   * Get the nearest distance between a GPS point and this trajectory:
   * d(p, T) = min over q in T of d(p, q)
   * @param point the query GPS point
   * @return the minimum distance from `point` to any sample point of the trajectory
   */
  def getDistance(point: GPSPoint): Double = {
    // minBy matches the documented definition; maxBy would return the farthest point instead
    GPSPoints.minBy(p => p.getDistance(point)).getDistance(point)
  }
override def toString = "Trajectory: Id ("+trajectoryID+") numGPSPoints("+GPSPoints.length+")"
}
|
LiYangsuda/SpatialSpark
|
src/main/scala/cn/edu/suda/ada/spatialspark/core/Trajectory.scala
|
Scala
|
apache-2.0
| 3,866 |
import java.io.{FileOutputStream, FileInputStream}
import scala.tools.asm.{ClassWriter, Opcodes, ClassReader}
import scala.tools.asm.tree.{InsnNode, ClassNode}
import scala.tools.nsc.backend.jvm.AsmUtils
import scala.tools.partest.DirectTest
import scala.jdk.CollectionConverters._
/**
* Test that ClassReader does not crash if the bytecode of a method has unreachable code.
*/
object Test extends DirectTest {
def code: String = ???
def show(): Unit = {
// The bytecode of f will be modified using ASM by `addDeadCode`
val aCode =
"""
|package p
|class A {
| @inline final def f = 1
|}
""".stripMargin
val bCode =
"""
|package p
|class B {
| def g = (new A()).f
|}
""".stripMargin
compileString(newCompiler("-cp", testOutput.path))(aCode)
addDeadCode()
// If inlining fails, the compiler will issue an inliner warning that is not present in the
// check file
compileString(newCompiler("-cp", testOutput.path, "-opt:inline:**"))(bCode)
}
def readClass(file: String) = {
val cnode = new ClassNode()
val is = new FileInputStream(file)
val reader = new ClassReader(is)
reader.accept(cnode, 0)
is.close()
cnode
}
def writeClass(file: String, cnode: ClassNode): Unit = {
val writer = new ClassWriter(0)
cnode.accept(writer)
val os = new FileOutputStream(file)
os.write(writer.toByteArray)
os.close()
}
def addDeadCode(): Unit = {
val file = (testOutput / "p" / "A.class").path
val cnode = readClass(file)
val method = cnode.methods.asScala.find(_.name == "f").get
AsmUtils.traceMethod(method)
val insns = method.instructions
val it = insns.iterator
while (it.hasNext) {
val in = it.next()
if (in.getOpcode == Opcodes.IRETURN) {
// Insert an ATHROW before the IRETURN. The IRETURN will then be dead code.
// The ICodeReader should not crash if there's dead code.
insns.insert(in.getPrevious, new InsnNode(Opcodes.ATHROW))
}
}
AsmUtils.traceMethod(method)
writeClass(file, cnode)
}
}
|
scala/scala
|
test/files/run/icode-reader-dead-code.scala
|
Scala
|
apache-2.0
| 2,160 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator
import java.util.concurrent.locks.ReentrantReadWriteLock
import kafka.utils.CoreUtils._
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.protocol.types.{ArrayOf, Struct, Schema, Field}
import org.apache.kafka.common.protocol.types.Type.STRING
import org.apache.kafka.common.protocol.types.Type.INT32
import org.apache.kafka.common.protocol.types.Type.INT64
import org.apache.kafka.common.protocol.types.Type.BYTES
import org.apache.kafka.common.utils.Utils
import kafka.utils._
import kafka.common._
import kafka.message._
import kafka.log.FileMessageSet
import kafka.metrics.KafkaMetricsGroup
import kafka.common.TopicAndPartition
import kafka.tools.MessageFormatter
import kafka.api.ProducerResponseStatus
import kafka.server.ReplicaManager
import scala.collection._
import java.io.PrintStream
import java.nio.ByteBuffer
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.TimeUnit
import com.yammer.metrics.core.Gauge
case class DelayedStore(messageSet: Map[TopicAndPartition, MessageSet],
callback: Map[TopicAndPartition, ProducerResponseStatus] => Unit)
class GroupMetadataManager(val brokerId: Int,
val config: OffsetConfig,
replicaManager: ReplicaManager,
zkUtils: ZkUtils) extends Logging with KafkaMetricsGroup {
/* offsets cache */
private val offsetsCache = new Pool[GroupTopicPartition, OffsetAndMetadata]
/* group metadata cache */
private val groupsCache = new Pool[String, GroupMetadata]
  /* partitions of consumer groups that are being loaded; their lock should always be acquired BEFORE offsetExpireLock and the group lock if needed */
private val loadingPartitions: mutable.Set[Int] = mutable.Set()
/* partitions of consumer groups that are assigned, using the same loading partition lock */
private val ownedPartitions: mutable.Set[Int] = mutable.Set()
  /* lock for expiring stale offsets; it should always be acquired BEFORE the group lock if needed */
private val offsetExpireLock = new ReentrantReadWriteLock()
/* shutting down flag */
private val shuttingDown = new AtomicBoolean(false)
/* number of partitions for the consumer metadata topic */
private val groupMetadataTopicPartitionCount = getOffsetsTopicPartitionCount
  /* Single-thread scheduler for handling offset/group metadata cache loading and unloading */
private val scheduler = new KafkaScheduler(threads = 1, threadNamePrefix = "group-metadata-manager-")
this.logIdent = "[Group Metadata Manager on Broker " + brokerId + "]: "
scheduler.startup()
scheduler.schedule(name = "delete-expired-consumer-offsets",
fun = deleteExpiredOffsets,
period = config.offsetsRetentionCheckIntervalMs,
unit = TimeUnit.MILLISECONDS)
newGauge("NumOffsets",
new Gauge[Int] {
def value = offsetsCache.size
}
)
newGauge("NumGroups",
new Gauge[Int] {
def value = groupsCache.size
}
)
def currentGroups(): Iterable[GroupMetadata] = groupsCache.values
def partitionFor(groupId: String): Int = Utils.abs(groupId.hashCode) % groupMetadataTopicPartitionCount
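  // A group's offsets and metadata always land in the offsets-topic partition computed
  // above from the hash of the group id, so the broker leading that partition owns the
  // group's state (see isGroupLocal/isGroupLoading below).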
def isGroupLocal(groupId: String): Boolean = loadingPartitions synchronized ownedPartitions.contains(partitionFor(groupId))
def isGroupLoading(groupId: String): Boolean = loadingPartitions synchronized loadingPartitions.contains(partitionFor(groupId))
def isLoading(): Boolean = loadingPartitions synchronized !loadingPartitions.isEmpty
/**
* Get the group associated with the given groupId, or null if not found
*/
def getGroup(groupId: String): GroupMetadata = {
groupsCache.get(groupId)
}
/**
* Add a group or get the group associated with the given groupId if it already exists
*/
def addGroup(group: GroupMetadata): GroupMetadata = {
val currentGroup = groupsCache.putIfNotExists(group.groupId, group)
if (currentGroup != null) {
currentGroup
} else {
group
}
}
/**
* Remove all metadata associated with the group
* @param group
*/
def removeGroup(group: GroupMetadata) {
// guard this removal in case of concurrent access (e.g. if a delayed join completes with no members
// while the group is being removed due to coordinator emigration)
if (groupsCache.remove(group.groupId, group)) {
// Append the tombstone messages to the partition. It is okay if the replicas don't receive these (say,
// if we crash or leaders move) since the new leaders will still expire the consumers with heartbeat and
// retry removing this group.
val groupPartition = partitionFor(group.groupId)
val tombstone = new Message(bytes = null, key = GroupMetadataManager.groupMetadataKey(group.groupId))
val partitionOpt = replicaManager.getPartition(GroupCoordinator.GroupMetadataTopicName, groupPartition)
partitionOpt.foreach { partition =>
val appendPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, groupPartition)
trace("Marking group %s as deleted.".format(group.groupId))
try {
// do not need to require acks since even if the tombstone is lost,
// it will be appended again by the new leader
// TODO KAFKA-2720: periodic purging instead of immediate removal of groups
partition.appendMessagesToLeader(new ByteBufferMessageSet(config.offsetsTopicCompressionCodec, tombstone))
} catch {
case t: Throwable =>
error("Failed to mark group %s as deleted in %s.".format(group.groupId, appendPartition), t)
// ignore and continue
}
}
}
}
def prepareStoreGroup(group: GroupMetadata,
groupAssignment: Map[String, Array[Byte]],
responseCallback: Short => Unit): DelayedStore = {
// construct the message to append
val message = new Message(
key = GroupMetadataManager.groupMetadataKey(group.groupId),
bytes = GroupMetadataManager.groupMetadataValue(group, groupAssignment)
)
val groupMetadataPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, partitionFor(group.groupId))
val groupMetadataMessageSet = Map(groupMetadataPartition ->
new ByteBufferMessageSet(config.offsetsTopicCompressionCodec, message))
val generationId = group.generationId
// set the callback function to insert the created group into cache after log append completed
def putCacheCallback(responseStatus: Map[TopicAndPartition, ProducerResponseStatus]) {
      // the append response should only contain the single partition we appended to
if (responseStatus.size != 1 || ! responseStatus.contains(groupMetadataPartition))
throw new IllegalStateException("Append status %s should only have one partition %s"
.format(responseStatus, groupMetadataPartition))
// construct the error status in the propagated assignment response
// in the cache
val status = responseStatus(groupMetadataPartition)
var responseCode = Errors.NONE.code
if (status.error != ErrorMapping.NoError) {
debug("Metadata from group %s with generation %d failed when appending to log due to %s"
.format(group.groupId, generationId, ErrorMapping.exceptionNameFor(status.error)))
        // transform the log append error code to the corresponding commit status error code
responseCode = if (status.error == ErrorMapping.UnknownTopicOrPartitionCode) {
Errors.GROUP_COORDINATOR_NOT_AVAILABLE.code
} else if (status.error == ErrorMapping.NotLeaderForPartitionCode) {
Errors.NOT_COORDINATOR_FOR_GROUP.code
} else if (status.error == ErrorMapping.RequestTimedOutCode) {
Errors.REBALANCE_IN_PROGRESS.code
} else if (status.error == ErrorMapping.MessageSizeTooLargeCode
|| status.error == ErrorMapping.MessageSetSizeTooLargeCode
|| status.error == ErrorMapping.InvalidFetchSizeCode) {
error("Appending metadata message for group %s generation %d failed due to %s, returning UNKNOWN error code to the client"
.format(group.groupId, generationId, ErrorMapping.exceptionNameFor(status.error)))
Errors.UNKNOWN.code
} else {
error("Appending metadata message for group %s generation %d failed due to unexpected error: %s"
.format(group.groupId, generationId, status.error))
status.error
}
}
responseCallback(responseCode)
}
DelayedStore(groupMetadataMessageSet, putCacheCallback)
}
def store(delayedAppend: DelayedStore) {
// call replica manager to append the group message
replicaManager.appendMessages(
config.offsetCommitTimeoutMs.toLong,
config.offsetCommitRequiredAcks,
true, // allow appending to internal offset topic
delayedAppend.messageSet,
delayedAppend.callback)
}
/**
* Store offsets by appending it to the replicated log and then inserting to cache
*/
def prepareStoreOffsets(groupId: String,
consumerId: String,
generationId: Int,
offsetMetadata: immutable.Map[TopicAndPartition, OffsetAndMetadata],
responseCallback: immutable.Map[TopicAndPartition, Short] => Unit): DelayedStore = {
// first filter out partitions with offset metadata size exceeding limit
val filteredOffsetMetadata = offsetMetadata.filter { case (topicAndPartition, offsetAndMetadata) =>
validateOffsetMetadataLength(offsetAndMetadata.metadata)
}
// construct the message set to append
val messages = filteredOffsetMetadata.map { case (topicAndPartition, offsetAndMetadata) =>
new Message(
key = GroupMetadataManager.offsetCommitKey(groupId, topicAndPartition.topic, topicAndPartition.partition),
bytes = GroupMetadataManager.offsetCommitValue(offsetAndMetadata)
)
}.toSeq
val offsetTopicPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, partitionFor(groupId))
val offsetsAndMetadataMessageSet = Map(offsetTopicPartition ->
new ByteBufferMessageSet(config.offsetsTopicCompressionCodec, messages:_*))
// set the callback function to insert offsets into cache after log append completed
def putCacheCallback(responseStatus: Map[TopicAndPartition, ProducerResponseStatus]) {
      // the append response should only contain the single partition we appended to
if (responseStatus.size != 1 || ! responseStatus.contains(offsetTopicPartition))
throw new IllegalStateException("Append status %s should only have one partition %s"
.format(responseStatus, offsetTopicPartition))
// construct the commit response status and insert
// the offset and metadata to cache if the append status has no error
val status = responseStatus(offsetTopicPartition)
val responseCode =
if (status.error == ErrorMapping.NoError) {
filteredOffsetMetadata.foreach { case (topicAndPartition, offsetAndMetadata) =>
putOffset(GroupTopicPartition(groupId, topicAndPartition), offsetAndMetadata)
}
ErrorMapping.NoError
} else {
debug("Offset commit %s from group %s consumer %s with generation %d failed when appending to log due to %s"
.format(filteredOffsetMetadata, groupId, consumerId, generationId, ErrorMapping.exceptionNameFor(status.error)))
          // transform the log append error code to the corresponding commit status error code
if (status.error == ErrorMapping.UnknownTopicOrPartitionCode)
ErrorMapping.ConsumerCoordinatorNotAvailableCode
else if (status.error == ErrorMapping.NotLeaderForPartitionCode)
ErrorMapping.NotCoordinatorForConsumerCode
else if (status.error == ErrorMapping.MessageSizeTooLargeCode
|| status.error == ErrorMapping.MessageSetSizeTooLargeCode
|| status.error == ErrorMapping.InvalidFetchSizeCode)
Errors.INVALID_COMMIT_OFFSET_SIZE.code
else
status.error
}
// compute the final error codes for the commit response
val commitStatus = offsetMetadata.map { case (topicAndPartition, offsetAndMetadata) =>
if (validateOffsetMetadataLength(offsetAndMetadata.metadata))
(topicAndPartition, responseCode)
else
(topicAndPartition, ErrorMapping.OffsetMetadataTooLargeCode)
}
// finally trigger the callback logic passed from the API layer
responseCallback(commitStatus)
}
DelayedStore(offsetsAndMetadataMessageSet, putCacheCallback)
}
/**
* The most important guarantee that this API provides is that it should never return a stale offset. i.e., it either
* returns the current offset or it begins to sync the cache from the log (and returns an error code).
*/
def getOffsets(group: String, topicPartitions: Seq[TopicAndPartition]): Map[TopicAndPartition, OffsetMetadataAndError] = {
trace("Getting offsets %s for group %s.".format(topicPartitions, group))
if (isGroupLocal(group)) {
if (topicPartitions.isEmpty) {
// Return offsets for all partitions owned by this consumer group. (this only applies to consumers that commit offsets to Kafka.)
offsetsCache.filter(_._1.group == group).map { case(groupTopicPartition, offsetAndMetadata) =>
(groupTopicPartition.topicPartition, OffsetMetadataAndError(offsetAndMetadata.offset, offsetAndMetadata.metadata, ErrorMapping.NoError))
}.toMap
} else {
topicPartitions.map { topicAndPartition =>
val groupTopicPartition = GroupTopicPartition(group, topicAndPartition)
(groupTopicPartition.topicPartition, getOffset(groupTopicPartition))
}.toMap
}
} else {
debug("Could not fetch offsets for group %s (not offset coordinator).".format(group))
topicPartitions.map { topicAndPartition =>
val groupTopicPartition = GroupTopicPartition(group, topicAndPartition)
(groupTopicPartition.topicPartition, OffsetMetadataAndError.NotCoordinatorForGroup)
}.toMap
}
}
/**
* Asynchronously read the partition from the offsets topic and populate the cache
*/
def loadGroupsForPartition(offsetsPartition: Int,
onGroupLoaded: GroupMetadata => Unit) {
val topicPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)
scheduler.schedule(topicPartition.toString, loadGroupsAndOffsets)
def loadGroupsAndOffsets() {
info("Loading offsets and group metadata from " + topicPartition)
loadingPartitions synchronized {
if (loadingPartitions.contains(offsetsPartition)) {
info("Offset load from %s already in progress.".format(topicPartition))
return
} else {
loadingPartitions.add(offsetsPartition)
}
}
val startMs = SystemTime.milliseconds
try {
replicaManager.logManager.getLog(topicPartition) match {
case Some(log) =>
var currOffset = log.logSegments.head.baseOffset
val buffer = ByteBuffer.allocate(config.loadBufferSize)
// loop breaks if leader changes at any time during the load, since getHighWatermark is -1
inWriteLock(offsetExpireLock) {
val loadedGroups = mutable.Map[String, GroupMetadata]()
val removedGroups = mutable.Set[String]()
while (currOffset < getHighWatermark(offsetsPartition) && !shuttingDown.get()) {
buffer.clear()
val messages = log.read(currOffset, config.loadBufferSize).messageSet.asInstanceOf[FileMessageSet]
messages.readInto(buffer, 0)
val messageSet = new ByteBufferMessageSet(buffer)
messageSet.foreach { msgAndOffset =>
require(msgAndOffset.message.key != null, "Offset entry key should not be null")
val baseKey = GroupMetadataManager.readMessageKey(msgAndOffset.message.key)
if (baseKey.isInstanceOf[OffsetKey]) {
// load offset
val key = baseKey.key.asInstanceOf[GroupTopicPartition]
if (msgAndOffset.message.payload == null) {
if (offsetsCache.remove(key) != null)
trace("Removed offset for %s due to tombstone entry.".format(key))
else
trace("Ignoring redundant tombstone for %s.".format(key))
} else {
// special handling for version 0:
// set the expiration time stamp as commit time stamp + server default retention time
val value = GroupMetadataManager.readOffsetMessageValue(msgAndOffset.message.payload)
putOffset(key, value.copy (
expireTimestamp = {
if (value.expireTimestamp == org.apache.kafka.common.requests.OffsetCommitRequest.DEFAULT_TIMESTAMP)
value.commitTimestamp + config.offsetsRetentionMs
else
value.expireTimestamp
}
))
trace("Loaded offset %s for %s.".format(value, key))
}
} else {
// load group metadata
val groupId = baseKey.key.asInstanceOf[String]
val groupMetadata = GroupMetadataManager.readGroupMessageValue(groupId, msgAndOffset.message.payload)
if (groupMetadata != null) {
trace(s"Loaded group metadata for group ${groupMetadata.groupId} with generation ${groupMetadata.generationId}")
removedGroups.remove(groupId)
loadedGroups.put(groupId, groupMetadata)
} else {
loadedGroups.remove(groupId)
removedGroups.add(groupId)
}
}
currOffset = msgAndOffset.nextOffset
}
}
loadedGroups.values.foreach { group =>
val currentGroup = addGroup(group)
if (group != currentGroup)
debug(s"Attempt to load group ${group.groupId} from log with generation ${group.generationId} failed " +
s"because there is already a cached group with generation ${currentGroup.generationId}")
else
onGroupLoaded(group)
}
removedGroups.foreach { groupId =>
val group = groupsCache.get(groupId)
if (group != null)
throw new IllegalStateException(s"Unexpected unload of acitve group ${group.groupId} while " +
s"loading partition ${topicPartition}")
}
}
if (!shuttingDown.get())
info("Finished loading offsets from %s in %d milliseconds."
.format(topicPartition, SystemTime.milliseconds - startMs))
case None =>
warn("No log found for " + topicPartition)
}
}
catch {
case t: Throwable =>
error("Error in loading offsets from " + topicPartition, t)
}
finally {
loadingPartitions synchronized {
ownedPartitions.add(offsetsPartition)
loadingPartitions.remove(offsetsPartition)
}
}
}
}
/**
   * When this broker becomes a follower for an offsets topic partition, clear out the cache for groups that belong to
* that partition.
* @param offsetsPartition Groups belonging to this partition of the offsets topic will be deleted from the cache.
*/
def removeGroupsForPartition(offsetsPartition: Int,
onGroupUnloaded: GroupMetadata => Unit) {
val topicPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)
scheduler.schedule(topicPartition.toString, removeGroupsAndOffsets)
def removeGroupsAndOffsets() {
var numOffsetsRemoved = 0
var numGroupsRemoved = 0
loadingPartitions synchronized {
// we need to guard the group removal in cache in the loading partition lock
// to prevent coordinator's check-and-get-group race condition
ownedPartitions.remove(offsetsPartition)
// clear the offsets for this partition in the cache
/**
       * NOTE: we need to do this inside the loading partition lock as well, to prevent a race with the
       * leader-is-local check in getOffsets: it protects against fetching from an empty/cleared offset cache
       * (i.e., one cleared by a leader->follower transition right after the check), which would cause an
       * offset fetch to return empty offsets with the NONE error code
*/
offsetsCache.keys.foreach { key =>
if (partitionFor(key.group) == offsetsPartition) {
offsetsCache.remove(key)
numOffsetsRemoved += 1
}
}
// clear the groups for this partition in the cache
for (group <- groupsCache.values) {
onGroupUnloaded(group)
groupsCache.remove(group.groupId, group)
numGroupsRemoved += 1
}
}
if (numOffsetsRemoved > 0) info("Removed %d cached offsets for %s on follower transition."
.format(numOffsetsRemoved, TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)))
if (numGroupsRemoved > 0) info("Removed %d cached groups for %s on follower transition."
.format(numGroupsRemoved, TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)))
}
}
/**
* Fetch the current offset for the given group/topic/partition from the underlying offsets storage.
*
* @param key The requested group-topic-partition
* @return If the key is present, return the offset and metadata; otherwise return None
*/
private def getOffset(key: GroupTopicPartition) = {
val offsetAndMetadata = offsetsCache.get(key)
if (offsetAndMetadata == null)
OffsetMetadataAndError.NoOffset
else
OffsetMetadataAndError(offsetAndMetadata.offset, offsetAndMetadata.metadata, ErrorMapping.NoError)
}
/**
* Put the (already committed) offset for the given group/topic/partition into the cache.
*
* @param key The group-topic-partition
* @param offsetAndMetadata The offset/metadata to be stored
*/
private def putOffset(key: GroupTopicPartition, offsetAndMetadata: OffsetAndMetadata) {
offsetsCache.put(key, offsetAndMetadata)
}
private def deleteExpiredOffsets() {
debug("Collecting expired offsets.")
val startMs = SystemTime.milliseconds
val numExpiredOffsetsRemoved = inWriteLock(offsetExpireLock) {
val expiredOffsets = offsetsCache.filter { case (groupTopicPartition, offsetAndMetadata) =>
offsetAndMetadata.expireTimestamp < startMs
}
debug("Found %d expired offsets.".format(expiredOffsets.size))
// delete the expired offsets from the table and generate tombstone messages to remove them from the log
val tombstonesForPartition = expiredOffsets.map { case (groupTopicAndPartition, offsetAndMetadata) =>
val offsetsPartition = partitionFor(groupTopicAndPartition.group)
trace("Removing expired offset and metadata for %s: %s".format(groupTopicAndPartition, offsetAndMetadata))
offsetsCache.remove(groupTopicAndPartition)
val commitKey = GroupMetadataManager.offsetCommitKey(groupTopicAndPartition.group,
groupTopicAndPartition.topicPartition.topic, groupTopicAndPartition.topicPartition.partition)
(offsetsPartition, new Message(bytes = null, key = commitKey))
}.groupBy { case (partition, tombstone) => partition }
// Append the tombstone messages to the offset partitions. It is okay if the replicas don't receive these (say,
// if we crash or leaders move) since the new leaders will get rid of expired offsets during their own purge cycles.
tombstonesForPartition.flatMap { case (offsetsPartition, tombstones) =>
val partitionOpt = replicaManager.getPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)
partitionOpt.map { partition =>
val appendPartition = TopicAndPartition(GroupCoordinator.GroupMetadataTopicName, offsetsPartition)
val messages = tombstones.map(_._2).toSeq
trace("Marked %d offsets in %s for deletion.".format(messages.size, appendPartition))
try {
            // do not need to require acks since even if the tombstone is lost,
// it will be appended again in the next purge cycle
partition.appendMessagesToLeader(new ByteBufferMessageSet(config.offsetsTopicCompressionCodec, messages: _*))
tombstones.size
}
catch {
case t: Throwable =>
error("Failed to mark %d expired offsets for deletion in %s.".format(messages.size, appendPartition), t)
// ignore and continue
0
}
}
}.sum
}
info("Removed %d expired offsets in %d milliseconds.".format(numExpiredOffsetsRemoved, SystemTime.milliseconds - startMs))
}
private def getHighWatermark(partitionId: Int): Long = {
val partitionOpt = replicaManager.getPartition(GroupCoordinator.GroupMetadataTopicName, partitionId)
val hw = partitionOpt.map { partition =>
partition.leaderReplicaIfLocal().map(_.highWatermark.messageOffset).getOrElse(-1L)
}.getOrElse(-1L)
hw
}
/*
* Check if the offset metadata length is valid
*/
private def validateOffsetMetadataLength(metadata: String) : Boolean = {
metadata == null || metadata.length() <= config.maxMetadataSize
}
def shutdown() {
shuttingDown.set(true)
scheduler.shutdown()
// TODO: clear the caches
}
/**
* Gets the partition count of the offsets topic from ZooKeeper.
* If the topic does not exist, the configured partition count is returned.
*/
private def getOffsetsTopicPartitionCount = {
val topic = GroupCoordinator.GroupMetadataTopicName
val topicData = zkUtils.getPartitionAssignmentForTopics(Seq(topic))
if (topicData(topic).nonEmpty)
topicData(topic).size
else
config.offsetsTopicNumPartitions
}
/**
* Add the partition into the owned list
*
* NOTE: this is for test only
*/
def addPartitionOwnership(partition: Int) {
loadingPartitions synchronized {
ownedPartitions.add(partition)
}
}
}
/**
 * Messages stored for the group topic have versions for both the key and value fields. Key
* version is used to indicate the type of the message (also to differentiate different types
* of messages from being compacted together if they have the same field values); and value
* version is used to evolve the messages within their data types:
*
* key version 0: group consumption offset
* -> value version 0: [offset, metadata, timestamp]
*
* key version 1: group consumption offset
* -> value version 1: [offset, metadata, commit_timestamp, expire_timestamp]
*
* key version 2: group metadata
* -> value version 0: [protocol_type, generation, protocol, leader, members]
*/
object GroupMetadataManager {
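  // For example, an offset commit for (group "g", topic "t", partition 0) is keyed with
  // key schema version 0/1 as the tuple (g, t, 0), whereas the metadata record for group
  // "g" uses key schema version 2 with just (g); the distinct key types keep the two
  // kinds of records from being compacted against each other within the same topic.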
private val CURRENT_OFFSET_KEY_SCHEMA_VERSION = 1.toShort
private val CURRENT_GROUP_KEY_SCHEMA_VERSION = 2.toShort
private val OFFSET_COMMIT_KEY_SCHEMA = new Schema(new Field("group", STRING),
new Field("topic", STRING),
new Field("partition", INT32))
private val OFFSET_KEY_GROUP_FIELD = OFFSET_COMMIT_KEY_SCHEMA.get("group")
private val OFFSET_KEY_TOPIC_FIELD = OFFSET_COMMIT_KEY_SCHEMA.get("topic")
private val OFFSET_KEY_PARTITION_FIELD = OFFSET_COMMIT_KEY_SCHEMA.get("partition")
private val OFFSET_COMMIT_VALUE_SCHEMA_V0 = new Schema(new Field("offset", INT64),
new Field("metadata", STRING, "Associated metadata.", ""),
new Field("timestamp", INT64))
private val OFFSET_VALUE_OFFSET_FIELD_V0 = OFFSET_COMMIT_VALUE_SCHEMA_V0.get("offset")
private val OFFSET_VALUE_METADATA_FIELD_V0 = OFFSET_COMMIT_VALUE_SCHEMA_V0.get("metadata")
private val OFFSET_VALUE_TIMESTAMP_FIELD_V0 = OFFSET_COMMIT_VALUE_SCHEMA_V0.get("timestamp")
private val OFFSET_COMMIT_VALUE_SCHEMA_V1 = new Schema(new Field("offset", INT64),
new Field("metadata", STRING, "Associated metadata.", ""),
new Field("commit_timestamp", INT64),
new Field("expire_timestamp", INT64))
private val OFFSET_VALUE_OFFSET_FIELD_V1 = OFFSET_COMMIT_VALUE_SCHEMA_V1.get("offset")
private val OFFSET_VALUE_METADATA_FIELD_V1 = OFFSET_COMMIT_VALUE_SCHEMA_V1.get("metadata")
private val OFFSET_VALUE_COMMIT_TIMESTAMP_FIELD_V1 = OFFSET_COMMIT_VALUE_SCHEMA_V1.get("commit_timestamp")
private val OFFSET_VALUE_EXPIRE_TIMESTAMP_FIELD_V1 = OFFSET_COMMIT_VALUE_SCHEMA_V1.get("expire_timestamp")
private val GROUP_METADATA_KEY_SCHEMA = new Schema(new Field("group", STRING))
private val GROUP_KEY_GROUP_FIELD = GROUP_METADATA_KEY_SCHEMA.get("group")
private val MEMBER_METADATA_V0 = new Schema(new Field("member_id", STRING),
new Field("client_id", STRING),
new Field("client_host", STRING),
new Field("session_timeout", INT32),
new Field("subscription", BYTES),
new Field("assignment", BYTES))
private val MEMBER_METADATA_MEMBER_ID_V0 = MEMBER_METADATA_V0.get("member_id")
private val MEMBER_METADATA_CLIENT_ID_V0 = MEMBER_METADATA_V0.get("client_id")
private val MEMBER_METADATA_CLIENT_HOST_V0 = MEMBER_METADATA_V0.get("client_host")
private val MEMBER_METADATA_SESSION_TIMEOUT_V0 = MEMBER_METADATA_V0.get("session_timeout")
private val MEMBER_METADATA_SUBSCRIPTION_V0 = MEMBER_METADATA_V0.get("subscription")
private val MEMBER_METADATA_ASSIGNMENT_V0 = MEMBER_METADATA_V0.get("assignment")
private val GROUP_METADATA_VALUE_SCHEMA_V0 = new Schema(new Field("protocol_type", STRING),
new Field("generation", INT32),
new Field("protocol", STRING),
new Field("leader", STRING),
new Field("members", new ArrayOf(MEMBER_METADATA_V0)))
private val GROUP_METADATA_PROTOCOL_TYPE_V0 = GROUP_METADATA_VALUE_SCHEMA_V0.get("protocol_type")
private val GROUP_METADATA_GENERATION_V0 = GROUP_METADATA_VALUE_SCHEMA_V0.get("generation")
private val GROUP_METADATA_PROTOCOL_V0 = GROUP_METADATA_VALUE_SCHEMA_V0.get("protocol")
private val GROUP_METADATA_LEADER_V0 = GROUP_METADATA_VALUE_SCHEMA_V0.get("leader")
private val GROUP_METADATA_MEMBERS_V0 = GROUP_METADATA_VALUE_SCHEMA_V0.get("members")
// map of versions to key schemas as data types
private val MESSAGE_TYPE_SCHEMAS = Map(
0 -> OFFSET_COMMIT_KEY_SCHEMA,
1 -> OFFSET_COMMIT_KEY_SCHEMA,
2 -> GROUP_METADATA_KEY_SCHEMA)
// map of versions to offset value schemas
private val OFFSET_VALUE_SCHEMAS = Map(
0 -> OFFSET_COMMIT_VALUE_SCHEMA_V0,
1 -> OFFSET_COMMIT_VALUE_SCHEMA_V1)
private val CURRENT_OFFSET_VALUE_SCHEMA_VERSION = 1.toShort
// map of versions to group metadata value schemas
private val GROUP_VALUE_SCHEMAS = Map(0 -> GROUP_METADATA_VALUE_SCHEMA_V0)
private val CURRENT_GROUP_VALUE_SCHEMA_VERSION = 0.toShort
private val CURRENT_OFFSET_KEY_SCHEMA = schemaForKey(CURRENT_OFFSET_KEY_SCHEMA_VERSION)
private val CURRENT_GROUP_KEY_SCHEMA = schemaForKey(CURRENT_GROUP_KEY_SCHEMA_VERSION)
private val CURRENT_OFFSET_VALUE_SCHEMA = schemaForOffset(CURRENT_OFFSET_VALUE_SCHEMA_VERSION)
private val CURRENT_GROUP_VALUE_SCHEMA = schemaForGroup(CURRENT_GROUP_VALUE_SCHEMA_VERSION)
private def schemaForKey(version: Int) = {
val schemaOpt = MESSAGE_TYPE_SCHEMAS.get(version)
schemaOpt match {
case Some(schema) => schema
case _ => throw new KafkaException("Unknown message key schema version " + version)
}
}
private def schemaForOffset(version: Int) = {
val schemaOpt = OFFSET_VALUE_SCHEMAS.get(version)
schemaOpt match {
case Some(schema) => schema
case _ => throw new KafkaException("Unknown offset schema version " + version)
}
}
private def schemaForGroup(version: Int) = {
val schemaOpt = GROUP_VALUE_SCHEMAS.get(version)
schemaOpt match {
case Some(schema) => schema
case _ => throw new KafkaException("Unknown group metadata version " + version)
}
}
/**
* Generates the key for offset commit message for given (group, topic, partition)
*
* @return key for offset commit message
*/
private def offsetCommitKey(group: String, topic: String, partition: Int, versionId: Short = 0): Array[Byte] = {
val key = new Struct(CURRENT_OFFSET_KEY_SCHEMA)
key.set(OFFSET_KEY_GROUP_FIELD, group)
key.set(OFFSET_KEY_TOPIC_FIELD, topic)
key.set(OFFSET_KEY_PARTITION_FIELD, partition)
val byteBuffer = ByteBuffer.allocate(2 /* version */ + key.sizeOf)
byteBuffer.putShort(CURRENT_OFFSET_KEY_SCHEMA_VERSION)
key.writeTo(byteBuffer)
byteBuffer.array()
}
/**
* Generates the key for group metadata message for given group
*
* @return key bytes for group metadata message
*/
private def groupMetadataKey(group: String): Array[Byte] = {
val key = new Struct(CURRENT_GROUP_KEY_SCHEMA)
key.set(GROUP_KEY_GROUP_FIELD, group)
val byteBuffer = ByteBuffer.allocate(2 /* version */ + key.sizeOf)
byteBuffer.putShort(CURRENT_GROUP_KEY_SCHEMA_VERSION)
key.writeTo(byteBuffer)
byteBuffer.array()
}
/**
* Generates the payload for offset commit message from given offset and metadata
*
* @param offsetAndMetadata consumer's current offset and metadata
* @return payload for offset commit message
*/
private def offsetCommitValue(offsetAndMetadata: OffsetAndMetadata): Array[Byte] = {
// generate commit value with schema version 1
val value = new Struct(CURRENT_OFFSET_VALUE_SCHEMA)
value.set(OFFSET_VALUE_OFFSET_FIELD_V1, offsetAndMetadata.offset)
value.set(OFFSET_VALUE_METADATA_FIELD_V1, offsetAndMetadata.metadata)
value.set(OFFSET_VALUE_COMMIT_TIMESTAMP_FIELD_V1, offsetAndMetadata.commitTimestamp)
value.set(OFFSET_VALUE_EXPIRE_TIMESTAMP_FIELD_V1, offsetAndMetadata.expireTimestamp)
val byteBuffer = ByteBuffer.allocate(2 /* version */ + value.sizeOf)
byteBuffer.putShort(CURRENT_OFFSET_VALUE_SCHEMA_VERSION)
value.writeTo(byteBuffer)
byteBuffer.array()
}
/**
* Generates the payload for the group metadata message from the given group metadata
* and member assignments, assuming the generation id, selected protocol, leader and
* member assignments are all available
*
* @param groupMetadata current group metadata, including generation, protocol and leader
* @param assignment map of member id to its serialized assignment bytes
* @return payload for group metadata message
*/
private def groupMetadataValue(groupMetadata: GroupMetadata, assignment: Map[String, Array[Byte]]): Array[Byte] = {
// generate group metadata value with schema version 0
val value = new Struct(CURRENT_GROUP_VALUE_SCHEMA)
value.set(GROUP_METADATA_PROTOCOL_TYPE_V0, groupMetadata.protocolType)
value.set(GROUP_METADATA_GENERATION_V0, groupMetadata.generationId)
value.set(GROUP_METADATA_PROTOCOL_V0, groupMetadata.protocol)
value.set(GROUP_METADATA_LEADER_V0, groupMetadata.leaderId)
val memberArray = groupMetadata.allMemberMetadata.map {
case memberMetadata =>
val memberStruct = value.instance(GROUP_METADATA_MEMBERS_V0)
memberStruct.set(MEMBER_METADATA_MEMBER_ID_V0, memberMetadata.memberId)
memberStruct.set(MEMBER_METADATA_CLIENT_ID_V0, memberMetadata.clientId)
memberStruct.set(MEMBER_METADATA_CLIENT_HOST_V0, memberMetadata.clientHost)
memberStruct.set(MEMBER_METADATA_SESSION_TIMEOUT_V0, memberMetadata.sessionTimeoutMs)
val metadata = memberMetadata.metadata(groupMetadata.protocol)
memberStruct.set(MEMBER_METADATA_SUBSCRIPTION_V0, ByteBuffer.wrap(metadata))
val memberAssignment = assignment(memberMetadata.memberId)
assert(memberAssignment != null)
memberStruct.set(MEMBER_METADATA_ASSIGNMENT_V0, ByteBuffer.wrap(memberAssignment))
memberStruct
}
value.set(GROUP_METADATA_MEMBERS_V0, memberArray.toArray)
val byteBuffer = ByteBuffer.allocate(2 /* version */ + value.sizeOf)
byteBuffer.putShort(CURRENT_GROUP_VALUE_SCHEMA_VERSION)
value.writeTo(byteBuffer)
byteBuffer.array()
}
/**
* Decodes the key of a message stored in the group metadata topic
*
* @param buffer input byte-buffer
* @return a BaseKey: an OffsetKey for versions 0 and 1, or a GroupMetadataKey for version 2
*/
private def readMessageKey(buffer: ByteBuffer): BaseKey = {
val version = buffer.getShort
val keySchema = schemaForKey(version)
val key = keySchema.read(buffer).asInstanceOf[Struct]
if (version <= CURRENT_OFFSET_KEY_SCHEMA_VERSION) {
// versions 0 and 1 refer to offset commits
val group = key.get(OFFSET_KEY_GROUP_FIELD).asInstanceOf[String]
val topic = key.get(OFFSET_KEY_TOPIC_FIELD).asInstanceOf[String]
val partition = key.get(OFFSET_KEY_PARTITION_FIELD).asInstanceOf[Int]
OffsetKey(version, GroupTopicPartition(group, TopicAndPartition(topic, partition)))
} else if (version == CURRENT_GROUP_KEY_SCHEMA_VERSION) {
// version 2 refers to group metadata
val group = key.get(GROUP_KEY_GROUP_FIELD).asInstanceOf[String]
GroupMetadataKey(version, group)
} else {
throw new IllegalStateException("Unknown version " + version + " for group metadata message")
}
}
/**
* Decodes the offset messages' payload and retrieves offset and metadata from it
*
* @param buffer input byte-buffer
* @return an offset-metadata object from the message
*/
private def readOffsetMessageValue(buffer: ByteBuffer): OffsetAndMetadata = {
if (buffer == null) { // tombstone
null
} else {
val version = buffer.getShort
val valueSchema = schemaForOffset(version)
val value = valueSchema.read(buffer).asInstanceOf[Struct]
if (version == 0) {
val offset = value.get(OFFSET_VALUE_OFFSET_FIELD_V0).asInstanceOf[Long]
val metadata = value.get(OFFSET_VALUE_METADATA_FIELD_V0).asInstanceOf[String]
val timestamp = value.get(OFFSET_VALUE_TIMESTAMP_FIELD_V0).asInstanceOf[Long]
OffsetAndMetadata(offset, metadata, timestamp)
} else if (version == 1) {
val offset = value.get(OFFSET_VALUE_OFFSET_FIELD_V1).asInstanceOf[Long]
val metadata = value.get(OFFSET_VALUE_METADATA_FIELD_V1).asInstanceOf[String]
val commitTimestamp = value.get(OFFSET_VALUE_COMMIT_TIMESTAMP_FIELD_V1).asInstanceOf[Long]
val expireTimestamp = value.get(OFFSET_VALUE_EXPIRE_TIMESTAMP_FIELD_V1).asInstanceOf[Long]
OffsetAndMetadata(offset, metadata, commitTimestamp, expireTimestamp)
} else {
throw new IllegalStateException("Unknown offset message version")
}
}
}
/**
* Decodes the group metadata messages' payload and retrieves its member metadata from it
*
* @param buffer input byte-buffer
* @return a group metadata object from the message
*/
private def readGroupMessageValue(groupId: String, buffer: ByteBuffer): GroupMetadata = {
if (buffer == null) { // tombstone
null
} else {
val version = buffer.getShort
val valueSchema = schemaForGroup(version)
val value = valueSchema.read(buffer).asInstanceOf[Struct]
if (version == 0) {
val protocolType = value.get(GROUP_METADATA_PROTOCOL_TYPE_V0).asInstanceOf[String]
val group = new GroupMetadata(groupId, protocolType)
group.generationId = value.get(GROUP_METADATA_GENERATION_V0).asInstanceOf[Int]
group.leaderId = value.get(GROUP_METADATA_LEADER_V0).asInstanceOf[String]
group.protocol = value.get(GROUP_METADATA_PROTOCOL_V0).asInstanceOf[String]
value.getArray(GROUP_METADATA_MEMBERS_V0).foreach {
case memberMetadataObj =>
val memberMetadata = memberMetadataObj.asInstanceOf[Struct]
val memberId = memberMetadata.get(MEMBER_METADATA_MEMBER_ID_V0).asInstanceOf[String]
val clientId = memberMetadata.get(MEMBER_METADATA_CLIENT_ID_V0).asInstanceOf[String]
val clientHost = memberMetadata.get(MEMBER_METADATA_CLIENT_HOST_V0).asInstanceOf[String]
val sessionTimeout = memberMetadata.get(MEMBER_METADATA_SESSION_TIMEOUT_V0).asInstanceOf[Int]
val subscription = Utils.toArray(memberMetadata.get(MEMBER_METADATA_SUBSCRIPTION_V0).asInstanceOf[ByteBuffer])
val member = new MemberMetadata(memberId, groupId, clientId, clientHost, sessionTimeout,
List((group.protocol, subscription)))
member.assignment = Utils.toArray(memberMetadata.get(MEMBER_METADATA_ASSIGNMENT_V0).asInstanceOf[ByteBuffer])
group.add(memberId, member)
}
group
} else {
throw new IllegalStateException("Unknown group metadata message version")
}
}
}
// Formatter for use with tools such as the console consumer: the consumer should also set exclude.internal.topics to false.
// (specify --formatter "kafka.coordinator.GroupMetadataManager\$OffsetsMessageFormatter" when consuming __consumer_offsets)
class OffsetsMessageFormatter extends MessageFormatter {
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream) {
val formattedKey = if (key == null) "NULL" else GroupMetadataManager.readMessageKey(ByteBuffer.wrap(key))
// only print if the message is an offset record
if (formattedKey.isInstanceOf[OffsetKey]) {
val groupTopicPartition = formattedKey.asInstanceOf[OffsetKey].toString
val formattedValue = if (value == null) "NULL" else GroupMetadataManager.readOffsetMessageValue(ByteBuffer.wrap(value)).toString
output.write(groupTopicPartition.getBytes)
output.write("::".getBytes)
output.write(formattedValue.getBytes)
output.write("\\n".getBytes)
}
}
}
// Formatter for use with tools to read group metadata history
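// (e.g. --formatter "kafka.coordinator.GroupMetadataManager\$GroupMetadataMessageFormatter" when consuming
// __consumer_offsets; invocation shown by analogy with the offsets formatter above)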
class GroupMetadataMessageFormatter extends MessageFormatter {
def writeTo(key: Array[Byte], value: Array[Byte], output: PrintStream) {
val formattedKey = if (key == null) "NULL" else GroupMetadataManager.readMessageKey(ByteBuffer.wrap(key))
// only print if the message is a group metadata record
if (formattedKey.isInstanceOf[GroupMetadataKey]) {
val groupId = formattedKey.asInstanceOf[GroupMetadataKey].key
val formattedValue = if (value == null) "NULL" else GroupMetadataManager.readGroupMessageValue(groupId, ByteBuffer.wrap(value)).toString
output.write(groupId.getBytes)
output.write("::".getBytes)
output.write(formattedValue.getBytes)
output.write("\\n".getBytes)
}
}
}
}
case class GroupTopicPartition(group: String, topicPartition: TopicAndPartition) {
def this(group: String, topic: String, partition: Int) =
this(group, new TopicAndPartition(topic, partition))
override def toString =
"[%s,%s,%d]".format(group, topicPartition.topic, topicPartition.partition)
}
trait BaseKey {
def version: Short
def key: Object
}
case class OffsetKey(version: Short, key: GroupTopicPartition) extends BaseKey {
override def toString = key.toString
}
case class GroupMetadataKey(version: Short, key: String) extends BaseKey {
override def toString = key
}
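// A minimal, self-contained sketch of the wire convention shared by all of the encoders and
// decoders above: a 2-byte schema version followed by the struct payload. Illustrative only,
// not part of the Kafka source; the STRING field is hand-rolled here as a short length prefix
// plus UTF-8 bytes instead of going through the Schema/Struct machinery.
object VersionedValueSketch extends App {
  import java.nio.ByteBuffer
  import java.nio.charset.StandardCharsets

  // Frame a version-1 offset commit value: [offset, metadata, commit_timestamp, expire_timestamp]
  def writeV1(offset: Long, metadata: String, commitTs: Long, expireTs: Long): Array[Byte] = {
    val metaBytes = metadata.getBytes(StandardCharsets.UTF_8)
    val buf = ByteBuffer.allocate(2 + 8 + 2 + metaBytes.length + 8 + 8)
    buf.putShort(1.toShort) // schema version, cf. CURRENT_OFFSET_VALUE_SCHEMA_VERSION
    buf.putLong(offset)
    buf.putShort(metaBytes.length.toShort) // simplified length prefix for the STRING field
    buf.put(metaBytes)
    buf.putLong(commitTs)
    buf.putLong(expireTs)
    buf.array()
  }

  // Read it back: dispatch on the leading version short first, as readOffsetMessageValue does.
  def readV1(bytes: Array[Byte]): (Long, String, Long, Long) = {
    val buf = ByteBuffer.wrap(bytes)
    require(buf.getShort == 1, "unknown offset value schema version")
    val offset = buf.getLong
    val metaBytes = new Array[Byte](buf.getShort)
    buf.get(metaBytes)
    (offset, new String(metaBytes, StandardCharsets.UTF_8), buf.getLong, buf.getLong)
  }

  println(readV1(writeV1(42L, "checkpoint", 1000L, 2000L))) // (42,checkpoint,1000,2000)
}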
| eljefe6a/kafka | core/src/main/scala/kafka/coordinator/GroupMetadataManager.scala | Scala | apache-2.0 | 44,419 |
package services.free
import cats.free.{Free, Inject}
import cats.~>
import monix.eval.Task
object DataOps {
sealed trait DSL[A]
final case class Add(value: String) extends DSL[Option[String]]
final case class FindAll() extends DSL[List[String]]
}
final class DataOpService[F[_]](implicit I: Inject[DataOps.DSL, F]) {
import DataOps._
def add(value: String): Free[F, Option[String]] = Free.inject[DSL, F](Add(value))
def findAll: Free[F, List[String]] = Free.inject[DSL, F](FindAll())
}
object DataOpService {
implicit def dataOps[F[_]](implicit I: Inject[DataOps.DSL, F]): DataOpService[F] = new DataOpService[F]
}
final class InMemoryDataOpInterpreter extends (DataOps.DSL ~> Task) {
import DataOps._
private[this] val storage = new scala.collection.mutable.HashSet[String]
def apply[A](d: DSL[A]) = d match {
case Add(a) => Task { if (storage.add(a)) Some(a) else None }
case FindAll() => Task { storage.toList.sorted }
}
}
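// A short usage sketch (hypothetical; DataOpExample is not part of the sample) showing how the
// injected DSL composes into a program and is then folded into Task by the in-memory interpreter.
// Assumes a cats Monad[Task] instance is in scope for foldMap (e.g. via monix.cats._ in the
// monix 2.x era this sample targets) and that the Free/Task imports at the top of this file apply.
object DataOpExample extends App {
  import monix.cats._ // assumed: supplies Monad[Task] for foldMap
  import monix.execution.Scheduler.Implicits.global
  import scala.concurrent.Await
  import scala.concurrent.duration._

  // With F fixed to the DSL itself, the implicit Inject[DSL, DSL] is the identity injection.
  val ops = new DataOpService[DataOps.DSL]

  // A pure description of the computation: nothing executes until an interpreter folds it into Task.
  val program: Free[DataOps.DSL, List[String]] =
    for {
      _   <- ops.add("a")
      _   <- ops.add("b")
      all <- ops.findAll
    } yield all

  val task: Task[List[String]] = program.foldMap(new InMemoryDataOpInterpreter)
  // Blocking is fine for a demo, but not something to do in production code.
  println(Await.result(task.runAsync, 5.seconds)) // List(a, b)
}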
| radusw/tagless-free-monix-sample | src/main/scala/services/free/DataOpService.scala | Scala | apache-2.0 | 964 |