code | repo_name | path | language | license | size
---|---|---|---|---|---
package kornell.server.ws.rs.exception
import javax.ws.rs.core._
import javax.ws.rs.ext._
import kornell.core.error.exception.ServerErrorException
@Provider
class ServerErrorMapper extends ExceptionMapper[ServerErrorException] {
override def toResponse(see: ServerErrorException): Response = {
if (see.getCause != null)
ExceptionMapperHelper.handleError(500, see.getMessageKey, see.getCause.getMessage)
else
ExceptionMapperHelper.handleError(500, see.getMessageKey)
}
}
| Craftware/Kornell | kornell-api/src/main/scala/kornell/server/ws/rs/exception/ServerErrorMapper.scala | Scala | apache-2.0 | 496 |
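`ExceptionMapperHelper` is project-specific and not shown above. A hypothetical sketch of what its `handleError` might look like, assuming it just builds a plain JAX-RS `Response` (the JSON body format here is an assumption, not the project's actual output):

```scala
import javax.ws.rs.core.{MediaType, Response}

// Hypothetical reconstruction; the real helper's response format may differ.
object ExceptionMapperHelper {
  def handleError(status: Int, messageKey: String, message: String = ""): Response =
    Response.status(status)
      .`type`(MediaType.APPLICATION_JSON)
      .entity(s"""{"messageKey":"$messageKey","message":"$message"}""")
      .build()
}
```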
package cpup.mc.lib.inspecting
import cpup.mc.lib.network.CPupMessage
import cpup.mc.lib.util.Side
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.network.PacketBuffer
class RequestMessage(val typ: String, val id: List[Data]) extends CPupMessage[AnyRef] {
def this(player: EntityPlayer, buf: PacketBuffer, data: AnyRef) = {
this(
buf.readBytes(buf.readInt).toString(Registry.charset),
(0 until buf.readInt).map(v => Registry.readFromByteBuf(buf)).toList
)
}
override def writeTo(buf: PacketBuffer) {
val bytes = typ.getBytes(Registry.charset)
buf.writeInt(bytes.length)
buf.writeBytes(bytes)
buf.writeInt(id.size)
for(data <- id) Registry.writeToByteBuf(data, buf)
}
override def handle(data: AnyRef) = {
if(Side.effective.isServer)
Some(new ResponseMessage((typ, id).hashCode, Registry.get(typ, id: _*)))
else
None
}
}
| CoderPuppy/cpup-mc | src/main/scala/cpup/mc/lib/inspecting/RequestMessage.scala | Scala | mit | 881 |
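`RequestMessage` uses a simple length-prefixed wire format: an `Int` byte count followed by the encoded `typ` string, then an `Int` element count followed by the serialized `id` entries. A self-contained sketch of the same layout using plain `java.nio` (UTF-8 stands in for `Registry.charset`; no Minecraft types needed):

```scala
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets

object WireFormatSketch extends App {
  val typ = "inspect" // example type tag
  val bytes = typ.getBytes(StandardCharsets.UTF_8)

  val buf = ByteBuffer.allocate(4 + bytes.length + 4)
  buf.putInt(bytes.length) // length prefix
  buf.put(bytes)           // string payload
  buf.putInt(0)            // id list size (empty here)
  buf.flip()

  // The read side mirrors the write side.
  val read = new Array[Byte](buf.getInt)
  buf.get(read)
  assert(new String(read, StandardCharsets.UTF_8) == typ)
  assert(buf.getInt == 0)
}
```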
package org.openurp.edu.eams.teach.planaudit.service
import org.openurp.edu.teach.planaudit.GroupAuditResult
import org.openurp.edu.teach.plan.CourseGroup
import org.openurp.edu.teach.plan.PlanCourse
trait PlanAuditListener {
def startPlanAudit(context: PlanAuditContext): Boolean
def startGroupAudit(context: PlanAuditContext, courseGroup: CourseGroup, groupResult: GroupAuditResult): Boolean
def startCourseAudit(context: PlanAuditContext, groupResult: GroupAuditResult, planCourse: PlanCourse): Boolean
def endPlanAudit(context: PlanAuditContext): Unit
}
| openurp/edu-eams-webapp | core/src/main/scala/org/openurp/edu/eams/teach/planaudit/service/PlanAuditListener.scala | Scala | gpl-3.0 | 572 |
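A minimal sketch of an implementation (the class name is hypothetical; returning `false` from a `start*` hook presumably vetoes that stage, as is usual for boolean listener callbacks):

```scala
import org.openurp.edu.teach.planaudit.GroupAuditResult
import org.openurp.edu.teach.plan.{CourseGroup, PlanCourse}

class LoggingPlanAuditListener extends PlanAuditListener {
  def startPlanAudit(context: PlanAuditContext): Boolean = true
  def startGroupAudit(context: PlanAuditContext, courseGroup: CourseGroup,
                      groupResult: GroupAuditResult): Boolean = true
  def startCourseAudit(context: PlanAuditContext, groupResult: GroupAuditResult,
                       planCourse: PlanCourse): Boolean = true
  def endPlanAudit(context: PlanAuditContext): Unit =
    println("plan audit finished")
}
```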
/*
* # Trove
*
* This file is part of Trove - A FREE desktop budgeting application that
* helps you track your finances, FREES you from complex budgeting, and
* enables you to build your TROVE of savings!
*
* Copyright © 2016-2019 Eric John Fredericks.
*
* Trove is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Trove is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Trove. If not, see <http://www.gnu.org/licenses/>.
*/
package trove.ui.tracking
import trove.core.infrastructure.event
import trove.events._
import trove.models.{Account, AccountParent}
import trove.services.AccountsService
import trove.ui._
// The accounts view. We use a tree table view to get the account name column, although we do
// disable user sorting of the data.
private[tracking] class MainAccountsView(override val eventSubscriberGroup: Int, accountsService: AccountsService) extends AccountTreeTableView(
promptUserWithError(accountsService.getAllAccounts).toOption.getOrElse(Seq.empty)
) with UIEventListener {
override def onReceive: PartialFunction[event.Event, Unit] = {
case ItemAdded(_, account: Account) =>
addAccount(account)
case ItemUpdated(_, account: Account) =>
updateAccount(account)
case AccountParentChanged(id, oldParent, newParent) =>
updateParent(id, oldParent, newParent)
case ItemDeleted(_, account: Account) =>
deleteAccount(account.id.get, account.parentAccountId.map[AccountParent](Right(_)).getOrElse(Left(account.accountType)))
case ProjectChanged(_) =>
unsubscribe() // ejf-fixMe: Unsubscribe group! Might be a good way to quickly remove subscriptions for all things related to a project.
}
private[this] def addAccount(account: Account): Unit = {
val accountItem = new AccountItem(new AccountView(account))
account.parentAccountId match {
case Some(parentAccountId) =>
accountItemsByAccountId.get(parentAccountId).children += accountItem
case None =>
accountTypeItemsByAccountType.get(account.accountType).children += accountItem
}
accountItemsByAccountId.put(account.id.get, accountItem)
}
private[this] def updateAccount(account: Account): Unit =
accountItemsByAccountId.get(account.id.get).update(account)
private[this] def updateParent(accountId: Int, oldParent: AccountParent, newParent: AccountParent): Unit = {
val accountItem = accountItemsByAccountId.get(accountId)
oldParent match {
case Left(accountType) =>
accountTypeItemsByAccountType.get(accountType).children -= accountItem
case Right(oldParentId) =>
accountItemsByAccountId.get(oldParentId).children -= accountItem
}
newParent match {
case Left(accountType) =>
accountTypeItemsByAccountType.get(accountType).children += accountItem
case Right(newParentId) =>
accountItemsByAccountId.get(newParentId).children += accountItem
}
}
private[this] def deleteAccount(accountId: Long, parent: AccountParent): Unit = {
val accountItem = accountItemsByAccountId.get(accountId)
accountItem.accountView.account.parentAccountId match {
case Some(id) =>
accountItemsByAccountId.get(id).children -= accountItem
case None =>
accountTypeItemsByAccountType.get(accountItem.accountView.account.accountType).children -= accountItem
}
accountItemsByAccountId.remove(accountId)
}
}
| emanchgo/budgetfree | src/main/scala/trove/ui/tracking/MainAccountsView.scala | Scala | gpl-3.0 | 3,871 |
package cart
import akka.persistence.fsm.PersistentFSM
sealed trait CartState extends PersistentFSM.FSMState {
override def identifier: String = this.getClass.getName
}
case object Empty extends CartState
case object NonEmpty extends CartState
case object InCheckout extends CartState
| apisarek/reactivescala-course | src/main/scala/cart/CartState.scala | Scala | mit | 291 |
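Since `identifier` delegates to `getClass.getName`, each case object's identifier carries its package and a trailing `$`. A quick sketch, e.g. in a REPL after `import cart._`:

```scala
// For a case object, getClass.getName yields e.g. "cart.Empty$".
assert(Empty.identifier == "cart.Empty$")
assert(InCheckout.identifier == "cart.InCheckout$")
```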
package cakesolutions
import akka.actor.ActorSystem
import akka.testkit.{TestKit, TestActorRef}
import org.scalacheck._
import org.specs2.mutable.SpecificationLike
import org.specs2.ScalaCheck
class WorkerSpec extends TestKit(ActorSystem()) with SpecificationLike with ScalaCheck with Generators {
import Messages._
"Worker actor" should {
"correctly collate single Word messages" ! Prop.forAll(WordGenerator, Gen.posNum[Int], Gen.posNum[Int]) {
case (value, n, m) =>
val worker = TestActorRef(new Worker with ResultView)
worker ! Word(value, n, m)
val word = value.toLowerCase()
val frequency = worker.underlyingActor.frequency
// Combine both labeled checks with && so neither property is silently discarded.
(frequency.keySet.contains(word) :|
"ERROR: failed to find word in frequency data structure") &&
(frequency(word).contains((n, m)) :|
"ERROR: failed to save word co-ordinates in frequency data structure")
}
"correctly collate multiple Word messages" ! Prop.forAll(Gen.listOf(MessageGenerator)) { messages =>
val worker = TestActorRef(new Worker with ResultView)
for (word <- messages) {
worker ! word
}
val frequency = worker.underlyingActor.frequency
// Combine both labeled checks with && so neither property is silently discarded.
((frequency.keySet.size == messages.distinct.size) :|
s"ERROR: not all expected Word messages ($messages) are present in the frequency data structure - $frequency") &&
((frequency.mapValues(_.size) == messages.groupBy(_.value.toLowerCase).mapValues(_.distinct.size)) :|
s"ERROR: not all grouped word co-ordinate values (${messages.groupBy(_.value.toLowerCase)}) are present in the frequency data structure - $frequency")
}
}
}
| carlpulley/concordance | src/test/scala/cakesolutions/WorkerSpec.scala | Scala | gpl-2.0 | 1,664 |
// scalac: -Werror -Xlint -nowarn
//
// nowarn should mean no warnings are emitted,
// irrespective of other flags, and also no
// warnings should be summarized.
//
class C {
def f = 1 → 2
def g: Unit = 1
}
| lrytz/scala | test/files/pos/t11952.scala | Scala | apache-2.0 | 213 |
package de.fosd.typechef.parser.test.parsers
import de.fosd.typechef.parser._
import de.fosd.typechef.featureexpr.{FeatureExpr, FeatureExprFactory}
import de.fosd.typechef.error.Position
class MyToken(val text: String, val feature: FeatureExpr) extends ProfilingToken {
def t() = text
def getText = text
def getFeature = feature
def getPosition = new Position {
def getFile = "stream"
def getLine = 1
def getColumn = 1
}
override def toString = "\"" + text + "\"" + (if (!feature.isTautology()) feature else "")
def isInteger: Boolean = false
def isIdentifier: Boolean = false
def isString: Boolean = false
def isCharacter: Boolean = false
}
object EofToken extends MyToken("EOF", FeatureExprFactory.True)
| ckaestne/TypeChef | ParserFramework/src/test/scala/de/fosd/typechef/parser/test/parsers/MyToken.scala | Scala | lgpl-3.0 | 751 |
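A small usage sketch against the API above; `toString` drops the feature suffix when the feature is a tautology:

```scala
import de.fosd.typechef.featureexpr.FeatureExprFactory
import de.fosd.typechef.parser.test.parsers.MyToken

object MyTokenSketch extends App {
  val tok = new MyToken("foo", FeatureExprFactory.True)
  assert(tok.getText == "foo" && !tok.isIdentifier)
  println(tok) // prints "foo" in quotes, with no feature suffix since True is a tautology
}
```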
package chapter08
class Point(x: Double, y: Double)
class LabeledPoint(label: String, x: Double, y: Double) extends Point(x, y)
object Exercise05 {
def main(args: Array[String]) {
val point = new LabeledPoint("Black Thursday", 1929, 23007)
println(point)
}
}
| paul-reiners/scala-for-the-impatient-solutions | src/main/scala/exercises/chapter08/Exercise05.scala | Scala | apache-2.0 | 273 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.util.Locale
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.JavaConverters._
import scala.util.Random
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkConf
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.BaseRelation
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.util.Utils
abstract class KafkaRelationSuiteBase extends QueryTest with SharedSparkSession with KafkaTest {
import testImplicits._
private val topicId = new AtomicInteger(0)
protected var testUtils: KafkaTestUtils = _
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "kafka")
protected def newTopic(): String = s"topic-${topicId.getAndIncrement()}"
override def beforeAll(): Unit = {
super.beforeAll()
testUtils = new KafkaTestUtils
testUtils.setup()
}
override def afterAll(): Unit = {
try {
if (testUtils != null) {
testUtils.teardown()
testUtils = null
}
} finally {
super.afterAll()
}
}
protected def createDF(
topic: String,
withOptions: Map[String, String] = Map.empty[String, String],
brokerAddress: Option[String] = None) = {
val df = spark
.read
.format("kafka")
.option("kafka.bootstrap.servers",
brokerAddress.getOrElse(testUtils.brokerAddress))
.option("subscribe", topic)
withOptions.foreach {
case (key, value) => df.option(key, value)
}
df.load().selectExpr("CAST(value AS STRING)")
}
test("explicit earliest to latest offsets") {
val topic = newTopic()
testUtils.createTopic(topic, partitions = 3)
testUtils.sendMessages(topic, (0 to 9).map(_.toString).toArray, Some(0))
testUtils.sendMessages(topic, (10 to 19).map(_.toString).toArray, Some(1))
testUtils.sendMessages(topic, Array("20"), Some(2))
// Specify explicit earliest and latest offset values
val df = createDF(topic,
withOptions = Map("startingOffsets" -> "earliest", "endingOffsets" -> "latest"))
checkAnswer(df, (0 to 20).map(_.toString).toDF)
// "latest" should late bind to the current (latest) offset in the df
testUtils.sendMessages(topic, (21 to 29).map(_.toString).toArray, Some(2))
checkAnswer(df, (0 to 29).map(_.toString).toDF)
}
test("default starting and ending offsets") {
val topic = newTopic()
testUtils.createTopic(topic, partitions = 3)
testUtils.sendMessages(topic, (0 to 9).map(_.toString).toArray, Some(0))
testUtils.sendMessages(topic, (10 to 19).map(_.toString).toArray, Some(1))
testUtils.sendMessages(topic, Array("20"), Some(2))
// Implicit offset values, should default to earliest and latest
val df = createDF(topic)
// Test that we default to "earliest" and "latest"
checkAnswer(df, (0 to 20).map(_.toString).toDF)
}
test("explicit offsets") {
val topic = newTopic()
testUtils.createTopic(topic, partitions = 3)
testUtils.sendMessages(topic, (0 to 9).map(_.toString).toArray, Some(0))
testUtils.sendMessages(topic, (10 to 19).map(_.toString).toArray, Some(1))
testUtils.sendMessages(topic, Array("20"), Some(2))
// Test explicitly specified offsets
val startPartitionOffsets = Map(
new TopicPartition(topic, 0) -> -2L, // -2 => earliest
new TopicPartition(topic, 1) -> -2L,
new TopicPartition(topic, 2) -> 0L // explicit earliest
)
val startingOffsets = JsonUtils.partitionOffsets(startPartitionOffsets)
val endPartitionOffsets = Map(
new TopicPartition(topic, 0) -> -1L, // -1 => latest
new TopicPartition(topic, 1) -> -1L,
new TopicPartition(topic, 2) -> 1L // explicit offset happens to = the latest
)
val endingOffsets = JsonUtils.partitionOffsets(endPartitionOffsets)
val df = createDF(topic,
withOptions = Map("startingOffsets" -> startingOffsets, "endingOffsets" -> endingOffsets))
checkAnswer(df, (0 to 20).map(_.toString).toDF)
// static offset partition 2, nothing should change
testUtils.sendMessages(topic, (31 to 39).map(_.toString).toArray, Some(2))
checkAnswer(df, (0 to 20).map(_.toString).toDF)
// latest offset partition 1, should change
testUtils.sendMessages(topic, (21 to 30).map(_.toString).toArray, Some(1))
checkAnswer(df, (0 to 30).map(_.toString).toDF)
}
test("reuse same dataframe in query") {
// This test ensures that we do not cache the Kafka Consumer in KafkaRelation
val topic = newTopic()
testUtils.createTopic(topic, partitions = 1)
testUtils.sendMessages(topic, (0 to 10).map(_.toString).toArray, Some(0))
// Specify explicit earliest and latest offset values
val df = createDF(topic,
withOptions = Map("startingOffsets" -> "earliest", "endingOffsets" -> "latest"))
checkAnswer(df.union(df), ((0 to 10) ++ (0 to 10)).map(_.toString).toDF)
}
test("test late binding start offsets") {
// Kafka fails to remove the logs on Windows. See KAFKA-1194.
assume(!Utils.isWindows)
var kafkaUtils: KafkaTestUtils = null
try {
/**
* The following settings will ensure that all log entries
* are removed following a call to cleanupLogs
*/
val brokerProps = Map[String, Object](
"log.retention.bytes" -> 1.asInstanceOf[AnyRef], // retain nothing
"log.retention.ms" -> 1.asInstanceOf[AnyRef] // no wait time
)
kafkaUtils = new KafkaTestUtils(withBrokerProps = brokerProps)
kafkaUtils.setup()
val topic = newTopic()
kafkaUtils.createTopic(topic, partitions = 1)
kafkaUtils.sendMessages(topic, (0 to 9).map(_.toString).toArray, Some(0))
// Specify explicit earliest and latest offset values
val df = createDF(topic,
withOptions = Map("startingOffsets" -> "earliest", "endingOffsets" -> "latest"),
Some(kafkaUtils.brokerAddress))
checkAnswer(df, (0 to 9).map(_.toString).toDF)
// Blow away current set of messages.
kafkaUtils.cleanupLogs()
// Add some more data, but do not call cleanup
kafkaUtils.sendMessages(topic, (10 to 19).map(_.toString).toArray, Some(0))
// Ensure that we late bind to the new starting position
checkAnswer(df, (10 to 19).map(_.toString).toDF)
} finally {
if (kafkaUtils != null) {
kafkaUtils.teardown()
}
}
}
test("bad batch query options") {
def testBadOptions(options: (String, String)*)(expectedMsgs: String*): Unit = {
val ex = intercept[IllegalArgumentException] {
val reader = spark
.read
.format("kafka")
options.foreach { case (k, v) => reader.option(k, v) }
reader.load().collect()
}
expectedMsgs.foreach { m =>
assert(ex.getMessage.toLowerCase(Locale.ROOT).contains(m.toLowerCase(Locale.ROOT)))
}
}
// Specifying an ending offset as the starting point
testBadOptions("startingOffsets" -> "latest")("starting offset can't be latest " +
"for batch queries on Kafka")
// Now do it with an explicit json start offset indicating latest
val startPartitionOffsets = Map( new TopicPartition("t", 0) -> -1L)
val startingOffsets = JsonUtils.partitionOffsets(startPartitionOffsets)
testBadOptions("subscribe" -> "t", "startingOffsets" -> startingOffsets)(
"startingOffsets for t-0 can't be latest for batch queries on Kafka")
// Make sure we catch ending offsets that indicate earliest
testBadOptions("endingOffsets" -> "earliest")("ending offset can't be earliest " +
"for batch queries on Kafka")
// Now do it with an explicit json ending offset indicating earliest
val endPartitionOffsets = Map(new TopicPartition("t", 0) -> -2L)
val endingOffsets = JsonUtils.partitionOffsets(endPartitionOffsets)
testBadOptions("subscribe" -> "t", "endingOffsets" -> endingOffsets)(
"ending offset for t-0 can't be earliest for batch queries on Kafka")
// No strategy specified
testBadOptions()("options must be specified", "subscribe", "subscribePattern")
// Multiple strategies specified
testBadOptions("subscribe" -> "t", "subscribePattern" -> "t.*")(
"only one", "options can be specified")
testBadOptions("subscribe" -> "t", "assign" -> """{"a":[0]}""")(
"only one", "options can be specified")
testBadOptions("assign" -> "")("no topicpartitions to assign")
testBadOptions("subscribe" -> "")("no topics to subscribe")
testBadOptions("subscribePattern" -> "")("pattern to subscribe is empty")
}
test("allow group.id prefix") {
testGroupId("groupIdPrefix", (expected, actual) => {
assert(actual.exists(_.startsWith(expected)) && !actual.exists(_ === expected),
"Valid consumer groups don't contain the expected group id - " +
s"Valid consumer groups: $actual / expected group id: $expected")
})
}
test("allow group.id override") {
testGroupId("kafka.group.id", (expected, actual) => {
assert(actual.exists(_ === expected), "Valid consumer groups don't " +
s"contain the expected group id - Valid consumer groups: $actual / " +
s"expected group id: $expected")
})
}
private def testGroupId(groupIdKey: String, validateGroupId: (String, Iterable[String]) => Unit) {
// Tests code path KafkaSourceProvider.createRelation(.)
val topic = newTopic()
testUtils.createTopic(topic, partitions = 3)
testUtils.sendMessages(topic, (1 to 10).map(_.toString).toArray, Some(0))
testUtils.sendMessages(topic, (11 to 20).map(_.toString).toArray, Some(1))
testUtils.sendMessages(topic, (21 to 30).map(_.toString).toArray, Some(2))
val customGroupId = "id-" + Random.nextInt()
val df = createDF(topic, withOptions = Map(groupIdKey -> customGroupId))
checkAnswer(df, (1 to 30).map(_.toString).toDF())
val consumerGroups = testUtils.listConsumerGroups()
val validGroups = consumerGroups.valid().get()
val validGroupsId = validGroups.asScala.map(_.groupId())
validateGroupId(customGroupId, validGroupsId)
}
test("read Kafka transactional messages: read_committed") {
val topic = newTopic()
testUtils.createTopic(topic)
testUtils.withTranscationalProducer { producer =>
val df = spark
.read
.format("kafka")
.option("kafka.bootstrap.servers", testUtils.brokerAddress)
.option("kafka.isolation.level", "read_committed")
.option("subscribe", topic)
.load()
.selectExpr("CAST(value AS STRING)")
producer.beginTransaction()
(1 to 5).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
// Should not read any messages before they are committed
assert(df.isEmpty)
producer.commitTransaction()
// Should read all committed messages
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 6)
checkAnswer(df, (1 to 5).map(_.toString).toDF)
producer.beginTransaction()
(6 to 10).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
producer.abortTransaction()
// Should not read aborted messages
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 12)
checkAnswer(df, (1 to 5).map(_.toString).toDF)
producer.beginTransaction()
(11 to 15).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
producer.commitTransaction()
// Should skip aborted messages and read new committed ones.
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 18)
checkAnswer(df, ((1 to 5) ++ (11 to 15)).map(_.toString).toDF)
}
}
test("read Kafka transactional messages: read_uncommitted") {
val topic = newTopic()
testUtils.createTopic(topic)
testUtils.withTranscationalProducer { producer =>
val df = spark
.read
.format("kafka")
.option("kafka.bootstrap.servers", testUtils.brokerAddress)
.option("kafka.isolation.level", "read_uncommitted")
.option("subscribe", topic)
.load()
.selectExpr("CAST(value AS STRING)")
producer.beginTransaction()
(1 to 5).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
// "read_uncommitted" should see all messages including uncommitted ones
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 5)
checkAnswer(df, (1 to 5).map(_.toString).toDF)
producer.commitTransaction()
// Should read all committed messages
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 6)
checkAnswer(df, (1 to 5).map(_.toString).toDF)
producer.beginTransaction()
(6 to 10).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
producer.abortTransaction()
// "read_uncommitted" should see all messages including uncommitted or aborted ones
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 12)
checkAnswer(df, (1 to 10).map(_.toString).toDF)
producer.beginTransaction()
(11 to 15).foreach { i =>
producer.send(new ProducerRecord[String, String](topic, i.toString)).get()
}
producer.commitTransaction()
// Should read all messages
testUtils.waitUntilOffsetAppears(new TopicPartition(topic, 0), 18)
checkAnswer(df, (1 to 15).map(_.toString).toDF)
}
}
}
class KafkaRelationSuiteV1 extends KafkaRelationSuiteBase {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "kafka")
test("V1 Source is used when set through SQLConf") {
val topic = newTopic()
val df = createDF(topic)
assert(df.logicalPlan.collect {
case LogicalRelation(_, _, _, _) => true
}.nonEmpty)
}
}
class KafkaRelationSuiteV2 extends KafkaRelationSuiteBase {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "")
test("V2 Source is used when set through SQLConf") {
val topic = newTopic()
val df = createDF(topic)
assert(df.logicalPlan.collect {
case DataSourceV2Relation(_, _, _) => true
}.nonEmpty)
}
}
| pgandhi999/spark | external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala | Scala | apache-2.0 | 15,527 |
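For reference, the offset semantics exercised above (-2 = earliest, -1 = latest, expressed either as the strings `earliest`/`latest` or as per-partition JSON) surface through the public batch API roughly like this; the broker address and topic are placeholders:

```scala
import org.apache.spark.sql.SparkSession

object KafkaBatchReadSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("kafka-batch").getOrCreate()

  val df = spark.read
    .format("kafka")
    .option("kafka.bootstrap.servers", "localhost:9092") // placeholder broker
    .option("subscribe", "my-topic")                     // placeholder topic
    .option("startingOffsets", "earliest")               // or JSON: {"my-topic":{"0":-2}}
    .option("endingOffsets", "latest")                   // or JSON: {"my-topic":{"0":-1}}
    .load()
    .selectExpr("CAST(value AS STRING)")

  df.show()
}
```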
package com.arcusys.valamis.web.init.util
import com.liferay.portal.kernel.messaging.config.PluginMessagingConfigurator
import com.liferay.portal.kernel.portlet.PortletClassLoaderUtil
/**
* Created by pkornilov on 22.06.16.
*/
class CustomPluginMessagingConfigurator extends PluginMessagingConfigurator {
override def afterPropertiesSet(): Unit = {
val oldContextName = try {
PortletClassLoaderUtil.getServletContextName
} catch {
case ex: IllegalStateException => null
}
if (oldContextName == null) {
//We need to set a non-empty servletContextName so that PluginMessagingConfigurator.afterPropertiesSet
//doesn't throw an exception. The value itself is unimportant: it is only used later in
//PluginMessagingConfigurator.getOperatingClassloader, and when no classLoader is associated with it,
//ContextFinder is used instead, which works fine.
PortletClassLoaderUtil.setServletContextName("spring-msg-cfg")
}
try {
super.afterPropertiesSet()
} finally {
//restore old value just in case
PortletClassLoaderUtil.setServletContextName(oldContextName)
}
}
}
| arcusys/Valamis | valamis-portlets-activator/src/main/scala/com/arcusys/valamis/web/init/util/CustomPluginMessagingConfigurator.scala | Scala | gpl-3.0 | 1,191 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.coref
import cc.factorie._
import cc.factorie.app.nlp._
import cc.factorie.app.nlp.phrase._
import cc.factorie.app.nlp.pos.PennPosDomain
import cc.factorie.util.{Attr,UniqueId,ImmutableArrayIndexedSeq,EvaluatableClustering}
import cc.factorie.variable._
import scala.collection.mutable.ArrayBuffer
/** Either a mention, entity or sub-entity in a coreference or entity resolution model.
These are the "nodes" in trees in which observed mentions are the leaves and inferred entities are the roots.
In "hierarchical coreference" there may be additional nodes at intermediate levels of the tree.
@author Andrew McCallum */
trait Node extends UniqueId with Attr {
type ParentType <: Node
/** A pointer to the Node immediate above this Node in the tree. */
def parent: ParentType
}
/** A "mention" of an entity in a resolution problem.
A leaf in a coreference hierarchy.
This is the super-trait for mentions in both within-document coreference and cross-document entity resolution.
@author Andrew McCallum */
trait AbstractMention extends Node {
def parent: ParentType
/** The root of the coreference tree in which this mention is a leaf. */
def entity: ParentType
/** A string representation of the observed mention, e.g. "Michael Smith". */
def string: String
}
/** An "entity" in an entity resolution problem.
A non-leaf Node in a coreference hierarchy.
It could be a root (entity) or an intermediate node (sub-entity in hierarchical coref).
This is the super-trait for entities in both within-document coreference and cross-document entity resolution.
@author Andrew McCallum */
trait AbstractEntity extends Node {
def children: Iterable[Node] // Immediate children
def childIds: Iterable[String] = children.map(_.uniqueId)
def mentions: Iterable[AbstractMention] // Leaves of tree
}
// Below is infrastructure for within-document coreference
// TODO Turn this into a trait. Only concrete will be an inner class of WithinDocCoref
/** An entity mention whose contents come from a nlp.phrase.Phrase.
Users should not create these themselves, but rather use WithinDocCoref to create them.
The uniqueId is abstract.
@author Andrew McCallum */
abstract class Mention(val phrase:Phrase) extends AbstractMention {
type ParentType = WithinDocEntity
private var _entity:WithinDocEntity = null
protected[coref] def _setEntity(e:WithinDocEntity): Unit = _entity = e
def entity: ParentType = _entity
def parent: ParentType = _entity
lazy val string = phrase.tokensString(" ")
// If number, gender and entity type are needed, put a CategoricalVariable subclass in the Attr
}
// TODO All three of these classes should be removed. -akm
/** A collection of Mentions, either immutable or mutable. */
trait MentionCollection extends Iterable[Mention]
/** An immutable ordered collection of Mentions. */
class MentionList(mentions:Iterable[Mention]) extends ImmutableArrayIndexedSeq(mentions) with MentionCollection
/** A mutable ordered collection of Mentions. */
class MentionBuffer extends ArrayBuffer[Mention] with MentionCollection
/** An entity whose evidence comes from some Phrases within a single document.
Users should not create these themselves, but rather use WithinDocCoref to create them.
The uniqueId is abstract.
@author Andrew McCallum */
abstract class WithinDocEntity(val document:Document) extends AbstractEntity {
type ParentType = WithinDocEntity
private val _mentions = new scala.collection.mutable.LinkedHashSet[Mention]
def parent: WithinDocEntity = null
def mentions:scala.collection.Set[Mention] = _mentions
def isSingleton:Boolean = _mentions.size == 1
def isEmpty:Boolean = _mentions.isEmpty
def children: Iterable[Mention] = _mentions
// TODO Rename this to remove the "get".
def getFirstMention: Mention = if(isEmpty) null else if(isSingleton) _mentions.head else mentions.minBy(m => m.phrase.start)
def +=(mention:Mention): Unit = {
assert(mention.phrase.document eq document)
//assert(!_mentions.contains(mention)) // No reason to do this; might catch a bug.
if (mention.entity ne null) mention.entity._mentions -= mention
if(!_mentions.contains(mention))_mentions += mention
mention._setEntity(WithinDocEntity.this)
}
def -=(mention:Mention): Unit = {
assert(mention.phrase.document eq document)
assert(_mentions.contains(mention)) // No reason to do this; might catch a bug.
assert(mention.entity == this)
_mentions -= mention
mention._setEntity(null)
}
/** Return the canonical mention for the entity cluster. If the canonical mention is not already set, this computes, caches, and returns it. */
def getCanonicalMention: Mention = {
if (canonicalMention eq null) {
val canonicalOption = _mentions.filter{m =>
(m.phrase.attr[NounPhraseType].value == NounPhraseTypeDomain.value("NOM") ||
m.phrase.attr[NounPhraseType].value == NounPhraseTypeDomain.value("NAM")) &&
m.phrase.last.posTag.intValue != PennPosDomain.posIndex
}.toSeq.sortBy(m => (m.phrase.start, m.phrase.length)).headOption
canonicalMention = canonicalOption.getOrElse(children.headOption.orNull)
canonicalName = canonicalMention.string
}
canonicalMention
}
var canonicalName: String = null
var canonicalMention: Mention = null
// If number, gender and entity type are needed, put a CategoricalVariable subclass in the Attr
}
/** Container for a within-document coreference solution, typically stored as an attr of the Document.
Some may contain an imperfect inferred coref solution; others may store a gold-standard target coref solution.
Concrete instances of Mention and WithinDocEntity are created here.
@author Andrew McCallum
*/
class WithinDocCoref(val document:Document) extends EvaluatableClustering[WithinDocEntity,Phrase#Value] {
/** When we have labeled gold-standard truth for coref, it is stored here. */
var target: WithinDocCoref = null // ...the alternative would have been to create different subclasses of WithinDocCoref so they could be stored separately in the Document.attr, but I chose this as cleaner. -akm
/** A mapping from (the Phrase's span value) to Mention */
private val _spanToMention = new scala.collection.mutable.LinkedHashMap[Span[Section,Token],Mention]
//private val _phraseToMention = new scala.collection.mutable.LinkedHashMap[Phrase,Mention] // Used to index by this instead. I think we can remove this now. -akm
/** A mapping from entity.uniqueId to WithinDocEntity */
private val _entities = new scala.collection.mutable.LinkedHashMap[String,WithinDocEntity]
/** A mapping from entity key (i.e. an Int identifying the true entity) to the entity.uniqueId */
private lazy val _entityKeyToId = new scala.collection.mutable.HashMap[Int,String]
private var _entityCount = 0 // The number of WithinDocEntities ever created here. This number never goes down.
/** A string that will be used as a prefix on the uniqueIds of the Mentions and WithinDocEntities created here. */
def uniqueId: String = document.uniqueId // TODO Perhaps this should be something more safely unique if we save more than one WithinDocCoref objects per Document? -akm
def uniqueIdEntitySuffix(entityIndex:Int): String = "//WithinDocEntity" + entityIndex
def uniqueIdMentionSuffix(phraseStart:Int, phraseLength:Int): String = "//Mention(" + phraseStart + "," + phraseLength + ")"
/** Concrete implementation of WithinDocEntity that automatically stores itself in WithinDocCoref.entities. */
protected class WithinDocEntity1(val uniqueId:String) extends WithinDocEntity(document) {
def this() = this(WithinDocCoref.this.uniqueId + uniqueIdEntitySuffix(_entityCount)) // TODO Is this what we want? -akm
_entityCount += 1
assert(!_entities.contains(uniqueId))
_entities(uniqueId) = this
def coref: WithinDocCoref = WithinDocCoref.this
}
/** Concrete implementation of Mention that automatically stores itself in WithinDocCoref.mentions. */
protected class Mention1(phrase:Phrase, entity:WithinDocEntity) extends Mention(phrase) {
def this(phrase:Phrase, entityKey:Int) = this(phrase, entityFromKey(entityKey)) // Typically used for labeled data
def this(phrase:Phrase, entityUniqueId:String) = this(phrase, entityFromUniqueId(entityUniqueId)) // Typically used for deserialization
def this(phrase:Phrase) = this(phrase, null.asInstanceOf[WithinDocEntity]) // Typically used for new inference // TODO Should this be null, or a newly created blank Entity; See LoadConll2011 also.
assert(entity == null || entity.asInstanceOf[WithinDocEntity1].coref == WithinDocCoref.this)
_spanToMention(phrase.value) = this
val uniqueId = WithinDocCoref.this.uniqueId + uniqueIdMentionSuffix(phrase.start, phrase.length) // TODO Is this what we want? -akm
if (entity ne null) entity += this
def coref: WithinDocCoref = WithinDocCoref.this
}
/** Given Span (typically the value of a Phrase), return the corresponding Mention.
Note that Span is a case class, so the lookup is done by the span's boundaries, not by its identity. */
def mention(span:Span[Section,Token]): Mention = _spanToMention(span)
/** Return the Mention corresponding to the given Phrase. If none present, return null.
Note that since the lookup happens by the Phrase's Span value, the returned mention.phrase may be different than this method's argument. */
def mention(phrase:Phrase): Mention = _spanToMention(phrase.value)
/** Create a new Mention whose entity will be null. */
def addMention(phrase:Phrase): Mention = _spanToMention.getOrElse(phrase.value, new Mention1(phrase))
/** Create a new Mention with entity specified by given uniqueId. */
def addMention(phrase:Phrase, entityId:String): Mention = { assert(!_spanToMention.contains(phrase.value)); new Mention1(phrase, entityId) }
/** Create a new Mention with entity specified by given key. */
def addMention(phrase:Phrase, entityKey:Int): Mention = { assert(!_spanToMention.contains(phrase.value)); new Mention1(phrase, entityKey) }
/** Create a new Mention with the given entity, which must also be in this WithinDocCoref */
def addMention(phrase:Phrase, entity:WithinDocEntity): Mention = new Mention1(phrase, entity)
/** Remove a Mention from this coreference solution, and from its entity if it has one. */
def deleteMention(mention:Mention): Unit = {
if (mention.entity ne null) mention.entity -= mention
_spanToMention.remove(mention.phrase.value)
}
/** Checks whether the given tokenspan overlaps with an existing mention, returns the overlapping mention if it does. */
def findOverlapping(tokenSpan:TokenSpan):Option[Mention] = tokenSpan match {
case ts if ts.document == this.document => mentions.find(_.phrase.characterOffsets overlapsWith ts.characterOffsets)
case _ => None
}
/** Return all Mentions in this coreference solution. */
def mentions: Seq[Mention] = _spanToMention.values.toVector
/** Return a collection of WithinDocEntities managed by this coref solution. Note that some of them may have no Mentions. */
def entities: Iterable[WithinDocEntity] = _entities.values
/** Create and return a new WithinDocEntity with uniqueId determined by the number entities created so far. */
def newEntity(): WithinDocEntity = new WithinDocEntity1()
/** Return the entity associated with the given uniqueId, or create a new entity if not found already among 'entities'. */
def entityFromUniqueId(id:String): WithinDocEntity = _entities.getOrElse(id, new WithinDocEntity1(id))
/** Return the entity associated with the given key, or create a new entity if not found already among 'entities'. */
def entityFromKey(key:Int): WithinDocEntity = {
val id = _entityKeyToId.getOrElse(key,null)
val result = if (id eq null) new WithinDocEntity1 else _entities(id)
_entityKeyToId(key) = result.uniqueId
result
}
/** Return the entity associated with the given uniqueId. Return null if not found. */
def idToEntity(id:String): WithinDocEntity = _entities(id)
/** Remove from the list of entities all entities that contain no mentions. */
def trimEmptyEntities(): Unit = _entities.values.filter(_.mentions.size == 0).map(_.uniqueId).foreach(_entities.remove) // TODO But note that this doesn't purge _entityKeyToId; perhaps it should.
/** Remove all entities that contain only one mention, along with those mentions. */
def removeSingletons():Unit ={
_entities.values.filter(_.mentions.size == 1).map(_.uniqueId).foreach{
id =>
_entities(id).mentions.foreach(m => deleteMention(m))
_entities.remove(id)
}
}
/**Reset the clustered entities for this coref solution without losing mentions and their cached properties*/
def resetPredictedMapping():Unit = {_entities.clear();mentions.foreach(_._setEntity(null));_entityCount = 0 }
// Support for evaluation
// These assure we ignore any singletons for conll scoring
// TODO: Allow for ACE scoring where singletons are counted
def clusterIds: Iterable[WithinDocEntity] = _entities.values.filterNot(_.isSingleton)
def pointIds: Iterable[Phrase#Value] = _spanToMention.values.filterNot(m => m.entity == null || m.entity.isSingleton).map(_.phrase.value)
def pointIds(entityId:WithinDocEntity): Iterable[Phrase#Value] = if(!entityId.isSingleton) entityId.mentions.map(_.phrase.value) else Seq()
def intersectionSize(entityId1:WithinDocEntity, entityId2:WithinDocEntity): Int = if(!entityId1.isSingleton && !entityId2.isSingleton) entityId1.mentions.map(_.phrase.value).intersect(entityId2.mentions.map(_.phrase.value)).size else 0
def clusterId(mentionId:Phrase#Value): WithinDocEntity = {
val mention = _spanToMention.getOrElse(mentionId,null)
if(mention == null || mention.entity == null ||mention.entity.isSingleton) null
else mention.entity
}
}
// CrossDocEntity should be unified with Jack's new hcoref replacement.
// ids, including cross-doc ids will be part of this work.
trait CrossDocMention extends AbstractMention {
def withinDocEntityId: String
}
trait CrossDocEntity extends AbstractEntity // ...
///** Categorical variable indicating whether the mention is a pronoun, nominal or named proper noun.
// (Obviously different from MentionEntityType, which may indicate whether it is a person, location, organization, etc.) */
//class MentionType(val mention:AbstractMention, targetValue:String) extends LabeledCategoricalVariable(targetValue) {
// def domain = OntonotesMentionTypeDomain
//}
///** The domain of MentionType, consisting of pronouns (PRO), nominals (NOM) and named proper nouns (NAM). */
//object OntonotesMentionTypeDomain extends CategoricalDomain(List("PRO", "NOM", "NAM"))
// // In case we need to put labels on Mentions or Entities in addition to their underlying Phrases. -akm
//class OntonotesEntityType(category:String) extends LabeledCategoricalVariable[String](category) {
// def domain = OntonotesEntityTypeDomain
//}
//
//class PhraseOntonotesEntityType(val phrase:Phrase, value:String) extends OntonotesEntityType(value)
//class EntityOntonotesEntityType(val entity:AbstractEntity, value:String) extends OntonotesEntityType(value)
//class WithinDocEntityOntonotesEntityType(override val entity:WithinDocEntity, value:String) extends EntityOntonotesEntityType(entity, value)
//
//class EntityGender(val entity:AbstractEntity, value:String) extends Gender(value)
//class WithinDocEntityGender(override val entity:WithinDocEntity, value:String) extends EntityGender(entity, value)
////class CrossDocEntityGender(override val entity:CrossDocEntity, value:String) extends EntityGender(entity, value)
//
//class EntityNumber(val entity:AbstractEntity, value:String) extends Number(value)
//class WithinDocEntityNumber(override val entity:WithinDocEntity, value:String) extends EntityNumber(entity, value)
| iesl/fuse_ttl | src/factorie-factorie_2.11-1.1/src/main/scala/cc/factorie/app/nlp/coref/Mention.scala | Scala | apache-2.0 | 16,762 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.schedulers
import java.util.concurrent.{CountDownLatch, TimeUnit, TimeoutException}
import minitest.SimpleTestSuite
import monix.execution.ExecutionModel.AlwaysAsyncExecution
import monix.execution.cancelables.SingleAssignCancelable
import monix.execution.{Cancelable, Scheduler}
import monix.execution.{ExecutionModel => ExecModel}
import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise}
object AsyncSchedulerJVMSuite extends SimpleTestSuite {
val s: Scheduler = monix.execution.Scheduler.global
def scheduleOnce(s: Scheduler, delay: FiniteDuration)(action: => Unit): Cancelable = {
s.scheduleOnce(delay.length, delay.unit, runnableAction(action))
}
test("scheduleOnce with delay") {
val p = Promise[Long]()
val startedAt = System.nanoTime()
scheduleOnce(s, 100.millis) { p.success(System.nanoTime()); () }
val timeTaken = Await.result(p.future, 3.second)
assert((timeTaken - startedAt).nanos.toMillis >= 100)
}
test("scheduleOnce with delay lower than 1.milli") {
val p = Promise[Int]()
scheduleOnce(s, 20.nanos) { p.success(1); () }
assert(Await.result(p.future, 3.seconds) == 1)
}
test("scheduleOnce with delay and cancel") {
val p = Promise[Int]()
val task = scheduleOnce(s, 100.millis) { p.success(1); () }
task.cancel()
intercept[TimeoutException] {
Await.result(p.future, 150.millis)
()
}
()
}
test("schedule with fixed delay") {
val sub = SingleAssignCancelable()
val p = Promise[Int]()
var value = 0
sub := s.scheduleWithFixedDelay(
10,
50,
TimeUnit.MILLISECONDS,
runnableAction {
if (value + 1 == 4) {
value += 1
sub.cancel()
p.success(value)
()
} else if (value < 4) {
value += 1
}
})
assert(Await.result(p.future, 5.second) == 4)
}
test("schedule at fixed rate") {
val sub = SingleAssignCancelable()
val p = Promise[Int]()
var value = 0
sub := s.scheduleAtFixedRate(
10,
50,
TimeUnit.MILLISECONDS,
runnableAction {
if (value + 1 == 4) {
value += 1
sub.cancel()
p.success(value)
()
} else if (value < 4) {
value += 1
}
})
assert(Await.result(p.future, 5.second) == 4)
}
test("builder for ExecutionModel works") {
import monix.execution.ExecutionModel.AlwaysAsyncExecution
import monix.execution.Scheduler
val s: Scheduler = Scheduler(AlwaysAsyncExecution)
assertEquals(s.executionModel, AlwaysAsyncExecution)
val latch = new CountDownLatch(1)
s.execute(new Runnable {
def run(): Unit = latch.countDown()
})
assert(latch.await(15, TimeUnit.MINUTES), "latch.await")
}
test("execute local") {
var result = 0
def loop(n: Int): Unit =
s.executeTrampolined { () =>
result += 1
if (n - 1 > 0) loop(n - 1)
}
val count = 100000
loop(count)
assertEquals(result, count)
}
test("change execution model") {
val s: Scheduler = monix.execution.Scheduler.global
assertEquals(s.executionModel, ExecModel.Default)
val s2 = s.withExecutionModel(AlwaysAsyncExecution)
assertEquals(s.executionModel, ExecModel.Default)
assertEquals(s2.executionModel, AlwaysAsyncExecution)
}
test("Scheduler.cached") {
import scala.concurrent.duration._
intercept[IllegalArgumentException] {
monix.execution.Scheduler.cached("dummy", -1, 2, 1.second)
()
}
intercept[IllegalArgumentException] {
monix.execution.Scheduler.cached("dummy", 0, 0, 1.second)
()
}
intercept[IllegalArgumentException] {
monix.execution.Scheduler.cached("dummy", 2, 1, 1.second)
()
}
intercept[IllegalArgumentException] {
monix.execution.Scheduler.cached("dummy", 2, 10, -1.second)
()
}
implicit val s: Scheduler = monix.execution.Scheduler
.cached(name = "cached-test", minThreads = 0, maxThreads = 2, keepAliveTime = 1.second, daemonic = true)
val futureStarted = new CountDownLatch(1)
val start = new CountDownLatch(1)
val future = Future {
futureStarted.countDown()
start.await()
1 + 1
}
futureStarted.await()
start.countDown()
val result = Await.result(future, 60.seconds)
assertEquals(result, 2)
}
test("clockRealTime") {
val t1 = System.currentTimeMillis()
val t2 = s.clockRealTime(MILLISECONDS)
assert(t2 >= t1, "t2 >= t1")
}
test("clockMonotonic") {
val t1 = System.nanoTime()
val t2 = s.clockMonotonic(NANOSECONDS)
assert(t2 >= t1, "t2 >= t1")
}
def runnableAction(f: => Unit): Runnable =
new Runnable { def run() = f }
}
| monifu/monix | monix-execution/jvm/src/test/scala/monix/execution/schedulers/AsyncSchedulerJVMSuite.scala | Scala | apache-2.0 | 5,491 |
package com.twitter.finagle.stats
/**
* BroadcastStatsReceiver is a helper object that creates a StatsReceiver wrapper around multiple
* StatsReceivers (n).
*/
object BroadcastStatsReceiver {
def apply(receivers: Seq[StatsReceiver]): StatsReceiver = receivers.filterNot(_.isNull) match {
case Seq() => NullStatsReceiver
case Seq(fst) => fst
case Seq(first, second) => new Two(first, second)
case more => new N(more)
}
private class Two(first: StatsReceiver, second: StatsReceiver) extends StatsReceiver {
val repr = this
def counter(names: String*): Counter =
new BroadcastCounter.Two(first.counter(names:_*), second.counter(names:_*))
def stat(names: String*): Stat =
new BroadcastStat.Two(first.stat(names:_*), second.stat(names:_*))
def addGauge(names: String*)(f: => Float): Gauge = new Gauge {
val firstGauge = first.addGauge(names:_*)(f)
val secondGauge = second.addGauge(names:_*)(f)
def remove() = {
firstGauge.remove()
secondGauge.remove()
}
}
override def toString: String =
s"Broadcast($first, $second)"
}
private class N(statsReceivers: Seq[StatsReceiver]) extends StatsReceiver {
val repr = this
def counter(names: String*): Counter =
BroadcastCounter(statsReceivers map { _.counter(names:_*) })
def stat(names: String*): Stat =
BroadcastStat(statsReceivers map { _.stat(names:_*) })
def addGauge(names: String*)(f: => Float): Gauge = new Gauge {
val gauges = statsReceivers map { _.addGauge(names:_*)(f) }
def remove() = gauges foreach { _.remove() }
}
override def toString: String =
s"Broadcast(${statsReceivers.mkString(", ")})"
}
}
/**
* BroadcastCounter is a helper object that creates a Counter wrapper around multiple
* Counters (n).
* For performance reasons, we have specialized cases for n = 0, 1, 2, 3 or 4.
*/
object BroadcastCounter {
def apply(counters: Seq[Counter]): Counter = counters match {
case Seq() => NullCounter
case Seq(counter) => counter
case Seq(a, b) => new Two(a, b)
case Seq(a, b, c) => new Three(a, b, c)
case Seq(a, b, c, d) => new Four(a, b, c, d)
case more => new N(more)
}
private object NullCounter extends Counter {
def incr(delta: Int): Unit = ()
}
private[stats] class Two(a: Counter, b: Counter) extends Counter {
def incr(delta: Int): Unit = {
a.incr(delta)
b.incr(delta)
}
}
private class Three(a: Counter, b: Counter, c: Counter) extends Counter {
def incr(delta: Int): Unit = {
a.incr(delta)
b.incr(delta)
c.incr(delta)
}
}
private class Four(a: Counter, b: Counter, c: Counter, d: Counter) extends Counter {
def incr(delta: Int): Unit = {
a.incr(delta)
b.incr(delta)
c.incr(delta)
d.incr(delta)
}
}
private class N(counters: Seq[Counter]) extends Counter {
def incr(delta: Int): Unit = { counters.foreach(_.incr(delta)) }
}
}
/**
* BroadcastStat is a helper object that creates a Stat wrapper around multiple
* Stats (n).
* For performance reasons, we have specialized cases for n = 0, 1, 2, 3 or 4.
*/
object BroadcastStat {
def apply(stats: Seq[Stat]): Stat = stats match {
case Seq() => NullStat
case Seq(counter) => counter
case Seq(a, b) => new Two(a, b)
case Seq(a, b, c) => new Three(a, b, c)
case Seq(a, b, c, d) => new Four(a, b, c, d)
case more => new N(more)
}
private object NullStat extends Stat {
def add(value: Float): Unit = ()
}
private[stats] class Two(a: Stat, b: Stat) extends Stat {
def add(value: Float): Unit = {
a.add(value)
b.add(value)
}
}
private class Three(a: Stat, b: Stat, c: Stat) extends Stat {
def add(value: Float): Unit = {
a.add(value)
b.add(value)
c.add(value)
}
}
private class Four(a: Stat, b: Stat, c: Stat, d: Stat) extends Stat {
def add(value: Float): Unit = {
a.add(value)
b.add(value)
c.add(value)
d.add(value)
}
}
private class N(stats: Seq[Stat]) extends Stat {
def add(value: Float): Unit = { stats.foreach(_.add(value)) }
}
}
| tdyas/util | util-stats/src/main/scala/com/twitter/finagle/stats/BroadcastStatsReceiver.scala | Scala | apache-2.0 | 4,202 |
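A minimal usage sketch, assuming util-stats' `InMemoryStatsReceiver` is available to observe the fan-out:

```scala
import com.twitter.finagle.stats.{BroadcastStatsReceiver, InMemoryStatsReceiver}

object BroadcastSketch extends App {
  val a = new InMemoryStatsReceiver
  val b = new InMemoryStatsReceiver
  val sr = BroadcastStatsReceiver(Seq(a, b)) // two receivers => the specialized Two wrapper

  sr.counter("requests").incr()  // fans out to both receivers
  sr.stat("latency_ms").add(12f)

  assert(a.counters(Seq("requests")) == 1)
  assert(b.counters(Seq("requests")) == 1)
}
```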
package org.jetbrains.sbt.project.template.wizard.buildSystem
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.template.wizard.ScalaNewProjectWizardStep
final class SbtScalaNewProjectWizard extends BuildSystemScalaNewProjectWizard {
override def getName: String = SbtProjectSystem.Id.getReadableName
override def createStep(parent: ScalaNewProjectWizardStep): SbtScalaNewProjectWizardStep =
new SbtScalaNewProjectWizardStep(parent)
override def getOrdinal: Int = 0
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/project/template/wizard/buildSystem/SbtScalaNewProjectWizard.scala | Scala | apache-2.0 | 515 |
/*
* Copyright 2014 Dennis Vis
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talares.api.datatypes.items
import org.joda.time.DateTime
/**
* Items of this type represent versioned pieces of content.
*
* @author Dennis Vis
* @since 0.1.0
*/
trait PublishedItem extends Item {
val itemId: Int
val title: Option[String]
val author: Option[String]
val creationDate: Option[DateTime]
val initialPublishDate: Option[DateTime]
val lastPublishDate: Option[DateTime]
val modificationDate: Option[DateTime]
val majorVersion: Option[Int]
val minorVersion: Option[Int]
val owningPublication: Option[Int]
}
| talares/talares | src/talares/src/main/scala/org/talares/api/datatypes/items/PublishedItem.scala | Scala | apache-2.0 | 1,150 |
/*
* Copyright 2013 Josselin Pujo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.assoba.open.template.compiler
import java.io.File
import scalax.file.{PathSet, Path}
object PathCompiler {
def compile(path: File, output: File) {
val templates: PathSet[Path] = Path(path) ** "*.stl"
templates.foreach(
f => {
f.fileOption match {
case Some(file: File) => file match {
case h: File if h.getName.endsWith(".html.stl") => ScalaTemplateCompiler.compile(h, path, output, "HtmlFormat")
case _ => ScalaTemplateCompiler.compile(file, path, output, "TxtFormat")
}
case None =>
}
}
)
}
}
| neuneu2k/assoba-template | template-maven-plugin/src/main/scala/fr/assoba/open/template/compiler/PathCompiler.scala | Scala | apache-2.0 | 1,232 |
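A usage sketch with placeholder directories; templates ending in `.html.stl` compile with `HtmlFormat`, everything else with `TxtFormat`:

```scala
import java.io.File
import fr.assoba.open.template.compiler.PathCompiler

// Placeholders; point these at a real template tree and an output directory.
PathCompiler.compile(new File("src/main/templates"), new File("target/generated-sources"))
```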
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.util.Random
import breeze.linalg.normalize
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared.HasSeed
import org.apache.spark.ml.util._
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
/**
* :: Experimental ::
*
* Params for [[BucketedRandomProjectionLSH]].
*/
private[ml] trait BucketedRandomProjectionLSHParams extends Params {
/**
* The length of each hash bucket, a larger bucket lowers the false negative rate. The number of
* buckets will be `(max L2 norm of input vectors) / bucketLength`.
*
*
* If input vectors are normalized, 1-10 times of pow(numRecords, -1/inputDim) would be a
* reasonable value
* @group param
*/
val bucketLength: DoubleParam = new DoubleParam(this, "bucketLength",
"the length of each hash bucket, a larger bucket lowers the false negative rate.",
ParamValidators.gt(0))
/** @group getParam */
final def getBucketLength: Double = $(bucketLength)
}
/**
* :: Experimental ::
*
* Model produced by [[BucketedRandomProjectionLSH]], where multiple random vectors are stored. The
* vectors are normalized to be unit vectors and each vector is used in a hash function:
* `h_i(x) = floor(r_i.dot(x) / bucketLength)`
* where `r_i` is the i-th random unit vector. The number of buckets will be `(max L2 norm of input
* vectors) / bucketLength`.
*
* @param randUnitVectors An array of random unit vectors. Each vector represents a hash function.
*/
@Experimental
@Since("2.1.0")
class BucketedRandomProjectionLSHModel private[ml](
override val uid: String,
private[ml] val randUnitVectors: Array[Vector])
extends LSHModel[BucketedRandomProjectionLSHModel] with BucketedRandomProjectionLSHParams {
/** @group setParam */
@Since("2.4.0")
override def setInputCol(value: String): this.type = super.set(inputCol, value)
/** @group setParam */
@Since("2.4.0")
override def setOutputCol(value: String): this.type = super.set(outputCol, value)
@Since("2.1.0")
override protected[ml] def hashFunction(elems: Vector): Array[Vector] = {
val hashValues = randUnitVectors.map(
randUnitVector => Math.floor(BLAS.dot(elems, randUnitVector) / $(bucketLength))
)
// TODO: Output vectors of dimension numHashFunctions in SPARK-18450
hashValues.map(Vectors.dense(_))
}
@Since("2.1.0")
override protected[ml] def keyDistance(x: Vector, y: Vector): Double = {
Math.sqrt(Vectors.sqdist(x, y))
}
@Since("2.1.0")
override protected[ml] def hashDistance(x: Seq[Vector], y: Seq[Vector]): Double = {
// Since it's generated by hashing, it will be a pair of dense vectors.
x.zip(y).map(vectorPair => Vectors.sqdist(vectorPair._1, vectorPair._2)).min
}
@Since("2.1.0")
override def copy(extra: ParamMap): BucketedRandomProjectionLSHModel = {
val copied = new BucketedRandomProjectionLSHModel(uid, randUnitVectors).setParent(parent)
copyValues(copied, extra)
}
@Since("2.1.0")
override def write: MLWriter = {
new BucketedRandomProjectionLSHModel.BucketedRandomProjectionLSHModelWriter(this)
}
}
/**
* :: Experimental ::
*
* This [[BucketedRandomProjectionLSH]] implements Locality Sensitive Hashing functions for
* Euclidean distance metrics.
*
* The input is dense or sparse vectors, each of which represents a point in the Euclidean
* distance space. The output will be vectors of configurable dimension. Hash values in the
* same dimension are calculated by the same hash function.
*
* References:
*
* 1. <a href="https://en.wikipedia.org/wiki/Locality-sensitive_hashing#Stable_distributions">
* Wikipedia on Stable Distributions</a>
*
* 2. Wang, Jingdong et al. "Hashing for similarity search: A survey." arXiv preprint
* arXiv:1408.2927 (2014).
*/
@Experimental
@Since("2.1.0")
class BucketedRandomProjectionLSH(override val uid: String)
extends LSH[BucketedRandomProjectionLSHModel]
with BucketedRandomProjectionLSHParams with HasSeed {
@Since("2.1.0")
override def setInputCol(value: String): this.type = super.setInputCol(value)
@Since("2.1.0")
override def setOutputCol(value: String): this.type = super.setOutputCol(value)
@Since("2.1.0")
override def setNumHashTables(value: Int): this.type = super.setNumHashTables(value)
@Since("2.1.0")
def this() = {
this(Identifiable.randomUID("brp-lsh"))
}
/** @group setParam */
@Since("2.1.0")
def setBucketLength(value: Double): this.type = set(bucketLength, value)
/** @group setParam */
@Since("2.1.0")
def setSeed(value: Long): this.type = set(seed, value)
@Since("2.1.0")
override protected[this] def createRawLSHModel(
inputDim: Int): BucketedRandomProjectionLSHModel = {
val rand = new Random($(seed))
val randUnitVectors: Array[Vector] = {
Array.fill($(numHashTables)) {
val randArray = Array.fill(inputDim)(rand.nextGaussian())
Vectors.fromBreeze(normalize(breeze.linalg.Vector(randArray)))
}
}
new BucketedRandomProjectionLSHModel(uid, randUnitVectors)
}
@Since("2.1.0")
override def transformSchema(schema: StructType): StructType = {
SchemaUtils.checkColumnType(schema, $(inputCol), new VectorUDT)
validateAndTransformSchema(schema)
}
@Since("2.1.0")
override def copy(extra: ParamMap): this.type = defaultCopy(extra)
}
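// Usage sketch (assumed from the estimator/model API above, not part of this file):
//   val brp = new BucketedRandomProjectionLSH()
//     .setBucketLength(2.0)
//     .setNumHashTables(3)
//     .setInputCol("features")
//     .setOutputCol("hashes")
//   val model = brp.fit(df)      // df has a Vector column named "features"
//   model.transform(df)          // appends an array-of-Vector column "hashes"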
@Since("2.1.0")
object BucketedRandomProjectionLSH extends DefaultParamsReadable[BucketedRandomProjectionLSH] {
@Since("2.1.0")
override def load(path: String): BucketedRandomProjectionLSH = super.load(path)
}
@Since("2.1.0")
object BucketedRandomProjectionLSHModel extends MLReadable[BucketedRandomProjectionLSHModel] {
@Since("2.1.0")
override def read: MLReader[BucketedRandomProjectionLSHModel] = {
new BucketedRandomProjectionLSHModelReader
}
@Since("2.1.0")
override def load(path: String): BucketedRandomProjectionLSHModel = super.load(path)
private[BucketedRandomProjectionLSHModel] class BucketedRandomProjectionLSHModelWriter(
instance: BucketedRandomProjectionLSHModel) extends MLWriter {
// TODO: Save using the existing format of Array[Vector] once SPARK-12878 is resolved.
private case class Data(randUnitVectors: Matrix)
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val numRows = instance.randUnitVectors.length
require(numRows > 0)
val numCols = instance.randUnitVectors.head.size
val values = instance.randUnitVectors.map(_.toArray).reduce(Array.concat(_, _))
val randMatrix = Matrices.dense(numRows, numCols, values)
val data = Data(randMatrix)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class BucketedRandomProjectionLSHModelReader
extends MLReader[BucketedRandomProjectionLSHModel] {
/** Checked against metadata when loading model */
private val className = classOf[BucketedRandomProjectionLSHModel].getName
override def load(path: String): BucketedRandomProjectionLSHModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.parquet(dataPath)
val Row(randUnitVectors: Matrix) = MLUtils.convertMatrixColumnsToML(data, "randUnitVectors")
.select("randUnitVectors")
.head()
val model = new BucketedRandomProjectionLSHModel(metadata.uid,
randUnitVectors.rowIter.toArray)
metadata.getAndSetParams(model)
model
}
}
}
|
WindCanDie/spark
|
mllib/src/main/scala/org/apache/spark/ml/feature/BucketedRandomProjectionLSH.scala
|
Scala
|
apache-2.0
| 8,645 |
package org.bfn.ninetynineprobs
import org.scalatest._
class P21Spec extends UnitSpec {
"insertAt" should "return a list with 1 element w/ an empty one and N=0" in {
assert(P21.insertAt(42, 0, List()) == List(42))
}
it should "add an element at the end if N=list.length" in {
assert(P21.insertAt(42, 3, List(1, 2, 3)) == List(1, 2, 3, 42))
}
it should "insert an element at index N" in {
assert(P21.insertAt(17, 2, List(3, 2, 1, 0)) == List(3, 2, 17, 1, 0))
}
it should "throw an IndexOutOfBoundsException if N<0" in {
intercept[IndexOutOfBoundsException] {
P21.insertAt(42, -1, List())
}
}
it should "throw an IndexOutOfBoundsException if N>list.length" in {
intercept[IndexOutOfBoundsException] {
P21.insertAt(42, 1000, List(1, 2, 3))
}
}
it should "work with String lists as well" in {
    assert(P21.insertAt("12", 1, List("1", "2", "3")) == List("1", "12", "2", "3"))
}
}
|
bfontaine/99Scala
|
src/test/scala/P21Spec.scala
|
Scala
|
mit
| 934 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package statements
import com.intellij.psi._
import com.intellij.psi.impl.source.PsiFileImpl
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.tree.TokenSet
import org.jetbrains.plugins.scala.annotator.intention.ScalaImportTypeFix
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAnnotation, ScAnnotations}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType
/**
* User: Alexander Podkhalyuzin
* Date: 10.01.2009
*/
trait ScAnnotationsHolder extends ScalaPsiElement with PsiAnnotationOwner {
def annotations: Seq[ScAnnotation] = {
val stub: StubElement[_ <: PsiElement] = this match {
case st: StubBasedPsiElement[_] if st.getStub != null =>
st.getStub.asInstanceOf[StubElement[_ <: PsiElement]] // !!! Appeasing an unexplained compile error
case file: PsiFileImpl if file.getStub != null => file.getStub
case _ => null
}
if (stub != null) {
val annots: Array[ScAnnotations] =
stub.getChildrenByType(TokenSet.create(ScalaElementTypes.ANNOTATIONS), JavaArrayFactoryUtil.ScAnnotationsFactory)
if (annots.length > 0) {
return annots(0).getAnnotations.toSeq
} else return Seq.empty
}
if (findChildByClassScala(classOf[ScAnnotations]) != null)
findChildByClassScala(classOf[ScAnnotations]).getAnnotations.toSeq
else Seq.empty
}
def annotationNames: Seq[String] = annotations.map((x: ScAnnotation) => {
val text: String = x.annotationExpr.constr.typeElement.getText
text.substring(text.lastIndexOf(".", 0) + 1, text.length)
})
  def hasAnnotation(clazz: PsiClass): Boolean = hasAnnotation(clazz.qualifiedName).isDefined
def hasAnnotation(qualifiedName: String): Option[ScAnnotation] = {
def acceptType(tp: ScType): Boolean = {
tp match {
case ScDesignatorType(clazz: PsiClass) => clazz.qualifiedName == qualifiedName
case ScParameterizedType(ScDesignatorType(clazz: PsiClass), _) => clazz.qualifiedName == qualifiedName
case _ =>
tp.isAliasType match {
case Some(AliasType(ta: ScTypeAliasDefinition, _, _)) => acceptType(ta.aliasedType(TypingContext.empty).getOrAny)
case _ => false
}
}
}
annotations.find(annot => acceptType(annot.typeElement.getType(TypingContext.empty).getOrAny))
}
def addAnnotation(qualifiedName: String): PsiAnnotation = {
val simpleName = qualifiedName.lastIndexOf('.') |> { i =>
if (i >= 0) qualifiedName.drop(i + 1) else qualifiedName
}
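    // e.g. qualifiedName = "org.junit.Test" yields simpleName = "Test"; a name
    // without dots is used as-is.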
val container = findChildByClassScala(classOf[ScAnnotations])
val element = ScalaPsiElementFactory.createAnAnnotation(simpleName, getManager)
container.add(element)
container.add(ScalaPsiElementFactory.createNewLine(getManager))
val unresolvedReferences = element.depthFirst
.findByType(classOf[ScReferenceElement]).filter(_.resolve() == null)
for (topReference <- unresolvedReferences.headOption;
manager = JavaPsiFacade.getInstance(getProject);
annotationClass = manager.findClass(qualifiedName, topReference.getResolveScope)) {
val holder = ScalaImportTypeFix.getImportHolder(this, getProject)
holder.addImportForClass(annotationClass, topReference)
}
element
}
def findAnnotation(qualifiedName: String): PsiAnnotation = {
hasAnnotation(qualifiedName) match {
case Some(x) => x
case None => null
}
}
  def getApplicableAnnotations: Array[PsiAnnotation] = getAnnotations // TODO: understand and fix
def getAnnotations: Array[PsiAnnotation] = annotations.toArray
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScAnnotationsHolder.scala
|
Scala
|
apache-2.0
| 4,061 |
package java.io
abstract class InputStream extends Closeable {
def read(): Int
def read(b: Array[Byte]): Int = read(b, 0, b.length)
def read(b: Array[Byte], off: Int, len: Int): Int = {
if (off < 0 || len < 0 || len > b.length - off)
throw new IndexOutOfBoundsException
if (len == 0) 0
else {
var bytesWritten = 0
var next = 0
while (bytesWritten < len && next != -1) {
next =
if (bytesWritten == 0) read()
else {
try read()
catch { case _: IOException => -1 }
}
if (next != -1) {
b(off + bytesWritten) = next.toByte
bytesWritten += 1
}
}
if (bytesWritten <= 0) -1
else bytesWritten
}
}
def skip(n: Long): Long = {
    var skipped = 0L
while (skipped < n && read() != -1) skipped += 1
skipped
}
def available(): Int = 0
def close(): Unit = ()
def mark(readlimit: Int): Unit = ()
def reset(): Unit =
throw new IOException("Reset not supported")
def markSupported(): Boolean = false
}
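// Minimal illustrative subclass (a sketch, not part of the original file), enough
// to exercise the read()/read(Array)/skip implementations above:
//   class FixedBytesInputStream(bytes: Array[Byte]) extends InputStream {
//     private var pos = 0
//     def read(): Int =
//       if (pos >= bytes.length) -1
//       else { val b = bytes(pos) & 0xFF; pos += 1; b }
//   }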
|
cedricviaccoz/scala-native
|
javalib/src/main/scala/java/io/InputStream.scala
|
Scala
|
bsd-3-clause
| 1,092 |
package pt.org.apec.services.users.common.json
import play.api.libs.json._
import play.api.libs.functional.syntax._
import pt.org.apec.services.users.common._
trait JsonProtocol {
  implicit val userRegistrationFormat = Json.format[UserRegistration]
implicit val userCredentialsFormat = Json.format[UserCredentials]
implicit val authenticationResultReads: Reads[AuthenticationResult] = {
implicit val authenticationSuccessReads = Json.reads[AuthenticationSuccess]
implicit val authenticationPasswordExpiredReads = Json.reads[AuthenticationPasswordExpired]
implicit val authenticationNotActiveReads = Json.reads[AuthenticationNotActive]
(__ \\ "success").read[AuthenticationSuccess].map(_.asInstanceOf[AuthenticationResult]) |
(__ \\ "failure").read(AuthenticationFailure.asInstanceOf[AuthenticationResult]) |
(__ \\ "notActive").read[AuthenticationNotActive].map(_.asInstanceOf[AuthenticationResult]) |
(__ \\ "passwordExpired").read[AuthenticationPasswordExpired].map(_.asInstanceOf[AuthenticationResult])
}
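  // Example payloads accepted by the Reads above (illustrative):
  //   {"success": {...}}          -> AuthenticationSuccess
  //   {"failure": {}}             -> AuthenticationFailure
  //   {"notActive": {...}}        -> AuthenticationNotActive
  //   {"passwordExpired": {...}}  -> AuthenticationPasswordExpired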
implicit val authenticationResultWrites = Writes[AuthenticationResult] { result =>
implicit val authenticationSuccessWrites = Json.writes[AuthenticationSuccess]
implicit val authenticationNotActiveWrites = Json.writes[AuthenticationNotActive]
implicit val authenticationPasswordExpiredWrites = Json.writes[AuthenticationPasswordExpired]
result match {
      case s: AuthenticationSuccess => Writes.at[AuthenticationSuccess](__ \ "success").writes(s)
      case n: AuthenticationNotActive => Writes.at[AuthenticationNotActive](__ \ "notActive").writes(n)
      case e: AuthenticationPasswordExpired => Writes.at[AuthenticationPasswordExpired](__ \ "passwordExpired").writes(e)
      case AuthenticationFailure => Writes.at[JsValue](__ \ "failure").writes(JsObject.apply(Seq.empty))
}
}
implicit val userRegistrationSuccessFormat = Json.format[UserRegistrationSuccess]
implicit val userRegistrationResultWrites: Writes[UserRegistrationResult] = Writes[UserRegistrationResult] { result =>
result match {
case s: UserRegistrationSuccess => userRegistrationSuccessFormat.writes(s)
case _ => ???
}
}
}
|
ragb/apec-users-service
|
common/src/main/scala/pt/org/apec/services/users/common/json/JsonProtocol.scala
|
Scala
|
apache-2.0
| 2,194 |
object Test extends App{
BooleanArrayClone;
ByteArrayClone;
ShortArrayClone;
CharArrayClone;
IntArrayClone;
LongArrayClone;
FloatArrayClone;
DoubleArrayClone;
ObjectArrayClone;
PolymorphicArrayClone;
}
object BooleanArrayClone{
val it : Array[Boolean] = Array(true, false);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = false;
assert(it(0) == true)
}
object ByteArrayClone{
val it : Array[Byte] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object ShortArrayClone{
val it : Array[Short] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object CharArrayClone{
val it : Array[Char] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object IntArrayClone{
val it : Array[Int] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object LongArrayClone{
val it : Array[Long] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object FloatArrayClone{
val it : Array[Float] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object DoubleArrayClone{
val it : Array[Double] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object ObjectArrayClone{
val it : Array[String] = Array("1", "0");
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = "0";
assert(it(0) == "1")
}
object PolymorphicArrayClone{
def testIt[T](it : Array[T], one : T, zero : T) = {
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = zero;
assert(it(0) == one)
}
testIt(Array("one", "two"), "one", "two");
class Mangler[T: Manifest](ts : T*){
// this will always be a BoxedAnyArray even after we've unboxed its contents.
val it = ts.toArray[T];
}
val mangled = new Mangler[Int](0, 1);
val y : Array[Int] = mangled.it; // make sure it's unboxed
testIt(mangled.it, 0, 1);
}
|
som-snytt/dotty
|
tests/pending/run/arrayclone-old.scala
|
Scala
|
apache-2.0
| 2,272 |
/*
* Part of NDLA learningpath-api.
* Copyright (C) 2016 NDLA
*
* See LICENSE
*
*/
package no.ndla.learningpathapi.caching
import no.ndla.learningpathapi.UnitSuite
import org.mockito.Mockito._
class MemoizeTest extends UnitSuite {
class Target {
def targetMethod(value: String): String = s"Hei, $value"
}
test("That an uncached value will do an actual call") {
val targetMock = mock[Target]
val name = "Rune Rudberg"
val memoizedTarget = Memoize[String, String](targetMock.targetMethod)
when(targetMock.targetMethod(name)).thenReturn("Hello from mock")
memoizedTarget(name) should equal("Hello from mock")
verify(targetMock, times(1)).targetMethod(name)
}
test("That a cached value will not forward the call to the target") {
val targetMock = mock[Target]
val name = "Rune Rudberg"
val memoizedTarget = Memoize[String, String](targetMock.targetMethod)
when(targetMock.targetMethod(name)).thenReturn("Hello from mock")
    (1 to 10).foreach(i => {
memoizedTarget(name) should equal("Hello from mock")
})
verify(targetMock, times(1)).targetMethod(name)
}
test("That the cache is invalidated after cacheMaxAge") {
val cacheMaxAgeInMs = 20
val name = "Rune Rudberg"
val targetMock = mock[Target]
val memoizedTarget =
Memoize[String, String](targetMock.targetMethod, cacheMaxAgeInMs)
when(targetMock.targetMethod(name)).thenReturn("Hello from mock")
memoizedTarget(name) should equal("Hello from mock")
memoizedTarget(name) should equal("Hello from mock")
Thread.sleep(cacheMaxAgeInMs)
memoizedTarget(name) should equal("Hello from mock")
memoizedTarget(name) should equal("Hello from mock")
verify(targetMock, times(2)).targetMethod(name)
}
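  // For reference, a minimal Memoize sketch consistent with the calls above
  // (an assumed shape; the real implementation lives elsewhere in this project):
  //   case class Memoize[K, V](f: K => V, maxAgeMs: Long = Long.MaxValue) extends (K => V) {
  //     private val cache = scala.collection.mutable.Map.empty[K, (V, Long)]
  //     def apply(k: K): V = synchronized {
  //       val now = System.currentTimeMillis
  //       cache.get(k) match {
  //         case Some((v, t)) if now - t < maxAgeMs => v
  //         case _ => val v = f(k); cache(k) = (v, now); v
  //       }
  //     }
  //   }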
}
|
NDLANO/learningpath-api
|
src/test/scala/no/ndla/learningpathapi/caching/MemoizeTest.scala
|
Scala
|
gpl-3.0
| 1,784 |
/*
* Sentries
* Copyright (c) 2012-2015 Erik van Oosten All rights reserved.
*
* The primary distribution site is https://github.com/erikvanoosten/sentries
*
* This software is released under the terms of the BSD 2-Clause License.
* There is NO WARRANTY. See the file LICENSE for the full text.
*/
package nl.grons.sentries.support
import com.yammer.metrics.core.{Stoppable, MetricName}
import java.util.concurrent.{Executors, CopyOnWriteArrayList}
import scala.collection.concurrent.{Map => CMap}
import scala.collection.concurrent.TrieMap.{empty => emptyCMap}
import scala.collection.JavaConverters._
/**
* A registry of sentry instances.
*/
class SentriesRegistry() {
private[this] val listeners = new CopyOnWriteArrayList[SentriesRegistryListener]().asScala
private[this] val sentries: CMap[MetricName, NamedSentry] = newSentriesMap()
/**
* Adds a [[nl.grons.sentries.support.SentriesRegistryListener]] to a collection of listeners that will
* be notified on sentry creation. Listeners will be notified in the order in which they are added.
* <p/>
* <b>N.B.:</b> The listener will be notified of all existing sentries when it first registers.
*
* @param listener the listener that will be notified
*/
def addListener(listener: SentriesRegistryListener) {
listeners += listener
sentries.foreach {
case (name, sentry) => listener.onSentryAdded(name, sentry)
}
}
/**
* Removes a [[nl.grons.sentries.support.SentriesRegistryListener]] from this registry's collection of listeners.
*
* @param listener the listener that will be removed
*/
def removeListener(listener: SentriesRegistryListener) {
listeners -= listener
}
/**
* Gets any existing sentry with the given name or, if none exists, adds the given sentry.
*
* @param sentry the sentry to add
* @param sentryOwner the class that owns the sentry
* @param name name of the sentry
* @param sentryType sentryType type of sentry
* @tparam S type of the sentry
* @return either the existing sentry or `sentry`
*/
def getOrAdd[S <: NamedSentry](sentry: S, sentryOwner: Class[_], name: String, sentryType: String): S =
getOrAdd(createName(sentryOwner, name, sentryType), sentry)
/**
* Removes the sentry for the given class with the given name (and sentryType).
*
* @param sentryOwner the class that owns the sentry
* @param name the name of the sentry
* @param sentryType the sentryType of the sentry
*/
def removeSentry(sentryOwner: Class[_], name: String, sentryType: String) {
removeSentry(createName(sentryOwner, name, sentryType))
}
/**
* Removes the sentry with the given name.
*
* @param name the name of the sentry
*/
def removeSentry(name: MetricName) {
    sentries.remove(name).foreach { sentry =>
if (sentry.isInstanceOf[Stoppable]) sentry.asInstanceOf[Stoppable].stop()
notifySentriesRemoved(name)
}
}
/**
* Remove all sentries from the registry.
*
* See README.md section 'Sentries in tests' for alternatives during testing.
*/
@deprecated(message = "will be removed in sentries 0.6", since = "0.5")
def clear() {
val sentryNames = Set() ++ sentries.keySet
    sentryNames.foreach(sentryName => removeSentry(sentryName))
}
/**
* Reset all known sentries to their initial state by calling [Sentry#reset] on each sentry.
*
* See README.md section 'Sentries in tests' for alternatives during testing.
*/
def resetAllSentries() {
sentries.foreach {
case (_, sentry) => sentry.reset()
}
}
/**
* Override to customize how [[com.yammer.metrics.core.MetricName]]s are created.
*
* @param sentryOwner the class which owns the sentry
* @param name the name of the sentry
* @param sentryType the sentry's sentryType
* @return the sentry's full name
*/
protected def createName(sentryOwner: Class[_], name: String, sentryType: String): MetricName =
new MetricName(sentryOwner, name + "." + sentryType)
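  // e.g. createName(classOf[UserService], "db", "circuitBreaker") scopes the metric
  // to UserService under the name "db.circuitBreaker" (illustrative owner class).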
/**
* Returns a new concurrent map implementation. Subclass this to do weird things with
* your own [[nl.grons.sentries.support.SentriesRegistry]] implementation.
*
* @return a new [[scala.collection.concurrent.Map]]
*/
protected def newSentriesMap(): CMap[MetricName, NamedSentry] = emptyCMap
/**
* Gets any existing sentry with the given name or, if none exists, adds the given sentry.
*
* @param name the sentry's name
* @param sentry the new sentry
* @tparam S the type of the sentry
* @return either the existing sentry or `sentry`
*/
private def getOrAdd[S <: NamedSentry](name: MetricName, sentry: S): S = {
sentries.putIfAbsent(name, sentry) match {
case Some(existing) =>
if (sentry.isInstanceOf[Stoppable]) sentry.asInstanceOf[Stoppable].stop()
existing.asInstanceOf[S]
case None =>
notifySentriesAdded(name, sentry)
sentry
}
}
private def notifySentriesRemoved(name: MetricName) {
listeners.foreach(_.onSentryRemoved(name))
}
private def notifySentriesAdded(name: MetricName, sentry: NamedSentry) {
listeners.foreach(_.onSentryAdded(name, sentry))
}
}
object SentriesRegistry {
val executor = Executors.newCachedThreadPool()
}
|
erikvanoosten/sentries
|
src/main/scala/nl/grons/sentries/support/SentriesRegistry.scala
|
Scala
|
bsd-2-clause
| 5,289 |
package com.pamu_nagarjuna.meetingroom.ui.commons
import android.support.v7.widget.Toolbar
import android.view.{ContextThemeWrapper, View}
import com.fortysevendeg.android.scaladays.ui.commons.ToolbarStyles
import com.fortysevendeg.macroid.extras.ResourcesExtras._
import com.pamu_nagarjuna.meetingroom.R
import macroid.FullDsl._
import macroid.{ActivityContextWrapper, Ui, ContextWrapper}
/**
* Created by pnagarjuna on 19/08/15.
*/
trait ToolbarLayout extends ToolbarStyles {
var toolBar = slot[Toolbar]
def toolBarLayout(children: Ui[View]*)(implicit appContext: ContextWrapper, activityContext: ActivityContextWrapper): Ui[Toolbar] =
Ui {
val darkToolBar = getToolbarThemeDarkActionBar
children foreach (uiView => darkToolBar.addView(uiView.get))
toolBar = Some(darkToolBar)
darkToolBar
} <~ toolbarStyle(resGetDimensionPixelSize(R.dimen.height_toolbar))
def expandedToolBarLayout(children: Ui[View]*)
(height: Int)
(implicit appContext: ContextWrapper, activityContext: ActivityContextWrapper): Ui[Toolbar] =
Ui {
val darkToolBar = getToolbarThemeDarkActionBar
children foreach (uiView => darkToolBar.addView(uiView.get))
toolBar = Some(darkToolBar)
darkToolBar
} <~ toolbarStyle(height)
private def getToolbarThemeDarkActionBar(implicit activityContext: ActivityContextWrapper) = {
val contextTheme = new ContextThemeWrapper(activityContext.getOriginal, R.style.ThemeOverlay_AppCompat_Dark_ActionBar)
val darkToolBar = new Toolbar(contextTheme)
darkToolBar.setPopupTheme(R.style.ThemeOverlay_AppCompat_Light)
darkToolBar
}
}
|
pamu/next-meeting
|
src/main/scala/com/pamu_nagarjuna/meetingroom/ui/commons/ToolbarLayout.scala
|
Scala
|
apache-2.0
| 1,685 |
package gitbucket.core.service
import gitbucket.core.model.Account
import gitbucket.core.util.Directory._
import gitbucket.core.util.SyntaxSugars._
import org.eclipse.jgit.merge.{MergeStrategy, Merger, RecursiveMerger}
import org.eclipse.jgit.api.{Git, MergeResult}
import org.eclipse.jgit.transport.RefSpec
import org.eclipse.jgit.errors.NoMergeBaseException
import org.eclipse.jgit.lib.{CommitBuilder, ObjectId, PersonIdent, Repository}
import org.eclipse.jgit.revwalk.{RevCommit, RevWalk}
import scala.collection.JavaConverters._
trait MergeService {
import MergeService._
/**
* Checks whether conflict will be caused in merging within pull request.
* Returns true if conflict will be caused.
*/
def checkConflict(userName: String, repositoryName: String, branch: String, issueId: Int): Option[String] = {
using(Git.open(getRepositoryDir(userName, repositoryName))) { git =>
new MergeCacheInfo(git, branch, issueId).checkConflict()
}
}
/**
* Checks whether conflict will be caused in merging within pull request.
* only cache check.
* Returns Some(true) if conflict will be caused.
* Returns None if cache has not created yet.
*/
def checkConflictCache(userName: String, repositoryName: String, branch: String, issueId: Int): Option[Option[String]] = {
using(Git.open(getRepositoryDir(userName, repositoryName))) { git =>
new MergeCacheInfo(git, branch, issueId).checkConflictCache()
}
}
/** merge the pull request with a merge commit */
def mergePullRequest(git: Git, branch: String, issueId: Int, message: String, committer: PersonIdent): Unit = {
new MergeCacheInfo(git, branch, issueId).merge(message, committer)
}
/** rebase to the head of the pull request branch */
def rebasePullRequest(git: Git, branch: String, issueId: Int, commits: Seq[RevCommit], committer: PersonIdent): Unit = {
new MergeCacheInfo(git, branch, issueId).rebase(committer, commits)
}
/** squash commits in the pull request and append it */
def squashPullRequest(git: Git, branch: String, issueId: Int, message: String, committer: PersonIdent): Unit = {
new MergeCacheInfo(git, branch, issueId).squash(message, committer)
}
/** fetch remote branch to my repository refs/pull/{issueId}/head */
  def fetchAsPullRequest(userName: String, repositoryName: String, requestUserName: String, requestRepositoryName: String, requestBranch: String, issueId: Int) {
using(Git.open(getRepositoryDir(userName, repositoryName))){ git =>
git.fetch
.setRemote(getRepositoryDir(requestUserName, requestRepositoryName).toURI.toString)
.setRefSpecs(new RefSpec(s"refs/heads/${requestBranch}:refs/pull/${issueId}/head"))
.call
}
}
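  // Example (illustrative): for issueId = 12 and requestBranch = "feature-x", the
  // RefSpec above is "refs/heads/feature-x:refs/pull/12/head", i.e. the contributor's
  // branch is fetched into this repository's pull-request namespace.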
/**
* Checks whether conflict will be caused in merging. Returns true if conflict will be caused.
*/
def tryMergeRemote(localUserName: String, localRepositoryName: String, localBranch: String,
remoteUserName: String, remoteRepositoryName: String, remoteBranch: String): Either[String, (ObjectId, ObjectId, ObjectId)] = {
using(Git.open(getRepositoryDir(localUserName, localRepositoryName))) { git =>
val remoteRefName = s"refs/heads/${remoteBranch}"
val tmpRefName = s"refs/remote-temp/${remoteUserName}/${remoteRepositoryName}/${remoteBranch}"
val refSpec = new RefSpec(s"${remoteRefName}:${tmpRefName}").setForceUpdate(true)
try {
// fetch objects from origin repository branch
git.fetch
.setRemote(getRepositoryDir(remoteUserName, remoteRepositoryName).toURI.toString)
.setRefSpecs(refSpec)
.call
// merge conflict check
val merger = MergeStrategy.RECURSIVE.newMerger(git.getRepository, true)
val mergeBaseTip = git.getRepository.resolve(s"refs/heads/${localBranch}")
val mergeTip = git.getRepository.resolve(tmpRefName)
try {
if(merger.merge(mergeBaseTip, mergeTip)){
Right((merger.getResultTreeId, mergeBaseTip, mergeTip))
} else {
Left(createConflictMessage(mergeTip, mergeBaseTip, merger))
}
} catch {
case e: NoMergeBaseException => Left(e.toString)
}
} finally {
val refUpdate = git.getRepository.updateRef(refSpec.getDestination)
refUpdate.setForceUpdate(true)
refUpdate.delete()
}
}
}
/**
* Checks whether conflict will be caused in merging. Returns `Some(errorMessage)` if conflict will be caused.
*/
def checkConflict(userName: String, repositoryName: String, branch: String,
requestUserName: String, requestRepositoryName: String, requestBranch: String): Option[String] =
tryMergeRemote(userName, repositoryName, branch, requestUserName, requestRepositoryName, requestBranch).left.toOption
def pullRemote(localUserName: String, localRepositoryName: String, localBranch: String,
remoteUserName: String, remoteRepositoryName: String, remoteBranch: String,
loginAccount: Account, message: String): Option[ObjectId] = {
tryMergeRemote(localUserName, localRepositoryName, localBranch, remoteUserName, remoteRepositoryName, remoteBranch).map { case (newTreeId, oldBaseId, oldHeadId) =>
using(Git.open(getRepositoryDir(localUserName, localRepositoryName))) { git =>
val committer = new PersonIdent(loginAccount.fullName, loginAccount.mailAddress)
val newCommit = Util.createMergeCommit(git.getRepository, newTreeId, committer, message, Seq(oldBaseId, oldHeadId))
Util.updateRefs(git.getRepository, s"refs/heads/${localBranch}", newCommit, false, committer, Some("merge"))
}
oldBaseId
}.toOption
}
}
object MergeService{
object Util{
// return merge commit id
def createMergeCommit(repository: Repository, treeId: ObjectId, committer: PersonIdent, message: String, parents: Seq[ObjectId]): ObjectId = {
val mergeCommit = new CommitBuilder()
mergeCommit.setTreeId(treeId)
mergeCommit.setParentIds(parents:_*)
mergeCommit.setAuthor(committer)
mergeCommit.setCommitter(committer)
mergeCommit.setMessage(message)
// insertObject and got mergeCommit Object Id
using(repository.newObjectInserter){ inserter =>
val mergeCommitId = inserter.insert(mergeCommit)
inserter.flush()
mergeCommitId
}
}
def updateRefs(repository: Repository, ref: String, newObjectId: ObjectId, force: Boolean, committer: PersonIdent, refLogMessage: Option[String] = None): Unit = {
val refUpdate = repository.updateRef(ref)
refUpdate.setNewObjectId(newObjectId)
refUpdate.setForceUpdate(force)
refUpdate.setRefLogIdent(committer)
refLogMessage.map(refUpdate.setRefLogMessage(_, true))
refUpdate.update()
}
}
class MergeCacheInfo(git: Git, branch: String, issueId: Int){
private val repository = git.getRepository
private val mergedBranchName = s"refs/pull/${issueId}/merge"
private val conflictedBranchName = s"refs/pull/${issueId}/conflict"
lazy val mergeBaseTip = repository.resolve(s"refs/heads/${branch}")
lazy val mergeTip = repository.resolve(s"refs/pull/${issueId}/head")
def checkConflictCache(): Option[Option[String]] = {
Option(repository.resolve(mergedBranchName)).flatMap { merged =>
if(parseCommit(merged).getParents().toSet == Set( mergeBaseTip, mergeTip )){
// merged branch exists
Some(None)
} else {
None
}
}.orElse(Option(repository.resolve(conflictedBranchName)).flatMap{ conflicted =>
val commit = parseCommit(conflicted)
if(commit.getParents().toSet == Set( mergeBaseTip, mergeTip )){
// conflict branch exists
Some(Some(commit.getFullMessage))
} else {
None
}
})
}
    def checkConflict(): Option[String] = {
checkConflictCache.getOrElse(checkConflictForce)
}
    def checkConflictForce(): Option[String] = {
val merger = MergeStrategy.RECURSIVE.newMerger(repository, true)
val conflicted = try {
!merger.merge(mergeBaseTip, mergeTip)
} catch {
case e: NoMergeBaseException => true
}
val mergeTipCommit = using(new RevWalk( repository ))(_.parseCommit( mergeTip ))
val committer = mergeTipCommit.getCommitterIdent
def _updateBranch(treeId: ObjectId, message: String, branchName: String){
// creates merge commit
val mergeCommitId = createMergeCommit(treeId, committer, message)
Util.updateRefs(repository, branchName, mergeCommitId, true, committer)
}
if(!conflicted){
_updateBranch(merger.getResultTreeId, s"Merge ${mergeTip.name} into ${mergeBaseTip.name}", mergedBranchName)
git.branchDelete().setForce(true).setBranchNames(conflictedBranchName).call()
None
} else {
val message = createConflictMessage(mergeTip, mergeBaseTip, merger)
_updateBranch(mergeTipCommit.getTree().getId(), message, conflictedBranchName)
git.branchDelete().setForce(true).setBranchNames(mergedBranchName).call()
Some(message)
}
}
// update branch from cache
def merge(message:String, committer:PersonIdent) = {
if(checkConflict().isDefined){
throw new RuntimeException("This pull request can't merge automatically.")
}
val mergeResultCommit = parseCommit(Option(repository.resolve(mergedBranchName)).getOrElse {
throw new RuntimeException(s"Not found branch ${mergedBranchName}")
})
// creates merge commit
val mergeCommitId = createMergeCommit(mergeResultCommit.getTree().getId(), committer, message)
// update refs
Util.updateRefs(repository, s"refs/heads/${branch}", mergeCommitId, false, committer, Some("merged"))
}
def rebase(committer: PersonIdent, commits: Seq[RevCommit]): Unit = {
if(checkConflict().isDefined){
throw new RuntimeException("This pull request can't merge automatically.")
}
def _cloneCommit(commit: RevCommit, parents: Array[ObjectId]): CommitBuilder = {
val newCommit = new CommitBuilder()
newCommit.setTreeId(commit.getTree.getId)
parents.foreach { parentId =>
newCommit.addParentId(parentId)
}
newCommit.setAuthor(commit.getAuthorIdent)
newCommit.setCommitter(committer)
newCommit.setMessage(commit.getFullMessage)
newCommit
}
val mergeBaseTipCommit = using(new RevWalk( repository ))(_.parseCommit( mergeBaseTip ))
var previousId = mergeBaseTipCommit.getId
using(repository.newObjectInserter){ inserter =>
commits.foreach { commit =>
val nextCommit = _cloneCommit(commit, Array(previousId))
previousId = inserter.insert(nextCommit)
}
inserter.flush()
}
Util.updateRefs(repository, s"refs/heads/${branch}", previousId, false, committer, Some("rebased"))
}
def squash(message: String, committer: PersonIdent): Unit = {
if(checkConflict().isDefined){
throw new RuntimeException("This pull request can't merge automatically.")
}
val mergeBaseTipCommit = using(new RevWalk( repository ))(_.parseCommit(mergeBaseTip))
val mergeBranchHeadCommit = using(new RevWalk( repository ))(_.parseCommit(repository.resolve(mergedBranchName)))
// Create squash commit
val mergeCommit = new CommitBuilder()
mergeCommit.setTreeId(mergeBranchHeadCommit.getTree.getId)
mergeCommit.setParentId(mergeBaseTipCommit)
mergeCommit.setAuthor(mergeBranchHeadCommit.getAuthorIdent)
mergeCommit.setCommitter(committer)
mergeCommit.setMessage(message)
// insertObject and got squash commit Object Id
val newCommitId = using(repository.newObjectInserter){ inserter =>
val newCommitId = inserter.insert(mergeCommit)
inserter.flush()
newCommitId
}
Util.updateRefs(repository, mergedBranchName, newCommitId, true, committer)
// rebase to squash commit
Util.updateRefs(repository, s"refs/heads/${branch}", repository.resolve(mergedBranchName), false, committer, Some("squashed"))
}
// return treeId
private def createMergeCommit(treeId: ObjectId, committer: PersonIdent, message: String) =
Util.createMergeCommit(repository, treeId, committer, message, Seq[ObjectId](mergeBaseTip, mergeTip))
private def parseCommit(id: ObjectId) = using(new RevWalk( repository ))(_.parseCommit(id))
}
private def createConflictMessage(mergeTip: ObjectId, mergeBaseTip: ObjectId, merger: Merger): String = {
val mergeResults = merger.asInstanceOf[RecursiveMerger].getMergeResults
s"Can't merge ${mergeTip.name} into ${mergeBaseTip.name}\\n\\n" +
"Conflicting files:\\n" +
mergeResults.asScala.map { case (key, _) => "- " + key + "\\n" }.mkString
}
}
|
gencer/gitbucket
|
src/main/scala/gitbucket/core/service/MergeService.scala
|
Scala
|
apache-2.0
| 13,025 |
package text.search
/**
* @author ynupc
* Created on 2016/08/21
*/
object ApostolicoCrochemore extends Search {
override def indexOf[T](source: Array[T], target: Array[T]): Int = {
-1
}
override def indicesOf[T](source: Array[T], target: Array[T]): Array[Int] = {
Array()
}
}
|
ynupc/scalastringcourseday6
|
src/main/scala/text/search/ApostolicoCrochemore.scala
|
Scala
|
apache-2.0
| 308 |
package mesosphere.marathon
import mesosphere.chaos.AppConfiguration
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.core.event.EventConf
import mesosphere.marathon.core.plugin.PluginManagerConfiguration
import mesosphere.marathon.metrics.MetricsReporterConf
import org.rogach.scallop.ScallopConf
class AllConf(args: Seq[String] = Nil) extends ScallopConf(args)
with MetricsReporterConf
with HttpConf
with MarathonConf
with AppConfiguration
with EventConf
with DebugConf
with PluginManagerConfiguration {
verify()
}
object AllConf {
def apply(args: String*): AllConf = {
new AllConf(args.to[Seq])
}
def withTestConfig(args: String*): AllConf = {
new AllConf(Seq("--master", "local") ++ args)
}
}
|
natemurthy/marathon
|
src/main/scala/mesosphere/marathon/AllConf.scala
|
Scala
|
apache-2.0
| 763 |
package org.sisioh.aws4s.dynamodb.document
import com.amazonaws.services.dynamodbv2.document.Attribute
import org.sisioh.aws4s.PimpedType
object AttributeFactory {
def create(attrName: String, value: AnyRef): Attribute = new Attribute(attrName, value)
}
class RichAttribute(val underlying: Attribute) extends AnyVal with PimpedType[Attribute] {
def name: String = underlying.getName
def value: AnyRef = underlying.getValue
}
|
everpeace/aws4s
|
aws4s-dynamodb/src/main/scala/org/sisioh/aws4s/dynamodb/document/RichAttribute.scala
|
Scala
|
mit
| 439 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.lang
import java.lang.{Byte => JByte}
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.AssertThrows.assertThrows
/** Tests the implementation of the java standard library Byte
*/
class ByteTest {
@Test def compareToJavaByte(): Unit = {
def compare(x: Byte, y: Byte): Int =
new JByte(x).compareTo(new JByte(y))
assertTrue(compare(0.toByte, 5.toByte) < 0)
assertTrue(compare(10.toByte, 9.toByte) > 0)
assertTrue(compare(-2.toByte, -1.toByte) < 0)
assertEquals(0, compare(3.toByte, 3.toByte))
}
@Test def compareTo(): Unit = {
def compare(x: Any, y: Any): Int =
x.asInstanceOf[Comparable[Any]].compareTo(y)
assertTrue(compare(0.toByte, 5.toByte) < 0)
assertTrue(compare(10.toByte, 9.toByte) > 0)
assertTrue(compare(-2.toByte, -1.toByte) < 0)
assertEquals(0, compare(3.toByte, 3.toByte))
}
@Test def parseString(): Unit = {
def test(s: String, v: Byte): Unit = {
assertEquals(v, JByte.parseByte(s))
assertEquals(v, JByte.valueOf(s).byteValue())
assertEquals(v, new JByte(s).byteValue())
assertEquals(v, JByte.decode(s))
}
test("0", 0)
test("5", 5)
test("127", 127)
test("-100", -100)
}
@Test def parseStringInvalidThrows(): Unit = {
def test(s: String): Unit = {
assertThrows(classOf[NumberFormatException], JByte.parseByte(s))
assertThrows(classOf[NumberFormatException], JByte.decode(s))
}
test("abc")
test("")
test("200") // out of range
}
@Test def parseStringBase16(): Unit = {
def test(s: String, v: Byte): Unit = {
assertEquals(v, JByte.parseByte(s, 16))
assertEquals(v, JByte.valueOf(s, 16).intValue())
assertEquals(v, JByte.decode(IntegerTest.insertAfterSign("0x", s)))
assertEquals(v, JByte.decode(IntegerTest.insertAfterSign("0X", s)))
assertEquals(v, JByte.decode(IntegerTest.insertAfterSign("#", s)))
}
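    // insertAfterSign (defined in IntegerTest) presumably places the prefix after
    // any sign, e.g. insertAfterSign("0x", "-24") gives "-0x24", which decodes to -36.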
test("0", 0x0)
test("5", 0x5)
test("7f", 0x7f)
test("-24", -0x24)
test("30", 0x30)
test("-9", -0x9)
}
@Test def decodeStringBase8(): Unit = {
def test(s: String, v: Byte): Unit = {
assertEquals(v, JByte.decode(s))
}
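    // A leading 0 selects base 8 in decode: "0123" is 1*64 + 2*8 + 3 == 83.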
test("00", 0)
test("0123", 83)
test("-012", -10)
}
@Test def decodeInvalidThrows(): Unit = {
def test(s: String): Unit =
assertThrows(classOf[NumberFormatException], JByte.decode(s))
// sign after another sign or after a base prefix
test("++0")
test("--0")
test("0x+1")
test("0X-1")
test("#-1")
test("0-1")
// empty string after sign or after base prefix
test("")
test("+")
test("-")
test("-0x")
test("+0X")
test("#")
// integer too large
test("0x80")
test("-0x81")
test("0200")
test("-0201")
}
}
|
gzm0/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/lang/ByteTest.scala
|
Scala
|
apache-2.0
| 3,126 |
package funsets
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* This class is a test suite for the methods in object FunSets. To run
* the test suite, you can either:
* - run the "test" command in the SBT console
* - right-click the file in eclipse and chose "Run As" - "JUnit Test"
*/
@RunWith(classOf[JUnitRunner])
class FunSetSuite extends FunSuite {
/**
* Link to the scaladoc - very clear and detailed tutorial of FunSuite
*
* http://doc.scalatest.org/1.9.1/index.html#org.scalatest.FunSuite
*
* Operators
* - test
* - ignore
* - pending
*/
/**
* Tests are written using the "test" operator and the "assert" method.
*/
test("string take") {
val message = "hello, world"
assert(message.take(5) == "hello")
}
/**
* For ScalaTest tests, there exists a special equality operator "===" that
* can be used inside "assert". If the assertion fails, the two values will
* be printed in the error message. Otherwise, when using "==", the test
* error message will only say "assertion failed", without showing the values.
*
* Try it out! Change the values so that the assertion fails, and look at the
* error message.
*/
test("adding ints") {
assert(1 + 2 === 3)
}
import FunSets._
test("contains is implemented") {
assert(contains(x => true, 100))
}
/**
* When writing tests, one would often like to re-use certain values for multiple
* tests. For instance, we would like to create an Int-set and have multiple test
* about it.
*
* Instead of copy-pasting the code for creating the set into every test, we can
* store it in the test class using a val:
*
* val s1 = singletonSet(1)
*
* However, what happens if the method "singletonSet" has a bug and crashes? Then
* the test methods are not even executed, because creating an instance of the
* test class fails!
*
* Therefore, we put the shared values into a separate trait (traits are like
* abstract classes), and create an instance inside each test method.
*
*/
trait TestSets {
val s1 = singletonSet(1)
val s2 = singletonSet(2)
val s3 = singletonSet(3)
val s4 = singletonSet(4)
}
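  // For context (assumed from the course assignment; FunSets is not defined in this
  // file): a set is represented by its characteristic function,
  //   type Set = Int => Boolean
  // so singletonSet(1) is (x: Int) => x == 1, union(s, t) is (x: Int) => s(x) || t(x),
  // and contains(s, x) is simply s(x).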
/**
* This test is currently disabled (by using "ignore") because the method
* "singletonSet" is not yet implemented and the test would fail.
*
* Once you finish your implementation of "singletonSet", exchange the
* function "ignore" by "test".
*/
test("singletonSet(1) contains 1") {
/**
* We create a new instance of the "TestSets" trait, this gives us access
* to the values "s1" to "s3".
*/
new TestSets {
/**
* The string argument of "assert" is a message that is printed in case
* the test fails. This helps identifying which assertion failed.
*/
assert(contains(s1, 1), "Singleton")
}
}
test("union contains all elements") {
new TestSets {
val s = union(s1, s2)
assert(contains(s, 1), "Union 1")
assert(contains(s, 2), "Union 2")
assert(!contains(s, 3), "Union 3")
}
}
test("intsection contains common elements") {
new TestSets {
val s = union(s1, s2)
val t = union(s2, s3)
val i = intersect(s, t)
assert(!contains(i, 1), "Intersect 1")
assert(contains(i, 2), "Intersect 2")
assert(!contains(i, 3), "Intersect 3")
}
}
test("diff contains unique elements") {
new TestSets {
val s = union(s1, s2)
val t = union(s2, s3)
val d = diff(s, t)
assert(contains(d, 1), "Diff 1")
assert(!contains(d, 2), "Diff 2")
assert(!contains(d, 3), "Diff 3")
}
}
test("filter contains qualified elements") {
new TestSets {
      val evenNumbers = (x: Int) => x % 2 == 0
val s = union(s1, s2)
val t = union(s, s3)
val u = union(t, s4)
val f = filter(u, evenNumbers)
assert(!contains(f, 1), "Filter 1")
assert(contains(f, 2), "Filter 2")
assert(!contains(f, 3), "Filter 3")
assert(contains(f, 4), "Filter 4")
}
}
test("forall contains qualified elements") {
new TestSets {
val evenNumbers = (x: Int) => x % 2 == 0
val s = union(s2, s4)
assert(forall(s, evenNumbers), "find all evens")
val t = union(s2, s3)
assert(!forall(t, evenNumbers), "dont find all evens")
}
}
test("exists contains at least one qualified element") {
new TestSets {
val evenNumbers = (x: Int) => x % 2 == 0
val s = union(s1, s3)
assert(!forall(s, evenNumbers), "find no evens")
val t = union(s1, s4)
assert(!forall(t, evenNumbers), "find first evens")
}
}
test("map generates transformed set") {
new TestSets {
val doubleNumbers = (x: Int) => x * 2
val s = union(s1, s2)
val t = union(s, s3)
val u = union(t, s4)
val dbl = map(u, doubleNumbers)
assert(contains(dbl, 2), "Double is 2")
assert(contains(dbl, 4), "Double is 4")
assert(contains(dbl, 6), "Double is 6")
assert(contains(dbl, 8), "Double is 8")
printSet(dbl)
}
}
}
|
mmcc007/scala-principles
|
funsets/src/test/scala/funsets/FunSetSuite.scala
|
Scala
|
gpl-3.0
| 5,221 |
package org.jetbrains.plugins.scala
package lang.parser.parsing.patterns
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.CommonUtils
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author kfeodorov
* @since 01.03.14.
*/
object InterpolationPattern extends InterpolationPattern {
override protected val commonUtils = CommonUtils
}
trait InterpolationPattern {
protected val commonUtils: CommonUtils
def parse(builder: ScalaPsiBuilder): Boolean =
builder.getTokenType match {
case ScalaTokenTypes.tINTERPOLATED_STRING_ID =>
commonUtils.parseInterpolatedString(builder, isPattern = true)
true
case _ => false
}
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/parser/parsing/patterns/InterpolationPattern.scala
|
Scala
|
apache-2.0
| 766 |
package com.timeout.docless.schema.derive
import com.timeout.docless.schema._
import JsonSchema._
import shapeless._
import io.circe._
import io.circe.syntax._
import shapeless.labelled.FieldType
import reflect.runtime.{universe => ru}
trait HListInstances {
implicit val hNilSchema: JsonSchema[HNil] = inlineInstance(
JsonObject.fromMap(Map.empty)
)
implicit def hlistSchema[K <: Symbol, H, T <: HList](
implicit witness: Witness.Aux[K],
lazyHSchema: Lazy[JsonSchema[H]],
lazyTSchema: Lazy[JsonSchema[T]]
): JsonSchema[FieldType[K, H] :: T] = instanceAndRelated {
val fieldName = witness.value.name
val hSchema = lazyHSchema.value
val tSchema = lazyTSchema.value
val (hValue, related) =
if (hSchema.inline)
hSchema.asJson -> tSchema.relatedDefinitions
else
hSchema.asJsonRef -> (tSchema.relatedDefinitions + hSchema
.NamedDefinition(fieldName))
val hField = fieldName -> hValue
val tFields = tSchema.jsonObject.toList
JsonObject.fromIterable(hField :: tFields) -> related
}
implicit def genericSchema[A, R <: HList](
implicit gen: LabelledGeneric.Aux[A, R],
rSchema: JsonSchema[R],
fields: Required.Fields[R],
tag: ru.WeakTypeTag[A]
): JsonSchema[A] =
instanceAndRelated[A] {
JsonObject.fromMap(
Map(
"type" -> Json.fromString("object"),
"required" -> fields.asJson,
"properties" -> rSchema.jsonObject.asJson
)
) -> rSchema.relatedDefinitions
}
}
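// Illustrative derivation (assumed usage, not part of this file): for
//   case class User(name: String, age: Int)
// genericSchema produces a schema of the shape
//   {"type": "object", "required": ["name", "age"],
//    "properties": {"name": {...}, "age": {...}}}
// with non-inline field schemas emitted as JSON references plus related definitions.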
|
timeoutdigital/docless
|
src/main/scala/com/timeout/docless/schema/derive/HListInstances.scala
|
Scala
|
mit
| 1,560 |
package exercises
object Exercise22 {
def isSorted[A](as: Array[A], ordering: (A, A) => Boolean): Boolean = {
@annotation.tailrec
def go(n: Int): Boolean =
if (n >= as.length - 1) true
else if (!ordering(as(n), as(n + 1))) false
else go(n + 1)
go(0)
}
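  // e.g. isSorted(Array(1, 2, 3), (a: Int, b: Int) => a <= b) == true
  //      isSorted(Array(3, 1), (a: Int, b: Int) => a <= b) == false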
}
|
tobal/scala-course
|
red-book/src/main/scala/exercises/Exercise22.scala
|
Scala
|
gpl-3.0
| 291 |
object Test extends App {
val x: Option[Int] = Macros.foo
println(x)
}
|
lrytz/scala
|
test/files/run/t8048a/Test_2.scala
|
Scala
|
apache-2.0
| 75 |
package com.github.tminglei.bind
import org.scalatest._
import scala.collection.mutable.ListBuffer
class ProcessorsSpec extends FunSpec with Matchers {
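  // Shape assumed by the tests below: each pre-processor is a function
  //   (prefix: String, data: Map[String, String], options: Options) => Map[String, String]
  // applied to the raw input map before binding.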
describe("test pre-defined pre-processors") {
it("trim") {
val trim = Processors.trim
trim("", Map("" -> null), Options.apply()) should be (Map("" -> null))
trim("", Map("" -> " yuu"), Options.apply()) should be (Map("" -> "yuu"))
trim("a", Map("a" -> "eyuu"), Options.apply()) should be (Map("a" -> "eyuu"))
}
it("omit") {
val omit = Processors.omit(",")
omit("", Map("" -> null), Options.apply()) should be (Map("" -> null))
omit("", Map("" -> "123,334"), Options.apply()) should be (Map("" -> "123334"))
omit("a", Map("a" -> "2.345e+5"), Options.apply()) should be (Map("a" -> "2.345e+5"))
}
it("omit-left") {
val omitLeft = Processors.omitLeft("$")
omitLeft("", Map("" -> null), Options.apply()) should be (Map("" -> null))
omitLeft("", Map("" -> "$3,567"), Options.apply()) should be (Map("" -> "3,567"))
omitLeft("a", Map("a" -> "35667"), Options.apply()) should be (Map("a" -> "35667"))
}
it("omit-right") {
val omitRight = Processors.omitRight("-tat")
omitRight("", Map("" -> null), Options.apply()) should be (Map("" -> null))
omitRight("a", Map("a" -> "tewwwtt-tat"), Options.apply()) should be (Map("a" -> "tewwwtt"))
}
it("omit-redundant") {
val cleanRedundant = Processors.omitRedundant(" ")
cleanRedundant("", Map("" -> null), Options.apply()) should be (Map("" -> null))
cleanRedundant("a", Map("a" -> " a teee 86y"), Options.apply()) should be (Map("a" -> " a teee 86y"))
cleanRedundant("", Map("" -> "te yu "), Options.apply()) should be (Map("" -> "te yu "))
}
it("omit-matched") {
val omitMatched = Processors.omitMatched("-\\d\\d$".r)
omitMatched("", Map("" -> null), Options.apply()) should be (Map("" -> null))
omitMatched("", Map("" -> "2342-334-12"), Options.apply()) should be (Map("" -> "2342-334"))
omitMatched("a", Map("a" -> "2342-334"), Options.apply()) should be (Map("a" -> "2342-334"))
}
it("omit-matched w/ replacement") {
val omitMatched = Processors.omitMatched("-\\d\\d$".r, "-1")
omitMatched("", Map("" -> null), Options.apply()) should be (Map("" -> null))
omitMatched("", Map("" -> "2342-334-12"), Options.apply()) should be (Map("" -> "2342-334-1"))
omitMatched("a", Map("a" -> "2342-334"), Options.apply()) should be (Map("a" -> "2342-334"))
}
}
describe("test pre-defined bulk pre-processors") {
describe("changePrefix") {
it("simple") {
val changePrefix = Processors.changePrefix("json", "data")
val data = Map(
"aa" -> "wett",
"json.id" -> "123",
"json.name" -> "tewd",
"json.dr-1[0]" -> "33",
"json.dr-1[1]" -> "45"
)
val expected = Map(
"aa" -> "wett",
"data.id" -> "123",
"data.name" -> "tewd",
"data.dr-1[0]" -> "33",
"data.dr-1[1]" -> "45"
)
changePrefix("", data, Options.apply()) should be (expected)
}
}
describe("expandJson") {
it("simple") {
val expandJson = Processors.expandJson(Some("json"))
val data = Map(
"aa" -> "wett",
"json" -> """{"id":123, "name":"tewd", "dr-1":[33,45]}"""
)
val expected = Map(
"aa" -> "wett",
"json.id" -> "123",
"json.name" -> "tewd",
"json.dr-1[0]" -> "33",
"json.dr-1[1]" -> "45"
)
expandJson("", data, Options.apply()) should be (expected)
}
it("null or empty") {
val expandJsonData = Processors.expandJson(Some("json"))
val nullData = Map("aa" -> "wett")
expandJsonData("", nullData, Options.apply()) should be (nullData)
val nullData1 = Map("aa" -> "wett", "json" -> null)
expandJsonData("", nullData1, Options.apply()) should be (nullData1)
val emptyData1 = Map("aa" -> "wett", "json" -> "")
expandJsonData("", emptyData1, Options.apply()) should be (emptyData1)
}
it("with dest prefix") {
val expandJson = Processors.expandJson(Some("body"))
val data = Map(
"aa" -> "wett",
"body" -> """{"id":123, "name":"tewd", "dr-1":[33,45]}"""
)
val expected = Map(
"aa" -> "wett",
"body.id" -> "123",
"body.name" -> "tewd",
"body.dr-1[0]" -> "33",
"body.dr-1[1]" -> "45"
)
expandJson("", data, Options.apply()) should be (expected)
}
}
}
describe("test pre-defined post err-processors") {
describe("foldErrs") {
it("simple") {
val errs = Seq(
"" -> "top error1",
"aa" -> "error aa",
"aa.ty" -> "error aa.ty",
"aa" -> "error aa 1",
"aa.tl[3]" -> "ewty",
"aa.tl[3]" -> "ewyu7",
"br-1[t0]" -> "key: eeor",
"br-1[t0]" -> "tert",
"br-1[1]" -> "tetty",
"" -> "top error2"
)
val expected = Map(
"" -> List("top error1", "top error2"),
"aa" -> List("error aa", "error aa 1"),
"aa.ty" -> List("error aa.ty"),
"aa.tl[3]" -> List("ewty", "ewyu7"),
"br-1[t0]" -> List("key: eeor", "tert"),
"br-1[1]" -> List("tetty")
)
Processors.foldErrs()(errs) should be (expected)
}
}
describe("errsTree") {
it("simple") {
val errs = Seq(
"" -> "top error1",
"aa" -> "error aa",
"aa.ty" -> "error aa.ty",
"aa" -> "error aa 1",
"aa.tl[3]" -> "ewty",
"aa.tl[3]" -> "ewyu7",
"br-1[t0]" -> "key: eeor",
"br-1[t0]" -> "tert",
"br-1[1]" -> "tetty",
"" -> "top error2"
)
val expected = Map(
"_errors" -> ListBuffer("top error1", "top error2"),
"br-1" -> Map(
"t0" -> Map(
"_errors" -> ListBuffer("key: eeor", "tert")
),
"1" -> Map(
"_errors" -> ListBuffer("tetty")
)
),
"aa" -> Map(
"ty" -> Map(
"_errors" -> ListBuffer("error aa.ty")
),
"tl" -> Map(
"3" -> Map(
"_errors" -> ListBuffer("ewty", "ewyu7")
)
),
"_errors" -> ListBuffer("error aa", "error aa 1")
))
Processors.errsTree()(errs) should be (expected)
}
}
}
}
|
tminglei/form-binder
|
src/test/scala/com/github/tminglei/bind/ProcessorsSpec.scala
|
Scala
|
bsd-2-clause
| 6,704 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.async
package run
package ifelse3
import language.{reflectiveCalls, postfixOps}
import scala.concurrent.{Future, ExecutionContext, Await}
import scala.concurrent.duration._
import scala.async.Async.{async, await}
import org.junit.Test
class TestIfElse3Class {
import ExecutionContext.Implicits.global
def base(x: Int): Future[Int] = Future {
x + 2
}
def m(y: Int): Future[Int] = async {
val f = base(y)
var z = 0
if (y > 0) {
val x1 = await(f)
var w = x1 + 2
z = w + 2
} else {
val x2 = await(f)
var w = x2 + 2
z = w - 2
}
z
}
}
class IfElse3Spec {
@Test
def `variables of the same name in different blocks`(): Unit = {
val o = new TestIfElse3Class
val fut = o.m(10)
val res = Await.result(fut, 2 seconds)
res mustBe (16)
}
}
|
scala/async
|
src/test/scala/scala/async/run/ifelse3/IfElse3.scala
|
Scala
|
bsd-3-clause
| 1,135 |
package inloopio.math.vector
/**
*
* @author Caoyuan Deng
*/
class TInputOutputPoint protected (_input: Vec, _output: Vec, val time: Long) extends InputOutputPoint(_input, _output)
object TInputOutputPoint {
def apply(input: Vec, output: Vec, time: Long) =
new TInputOutputPoint(input, output, time)
def apply(inputDimension: Int, outputDimension: Int, time: Long) =
new TInputOutputPoint(new DefaultVec(inputDimension), new DefaultVec(outputDimension), time)
def unapply(iop: TInputOutputPoint): Option[(Vec, Vec, Long)] = Some(iop.input, iop.output, iop.time)
}
|
dcaoyuan/inloopio-libs
|
inloopio-math/src/main/scala/inloopio/math/vector/TInputOutputPoint.scala
|
Scala
|
bsd-3-clause
| 584 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.stats
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.util.factory.Hints
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.TestGeoMesaDataStore
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.SimpleFeature
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MetadataBackedStatsTest extends Specification {
val sft = SimpleFeatureTypes.createType("test", "trackId:String:index=true,dtg:Date,*geom:Point:srid=4326")
"MetadataBackedStatsTest" should {
"work with initial values very close together" in {
val pt0 = ScalaSimpleFeature.create(sft, "0", s"track-0", "2018-01-01T00:00:00.000Z", "POINT (-87.92926054 41.76166190973163)")
val pt1 = ScalaSimpleFeature.create(sft, "1", s"track-1", "2018-01-01T01:00:00.000Z", "POINT (-87.92926053956762 41.76166191)")
val ds = new TestGeoMesaDataStore(false)
ds.createSchema(sft)
Seq(pt0, pt1).foreach(_.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE))
ds.getFeatureSource(sft.getTypeName).addFeatures(new ListFeatureCollection(sft, Array[SimpleFeature](pt0, pt1)))
ds.stats.writer.updater(sft) must not(throwAn[Exception])
}
}
}
|
locationtech/geomesa
|
geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/stats/MetadataBackedStatsTest.scala
|
Scala
|
apache-2.0
| 1,876 |
/*
 * Copyright (C) 2011 Mathieu Leclaire <mathieu.leclaire at openmole.org>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.task.template
import org.openmole.core.dsl._
import org.openmole.core.workflow.test.TestHook
import org.openmole.tool.hash._
import org.scalatest._
class TemplateFileTaskSpec extends FlatSpec with Matchers {
import org.openmole.core.workflow.test.Stubs._
"A template file generator task" should "parse a template file and evalutate the ${} expressions" in {
lazy val templateFile: File = {
val template = java.io.File.createTempFile("file", ".test")
template.content =
"""My first line
|${2*3}
|I am ${a*5} year old
|${s"I am ${a*5} year old"}""".stripMargin
template
}
lazy val targetFile: File = {
val target = java.io.File.createTempFile("target", ".test")
target.content =
"""My first line
|6
|I am 30 year old
|I am 30 year old""".stripMargin
target
}
val outputP = Val[File]
val a = Val[Int]
val t1 = TemplateFileTask(templateFile, outputP) set (a := 6)
val testHook = TestHook { c ⇒ targetFile.hash() should equal(c(outputP).hash()) }
(t1 hook testHook).run()
}
}
|
openmole/openmole
|
openmole/plugins/org.openmole.plugin.task.template/src/test/scala/org/openmole/plugin/task/template/TemplateFileTaskSpec.scala
|
Scala
|
agpl-3.0
| 1,931 |
package de.thomasvolk.easy.core
/*
* Copyright 2014 Thomas Volk
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import org.junit.{Assert, Test, After, Before}
import de.thomasvolk.easy.core.persistence.PagePersistenceService
import de.thomasvolk.easy.core.persistence.file.FilePagePersistenceServiceImpl
import de.thomasvolk.easy.core.model.Page
import java.nio.file.{FileSystems, Files}
class PagePersistenceServiceTest {
var persistenceService: PagePersistenceService = _
@Before
def prepareTestDatabase() {
persistenceService = new FilePagePersistenceServiceImpl(
Files.createTempDirectory(FileSystems.getDefault().getPath(System.getProperty("java.io.tmpdir")),
this.getClass.getName))
}
@After
def destroyTestDatabase() {
}
@Test
def persistPage() {
val id = "/1/2/test1"
Assert.assertEquals("<p>Hello</p>", persistenceService.persist(Page(id, "<p>Hello</p>")).content)
Assert.assertEquals("<p>Hello</p>", persistenceService.loadPage(id).get.content)
Assert.assertEquals("<p>Hello 123</p>", persistenceService.persist(Page(id, "<p>Hello 123</p>")).content)
Assert.assertEquals("<p>Hello 123</p>", persistenceService.loadPage(id).get.content)
Assert.assertEquals(0, persistenceService.loadPage(id).get.subPages.size)
Assert.assertFalse(persistenceService.loadPage(id).get.parentPage.isDefined)
val subPageId01 = "/1/2/test1/sub1"
persistenceService.persist(Page(subPageId01, "<p>Sub01</p>"))
Assert.assertEquals(1, persistenceService.loadPage(id).get.subPages.size)
Assert.assertEquals("sub1", persistenceService.loadPage(id).get.subPages(0)._2)
val subPageId02 = "/1/2/test1/sub2"
persistenceService.persist(Page(subPageId02, "<p>Sub02</p>"))
Assert.assertEquals(2, persistenceService.loadPage(id).get.subPages.size)
Assert.assertEquals("sub1", persistenceService.loadPage(id).get.subPages(0)._2)
Assert.assertEquals("sub2", persistenceService.loadPage(id).get.subPages(1)._2)
val parentId = "/1/2"
persistenceService.persist(Page(parentId, "<p>Parent</p>"))
Assert.assertEquals(2, persistenceService.loadPage(id).get.subPages.size)
Assert.assertTrue(persistenceService.loadPage(id).get.parentPage.isDefined)
Assert.assertEquals("2", persistenceService.loadPage(id).get.parentPage.get._2)
persistenceService.deletePage(subPageId01)
persistenceService.deletePage(subPageId02)
persistenceService.deletePage(parentId)
Assert.assertEquals(0, persistenceService.loadPage(id).get.subPages.size)
Assert.assertFalse(persistenceService.loadPage(id).get.parentPage.isDefined)
persistenceService.deletePage(id)
Assert.assertEquals(None, persistenceService.loadPage(id))
}
@Test
def parentPage() {
val rootPageId = "/1"
persistenceService.persist(Page(rootPageId, "<p>Hello</p>"))
assert(None == persistenceService.getParentPage(rootPageId))
val rootPage = persistenceService.loadPage(rootPageId)
assert(None != rootPage)
val childPageId = s"${rootPageId}/2"
persistenceService.persist(Page(childPageId, "<p>Hello Child</p>"))
val parentPage = persistenceService.getParentPage(childPageId)
assert(None != parentPage)
assert(rootPage.get == parentPage.get)
}
@Test
def subpages() {
val parentId = "/dir1/main"
val subPages0 = persistenceService.getSubpages(parentId)
persistenceService.persist(Page(parentId, "<p>Main</p>"))
val subPages1 = persistenceService.getSubpages(parentId)
List("A", "XXX", "06", "test123").foreach { childName =>
val childId = s"${parentId}/${childName}"
persistenceService.persist(Page(childId, s"<p>Child-${childName}</p>"))
}
val subPages4 = persistenceService.getSubpages(parentId)
Assert.assertEquals(4, subPages4.size)
val subPages4Iter = subPages4.iterator
Assert.assertEquals("/dir1/main/06", subPages4Iter.next().id)
Assert.assertEquals("/dir1/main/A", subPages4Iter.next().id)
Assert.assertEquals("/dir1/main/XXX", subPages4Iter.next().id)
Assert.assertEquals("/dir1/main/test123", subPages4Iter.next().id)
}
}
|
thomasvolk/easy
|
core/src/test/scala/de/thomasvolk/easy/core/PagePersistenceServiceTest.scala
|
Scala
|
apache-2.0
| 5,005 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.pipes
import java.util.concurrent.ThreadLocalRandom
import org.neo4j.cypher.internal.compiler.v2_3.ExecutionContext
import org.neo4j.cypher.internal.compiler.v2_3.executionplan.{ReadsAllNodes, Effects, ReadsRelationships}
import org.neo4j.cypher.internal.compiler.v2_3.planDescription.InternalPlanDescription.Arguments.ExpandExpression
import org.neo4j.cypher.internal.compiler.v2_3.spi.QueryContext
import org.neo4j.cypher.internal.frontend.v2_3.{SemanticDirection, InternalException}
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
import org.neo4j.graphdb.{Node, Relationship}
import org.neo4j.helpers.collection.PrefetchingIterator
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
* Expand when both end-points are known, find all relationships of the given
* type in the given direction between the two end-points.
*
 * This is done by checking both nodes and starting from a non-dense node if one exists.
 * If both nodes are dense, we find the degree of each and expand from the node with the smaller degree.
*
* This pipe also caches relationship information between nodes for the duration of the query
*/
case class ExpandIntoPipe(source: Pipe,
fromName: String,
relName: String,
toName: String,
dir: SemanticDirection,
lazyTypes: LazyTypes)(val estimatedCardinality: Option[Double] = None)
(implicit pipeMonitor: PipeMonitor)
extends PipeWithSource(source, pipeMonitor) with RonjaPipe {
self =>
private final val CACHE_SIZE = 100000
protected def internalCreateResults(input: Iterator[ExecutionContext], state: QueryState): Iterator[ExecutionContext] = {
//cache of known connected nodes
val relCache = new RelationshipsCache(CACHE_SIZE)
input.flatMap {
row =>
val fromNode = getRowNode(row, fromName)
fromNode match {
case fromNode: Node =>
val toNode = getRowNode(row, toName)
if (toNode == null) Iterator.empty
else {
val relationships = relCache.get(fromNode, toNode)
.getOrElse(findRelationships(state.query, fromNode, toNode, relCache))
if (relationships.isEmpty) Iterator.empty
else relationships.map(row.newWith2(relName, _, toName, toNode))
}
case null =>
Iterator.empty
}
}
}
/**
* Finds all relationships connecting fromNode and toNode.
*/
private def findRelationships(query: QueryContext, fromNode: Node, toNode: Node,
relCache: RelationshipsCache): Iterator[Relationship] = {
val relTypes = lazyTypes.types(query)
val fromNodeIsDense = query.nodeIsDense(fromNode.getId)
val toNodeIsDense = query.nodeIsDense(toNode.getId)
//if both nodes are dense, start from the one with the lesser degree
if (fromNodeIsDense && toNodeIsDense) {
//check degree and iterate from the node with smaller degree
val fromDegree = getDegree(fromNode, relTypes, dir, query)
if (fromDegree == 0) {
return Iterator.empty
}
val toDegree = getDegree(toNode, relTypes, dir.reversed, query)
if (toDegree == 0) {
return Iterator.empty
}
relIterator(query, fromNode, toNode, preserveDirection = fromDegree < toDegree, relTypes, relCache)
}
// iterate from a non-dense node
else if (toNodeIsDense)
relIterator(query, fromNode, toNode, preserveDirection = true, relTypes, relCache)
else if (fromNodeIsDense)
relIterator(query, fromNode, toNode, preserveDirection = false, relTypes, relCache)
//both nodes are non-dense, choose a starting point by alternating from and to nodes
else
relIterator(query, fromNode, toNode, preserveDirection = alternate(), relTypes, relCache)
}
private var alternateState = false
private def alternate(): Boolean = {
val result = !alternateState
alternateState = result
result
}
private def relIterator(query: QueryContext, fromNode: Node, toNode: Node, preserveDirection: Boolean,
relTypes: Option[Seq[Int]], relCache: RelationshipsCache) = {
val (start, localDirection, end) = if(preserveDirection) (fromNode, dir, toNode) else (toNode, dir.reversed, fromNode)
val relationships = query.getRelationshipsForIds(start, localDirection, relTypes)
new PrefetchingIterator[Relationship] {
//we do not expect two nodes to have many connecting relationships
val connectedRelationships = new ArrayBuffer[Relationship](2)
override def fetchNextOrNull(): Relationship = {
while (relationships.hasNext) {
val rel = relationships.next()
val other = rel.getOtherNode(start)
if (end == other) {
connectedRelationships.append(rel)
return rel
}
}
relCache.put(fromNode, toNode, connectedRelationships)
null
}
}.asScala
}
private def getDegree(node: Node, relTypes: Option[Seq[Int]], direction: SemanticDirection, query: QueryContext) = {
relTypes.map {
case rels if rels.isEmpty => query.nodeGetDegree(node.getId, direction)
case rels if rels.size == 1 => query.nodeGetDegree(node.getId, direction, rels.head)
case rels => rels.foldLeft(0)(
(acc, rel) => acc + query.nodeGetDegree(node.getId, direction, rel)
)
}.getOrElse(query.nodeGetDegree(node.getId, direction))
}
@inline
private def getRowNode(row: ExecutionContext, col: String): Node = {
row.getOrElse(col, throw new InternalException(s"Expected to find a node at $col but found nothing")) match {
case n: Node => n
case null => null
case value => throw new InternalException(s"Expected to find a node at $col but found $value instead")
}
}
def planDescriptionWithoutCardinality =
source.planDescription.andThen(this.id, "Expand(Into)", identifiers, ExpandExpression(fromName, relName, lazyTypes.names, toName, dir))
val symbols = source.symbols.add(toName, CTNode).add(relName, CTRelationship)
override def localEffects = Effects(ReadsAllNodes, ReadsRelationships)
def dup(sources: List[Pipe]): Pipe = {
val (source :: Nil) = sources
copy(source = source)(estimatedCardinality)
}
def withEstimatedCardinality(estimated: Double) = copy()(Some(estimated))
private final class RelationshipsCache(capacity: Int) {
val table = new mutable.OpenHashMap[(Long, Long), Seq[Relationship]]()
def get(start: Node, end: Node): Option[Seq[Relationship]] = table.get(key(start, end))
def put(start: Node, end: Node, rels: Seq[Relationship]) = {
if (table.size < capacity) {
table.put(key(start, end), rels)
}
}
@inline
private def key(start: Node, end: Node) = {
      // if direction is BOTH then we keep the key sorted, otherwise direction is important and we keep the key as is
if (dir != SemanticDirection.BOTH) (start.getId, end.getId)
else {
if (start.getId < end.getId)
(start.getId, end.getId)
else
(end.getId, start.getId)
}
}
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/pipes/ExpandIntoPipe.scala
|
Scala
|
apache-2.0
| 8,184 |
package im.actor.server.user
import akka.pattern.ask
import akka.util.Timeout
import im.actor.api.rpc.messaging.{ Message ⇒ ApiMessage }
import im.actor.api.rpc.peers.Peer
import im.actor.api.rpc.users.{ Sex, User ⇒ ApiUser }
import im.actor.api.rpc.{ AuthorizedClientData, Update }
import im.actor.server.file.Avatar
import im.actor.server.sequence.{ SeqState, SeqUpdatesExtension, SeqUpdatesManager, UpdateRefs }
import org.joda.time.DateTime
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.control.NoStackTrace
object UserOffice extends Commands with Queries {
case object InvalidAccessHash extends Exception with NoStackTrace
case object FailedToFetchInfo
def persistenceIdFor(userId: Int): String = s"User-${userId}"
}
private[user] sealed trait Commands extends AuthCommands {
self: Queries ⇒
import UserCommands._
def create(userId: Int, accessSalt: String, name: String, countryCode: String, sex: Sex.Sex, isBot: Boolean)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[CreateAck] = {
(userOfficeRegion.ref ? Create(userId, accessSalt, name, countryCode, sex, isBot)).mapTo[CreateAck]
}
def addPhone(userId: Int, phone: Long)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[Unit] = {
(userOfficeRegion.ref ? AddPhone(userId, phone)).mapTo[AddPhoneAck] map (_ ⇒ ())
}
def addEmail(userId: Int, email: String)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[Unit] = {
(userOfficeRegion.ref ? AddEmail(userId, email)).mapTo[AddEmailAck] map (_ ⇒ ())
}
def delete(userId: Int)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[Unit] = {
(userOfficeRegion.ref ? Delete(userId)).mapTo[DeleteAck] map (_ ⇒ ())
}
def changeCountryCode(userId: Int, countryCode: String)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[Unit] = {
userOfficeRegion.ref ? ChangeCountryCode(userId, countryCode) map (_ ⇒ ())
}
def changeName(userId: Int, name: String)(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[SeqState] = {
(userOfficeRegion.ref ? ChangeName(userId, name)).mapTo[SeqState]
}
def deliverMessage(userId: Int, peer: Peer, senderUserId: Int, randomId: Long, date: DateTime, message: ApiMessage, isFat: Boolean)(
implicit
region: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[Unit] =
(region.ref ? DeliverMessage(userId, peer, senderUserId, randomId, date, message, isFat)) map (_ ⇒ ())
def deliverOwnMessage(userId: Int, peer: Peer, senderAuthId: Long, randomId: Long, date: DateTime, message: ApiMessage, isFat: Boolean)(
implicit
region: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[SeqState] =
(region.ref ? DeliverOwnMessage(userId, peer, senderAuthId, randomId, date, message, isFat)).mapTo[SeqState]
def changeNickname(userId: Int, clientAuthId: Long, nickname: Option[String])(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[SeqState] = {
(userOfficeRegion.ref ? ChangeNickname(userId, clientAuthId, nickname)).mapTo[SeqState]
}
def changeAbout(userId: Int, clientAuthId: Long, about: Option[String])(
implicit
userOfficeRegion: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[SeqState] = {
(userOfficeRegion.ref ? ChangeAbout(userId, clientAuthId, about)).mapTo[SeqState]
}
def updateAvatar(userId: Int, clientAuthId: Long, avatarOpt: Option[Avatar])(
implicit
region: UserProcessorRegion,
timeout: Timeout,
ec: ExecutionContext
): Future[UpdateAvatarAck] = (region.ref ? UpdateAvatar(userId, clientAuthId, avatarOpt)).mapTo[UpdateAvatarAck]
def broadcastUserUpdate(
userId: Int,
update: Update,
pushText: Option[String],
isFat: Boolean,
deliveryId: Option[String]
)(implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout): Future[Seq[SeqState]] = {
val header = update.header
val serializedData = update.toByteArray
val originPeer = SeqUpdatesManager.getOriginPeer(update)
val refs = SeqUpdatesManager.updateRefs(update)
broadcastUserUpdate(userId, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
}
def broadcastUserUpdate(
userId: Int,
header: Int,
serializedData: Array[Byte],
refs: UpdateRefs,
pushText: Option[String],
originPeer: Option[Peer],
isFat: Boolean,
deliveryId: Option[String]
)(implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout): Future[Seq[SeqState]] = {
for {
authIds ← getAuthIds(userId)
seqstates ← SeqUpdatesManager.persistAndPushUpdatesF(authIds.toSet, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
} yield seqstates
}
def broadcastUsersUpdate(
userIds: Set[Int],
update: Update,
pushText: Option[String],
isFat: Boolean,
deliveryId: Option[String]
)(implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout): Future[Seq[SeqState]] = {
val header = update.header
val serializedData = update.toByteArray
val originPeer = SeqUpdatesManager.getOriginPeer(update)
val refs = SeqUpdatesManager.updateRefs(update)
for {
authIds ← getAuthIds(userIds)
seqstates ← Future.sequence(
authIds.map(SeqUpdatesManager.persistAndPushUpdateF(_, header, serializedData, refs, pushText, originPeer, isFat, deliveryId))
)
} yield seqstates
}
def broadcastClientUpdate(
update: Update,
pushText: Option[String],
isFat: Boolean = false,
deliveryId: Option[String] = None
)(
implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
client: AuthorizedClientData,
ec: ExecutionContext,
timeout: Timeout
): Future[SeqState] = broadcastClientUpdate(client.userId, client.authId, update, pushText, isFat, deliveryId)
def broadcastClientUpdate(
clientUserId: Int,
clientAuthId: Long,
update: Update,
pushText: Option[String],
isFat: Boolean,
deliveryId: Option[String]
)(
implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout
): Future[SeqState] = {
val header = update.header
val serializedData = update.toByteArray
val originPeer = SeqUpdatesManager.getOriginPeer(update)
val refs = SeqUpdatesManager.updateRefs(update)
for {
otherAuthIds ← UserOffice.getAuthIds(clientUserId) map (_.filter(_ != clientAuthId))
_ ← Future.sequence(
otherAuthIds map (
SeqUpdatesManager.persistAndPushUpdateF(_, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
)
)
seqstate ← SeqUpdatesManager.persistAndPushUpdateF(clientAuthId, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
} yield seqstate
}
def broadcastClientAndUsersUpdate(
userIds: Set[Int],
update: Update,
pushText: Option[String],
isFat: Boolean = false,
deliveryId: Option[String] = None
)(implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout,
client: AuthorizedClientData): Future[(SeqState, Seq[SeqState])] =
broadcastClientAndUsersUpdate(client.userId, client.authId, userIds, update, pushText, isFat, deliveryId)
def broadcastClientAndUsersUpdate(
clientUserId: Int,
clientAuthId: Long,
userIds: Set[Int],
update: Update,
pushText: Option[String],
isFat: Boolean,
deliveryId: Option[String]
)(implicit
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion,
ec: ExecutionContext,
timeout: Timeout): Future[(SeqState, Seq[SeqState])] = {
val header = update.header
val serializedData = update.toByteArray
val originPeer = SeqUpdatesManager.getOriginPeer(update)
val refs = SeqUpdatesManager.updateRefs(update)
for {
authIds ← getAuthIds(userIds + clientUserId)
seqstates ← Future.sequence(
authIds.view
.filterNot(_ == clientAuthId)
.map(SeqUpdatesManager.persistAndPushUpdateF(_, header, serializedData, refs, pushText, originPeer, isFat, deliveryId))
)
seqstate ← SeqUpdatesManager.persistAndPushUpdateF(clientAuthId, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
} yield (seqstate, seqstates)
}
def notifyUserUpdate(
userId: Int,
exceptAuthId: Long,
update: Update,
pushText: Option[String],
isFat: Boolean = false,
deliveryId: Option[String] = None
)(
implicit
ec: ExecutionContext,
timeout: Timeout,
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion
): Future[Seq[SeqState]] = {
val header = update.header
val serializedData = update.toByteArray
val originPeer = SeqUpdatesManager.getOriginPeer(update)
notifyUserUpdate(userId, exceptAuthId, header, serializedData, SeqUpdatesManager.updateRefs(update), pushText, originPeer, isFat, deliveryId)
}
def notifyUserUpdate(
userId: Int,
exceptAuthId: Long,
header: Int,
serializedData: Array[Byte],
refs: UpdateRefs,
pushText: Option[String],
originPeer: Option[Peer],
isFat: Boolean,
deliveryId: Option[String]
)(implicit
ec: ExecutionContext,
timeout: Timeout,
ext: SeqUpdatesExtension,
userViewRegion: UserViewRegion) = {
for {
otherAuthIds ← UserOffice.getAuthIds(userId) map (_.filter(_ != exceptAuthId))
seqstates ← Future.sequence(otherAuthIds map { authId ⇒
SeqUpdatesManager.persistAndPushUpdateF(authId, header, serializedData, refs, pushText, originPeer, isFat, deliveryId)
})
} yield seqstates
}
}
private[user] sealed trait Queries {
import UserQueries._
def getAuthIds(userId: Int)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[Seq[Long]] = {
(region.ref ? GetAuthIds(userId)).mapTo[GetAuthIdsResponse] map (_.authIds)
}
def getAuthIds(userIds: Set[Int])(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[Seq[Long]] = {
Future.sequence(userIds map (getAuthIds(_))) map (_.toSeq.flatten)
}
def getApiStruct(userId: Int, clientUserId: Int, clientAuthId: Long)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[ApiUser] = {
(region.ref ? GetApiStruct(userId, clientUserId, clientAuthId)).mapTo[GetApiStructResponse] map (_.struct)
}
def getContactRecords(userId: Int)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[(Seq[Long], Seq[String])] = {
(region.ref ? GetContactRecords(userId)).mapTo[GetContactRecordsResponse] map (r ⇒ (r.phones, r.emails))
}
def getContactRecordsSet(userId: Int)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[(Set[Long], Set[String])] =
for ((phones, emails) ← getContactRecords(userId)) yield (phones.toSet, emails.toSet)
def checkAccessHash(userId: Int, senderAuthId: Long, accessHash: Long)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[Boolean] = {
(region.ref ? CheckAccessHash(userId, senderAuthId, accessHash)).mapTo[CheckAccessHashResponse] map (_.isCorrect)
}
def getAccessHash(userId: Int, clientAuthId: Long)(implicit region: UserViewRegion, timeout: Timeout, ec: ExecutionContext): Future[Long] =
(region.ref ? GetAccessHash(userId, clientAuthId)).mapTo[GetAccessHashResponse] map (_.accessHash)
}
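/* Usage sketch (illustrative addition, not part of the original file): issuing a
 * rename through UserOffice. The cluster-related implicits are assumed to be
 * provided by the surrounding actor-system setup, as they are at the call sites
 * elsewhere in this codebase. */
object UserOfficeUsageExample {
  def rename(userId: Int, newName: String)(
    implicit
    region: UserProcessorRegion,
    timeout: Timeout,
    ec: ExecutionContext
  ): Future[SeqState] =
    UserOffice.changeName(userId, newName)
}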
|
dsaved/africhat-platform-0.1
|
actor-server/actor-core/src/main/scala/im/actor/server/user/UserOffice.scala
|
Scala
|
mit
| 12,820 |
/*
* Copyright 2017-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package higherkindness.mu.rpc.config
import com.typesafe.config.Config
trait ConfigM[F[_]] {
def load: F[Config]
}
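/* Illustrative sketch (not part of the original module): a ConfigM instance backed
 * by Typesafe Config's default loader, using cats-effect IO. That cats-effect is
 * available on this module's classpath is an assumption. */
object ConfigMExample {
  import cats.effect.IO
  import com.typesafe.config.ConfigFactory
  val ioConfig: ConfigM[IO] = new ConfigM[IO] {
    def load: IO[Config] = IO(ConfigFactory.load()) // defers the side-effecting read
  }
}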
|
frees-io/freestyle-rpc
|
modules/config/src/main/scala/higherkindness/mu/rpc/config/ConfigM.scala
|
Scala
|
apache-2.0
| 755 |
package com.airbnb.aerosolve.training
import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.concurrent.ConcurrentHashMap
import com.airbnb.aerosolve.core.util.Util
import com.airbnb.aerosolve.core.models.MaxoutModel
import com.airbnb.aerosolve.core.{Example, FeatureVector}
import com.typesafe.config.Config
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
object MaxoutTrainer {
private final val log: Logger = LoggerFactory.getLogger("MaxoutTrainer")
def train(sc : SparkContext,
input : RDD[Example],
config : Config,
key : String) : MaxoutModel = {
val loss : String = config.getString(key + ".loss")
val numHidden : Int = config.getInt(key + ".num_hidden")
val iterations : Int = config.getInt(key + ".iterations")
val rankKey : String = config.getString(key + ".rank_key")
val learningRate : Double = config.getDouble(key + ".learning_rate")
val lambda : Double = config.getDouble(key + ".lambda")
val lambda2 : Double = config.getDouble(key + ".lambda2")
val dropout : Double = config.getDouble(key + ".dropout")
val dropoutHidden : Double = config.getDouble(key + ".dropout_hidden")
val minCount : Int = config.getInt(key + ".min_count")
val subsample : Double = config.getDouble(key + ".subsample")
val momentum : Double = config.getDouble(key + ".momentum")
val pointwise : RDD[Example] =
LinearRankerUtils
.makePointwiseFloat(input, config, key)
.cache()
var model = new MaxoutModel()
model.initForTraining(numHidden)
initModel(minCount, rankKey, pointwise, model)
log.info("Computing max values for all features")
log.info("Training using " + loss)
for (i <- 1 to iterations) {
model = sgdTrain(sc,
config,
key,
pointwise,
numHidden,
rankKey,
loss,
learningRate,
lambda,
lambda2,
dropout,
dropoutHidden,
momentum,
subsample,
i,
model)
}
pointwise.unpersist()
model
}
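  /* Illustrative HOCON config sketch (values are made up; the key names mirror the
   * reads in train() and sgdTrain() above):
   *
   * my_maxout_model {
   *   loss : "logistic"
   *   num_hidden : 8
   *   iterations : 10
   *   rank_key : "$rank"
   *   rank_threshold : 0.0
   *   learning_rate : 0.1
   *   lambda : 0.001
   *   lambda2 : 0.001
   *   dropout : 0.1
   *   dropout_hidden : 0.0
   *   momentum : 0.9
   *   min_count : 5
   *   subsample : 0.5
   *   model_output : "hdfs:///tmp/maxout.model"
   * }
   */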
  // Initializes the model
def initModel(minCount : Int,
rankKey : String,
input : RDD[Example],
model : MaxoutModel) = {
val maxScale = getMaxScale(minCount, rankKey, input)
log.info("Num features = %d".format(maxScale.length))
for (entry <- maxScale) {
model.addVector(entry._1._1, entry._1._2, entry._2.toFloat)
}
}
// Returns 1 / largest absolute value of the feature
def getMaxScale(minCount : Int,
rankKey : String,
input : RDD[Example]) : Array[((String, String), Double)] = {
input
.mapPartitions(partition => {
val weights = new ConcurrentHashMap[(String, String), (Double, Int)]().asScala
partition.foreach(example => {
val flatFeature = Util.flattenFeature(example.example.get(0)).asScala
flatFeature.foreach(familyMap => {
if (!rankKey.equals(familyMap._1)) {
familyMap._2.foreach(feature => {
val key = (familyMap._1, feature._1)
val curr = weights.getOrElse(key, (0.0, 0))
weights.put(key, (scala.math.max(curr._1, feature._2), curr._2 + 1))
})
}
})
})
weights.iterator
})
.reduceByKey((a, b) => (scala.math.max(a._1, b._1), a._2 + b._2))
.filter(x => x._2._1 > 1e-10 && x._2._2 >= minCount)
.map(x => (x._1, 1.0 / x._2._1))
.collect
.toArray
}
def sgdTrain(sc : SparkContext,
config : Config,
key : String,
input : RDD[Example],
numHidden : Int,
rankKey : String,
loss : String,
learningRate : Double,
lambda : Double,
lambda2 : Double,
dropout : Double,
dropoutHidden : Double,
momentum : Double,
subsample : Double,
iteration : Int,
model : MaxoutModel) : MaxoutModel = {
log.info("Iteration %d".format(iteration))
val modelBC = sc.broadcast(model)
val threshold : Double = config.getDouble(key + ".rank_threshold")
val lossMod : Int = try {
config.getInt(key + ".loss_mod")
} catch {
case _ : Throwable => 100
}
val modelRet = input
.sample(false, subsample)
.coalesce(1, true)
.mapPartitions(partition => {
val workingModel = modelBC.value
@volatile var lossSum : Double = 0.0
@volatile var lossCount : Int = 0
partition.foreach(example => {
val fv = example.example.get(0)
val rank = fv.floatFeatures.get(rankKey).asScala.head._2
val label = if (rank <= threshold) {
-1.0
} else {
1.0
}
loss match {
case "logistic" => lossSum = lossSum + updateLogistic(workingModel, fv, label, learningRate, lambda, lambda2, dropout, dropoutHidden, momentum)
case "hinge" => lossSum = lossSum + updateHinge(workingModel, fv, label, learningRate, lambda, lambda2, dropout, dropoutHidden, momentum)
case _ => {
log.error("Unknown loss function %s".format(loss))
System.exit(-1)
}
}
lossCount = lossCount + 1
if (lossCount % lossMod == 0) {
log.info("Loss = %f, samples = %d".format(lossSum / lossMod.toDouble, lossCount))
lossSum = 0.0
}
})
Array[MaxoutModel](workingModel).iterator
})
.collect
.head
saveModel(modelRet, config, key)
return modelRet
}
def updateLogistic(model : MaxoutModel,
fv : FeatureVector,
label : Double,
learningRate : Double,
lambda : Double,
lambda2 : Double,
dropout : Double,
dropoutHidden : Double,
momentum : Double) : Double = {
val flatFeatures = Util.flattenFeatureWithDropout(fv, dropout)
val response = model.getResponse(flatFeatures)
val values = response.getValues
for (i <- 0 until values.length) {
if (scala.util.Random.nextDouble() < dropoutHidden) {
values(i) = 0
}
}
val result = response.getMinMaxResult
val prediction = result.maxValue - result.minValue
// To prevent blowup.
val corr = scala.math.min(10.0, label * prediction)
val expCorr = scala.math.exp(corr)
val loss = scala.math.log(1.0 + 1.0 / expCorr)
val grad = -label / (1.0 + expCorr)
model.update(grad.toFloat,
learningRate.toFloat,
lambda.toFloat,
lambda2.toFloat,
momentum.toFloat,
result,
flatFeatures)
return loss
}
def updateHinge(model : MaxoutModel,
fv : FeatureVector,
label : Double,
learningRate : Double,
lambda : Double,
lambda2 : Double,
dropout : Double,
dropoutHidden : Double,
momentum : Double) : Double = {
val flatFeatures = Util.flattenFeatureWithDropout(fv, dropout)
val response = model.getResponse(flatFeatures)
val values = response.getValues
for (i <- 0 until values.length) {
if (scala.util.Random.nextDouble() < dropoutHidden) {
values(i) = 0
}
}
val result = response.getMinMaxResult
val prediction = result.maxValue - result.minValue
val loss = scala.math.max(0.0, 1.0 - label * prediction)
if (loss > 0.0) {
val grad = -label
model.update(grad.toFloat,
learningRate.toFloat,
lambda.toFloat,
lambda2.toFloat,
momentum.toFloat,
result,
flatFeatures)
}
return loss
}
def saveModel(model : MaxoutModel,
config : Config,
key : String) = {
try {
val output: String = config.getString(key + ".model_output")
val fileSystem = FileSystem.get(new java.net.URI(output),
new Configuration())
val file = fileSystem.create(new Path(output), true)
val writer = new BufferedWriter(new OutputStreamWriter(file))
model.save(writer)
writer.close()
file.close()
} catch {
case _ : Throwable => log.error("Could not save model")
}
}
def trainAndSaveToFile(sc : SparkContext,
input : RDD[Example],
config : Config,
key : String) = {
val model = train(sc, input, config, key)
saveModel(model, config, key)
}
}
|
aglne/aerosolve
|
training/src/main/scala/com/airbnb/aerosolve/training/MaxoutTrainer.scala
|
Scala
|
apache-2.0
| 9,217 |
package com.pragmaxim.scalajs.dom
import org.scalajs.dom
import org.scalajs.dom.KeyboardEvent
import scala.scalajs.js
import scala.scalajs.js.UndefOr
object KeyboardPolyfill {
/**
   * Primarily it allows you to abandon keypress events, which have cross-browser incompatible behavior and
   * are to be deprecated in favor of beforeinput events in W3C DOM4. Calling the polyfill method on a keydown/keyup event
   * gives you a normalized keyCode across platforms and browsers, and an estimated charCode in case of a key representing a
   * printable character. The pfKeyCode & optional pfCharCode properties are added to the Event object for possible later use.
*/
implicit class PfEvent(e: KeyboardEvent) {
/** to retrieve polyfilled code later */
def pfKeyCode: Option[Int] = getDynamic("pfKeyCode")
def pfCharCode: Option[Int] = getDynamic("pfCharCode")
/**
     * Attempts to unify keyCodes across a variety of platforms and browsers and to
     * find the corresponding charCode in case of a printable Unicode code point.
* @return (keyCode, Option[charCode])
*/
def polyfill(): (Int, Option[Int]) = {
require(e.`type` != "keypress", "This polyfill only works with keydown/keyup events")
val keyCode = normalize(e.keyCode)
val result = ChCode.shiftableKey2Char.lift(keyCode) match {
case Some(shift) => (keyCode, Option(shift(e.shiftKey)))
case None => (keyCode, ChCode.key2char.lift(keyCode))
}
pfKeyCode(keyCode)
result._2.foreach(pfCharCode(_))
result
}
private def setDynamic(name: String, value: js.Any) = e.asInstanceOf[js.Dynamic].updateDynamic(name)(value)
    private def getDynamic[T](name: String): Option[T] = Option((e.asInstanceOf[js.Dynamic].selectDynamic(name): UndefOr[js.Dynamic]).orNull).asInstanceOf[Option[T]]
private def pfKeyCode(keyCode: Int) = setDynamic("pfKeyCode", keyCode)
private def pfCharCode(charCode: Int) = setDynamic("pfCharCode", charCode)
/**
     * To be improved continuously; most of the remaining quirks concern the Mac's atypical keyboard layout and ancient browsers.
     * You're welcome to contribute.
*/
private def normalize(keyCode: Int): Int = {
if (Device.isGecko)
keyCode match {
case 173 => KCode.Dash
case 59 => KCode.Semicolon
case 61 => KCode.Equals
case 0 => KCode.Win
case other => keyCode
}
else if (Device.isMac)
keyCode match {
case 224 => KCode.Meta
case 12 => KCode.NumLock
case other => keyCode
}
else
keyCode
}
}
object Device {
val IOSRegex = "iPhone|iPod|iPad".r
val userAgent = dom.window.navigator.userAgent
val platform = dom.window.navigator.platform
val isIOS = IOSRegex.findFirstIn(userAgent).isDefined
val isIPad = userAgent.contains("iPad")
val isIPod = userAgent.contains("iPod")
val isIPhone = userAgent.contains("iPhone")
val isAndroid = userAgent.contains("Android")
val isGecko = userAgent.contains("Gecko/")
val isWebKit = userAgent.contains("WebKit/")
val isIE = userAgent.contains("Trident/")
val isOpera = userAgent.contains("Opera/")
val isChrome = userAgent.contains("Chrome/")
val isLinux = platform.contains("Linux")
val isWin = platform.contains("Win")
val isMac = platform.contains("Mac")
val isChrOS = platform.contains("CrOS")
val isTouchable = isIOS || isAndroid
}
}
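/* Usage sketch (illustrative addition, not part of the original polyfill): wiring
 * the polyfill into a keydown listener. Assumes a Scala.js browser context where
 * dom.document is available. */
object KeyboardPolyfillUsageExample {
  import KeyboardPolyfill._
  def install(): Unit = {
    dom.document.onkeydown = (e: KeyboardEvent) => {
      val (keyCode, charCode) = e.polyfill() // normalized keyCode plus Option[charCode]
      println(s"key=$keyCode char=$charCode cached=${e.pfKeyCode}")
    }
  }
}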
/**
 * @note ([0-9] * + - / .) are the only characters shared by two keys on the keyboard; this duplication is caused by the existence of the numpad.
*/
object ChCode {
import KCode._
/** shift changes charCode */
private def >(w: Int, wo: Int)(shift: Boolean) = if (shift) w else wo
/** add offset to a lower case letter which gives you it's char code */
private def >>(keyCode: Int)(shift: Boolean) = if (shift) keyCode else letterKeyToLowerCaseCharCode(keyCode)
/** keys that have different charCode representation when shift key is pressed */
private val letterKey2Char = for(letterKeyCode <- A to Z) yield (letterKeyCode, >>(letterKeyCode)_)
val shiftableKey2Char = Map[Int, Boolean => Int](
(Num0, >(')', Num0)),
(Num1, >('!', Num1)),
(Num2, >('@', Num2)),
(Num3, >('#', Num3)),
(Num4, >('$', Num4)),
(Num5, >('%', Num5)),
(Num6, >('^', Num6)),
(Num7, >('&', Num7)),
(Num8, >('*', Num8)),
(Num9, >('(', Num9)),
(Comma, >('<', ',')),
(Dash, >('_', '-')),
(Period, >('>', '.')),
(Slash, >('?', '/')),
(Backtick, >('~', '`')),
(SquareBracketOpen, >('{', '[')),
    (Backslash, >('|', '\\')),
    (SquareBracketClose, >('}', ']')),
    (SingleQuote, >('"', '\'')),
(Semicolon, >(':', ';')),
(Equals, >('+', '='))
) ++ letterKey2Char
val key2char = Map[Int, Int](
Space -> Space,
Enter -> Enter,
Numpad0,
Numpad1,
Numpad2,
Numpad3,
Numpad4,
Numpad5,
Numpad6,
Numpad7,
Numpad8,
Numpad9,
NumpadMultiply,
NumpadAdd,
NumpadSubtract,
NumpadDivide,
NumpadPeriod
)
}
object KCode {
/** numbers have KeyCode equal to CharCode */
def isNumber(keyCode: Int) = keyCode >= Num0 && keyCode <= Num9
val Num0 = '0'.toInt // 48
val Num1 = '1'.toInt // 49
val Num2 = '2'.toInt // 50
val Num3 = '3'.toInt // 51
val Num4 = '4'.toInt // 52
val Num5 = '5'.toInt // 53
val Num6 = '6'.toInt // 54
val Num7 = '7'.toInt // 55
val Num8 = '8'.toInt // 56
val Num9 = '9'.toInt // 57
/** [A-Z] charCode is equal to [a-z] keyCode, thus I won't duplicate constants */
val charSizeOffset = 'a'.toInt - 'A'.toInt
def isLetterKey(keyCode: Int) = keyCode >= A && keyCode <= Z
def isUpperCaseLetter(charCode: Int) = isLetterKey(charCode)
def letterKeyToLowerCaseCharCode(keyCode: Int) = keyCode + charSizeOffset
def letterKeyToUpperCaseCharCode(keyCode: Int) = keyCode // informative method
/** Upper case letters have CharCode equal to KeyCode */
val A = 'A'.toInt // 65
val B = 'B'.toInt // 66
val C = 'C'.toInt // 67
val D = 'D'.toInt // 68
val E = 'E'.toInt // 69
val F = 'F'.toInt // 70
val G = 'G'.toInt // 71
val H = 'H'.toInt // 72
val I = 'I'.toInt // 73
val J = 'J'.toInt // 74
val K = 'K'.toInt // 75
val L = 'L'.toInt // 76
val M = 'M'.toInt // 77
val N = 'N'.toInt // 78
val O = 'O'.toInt // 79
val P = 'P'.toInt // 80
val Q = 'Q'.toInt // 81
val R = 'R'.toInt // 82
val S = 'S'.toInt // 83
val T = 'T'.toInt // 84
val U = 'U'.toInt // 85
val V = 'V'.toInt // 86
val W = 'W'.toInt // 87
val X = 'X'.toInt // 88
val Y = 'Y'.toInt // 89
val Z = 'Z'.toInt // 90
val Comma = 188
val Dash = 189
val Period = 190
val Slash = 191
val Backtick = 192
val SquareBracketOpen = 219
val Backslash = 220
val SquareBracketClose = 221
val SingleQuote = 222
val Semicolon = 186
val Equals = 187
/** Space & Enter have KeyCode equal to CharCode */
val Space = 32 // both charCode and keyCode
val Enter = 13 // both charCode and keyCode
/** Numpad numbers share common numbers charCode */
val Numpad0 = (96, Num0)
val Numpad1 = (97, Num1)
val Numpad2 = (98, Num2)
val Numpad3 = (99, Num3)
val Numpad4 = (100, Num4)
val Numpad5 = (101, Num5)
val Numpad6 = (102, Num6)
val Numpad7 = (103, Num7)
val Numpad8 = (104, Num8)
val Numpad9 = (105, Num9)
val NumpadMultiply = (106, '*'.toInt)
val NumpadAdd = (107, '+'.toInt)
val NumpadSubtract = (109, '-'.toInt)
val NumpadDivide = (111, '/'.toInt)
val NumpadPeriod = (110, '.'.toInt)
/** Keys that do not have unicode representation */
val Backspace = 8
val Tab = 9
val Shift = 16
val Ctrl = 17
val Alt = 18
val Pause = 19
val CapsLock = 20
val Escape = 27
val PageUp = 33
val PageDown = 34
val End = 35
val Home = 36
val Left = 37
val Up = 38
val Right = 39
val Down = 40
val Insert = 45
val Delete = 46
val Meta = 91
val Win = 224
val F1 = 112
val F2 = 113
val F3 = 114
val F4 = 115
val F5 = 116
val F6 = 117
val F7 = 118
val F8 = 119
val F9 = 120
val F10 = 121
val F11 = 122
val F12 = 123
val NumLock = 144
}
|
viagraphs/scalajs-keyboard-polyfill
|
src/main/scala/com/pragmaxim/scalajs/dom/Keyboard.scala
|
Scala
|
mit
| 9,071 |
/*
* Copyright 2019 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.consumer.service.repo
import za.co.absa.spline.consumer.service.model._
import scala.concurrent.{ExecutionContext, Future}
trait LabelRepository {
def findNames(query: Option[String], offset: Int, length: Int)(implicit ec: ExecutionContext): Future[Seq[Label.Name]]
def findValuesFor(labelName: Label.Name, query: Option[String], offset: Int, length: Int)(implicit ec: ExecutionContext): Future[Seq[Label.Value]]
}
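/**
 * Illustrative sketch (not part of the original file): fetching the first page of
 * label names through the repository abstraction; the page size is arbitrary.
 */
object LabelRepositoryUsageExample {
  def firstNames(repo: LabelRepository)(implicit ec: ExecutionContext): Future[Seq[Label.Name]] =
    repo.findNames(query = None, offset = 0, length = 20)
}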
|
AbsaOSS/spline
|
consumer-services/src/main/scala/za/co/absa/spline/consumer/service/repo/LabelRepository.scala
|
Scala
|
apache-2.0
| 1,051 |
package levar.data
import levar.Experiment
import levar.Prediction
import levar.Dataset.{ DatasetType, ClassificationType, RegressionType }
class TabularExperiment(dtype: DatasetType, name: String, idCol: Int, predCol: Int, arrays: Iterator[Array[String]]) {
require(idCol >= 0)
require(predCol >= 0)
require(idCol != predCol)
def asExperiment = Experiment(name)
def data = dtype match {
case RegressionType => arrays.map { a => Prediction(a(idCol), Left(a(predCol).toDouble)) }
case ClassificationType => arrays.map { a => Prediction(a(idCol), Right(a(predCol))) }
}
}
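// Illustrative sketch (hypothetical data, not part of the original file): rows with
// the id in column 0 and a numeric prediction in column 1, read as a regression run.
object TabularExperimentExample {
  val rows = Iterator(Array("ex-1", "0.5"), Array("ex-2", "1.25"))
  val tab = new TabularExperiment(RegressionType, "demo-run", idCol = 0, predCol = 1, rows)
  val predictions = tab.data // Iterator[Prediction] carrying Left(Double) scores
}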
|
peoplepattern/LeVar
|
levar-cli/src/main/scala/levar/data/TabularExperiment.scala
|
Scala
|
apache-2.0
| 594 |
package uk.co.seansaville.ninetyninescalaprobs.lists
import uk.co.seansaville.ninetyninescalaprobs.UnitSpec
class Problem5Spec extends UnitSpec {
"reverse" should "reverse any list" in {
assert(Problem5.reverse(List(1, 2, 3)) == List(3, 2, 1))
assert(Problem5.reverse(List(1)) == List(1))
assert(Problem5.reverse(List()) == List())
}
}
|
seansaville/99scalaprobs
|
src/test/scala/uk/co/seansaville/ninetyninescalaprobs/lists/Problem5Spec.scala
|
Scala
|
mit
| 356 |
package doobie.util
import scala.collection.generic.CanBuildFrom
import doobie.imports._
import doobie.util.analysis.Analysis
import scalaz.{ MonadPlus, Profunctor, Contravariant, Functor }
import scalaz.stream.Process
import scalaz.syntax.monad._
/** Module defining queries parameterized by input and output types. */
object query {
/**
* A query parameterized by some input type `A` yielding values of type `B`. We define here the
* core operations that are needed. Additional operations are provided on `[[Query0]]` which is the
* residual query after applying an `A`. This is the type constructed by the `sql` interpolator.
*/
trait Query[A, B] { outer =>
    // jiggery pokery to support CBF; we're doing the coyoneda trick on B to avoid a Functor
// constraint on the `F` parameter in `to`, and it's just easier to do the contravariant coyo
// trick on A while we're at it.
protected type I
protected type O
protected val ai: A => I
protected val ob: O => B
protected implicit val ic: Composite[I]
protected implicit val oc: Composite[O]
/**
* The SQL string.
* @group Diagnostics
*/
def sql: String
/**
* An optional `[[StackTraceElement]]` indicating the source location where this `[[Query]]` was
* constructed. This is used only for diagnostic purposes.
* @group Diagnostics
*/
def stackFrame: Option[StackTraceElement]
/**
* Program to construct an analysis of this query's SQL statement and asserted parameter and
* column types.
* @group Diagnostics
*/
def analysis: ConnectionIO[Analysis] =
HC.prepareQueryAnalysis[I, O](sql)
/**
* Apply the argument `a` to construct a `Process` with effect type
* `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding elements of type `B`.
* @group Results
*/
def process(a: A): Process[ConnectionIO, B] =
HC.process[O](sql, HPS.set(ai(a))).map(ob)
/**
* Apply the argument `a` to construct a program in
*`[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an `F[B]` accumulated
* via the provided `CanBuildFrom`. This is the fastest way to accumulate a collection.
* @group Results
*/
def to[F[_]](a: A)(implicit cbf: CanBuildFrom[Nothing, B, F[B]]): ConnectionIO[F[B]] =
HC.prepareStatement(sql)(HPS.set(ai(a)) *> HPS.executeQuery(HRS.buildMap[F,O,B](ob)))
/**
* Apply the argument `a` to construct a program in
* `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an `F[B]` accumulated
* via `MonadPlus` append. This method is more general but less efficient than `to`.
* @group Results
*/
def accumulate[F[_]: MonadPlus](a: A): ConnectionIO[F[B]] =
HC.prepareStatement(sql)(HPS.set(ai(a)) *> HPS.executeQuery(HRS.accumulate[F, O].map(_.map(ob))))
/**
* Apply the argument `a` to construct a program in
* `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding a unique `B` and
* raising an exception if the resultset does not have exactly one row. See also `option`.
* @group Results
*/
def unique(a: A): ConnectionIO[B] =
HC.prepareStatement(sql)(HPS.set(ai(a)) *> HPS.executeQuery(HRS.getUnique[O])).map(ob)
/**
* Apply the argument `a` to construct a program in
* `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an optional `B` and
* raising an exception if the resultset has more than one row. See also `unique`.
* @group Results
*/
def option(a: A): ConnectionIO[Option[B]] =
HC.prepareStatement(sql)(HPS.set(ai(a)) *> HPS.executeQuery(HRS.getOption[O])).map(_.map(ob))
/** @group Transformations */
def map[C](f: B => C): Query[A, C] =
new Query[A, C] {
type I = outer.I
type O = outer.O
val ai = outer.ai
val ob = outer.ob andThen f
val ic: Composite[I] = outer.ic
val oc: Composite[O] = outer.oc
def sql = outer.sql
def stackFrame = outer.stackFrame
}
/** @group Transformations */
def contramap[C](f: C => A): Query[C, B] =
new Query[C, B] {
type I = outer.I
type O = outer.O
val ai = outer.ai compose f
val ob = outer.ob
val ic: Composite[I] = outer.ic
val oc: Composite[O] = outer.oc
def sql = outer.sql
def stackFrame = outer.stackFrame
}
/**
* Apply an argument, yielding a residual `[[Query0]]`. Note that this is the typical (and the
* only provided) way to construct a `[[Query0]]`.
* @group Transformations
*/
def toQuery0(a: A): Query0[B] =
new Query0[B] {
def sql = outer.sql
def stackFrame = outer.stackFrame
def analysis = outer.analysis
def process = outer.process(a)
def to[F[_]](implicit cbf: CanBuildFrom[Nothing, B, F[B]]) = outer.to[F](a)
def accumulate[F[_]: MonadPlus] = outer.accumulate[F](a)
def unique = outer.unique(a)
def option = outer.option(a)
def map[C](f: B => C): Query0[C] = outer.map(f).toQuery0(a)
}
}
object Query {
/**
* Construct a `Query` with the given SQL string, an optional `StackTraceElement` for diagnostic
* purposes, and composite type arguments for input and output types. The most common way to
* construct a `Query` is via the `sql` interpolator.
* @group Constructors
*/
def apply[A, B](sql0: String, stackFrame0: Option[StackTraceElement] = None)(implicit A: Composite[A], B: Composite[B]): Query[A, B] =
new Query[A, B] {
type I = A
type O = B
val ai: A => I = a => a
val ob: O => B = o => o
implicit val ic: Composite[I] = A
implicit val oc: Composite[O] = B
val sql = sql0
val stackFrame = stackFrame0
}
/** @group Typeclass Instances */
implicit val queryProfunctor: Profunctor[Query] =
new Profunctor[Query] {
def mapfst[A, B, C](fab: Query[A,B])(f: C => A) = fab contramap f
def mapsnd[A, B, C](fab: Query[A,B])(f: B => C) = fab map f
}
/** @group Typeclass Instances */
implicit def queryCovariant[A]: Functor[Query[A, ?]] =
queryProfunctor.covariantInstance[A]
/** @group Typeclass Instances */
implicit def queryContravariant[B]: Contravariant[Query[?, B]] =
queryProfunctor.contravariantInstance[B]
}
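  /** Usage sketch (illustrative addition, not part of doobie itself): construct a
    * query by hand and close over an argument. The `person` table is hypothetical.
    */
  private object QueryUsageExample {
    val byAge: Query[Int, String] =
      Query[Int, String]("SELECT name FROM person WHERE age > ?")
    // Applying the argument yields a Query0; `list` is the ConnectionIO[List[B]] program.
    val adults: ConnectionIO[List[String]] = byAge.toQuery0(18).list
  }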
/**
* An abstract query closed over its input arguments and yielding values of type `B`, without a
* specified disposition. Methods provided on `[[Query0]]` allow the query to be interpreted as a
   * stream or program in `ConnectionIO`.
*/
trait Query0[B] { outer =>
/**
* The SQL string.
* @group Diagnostics
*/
def sql: String
/**
* An optional `StackTraceElement` indicating the source location where this `Query` was
* constructed. This is used only for diagnostic purposes.
* @group Diagnostics
*/
def stackFrame: Option[StackTraceElement]
/**
* Program to construct an analysis of this query's SQL statement and asserted parameter and
* column types.
* @group Diagnostics
*/
def analysis: ConnectionIO[Analysis]
/**
* `Process` with effect type `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding
* elements of type `B`.
* @group Results
*/
def process: Process[ConnectionIO, B]
/**
* Program in `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an `F[B]`
* accumulated via the provided `CanBuildFrom`. This is the fastest way to accumulate a
* collection.
* @group Results
*/
def to[F[_]](implicit cbf: CanBuildFrom[Nothing, B, F[B]]): ConnectionIO[F[B]]
/**
* Program in `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an `F[B]`
* accumulated via `MonadPlus` append. This method is more general but less efficient than `to`.
* @group Results
*/
def accumulate[F[_]: MonadPlus]: ConnectionIO[F[B]]
/**
* Program in `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding a unique `B` and
* raising an exception if the resultset does not have exactly one row. See also `option`.
* @group Results
*/
def unique: ConnectionIO[B]
/**
* Program in `[[doobie.free.connection.ConnectionIO ConnectionIO]]` yielding an optional `B`
* and raising an exception if the resultset has more than one row. See also `unique`.
* @group Results
*/
def option: ConnectionIO[Option[B]]
/** @group Transformations */
def map[C](f: B => C): Query0[C]
/**
* Convenience method; equivalent to `process.sink(f)`
* @group Results
*/
def sink(f: B => ConnectionIO[Unit]): ConnectionIO[Unit] = process.sink(f)
/**
* Convenience method; equivalent to `to[List]`
* @group Results
*/
def list: ConnectionIO[List[B]] = to[List]
/**
* Convenience method; equivalent to `to[Vector]`
* @group Results
*/
def vector: ConnectionIO[Vector[B]] = to[Vector]
}
object Query0 {
/** @group Typeclass Instances */
implicit val queryFunctor: Functor[Query0] =
new Functor[Query0] {
def map[A, B](fa: Query0[A])(f: A => B) = fa map f
}
}
}
|
jamescway/doobie
|
core/src/main/scala/doobie/util/query.scala
|
Scala
|
mit
| 9,473 |
object Whatever {
override def equals(x: Any) = true
}
object Test extends dotty.runtime.LegacyApp {
// this should make it abundantly clear Any is the best return type we can guarantee
def matchWhatever(x: Any): Any = x match { case n @ Whatever => n }
// when left to its own devices, and not under -Xfuture, the return type is Whatever.type
def matchWhateverCCE(x: Any) = x match { case n @ Whatever => n }
// just to exercise it a bit
assert(matchWhatever(1) == 1)
assert(matchWhatever("1") == "1")
try {
matchWhateverCCE("1"): Whatever.type
} catch {
case _: ClassCastException => println("whoops")
}
}
|
yusuke2255/dotty
|
tests/pending/run/t1503.scala
|
Scala
|
bsd-3-clause
| 641 |
package spire.math.prime
import spire.implicits._
import org.scalatest.FunSuite
import spire.math.SafeLong
class PrimeTest extends FunSuite {
val largePrime = SafeLong("393050634124102232869567034555427371542904833")
val largeNonPrime = largePrime + 4
val tenPrimes = IndexedSeq(2, 3, 5, 7, 11, 13, 17, 19, 23, 29).map(x ⇒ SafeLong(x))
val nonPrimes = IndexedSeq(10L, 64L, 2L ** 32, 3L ** 10).map(x ⇒ SafeLong(x))
test("nth") {
for(i ← tenPrimes.indices)
assert(nth(i + 1) == tenPrimes(i))
}
test("isPrime") {
for(p ← tenPrimes)
assert(isPrime(p))
for(n ← nonPrimes)
assert(!isPrime(n))
}
test("fill") {
assert(fill(10).toSeq == tenPrimes)
assert(fill(2, 2).toSeq == tenPrimes.slice(2, 4))
}
test("stream") {
assert(stream.take(10).toSeq == tenPrimes)
}
test("factor") {
for(p ← tenPrimes) {
assert(factor(p) == Factors(p))
assert(factorPollardRho(p) == Factors(p))
assert(factorTrialDivision(p) == Factors(p))
assert(factorWheelDivision(p) == Factors(p))
}
def terms(f: Factors): Int = f.map(_._2).sum
for(n ← nonPrimes) {
assert(terms(factor(n)) > 1)
assert(terms(factorPollardRho(n)) > 1)
assert(terms(factorTrialDivision(n)) > 1)
assert(terms(factorWheelDivision(n)) > 1)
}
}
}
|
guersam/spire
|
tests/src/test/scala/spire/math/prime/PrimeTest.scala
|
Scala
|
mit
| 1,342 |
package org.jetbrains.plugins.scala.lang.psi
package stubs
package index
import com.intellij.psi.stubs.StubIndexKey
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
/**
* User: Alexander Podkhalyuzin
* Date: 14.10.2008
*/
class ScFunctionNameIndex extends ScStringStubIndexExtension[ScFunction] {
override def getKey: StubIndexKey[String, ScFunction] =
ScalaIndexKeys.METHOD_NAME_KEY
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/stubs/index/ScFunctionNameIndex.scala
|
Scala
|
apache-2.0
| 422 |
/*
* Copyright (c) 2012, The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.queue.function
import java.io.PrintStream
/**
* Runs a function in process.
*/
trait InProcessFunction extends QFunction {
analysisName = this.getClass.getSimpleName
def run()
/**
* During run() this stream will write to the stdout.
*/
var jobOutputStream: PrintStream = null
/**
   * During run() this stream will write to the stderr.
*/
var jobErrorStream: PrintStream = null
}
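/**
 * Illustrative sketch (not part of the original file): a minimal in-process
 * function. The output stream is assumed to be wired up by the framework
 * before run() is invoked.
 */
class HelloWorldFunction extends InProcessFunction {
  def run() {
    jobOutputStream.println("hello from an in-process function")
  }
}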
|
iontorrent/Torrent-Variant-Caller-stable
|
public/scala/src/org/broadinstitute/sting/queue/function/InProcessFunction.scala
|
Scala
|
mit
| 1,558 |
package tables
import models.Item
import play.api.db.slick.Config.driver.simple._
// An Item table with 4 columns: id, name, price, stock
class ItemsTable(tag: Tag) extends Table[Item](tag, "ITEMS") {
// This is the primary key column:
def id = column[Int]("ITEM_ID", O.PrimaryKey)
def name = column[String]("NAME")
def price = column[Double]("PRICE")
def stock = column[Int]("STOCK")
def * = (id, name, price, stock) <>(Item.tupled, Item.unapply _)
}
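// Illustrative usage sketch (not part of the original file): a TableQuery over
// ITEMS plus a simple filter; executing these requires a Slick session in scope.
object ItemsQueries {
  val items = TableQuery[ItemsTable]
  val inStock = items.filter(_.stock > 0) // rows with positive stock
}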
|
SofyanHadiA/simple-rest-scala
|
app/tables/ItemsTable.scala
|
Scala
|
mit
| 471 |
package org.scaladebugger.api.profiles.traits.info.events
import com.sun.jdi._
import com.sun.jdi.event._
import org.scaladebugger.api.lowlevel.JDIArgument
import org.scaladebugger.api.profiles.traits.info.{InfoProducer, JavaInfo}
import org.scaladebugger.api.virtualmachines.ScalaVirtualMachine
/**
* Represents the generic interface used to produce event info instances.
*/
trait EventInfoProducer extends JavaInfo {
/**
* Converts the current profile instance to a representation of
* low-level Java instead of a higher-level abstraction.
*
* @return The profile instance providing an implementation corresponding
* to Java
*/
override def toJavaInfo: EventInfoProducer
/**
* Returns the parent information producer of this event information
* producer.
*
* @return The parent information producer
*/
def infoProducer: InfoProducer
/** Fills in additional properties with default values. */
def newDefaultEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
event: Event,
jdiArguments: JDIArgument*
): EventInfo = newEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
event = event,
jdiArguments = jdiArguments
)
/** Creates a new instance of the event profile. */
def newEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
event: Event,
jdiArguments: Seq[JDIArgument]
): EventInfo
/** Fills in additional properties with default values. */
def newDefaultLocatableEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
locatableEvent: LocatableEvent,
jdiArguments: JDIArgument*
): LocatableEventInfo = newLocatableEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
locatableEvent = locatableEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the locatable event profile. */
def newLocatableEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
locatableEvent: LocatableEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = locatableEvent.virtualMachine(),
thread: => ThreadReference = locatableEvent.thread(),
threadReferenceType: => ReferenceType = locatableEvent.thread().referenceType(),
location: => Location = locatableEvent.location()
): LocatableEventInfo
/** Fills in additional properties with default values. */
def newDefaultMonitorEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorEvent: MonitorEvent,
jdiArguments: JDIArgument*
): MonitorEventInfo = newMonitorEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
monitorEvent = monitorEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the monitor contended entered event profile. */
def newMonitorEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorEvent: MonitorEvent,
jdiArguments: Seq[JDIArgument]
)(
monitor: => ObjectReference = monitorEvent.monitor(),
monitorReferenceType: => ReferenceType = monitorEvent.monitor().referenceType(),
virtualMachine: => VirtualMachine = monitorEvent.virtualMachine(),
thread: => ThreadReference = monitorEvent.thread(),
threadReferenceType: => ReferenceType = monitorEvent.thread().referenceType(),
location: => Location = monitorEvent.location()
): MonitorEventInfo
/** Fills in additional properties with default values. */
def newDefaultWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
watchpointEvent: WatchpointEvent,
jdiArguments: JDIArgument*
): WatchpointEventInfo = newWatchpointEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
watchpointEvent = watchpointEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the watchpoint event profile. */
def newWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
watchpointEvent: WatchpointEvent,
jdiArguments: Seq[JDIArgument]
)(
container: => Either[ObjectReference, ReferenceType] =
Option(watchpointEvent.`object`())
.map(Left.apply)
.getOrElse(Right(watchpointEvent.field().declaringType())),
field: => Field = watchpointEvent.field(),
virtualMachine: => VirtualMachine = watchpointEvent.virtualMachine(),
thread: => ThreadReference = watchpointEvent.thread(),
threadReferenceType: => ReferenceType = watchpointEvent.thread().referenceType(),
location: => Location = watchpointEvent.location()
): WatchpointEventInfo
/** Fills in additional properties with default values. */
def newDefaultAccessWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
accessWatchpointEvent: AccessWatchpointEvent,
jdiArguments: JDIArgument*
): AccessWatchpointEventInfo = newAccessWatchpointEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
accessWatchpointEvent = accessWatchpointEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the access watchpoint event profile. */
def newAccessWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
accessWatchpointEvent: AccessWatchpointEvent,
jdiArguments: Seq[JDIArgument]
)(
container: => Either[ObjectReference, ReferenceType] =
Option(accessWatchpointEvent.`object`())
.map(Left.apply)
.getOrElse(Right(accessWatchpointEvent.field().declaringType())),
field: => Field = accessWatchpointEvent.field(),
virtualMachine: => VirtualMachine = accessWatchpointEvent.virtualMachine(),
thread: => ThreadReference = accessWatchpointEvent.thread(),
threadReferenceType: => ReferenceType = accessWatchpointEvent.thread().referenceType(),
location: => Location = accessWatchpointEvent.location()
): AccessWatchpointEventInfo
/** Fills in additional properties with default values. */
def newDefaultBreakpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
breakpointEvent: BreakpointEvent,
jdiArguments: JDIArgument*
): BreakpointEventInfo = newBreakpointEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
breakpointEvent = breakpointEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the breakpoint event profile. */
def newBreakpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
breakpointEvent: BreakpointEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = breakpointEvent.virtualMachine(),
thread: => ThreadReference = breakpointEvent.thread(),
threadReferenceType: => ReferenceType = breakpointEvent.thread().referenceType(),
location: => Location = breakpointEvent.location()
): BreakpointEventInfo
/** Fills in additional properties with default values. */
def newDefaultClassPrepareEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
classPrepareEvent: ClassPrepareEvent,
jdiArguments: JDIArgument*
): ClassPrepareEventInfo = newClassPrepareEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
classPrepareEvent = classPrepareEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the class prepare event profile. */
def newClassPrepareEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
classPrepareEvent: ClassPrepareEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = classPrepareEvent.virtualMachine(),
thread: => ThreadReference = classPrepareEvent.thread(),
threadReferenceType: => ReferenceType = classPrepareEvent.thread().referenceType(),
referenceType: => ReferenceType = classPrepareEvent.referenceType()
): ClassPrepareEventInfo
/** Fills in additional properties with default values. */
def newDefaultClassUnloadEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
classUnloadEvent: ClassUnloadEvent,
jdiArguments: JDIArgument*
): ClassUnloadEventInfo = newClassUnloadEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
classUnloadEvent = classUnloadEvent,
jdiArguments = jdiArguments
)
/** Creates a new instance of the class unload event profile. */
def newClassUnloadEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
classUnloadEvent: ClassUnloadEvent,
jdiArguments: Seq[JDIArgument]
): ClassUnloadEventInfo
/** Fills in additional properties with default values. */
def newDefaultExceptionEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
exceptionEvent: ExceptionEvent,
jdiArguments: JDIArgument*
): ExceptionEventInfo = newExceptionEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
exceptionEvent = exceptionEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the exception event profile. */
def newExceptionEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
exceptionEvent: ExceptionEvent,
jdiArguments: Seq[JDIArgument]
)(
catchLocation: => Option[Location] = Option(exceptionEvent.catchLocation()),
exception: => ObjectReference = exceptionEvent.exception(),
    exceptionReferenceType: => ReferenceType = exceptionEvent.exception().referenceType(),
virtualMachine: => VirtualMachine = exceptionEvent.virtualMachine(),
thread: => ThreadReference = exceptionEvent.thread(),
threadReferenceType: => ReferenceType = exceptionEvent.thread().referenceType(),
location: => Location = exceptionEvent.location()
): ExceptionEventInfo
/** Fills in additional properties with default values. */
def newDefaultMethodEntryEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
methodEntryEvent: MethodEntryEvent,
jdiArguments: JDIArgument*
): MethodEntryEventInfo = newMethodEntryEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
methodEntryEvent = methodEntryEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the method entry event profile. */
def newMethodEntryEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
methodEntryEvent: MethodEntryEvent,
jdiArguments: Seq[JDIArgument]
)(
method: => Method = methodEntryEvent.method(),
virtualMachine: => VirtualMachine = methodEntryEvent.virtualMachine(),
thread: => ThreadReference = methodEntryEvent.thread(),
threadReferenceType: => ReferenceType = methodEntryEvent.thread().referenceType(),
location: => Location = methodEntryEvent.location()
): MethodEntryEventInfo
/** Fills in additional properties with default values. */
def newDefaultMethodExitEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
methodExitEvent: MethodExitEvent,
jdiArguments: JDIArgument*
): MethodExitEventInfo = newMethodExitEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
methodExitEvent = methodExitEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the method exit event profile. */
def newMethodExitEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
methodExitEvent: MethodExitEvent,
jdiArguments: Seq[JDIArgument]
)(
method: => Method = methodExitEvent.method(),
returnValue: => Value = methodExitEvent.returnValue(),
virtualMachine: => VirtualMachine = methodExitEvent.virtualMachine(),
thread: => ThreadReference = methodExitEvent.thread(),
threadReferenceType: => ReferenceType = methodExitEvent.thread().referenceType(),
location: => Location = methodExitEvent.location()
): MethodExitEventInfo
/** Fills in additional properties with default values. */
def newDefaultModificationWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
modificationWatchpointEvent: ModificationWatchpointEvent,
jdiArguments: JDIArgument*
): ModificationWatchpointEventInfo = newModificationWatchpointEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
modificationWatchpointEvent = modificationWatchpointEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the modification watchpoint event profile. */
def newModificationWatchpointEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
modificationWatchpointEvent: ModificationWatchpointEvent,
jdiArguments: Seq[JDIArgument]
)(
container: => Either[ObjectReference, ReferenceType] =
Option(modificationWatchpointEvent.`object`())
.map(Left.apply)
.getOrElse(Right(modificationWatchpointEvent.field().declaringType())),
field: => Field = modificationWatchpointEvent.field(),
virtualMachine: => VirtualMachine = modificationWatchpointEvent.virtualMachine(),
thread: => ThreadReference = modificationWatchpointEvent.thread(),
threadReferenceType: => ReferenceType = modificationWatchpointEvent.thread().referenceType(),
location: => Location = modificationWatchpointEvent.location()
): ModificationWatchpointEventInfo
/** Fills in additional properties with default values. */
def newDefaultMonitorContendedEnteredEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorContendedEnteredEvent: MonitorContendedEnteredEvent,
jdiArguments: JDIArgument*
): MonitorContendedEnteredEventInfo = newMonitorContendedEnteredEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
monitorContendedEnteredEvent = monitorContendedEnteredEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the monitor contended entered event profile. */
def newMonitorContendedEnteredEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorContendedEnteredEvent: MonitorContendedEnteredEvent,
jdiArguments: Seq[JDIArgument]
)(
monitor: => ObjectReference = monitorContendedEnteredEvent.monitor(),
monitorReferenceType: => ReferenceType = monitorContendedEnteredEvent.monitor().referenceType(),
virtualMachine: => VirtualMachine = monitorContendedEnteredEvent.virtualMachine(),
thread: => ThreadReference = monitorContendedEnteredEvent.thread(),
threadReferenceType: => ReferenceType = monitorContendedEnteredEvent.thread().referenceType(),
location: => Location = monitorContendedEnteredEvent.location()
): MonitorContendedEnteredEventInfo
/** Fills in additional properties with default values. */
def newDefaultMonitorContendedEnterEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorContendedEnterEvent: MonitorContendedEnterEvent,
jdiArguments: JDIArgument*
): MonitorContendedEnterEventInfo = newMonitorContendedEnterEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
monitorContendedEnterEvent = monitorContendedEnterEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the monitor contended enter event profile. */
def newMonitorContendedEnterEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorContendedEnterEvent: MonitorContendedEnterEvent,
jdiArguments: Seq[JDIArgument]
)(
monitor: => ObjectReference = monitorContendedEnterEvent.monitor(),
monitorReferenceType: => ReferenceType = monitorContendedEnterEvent.monitor().referenceType(),
virtualMachine: => VirtualMachine = monitorContendedEnterEvent.virtualMachine(),
thread: => ThreadReference = monitorContendedEnterEvent.thread(),
threadReferenceType: => ReferenceType = monitorContendedEnterEvent.thread().referenceType(),
location: => Location = monitorContendedEnterEvent.location()
): MonitorContendedEnterEventInfo
/** Fills in additional properties with default values. */
def newDefaultMonitorWaitedEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorWaitedEvent: MonitorWaitedEvent,
jdiArguments: JDIArgument*
): MonitorWaitedEventInfo = newMonitorWaitedEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
monitorWaitedEvent = monitorWaitedEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the monitor waited event profile. */
def newMonitorWaitedEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorWaitedEvent: MonitorWaitedEvent,
jdiArguments: Seq[JDIArgument]
)(
monitor: => ObjectReference = monitorWaitedEvent.monitor(),
monitorReferenceType: => ReferenceType = monitorWaitedEvent.monitor().referenceType(),
virtualMachine: => VirtualMachine = monitorWaitedEvent.virtualMachine(),
thread: => ThreadReference = monitorWaitedEvent.thread(),
threadReferenceType: => ReferenceType = monitorWaitedEvent.thread().referenceType(),
location: => Location = monitorWaitedEvent.location()
): MonitorWaitedEventInfo
/** Fills in additional properties with default values. */
def newDefaultMonitorWaitEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorWaitEvent: MonitorWaitEvent,
jdiArguments: JDIArgument*
): MonitorWaitEventInfo = newMonitorWaitEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
monitorWaitEvent = monitorWaitEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the monitor wait event profile. */
def newMonitorWaitEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
monitorWaitEvent: MonitorWaitEvent,
jdiArguments: Seq[JDIArgument]
)(
monitor: => ObjectReference = monitorWaitEvent.monitor(),
monitorReferenceType: => ReferenceType = monitorWaitEvent.monitor().referenceType(),
virtualMachine: => VirtualMachine = monitorWaitEvent.virtualMachine(),
thread: => ThreadReference = monitorWaitEvent.thread(),
threadReferenceType: => ReferenceType = monitorWaitEvent.thread().referenceType(),
location: => Location = monitorWaitEvent.location()
): MonitorWaitEventInfo
/** Fills in additional properties with default values. */
def newDefaultStepEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
stepEvent: StepEvent,
jdiArguments: JDIArgument*
): StepEventInfo = newStepEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
stepEvent = stepEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the step event profile. */
def newStepEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
stepEvent: StepEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = stepEvent.virtualMachine(),
thread: => ThreadReference = stepEvent.thread(),
threadReferenceType: => ReferenceType = stepEvent.thread().referenceType(),
location: => Location = stepEvent.location()
): StepEventInfo
/** Fills in additional properties with default values. */
def newDefaultThreadDeathEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
threadDeathEvent: ThreadDeathEvent,
jdiArguments: JDIArgument*
): ThreadDeathEventInfo = newThreadDeathEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
threadDeathEvent = threadDeathEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the thread death event profile. */
def newThreadDeathEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
threadDeathEvent: ThreadDeathEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = threadDeathEvent.virtualMachine(),
thread: => ThreadReference = threadDeathEvent.thread(),
threadReferenceType: => ReferenceType = threadDeathEvent.thread().referenceType()
): ThreadDeathEventInfo
/** Fills in additional properties with default values. */
def newDefaultThreadStartEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
threadStartEvent: ThreadStartEvent,
jdiArguments: JDIArgument*
): ThreadStartEventInfo = newThreadStartEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
threadStartEvent = threadStartEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the thread start event profile. */
def newThreadStartEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
threadStartEvent: ThreadStartEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = threadStartEvent.virtualMachine(),
thread: => ThreadReference = threadStartEvent.thread(),
threadReferenceType: => ReferenceType = threadStartEvent.thread().referenceType()
): ThreadStartEventInfo
/** Fills in additional properties with default values. */
def newDefaultVMDeathEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmDeathEvent: VMDeathEvent,
jdiArguments: JDIArgument*
): VMDeathEventInfo = newVMDeathEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
vmDeathEvent = vmDeathEvent,
jdiArguments = jdiArguments
)
/** Creates a new instance of the vm death event profile. */
def newVMDeathEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmDeathEvent: VMDeathEvent,
jdiArguments: Seq[JDIArgument]
): VMDeathEventInfo
/** Fills in additional properties with default values. */
def newDefaultVMDisconnectEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmDisconnectEvent: VMDisconnectEvent,
jdiArguments: JDIArgument*
): VMDisconnectEventInfo = newVMDisconnectEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
vmDisconnectEvent = vmDisconnectEvent,
jdiArguments = jdiArguments
)
/** Creates a new instance of the vm disconnect event profile. */
def newVMDisconnectEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmDisconnectEvent: VMDisconnectEvent,
jdiArguments: Seq[JDIArgument]
): VMDisconnectEventInfo
/** Fills in additional properties with default values. */
def newDefaultVMStartEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmStartEvent: VMStartEvent,
jdiArguments: JDIArgument*
): VMStartEventInfo = newVMStartEventInfo(
scalaVirtualMachine = scalaVirtualMachine,
vmStartEvent = vmStartEvent,
jdiArguments = jdiArguments
)()
/** Creates a new instance of the vm start event profile. */
def newVMStartEventInfo(
scalaVirtualMachine: ScalaVirtualMachine,
vmStartEvent: VMStartEvent,
jdiArguments: Seq[JDIArgument]
)(
virtualMachine: => VirtualMachine = vmStartEvent.virtualMachine(),
thread: => ThreadReference = vmStartEvent.thread(),
threadReferenceType: => ReferenceType = vmStartEvent.thread().referenceType()
): VMStartEventInfo
}
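// Editor's sketch (hypothetical, not part of the original file): the intended
// call pattern — each newDefaultXxxEventInfo overload delegates to the matching
// abstract factory with all extra properties left at their defaults.
object EventInfoProducerUsage {
  def wrap(producer: EventInfoProducer,
           svm: ScalaVirtualMachine,
           event: Event): EventInfo =
    producer.newDefaultEventInfo(svm, event)
}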
|
ensime/scala-debugger
|
scala-debugger-api/src/main/scala/org/scaladebugger/api/profiles/traits/info/events/EventInfoProducer.scala
|
Scala
|
apache-2.0
| 22,061 |
package rs.fon.kvizic.networkAnalysis.algorithm.clans
import rs.fon.kvizic.networkAnalysis.Actor
class Clique {
  /** Returns true when every actor in the group is connected to every other actor, i.e. the group forms a clique. */
  def isA(actors: List[Actor]): Boolean = {
    val connected: List[Boolean] = for (actor <- actors; connects <- actors)
      yield (actor == connects) || actor.getAllEndActors.contains(connects)
    connected.forall(identity)
  }
}
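// Editor's sketch (hypothetical, not part of the original file): isA answers
// whether a candidate actor group is fully connected, so a caller can filter
// harvested groups down to cliques.
object CliqueUsage {
  def cliquesOnly(groups: List[List[Actor]]): List[List[Actor]] = {
    val clique = new Clique
    groups.filter(clique.isA)
  }
}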
|
dpavkov/network-analysis-metrics
|
src/main/scala/rs/fon/kvizic/networkAnalysis/algorithm/clans/Clique.scala
|
Scala
|
mit
| 373 |
package chandu0101.scalajs.react.components.reactslick
import chandu0101.macros.tojs.JSMacro
import japgolly.scalajs.react._
import scala.scalajs.js
case class ReactSlick (
/* Additional class name for the inner slider div */
  className: js.UndefOr[String] = js.undefined,
/* Adjust the slide's height automatically */
adaptiveHeight: js.UndefOr[Boolean] = js.undefined,
/* Should we show Left and right nav arrows */
arrows: js.UndefOr[Boolean] = js.undefined,
/* Should the scroller auto scroll? */
autoplay: js.UndefOr[Boolean] = js.undefined,
  /* Delay between each auto scroll, in ms */
autoplaySpeed: js.UndefOr[Int] = js.undefined,
/* Should we centre to a single item? */
centerMode: js.UndefOr[Boolean] = js.undefined,
/* Should we show the dots at the bottom of the gallery */
dots: js.UndefOr[Boolean] = js.undefined,
/* Class applied to the dots if they are enabled */
  dotsClass: js.UndefOr[String] = js.undefined,
/* Is the gallery scrollable via dragging on desktop? */
draggable: js.UndefOr[Boolean] = js.undefined,
/* Slides use fade for transition */
fade: js.UndefOr[Boolean] = js.undefined,
  /* Should the gallery wrap around its contents */
infinite: js.UndefOr[Boolean] = js.undefined,
/* which item should be the first to be displayed */
initialSlide: js.UndefOr[Int] = js.undefined,
/* Loads images or renders components on demands */
lazyLoad: js.UndefOr[Boolean] = js.undefined,
  /* Array of objects in the form of { breakpoint: int, settings: { ... } } The breakpoint int is the maxWidth so the settings will be applied when resolution is below this value. Breakpoints in the array should be ordered from smallest to greatest. Use 'unslick' in place of the settings object to disable rendering the carousel at that breakpoint. Example: [ { breakpoint: 768, settings: { slidesToShow: 3 } }, { breakpoint: 1024, settings: { slidesToShow: 5 } }, { breakpoint: 100000, settings: 'unslick' } ] */
responsive: js.UndefOr[js.Array[ResponsiveSetting]] = js.undefined,
/* Reverses the slide order */
rtl: js.UndefOr[Boolean] = js.undefined,
/* Number of slides to be visible at a time */
slidesToShow: js.UndefOr[Int] = js.undefined,
/* Number of slides to scroll for each navigation item */
slidesToScroll: js.UndefOr[Int] = js.undefined,
/* Enable/Disable CSS Transitions */
useCSS: js.UndefOr[Boolean] = js.undefined,
/* callback function called after the current index changes */
afterChange: js.UndefOr[Int => Callback] = js.undefined,
/* callback function called before the current index changes */
beforeChange: js.UndefOr[(Int, Int) => Callback] = js.undefined,
/* go to the specified slide number */
slickGoTo: js.UndefOr[Int] = js.undefined){
def apply(children: ReactElement*) = {
val f = React.asInstanceOf[js.Dynamic].createFactory(js.Dynamic.global.ReactSlick)
val props = JSMacro[ReactSlick](this)
if (children.isEmpty)
f(props).asInstanceOf[ReactComponentU_]
else if (children.size == 1)
f(props, children.head).asInstanceOf[ReactComponentU_]
else
f(props, children.toJsArray).asInstanceOf[ReactComponentU_]
}
}
|
elacin/scalajs-react-components
|
core/src/main/scala/chandu0101/scalajs/react/components/reactslick/ReactSlick.scala
|
Scala
|
apache-2.0
| 3,293 |
package model.slick.nsi
import slick.jdbc.PostgresProfile.api._
class RamActivities(tag: Tag) extends Table[(Int, String, Option[String], String, Boolean)](tag, "ramActivities") {
def activityID: Rep[Int] = column[Int]("activityID", O.PrimaryKey) // This is the primary key column
def activityName: Rep[String] = column[String]("activityName")
def iconNo: Rep[Option[String]] = column[Option[String]]("iconNo")
def description: Rep[String] = column[String]("description")
def published: Rep[Boolean] = column[Boolean]("published")
// Every table needs a * projection with the same type as the table's type parameter
def * = (activityID, activityName, iconNo, description, published)
}
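// Editor's sketch (hypothetical, not part of the original file): a TableQuery
// handle for composing Slick 3 queries against the table, e.g. selecting only
// published activities.
object RamActivitiesQueries {
  val ramActivities = TableQuery[RamActivities]
  val published = ramActivities.filter(_.published)
}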
|
apantin/scala.js.test
|
server/app/model/slick/nsi/RamActivities.scala
|
Scala
|
apache-2.0
| 716 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.crud.rest.instance.destroyer
import com.bwsw.common.marathon._
import com.bwsw.sj.common.utils.EngineLiterals
import com.bwsw.sj.crud.rest.instance.InstanceDomainRenewer
import org.apache.http.HttpStatus
import org.mockito.Mockito._
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}
class InstanceDestroyerTestSuit extends FlatSpec with Matchers with PrivateMethodTester with InstanceDestroyerMocks {
it should "deleteFramework() method removes the existent marathon app" in {
//arrange
val deleteFramework = PrivateMethod[String]('deleteFramework)
val marathonManager = mock[MarathonApi]
when(marathonManager.destroyMarathonApplication(frameworkName)).thenReturn(okStatus)
when(marathonManager.getApplicationInfo(frameworkName)).thenReturn(notFoundFrameworkResponce)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act and assert
instanceStopper invokePrivate deleteFramework()
//assert
verify(marathonManager, times(1)).destroyMarathonApplication(frameworkName)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleting)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleted)
}
it should "deleteFramework() method works properly if a framework app has been deleted earlier" in {
//arrange
val deleteFramework = PrivateMethod[String]('deleteFramework)
val marathonManager = mock[MarathonApi]
when(marathonManager.destroyMarathonApplication(frameworkName)).thenReturn(HttpStatus.SC_NOT_FOUND)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act and assert
instanceStopper invokePrivate deleteFramework()
//assert
verify(marathonManager, times(1)).destroyMarathonApplication(frameworkName)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleted)
}
it should "deleteFramework() method fails if marathon has got some problems with the destroying process of framework" in {
//arrange
val deleteFramework = PrivateMethod[String]('deleteFramework)
val marathonManager = mock[MarathonApi]
when(marathonManager.destroyMarathonApplication(frameworkName)).thenReturn(errorStatus)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act and assert
assertThrows[Exception](instanceStopper invokePrivate deleteFramework())
//assert
verify(marathonManager, times(1)).destroyMarathonApplication(frameworkName)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.error)
}
it should "waitForFrameworkToDelete() method works properly if there are no problems" in {
//arrange
val waitForFrameworkToDelete = PrivateMethod('waitForFrameworkToDelete)
val marathonManager = mock[MarathonApi]
when(marathonManager.getApplicationInfo(frameworkName)).thenReturn(notFoundFrameworkResponce)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act
instanceStopper invokePrivate waitForFrameworkToDelete()
//assert
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleted)
}
it should "waitForFrameworkToDelete() method do multiple tries to wait until marathon destroys the app" in {
//arrange
val numberOfTries = 5
val startedMarathonApps = Array.fill(numberOfTries - 1)(okFrameworkResponse).toList
val waitForFrameworkToDelete = PrivateMethod('waitForFrameworkToDelete)
val marathonManager = mock[MarathonApi]
when(marathonManager.getApplicationInfo(frameworkName)).thenReturn(okFrameworkResponse, startedMarathonApps.:+(notFoundFrameworkResponce): _*)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act
instanceStopper invokePrivate waitForFrameworkToDelete()
//assert
verify(instanceManager, times(numberOfTries)).updateFrameworkStage(instanceMock, EngineLiterals.deleting)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleted)
}
it should "run() method works properly if framework has been stopped without any exceptions" in {
//arrange
val marathonManager = mock[MarathonApi]
when(marathonManager.destroyMarathonApplication(frameworkName)).thenReturn(okStatus)
when(marathonManager.getApplicationInfo(frameworkName)).thenReturn(notFoundFrameworkResponce)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act
instanceStopper.run()
//assert
verify(instanceManager, times(1)).updateInstanceStatus(instanceMock, EngineLiterals.deleting)
verify(marathonManager, times(1)).destroyMarathonApplication(frameworkName)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleting)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.deleted)
verify(instanceManager, times(1)).deleteInstance(instanceMock.name)
}
it should s"run() method doesn't delete an instance (set '${EngineLiterals.error}' status) if there are some exceptions during destroying process" in {
//arrange
val marathonManager = mock[MarathonApi]
when(marathonManager.destroyMarathonApplication(frameworkName)).thenReturn(errorStatus)
val instanceManager = mock[InstanceDomainRenewer]
val instanceStopper = instanceDestroyerMock(instanceManager = instanceManager, marathonManager = marathonManager)
//act
instanceStopper.run()
//assert
verify(instanceManager, times(1)).updateInstanceStatus(instanceMock, EngineLiterals.deleting)
verify(marathonManager, times(1)).destroyMarathonApplication(frameworkName)
verify(instanceManager, times(1)).updateFrameworkStage(instanceMock, EngineLiterals.error)
verify(instanceManager, times(1)).updateInstanceStatus(instanceMock, EngineLiterals.error)
}
}
|
bwsw/sj-platform
|
core/sj-crud-rest/src/test/scala-2.12/com/bwsw/sj/crud/rest/instance/destroyer/InstanceDestroyerTestSuit.scala
|
Scala
|
apache-2.0
| 7,352 |
package doobie.syntax
import scalaz._, Scalaz._
import doobie.imports._
import shapeless.test.illTyped
import org.specs2.mutable.Specification
object catchsqlspec extends Specification {
"catchsql syntax" should {
"work on aliased IConnection" in {
42.point[ConnectionIO].attemptSql
true
}
"work on unaliased IConnection" in {
42.point[ConnectionIO].map(_ + 1).attemptSql
true
}
}
}
|
coltfred/doobie
|
core/src/test/scala/doobie/syntax/catchsql.scala
|
Scala
|
mit
| 433 |
import net.mentalarray.doozie.Builders.WorkflowBuilder
import net.mentalarray.doozie.DBStore.JDBCConnection
import net.mentalarray.doozie.Logging
import net.mentalarray.doozie.Tasks.ScalaTask
/**
* Created by bgilcrease on 10/14/14.
*/
class TechnodeLookup extends WorkflowBuilder("TechnodeLookup") with Logging {
val selectQuery =
"""
SELECT DISTINCT PROCESS_ID, SUBSTR(STEP_ID,0,1) AS StepKey,
CASE
WHEN SUBSTR(STEP_ID,0,1) = 'F' THEN '14nm'
WHEN SUBSTR(STEP_ID,0,1) = 'J' THEN '20nm'
WHEN SUBSTR(STEP_ID,0,1) = 'Z' THEN '28nm'
WHEN SUBSTR(STEP_ID,0,1) = 'H' THEN '32nm'
WHEN SUBSTR(STEP_ID,0,1) = 'Y' THEN '45nm'
ELSE 'Other'
END AS TechNode
FROM "SOR"."MST_STEPSEQ_ALL"
WHERE PROCESS_ID like 'U%' and LINE_ID = 'SFBX' and STEP_ID is not null and STEP_ID not like 'B%' and STEP_ID not like 'E%'
with ur
"""
val truncateQuery = "truncate table technodelookup"
appendStep( ScalaTask {
val results = JDBCConnection.YMS.fetchSeq(selectQuery, None)
JDBCConnection.HiveMetastore.executeNonQuery(List(truncateQuery))
JDBCConnection.HiveMetastore.insertBatch(table = "technodelookup", results)
true
})
}
new TechnodeLookup()
|
antagonist112358/tomahawk
|
examples/TechnodeLookupWorkflow.scala
|
Scala
|
apache-2.0
| 1,192 |
package im.actor.server.frontend
import java.net.InetAddress
import akka.stream.FlowShape
import kamon.metric.instrument.{ MinMaxCounter, Histogram }
import scala.util.{ Failure, Success }
import akka.actor._
import akka.stream.actor.ActorPublisher
import akka.stream.scaladsl._
import akka.util.ByteString
import im.actor.server.mtproto.codecs.transport._
import im.actor.server.mtproto.transport._
import im.actor.server.session.SessionRegion
object MTProtoBlueprint {
import akka.stream.stage._
type MTProtoFlow = Flow[ByteString, ByteString, akka.NotUsed]
val protoVersions: Set[Byte] = Set(1, 2, 3)
val apiMajorVersions: Set[Byte] = Set(1)
def apply(connId: String, connTimeHist: Histogram, connCountMM: MinMaxCounter, serverKeys: Seq[ServerKey], remoteAddr: InetAddress)(implicit sessionRegion: SessionRegion, system: ActorSystem): MTProtoFlow = {
val sessionClient = system.actorOf(SessionClient.props(sessionRegion, remoteAddr), s"sessionClient_${connId}")
val authManager = system.actorOf(AuthorizationManager.props(serverKeys, sessionClient), s"authManager-$connId")
val authSource = Source.fromPublisher(ActorPublisher[MTProto](authManager))
val sessionClientSource = Source.fromPublisher(ActorPublisher[MTProto](sessionClient))
val mtprotoFlow = Flow.fromGraph(new PackageParseStage())
.transform(() ⇒ new PackageCheckStage)
.via(new PackageHandleStage(protoVersions, apiMajorVersions, authManager, sessionClient))
val mapRespFlow: Flow[MTProto, ByteString, akka.NotUsed] = Flow[MTProto]
.transform(() ⇒ mapResponse(system))
val connStartTime = System.currentTimeMillis()
connCountMM.increment()
val completeSink = Sink onComplete {
case res ⇒
res match {
case Success(_) ⇒
system.log.debug("Closing connection {}", connId)
case Failure(e) ⇒
system.log.debug("Closing connection {} due to error: {}", connId, e)
}
connTimeHist.record(System.currentTimeMillis() - connStartTime)
connCountMM.decrement()
authManager ! PoisonPill
sessionClient ! PoisonPill
}
Flow.fromGraph(GraphDSL.create() { implicit builder ⇒
import GraphDSL.Implicits._
val bcast = builder.add(Broadcast[ByteString](2))
val merge = builder.add(Merge[MTProto](3))
val mtproto = builder.add(mtprotoFlow)
val auth = builder.add(authSource)
val session = builder.add(sessionClientSource)
val mapResp = builder.add(mapRespFlow)
val complete = builder.add(completeSink)
// format: OFF
bcast ~> complete
bcast ~> mtproto ~> merge
auth ~> merge
session ~> merge ~> mapResp
// format: ON
FlowShape(bcast.in, mapResp.outlet)
})
}
def mapResponse(system: ActorSystem) = new PushStage[MTProto, ByteString] {
private[this] var packageIndex: Int = -1
override def onPush(elem: MTProto, ctx: Context[ByteString]) = {
packageIndex += 1
val pkg = TransportPackage(packageIndex, elem)
val resBits = TransportPackageCodec.encode(pkg).require
val res = ByteString(resBits.toByteBuffer)
elem match {
case _: Drop ⇒
ctx.pushAndFinish(res)
case _ ⇒
ctx.push(res)
}
}
}
}
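// Editor's sketch (hypothetical, not part of the original file): binding the
// blueprint to incoming TCP connections. The histogram, counter, server keys
// and session region are assumed to be supplied by the surrounding server.
object MTProtoServerSketch {
  import akka.stream.Materializer
  import akka.stream.scaladsl.Tcp
  def bind(interface: String, port: Int, connTimeHist: Histogram,
           connCountMM: MinMaxCounter, serverKeys: Seq[ServerKey])(
    implicit system: ActorSystem, mat: Materializer, sessionRegion: SessionRegion) =
    Tcp(system).bind(interface, port).runForeach { conn ⇒
      // One blueprint instance per connection, keyed by the remote address.
      val connId = conn.remoteAddress.toString
      conn.handleWith(MTProtoBlueprint(connId, connTimeHist, connCountMM, serverKeys,
        conn.remoteAddress.getAddress))
    }
}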
|
ljshj/actor-platform
|
actor-server/actor-frontend/src/main/scala/im/actor/server/frontend/MTProtoBlueprint.scala
|
Scala
|
mit
| 3,346 |
package chandu0101.scalajs.react.components.materialui
import japgolly.scalajs.react._
import materialui.Mui
import scala.scalajs.js
import scala.scalajs.js.{Date => JDate}
/**
*
key: PropTypes.string,
style: PropTypes.js.Any,
ref: PropTypes.String,
hintText: PropTypes.String,
defaultDate: React.PropTypes.JDate,
 formatDate: React.PropTypes.JDate => String,
 mode: React.PropTypes.MuiDatePickerMode,
 onFocus: React.PropTypes.ReactEventI => Unit,
 onTouchTap: React.PropTypes.ReactEventI => Unit,
 onChange: React.PropTypes.(JDate, JDate) => Unit,
onShow: React.PropTypes.() => Unit,
onDismiss: React.PropTypes.() => Unit,
minDate: React.PropTypes.JDate,
maxDate: React.PropTypes.JDate,
shouldDisableDate: React.PropTypes.func,
hideToolbarYearChange: React.PropTypes.bool,
autoOk: React.PropTypes.bool,
showYearSelector: React.PropTypes.bool
*/
object MuiDatePicker {
def apply(defaultDate : js.UndefOr[JDate] = js.undefined,
onDismiss : js.UndefOr[() => Unit] = js.undefined,
onShow : js.UndefOr[() => Unit] = js.undefined,
style : js.UndefOr[js.Any] = js.undefined,
onChange : js.UndefOr[(JDate,JDate) => Unit] = js.undefined,
autoOk : js.UndefOr[Boolean]=js.undefined,
ref : js.UndefOr[String] = js.undefined,
maxDate : js.UndefOr[JDate] = js.undefined,
hintText : js.UndefOr[String] = js.undefined,
shouldDisableDate : js.UndefOr[js.Function] = js.undefined ,
key : js.UndefOr[String] = js.undefined,
hideToolbarYearChange : js.UndefOr[Boolean]=js.undefined,
minDate : js.UndefOr[JDate] = js.undefined,
onTouchTap : js.UndefOr[ReactEventI => Unit] = js.undefined,
formatDate : js.UndefOr[ JDate => String] = js.undefined,
onFocus : js.UndefOr[ ReactEventI => Unit] = js.undefined,
mode : js.UndefOr[MuiDatePickerMode] = js.undefined,
showYearSelector : js.UndefOr[Boolean]=js.undefined) = {
val p = js.Dynamic.literal()
defaultDate.foreach(v => p.updateDynamic("defaultDate")(v))
onDismiss.foreach(v => p.updateDynamic("onDismiss")(v))
onShow.foreach(v => p.updateDynamic("onShow")(v))
style.foreach(v => p.updateDynamic("style")(v))
onChange.foreach(v => p.updateDynamic("onChange")(v))
autoOk.foreach(v => p.updateDynamic("autoOk")(v))
ref.foreach(v => p.updateDynamic("ref")(v))
maxDate.foreach(v => p.updateDynamic("maxDate")(v))
hintText.foreach(v => p.updateDynamic("hintText")(v))
shouldDisableDate.foreach(v => p.updateDynamic("shouldDisableDate")(v))
key.foreach(v => p.updateDynamic("key")(v))
hideToolbarYearChange.foreach(v => p.updateDynamic("hideToolbarYearChange")(v))
minDate.foreach(v => p.updateDynamic("minDate")(v))
onTouchTap.foreach(v => p.updateDynamic("onTouchTap")(v))
formatDate.foreach(v => p.updateDynamic("formatDate")(v))
onFocus.foreach(v => p.updateDynamic("onFocus")(v))
mode.foreach(v => p.updateDynamic("mode")(v.mode))
showYearSelector.foreach(v => p.updateDynamic("showYearSelector")(v))
val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.DatePicker)
f(p).asInstanceOf[ReactComponentU_]
}
}
class MuiDatePickerMode private(val mode: String) extends AnyVal
object MuiDatePickerMode {
val PROTRAIT = new MuiDatePickerMode("portrait")
val LANDSCAPE = new MuiDatePickerMode("landscape")
def newMode(mode: String ) = new MuiDatePickerMode(mode)
}
trait MuiDatePickerM extends js.Object {
def getDate(): JDate = js.native
def setDate(d: JDate): Unit = js.native
}
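// Editor's sketch (hypothetical, not part of the original file): a minimal
// picker with a hint, an initial date and a change handler.
object MuiDatePickerUsage {
  def example = MuiDatePicker(
    hintText = "Pick a date",
    defaultDate = new JDate(),
    onChange = (_: JDate, selected: JDate) => println(s"selected: $selected")
  )
}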
|
coreyauger/scalajs-react-components
|
core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiDatePicker.scala
|
Scala
|
apache-2.0
| 3,694 |
trait Universe {
type Symbol >: Null <: AnyRef with SymbolApi
trait SymbolApi
type TypeSymbol >: Null <: TypeSymbolApi with Symbol
trait TypeSymbolApi
implicit class CompatibleSymbol(sym: Symbol) {
def asFreeType: TypeSymbol = ???
}
}
object Test extends App {
val u: Universe = ???
import u.*
val sym: Symbol = ???
sym.asFreeType
}
|
dotty-staging/dotty
|
tests/pos/t8301.scala
|
Scala
|
apache-2.0
| 361 |
package patchless.circe.extras
import cats.syntax.either._
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.auto._
import io.circe.parser.parse
import org.scalatest.{FreeSpec, Matchers}
import patchless.Patch
import shapeless.record.Record
class ConfigurablePatchJsonSpec extends FreeSpec with Matchers {
"Configurable decoder" - {
"Normal" in {
import io.circe.generic.extras.defaults._
case class Foo(aString: String, bInt: Int, cBoolean: Boolean)
def parsePatch(str: String) =
parse(str).valueOr(throw _).as[Patch[Foo]].valueOr(throw _)
val aPatched = parsePatch("""{"aString": "patched"}""")
val bPatched = parsePatch("""{"bInt": 22}""")
val cPatched = parsePatch("""{"cBoolean": false}""")
aPatched.updates shouldEqual Record(aString = Some("patched"), bInt = None, cBoolean = None)
bPatched.updates shouldEqual Record(aString = None, bInt = Some(22), cBoolean = None)
cPatched.updates shouldEqual Record(aString = None, bInt = None, cBoolean = Some(false))
}
"Snake case" in {
implicit val configuration = Configuration.default.withSnakeCaseMemberNames
case class Foo(aString: String, bInt: Int, cBoolean: Boolean)
def parsePatch(str: String) =
parse(str).valueOr(throw _).as[Patch[Foo]].valueOr(throw _)
val aPatched = parsePatch("""{"a_string": "patched"}""")
val bPatched = parsePatch("""{"b_int": 22}""")
val cPatched = parsePatch("""{"c_boolean": false}""")
aPatched.updates shouldEqual Record(aString = Some("patched"), bInt = None, cBoolean = None)
bPatched.updates shouldEqual Record(aString = None, bInt = Some(22), cBoolean = None)
cPatched.updates shouldEqual Record(aString = None, bInt = None, cBoolean = Some(false))
}
"Options" in {
import io.circe.generic.extras.defaults._
case class Foo(aString: Option[String])
def parsePatch(str: String) =
parse(str).valueOr(throw _).as[Patch[Foo]].valueOr(throw _)
val aPatchedSome = parsePatch("""{"aString": "patched"}""")
val aPatchedNone = parsePatch("""{"aString": null}""")
aPatchedSome.updates shouldEqual Record(aString = Some(Some("patched")))
aPatchedNone.updates shouldEqual Record(aString = Some(None))
}
}
}
|
jeremyrsmith/patchless
|
patchless-circe/src/test/scala/patchless/circe/extras/ConfigurablePatchJsonSpec.scala
|
Scala
|
apache-2.0
| 2,327 |
package doobie.contrib.postgresql
import doobie.imports._
import doobie.contrib.postgresql.pgtypes._
import java.net.InetAddress
import java.util.UUID
import java.util.concurrent.atomic.AtomicInteger
import org.postgis._
import org.postgresql.util._
import org.postgresql.geometric._
import org.specs2.mutable.Specification
import scalaz.concurrent.Task
import scalaz.{ Maybe, \/- }
// Establish that we can write and read various types.
object pgtypesspec extends Specification {
val xa = DriverManagerTransactor[Task](
"org.postgresql.Driver",
"jdbc:postgresql:world",
"postgres", ""
)
def inOut[A: Atom](col: String, a: A) =
for {
_ <- Update0(s"CREATE TEMPORARY TABLE TEST (value $col)", None).run
a0 <- Update[A](s"INSERT INTO TEST VALUES (?)", None).withUniqueGeneratedKeys[A]("value")(a)
} yield (a0)
def testInOut[A](col: String, a: A)(implicit m: Meta[A]) =
s"Mapping for $col as ${m.scalaType}" >> {
s"write+read $col as ${m.scalaType}" in {
        inOut(col, a).transact(xa).attemptRun must_== \/-(a)
      }
      s"write+read $col as Option[${m.scalaType}] (Some)" in {
        inOut[Option[A]](col, Some(a)).transact(xa).attemptRun must_== \/-(Some(a))
      }
      s"write+read $col as Option[${m.scalaType}] (None)" in {
        inOut[Option[A]](col, None).transact(xa).attemptRun must_== \/-(None)
      }
      s"write+read $col as Maybe[${m.scalaType}] (Just)" in {
        inOut[Maybe[A]](col, Maybe.just(a)).transact(xa).attemptRun must_== \/-(Maybe.Just(a))
      }
      s"write+read $col as Maybe[${m.scalaType}] (Empty)" in {
        inOut[Maybe[A]](col, Maybe.empty[A]).transact(xa).attemptRun must_== \/-(Maybe.Empty())
}
}
def testInOutNN[A](col: String, a: A)(implicit m: Atom[A]) =
s"Mapping for $col as ${m.meta._1.scalaType}" >> {
s"write+read $col as ${m.meta._1.scalaType}" in {
        inOut(col, a).transact(xa).attemptRun must_== \/-(a)
}
}
def skip(col: String, msg: String = "not yet implemented") =
s"Mapping for $col" >> {
"PENDING:" in pending(msg)
}
// 8.1 Numeric Types
testInOut[Short]("smallint", 123)
testInOut[Int]("integer", 123)
testInOut[Long]("bigint", 123)
testInOut[BigDecimal]("decimal", 123)
testInOut[BigDecimal]("numeric", 123)
testInOut[Float]("real", 123.45f)
testInOut[Double]("double precision", 123.45)
// 8.2 Monetary Types
skip("pgmoney", "getObject returns Double")
// 8.3 Character Types"
testInOut("character varying", "abcdef")
testInOut("varchar", "abcdef")
testInOut("character(6)", "abcdef")
testInOut("char(6)", "abcdef")
testInOut("text", "abcdef")
// 8.4 Binary Types
testInOut[List[Byte]] ("bytea", BigInt("DEADBEEF",16).toByteArray.toList)
testInOut[Vector[Byte]]("bytea", BigInt("DEADBEEF",16).toByteArray.toVector)
// 8.5 Date/Time Types"
testInOut("timestamp", new java.sql.Timestamp(System.currentTimeMillis))
skip("timestamp with time zone")
testInOut("date", new java.sql.Date(4,5,6))
testInOut("time", new java.sql.Time(3,4,5))
skip("time with time zone")
skip("interval")
// 8.6 Boolean Type
testInOut("boolean", true)
// 8.7 Enumerated Types
// create type myenum as enum ('foo', 'bar') <-- part of setup
object MyEnum extends Enumeration { val foo, bar = Value }
// as scala.Enumeration
implicit val MyEnumMeta = pgEnum(MyEnum, "myenum")
testInOutNN("myenum", MyEnum.foo)
// as java.lang.Enum
implicit val MyJavaEnumMeta = pgJavaEnum[MyJavaEnum]("myenum")
testInOutNN("myenum", MyJavaEnum.bar)
// 8.8 Geometric Types
testInOut("box", new PGbox(new PGpoint(1, 2), new PGpoint(3, 4)))
testInOut("circle", new PGcircle(new PGpoint(1, 2), 3))
testInOut("lseg", new PGlseg(new PGpoint(1, 2), new PGpoint(3, 4)))
testInOut("path", new PGpath(Array(new PGpoint(1, 2), new PGpoint(3, 4)), false))
testInOut("path", new PGpath(Array(new PGpoint(1, 2), new PGpoint(3, 4)), true))
testInOut("point", new PGpoint(1, 2))
testInOut("polygon", new PGpolygon(Array(new PGpoint(1, 2), new PGpoint(3, 4))))
skip("line", "doc says \\"not fully implemented\\"")
// 8.9 Network Address Types
testInOut("inet", InetAddress.getByName("123.45.67.8"))
skip("inet", "no suitable JDK type")
skip("macaddr", "no suitable JDK type")
// 8.10 Bit String Types
skip("bit")
skip("bit varying")
// 8.11 Text Search Types
skip("tsvector")
skip("tsquery")
// 8.12 UUID Type
testInOut("uuid", UUID.randomUUID)
// 8.13 XML Type
skip("xml")
// 8.14 JSON Type
skip("json")
// 8.15 Arrays
skip("bit[]", "Requires a cast")
skip("smallint[]", "always comes back as Array[Int]")
testInOut("integer[]", List[Int](1,2))
testInOut("bigint[]", List[Long](1,2))
testInOut("real[]", List[Float](1.2f, 3.4f))
testInOut("double precision[]", List[Double](1.2, 3.4))
testInOut("varchar[]", List[String]("foo", "bar"))
// 8.16 Composite Types
skip("composite")
// 8.17 Range Types
skip("int4range")
skip("int8range")
skip("numrange")
skip("tsrange")
skip("tstzrange")
skip("daterange")
skip("custom")
// PostGIS geometry types
// Random streams of geometry values
lazy val rnd: Iterator[Double] = Stream.continually(util.Random.nextDouble).iterator
lazy val pts: Iterator[Point] = Stream.continually(new Point(rnd.next, rnd.next)).iterator
lazy val lss: Iterator[LineString] = Stream.continually(new LineString(Array(pts.next, pts.next, pts.next))).iterator
lazy val lrs: Iterator[LinearRing] = Stream.continually(new LinearRing({ lazy val p = pts.next; Array(p, pts.next, pts.next, pts.next, p) })).iterator
lazy val pls: Iterator[Polygon] = Stream.continually(new Polygon(lras.next)).iterator
// Streams of arrays of random geometry values
lazy val ptas: Iterator[Array[Point]] = Stream.continually(Array(pts.next, pts.next, pts.next)).iterator
lazy val plas: Iterator[Array[Polygon]] = Stream.continually(Array(pls.next, pls.next, pls.next)).iterator
lazy val lsas: Iterator[Array[LineString]] = Stream.continually(Array(lss.next, lss.next, lss.next)).iterator
lazy val lras: Iterator[Array[LinearRing]] = Stream.continually(Array(lrs.next, lrs.next, lrs.next)).iterator
// All these types map to `geometry`
def testInOutGeom[A <: Geometry: Meta](a: A) =
testInOut[A]("geometry", a)
testInOutGeom[Geometry](pts.next)
testInOutGeom[ComposedGeom](new MultiLineString(lsas.next))
testInOutGeom[GeometryCollection](new GeometryCollection(Array(pts.next, lss.next)))
testInOutGeom[MultiLineString](new MultiLineString(lsas.next))
testInOutGeom[MultiPolygon](new MultiPolygon(plas.next))
testInOutGeom[PointComposedGeom](lss.next)
testInOutGeom[LineString](lss.next)
testInOutGeom[MultiPoint](new MultiPoint(ptas.next))
testInOutGeom[Polygon](pls.next)
testInOutGeom[Point](pts.next)
}
|
jamescway/doobie
|
contrib/postgresql/src/test/scala/doobie/contrib/postgresql/pgtypes.scala
|
Scala
|
mit
| 6,885 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.regression
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTestingUtils}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{DataFrame, Row}
class IsotonicRegressionSuite
extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
import testImplicits._
private def generateIsotonicInput(labels: Seq[Double]): DataFrame = {
labels.zipWithIndex.map { case (label, i) => (label, i.toDouble, 1.0) }
.toDF("label", "features", "weight")
}
private def generatePredictionInput(features: Seq[Double]): DataFrame = {
features.map(Tuple1.apply).toDF("features")
}
test("isotonic regression predictions") {
val dataset = generateIsotonicInput(Seq(1, 2, 3, 1, 6, 17, 16, 17, 18))
val ir = new IsotonicRegression().setIsotonic(true)
val model = ir.fit(dataset)
val predictions = model
.transform(dataset)
.select("prediction").rdd.map { case Row(pred) =>
pred
}.collect()
assert(predictions === Array(1, 2, 2, 2, 6, 16.5, 16.5, 17, 18))
assert(model.boundaries === Vectors.dense(0, 1, 3, 4, 5, 6, 7, 8))
assert(model.predictions === Vectors.dense(1, 2, 2, 6, 16.5, 16.5, 17.0, 18.0))
assert(model.getIsotonic)
}
test("antitonic regression predictions") {
val dataset = generateIsotonicInput(Seq(7, 5, 3, 5, 1))
val ir = new IsotonicRegression().setIsotonic(false)
val model = ir.fit(dataset)
val features = generatePredictionInput(Seq(-2.0, -1.0, 0.5, 0.75, 1.0, 2.0, 9.0))
val predictions = model
.transform(features)
.select("prediction").rdd.map {
case Row(pred) => pred
}.collect()
assert(predictions === Array(7, 7, 6, 5.5, 5, 4, 1))
}
test("params validation") {
val dataset = generateIsotonicInput(Seq(1, 2, 3))
val ir = new IsotonicRegression
ParamsSuite.checkParams(ir)
val model = ir.fit(dataset)
ParamsSuite.checkParams(model)
}
test("default params") {
val dataset = generateIsotonicInput(Seq(1, 2, 3))
val ir = new IsotonicRegression()
assert(ir.getLabelCol === "label")
assert(ir.getFeaturesCol === "features")
assert(ir.getPredictionCol === "prediction")
assert(!ir.isDefined(ir.weightCol))
assert(ir.getIsotonic)
assert(ir.getFeatureIndex === 0)
val model = ir.fit(dataset)
// copied model must have the same parent.
MLTestingUtils.checkCopy(model)
model.transform(dataset)
.select("label", "features", "prediction", "weight")
.collect()
assert(model.getLabelCol === "label")
assert(model.getFeaturesCol === "features")
assert(model.getPredictionCol === "prediction")
assert(!model.isDefined(model.weightCol))
assert(model.getIsotonic)
assert(model.getFeatureIndex === 0)
assert(model.hasParent)
}
test("set parameters") {
val isotonicRegression = new IsotonicRegression()
.setIsotonic(false)
.setWeightCol("w")
.setFeaturesCol("f")
.setLabelCol("l")
.setPredictionCol("p")
assert(!isotonicRegression.getIsotonic)
assert(isotonicRegression.getWeightCol === "w")
assert(isotonicRegression.getFeaturesCol === "f")
assert(isotonicRegression.getLabelCol === "l")
assert(isotonicRegression.getPredictionCol === "p")
}
test("missing column") {
val dataset = generateIsotonicInput(Seq(1, 2, 3))
intercept[IllegalArgumentException] {
new IsotonicRegression().setWeightCol("w").fit(dataset)
}
intercept[IllegalArgumentException] {
new IsotonicRegression().setFeaturesCol("f").fit(dataset)
}
intercept[IllegalArgumentException] {
new IsotonicRegression().setLabelCol("l").fit(dataset)
}
intercept[IllegalArgumentException] {
new IsotonicRegression().fit(dataset).setFeaturesCol("f").transform(dataset)
}
}
test("vector features column with feature index") {
val dataset = Seq(
(4.0, Vectors.dense(0.0, 1.0)),
(3.0, Vectors.dense(0.0, 2.0)),
(5.0, Vectors.sparse(2, Array(1), Array(3.0)))
).toDF("label", "features")
val ir = new IsotonicRegression()
.setFeatureIndex(1)
val model = ir.fit(dataset)
val features = generatePredictionInput(Seq(2.0, 3.0, 4.0, 5.0))
val predictions = model
.transform(features)
.select("prediction").rdd.map {
case Row(pred) => pred
}.collect()
assert(predictions === Array(3.5, 5.0, 5.0, 5.0))
}
test("read/write") {
val dataset = generateIsotonicInput(Seq(1, 2, 3, 1, 6, 17, 16, 17, 18))
def checkModelData(model: IsotonicRegressionModel, model2: IsotonicRegressionModel): Unit = {
assert(model.boundaries === model2.boundaries)
assert(model.predictions === model2.predictions)
assert(model.isotonic === model2.isotonic)
}
val ir = new IsotonicRegression()
testEstimatorAndModelReadWrite(ir, dataset, IsotonicRegressionSuite.allParamSettings,
checkModelData)
}
test("should support all NumericType labels and not support other types") {
val ir = new IsotonicRegression()
MLTestingUtils.checkNumericTypes[IsotonicRegressionModel, IsotonicRegression](
ir, spark, isClassification = false) { (expected, actual) =>
assert(expected.boundaries === actual.boundaries)
assert(expected.predictions === actual.predictions)
}
}
}
object IsotonicRegressionSuite {
/**
* Mapping from all Params to valid settings which differ from the defaults.
* This is useful for tests which need to exercise all Params, such as save/load.
* This excludes input columns to simplify some tests.
*/
val allParamSettings: Map[String, Any] = Map(
"predictionCol" -> "myPrediction",
"isotonic" -> true,
"featureIndex" -> 0
)
}
|
Panos-Bletsos/spark-cost-model-optimizer
|
mllib/src/test/scala/org/apache/spark/ml/regression/IsotonicRegressionSuite.scala
|
Scala
|
apache-2.0
| 6,750 |
package kafka.console
package object app extends Container with Data
|
shnapz/service
|
core/src/main/scala/kafka/console/app/package.scala
|
Scala
|
apache-2.0
| 70 |
package net.nikore.etcd.apis
import akka.actor.ActorRefFactory
import net.nikore.etcd.EtcdJsonProtocol._
import spray.client.pipelining._
import spray.http.StatusCodes.ClientError
import spray.http._
import spray.httpx.SprayJsonSupport._
import scala.concurrent.Future
/**
* Defines a API mix-in trait for the EtcD "Members" API
*
* @author Liam Haworth
*/
trait MembersAPI {
/**
   * Defines the actor system under which all Http requests will be spawned
*/
implicit val system: ActorRefFactory
/**
* Defines the base URI used when building requests to the API server
*/
protected val connectionURI: String
/**
* Imports the Akka event dispatcher used by Spray
*/
import system.dispatcher
/**
* Returns a list of EtcD members
*
* @return EtcdMember list
*/
def listMembers: Future[List[EtcdMember]] = {
val pipeline: HttpRequest => Future[List[EtcdMember]] = (
sendReceive
~> mapRequestErrors
~> unmarshal[List[EtcdMember]]
)
pipeline(Get(s"$connectionURI/v2/members"))
}
/**
* Informs the EtcD server of a new
* peer to add to the cluster
*
* @param peerURLs A list of peer URLs for the new member
* @param name The name of the new peer
   * @return The EtcdMember with its ID as assigned by the cluster
*/
def addNewMember(peerURLs: List[String], name: Option[String] = None): Future[EtcdMember] = {
if(peerURLs.isEmpty)
throw new Exception("The list of peer URLs provided is empty, a new member must have at least 1 peer URL")
val pipeline: HttpRequest => Future[EtcdMember] = (
sendReceive
~> mapRequestErrors
~> unmarshal[EtcdMember]
)
pipeline(
Post(
s"$connectionURI/v2/members",
// NOTE: Manual fix for Etcd bug #6433 while we wait for the fix to be released in stable CoreOS
HttpEntity(
MediaType.custom("application", "json", binary = true),
EtcdMember(name = name, peerURLs = peerURLs).toJson.toString().getBytes()
)
)
)
}
/**
* Informs the EtcD cluster to remove
* and delete a member
*
* @param memberID The ID of the EtcdMember to remove
* @return Returns the HttpResponse from the API server
*/
def deleteMember(memberID: String): Future[HttpResponse] = {
val pipeline: HttpRequest => Future[HttpResponse] = (
sendReceive
~> mapRequestErrors
)
pipeline(Delete(s"$connectionURI/v2/members/$memberID"))
}
/**
* Updates the peer URLs for an existing member
*
* @param member The member to update
* @param peerURLs A list of peer URLs to update the member listing with
* @return Returns the HttpResponse from the API server
*/
def updateMemberPeerURLs(member: EtcdMember, peerURLs: List[String]): Future[HttpResponse] = {
if(peerURLs.isEmpty)
throw new Exception("The list of peer URLs provided is empty, a new member must have at least 1 peer URL")
val pipeline: HttpRequest => Future[HttpResponse] = (
sendReceive
~> mapRequestErrors
)
pipeline(
Put(
s"$connectionURI/v2/members/${member.id}",
// NOTE: Manual fix for Etcd bug #6433 while we wait for the fix to be released in stable CoreOS
HttpEntity(
MediaType.custom("application", "json", binary = true),
member.copy(peerURLs = peerURLs).toJson.toString().getBytes()
)
)
)
}
/**
   * Provides a simple way of handling errors returned from the API server
*/
private val mapRequestErrors = (response: HttpResponse) => {
if (response.status.isSuccess)
response
else
      // Specific status codes must come before the general ClientError case,
      // which would otherwise shadow them and make them unreachable.
      response.status match {
        case StatusCodes.BadRequest =>
          throw new Exception("The request was malformed or missing fields, please ensure everything is correct in your request")
        case StatusCodes.Conflict =>
          throw new Exception("There was a conflict when executing the request, please ensure the peer does not already exist in the cluster")
        case StatusCodes.InternalServerError =>
          throw new Exception("The API server encountered an internal error when processing the request")
        case ClientError(_) =>
          throw new Exception(response.entity.asString)
        case other =>
          throw new Exception(s"Unexpected response status $other: " + response.entity.asString)
      }
case ClientError(error) =>
throw new Exception(response.entity.asString)
case StatusCodes.BadRequest =>
throw new Exception("The request was malformed or missing fields, please ensure everything is correct in your request")
case StatusCodes.Conflict =>
throw new Exception("There was a conflict when executing the request, please ensure peer hasn't exited in the cluster before")
case StatusCodes.InternalServerError =>
throw new Exception("The API server encountered an internal error when processing the request")
}
}
}
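// Hedged usage sketch (not part of the original source). The concrete class,
// actor system name, and endpoint address below are illustrative assumptions.
object MembersAPIUsageSketch {
  import akka.actor.ActorSystem
  import scala.concurrent.ExecutionContext.Implicits.global

  class EtcdMembersClient(host: String, port: Int) extends MembersAPI {
    // An ActorSystem satisfies the trait's ActorRefFactory requirement.
    override implicit val system: ActorRefFactory = ActorSystem("etcd-members-sketch")
    override protected val connectionURI: String = s"http://$host:$port"
  }

  def demo(): Unit = {
    val client = new EtcdMembersClient("127.0.0.1", 2379)
    client.listMembers.foreach(ms => println(s"cluster has ${ms.size} members"))
  }
}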
|
LiamHaworth/scala-etcd
|
src/main/scala/net/nikore/etcd/apis/MembersAPI.scala
|
Scala
|
apache-2.0
| 4,333 |
package org.odfi.indesign.module.inative.cmake
import org.odfi.indesign.core.harvest.Harvester
import org.odfi.indesign.core.harvest.fs.HarvestedFile
import java.io.File
/** Recognises harvested directories containing a CMakeLists.txt as CMake projects. */
class CMakeProjectHarvester extends Harvester {
  this.onDeliverFor[HarvestedFile] {
    case folder if (folder.isDirectory && new File(folder.path.toFile, "CMakeLists.txt").exists) =>
      gather(new CMakeProject(folder))
      true
  }
}
class CMakeProject(base:HarvestedFile) extends HarvestedFile(base.path) {
deriveFrom(base)
}
|
opendesignflow/indesign
|
indesign-native/src/main/scala/org/odfi/indesign/module/inative/cmake/CMakeProject.scala
|
Scala
|
gpl-3.0
| 527 |
/*
* Copyright 2014 Lars Edenbrandt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package se.nimsa.sbx.app.routing
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.model.{HttpHeader, RemoteAddress}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{Directive1, Route}
import akka.pattern.ask
import se.nimsa.sbx.app.GeneralProtocol.SourceType.USER
import se.nimsa.sbx.app.GeneralProtocol.{SourceAdded, SourceDeleted, SourceRef}
import se.nimsa.sbx.app.SliceboxBase
import se.nimsa.sbx.user.UserProtocol.{AuthKey, _}
trait UserRoutes { this: SliceboxBase =>
val extractUserAgent: HttpHeader => Option[String] = {
case a: `User-Agent` => Some(a.value)
case _ => None
}
val extractIP: Directive1[RemoteAddress] =
headerValuePF {
case `X-Forwarded-For`(Seq(address, _*)) => address
case `Remote-Address`(address) => address
case `X-Real-Ip`(address) => address
} | provide(RemoteAddress.Unknown)
def extractAuthKey: Directive1[AuthKey] =
if (sessionsIncludeIpAndUserAgent)
(optionalCookie(sessionField) & extractIP & optionalHeaderValue(extractUserAgent)).tmap {
case (cookie, ip, optionalUserAgent) =>
AuthKey(cookie.map(_.value), ip.toOption.map(_.getHostAddress), optionalUserAgent)
}
else
optionalCookie(sessionField).map(cookie => AuthKey(cookie.map(_.value), Some(""), Some("")))
def loginRoute(authKey: AuthKey): Route =
path("users" / "login") {
post {
entity(as[UserPass]) { userPass =>
onSuccess(userService.ask(Login(userPass, authKey))) {
case LoggedIn(_, session) =>
setCookie(HttpCookie(sessionField, value = session.token, path = Some("/api"), httpOnly = true)) {
complete(NoContent)
}
case LoginFailed =>
complete((Unauthorized, "Invalid username or password"))
}
}
}
}
def currentUserRoute(authKey: AuthKey): Route =
path("users" / "current") {
get {
onSuccess(userService.ask(GetAndRefreshUserByAuthKey(authKey)).mapTo[Option[ApiUser]]) { optionalUser =>
optionalUser.map(user => UserInfo(user.id, user.user, user.role)) match {
case Some(userInfo) => complete(userInfo)
case None => complete(NotFound)
}
}
}
}
def userRoutes(apiUser: ApiUser, authKey: AuthKey): Route =
pathPrefix("users") {
pathEndOrSingleSlash {
get {
parameters((
'startindex.as(nonNegativeFromStringUnmarshaller) ? 0,
'count.as(nonNegativeFromStringUnmarshaller) ? 20)) { (startIndex, count) =>
onSuccess(userService.ask(GetUsers(startIndex, count))) {
case Users(users) =>
complete(users)
}
}
} ~ post {
authorize(apiUser.hasPermission(UserRole.ADMINISTRATOR)) {
entity(as[ClearTextUser]) { user =>
val apiUser = ApiUser(-1, user.user, user.role).withPassword(user.password)
onSuccess(userService.ask(AddUser(apiUser))) {
case UserAdded(addedUser) => {
system.eventStream.publish(SourceAdded(SourceRef(USER, addedUser.id)))
complete((Created, addedUser))
}
}
}
}
}
} ~ path(LongNumber) { userId =>
delete {
authorize(apiUser.hasPermission(UserRole.ADMINISTRATOR)) {
onSuccess(userService.ask(DeleteUser(userId))) {
case UserDeleted(_) => {
system.eventStream.publish(SourceDeleted(SourceRef(USER, userId)))
complete(NoContent)
}
}
}
}
} ~ path("logout") {
post {
onSuccess(userService.ask(Logout(apiUser, authKey))) {
case LoggedOut =>
deleteCookie(sessionField, path = "/api") {
complete(NoContent)
}
}
}
}
}
}
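// Hedged composition sketch (not from this file): the route builders above are
// intended to be combined roughly as
//   extractAuthKey { authKey =>
//     loginRoute(authKey) ~ currentUserRoute(authKey) ~
//       authenticatedUser(authKey) { apiUser => userRoutes(apiUser, authKey) }
//   }
// where authenticatedUser is an assumed directive that resolves the session
// cookie to an ApiUser before the authenticated routes run.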
|
slicebox/slicebox
|
src/main/scala/se/nimsa/sbx/app/routing/UserRoutes.scala
|
Scala
|
apache-2.0
| 4,650 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.core.mr
import kumoi.shell.aaa._
import kumoi.shell.event._
import kumoi.shell.mr._
import kumoi.core.classloader._
import kumoi.core.log._
import kumoi.core._
import kumoi.core.or._
import kumoi.core.Shared._
import kumoi.impl.group._
import scala.actors._
import scala.actors.Actor._
import scala.actors.remote._
import scala.actors.remote.RemoteActor._
import scala.util.Random
import java.net.URL
import kumoi.core.classloader.RemoteClassLoader
/**
* A parallel skeleton worker.
*
* @author Akiyoshi SUGIKI
*/
class DWorker(val port: Int, cloader: RemoteClassLoader) extends ORObject[Worker] with Worker {
def this(cl: RemoteClassLoader) = this(DefaultPort, cl)
private val logging = Logging("DWORKER")
private var masters = List[Node]()
private var worker: Actor = null
private var jobs = Map[Long, OutputChannel[Any]]()
//private var wid = 0L
def tasks(implicit auth: AAA) = {
List()
/*
worker !? DistWorkerGetTasks match {
case DistWorkerTasks(tasks) => tasks
case _ => List()
}
*/
}
protected def genEvent(e: Exception) = WorkerError(e)
def start() = {
//logging.config("start()")
//logging.config("port=" + port + ", name=" + workerName)
//logging.config("classLoader=" + classLoader)
worker = actor {
//RemoteActor.classLoader = cloader
alive(port)
register(WorkerName, self)
loop {
receive {
case DistAddURL(url) =>
logging.debug("addURL " + url)
masters = Node(url.getHost, url.getPort) :: masters
cloader.addURL(url)
// sender ! DistResult(ok)
case DistRemoveURL(url) =>
cloader.removeURL(url)
case DistParallel(no, op, serial, timeout, auth) =>
logging.debug("DistParallel")
/*
val result = process(no, op, serial, auth)
logging.debug("result " + result)
reply(result)
*/
/*
val task = createTask()
jobs += (serial -> sender)
task ! DistWorkerTask(no, op, serial, timeout, auth)
*/
val res = process(no, op, serial, auth)
logging.debug("worker result=" + res)
reply(res)
/*
res match {
case succ: DistResult[_] => reply(DistWorkerCompleted(succ, serial))
case failed: DistFailed => reply(DistWorkerFailed(failed, serial))
}
*/
case DistAbort(no, serial) =>
logging.warn("ABORT is not implemented " + no + ":" + serial)
case DistWorkerCompleted(res, serial) =>
jobs.get(serial) match {
case Some(master) =>
jobs -= serial
master ! res
case None =>
logging.warn("Compeleted() not found")
}
case DistWorkerFailed(res, serial) =>
jobs.get(serial) match {
case Some(master) =>
jobs -= serial
master ! res
case None =>
logging.warn("Compeleted() not found")
}
case DistWorkerGetTasks =>
reply(DistWorkerTasks(jobs.toList.map(j => WorkerTask(j._1))))
case DistExit(reason) =>
exit(reason)
case m =>
logging.warn("unknown message - " + m)
}
}
}
}
private def createTask() = {
actor {
receive {
case DistWorkerTask(no, op, serial, timeout, auth) =>
val res = process(no, op, serial, auth)
logging.debug("worker result=" + res)
res match {
case succ: DistResult[_] => reply(DistWorkerCompleted(succ, serial))
case failed: DistFailed => reply(DistWorkerFailed(failed, serial))
}
}
}
}
private def process(no: Int, op: DistRequest, serial: Long, auth: AAA) = {
type AnyTuple = (Any, Any)
try {
op match {
case DistMap(l, f) =>
val res = l.foldLeft { (List[Any](), List[AnyTuple]()) }{ (b, a) =>
//logging.debug("***** FIELDS = " + f.getClass.getDeclaredFields.toList)
//logging.debug("***** METHODS = " + f.getClass.getDeclaredMethods.toList)
try { (f(a) :: b._1, b._2) } catch {
case e: Exception => logging.debug("***** EXCEPTION *****\\n" + e.getStackTraceString); (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB map() res=" + res)
DistResult(no, res._1.reverse.toList, res._2.reverse.toList, serial)
case DistReduceLeft(l, f, _) =>
l match {
case h :: rest =>
val res = l.foldLeft { (h, List[AnyTuple]()) } { (b, a) =>
try { (f(b._1, a), b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB reduceLeft() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case List() => DistResult(no, List(), List(), serial)
}
case DistReduceRight(l, f, _) =>
l match {
case h :: rest =>
val res = l.foldRight { (h, List[AnyTuple]()) } { (a, b) =>
try { (f(a, b._1), b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB reduceRight() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case List() => DistResult(no, List(), List(), serial)
}
case DistForeach(l, f) =>
val res = l.foldLeft { List[AnyTuple]() } { (b, a) =>
try { f(a); b } catch {
case e: Exception => (a, e) :: b
}
}
logging.debug("FOB foreach() res=" + res)
DistResult(no, List('ok), res.reverse.toList, serial)
case DistExists(l, f) =>
val res = l.foldLeft { (false, List[AnyTuple]()) } { (b, a) =>
try { (f(a) || b._1, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB exists() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case DistFilter(l, f) =>
val res = l.foldLeft { (List[Any](), List[AnyTuple]()) } { (b, a) =>
try { ({ if (f(a)) a :: b._1 else b._1}, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB filter() res=" + res)
DistResult(no, res._1.reverse.toList, res._2.reverse.toList, serial) // reverse
case DistForall(l, f) =>
val res = l.foldLeft { (true, List[AnyTuple]()) } { (b, a) =>
try { (f(a) && b._1, b._2) } catch {
case e: Exception => (false, (a, e) :: b._2) // TODO: Is this right?
}
}
logging.debug("FOB forall() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
case DistCount(l, f) =>
val res = l.foldLeft { (0, List[AnyTuple]()) } { (b, a) =>
try { (b._1 + { if (f(a)) 1 else 0 }, b._2) } catch {
case e: Exception => (b._1, (a, e) :: b._2)
}
}
logging.debug("FOB count() res=" + res)
DistResult(no, List(res._1), res._2.reverse.toList, serial)
}
} catch {
case e: Exception =>
logging.warn("unreacheable?")
DistFailed(no, e, serial)
}
}
/*
def process(no: Int, op: DistRequest, serial: Long, auth: AAA) = {
type AnyTuple = (Any, Any)
//try {
op match {
case DistMap(l, f) =>
//val g = (a: Any) => try { Some(f(a)) } catch { case e: Exception => None }
val r = l.par.map(f)
logging.debug("map " + r)
DistResult(no, r.toList, List(), serial)
case DistReduceLeft(l, f, _) =>
//val g1 = (b: Any, a: Any) => try { f(b, a) } catch { case e: Exception => b }
val r = l.par.reduceLeft(f)
logging.debug("reduceLeft " + r)
DistResult(no, List(r), List(), serial)
case DistReduceRight(l, f, _) =>
//val g2= (a: Any, b: Any) => try { f(a, b) } catch { case e: Exception => b }
val r = l.par.reduceRight(f)
logging.debug("reduceRight " + r)
DistResult(no, List(r), List(), serial)
case DistForeach(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => }
l.par.foreach(f)
logging.debug("foreach")
DistResult(no, List('ok), List(), serial)
case DistExists(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.exists(f)
logging.debug("exists " + r)
DistResult(no, List(r), List(), serial)
case DistFilter(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.filter(f)
logging.debug("filter " + r)
DistResult(no, r.toList, List(), serial)
case DistForall(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.forall(f)
logging.debug("forall " + r)
DistResult(no, List(r), List(), serial)
case DistCount(l, f) =>
//val g = (a: Any) => try { f(a) } catch { case e: Exception => false }
val r = l.par.count(f)
logging.debug("count " + r)
DistResult(no, List(r), List(), serial)
}
//} catch {
// case e: Exception => DistFailed(no, e, serial)
//}
}
*/
/*
class WorkerActor(no: Int, op: DistRequest, serial: Long, aaa: AAA) extends Actor {
def act() {
//implicit def auth: AAA = { aaa }
//alive(port)
//register(Symbol(WorkerName + wid.toString), self)
}
}*/
//protected def op[A](block: => A) = try { block } catch { case e: Exception => throw e }
def shutdown() {
worker ! DistExit('normal)
}
def gc(members: List[PV]) {
if (worker != null) {
val nm = members.map(m => (m.pm.getHostName, m.pm.getPort))
val failed = masters.diff(nm)
if (!failed.isEmpty) {
for (Node(host, port) <- failed) worker ! DistRemoveURL(new URL("http://" + host + ":" + port + "/"))
}
}
}
}
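// Hedged illustration (not in the original source): the fold-with-failures
// pattern that process() applies per operation, isolated as a small helper.
// It applies f to every element, keeping successful results on one side and
// (input, exception) pairs on the other, both in input order.
object FoldWithFailuresSketch {
  def mapCollectingFailures[A, B](xs: List[A])(f: A => B): (List[B], List[(A, Throwable)]) = {
    val (oks, fails) = xs.foldLeft((List.empty[B], List.empty[(A, Throwable)])) { (acc, a) =>
      try (f(a) :: acc._1, acc._2)
      catch { case e: Exception => (acc._1, (a, e) :: acc._2) }
    }
    (oks.reverse, fails.reverse)
  }
}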
|
axi-sugiki/kumoi
|
src/kumoi/core/mr/DWorker.scala
|
Scala
|
apache-2.0
| 10,246 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.core.server
/**
* Indicates an issue with starting a server, e.g. a problem reading its
* configuration.
*/
final case class ServerStartException(message: String, cause: Option[Throwable] = None)
extends Exception(message, cause.orNull)
|
benmccann/playframework
|
transport/server/play-server/src/main/scala/play/core/server/ServerStartException.scala
|
Scala
|
apache-2.0
| 326 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.schemas.instances
trait AllInstances
extends ScalaInstances
with JavaInstances
with JodaInstances
with AvroInstances
with LowPrioritySchemaDerivation
|
spotify/scio
|
scio-core/src/main/scala/com/spotify/scio/schemas/instances/AllInstances.scala
|
Scala
|
apache-2.0
| 796 |
package models.base
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.EitherT._
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import cache._
import db._
import io.megam.auth.funnel.FunnelErrors._
import controllers.Constants._
import io.megam.common.uid.UID
import io.megam.util.Time
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.nio.charset.Charset
import io.megam.auth.stack.MasterKeyResult
import java.util.UUID
import com.datastax.driver.core.{ ResultSet, Row }
import com.websudos.phantom.dsl._
import scala.concurrent.{ Future ⇒ ScalaFuture }
import com.websudos.phantom.connectors.{ ContactPoint, KeySpaceDef }
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.annotation.tailrec
import controllers.stack.ImplicitJsonFormats
/**
* @author rajthilak
*/
case class MasterKeysInput(key: String) {
val half_json = "\"key\":\"" + key + "\""
val json = "{" + half_json + "}"
}
sealed class MasterKeysSacks extends CassandraTable[MasterKeysSacks, MasterKeyResult] with ImplicitJsonFormats {
object id extends StringColumn(this) with PrimaryKey[String]
object key extends StringColumn(this)
object created_at extends StringColumn(this)
def fromRow(row: Row): MasterKeyResult = {
MasterKeyResult(
id(row),
key(row),
created_at(row))
}
}
abstract class ConcreteMasterKeys extends MasterKeysSacks with RootConnector {
override lazy val tableName = "master_keys"
override implicit def space: KeySpace = scyllaConnection.space
override implicit def session: Session = scyllaConnection.session
def insertNewRecord(ams: MasterKeyResult): ValidationNel[Throwable, ResultSet] = {
val res = insert.value(_.id, ams.id)
.value(_.key, ams.key)
.value(_.created_at, ams.created_at)
.future()
Await.result(res, 5.seconds).successNel
}
def getRecord(id: String): ValidationNel[Throwable, Option[MasterKeyResult]] = {
val res = select.allowFiltering().where(_.id eqs id).one()
Await.result(res, 5.seconds).successNel
}
}
object MasterKeys extends ConcreteMasterKeys {
private def mkMasterKeysSack(input: String): ValidationNel[Throwable, Option[MasterKeyResult]] = {
val ripNel: ValidationNel[Throwable, MasterKeysInput] = (Validation.fromTryCatchThrowable[models.base.MasterKeysInput, Throwable] {
parse(input).extract[MasterKeysInput]
} leftMap { t: Throwable ⇒ new MalformedBodyError(input, t.getMessage) }).toValidationNel //capture failure
for {
rip ← ripNel
} yield {
val res = MasterKeyResult("1", rip.key, Time.now.toString)
res.some
}
}
def create(input: String): ValidationNel[Throwable, Option[MasterKeyResult]] = {
for {
wa ← (mkMasterKeysSack(input) leftMap { err: NonEmptyList[Throwable] ⇒ err })
set ← (insertNewRecord(wa.get) leftMap { t: NonEmptyList[Throwable] ⇒ t })
} yield {
wa
}
}
def findById(id: String): ValidationNel[Throwable, Option[MasterKeyResult]] = {
InMemory[ValidationNel[Throwable, Option[MasterKeyResult]]]({
name: String ⇒
{
play.api.Logger.debug(("%-20s -->[%s]").format("Master key id ->", id))
(getRecord(id) leftMap { t: NonEmptyList[Throwable] ⇒
new ServiceUnavailableError(id, (t.list.map(m ⇒ m.getMessage)).mkString("\n"))
}).toValidationNel.flatMap { xso: Option[MasterKeyResult] ⇒
xso match {
case Some(xs) ⇒ {
Validation.success[Throwable, Option[MasterKeyResult]](xs.some).toValidationNel
}
case None ⇒ Validation.failure[Throwable, Option[MasterKeyResult]](new ResourceItemNotFound(id, "")).toValidationNel
}
}
}
}).get(id).eval(InMemoryCache[ValidationNel[Throwable, Option[MasterKeyResult]]]())
}
implicit val sedimentMasterKey = new Sedimenter[ValidationNel[Throwable, Option[MasterKeyResult]]] {
def sediment(maybeASediment: ValidationNel[Throwable, Option[MasterKeyResult]]): Boolean = {
maybeASediment.isSuccess
}
}
}
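// Hedged usage sketch (not part of the original file); the key value is an
// illustrative assumption. Error rendering mirrors the .list.map idiom used
// in findById above.
object MasterKeysUsageSketch {
  def demo(): Unit = {
    MasterKeys.create(MasterKeysInput("s3cr3t").json) match {
      case Success(Some(mk)) => play.api.Logger.debug("stored master key " + mk.id)
      case Success(None)     => play.api.Logger.debug("nothing was stored")
      case Failure(errs)     => play.api.Logger.warn(errs.list.map(_.getMessage).mkString("\n"))
    }
  }
}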
|
indykish/vertice_gateway
|
app/models/base/MasterKeys.scala
|
Scala
|
mit
| 4,183 |
package autoproxy
import org.scalatest.FunSuite
class ParamDelegationSpec extends FunSuite {
test("can delegate to a raw parameter") {
@delegating class RawParamWrapper(@proxy pivot : Bippy)
val wrapper = new RawParamWrapper(SimpleBippy)
assert(wrapper.bippy(42) === "42")
}
test("can delegate to a val parameter") {
@delegating class ValParamWrapper(@proxy val pivot : Bippy)
val wrapper = new ValParamWrapper(SimpleBippy)
assert(wrapper.bippy(42) === "42")
}
test("can delegate to a var parameter") {
@delegating class VarParamWrapper(@proxy var pivot : Bippy)
val wrapper = new VarParamWrapper(SimpleBippy)
assert(wrapper.bippy(42) === "42")
wrapper.pivot = DoublingBippy
assert(wrapper.bippy(42) === "84")
}
test("can delegate to a val member") {
@delegating class ValMemberWrapper { @proxy val pivot : Bippy = SimpleBippy }
val wrapper = new ValMemberWrapper
assert(wrapper.bippy(42) === "42")
}
test("can delegate to a var member") {
@delegating class VarMemberWrapper { @proxy var pivot : Bippy = SimpleBippy }
val wrapper = new VarMemberWrapper
assert(wrapper.bippy(42) === "42")
wrapper.pivot = DoublingBippy
assert(wrapper.bippy(42) === "84")
}
test("can delegate to a def member") {
@delegating class DefMemberWrapper { @proxy def pivot : Bippy = SimpleBippy }
val wrapper = new DefMemberWrapper
assert(wrapper.bippy(42) === "42")
}
test("can delegate to a val member in a singleton") {
@delegating object ValMemberWrapperObj { @proxy val pivot : Bippy = SimpleBippy }
assert(ValMemberWrapperObj.bippy(42) === "42")
}
test("can delegate to a var member in a singleton") {
@delegating object VarMemberWrapperObj { @proxy var pivot : Bippy = SimpleBippy }
assert(VarMemberWrapperObj.bippy(42) === "42")
VarMemberWrapperObj.pivot = DoublingBippy
assert(VarMemberWrapperObj.bippy(42) === "84")
}
test("can delegate to a def member in a singleton") {
@delegating object DefMemberWrapperObj { @proxy def pivot : Bippy = SimpleBippy }
assert(DefMemberWrapperObj.bippy(42) === "42")
}
test("can delegate to an embedded singleton with selective override") {
@delegating object SmarterProps {
@proxy private[this] object props {
var x: Int = 0
var y: String = ""
}
def y_=(txt: String): Unit = { props.y = txt + " bananas"}
}
assert(SmarterProps.x === 0)
assert(SmarterProps.y === "")
SmarterProps.x = 42
SmarterProps.y = "forty-two"
assert(SmarterProps.x === 42)
assert(SmarterProps.y === "forty-two bananas")
}
}
|
thecoda/autoproxy
|
core/src/test/scala/autoproxy/ParamDelegationSpec.scala
|
Scala
|
apache-2.0
| 2,660 |
/*
* @author Philip Stutz
* @author Sara Magliacane
*
* Copyright 2014 University of Zurich & VU University Amsterdam
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.psl.model
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.signalcollect.util.TestAnnouncements
class GroundedConstraintSpec extends FlatSpec with Matchers with TestAnnouncements {
val anna = Individual("anna")
val bob = Individual("bob")
val carl = Individual("carl")
val demo = Individual("democrats")
val repub = Individual("republicans")
val PCI = Individual("PCI")
val Berlusconi = Individual("Berlusconi")
"GroundedConstraint" should " correctly create a functional constraint with two grounded predicates" in {
// Create the constraint: votes(anna, demo) + votes(anna, repub) = 1
val predicate1 = new Predicate("votes", List(PslClass("_"), PslClass("_")), Set(Functional))
val groundedPredicate1 = new GroundedPredicate(1, predicate1, List (anna, demo), Some(0.3))
val groundedPredicate2 = new GroundedPredicate(2, predicate1, List (anna, repub), None)
val groundedConstraint = new GroundedConstraint(1,1, Functional, List[GroundedPredicate] (groundedPredicate1, groundedPredicate2))
// Create the constraint: 0.3 + votes(anna, repub) = 1
groundedConstraint.computeCoefficientMatrix should be (List(1.0))
groundedConstraint.computeConstant should be (0.7)
groundedConstraint.computeComparator should be ("eq")
groundedConstraint.unboundGroundedPredicates should be (List(groundedPredicate2))
}
"GroundedConstraint" should " correctly create a functional constraint with more grounded predicates" in {
// Create the constraint: votes(anna, demo) + votes(anna, repub) + votes(anna, PCI) + votes (anna, Berlusconi) = 1
val predicate1 = new Predicate("votes", List(PslClass("_"), PslClass("_")), Set(Functional))
val groundedPredicate1 = new GroundedPredicate(1, predicate1, List (anna, demo), Some(0.3))
val groundedPredicate2 = new GroundedPredicate(2, predicate1, List (anna, repub), None)
val groundedPredicate3 = new GroundedPredicate(3, predicate1, List (anna, PCI), None)
val groundedPredicate4 = new GroundedPredicate(4, predicate1, List (anna, Berlusconi), None)
val groundedConstraint = new GroundedConstraint(1, 1, Functional, List[GroundedPredicate] (groundedPredicate1, groundedPredicate2, groundedPredicate3, groundedPredicate4))
// Create the constraint: 0.3 + votes(anna, repub) + votes(anna, PCI) + votes (anna, Berlusconi) = 1
groundedConstraint.computeCoefficientMatrix should be (List(1.0, 1.0, 1.0))
groundedConstraint.computeConstant should be (0.7)
groundedConstraint.computeComparator should be ("eq")
groundedConstraint.unboundGroundedPredicates should be (List(groundedPredicate2, groundedPredicate3, groundedPredicate4))
}
"GroundedConstraint" should " correctly create a always false partial functional constraint with more grounded predicates" in {
// Create the constraint: votes(anna, demo) + votes(anna, repub) + votes(anna, PCI) + votes (anna, Berlusconi) <= 1
val predicate1 = new Predicate("votes", List(PslClass("_"), PslClass("_")), Set(PartialFunctional))
val groundedPredicate1 = new GroundedPredicate(1, predicate1, List (anna, demo), Some(0.3))
val groundedPredicate2 = new GroundedPredicate(2, predicate1, List (anna, repub), None)
val groundedPredicate3 = new GroundedPredicate(3, predicate1, List (anna, PCI), None)
val groundedPredicate4 = new GroundedPredicate(4, predicate1, List (anna, Berlusconi), Some(1.0))
val groundedConstraint = new GroundedConstraint(1, 1, PartialFunctional, List[GroundedPredicate] (groundedPredicate1, groundedPredicate2, groundedPredicate3, groundedPredicate4))
// Create the constraint: 0.3 + votes(anna, repub) + votes(anna, PCI) + 1.0 <= 1
//groundedConstraint.unboundGroundedPredicates should be (List(groundedPredicate2, groundedPredicate3, groundedPredicate4))
groundedConstraint.createOptimizableFunction(1.0) should be (None)
}
"GroundedConstraint" should " correctly create a symmetric constraint" in {
// Create the constraint: votes(anna, demo) - votes(anna, repub) = 0
val predicate1 = new Predicate("votes", List(PslClass("_"), PslClass("_")), Set(Symmetric))
val groundedPredicate1 = new GroundedPredicate(1, predicate1, List (anna, demo), Some(0.3))
val groundedPredicate2 = new GroundedPredicate(2, predicate1, List (anna, repub), None)
val groundedConstraint = new GroundedConstraint(1, 1, Symmetric, List[GroundedPredicate] (groundedPredicate1, groundedPredicate2))
    // After grounding, this reduces to: votes(anna, repub) = 0.3
groundedConstraint.computeCoefficientMatrix should be (List(1.0))
groundedConstraint.computeConstant should be (0.3)
groundedConstraint.computeComparator should be ("eq")
groundedConstraint.unboundGroundedPredicates should be (List(groundedPredicate2))
}
}
|
uzh/fox
|
src/test/scala/com/signalcollect/psl/model/GroundedConstraintSpec.scala
|
Scala
|
apache-2.0
| 5,576 |
package org.jetbrains.plugins.scala
package lang
package psi
package types
import org.jetbrains.plugins.scala.lang.psi.types.api.{TypeSystem, TypeVisitor, ValueType}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType
import org.jetbrains.plugins.scala.project.ProjectContextOwner
import scala.collection.mutable.ArrayBuffer
trait ScType extends ProjectContextOwner {
def typeSystem: TypeSystem = projectContext.typeSystem
private var aliasType: Option[AliasType] = null
final def isAliasType: Option[AliasType] = {
if (aliasType == null) {
aliasType = isAliasTypeInner
}
aliasType
}
private var unpacked: ScType = null
final def unpackedType: ScType = {
if (unpacked == null) {
unpacked = unpackedTypeInner
}
unpacked
}
protected def isAliasTypeInner: Option[AliasType] = None
override final def toString: String = presentableText
def isValue: Boolean
def isFinalType: Boolean = false
def inferValueType: ValueType
protected def unpackedTypeInner: ScType = {
val existingWildcards = ScExistentialType.existingWildcards(this)
val wildcards = new ArrayBuffer[ScExistentialArgument]
val quantified = recursiveVarianceUpdateModifiable[Set[String]](Set.empty, {
case (s: ScExistentialArgument, _, data) if !data.contains(s.name) =>
val name = ScExistentialType.fixExistentialArgumentName(s.name, existingWildcards)
if (!wildcards.exists(_.name == name)) wildcards += ScExistentialArgument(name, s.args, s.lower, s.upper)
(true, ScExistentialArgument(name, s.args, s.lower, s.upper), data)
case (ex: ScExistentialType, _, data) =>
(false, ex, data ++ ex.boundNames)
case (t, _, data) => (false, t, data)
})
if (wildcards.nonEmpty) {
ScExistentialType(quantified, wildcards.toList).simplify()
} else quantified
}
/**
* This method is important for parameters expected type.
* There shouldn't be any abstract type in this expected type.
* todo rewrite with recursiveUpdate method
*/
def removeAbstracts: ScType = this
def equivInner(r: ScType, uSubst: ScUndefinedSubstitutor, falseUndef: Boolean): (Boolean, ScUndefinedSubstitutor) = {
(false, uSubst)
}
class RecursiveUpdateException extends Exception {
override def getMessage: String = "Type mismatch after update method"
}
  /**
    * Use 'update' to replace the appropriate type part with another type.
    * 'update' should return true if the type changed, false otherwise.
    * To just collect info about types (see collectAbstracts), always return false.
    *
    * This default implementation is for types which don't contain other types.
    */
final def recursiveUpdate(update: ScType => (Boolean, ScType), visited: Set[ScType] = Set.empty): ScType = {
update(this) match {
case (true, res) => res
case _ if visited.contains(this) => this
case _ => updateSubtypes(update, visited + this)
}
}
def updateSubtypes(update: ScType => (Boolean, ScType), visited: Set[ScType]): ScType = this
def recursiveVarianceUpdate(update: (ScType, Int) => (Boolean, ScType), variance: Int = 1): ScType = {
recursiveVarianceUpdateModifiable[Unit]((), (tp, v, _) => {
val (newTp, newV) = update(tp, v)
(newTp, newV, ())
}, variance)
}
def recursiveVarianceUpdateModifiable[T](data: T, update: (ScType, Int, T) => (Boolean, ScType, T),
variance: Int = 1): ScType = {
update(this, variance, data) match {
case (true, res, _) => res
case _ => this
}
}
def visitType(visitor: TypeVisitor)
def typeDepth: Int = 1
def presentableText: String = typeSystem.presentableText(this, withPrefix = true)
def canonicalText: String = typeSystem.canonicalText(this)
}
trait NamedType extends ScType {
val name: String
override def presentableText: String = name
override def canonicalText: String = name
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/types/ScType.scala
|
Scala
|
apache-2.0
| 3,968 |
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0, and
* you may not use this file except in compliance with the Apache License
* Version 2.0. You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the Apache License Version 2.0 is distributed on an "AS
* IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
import sbt._
import Keys._
object SnowplowCommonEnrichBuild extends Build {
import Dependencies._
import BuildSettings._
// Configure prompt to show current project.
override lazy val settings = super.settings :+ {
shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
}
// Define our project, with basic project information and library
// dependencies.
lazy val project = Project("snowplow-common-enrich", file("."))
.settings(buildSettings: _*)
.settings(
libraryDependencies ++= Seq(
// Java
Libraries.httpClient,
Libraries.yodaTime,
Libraries.yodaConvert,
Libraries.commonsLang,
Libraries.commonsIo,
Libraries.useragent,
Libraries.jacksonDatabind,
Libraries.jsonValidator,
Libraries.mavenArtifact,
// Scala
Libraries.scalaz7,
Libraries.argonaut,
Libraries.snowplowRawEvent,
Libraries.scalaUtil,
Libraries.refererParser,
Libraries.maxmindIplookups,
Libraries.json4sJackson,
Libraries.json4sScalaz,
Libraries.igluClient,
// Scala (test only)
Libraries.specs2,
Libraries.scalazSpecs2,
Libraries.scalaCheck,
Libraries.commonsCodec
)
)
}
|
1974kpkpkp/snowplow
|
3-enrich/scala-common-enrich/project/SnowplowCommonEnrichBuild.scala
|
Scala
|
apache-2.0
| 2,030 |
package com.atomist.project.archive
import com.atomist.project.edit.ProjectEditor
import com.atomist.project.generate.ProjectGenerator
import com.atomist.rug.runtime._
/**
* Convenience wrapper to hold the different Rug types
*/
object Rugs {
def Empty: Rugs = {
new Rugs(Nil, Nil, Nil, Nil, Nil)
}
}
case class Rugs(
private val _editors: Seq[ProjectEditor],
private val _generators: Seq[ProjectGenerator],
private val _commandHandlers: Seq[CommandHandler],
private val _eventHandlers: Seq[EventHandler],
private val _responseHandlers: Seq[ResponseHandler]
) {
  // sort them on the way out
def editors: Seq[ProjectEditor] = _editors.sortBy(p => p.name)
def generators: Seq[ProjectGenerator] = _generators.sortBy(p => p.name)
def commandHandlers: Seq[CommandHandler] = _commandHandlers.sortBy(p => p.name)
def eventHandlers: Seq[EventHandler] = _eventHandlers.sortBy(p => p.name)
def responseHandlers: Seq[ResponseHandler] = _responseHandlers.sortBy(p => p.name)
def editorNames: Seq[String] = names(editors)
def generatorNames: Seq[String] = names(generators)
def commandHandlerNames: Seq[String] = names(commandHandlers)
def eventHandlerNames: Seq[String] = names(eventHandlers)
def responseHandlerNames: Seq[String] = names(responseHandlers)
def allRugs: Seq[Rug] = editors ++ generators ++ commandHandlers ++ eventHandlers ++ responseHandlers
private def names(rugs: Seq[Rug]): Seq[String] = rugs.map(r => r.name)
override def toString: String = {
val sb = new StringBuilder
sb.append("editors: [")
sb.append(editorNames.mkString(", "))
sb.append("] generators: [")
sb.append(generatorNames.mkString(", "))
sb.append("] event handlers: [")
sb.append(eventHandlerNames.mkString(", "))
sb.append("] command handlers: [")
sb.append(commandHandlerNames.mkString(", "))
sb.append("] response handlers: [")
sb.append(responseHandlerNames.mkString(", "))
sb.append("]")
sb.toString
}
}
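// Hedged sketch (illustrative): Rugs.Empty is the all-empty container, and
// toString renders the per-type name lists built above.
object RugsToStringSketch extends App {
  println(Rugs.Empty) // editors: [] generators: [] event handlers: [] ...
}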
|
atomist/rug
|
src/main/scala/com/atomist/project/archive/Rugs.scala
|
Scala
|
gpl-3.0
| 2,063 |
package example
import diode._
import scalatags.JsDom.all._
class TreeView(root: ModelRO[FileNode], parent: Seq[String], selection: ModelRO[Seq[String]], dispatcher: Dispatcher) {
val id = root().id
val path = parent :+ id
val childSeq = build
// recursively build the tree view
def build = {
root().children.zipWithIndex.map {
case (c, idx) =>
new TreeView(root.zoom(_.children(idx)), path, selection, dispatcher)
}
}
def render: Frag = {
val isSelected = if (selection().nonEmpty && selection().last == id) "active" else ""
def renderName(name: String) =
a(
href := "#",
cls := isSelected,
onclick := { () =>
dispatcher(Select(path))
},
name
)
root() match {
      case Directory(id, name, children) =>
        li(cls := "directory", renderName(name), ul(childSeq.map(_.render)))
      case File(id, name) =>
        li(cls := "file", renderName(name))
}
}
}
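// Hedged wiring sketch (not from this file): rendering the tree from a diode
// circuit. AppCircuit, the model field names, and the mount point are
// illustrative assumptions.
//   val tree = new TreeView(AppCircuit.zoom(_.root), Seq.empty,
//     AppCircuit.zoom(_.selected), AppCircuit)
//   org.scalajs.dom.document.body.appendChild(ul(tree.render).render)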
|
ochrons/diode
|
examples/treeview/src/main/scala/example/TreeView.scala
|
Scala
|
mit
| 996 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.executionplan.builders
import org.neo4j.graphdb.{Direction, PropertyContainer}
import org.neo4j.cypher.internal.symbols.{RelationshipType, NodeType, SymbolTable}
import org.neo4j.cypher.internal.pipes.matching.ExpanderStep
import org.neo4j.graphdb.DynamicRelationshipType.withName
import org.neo4j.cypher.internal.commands.{Predicate, RelatedTo, True}
object TrailBuilder {
def findLongestTrail(patterns: Seq[RelatedTo], boundPoints: Seq[String], predicates: Seq[Predicate] = Seq.empty) =
new TrailBuilder(patterns, boundPoints, predicates).findLongestTrail()
}
final case class LongestTrail(start: String, end: Option[String], longestTrail: Trail) {
lazy val step = longestTrail.toSteps(0).get.reverse()
}
final class TrailBuilder(patterns: Seq[RelatedTo], boundPoints: Seq[String], predicates: Seq[Predicate]) {
private def internalFindLongestPath(doneSeq: Seq[(Trail, Seq[RelatedTo])]): Seq[(Trail, Seq[RelatedTo])] = {
val result: Seq[(Trail, Seq[RelatedTo])] = doneSeq.flatMap {
case (done: Trail, patterns: Seq[RelatedTo]) =>
val relatedToes = patterns.filter {
rel => done.end == rel.left || done.end == rel.right
}
if (relatedToes.isEmpty)
Seq((done, patterns))
else {
Seq((done, patterns)) ++
relatedToes.map {
case rel if rel.left == done.end => (WrappingTrail(done, rel.direction.reverse(), rel.relName, rel.relTypes, rel.right, predicates, rel), patterns.filterNot(_ == rel))
case rel => (WrappingTrail(done, rel.direction, rel.relName, rel.relTypes, rel.left, predicates, rel), patterns.filterNot(_ == rel))
}
}
}
if (result.distinct == doneSeq.distinct)
result
else
internalFindLongestPath(result)
}
private def findLongestTrail(): Option[LongestTrail] =
if (patterns.isEmpty) {
None
}
else {
val foundPaths: Seq[(Trail, Seq[RelatedTo])] = findAllPaths()
val pathsBetweenBoundPoints: Seq[(Trail, Seq[RelatedTo])] = findCompatiblePaths(foundPaths)
if (pathsBetweenBoundPoints.isEmpty) {
None
} else {
val trail = findLongestTrail(pathsBetweenBoundPoints)
Some(trail)
}
}
private def findLongestTrail(pathsBetweenBoundPoints: scala.Seq[(Trail, scala.Seq[RelatedTo])]): LongestTrail = {
val almost = pathsBetweenBoundPoints.sortBy(_._1.size)
val (longestPath, _) = almost.last
val start = longestPath.start
val end = if (boundPoints.contains(longestPath.end)) Some(longestPath.end) else None
val trail = LongestTrail(start, end, longestPath)
trail
}
private def findAllPaths(): Seq[(Trail, scala.Seq[RelatedTo])] = {
val startPoints = boundPoints.map(point => (BoundPoint(point), patterns))
val foundPaths = internalFindLongestPath(startPoints).
filter {
case (trail, toes) => trail.size > 0 && trail.start != trail.end
}
foundPaths
}
private def findCompatiblePaths(incomingPaths: Seq[(Trail, Seq[RelatedTo])]): Seq[(Trail, Seq[RelatedTo])] = {
val foundPaths = incomingPaths.filterNot {
case (trail, _) => hasBoundPointsInMiddleOfPath(trail)
}
val boundInTwoPoints = foundPaths.filter {
case (p, left) => boundPoints.contains(p.start) && boundPoints.contains(p.end)
}
val boundInAtLeastOnePoints = foundPaths.filter {
case (p, left) => boundPoints.contains(p.start) || boundPoints.contains(p.end)
}
if (boundInTwoPoints.nonEmpty)
boundInTwoPoints
else
boundInAtLeastOnePoints
}
def hasBoundPointsInMiddleOfPath(trail: Trail): Boolean = {
trail.pathDescription.slice(1, trail.pathDescription.size - 1).exists(boundPoints.contains)
}
}
|
dksaputra/community
|
cypher/src/main/scala/org/neo4j/cypher/internal/executionplan/builders/TrailBuilder.scala
|
Scala
|
gpl-3.0
| 4,593 |
package com.nidkil.downloader.actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
class ShutdownReaper(controller: ActorRef = null) extends Reaper with ActorLogging {
def allSoulsReaped() {
log.info(s"All souls reaped, shutting system down")
if (controller != null) context.stop(controller)
context.system.shutdown()
}
}
|
nidkil/akka-downloader
|
src/main/scala/com/nidkil/downloader/actor/ShutdownReaper.scala
|
Scala
|
apache-2.0
| 454 |
package org.template.recommendation
import io.prediction.controller.LServing
import scala.io.Source
import io.prediction.controller.Params // ADDED
// ADDED ServingParams to specify the blacklisting file location.
case class ServingParams(filepath: String) extends Params
class Serving(val params: ServingParams)
extends LServing[Query, PredictedResult] {
override
def serve(query: Query, predictedResults: Seq[PredictedResult])
: PredictedResult = {
val disabledProducts: Set[String] = Source
.fromFile(params.filepath)
.getLines
.toSet
val itemScores = predictedResults.head.itemScores
PredictedResult(itemScores.filter(ps => !disabledProducts(ps.item)))
}
}
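// Illustrative note (assumption, not taken from this template): the filepath
// would arrive via the serving params in engine.json, e.g.
//   "serving": { "params": { "filepath": "./data/disabled_items.txt" } }
// with one disabled product id per line in that file.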
|
wenaz/PredictionIO
|
examples/scala-parallel-recommendation/custom-serving/src/main/scala/Serving.scala
|
Scala
|
apache-2.0
| 709 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.http.js
import org.scalajs.macrotaskexecutor.MacrotaskExecutor
import wvlet.airframe.codec.{MessageCodec, MessageCodecFactory, MessageContext}
import wvlet.airframe.http.Http
import wvlet.airframe.http.js.JSHttpClient.MessageEncoding
import wvlet.airframe.msgpack.spi.{Packer, Unpacker}
import wvlet.airframe.surface.Surface
import wvlet.airspec.AirSpec
/**
*/
object JSHttpClientTest extends AirSpec {
implicit val ec: scala.concurrent.ExecutionContext = MacrotaskExecutor.Implicits.global
case class Person(id: Int, name: String)
test("create http client") {
ignore("ignore server interaction tests")
val s = Surface.of[Person]
val client = JSHttpClient()
client.getOps[Person, Person]("/v1/info", Person(1, "leo"), s, s).recover { case e: Throwable =>
logger.warn(e)
1
}
}
test("crete a request") {
val req = Http.request("/v1/info")
}
sealed trait Suit
case object Spade extends Suit
case object Heart extends Suit
object SuitCodec extends MessageCodec[Suit] {
override def pack(
p: Packer,
v: Suit
): Unit = {
      // Use a lowercase string to make sure the custom codec is used
p.packString(v.toString.toLowerCase)
}
override def unpack(
u: Unpacker,
v: MessageContext
): Unit = {
u.unpackString match {
case "spade" => v.setObject(Spade)
case "heart" => v.setObject(Heart)
case _ => v.setNull
}
}
}
test("support custom codec") {
val f = MessageCodecFactory.defaultFactory.withCodecs(Map(Surface.of[Suit] -> SuitCodec))
val client =
JSHttpClient(JSHttpClientConfig().withRequestEncoding(MessageEncoding.JsonEncoding).withCodecFactory(f))
val request = client.prepareRequestBody[Suit](Http.POST("/v1/suit"), Spade, Surface.of[Suit])
request.contentString shouldBe """"spade""""
}
}
|
wvlet/airframe
|
airframe-http/.js/src/test/scala/wvlet/airframe/http/js/JSHttpClientTest.scala
|
Scala
|
apache-2.0
| 2,478 |
package modules
import java.io.InputStreamReader
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import java.util.zip.ZipInputStream
import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
import scala.util.control.NonFatal
import resource._
import play.api.inject._
import play.api.{Configuration, Environment, Logger, Mode}
import com.blueconic.browscap.impl.UserAgentFileParser
import com.blueconic.browscap.{BrowsCapField, UserAgentParser, UserAgentService}
import verification._
/**
* Config for Haruko's verification flow.
*/
class VerificationModule extends Module {
override def bindings(environment: Environment, configuration: Configuration): Seq[Binding[_]] = {
val configBlock = configuration.getConfig("verification").getOrElse {
throw new RuntimeException("No verification block in Play config!")
}
val userAgentParser: UserAgentParser = configBlock
.getString("browscapZipPath")
.map(Success(_))
.getOrElse(Failure(new Exception("No browscapZipPath in Play verification config block.")))
.map(loadUserAgentParser)
.recover {
case NonFatal(e) if environment.mode != Mode.Prod =>
Logger.warn(s"Falling back to bundled Browscap data version ${UserAgentService.BUNDLED_BROWSCAP_VERSION}.")
new UserAgentService().loadParser()
}
.get
val geoIP = configBlock
.getString("geoipDir")
.map(Success(_))
.getOrElse(Failure(new Exception("No geoipDir in Play verification config block.")))
.map(GeoIPImpl.apply)
.recover {
case NonFatal(e) if environment.mode != Mode.Prod =>
Logger.warn(s"Falling back to GeoIP dev/test stub.")
GeoIPStub
}
.get
val fireholNetsets = configBlock
.getString("fireholDir")
.map(Success(_))
.getOrElse(Failure(new Exception("No fireholDir in Play verification config block.")))
.map(FireholNetsetsImpl.apply)
.recover {
case NonFatal(e) if environment.mode != Mode.Prod =>
Logger.warn(s"Falling back to FireHOL dev/test stub.")
FireholNetsetsStub
}
.get
// TODO: periodic reload of these files
Seq(
bind[UserAgentParser].toInstance(userAgentParser),
bind[GeoIP].toInstance(geoIP),
bind[FireholNetsets].toInstance(fireholNetsets)
)
}
/**
* Workaround for poor choices in BlueConic Browscap data loader.
*/
def loadUserAgentParser(browscapZipPath: String): UserAgentParser = {
// This does not work because UserAgentService assumes that the first file in the ZIP is a CSV.
// I don't know why anyone would ever assume that, instead of looking for a CSV extension.
//new UserAgentService(browscapZipPath).loadParser()
// This means we can't use the Browscap ZIP distribution because there's more than one file in it.
// So we have to work around this issue:
val browscapDefaultFields = BrowsCapField.values().filter(_.isDefault).toSet
(for {
input <- managed(Files.newInputStream(Paths.get(browscapZipPath)))
zip <- managed(new ZipInputStream(input))
// Advance to first CSV in file.
_ = Iterator
.continually(zip.getNextEntry)
.takeWhile(_ != null)
.find(_.getName.endsWith(".csv"))
.getOrElse {
throw new RuntimeException(s"Couldn't find CSV file in $browscapZipPath!")
}
reader <- managed(new InputStreamReader(zip, StandardCharsets.UTF_8))
} yield {
UserAgentFileParser.parse(reader, browscapDefaultFields.asJava)
}).apply(identity)
}
}
|
haruko-devs/haruko
|
app/modules/VerificationModule.scala
|
Scala
|
mit
| 3,649 |
package io.buoyant.linkerd
import com.twitter.conversions.time._
import com.twitter.finagle.buoyant.DstBindingFactory
import com.twitter.finagle.naming.NameInterpreter
import com.twitter.finagle.param.Label
import com.twitter.finagle.stats.{BroadcastStatsReceiver, LoadedStatsReceiver}
import com.twitter.finagle.tracing.{BroadcastTracer, DefaultTracer, Tracer}
import com.twitter.finagle.util.{DefaultTimer, LoadService}
import com.twitter.finagle.{Namer, Path, Stack, param => fparam}
import com.twitter.logging.Logger
import com.twitter.server.util.JvmStats
import io.buoyant.admin.{Admin, AdminConfig}
import io.buoyant.config._
import io.buoyant.namer.Param.Namers
import io.buoyant.namer._
import io.buoyant.linkerd.telemeter.UsageDataTelemeterConfig
import io.buoyant.telemetry._
import io.buoyant.telemetry.admin.{AdminMetricsExportTelemeter, histogramSnapshotInterval}
import java.net.InetSocketAddress
import scala.util.control.NoStackTrace
/**
* Represents the total configuration of a Linkerd process.
*/
trait Linker {
def routers: Seq[Router]
def namers: Seq[(Path, Namer)]
def admin: Admin
def tracer: Tracer
def telemeters: Seq[Telemeter]
def configured[T: Stack.Param](t: T): Linker
}
object Linker {
private[this] val log = Logger()
private[this] val DefaultAdminAddress = new InetSocketAddress(9990)
private[this] val DefaultAdminConfig = AdminConfig()
private[linkerd] case class Initializers(
protocol: Seq[ProtocolInitializer] = Nil,
namer: Seq[NamerInitializer] = Nil,
interpreter: Seq[InterpreterInitializer] = Nil,
transformer: Seq[TransformerInitializer] = Nil,
tlsClient: Seq[TlsClientInitializer] = Nil,
identifier: Seq[IdentifierInitializer] = Nil,
classifier: Seq[ResponseClassifierInitializer] = Nil,
telemetry: Seq[TelemeterInitializer] = Nil,
announcer: Seq[AnnouncerInitializer] = Nil,
failureAccrual: Seq[FailureAccrualInitializer] = Nil
) {
def iter: Iterable[Seq[ConfigInitializer]] =
Seq(protocol, namer, interpreter, tlsClient, identifier, transformer, classifier, telemetry, announcer, failureAccrual)
def all: Seq[ConfigInitializer] = iter.flatten.toSeq
def parse(config: String): LinkerConfig =
Linker.parse(config, this)
def load(config: String): Linker =
Linker.load(config, this)
}
private[linkerd] lazy val LoadedInitializers = Initializers(
LoadService[ProtocolInitializer],
LoadService[NamerInitializer],
LoadService[InterpreterInitializer] :+ DefaultInterpreterInitializer,
LoadService[TransformerInitializer],
LoadService[TlsClientInitializer],
LoadService[IdentifierInitializer],
LoadService[ResponseClassifierInitializer],
LoadService[TelemeterInitializer],
LoadService[AnnouncerInitializer],
LoadService[FailureAccrualInitializer]
)
def parse(
config: String,
inits: Initializers = LoadedInitializers
): LinkerConfig = {
val mapper = Parser.objectMapper(config, inits.iter)
mapper.readValue[LinkerConfig](config)
}
private[linkerd] def load(config: String, inits: Initializers): Linker =
parse(config, inits).mk()
def load(config: String): Linker =
load(config, LoadedInitializers)
object param {
case class LinkerConfig(config: Linker.LinkerConfig)
implicit object LinkerConfig extends Stack.Param[LinkerConfig] {
val default = LinkerConfig(Linker.LinkerConfig(None, Seq(), None, None, None))
}
}
case class LinkerConfig(
namers: Option[Seq[NamerConfig]],
routers: Seq[RouterConfig],
telemetry: Option[Seq[TelemeterConfig]],
admin: Option[AdminConfig],
usage: Option[UsageDataTelemeterConfig]
) {
def mk(): Linker = {
// At least one router must be specified
if (routers.isEmpty) throw NoRoutersSpecified
val metrics = MetricsTree()
val telemeterParams = Stack.Params.empty + param.LinkerConfig(this) + metrics
val adminTelemeter = new AdminMetricsExportTelemeter(metrics, histogramSnapshotInterval(), DefaultTimer.twitter)
val usageTelemeter = usage.getOrElse(UsageDataTelemeterConfig()).mk(telemeterParams)
val telemeters = telemetry.toSeq.flatten.map {
case t if t.disabled =>
val msg = s"The ${t.getClass.getCanonicalName} telemeter is experimental and must be " +
"explicitly enabled by setting the `experimental' parameter to `true'."
throw new IllegalArgumentException(msg) with NoStackTrace
case t => t.mk(telemeterParams)
} :+ adminTelemeter :+ usageTelemeter
// Telemeters may provide StatsReceivers.
val stats = mkStats(metrics, telemeters)
LoadedStatsReceiver.self = stats
JvmStats.register(stats)
val tracer = mkTracer(telemeters)
DefaultTracer.self = tracer
val params = Stack.Params.empty + fparam.Tracer(tracer) + fparam.Stats(stats)
val namersByPrefix = mkNamers(params + fparam.Stats(stats.scope("namer")))
NameInterpreter.setGlobal(ConfiguredNamersInterpreter(namersByPrefix))
val routerImpls = mkRouters(params + Namers(namersByPrefix) + fparam.Stats(stats.scope("rt")))
val adminImpl = admin.getOrElse(DefaultAdminConfig).mk(DefaultAdminAddress)
Impl(routerImpls, namersByPrefix, tracer, telemeters, adminImpl)
}
private[this] def mkStats(metrics: MetricsTree, telemeters: Seq[Telemeter]) = {
val receivers = telemeters.collect { case t if !t.stats.isNull => t.stats } :+ new MetricsTreeStatsReceiver(metrics)
for (r <- receivers) log.debug("stats: %s", r)
BroadcastStatsReceiver(receivers)
}
private[this] def mkTracer(telemeters: Seq[Telemeter]) = {
val all = telemeters.collect { case t if !t.tracer.isNull => t.tracer }
for (t <- all) log.info("tracer: %s", t)
BroadcastTracer(all)
}
private[this] def mkNamers(params: Stack.Params) = {
namers.getOrElse(Nil).reverse.map {
case n if n.disabled =>
val msg = s"The ${n.prefix.show} namer is experimental and must be " +
"explicitly enabled by setting the `experimental' parameter to `true'."
throw new IllegalArgumentException(msg) with NoStackTrace
case n => n.prefix -> n.mk(params)
}
}
private[this] def mkRouters(params: Stack.Params) = {
// Router labels must not conflict
for ((label, rts) <- routers.groupBy(_.label))
if (rts.size > 1) throw ConflictingLabels(label)
for (r <- routers) {
if (r.disabled) {
val msg = s"The ${r.protocol.name} protocol is experimental and must be " +
"explicitly enabled by setting the `experimental' parameter to `true' on each router."
throw new IllegalArgumentException(msg) with NoStackTrace
}
}
val impls = routers.map { router =>
val interpreter = router.interpreter.interpreter(params + Label(router.label))
router.router(params + DstBindingFactory.Namer(interpreter))
}
// Server sockets must not conflict
impls.flatMap(_.servers).groupBy(_.addr).values.foreach {
case Seq(srv0, srv1, _*) => throw ConflictingPorts(srv0.addr, srv1.addr)
case _ =>
}
impls
}
}
/**
* Private concrete implementation, to help protect compatibility if
* the Linker api is extended.
*/
private case class Impl(
routers: Seq[Router],
namers: Seq[(Path, Namer)],
tracer: Tracer,
telemeters: Seq[Telemeter],
admin: Admin
) extends Linker {
override def configured[T: Stack.Param](t: T) =
copy(routers = routers.map(_.configured(t)))
}
}
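// Hedged usage sketch (not from this file; the YAML is an illustrative,
// standard-shaped linkerd config, and the port value is an assumption):
//   val linker = Linker.load(
//     """|routers:
//        |- protocol: http
//        |  servers:
//        |  - port: 4140
//        |""".stripMargin)
//   linker.routers.foreach(println)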
|
hhtpcd/linkerd
|
linkerd/core/src/main/scala/io/buoyant/linkerd/Linker.scala
|
Scala
|
apache-2.0
| 7,649 |
package org.jetbrains.plugins.scala
package refactoring.extractMethod
/**
* Nikolay.Tropin
* 2014-05-20
*/
class ScalaExtractMethodInnerClass extends ScalaExtractMethodTestBase {
override def folderPath: String = super.folderPath + "innerClass/"
def testNoReturnSeveralOutput() = doTest()
def testReturnSeveralOutput1() = doTest()
def testReturnSeveralOutput2() = doTest()
def testUnitReturnSeveralOutput1() = doTest()
def testUnitReturnSeveralOutput2() = doTest()
}
|
triggerNZ/intellij-scala
|
test/org/jetbrains/plugins/scala/refactoring/extractMethod/ScalaExtractMethodInnerClass.scala
|
Scala
|
apache-2.0
| 488 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan, TypedFilter}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.types.BooleanType
class TypedFilterOptimizationSuite extends PlanTest {
object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("EliminateSerialization", FixedPoint(50),
EliminateSerialization) ::
Batch("CombineTypedFilters", FixedPoint(50),
CombineTypedFilters) :: Nil
}
implicit private def productEncoder[T <: Product : TypeTag] = ExpressionEncoder[T]()
test("filter after serialize with the same object type") {
val input = LocalRelation('_1.int, '_2.int)
val f = (i: (Int, Int)) => i._1 > 0
val query = input
.deserialize[(Int, Int)]
.serialize[(Int, Int)]
.filter(f).analyze
val optimized = Optimize.execute(query)
val expected = input
.deserialize[(Int, Int)]
.where(callFunction(f, BooleanType, 'obj))
.serialize[(Int, Int)].analyze
comparePlans(optimized, expected)
}
test("filter after serialize with different object types") {
val input = LocalRelation('_1.int, '_2.int)
val f = (i: OtherTuple) => i._1 > 0
val query = input
.deserialize[(Int, Int)]
.serialize[(Int, Int)]
.filter(f).analyze
val optimized = Optimize.execute(query)
comparePlans(optimized, query)
}
test("filter before deserialize with the same object type") {
val input = LocalRelation('_1.int, '_2.int)
val f = (i: (Int, Int)) => i._1 > 0
val query = input
.filter(f)
.deserialize[(Int, Int)]
.serialize[(Int, Int)].analyze
val optimized = Optimize.execute(query)
val expected = input
.deserialize[(Int, Int)]
.where(callFunction(f, BooleanType, 'obj))
.serialize[(Int, Int)].analyze
comparePlans(optimized, expected)
}
test("filter before deserialize with different object types") {
val input = LocalRelation('_1.int, '_2.int)
val f = (i: OtherTuple) => i._1 > 0
val query = input
.filter(f)
.deserialize[(Int, Int)]
.serialize[(Int, Int)].analyze
val optimized = Optimize.execute(query)
comparePlans(optimized, query)
}
test("back to back filter with the same object type") {
val input = LocalRelation('_1.int, '_2.int)
val f1 = (i: (Int, Int)) => i._1 > 0
val f2 = (i: (Int, Int)) => i._2 > 0
val query = input.filter(f1).filter(f2).analyze
val optimized = Optimize.execute(query)
assert(optimized.collect { case t: TypedFilter => t }.length == 1)
}
test("back to back filter with different object types") {
val input = LocalRelation('_1.int, '_2.int)
val f1 = (i: (Int, Int)) => i._1 > 0
val f2 = (i: OtherTuple) => i._2 > 0
val query = input.filter(f1).filter(f2).analyze
val optimized = Optimize.execute(query)
assert(optimized.collect { case t: TypedFilter => t }.length == 2)
}
}
|
aokolnychyi/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/TypedFilterOptimizationSuite.scala
|
Scala
|
apache-2.0
| 4,104 |
/*
* Copyright (C) 2016 Vincibean <Andre Bessi>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.vincibean.scala.impatient.chapter5.exercise3
/**
* Write a class Time with read-only properties hours and minutes and a method
* before(other: Time): Boolean that checks whether this time comes before the
* other. A Time object should be constructed as new Time(hrs, min), where hrs is in
* military time format (between 0 and 23).
*
* Created by Vincibean on 18/01/16.
*/
class Time(val hours: Int, val minutes: Int) {
require(hours >= 0 && hours < 24)
require(minutes >= 0 && minutes < 60)
private val asMinutes = hours * 60 + minutes
def before(other: Time): Boolean = other.asMinutes > this.asMinutes
}
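// A minimal usage sketch (not part of the original exercise solution); the results
// follow from the minutes-since-midnight comparison in `before`:
object TimeExample extends App {
  val morning = new Time(9, 30)
  val evening = new Time(21, 15)
  println(morning.before(evening)) // true: 9 * 60 + 30 = 570 < 1275 = 21 * 60 + 15
  println(evening.before(morning)) // false: the comparison is strict
}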
|
Vincibean/ScalaForTheImpatient-Solutions
|
src/main/scala/org/vincibean/scala/impatient/chapter5/exercise3/Time.scala
|
Scala
|
gpl-3.0
| 1,358 |
package uk.gov.gds.common.mongo
import com.mongodb.WriteConcern
import uk.gov.gds.common.audit.TestAuditEventRepository
import com.mongodb.casbah.MongoDB
import com.mongodb.Bytes
object UnauthenticatedMongoDatabaseManagerForTests extends MongoDatabaseManager {
database.setWriteConcern(WriteConcern.MAJORITY)
database.underlying.setOptions(database.getOptions() & (~Bytes.QUERYOPTION_SLAVEOK))
protected val repositoriesToInitialiseOnStartup = List(TestAuditEventRepository)
protected override def authenticateToDatabaseIfRequired(connection: MongoDB) {
// no-op. We're disabling authentication
}
}
|
alphagov/gds-scala-common
|
mongo-utils/src/test/scala/uk/gov/gds/common/mongo/UnauthenticatedMongoDatabaseManagerForTests.scala
|
Scala
|
mit
| 618 |
package com.vivint.ceph.model
import org.scalatest.{FunSpec, Matchers}
import play.api.libs.json._
class PlayJsonFormatsTest extends FunSpec with Matchers {
import PlayJsonFormats._
describe("LocationFormat") {
it("it goes in and out properly") {
Json.toJson(Location.empty).as[Location] shouldBe Location.empty
Json.toJson(PartialLocation(Some("ip"), None)).as[Location] shouldBe PartialLocation(Some("ip"), None)
Json.toJson(PartialLocation(None, Some(1234))).as[Location] shouldBe PartialLocation(None, Some(1234))
Json.toJson(IPLocation("ip", 1234)).as[Location] shouldBe IPLocation("ip", 1234)
Json.toJson(ServiceLocation("hostname", "ip", 1234)).as[Location] shouldBe ServiceLocation("hostname", "ip", 1234)
}
}
}
|
vivint-smarthome/ceph-on-mesos
|
src/test/scala/com/vivint/ceph/model/PlayJsonFormatsTest.scala
|
Scala
|
apache-2.0
| 766 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package typechecker
import scala.reflect.NameTransformer
import symtab.Flags._
/** Logic related to method synthesis which involves cooperation between
* Namer and Typer.
*/
trait MethodSynthesis {
self: Analyzer =>
import global._
import definitions._
import CODE._
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(
if (clazz.isClass) Select(This(clazz), sym) else Ident(sym)
)
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
def newMethodFlags(name: TermName) = {
val overrideFlag = if (isOverride(name)) OVERRIDE else 0L
overrideFlag | SYNTHETIC
}
def newMethodFlags(method: Symbol) = {
val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L
(method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
}
private def finishMethod(method: Symbol, f: Symbol => Tree): Tree =
localTyper typed (
if (method.isLazy) ValDef(method, f(method))
else DefDef(method, f(method))
)
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
val name1 = name.toTermName
val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter info, f)
}
private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
val name1 = name.toTermName
val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter infoFn(m), f)
}
private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
val m = original.cloneSymbol(clazz, newMethodFlags(original), name) setPos clazz.pos.focus
finishMethod(clazz.info.decls enter m, f)
}
def clazzMember(name: Name) = clazz.info nonPrivateMember name
def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
cloneInternal(original, f, nameFn(original.name))
def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree =
createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType))
def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree =
createInternal(name, f, NullaryMethodType(returnType))
def createMethod(original: Symbol)(f: Symbol => Tree): Tree =
createInternal(original.name, f, original.info)
def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree =
createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree): Tree = {
def dflt(arg: Tree) = currentRun.runDefinitions.RuntimeStatics_ioobe match {
case NoSymbol =>
// Support running the compiler with an older library on the classpath
Throw(IndexOutOfBoundsExceptionClass.tpe_*, fn(arg, nme.toString_))
case ioobeSym =>
val ioobeTypeApply = TypeApply(gen.mkAttributedRef(ioobeSym), List(TypeTree(returnType)))
Apply(ioobeTypeApply, List(arg))
}
createMethod(name, List(IntTpe), returnType) { m =>
val arg0 = Ident(m.firstParam)
val default = DEFAULT ==> dflt(arg0)
val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
Match(arg0, cases)
}
}
// def foo() = constant
def constantMethod(name: Name, value: Any): Tree = {
val constant = Constant(value)
createMethod(name, Nil, constant.tpe)(_ => Literal(constant))
}
// def foo = constant
def constantNullary(name: Name, value: Any): Tree = {
val constant = Constant(value)
createMethod(name, constant.tpe)(_ => Literal(constant))
}
}
/** There are two key methods in here.
*
* 1) Enter methods such as enterGetterSetter are called
* from Namer with a tree which may generate further trees such as accessors or
* implicit wrappers. Some setup is performed. In general this creates symbols
* and enters them into the scope of the owner.
*
* 2) addDerivedTrees is called from Typer when a Template is typed.
* It completes the job, returning a list of trees with their symbols
* set to those created in the enter methods. Those trees then become
* part of the typed template.
*/
trait MethodSynth {
self: Namer =>
import NamerErrorGen._
import treeInfo.noFieldFor
// populate synthetics for this unit with trees that will later be added by the typer
// we get here when entering the symbol for the valdef, so its rhs has not yet been type checked
def enterGetterSetter(tree: ValDef): Unit = {
val fieldSym =
if (noFieldFor(tree, owner)) NoSymbol
else owner.newValue(tree.name append NameTransformer.LOCAL_SUFFIX_STRING, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
val getter = Getter(tree)
val getterSym = getter.createSym
// only one symbol can have `tree.pos`, the others must focus their position
// normally the field gets the range position, but if there is none, give it to the getter
//
// scala/bug#10009 the tree's modifiers can be temporarily out of sync with the new symbol's flags.
// typedValDef corrects this later on.
tree.symbol = fieldSym orElse (getterSym setPos tree.pos)
val namer = namerOf(tree.symbol)
// the valdef gets the accessor symbol for a lazy val (too much going on in its RHS)
// the fields phase creates the field symbol
if (!tree.mods.isLazy) {
// if there's a field symbol, the getter is considered a synthetic that must be added later
// if there's no field symbol, the ValDef tree receives the getter symbol and thus is not a synthetic
if (fieldSym != NoSymbol) {
context.unit.synthetics(getterSym) = getter.derivedTree(getterSym)
getterSym setInfo namer.accessorTypeCompleter(tree, tree.tpt.isEmpty, isBean = false, isSetter = false)
} else getterSym setInfo namer.valTypeCompleter(tree)
enterInScope(getterSym)
if (getter.needsSetter) {
val setter = Setter(tree)
val setterSym = setter.createSym
context.unit.synthetics(setterSym) = setter.derivedTree(setterSym)
setterSym setInfo namer.accessorTypeCompleter(tree, tree.tpt.isEmpty, isBean = false, isSetter = true)
enterInScope(setterSym)
}
// TODO: delay emitting the field to the fields phase (except for private[this] vals, which only get a field and no accessors)
if (fieldSym != NoSymbol) {
fieldSym setInfo namer.valTypeCompleter(tree)
enterInScope(fieldSym)
}
} else {
getterSym setInfo namer.valTypeCompleter(tree)
enterInScope(getterSym)
}
deriveBeanAccessors(tree, namer)
}
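// Informal example of the above (not in the original source): for `var x: Int` in a
// class, this enters a field symbol named "x " (tree.name plus LOCAL_SUFFIX_STRING),
// a getter `x`, and a setter `x_=`; the accessor trees are queued in
// context.unit.synthetics and spliced into the template later, when Typer calls
// addDerivedTrees.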
private def deriveBeanAccessors(tree: ValDef, namer: Namer): Unit = {
// TODO: can we look at the annotations symbols? (name-based introduced in 8cc477f8b6, see neg/t3403)
val hasBeanProperty = tree.mods hasAnnotationNamed tpnme.BeanPropertyAnnot
val hasBoolBP = tree.mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
if (hasBeanProperty || hasBoolBP) {
if (!tree.name.charAt(0).isLetter) BeanPropertyAnnotationFieldWithoutLetterError(tree)
// avoids name clashes with private fields in traits
else if (tree.mods.isPrivate) BeanPropertyAnnotationPrivateFieldError(tree)
val derivedPos = tree.pos.focus
val missingTpt = tree.tpt.isEmpty
def deriveBeanAccessor(prefix: String): Symbol = {
val isSetter = prefix == "set"
val name = newTermName(prefix + tree.name.toString.capitalize)
val setterParam = nme.syntheticParamName(1)
// note: tree.tpt may be EmptyTree, which will be a problem when used as the tpt of a parameter
// the completer will patch this up (we can't do this now without completing the field)
val tptToPatch = if (missingTpt) TypeTree() else tree.tpt.duplicate
val (vparams, tpt) =
if (isSetter) (List(ValDef(Modifiers(PARAM | SYNTHETIC), setterParam, tptToPatch, EmptyTree)), TypeTree(UnitTpe))
else (Nil, tptToPatch)
val rhs =
if (tree.mods.isDeferred) EmptyTree
else if (isSetter) Apply(Ident(tree.name.setterName), List(Ident(setterParam)))
else Select(This(owner), tree.name)
val sym = createMethod(tree, name, derivedPos, tree.mods.flags & BeanPropertyFlags)
context.unit.synthetics(sym) = newDefDef(sym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt)
sym
}
val getterCompleter = namer.accessorTypeCompleter(tree, missingTpt, isBean = true, isSetter = false)
enterInScope(deriveBeanAccessor(if (hasBeanProperty) "get" else "is") setInfo getterCompleter)
if (tree.mods.isMutable) {
val setterCompleter = namer.accessorTypeCompleter(tree, missingTpt, isBean = true, isSetter = true)
enterInScope(deriveBeanAccessor("set") setInfo setterCompleter)
}
}
}
def enterImplicitWrapper(classDef: ClassDef): Unit = {
val methDef = factoryMeth(classDef.mods & (AccessFlags | FINAL) | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef)
val methSym = enterInScope(assignMemberSymbol(methDef))
context.unit.synthetics(methSym) = methDef
treeInfo.firstConstructor(classDef.impl.body) match {
case primaryConstructor: DefDef =>
if (mexists(primaryConstructor.vparamss)(_.mods.hasDefault))
enterDefaultGetters(methSym, primaryConstructor, primaryConstructor.vparamss, primaryConstructor.tparams)
case _ =>
}
methSym setInfo implicitFactoryMethodCompleter(methDef, classDef.symbol)
}
trait DerivedAccessor {
def tree: ValDef
def derivedName: TermName
def derivedFlags: Long
def derivedTree(sym: Symbol): Tree
def derivedPos = tree.pos.focus
def createSym = createMethod(tree, derivedName, derivedPos, derivedFlags)
}
case class Getter(tree: ValDef) extends DerivedAccessor {
def derivedName = tree.name
def derivedFlags = tree.mods.flags & GetterFlags | ACCESSOR.toLong | ( if (needsSetter) 0 else STABLE )
def needsSetter = tree.mods.isMutable // implies !lazy
override def derivedTree(derivedSym: Symbol) = {
val missingTpt = tree.tpt.isEmpty
val tpt = if (missingTpt) TypeTree() else tree.tpt.duplicate
val rhs =
if (noFieldFor(tree, owner)) tree.rhs // context.unit.transformed.getOrElse(tree.rhs, tree.rhs)
else Select(This(tree.symbol.enclClass), tree.symbol)
newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = Nil, tpt = tpt)
}
// derivedSym setPos tree.pos
// // ValDef will have its position focused whereas DefDef will have original correct rangepos
// // ideally positions would be correct at the creation time but lazy vals are really a special case
// // here so for the sake of keeping api clean we fix positions manually in LazyValGetter
// tpt.setPos(tree.tpt.pos)
// tree.tpt.setPos(tree.tpt.pos.focus)
}
case class Setter(tree: ValDef) extends DerivedAccessor {
def derivedName = tree.setterName
def derivedFlags = tree.mods.flags & SetterFlags | ACCESSOR
def derivedTree(derivedSym: Symbol) = {
val setterParam = nme.syntheticParamName(1)
// note: tree.tpt may be EmptyTree, which will be a problem when used as the tpt of a parameter
// the completer will patch this up (we can't do this now without completing the field)
val missingTpt = tree.tpt.isEmpty
val tptToPatch = if (missingTpt) TypeTree() else tree.tpt.duplicate
val vparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), setterParam, tptToPatch, EmptyTree))
val tpt = TypeTree(UnitTpe)
val rhs =
if (noFieldFor(tree, owner)) EmptyTree
else Assign(Select(This(tree.symbol.enclClass), tree.symbol), Ident(setterParam))
newDefDef(derivedSym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt)
}
}
}
}
|
lrytz/scala
|
src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
|
Scala
|
apache-2.0
| 13,048 |
package thesis.orderings
import thesis.matrixTypes._
import thesis.utils._
import thesis.rectangles._
abstract class Move {
def apply(m: MatrixMoves):ReverseMove
}
/**
* Object containing the different moves that can be used
*/
object Move {
/** Swap move
* Selects two random rows a and b such that b - a < 10 and swaps them
*/
case object Swap extends Move{
def apply(m: MatrixMoves):ReverseMove = {
val a = m.randomRow
val b = Math.min(m.rows-1, a+Utils.rand.nextInt(10))
m.swap(a,b)
}
override def toString = "Swap"
}
/** Swap Adjacent move
* Selects two random adjacent rows and swaps them
*/
case object SwapAdjacent extends Move{
def apply(m: MatrixMoves):ReverseMove = {
var a = m.randomRow
if(a == m.rows - 1) a -= 1
val b = a + 1 // a was clamped above, so a+1 is always in range and no modulo is needed
m.swap(a,b)
}
override def toString = "SwapAdjacent"
}
/** Reverse move
* Selects two random rows a and i such that |a-i| < 50 and reverses all the rows between them (inclusive)
*/
case object Reverse extends Move{
def apply(m: MatrixMoves):ReverseMove = {
val a = m.randomRow
val i = if(Utils.rand.nextInt(2) == 0) Math.min(a+Utils.rand.nextInt(50), m.rows-1) else Math.max(a-Utils.rand.nextInt(50), 0)
m.reverse(a,i)
}
override def toString = "Reverse"
}
/** Relocate move
* Selects two random rows a and b such that |a-b| < 50 and a random k,
* then shifts the rows a to b (inclusive) forward (if a<b) / backwards (if a>b) by k rows
*/
case object Relocate extends Move{
def apply(m: MatrixMoves):ReverseMove = {
val a = m.randomRow
val b = if(Utils.rand.nextInt(2) == 0) Math.min(a+Utils.rand.nextInt(50), m.rows-1) else Math.max(a-Utils.rand.nextInt(50), 0)
val k = Utils.rand.nextInt{
if(a < b) m.rows - b
else b.max(1)
}
m.relocate(a,b,k)
}
override def toString = "Relocate"
}
/** KMax move
* Cuts the matrix into at most k+1 blocks, separated by the k rows for which the error
* is greatest, then reorders those blocks with a TSP. This is repeated until convergence.
*/
case object KMax extends Move{
def apply(m: MatrixMoves):ReverseMove = {
val k = 10
m.apply_kmax(k)
}
override def toString = "KMax"
}
/** Swap Rectangles move
* Finds two rectangles in the matrix and put them next to each other
*/
case object SwapRectangles extends Move{
def apply(m: MatrixMoves):ReverseMove = {
val rectArray = RectangleFinder.getSuitableRectangles(m)
val r1 = rectArray(0)
val r2 = rectArray(1)
var reverseColor = new ReverseMove(){
def reverseTheMove = ()
def moveName = "reverseColor"
}
// If the matrix is categorical and transposed,
// change the color of one rectangle such that both have the same color
if(m.isCat && m.isTranspose) reverseColor = m.changeColorRectangles(r1, r2)
val reverse1 = m.swapRectangles(r1, r2)
new ReverseMove(){
def reverseTheMove = {
reverse1.reverseTheMove
reverseColor.reverseTheMove
}
def moveName = "full SwapRectangle"
}
}
override def toString = "Rectangles"
}
}
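// A minimal usage sketch (not from the original source): every move returns a
// ReverseMove that can undo it, which makes tentative, roll-back-able steps easy.
// The acceptance predicate `keep` is a hypothetical caller-supplied check.
object MoveUsageSketch {
  def tryMove(m: MatrixMoves, move: Move)(keep: MatrixMoves => Boolean): Unit = {
    val undo = move(m) // apply e.g. Move.Swap, Move.Reverse, ...
    if (!keep(m)) undo.reverseTheMove // roll back if the result is not accepted
  }
}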
/** Class that represents a move with its probability of being selected
*/
case class MoveProba(move:Move, var probaWeight:Double = 1.0){ // Default weight of 1.0 if not provided
require(probaWeight >= 0, s"The weight must be >= 0 : $probaWeight")
/** Multiplies the probability weight of this move by weight
*
* @param weight The weight multiplier
*/
def *=(weight:Double){
require(weight >= 0, s"The weight multiplier must be >= 0 : $weight")
probaWeight *= weight
}
/** Adds weight to the probability weight of this move
*
* @param weight The weight to add
*/
def +=(weight:Double){
probaWeight += weight
probaWeight = probaWeight.max(0.0)
}
var contributedError = 0.0 // Keeps track of the amount of error each move contributed for
var count = 0 // Counts how many times each move was used in the last segment.
var time:Long = 0
/** Resets the learning of the probability for this move
*/
def reset(){
contributedError = 0.0
count = 0
time = 0
}
/** Learn the new probability for this move
*
* @param reaction_factor If equal to 0, will never update the weight.
* If equal to 1, will never take into account the old weight
*/
def updateWeight(reaction_factor:Double){
probaWeight = probaWeight * (1-reaction_factor) + reaction_factor * contributedError/count.max(1) // use max to avoid division by 0
reset()
}
override def toString = move.toString
}
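// Worked example of updateWeight (illustrative numbers, not from the original source):
// with probaWeight = 1.0, reaction_factor = 0.5, contributedError = 3.0 and count = 2,
// the new weight is 1.0 * 0.5 + 0.5 * (3.0 / 2) = 1.25, i.e. an exponential moving
// average between the old weight and the segment's average error contribution.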
|
GLeurquin/Faithful-visualization-of-categorical-datasets
|
src/main/scala/Orderings/Move.scala
|
Scala
|
mit
| 4,466 |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala.internal
import org.mongodb.scala.{ Observable, Observer, SingleObservable }
private[scala] case class UnitObservable[T](observable: Observable[T]) extends SingleObservable[Unit] {
override def subscribe(observer: Observer[_ >: Unit]): Unit =
observable.foldLeft(0)((_, _) => 0).map(_ => ()).subscribe(observer)
}
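// Note (added commentary): the foldLeft drains the source Observable and emits a
// single value once it completes, so a subscriber to UnitObservable(obs) receives
// exactly one () after the hypothetical source `obs` finishes; errors propagate as usual.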
|
rozza/mongo-java-driver
|
driver-scala/src/main/scala/org/mongodb/scala/internal/UnitObservable.scala
|
Scala
|
apache-2.0
| 953 |
/*
* Copyright (c) 2011, Daniel Spiewak
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* - Neither the name of "Anti-XML" nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.codecommit.antixml
package performance
case class XmlCounts(nodes: Int, elements: Int, attributes: Int) {
def +(rhs: XmlCounts) = XmlCounts(nodes+rhs.nodes,elements+rhs.elements,attributes+rhs.attributes)
def report:String = nodes+" nodes, "+elements+" elems, "+attributes+" attrs"
}
object XmlCounts {
def apply(a: Any): XmlCounts = a match {
case g: Group[_] => (XmlCounts(0,0,0) /: g) { (acc,n) => acc + count(n) }
case n: Node => count(n)
case sn: scala.xml.Node => count(sn)
case jn: org.w3c.dom.Node => count(jn)
case _ => XmlCounts(-1,-1,-1)
}
private def count(n: Node): XmlCounts = {
val top = n match {
case Elem(_,_,a,_,_) => XmlCounts(1,1,a.size)
case _ => XmlCounts(1,0,0)
}
(top /: n.children) { (acc,child) => acc + count(child) }
}
private def count(n: scala.xml.Node): XmlCounts = n match {
case e: scala.xml.Elem =>
(XmlCounts(1,1,e.attributes.length) /: e.child) { (acc,c) => acc + count(c) }
case _ => XmlCounts(1,0,0)
}
private def count(n: org.w3c.dom.Node): XmlCounts = n match {
case e: org.w3c.dom.Element =>
(XmlCounts(1,1,e.getAttributes.getLength) /: JavaNodeSeqWithIndexedSeq.wrap(e.getChildNodes)) { (acc,c) => acc + count(c) }
case d: org.w3c.dom.Document =>
(XmlCounts(0,0,0) /: JavaNodeSeqWithIndexedSeq.wrap(d.getChildNodes)) { (acc,c) => acc + count(c) }
case _ => XmlCounts(1,0,0)
}
}
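// Illustrative check (not in the original source), using the scala.xml counter:
// XmlCounts(<a x="1"><b/></a>) == XmlCounts(2, 2, 1), and its `report` renders as
// "2 nodes, 2 elems, 1 attrs".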
|
djspiewak/anti-xml
|
src/test/scala/com/codecommit/antixml/performance/XmlCounts.scala
|
Scala
|
bsd-3-clause
| 3,011 |
package value_class_override_no_spec
// There are two versions of this tests: one with and one without specialization.
// The bug was only exposed *without* specialization.
trait T extends Any {
def x: Any
}
final class StringOps(val repr0: String) extends AnyVal with T {
def x = ()
}
|
yusuke2255/dotty
|
tests/pos/valueclasses/value-class-override-no-spec.scala
|
Scala
|
bsd-3-clause
| 292 |
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mark Harrah
*/
package xsbt.boot
import java.io.File
// <boot.directory>
//     [<scala-org>.]scala-<scala.version>/    [baseDirectoryName]
//         lib/                                [ScalaDirectoryName]
//         <app.name>-<app.version>/           [appDirectoryName]
//
// see also ProjectProperties for the set of constants that apply to the build.properties file in a project
// The scala organization is used as a prefix in baseDirectoryName when a non-standard organization is used.
private object BootConfiguration
{
// these are the Scala module identifiers to resolve/retrieve
val ScalaOrg = "org.scala-lang"
val CompilerModuleName = "scala-compiler"
val LibraryModuleName = "scala-library"
val JUnitName = "junit"
val SbtOrg = "org.scala-sbt"
/** The Ivy conflict manager to use for updating.*/
val ConflictManagerName = "latest-revision"
/** The name of the local Ivy repository, which is used when compiling sbt from source.*/
val LocalIvyName = "local"
/** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/
val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]"
/** The artifact pattern used for the local Ivy repository.*/
def LocalArtifactPattern = LocalPattern
/** The Ivy pattern used for the local Ivy repository.*/
def LocalIvyPattern = LocalPattern
final val FjbgPackage = "ch.epfl.lamp.fjbg."
/** The class name prefix used to hide the Scala classes used by this loader from the application */
final val ScalaPackage = "scala."
/** The class name prefix used to hide the Ivy classes used by this loader from the application*/
final val IvyPackage = "org.apache.ivy."
/** The class name prefix used to hide the launcher classes from the application.
* Note that access to xsbti classes is allowed.*/
final val SbtBootPackage = "xsbt.boot."
/** The prefix for JLine resources.*/
final val JLinePackagePath = "jline/"
/** The loader will check that these classes can be loaded and will assume that their presence indicates
* the Scala compiler and library have been downloaded.*/
val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil
val ScalaHomeProperty = "scala.home"
val UpdateLogName = "update.log"
val DefaultChecksums = "sha1" :: "md5" :: Nil
val DefaultIvyConfiguration = "default"
/** The name of the directory within the boot directory to retrieve scala to. */
val ScalaDirectoryName = "lib"
/** The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory
* containing all jars for the requested version of scala. */
val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]"
def artifactType(classifier: String) =
classifier match
{
case "sources" => "src"
case "javadoc" => "doc"
case _ => "jar"
}
/** The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory
* containing all jars for the requested version of scala. */
def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]"
val ScalaVersionPrefix = "scala-"
/** The name of the directory to retrieve the application and its dependencies to.*/
def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version
/** The name of the directory in the boot directory to put all jars for the given version of scala in.*/
def baseDirectoryName(scalaOrg: String, scalaVersion: Option[String]) = scalaVersion match {
case None => "other"
case Some(sv) => (if (scalaOrg == ScalaOrg) "" else scalaOrg + ".") + ScalaVersionPrefix + sv
}
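// Illustrative values for the naming helpers above (not in the original source):
// baseDirectoryName(ScalaOrg, Some("2.9.1")) == "scala-2.9.1"
// baseDirectoryName("other.org", Some("2.9.1")) == "other.org.scala-2.9.1"
// baseDirectoryName(ScalaOrg, None) == "other"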
def extractScalaVersion(dir: File): Option[String] =
{
val name = dir.getName
if(name.contains(ScalaVersionPrefix))
Some(name.substring(name.lastIndexOf(ScalaVersionPrefix) + ScalaVersionPrefix.length))
else
None
}
}
private object ProxyProperties
{
val HttpProxyEnv = "http_proxy"
val HttpProxyUser = "http_proxy_user"
val HttpProxyPassword = "http_proxy_pass"
val ProxyHost = "http.proxyHost"
val ProxyPort = "http.proxyPort"
val ProxyUser = "http.proxyUser"
val ProxyPassword = "http.proxyPassword"
}
|
olove/xsbt
|
launch/src/main/scala/xsbt/boot/BootConfiguration.scala
|
Scala
|
bsd-3-clause
| 4,313 |