code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M) |
---|---|---|---|---|---|
package almhirt.corex.spray.service
import scala.language.postfixOps
import almhirt.common._
import spray.routing._
import spray.http._
import spray.routing.directives._
import spray.httpx.marshalling.Marshaller
import spray.httpx.unmarshalling.Unmarshaller
import almhirt.akkax._
import almhirt.httpx.spray.marshalling._
trait HttpEventEndpointFactory extends Directives {
def publish(payload: AnyRef)
def createEventEndpoint(publish: AnyRef ⇒ Unit)(implicit eventUnmarshaller: Unmarshaller[Event]): RequestContext ⇒ Unit = {
val putEventDirective = (post) & entity(as[Event])
putEventDirective {
event ⇒
ctx ⇒ {
publish(event)
ctx.complete(StatusCodes.Accepted, event.eventId.value.toString())
}
}
}
}
| chridou/almhirt | ext/almhirt-corex-spray-service/src/main/scala/almhirt/corex/spray/service/HttpEventEndpointFactory.scala | Scala | apache-2.0 | 804 |
import scala.reflect.ClassTag
import scala.runtime.BoxedUnit
object Test {
def main(args: Array[String]): Unit = {
println(implicitly[ClassTag[Unit]] == ClassTag.Unit)
println(implicitly[ClassTag[Boolean]] == ClassTag.Boolean)
println(implicitly[ClassTag[Byte]] == ClassTag.Byte)
println(implicitly[ClassTag[Char]] == ClassTag.Char)
println(implicitly[ClassTag[Short]] == ClassTag.Short)
println(implicitly[ClassTag[Int]] == ClassTag.Int)
println(implicitly[ClassTag[Long]] == ClassTag.Long)
println(implicitly[ClassTag[Float]] == ClassTag.Float)
println(implicitly[ClassTag[Double]] == ClassTag.Double)
println(implicitly[ClassTag[Object]] == ClassTag.Object)
println(implicitly[ClassTag[Any]] == ClassTag.Any)
println(implicitly[ClassTag[AnyRef]] == ClassTag.AnyRef)
println(implicitly[ClassTag[AnyVal]] == ClassTag.AnyVal)
println(implicitly[ClassTag[BoxedUnit]] != ClassTag.Unit)
}
}
| lampepfl/dotty | tests/run/i4205.scala | Scala | apache-2.0 | 950 |
package com.twitter.server
import com.twitter.app.GlobalFlag
import com.twitter.finagle.{Addr, Resolver, Name}
import com.twitter.util.Var
// TODO: deprecate in favor of Wily dtabs.
object resolverMap extends GlobalFlag[Map[String, String]](Map.empty,
"A list mapping service names to resolvers (gizmoduck=zk!/gizmoduck)")
/**
* Indicates that a [[com.twitter.finagle.Resolver]] was not found for the
* given `name` using the FlagResolver.
*
* Resolvers are discovered via the com.twitter.server.resolverMap
*/
class NamedResolverNotFoundException(scheme: String, name: String)
extends Exception(s"Resolver not found for scheme '$scheme' with name '$name'. " +
s"resolverMap = ${resolverMap().keySet.toSeq.sorted.mkString(",")}")
class FlagResolver extends Resolver {
val scheme = "flag"
private[this] def resolvers = resolverMap()
def bind(arg: String): Var[Addr] = resolvers.get(arg) match {
case Some(target) =>
Resolver.eval(target) match {
case Name.Bound(va) => va
case Name.Path(_) =>
Var.value(Addr.Failed(new IllegalArgumentException("Cannot bind to trees")))
}
case None =>
val a = Addr.Failed(new NamedResolverNotFoundException(scheme, arg))
Var.value(a)
}
}
| travisbrown/twitter-server | src/main/scala/com/twitter/server/FlagResolver.scala | Scala | apache-2.0 | 1,257 |
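The resolverMap doc comment above describes how names are looked up through the `flag` scheme. The snippet below is a hedged usage sketch, not part of the original repository: it only uses the FlagResolver API shown above, and the `Flag.let` and `Var.sample` helpers it relies on are assumed from com.twitter.app / com.twitter.util.
import com.twitter.finagle.Addr
import com.twitter.server.{FlagResolver, resolverMap}
import com.twitter.util.Var
object FlagResolverSketch {
  def main(args: Array[String]): Unit = {
    val resolver = new FlagResolver
    // With an empty resolverMap, bind yields Addr.Failed(NamedResolverNotFoundException).
    val missing: Var[Addr] = resolver.bind("gizmoduck")
    // Scoping a value onto the flag (Flag.let is assumed here) makes the name resolvable,
    // delegating to the mapped target, e.g. "zk!/gizmoduck".
    resolverMap.let(Map("gizmoduck" -> "zk!/gizmoduck")) {
      val bound: Var[Addr] = resolver.bind("gizmoduck")
      println(Var.sample(bound))
    }
  }
}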
/**
* This file is part of agora-board.
* Copyright (C) 2016 Agora Voting SL <[email protected]>
* agora-board is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License.
* agora-board is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with agora-board. If not, see <http://www.gnu.org/licenses/>.
**/
import javax.inject._
import play.api._
import play.api.http.HttpFilters
import play.api.mvc._
import filters.ExampleFilter
/**
* This class configures filters that run on every request. This
* class is queried by Play to get a list of filters.
*
* Play will automatically use filters from any class called
* `Filters` that is placed in the root package. You can load filters
* from a different class by adding a `play.http.filters` setting to
* the `application.conf` configuration file.
*
* @param env Basic environment settings for the current application.
* @param exampleFilter A demonstration filter that adds a header to
* each response.
*/
@Singleton
class Filters @Inject() (
env: Environment,
exampleFilter: ExampleFilter) extends HttpFilters {
override val filters = {
// Use the example filter if we're running in development mode. If
// we're running in production or test mode then don't use any
// filters at all.
if (env.mode == Mode.Dev) Seq(exampleFilter) else Seq.empty
}
}
| agoravoting/agora-board | app/Filters.scala | Scala | agpl-3.0 | 1,755 |
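The Filters doc comment above refers to an ExampleFilter that adds a header to each response. The project's actual filter lives in filters/ExampleFilter.scala and is not shown here; the sketch below is a hypothetical stand-in assuming the standard Play Filter API (a Materializer plus a RequestHeader => Future[Result] continuation).
package filters
import javax.inject._
import akka.stream.Materializer
import play.api.mvc._
import scala.concurrent.{ExecutionContext, Future}
// Hypothetical sketch of a header-adding filter; not the project's actual ExampleFilter.
@Singleton
class ExampleFilter @Inject() (implicit val mat: Materializer, ec: ExecutionContext) extends Filter {
  def apply(next: RequestHeader => Future[Result])(request: RequestHeader): Future[Result] =
    next(request).map(_.withHeaders("X-ExampleFilter" -> "enabled"))
}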
package pl.jozwik.demo
import com.typesafe.scalalogging.StrictLogging
object Main extends App with StrictLogging {
logger.debug(s"""Hello world ${args.mkString(",")}""")
val a = 3
val b = 4
logger.debug(s"""$a + $b = ${Demo.add(a, b)}""")
}
| ajozwik/sbt-start | src/main/scala/pl/jozwik/demo/Main.scala | Scala | apache-2.0 | 252 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.controller
import kafka.api.{LeaderAndIsr, KAFKA_083, PartitionStateInfo}
import kafka.utils._
import org.apache.kafka.clients.{ClientResponse, ClientRequest, ManualMetadataUpdater, NetworkClient}
import org.apache.kafka.common.{TopicPartition, Node}
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network.{Selectable, ChannelBuilders, Selector, NetworkReceive}
import org.apache.kafka.common.protocol.{SecurityProtocol, ApiKeys}
import org.apache.kafka.common.requests._
import org.apache.kafka.common.security.ssl.SSLFactory
import org.apache.kafka.common.utils.Time
import collection.mutable.HashMap
import kafka.cluster.Broker
import java.net.{SocketTimeoutException}
import java.util.concurrent.{LinkedBlockingQueue, BlockingQueue}
import kafka.server.KafkaConfig
import collection.mutable
import kafka.common.{KafkaException, TopicAndPartition}
import collection.Set
import collection.JavaConverters._
class ControllerChannelManager(controllerContext: ControllerContext, config: KafkaConfig, time: Time, metrics: Metrics) extends Logging {
protected val brokerStateInfo = new HashMap[Int, ControllerBrokerStateInfo]
private val brokerLock = new Object
this.logIdent = "[Channel manager on controller " + config.brokerId + "]: "
controllerContext.liveBrokers.foreach(addNewBroker(_))
def startup() = {
brokerLock synchronized {
brokerStateInfo.foreach(brokerState => startRequestSendThread(brokerState._1))
}
}
def shutdown() = {
brokerLock synchronized {
brokerStateInfo.values.foreach(removeExistingBroker)
}
}
def sendRequest(brokerId: Int, apiKey: ApiKeys, apiVersion: Option[Short], request: AbstractRequest, callback: AbstractRequestResponse => Unit = null) {
brokerLock synchronized {
val stateInfoOpt = brokerStateInfo.get(brokerId)
stateInfoOpt match {
case Some(stateInfo) =>
stateInfo.messageQueue.put(QueueItem(apiKey, apiVersion, request, callback))
case None =>
warn("Not sending request %s to broker %d, since it is offline.".format(request, brokerId))
}
}
}
def addBroker(broker: Broker) {
// be careful here. Maybe the startup() API has already started the request send thread
brokerLock synchronized {
if(!brokerStateInfo.contains(broker.id)) {
addNewBroker(broker)
startRequestSendThread(broker.id)
}
}
}
def removeBroker(brokerId: Int) {
brokerLock synchronized {
removeExistingBroker(brokerStateInfo(brokerId))
}
}
private def addNewBroker(broker: Broker) {
val messageQueue = new LinkedBlockingQueue[QueueItem]
debug("Controller %d trying to connect to broker %d".format(config.brokerId, broker.id))
val brokerEndPoint = broker.getBrokerEndPoint(config.interBrokerSecurityProtocol)
val brokerNode = new Node(broker.id, brokerEndPoint.host, brokerEndPoint.port)
val networkClient = {
val selector = new Selector(
NetworkReceive.UNLIMITED,
config.connectionsMaxIdleMs,
metrics,
time,
"controller-channel",
Map("broker-id" -> broker.id.toString).asJava,
false,
ChannelBuilders.create(config.interBrokerSecurityProtocol, SSLFactory.Mode.CLIENT, config.channelConfigs)
)
new NetworkClient(
selector,
new ManualMetadataUpdater(Seq(brokerNode).asJava),
config.brokerId.toString,
1,
0,
Selectable.USE_DEFAULT_BUFFER_SIZE,
Selectable.USE_DEFAULT_BUFFER_SIZE,
config.requestTimeoutMs
)
}
val requestThread = new RequestSendThread(config.brokerId, controllerContext, broker, messageQueue, networkClient, brokerNode, config, time)
requestThread.setDaemon(false)
brokerStateInfo.put(broker.id, new ControllerBrokerStateInfo(networkClient, brokerNode, broker, messageQueue, requestThread))
}
private def removeExistingBroker(brokerState: ControllerBrokerStateInfo) {
try {
brokerState.networkClient.close()
brokerState.messageQueue.clear()
brokerState.requestSendThread.shutdown()
brokerStateInfo.remove(brokerState.broker.id)
} catch {
case e: Throwable => error("Error while removing broker by the controller", e)
}
}
protected def startRequestSendThread(brokerId: Int) {
val requestThread = brokerStateInfo(brokerId).requestSendThread
if(requestThread.getState == Thread.State.NEW)
requestThread.start()
}
}
case class QueueItem(apiKey: ApiKeys, apiVersion: Option[Short], request: AbstractRequest, callback: AbstractRequestResponse => Unit)
class RequestSendThread(val controllerId: Int,
val controllerContext: ControllerContext,
val toBroker: Broker,
val queue: BlockingQueue[QueueItem],
val networkClient: NetworkClient,
val brokerNode: Node,
val config: KafkaConfig,
val time: Time)
extends ShutdownableThread("Controller-%d-to-broker-%d-send-thread".format(controllerId, toBroker.id)) {
private val lock = new Object()
private val stateChangeLogger = KafkaController.stateChangeLogger
private val socketTimeoutMs = config.controllerSocketTimeoutMs
override def doWork(): Unit = {
def backoff(): Unit = CoreUtils.swallowTrace(Thread.sleep(300))
val QueueItem(apiKey, apiVersion, request, callback) = queue.take()
import NetworkClientBlockingOps._
var clientResponse: ClientResponse = null
try {
lock synchronized {
var isSendSuccessful = false
while (isRunning.get() && !isSendSuccessful) {
// if a broker goes down for a long time, then at some point the controller's zookeeper listener will trigger a
// removeBroker which will invoke shutdown() on this thread. At that point, we will stop retrying.
try {
if (!brokerReady()) {
isSendSuccessful = false
backoff()
}
else {
val requestHeader = apiVersion.fold(networkClient.nextRequestHeader(apiKey))(networkClient.nextRequestHeader(apiKey, _))
val send = new RequestSend(brokerNode.idString, requestHeader, request.toStruct)
val clientRequest = new ClientRequest(time.milliseconds(), true, send, null)
clientResponse = networkClient.blockingSendAndReceive(clientRequest, socketTimeoutMs)(time).getOrElse {
throw new SocketTimeoutException(s"No response received within $socketTimeoutMs ms")
}
isSendSuccessful = true
}
} catch {
case e: Throwable => // if the send was not successful, reconnect to broker and resend the message
warn(("Controller %d epoch %d fails to send request %s to broker %s. " +
"Reconnecting to broker.").format(controllerId, controllerContext.epoch,
request.toString, toBroker.toString()), e)
networkClient.close(brokerNode.idString)
isSendSuccessful = false
backoff()
}
}
if (clientResponse != null) {
val response = ApiKeys.forId(clientResponse.request.request.header.apiKey) match {
case ApiKeys.LEADER_AND_ISR => new LeaderAndIsrResponse(clientResponse.responseBody)
case ApiKeys.STOP_REPLICA => new StopReplicaResponse(clientResponse.responseBody)
case ApiKeys.UPDATE_METADATA_KEY => new UpdateMetadataResponse(clientResponse.responseBody)
case apiKey => throw new KafkaException(s"Unexpected apiKey received: $apiKey")
}
stateChangeLogger.trace("Controller %d epoch %d received response %s for a request sent to broker %s"
.format(controllerId, controllerContext.epoch, response.toString, toBroker.toString))
if (callback != null) {
callback(response)
}
}
}
} catch {
case e: Throwable =>
error("Controller %d fails to send a request to broker %s".format(controllerId, toBroker.toString()), e)
// If there is any socket error (eg, socket timeout), the connection is no longer usable and needs to be recreated.
networkClient.close(brokerNode.idString)
}
}
private def brokerReady(): Boolean = {
import NetworkClientBlockingOps._
try {
if (networkClient.isReady(brokerNode, time.milliseconds()))
true
else {
val ready = networkClient.blockingReady(brokerNode, socketTimeoutMs)(time)
if (!ready)
throw new SocketTimeoutException(s"Failed to connect within $socketTimeoutMs ms")
info("Controller %d connected to %s for sending state change requests".format(controllerId, toBroker.toString()))
true
}
} catch {
case e: Throwable =>
error("Controller %d's connection to broker %s was unsuccessful".format(controllerId, toBroker.toString()), e)
networkClient.close(brokerNode.idString)
false
}
}
}
class ControllerBrokerRequestBatch(controller: KafkaController) extends Logging {
val controllerContext = controller.controllerContext
val controllerId: Int = controller.config.brokerId
val leaderAndIsrRequestMap = mutable.Map.empty[Int, mutable.Map[TopicPartition, PartitionStateInfo]]
val stopReplicaRequestMap = mutable.Map.empty[Int, Seq[StopReplicaRequestInfo]]
val updateMetadataRequestMap = mutable.Map.empty[Int, mutable.Map[TopicPartition, PartitionStateInfo]]
private val stateChangeLogger = KafkaController.stateChangeLogger
def newBatch() {
// raise error if the previous batch is not empty
if (leaderAndIsrRequestMap.size > 0)
throw new IllegalStateException("Controller to broker state change requests batch is not empty while creating " +
"a new one. Some LeaderAndIsr state changes %s might be lost ".format(leaderAndIsrRequestMap.toString()))
if (stopReplicaRequestMap.size > 0)
throw new IllegalStateException("Controller to broker state change requests batch is not empty while creating a " +
"new one. Some StopReplica state changes %s might be lost ".format(stopReplicaRequestMap.toString()))
if (updateMetadataRequestMap.size > 0)
throw new IllegalStateException("Controller to broker state change requests batch is not empty while creating a " +
"new one. Some UpdateMetadata state changes %s might be lost ".format(updateMetadataRequestMap.toString()))
}
def addLeaderAndIsrRequestForBrokers(brokerIds: Seq[Int], topic: String, partition: Int,
leaderIsrAndControllerEpoch: LeaderIsrAndControllerEpoch,
replicas: Seq[Int], callback: AbstractRequestResponse => Unit = null) {
val topicPartition = new TopicPartition(topic, partition)
brokerIds.filter(_ >= 0).foreach { brokerId =>
val result = leaderAndIsrRequestMap.getOrElseUpdate(brokerId, mutable.Map.empty)
result.put(topicPartition, PartitionStateInfo(leaderIsrAndControllerEpoch, replicas.toSet))
}
addUpdateMetadataRequestForBrokers(controllerContext.liveOrShuttingDownBrokerIds.toSeq,
Set(TopicAndPartition(topic, partition)))
}
def addStopReplicaRequestForBrokers(brokerIds: Seq[Int], topic: String, partition: Int, deletePartition: Boolean,
callback: (AbstractRequestResponse, Int) => Unit = null) {
brokerIds.filter(b => b >= 0).foreach { brokerId =>
stopReplicaRequestMap.getOrElseUpdate(brokerId, Seq.empty[StopReplicaRequestInfo])
val v = stopReplicaRequestMap(brokerId)
if(callback != null)
stopReplicaRequestMap(brokerId) = v :+ StopReplicaRequestInfo(PartitionAndReplica(topic, partition, brokerId),
deletePartition, (r: AbstractRequestResponse) => callback(r, brokerId))
else
stopReplicaRequestMap(brokerId) = v :+ StopReplicaRequestInfo(PartitionAndReplica(topic, partition, brokerId),
deletePartition)
}
}
/** Send UpdateMetadataRequest to the given brokers for the given partitions and partitions that are being deleted */
def addUpdateMetadataRequestForBrokers(brokerIds: Seq[Int],
partitions: collection.Set[TopicAndPartition] = Set.empty[TopicAndPartition],
callback: AbstractRequestResponse => Unit = null) {
def updateMetadataRequestMapFor(partition: TopicAndPartition, beingDeleted: Boolean) {
val leaderIsrAndControllerEpochOpt = controllerContext.partitionLeadershipInfo.get(partition)
leaderIsrAndControllerEpochOpt match {
case Some(leaderIsrAndControllerEpoch) =>
val replicas = controllerContext.partitionReplicaAssignment(partition).toSet
val partitionStateInfo = if (beingDeleted) {
val leaderAndIsr = new LeaderAndIsr(LeaderAndIsr.LeaderDuringDelete, leaderIsrAndControllerEpoch.leaderAndIsr.isr)
PartitionStateInfo(LeaderIsrAndControllerEpoch(leaderAndIsr, leaderIsrAndControllerEpoch.controllerEpoch), replicas)
} else {
PartitionStateInfo(leaderIsrAndControllerEpoch, replicas)
}
brokerIds.filter(b => b >= 0).foreach { brokerId =>
updateMetadataRequestMap.getOrElseUpdate(brokerId, mutable.Map.empty[TopicPartition, PartitionStateInfo])
updateMetadataRequestMap(brokerId).put(new TopicPartition(partition.topic, partition.partition), partitionStateInfo)
}
case None =>
info("Leader not yet assigned for partition %s. Skip sending UpdateMetadataRequest.".format(partition))
}
}
val filteredPartitions = {
val givenPartitions = if (partitions.isEmpty)
controllerContext.partitionLeadershipInfo.keySet
else
partitions
if (controller.deleteTopicManager.partitionsToBeDeleted.isEmpty)
givenPartitions
else
givenPartitions -- controller.deleteTopicManager.partitionsToBeDeleted
}
if (filteredPartitions.isEmpty)
brokerIds.filter(b => b >= 0).foreach { brokerId =>
updateMetadataRequestMap.getOrElseUpdate(brokerId, mutable.Map.empty[TopicPartition, PartitionStateInfo])
}
else
filteredPartitions.foreach(partition => updateMetadataRequestMapFor(partition, beingDeleted = false))
controller.deleteTopicManager.partitionsToBeDeleted.foreach(partition => updateMetadataRequestMapFor(partition, beingDeleted = true))
}
def sendRequestsToBrokers(controllerEpoch: Int) {
try {
leaderAndIsrRequestMap.foreach { case (broker, partitionStateInfos) =>
partitionStateInfos.foreach { case (topicPartition, state) =>
val typeOfRequest = if (broker == state.leaderIsrAndControllerEpoch.leaderAndIsr.leader) "become-leader" else "become-follower"
stateChangeLogger.trace(("Controller %d epoch %d sending %s LeaderAndIsr request %s to broker %d " +
"for partition [%s,%d]").format(controllerId, controllerEpoch, typeOfRequest,
state.leaderIsrAndControllerEpoch, broker,
topicPartition.topic, topicPartition.partition))
}
val leaderIds = partitionStateInfos.map(_._2.leaderIsrAndControllerEpoch.leaderAndIsr.leader).toSet
val leaders = controllerContext.liveOrShuttingDownBrokers.filter(b => leaderIds.contains(b.id)).map { b =>
val brokerEndPoint = b.getBrokerEndPoint(controller.config.interBrokerSecurityProtocol)
new LeaderAndIsrRequest.EndPoint(brokerEndPoint.id, brokerEndPoint.host, brokerEndPoint.port)
}
val partitionStates = partitionStateInfos.map { case (topicPartition, partitionStateInfo) =>
val LeaderIsrAndControllerEpoch(leaderIsr, controllerEpoch) = partitionStateInfo.leaderIsrAndControllerEpoch
val partitionState = new LeaderAndIsrRequest.PartitionState(controllerEpoch, leaderIsr.leader,
leaderIsr.leaderEpoch, leaderIsr.isr.map(Integer.valueOf).asJava, leaderIsr.zkVersion,
partitionStateInfo.allReplicas.map(Integer.valueOf).asJava
)
topicPartition -> partitionState
}
val leaderAndIsrRequest = new LeaderAndIsrRequest(controllerId, controllerEpoch, partitionStates.asJava, leaders.asJava)
controller.sendRequest(broker, ApiKeys.LEADER_AND_ISR, None, leaderAndIsrRequest, null)
}
leaderAndIsrRequestMap.clear()
updateMetadataRequestMap.foreach { case (broker, partitionStateInfos) =>
partitionStateInfos.foreach(p => stateChangeLogger.trace(("Controller %d epoch %d sending UpdateMetadata request %s " +
"to broker %d for partition %s").format(controllerId, controllerEpoch, p._2.leaderIsrAndControllerEpoch,
broker, p._1)))
val partitionStates = partitionStateInfos.map { case (topicPartition, partitionStateInfo) =>
val LeaderIsrAndControllerEpoch(leaderIsr, controllerEpoch) = partitionStateInfo.leaderIsrAndControllerEpoch
val partitionState = new UpdateMetadataRequest.PartitionState(controllerEpoch, leaderIsr.leader,
leaderIsr.leaderEpoch, leaderIsr.isr.map(Integer.valueOf).asJava, leaderIsr.zkVersion,
partitionStateInfo.allReplicas.map(Integer.valueOf).asJava
)
topicPartition -> partitionState
}
val version = if (controller.config.interBrokerProtocolVersion.onOrAfter(KAFKA_083)) (1: Short) else (0: Short)
val updateMetadataRequest =
if (version == 0) {
val liveBrokers = controllerContext.liveOrShuttingDownBrokers.map { broker =>
val brokerEndPoint = broker.getBrokerEndPoint(SecurityProtocol.PLAINTEXT)
new UpdateMetadataRequest.BrokerEndPoint(brokerEndPoint.id, brokerEndPoint.host, brokerEndPoint.port)
}
new UpdateMetadataRequest(controllerId, controllerEpoch, liveBrokers.asJava, partitionStates.asJava)
}
else {
val liveBrokers = controllerContext.liveOrShuttingDownBrokers.map { broker =>
val endPoints = broker.endPoints.map { case (securityProtocol, endPoint) =>
securityProtocol -> new UpdateMetadataRequest.EndPoint(endPoint.host, endPoint.port)
}
new UpdateMetadataRequest.Broker(broker.id, endPoints.asJava)
}
new UpdateMetadataRequest(controllerId, controllerEpoch, partitionStates.asJava, liveBrokers.asJava)
}
controller.sendRequest(broker, ApiKeys.UPDATE_METADATA_KEY, Some(version), updateMetadataRequest, null)
}
updateMetadataRequestMap.clear()
stopReplicaRequestMap.foreach { case (broker, replicaInfoList) =>
val stopReplicaWithDelete = replicaInfoList.filter(_.deletePartition).map(_.replica).toSet
val stopReplicaWithoutDelete = replicaInfoList.filterNot(_.deletePartition).map(_.replica).toSet
debug("The stop replica request (delete = true) sent to broker %d is %s"
.format(broker, stopReplicaWithDelete.mkString(",")))
debug("The stop replica request (delete = false) sent to broker %d is %s"
.format(broker, stopReplicaWithoutDelete.mkString(",")))
replicaInfoList.foreach { r =>
val stopReplicaRequest = new StopReplicaRequest(controllerId, controllerEpoch, r.deletePartition,
Set(new TopicPartition(r.replica.topic, r.replica.partition)).asJava)
controller.sendRequest(broker, ApiKeys.STOP_REPLICA, None, stopReplicaRequest, r.callback)
}
}
stopReplicaRequestMap.clear()
} catch {
case e : Throwable => {
if (leaderAndIsrRequestMap.size > 0) {
error("Haven't been able to send leader and isr requests, current state of " +
s"the map is $leaderAndIsrRequestMap")
}
if (updateMetadataRequestMap.size > 0) {
error("Haven't been able to send metadata update requests, current state of " +
s"the map is $updateMetadataRequestMap")
}
if (stopReplicaRequestMap.size > 0) {
error("Haven't been able to send stop replica requests, current state of " +
s"the map is $stopReplicaRequestMap")
}
throw new IllegalStateException(e)
}
}
}
}
case class ControllerBrokerStateInfo(networkClient: NetworkClient,
brokerNode: Node,
broker: Broker,
messageQueue: BlockingQueue[QueueItem],
requestSendThread: RequestSendThread)
case class StopReplicaRequestInfo(replica: PartitionAndReplica, deletePartition: Boolean, callback: AbstractRequestResponse => Unit = null)
class Callbacks private (var leaderAndIsrResponseCallback: AbstractRequestResponse => Unit = null,
var updateMetadataResponseCallback: AbstractRequestResponse => Unit = null,
var stopReplicaResponseCallback: (AbstractRequestResponse, Int) => Unit = null)
object Callbacks {
class CallbackBuilder {
var leaderAndIsrResponseCbk: AbstractRequestResponse => Unit = null
var updateMetadataResponseCbk: AbstractRequestResponse => Unit = null
var stopReplicaResponseCbk: (AbstractRequestResponse, Int) => Unit = null
def leaderAndIsrCallback(cbk: AbstractRequestResponse => Unit): CallbackBuilder = {
leaderAndIsrResponseCbk = cbk
this
}
def updateMetadataCallback(cbk: AbstractRequestResponse => Unit): CallbackBuilder = {
updateMetadataResponseCbk = cbk
this
}
def stopReplicaCallback(cbk: (AbstractRequestResponse, Int) => Unit): CallbackBuilder = {
stopReplicaResponseCbk = cbk
this
}
def build: Callbacks = {
new Callbacks(leaderAndIsrResponseCbk, updateMetadataResponseCbk, stopReplicaResponseCbk)
}
}
}
| reiseburo/kafka | core/src/main/scala/kafka/controller/ControllerChannelManager.scala | Scala | apache-2.0 | 23,045 |
package dzufferey.utils
import scala.language.experimental.macros
import scala.reflect.macros.blackbox.Context
import LogLevel._
/** Simple logger that outputs to stdout. */
object Logger {
val lock = new java.util.concurrent.locks.ReentrantLock
private var minPriority = Notice.priority
val disallowed = scala.collection.mutable.HashSet.empty[String]
def reset = {
minPriority = Info.priority
disallowed.clear()
}
def getMinPriority = minPriority match {
case x if x == Critical.priority => Critical
case x if x == Error.priority => Error
case x if x == Warning.priority => Warning
case x if x == Notice.priority => Notice
case x if x == Info.priority => Info
case x if x == Debug.priority => Debug
case p => sys.error("unknown priority ("+p+")")
}
def setMinPriority(lvl: Level) = minPriority = lvl.priority
def setMinPriority(lvl: Int) = minPriority = lvl
def disallow(str: String) = disallowed += str
def allow(str: String) = disallowed -= str
private def increaseLevel(l: Level): Level = l match {
case Critical => Error
case Error => Warning
case Warning => Notice
case Notice => Info
case Info => Debug
case Debug => Debug
}
private def decreaseLevel(l: Level): Level = l match {
case Critical => Critical
case Error => Critical
case Warning => Error
case Notice => Warning
case Info => Notice
case Debug => Info
}
def moreVerbose = setMinPriority( increaseLevel(getMinPriority))
def lessVerbose = setMinPriority( decreaseLevel(getMinPriority))
/** Should this message be displayed? */
def apply(relatedTo: String, lvl: Level): Boolean =
lvl.priority >= minPriority && !disallowed(relatedTo)
//The evaluation of the content should *NOT* print. It can cause deadlocks.
/** Log a message to the console.
* @param relatedTo The package/file/class from where this message comes from.
* @param lvl The priority of the message.
* @param content The content of the message (evaluated only if needed).
*/
def apply(relatedTo: String, lvl: Level, content: String): Unit = macro LoggerMacros.string
def apply(relatedTo: String, lvl: Level, content: java.io.BufferedWriter => Unit): Unit = macro LoggerMacros.writer
/** Log a message and throw an exception with the content. */
def logAndThrow(relatedTo: String, lvl: Level, content: String): Nothing = macro LoggerMacros.logAndThrow
def assert(cond: Boolean, relatedTo: String, content: String): Unit = macro LoggerMacros.assert
}
class LoggerMacros(val c: Context) {
import c.universe._
val isEnabled = System.getProperty("disableLogging") != "true"
def string(relatedTo: c.Expr[String], lvl: c.Expr[Level], content: c.Expr[String]): c.Expr[Unit] = {
val tree = if (isEnabled) {
q"""
if (dzufferey.utils.Logger($relatedTo, $lvl)) {
val prefix = "[" + $lvl.color + $lvl.message + scala.Console.RESET + "]" + " @ " + $relatedTo + ": "
val writer = new java.io.BufferedWriter(new dzufferey.utils.PrefixingWriter(prefix, scala.Console.out))
dzufferey.utils.Logger.lock.lock
try {
writer.write($content)
writer.append('\\n')
writer.flush()
} finally {
dzufferey.utils.Logger.lock.unlock
}
}
"""
} else q"()"
c.Expr[Unit](tree)
}
def writer(relatedTo: c.Expr[String], lvl: c.Expr[Level], content: c.Expr[java.io.BufferedWriter => Unit]): c.Expr[Unit] = {
val tree = if (isEnabled) {
q"""
if (dzufferey.utils.Logger($relatedTo, $lvl)) {
val prefix = "[" + $lvl.color + $lvl.message + scala.Console.RESET + "]" + " @ " + $relatedTo + ": "
val writer = new java.io.BufferedWriter(new dzufferey.utils.PrefixingWriter(prefix, scala.Console.out))
dzufferey.utils.Logger.lock.lock
try {
$content(writer)
writer.flush()
} finally {
dzufferey.utils.Logger.lock.unlock
}
}
"""
} else q"()"
c.Expr[Unit](tree)
}
def logAndThrow(relatedTo: c.Expr[String], lvl: c.Expr[Level], content: c.Expr[String]): c.Expr[Nothing] = {
val tree = if (isEnabled) {
q"""
{
val c = $content
dzufferey.utils.Logger($relatedTo, $lvl, c)
scala.Console.flush()
sys.error(c)
}
"""
} else {
q"""
sys.error($content)
"""
}
c.Expr[Nothing](tree)
}
def assert(cond: c.Expr[Boolean], relatedTo: c.Expr[String], content: c.Expr[String]): c.Expr[Unit] = {
val tree = if (isEnabled) {
q"""
if (!$cond) {
dzufferey.utils.Logger.logAndThrow($relatedTo, dzufferey.utils.LogLevel.Error, $content)
}
"""
} else q"()"
c.Expr[Unit](tree)
}
}
| dzufferey/misc-scala-utils | src/main/scala-2/dzufferey/utils/Logger.scala | Scala | apache-2.0 | 4,924 |
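As a hedged usage sketch of the logger above (the object name and messages are illustrative, not from the original repository): the macros expand to a level check before the message is built, so content is only evaluated when it will actually be written.
import dzufferey.utils.Logger
import dzufferey.utils.LogLevel._
object LoggerSketch {
  def main(args: Array[String]): Unit = {
    Logger.setMinPriority(Debug)                 // accept everything from Debug upward
    Logger("LoggerSketch", Info, "starting up")  // written with a level-tagged prefix
    Logger.disallow("ChattyComponent")           // mute a single component by name
    Logger.assert(args.nonEmpty, "LoggerSketch", "expected at least one argument")
  }
}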
package org.json4s.reflect
import java.sql.Timestamp
import java.util.Date
import org.json4s.{DateTime, DefaultFormats, Formats, JInt, JObject, JString, MappingException, Obj, Objs, reflect}
import org.scalatest.Assertion
import org.scalatest.wordspec.AnyWordSpec
case class RRSimple(id: Int, name: String, items: List[String], createdAt: Date)
case class RRSimpleJoda(id: Int, name: String, items: List[String], createdAt: DateTime)
case class RROption(
id: Int,
name: String,
status: Option[String],
code: Option[Int],
createdAt: Date,
deletedAt: Option[Date]
)
case class RRTypeParam[T](id: Int, name: String, value: T, opt: Option[T], seq: Seq[T], map: Map[String, T])
case class Response(data: List[Map[String, Int]])
case class NestedType(dat: List[Map[Double, Option[Int]]], lis: List[List[List[List[List[Int]]]]])
case class NestedType3(dat: List[Map[Double, Option[List[Option[Int]]]]], lis: List[List[List[List[List[Int]]]]])
case class NestedType4(
dat: List[Map[Double, Option[List[Map[Long, Option[Int]]]]]],
lis: List[List[List[List[List[Int]]]]]
)
case class NestedType5(
dat: List[Map[Double, Option[List[Map[Long, Option[Map[Byte, Either[Double, Long]]]]]]]],
lis: List[List[List[List[List[Int]]]]]
)
case class NestedResType[T, S, V <: Option[S]](t: T, v: V, dat: List[Map[T, V]], lis: List[List[List[List[List[S]]]]])
case object TheObject
object PathTypes {
type T = Map[String, Double]
case class TypeAliasOfGenericType(p: T)
trait WithCaseClass {
case class FromTrait(name: String)
case class FromTraitRROption(
id: Int,
name: String,
status: Option[String],
code: Option[Int],
createdAt: Date,
deletedAt: Option[Date]
)
// case class FromTraitRRTypeParam[T](id: Int, name: String, value: T, opt: Option[T], seq: Seq[T], map: Map[String, T])
// ..
}
object HasTrait extends WithCaseClass {
def descr = Reflector.describe[FromTrait]
}
class ContainsCaseClass {
case class InternalType(name: String)
def methodWithCaseClass = {
case class InMethod(name: String)
implicit val formats: Formats = DefaultFormats.withCompanions(classOf[InMethod] -> this)
Reflector.describe[InMethod]
}
def methodWithClosure = {
val fn = () => {
case class InFunction(name: String)
// val st = Reflector.scalaTypeOf[InFunction] // -> Reflector.describe[InFunction]
// val sig = ScalaSigReader.findScalaSig(st.erasure)
// val classes = sig.get.symbols.collect({ case c: ClassSymbol => c })
// (st, classes)
Reflector.describe[InFunction]
}
fn()
}
}
}
class NormalClass {
val complex: RRSimple = RRSimple(1, "ba", Nil, new Date)
val string: String = "bla"
val primitive: Int = 1
val optPrimitive: Option[Int] = Some(3)
}
case class PetOwner(firstName: String, lastName: String) {
def this(age: Int) = this("John", "Doe")
}
object PetOwner {
def apply(email: String) = new PetOwner("Russell", "Westbrook")
}
case class Dog(name: String)
case class Cat @PrimaryConstructor() (name: String) {
def this(owner: PetOwner) = this(s"${owner.firstName}'s favorite pet'")
}
object GenericCaseClassWithCompanion {
def apply[A](v: A): GenericCaseClassWithCompanion[A] = GenericCaseClassWithCompanion(v, "Bar")
}
case class GenericCaseClassWithCompanion[A](value: A, other: String)
class ReflectorSpec extends AnyWordSpec {
implicit val formats: Formats = DefaultFormats.withCompanions(
classOf[PathTypes.HasTrait.FromTrait] -> PathTypes.HasTrait,
classOf[PathTypes.HasTrait.FromTraitRROption] -> PathTypes.HasTrait
)
"Reflector" should {
val inst = new PathTypes.ContainsCaseClass
"issue 507" in {
val result = org.json4s.Extraction.decompose(
GenericCaseClassWithCompanion(3)
)
assert(result == JObject(List(("value", JInt(3)), ("other", JString("Bar")))))
}
"describe a class defined in a class constructor" in {
val fmts: Formats = formats.withCompanions(classOf[inst.InternalType] -> inst)
Reflector.describe(manifest[PathTypes.HasTrait.FromTrait], fmts) match {
case d: ClassDescriptor =>
assert(d.constructors.nonEmpty)
assert(d.constructors.head.params.size == 2)
assert(d.properties.size == 1)
case _ => fail("Expected a class descriptor")
}
}
"describe a class defined in a trait constructor" in {
Reflector.describe[PathTypes.HasTrait.FromTrait] match {
case d: ClassDescriptor =>
assert(d.constructors.nonEmpty)
assert(d.constructors.head.params.size == 2)
assert(d.properties.size == 1)
assert(d.companion.map(_.instance) == Some(PathTypes.HasTrait.FromTrait))
assert(d.constructors.head.params(0).defaultValue.get() == PathTypes.HasTrait)
case _ => fail("Expected a class descriptor")
}
}
"describe a class defined in a method" in {
// inst.methodWithCaseClass match {
// case d: ClassDescriptor =>
// println(d)
// assert(d.constructors.nonEmpty)
// d.constructors.head.params.size must_== 1
// d.properties.size must_== 1
// case _ => fail("Expected a class descriptor")
// }
assertThrows[MappingException] { inst.methodWithCaseClass }
}
"describe a class defined in a closure" in {
assertThrows[MappingException] { inst.methodWithClosure }
}
"describe a case object" in {
val descr = Reflector.describe(TheObject.getClass).asInstanceOf[ClassDescriptor]
val res = descr.mostComprehensive
println(Reflector.describe(TheObject.getClass))
res
}
"describe primitives" in {
assert(Reflector.describe[Int] == PrimitiveDescriptor(Reflector.scalaTypeOf[Int]))
assert(Reflector.describe[Byte] == PrimitiveDescriptor(Reflector.scalaTypeOf[Byte]))
assert(Reflector.describe[Short] == PrimitiveDescriptor(Reflector.scalaTypeOf[Short]))
assert(Reflector.describe[Long] == PrimitiveDescriptor(Reflector.scalaTypeOf[Long]))
assert(Reflector.describe[Double] == PrimitiveDescriptor(Reflector.scalaTypeOf[Double]))
assert(Reflector.describe[Float] == PrimitiveDescriptor(Reflector.scalaTypeOf[Float]))
assert(Reflector.describe[java.lang.Integer] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Integer]))
assert(Reflector.describe[java.lang.Byte] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Byte]))
assert(Reflector.describe[java.lang.Short] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Short]))
assert(Reflector.describe[java.lang.Long] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Long]))
assert(Reflector.describe[java.lang.Double] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Double]))
assert(Reflector.describe[java.lang.Float] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.lang.Float]))
assert(Reflector.describe[BigInt] == PrimitiveDescriptor(Reflector.scalaTypeOf[BigInt]))
assert(Reflector.describe[BigDecimal] == PrimitiveDescriptor(Reflector.scalaTypeOf[BigDecimal]))
assert(
Reflector.describe[java.math.BigInteger] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.math.BigInteger])
)
assert(
Reflector.describe[java.math.BigDecimal] == PrimitiveDescriptor(Reflector.scalaTypeOf[java.math.BigDecimal])
)
assert(Reflector.describe[String] == PrimitiveDescriptor(Reflector.scalaTypeOf[String]))
assert(Reflector.describe[Date] == PrimitiveDescriptor(Reflector.scalaTypeOf[Date]))
assert(Reflector.describe[Timestamp] == PrimitiveDescriptor(Reflector.scalaTypeOf[Timestamp]))
}
"Describe a case class with Type Alias of Genric Types" in {
val desc = Reflector.describe[PathTypes.TypeAliasOfGenericType].asInstanceOf[ClassDescriptor]
assert(desc.properties(0).returnType == Reflector.scalaTypeOf[Map[String, Double]])
}
def genericCheckCaseClass(
desc: ObjectDescriptor
)(params: Seq[ConstructorParamDescriptor] => Assertion): Assertion = {
val realDesc = desc.asInstanceOf[ClassDescriptor]
// One for c'tor, one for apply
assert(realDesc.constructors.size == 2)
params(realDesc.constructors(0).params)
params(realDesc.constructors(1).params)
}
def checkCaseClass[A: Manifest](params: Seq[ConstructorParamDescriptor] => Assertion): Assertion = {
val desc = Reflector.describe[A].asInstanceOf[ClassDescriptor]
genericCheckCaseClass(desc)(params)
}
def checkCtorParams(createdAtType: ScalaType)(params: Seq[ConstructorParamDescriptor]): Assertion = {
assert(params(0).name == "id")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[Int])
assert(params(1).name == "name")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[String])
assert(params(2).name == "items")
assert(params(2).defaultValue.isEmpty)
assert(params(2).argType == Reflector.scalaTypeOf[List[String]])
assert(params(3).name == "createdAt")
assert(params(3).defaultValue.isEmpty)
assert(params(3).argType == createdAtType)
}
"describe a simple case class" in checkCaseClass[RRSimple](checkCtorParams(Reflector.scalaTypeOf[Date]))
"describe a simple joda case class" in checkCaseClass[RRSimpleJoda](
checkCtorParams(Reflector.scalaTypeOf[DateTime])
)
"Describe a case class with options" in checkCaseClass[RROption] { params =>
assert(params(0).name == "id")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[Int])
assert(params(1).name == "name")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[String])
assert(params(2).name == "status")
assert(params(2).defaultValue.isEmpty)
assert(params(2).argType == Reflector.scalaTypeOf[Option[String]])
assert(params(2).argType.typeArgs == Seq(Reflector.scalaTypeOf[String]))
assert(params(3).name == "code")
assert(params(3).defaultValue.isEmpty)
assert(params(3).argType == Reflector.scalaTypeOf[Option[Int]])
assert(params(3).argType != Reflector.scalaTypeOf[Option[String]])
assert(params(3).argType.typeArgs == Seq(Reflector.scalaTypeOf[Int]))
assert(params(4).name == "createdAt")
assert(params(4).defaultValue.isEmpty)
assert(params(4).argType == Reflector.scalaTypeOf[Date])
assert(params(5).name == "deletedAt")
assert(params(5).defaultValue.isEmpty)
assert(params(5).argType == Reflector.scalaTypeOf[Option[Date]])
assert(params(5).argType.typeArgs == Seq(Reflector.scalaTypeOf[Date]))
}
"describe a type parameterized class" in checkCaseClass[RRTypeParam[Int]] { params =>
assert(params(0).name == "id")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[Int])
assert(params(1).name == "name")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[String])
assert(params(2).name == "value")
assert(params(2).defaultValue.isEmpty)
assert(params(2).argType == Reflector.scalaTypeOf[Int])
assert(params(3).name == "opt")
assert(params(3).defaultValue.isEmpty)
assert(params(3).argType == Reflector.scalaTypeOf[Option[Int]])
assert(params(4).name == "seq")
assert(params(4).defaultValue.isEmpty)
assert(params(4).argType == Reflector.scalaTypeOf[Seq[Int]])
assert(params(5).name == "map")
assert(params(5).defaultValue.isEmpty)
assert(params(5).argType == Reflector.scalaTypeOf[Map[String, Int]])
}
"describe a type with nested generic types" in checkCaseClass[NestedType] { params =>
assert(params(0).name == "dat")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[List[Map[Double, Option[Int]]]])
assert(params(1).name == "lis")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[List[List[List[List[List[Int]]]]]])
}
"describe a type with nested generic types 2" in checkCaseClass[NestedType3] { params =>
assert(params(0).name == "dat")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[List[Map[Double, Option[List[Option[Int]]]]]])
assert(params(1).name == "lis")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[List[List[List[List[List[Int]]]]]])
}
"describe a type with nested generic types 3" in checkCaseClass[NestedType4] { params =>
assert(params(0).name == "dat")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[List[Map[Double, Option[List[Map[Long, Option[Int]]]]]]])
assert(params(1).name == "lis")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[List[List[List[List[List[Int]]]]]])
}
"describe a type with nested generic types 4" in checkCaseClass[NestedType5] { params =>
assert(params(0).name == "dat")
assert(params(0).defaultValue.isEmpty)
assert(
params(0).argType == Reflector
.scalaTypeOf[List[Map[Double, Option[List[Map[Long, Option[Map[Byte, Either[Double, Long]]]]]]]]]
)
assert(params(1).name == "lis")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[List[List[List[List[List[Int]]]]]])
}
"describe a type with nested generic types parameters" in checkCaseClass[NestedResType[Double, Int, Option[Int]]] {
params =>
assert(params(0).name == "t")
assert(params(0).defaultValue.isEmpty)
assert(params(0).argType == Reflector.scalaTypeOf[Double])
assert(params(1).name == "v")
assert(params(1).defaultValue.isEmpty)
assert(params(1).argType == Reflector.scalaTypeOf[Option[Int]])
assert(params(2).name == "dat")
assert(params(2).defaultValue.isEmpty)
assert(params(2).argType == Reflector.scalaTypeOf[List[Map[Double, Option[Int]]]])
assert(params(3).name == "lis")
assert(params(3).defaultValue.isEmpty)
assert(params(3).argType == Reflector.scalaTypeOf[List[List[List[List[List[Int]]]]]])
}
"describe a class with a wildcard parameter" in checkCaseClass[Objs] { params =>
assert(params(0).name == "objects")
assert(params(0).argType == Reflector.scalaTypeOf[List[Obj[_]]])
}
"describe the fields of a class" in {
val desc = Reflector.describe[NormalClass].asInstanceOf[ClassDescriptor]
assert(desc.constructors.size == 1)
val params = desc.properties
assert(params.size == 4)
assert(params(0).name == "complex")
assert(params(0).returnType == Reflector.scalaTypeOf[RRSimple])
assert(params(1).name == "string")
assert(params(1).returnType == Reflector.scalaTypeOf[String])
assert(params(2).name == "primitive")
assert(params(2).returnType == Reflector.scalaTypeOf[Int])
assert(params(3).name == "optPrimitive")
assert(params(3).returnType == Reflector.scalaTypeOf[Option[Int]])
}
"Describe a case class with $outer field" in {
val desc = Reflector.describe[PathTypes.HasTrait.FromTraitRROption].asInstanceOf[ClassDescriptor]
assert(desc.companion.map(_.instance) == Some(PathTypes.HasTrait.FromTraitRROption))
assert(desc.constructors.head.params(0).defaultValue.get() == PathTypes.HasTrait)
}
"Describe a case class with options defined in a trait" in {
checkCaseClass[PathTypes.HasTrait.FromTraitRROption] { params =>
val ctorParams = params.filterNot(_.name == ScalaSigReader.OuterFieldName)
assert(ctorParams(0).name == "id")
assert(ctorParams(0).defaultValue.isEmpty)
assert(ctorParams(0).argType == Reflector.scalaTypeOf[Int])
assert(ctorParams(1).name == "name")
assert(ctorParams(1).defaultValue.isEmpty)
assert(ctorParams(1).argType == Reflector.scalaTypeOf[String])
assert(ctorParams(2).name == "status")
assert(ctorParams(2).defaultValue.isEmpty)
assert(ctorParams(2).argType == Reflector.scalaTypeOf[Option[String]])
assert(ctorParams(2).argType.typeArgs == Seq(Reflector.scalaTypeOf[String]))
assert(ctorParams(3).name == "code")
assert(ctorParams(3).defaultValue.isEmpty)
assert(ctorParams(3).argType == Reflector.scalaTypeOf[Option[Int]])
assert(ctorParams(3).argType != Reflector.scalaTypeOf[Option[String]])
assert(ctorParams(3).argType.typeArgs == Seq(Reflector.scalaTypeOf[Int]))
assert(ctorParams(4).name == "createdAt")
assert(ctorParams(4).defaultValue.isEmpty)
assert(ctorParams(4).argType == Reflector.scalaTypeOf[Date])
assert(ctorParams(5).name == "deletedAt")
assert(ctorParams(5).defaultValue.isEmpty)
assert(ctorParams(5).argType == Reflector.scalaTypeOf[Option[Date]])
assert(ctorParams(5).argType.typeArgs == Seq(Reflector.scalaTypeOf[Date]))
}
}
"discover all constructors, incl. the ones from companion object" in {
val klass = Reflector.scalaTypeOf(classOf[PetOwner])
val descriptor = Reflector.describeWithFormats(klass).asInstanceOf[reflect.ClassDescriptor]
// the main one (with firstName, lastName Strings) is seen as two distinct ones:
// as a constructor and an apply method
assert(descriptor.constructors.size == 4)
}
"denote no constructor as primary if there are multiple competing" in {
val klass = Reflector.scalaTypeOf(classOf[PetOwner])
val descriptor = Reflector.describeWithFormats(klass).asInstanceOf[reflect.ClassDescriptor]
assert(descriptor.constructors.count(_.isPrimary) == 0)
}
"denote the only constructor as primary if only one exists" in {
val klass = Reflector.scalaTypeOf(classOf[Dog])
val descriptor = Reflector.describeWithFormats(klass).asInstanceOf[reflect.ClassDescriptor]
// the only human-visible constructor is visible as two - the constructor and the apply method
assert(descriptor.constructors.size == 2)
assert(descriptor.constructors.count(_.isPrimary) == 1)
assert(descriptor.constructors(0).isPrimary == true)
assert(descriptor.constructors(1).isPrimary == false)
}
"denote the annotated constructor as primary even if multiple exist" in {
val klass = Reflector.scalaTypeOf(classOf[Cat])
val descriptor = Reflector.describeWithFormats(klass).asInstanceOf[reflect.ClassDescriptor]
assert(descriptor.constructors.size == 3)
assert(descriptor.constructors.count(_.isPrimary) == 1)
}
"retrieve constructors of a class in a deterministic order" in {
val klass = Reflector.scalaTypeOf(classOf[PetOwner])
val descriptor = Reflector.describeWithFormats(klass).asInstanceOf[reflect.ClassDescriptor]
assert(descriptor.constructors.size == 4)
val first = descriptor.constructors(0)
val second = descriptor.constructors(1)
val third = descriptor.constructors(2)
val fourth = descriptor.constructors(3)
assert(first.params.map(_.name) == Seq("firstName", "lastName"))
assert(first.constructor.method == null)
assert(first.constructor.constructor != null)
assert(second.params.map(_.name) == Seq("age"))
assert(third.params.map(_.name) == Seq("firstName", "lastName"))
assert(third.constructor.method != null)
assert(third.constructor.constructor == null)
assert(fourth.params.map(_.name) == Seq("email"))
}
}
}
| json4s/json4s | native/src/test/scala/org/json4s/reflect/ReflectorSpec.scala | Scala | apache-2.0 | 20,041 |
package ch.epfl.lamp.grading
final case class GradingSummary(score: Int, maxScore: Int, feedback: String)
| sbt-coursera/sbt-coursera | src/main/scala/ch/epfl/lamp/grading/GradingSummary.scala | Scala | bsd-3-clause | 107 |
package controllers
import scala.concurrent.ExecutionContext.Implicits.global
import play.api.mvc.WebSocket
import play.api.libs.iteratee.Concurrent
import play.api.libs.iteratee.Iteratee
import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory
class PortalWebSocket extends SessionConfig with PortalWebSocketMessages{
val logger = Logger(LoggerFactory.getLogger(this.getClass))
def broadcastLog = WebSocket.using[String]{ request =>
val (out,channel) = Concurrent.broadcast[String]
val in = Iteratee.foreach[String] {
msg => {
// check session here
SessionChecker.isLogin(request.session.data) match {
case Left(r) => {
if (!r) {
logger.debug("unauthorized access. No login")
channel.push(unauthorized_msg)
}
else {
request.session.get(session_var_uuid) match {
case Some(e) => {
// creating LogItemPusher
new LogItemPusher(e,com.richardchankiyin.os.Scheduler.logKeeper,channel)
channel.push(ok_msg)
}
case None => {
logger.warn("with session but uuid not found?! Weird")
channel.push(unauthorized_msg)
}
}
}
}
case Right(r) => {
logger.debug("unauthorized access. Timeout already....")
channel.push(unauthorized_msg)
}
}
}
}
(in,out)
}
}
| richardchankiyin/sysdashboard | Portal/app/controllers/PortalWebSocket.scala | Scala | gpl-3.0 | 1,571 |
/**
* Copyright (c) 2013-2016 Extended Mind Technologies Oy
*
* This file is part of Extended Mind.
*
* Extended Mind is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.extendedmind.api.test
import java.io.PrintWriter
import java.util.UUID
import org.extendedmind._
import org.extendedmind.bl._
import org.extendedmind.db._
import org.extendedmind.domain._
import org.extendedmind.security._
import org.extendedmind.email._
import org.extendedmind.test._
import org.extendedmind.test.TestGraphDatabase._
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.mockito.Matchers.{ eq => mockEq }
import scaldi.Module
import spray.http.BasicHttpCredentials
import spray.http.HttpHeaders.Authorization
import org.zeroturnaround.zip.ZipUtil
import java.io.File
import org.zeroturnaround.zip.FileUtil
import org.apache.commons.io.FileUtils
import org.extendedmind.api.JsonImplicits._
import spray.httpx.SprayJsonSupport._
import spray.httpx.marshalling._
import spray.json.DefaultJsonProtocol._
import spray.http.StatusCodes._
/**
* Worst case test for item routes.
*/
class ItemWorstCaseSpec extends ServiceSpecBase{
object TestDataGeneratorConfiguration extends Module {
bind[GraphDatabase] to db
}
override def configurations = TestDataGeneratorConfiguration :: new Configuration(settings, actorRefFactory)
before {
db.insertTestData()
}
after {
cleanDb(db.ds.gds)
}
describe("In the worst case, ItemService") {
it("should return 'not found' when getting item that does not exist") {
val authenticateResponse = emailPasswordAuthenticate(TIMO_EMAIL, TIMO_PASSWORD)
val randomUUID = UUID.randomUUID().toString()
Get("/v2/owners/" + authenticateResponse.userUUID + "/data/items/" + randomUUID
) ~> addCredentials(BasicHttpCredentials("token", authenticateResponse.token.get)) ~> route ~> check {
val failure = responseAs[ErrorResult]
status should be (BadRequest)
failure.description should startWith("Could not find item " + randomUUID + " for owner " + authenticateResponse.userUUID)
}
}
it("should return 409 Conflict when trying to modify task with invalid modified timestamp") {
val authenticateResponse = emailPasswordAuthenticate(TIMO_EMAIL, TIMO_PASSWORD)
val newItem = Item("learn how to fly", None, None)
Put("/v2/owners/" + authenticateResponse.userUUID + "/data/items",
marshal(newItem).right.get) ~> addHeader("Content-Type", "application/json") ~> addCredentials(BasicHttpCredentials("token", authenticateResponse.token.get)) ~> route ~> check {
val putItemResponse = responseAs[SetResult]
Put("/v2/owners/" + authenticateResponse.userUUID + "/data/items/" + putItemResponse.uuid.get,
marshal(newItem.copy(modified = Some(putItemResponse.modified + 1))).right.get) ~> addHeader("Content-Type", "application/json") ~> addCredentials(BasicHttpCredentials("token", authenticateResponse.token.get)) ~> route ~> check {
status should be (Conflict)
val failure = responseAs[ErrorResult]
failure.code should be (ERR_BASE_WRONG_EXPECTED_MODIFIED.number)
}
}
}
}
}
| ttiurani/extendedmind | backend/src/test/scala/org/extendedmind/api/test/ItemWorstCaseSpec.scala | Scala | agpl-3.0 | 3,812 |
package scala
object ArrayObjectCopySuite extends tests.Suite {
class A(_i: Int) {
def i = _i
}
class B(_i: Int, _d: Double) extends A(_i)
def initB(arr: Array[B], from: Int = 0) = {
var c = 0
while (c < arr.length) {
arr(c) = new B(c + from, c.toDouble)
c += 1
}
}
def initA(arr: Array[A]) = {
var c = 0
while (c < arr.length) {
arr(c) = new A(c)
c += 1
}
}
val len = 10
val arr = new Array[B](len)
val arr2 = new Array[A](len + 2)
val arrEmpty = new Array[B](0)
test("array[Object]: init") {
initB(arr, 100)
assert(
arr(0).i == 100 && arr(1).i == 101 && arr(2).i == 102 &&
arr(3).i == 103 && arr(4).i == 104 && arr(5).i == 105 &&
arr(6).i == 106 && arr(7).i == 107 && arr(8).i == 108 &&
arr(9).i == 109
)
}
test("array[Object]: copy to another array") {
initB(arr, 100)
scala.Array.copy(arr, 0, arr2, 1, 10)
assert(
arr2(0) == null && arr2(1).i == 100 && arr2(2).i == 101 &&
arr2(3).i == 102 && arr2(4).i == 103 && arr2(5).i == 104 &&
arr2(6).i == 105 && arr2(7).i == 106 && arr2(8).i == 107 &&
arr2(9).i == 108 && arr2(10).i == 109 && arr2(11) == null
)
}
test("array[Object]: copy zero elements from empty array") {
initA(arr2)
scala.Array.copy(arrEmpty, 0, arr2, 5, 0)
assert(
arr2(0).i == 0 && arr2(1).i == 1 && arr2(2).i == 2 &&
arr2(3).i == 3 && arr2(4).i == 4 && arr2(5).i == 5 &&
arr2(6).i == 6 && arr2(7).i == 7 && arr2(8).i == 8 &&
arr2(9).i == 9 && arr2(10).i == 10 && arr2(11).i == 11
)
}
test("array[Object]: copy to self without overlap (1/2)") {
initB(arr)
scala.Array.copy(arr, 0, arr, 5, 5)
assert(
arr(0).i == 0 && arr(1).i == 1 && arr(2).i == 2 && arr(3).i == 3 &&
arr(4).i == 4 && arr(5).i == 0 && arr(6).i == 1 && arr(7).i == 2 &&
arr(8).i == 3 && arr(9).i == 4
)
}
test("array[Object]: copy to self without overlap (2/2)") {
initB(arr)
scala.Array.copy(arr, 6, arr, 4, 2)
assert(
arr(0).i == 0 && arr(1).i == 1 && arr(2).i == 2 && arr(3).i == 3 &&
arr(4).i == 6 && arr(5).i == 7 && arr(6).i == 6 && arr(7).i == 7 &&
arr(8).i == 8 && arr(9).i == 9
)
}
test("array[Object]: copy to self with overlap and backward copy") {
initB(arr)
scala.Array.copy(arr, 0, arr, 2, 6)
assert(
arr(0).i == 0 && arr(1).i == 1 && arr(2).i == 0 && arr(3).i == 1 &&
arr(4).i == 2 && arr(5).i == 3 && arr(6).i == 4 && arr(7).i == 5 &&
arr(8).i == 8 && arr(9).i == 9
)
}
test("array[Object]: copy to self with overlap and forward copy") {
initB(arr)
scala.Array.copy(arr, 2, arr, 0, 6)
assert(
arr(0).i == 2 && arr(1).i == 3 && arr(2).i == 4 && arr(3).i == 5 &&
arr(4).i == 6 && arr(5).i == 7 && arr(6).i == 6 && arr(7).i == 7 &&
arr(8).i == 8 && arr(9).i == 9
)
}
test("array[Object]: throws NullPointerException if from is null") {
assertThrows[java.lang.NullPointerException] {
scala.Array.copy(null, 0, arr2, 5, 2)
}
}
test("array[Object]: throws NullPointerException if to is null") {
assertThrows[java.lang.NullPointerException] {
scala.Array.copy(arr, 0, null, 5, 2)
}
}
test("array[Object]: throws IndexOutOfBoundsException if length is negative") {
assertThrows[java.lang.IndexOutOfBoundsException] {
scala.Array.copy(arr, 0, arr2, 5, -1)
}
}
test(
"array[Object]: throws IndexOutOfBoundsException if toPos + len > to.length") {
assertThrows[java.lang.IndexOutOfBoundsException] {
scala.Array.copy(arr, 0, arr2, 5, 10)
}
}
test(
"array[Object]: throws IndexOutOfBoundsException if fromPos + len > from.length") {
assertThrows[java.lang.IndexOutOfBoundsException] {
scala.Array.copy(arr, 5, arr2, 0, 10)
}
}
test("array[Object]: throws IndexOutOfBoundsException if toPos is negative") {
assertThrows[java.lang.IndexOutOfBoundsException] {
scala.Array.copy(arr, 0, arr2, -1, 10)
}
}
test(
"array[Object]: throws IndexOutOfBoundsException if fromPos is negative") {
assertThrows[java.lang.IndexOutOfBoundsException] {
scala.Array.copy(arr, -1, arr2, 0, 10)
}
}
test(
"array[Object]: throws ArrayStoreException if copy to a different type of array") {
val arrChar = new Array[Char](len)
assertThrows[java.lang.ArrayStoreException] {
scala.Array.copy(arr, 0, arrChar, 5, 2)
}
}
}
| cedricviaccoz/scala-native | unit-tests/src/main/scala/scala/ArrayObjectCopySuite.scala | Scala | bsd-3-clause | 4,570 |
import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.Future
object Example_02_FuturesAsync extends Example_00_Futures {
def findFullProfile(user: String): Future[String] = {
import scala.async.Async.{async, await}
val fRankingForUser = fRanking(user)
val fbasicProfileForUser = fBasicProfile(user)
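    // Both futures are created before entering the async block, so the ranking
    // and basic-profile calls run concurrently; only fLastMedalInLevel depends
    // on the basic profile and therefore waits for it.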
async {
val ranking = await(fRankingForUser)
val basicProfile = await(fbasicProfileForUser)
val lastMedal = await(fLastMedalInLevel(basicProfile))
s"$basicProfile;$ranking;$lastMedal"
}
}
}
|
enqae/futuresComposition
|
src/main/scala/Example_02_FuturesAsync.scala
|
Scala
|
apache-2.0
| 567 |
/*
* Copyright (C) 2017 LREN CHUV for Human Brain Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ch.chuv.lren.woken
import cats.data.ValidatedNel
import cats.implicits._
import ch.chuv.lren.woken.cromwell.core.ConfigUtil.Validation
import ch.chuv.lren.woken.messages.query.ValidationSpec
package object validation {
import acyclic.pkg
def defineSplitters(
validations: List[ValidationSpec]
): Validation[List[FeaturesSplitterDefinition]] =
validations
.map { spec =>
defineSplitter(spec)
}
.sequence[Validation, FeaturesSplitterDefinition]
def defineSplitter(spec: ValidationSpec): ValidatedNel[String, FeaturesSplitterDefinition] =
spec.code match {
case "kfold" =>
val numFolds = spec.parametersAsMap("k").toInt
KFoldFeaturesSplitterDefinition(spec, numFolds).validNel[String]
case other => s"Validation $other is not handled".invalidNel[FeaturesSplitterDefinition]
}
}
|
LREN-CHUV/workflow
|
src/main/scala/ch/chuv/lren/woken/validation/package.scala
|
Scala
|
apache-2.0
| 1,606 |
package com.github.dcapwell.scala.playground.macros
import scala.reflect.ClassTag
import scala.reflect.macros.{blackbox, whitebox}
trait TreeLenses { self: BlackboxSupport =>
import c.universe._
import scalaz.Lens
val modFlags = Lens.lensu[Modifiers, FlagSet](
set = (m, flags) => Modifiers(flags, m.privateWithin, m.annotations),
get = (m) => m.flags
)
val valMods = Lens.lensu[ValDef, Modifiers](
set = (v, m) => ValDef(m, v.name, v.tpt, v.rhs),
get = (v) => v.mods
)
val valFlags: Lens[ValDef, FlagSet] = valMods andThen modFlags
val defMods = Lens.lensu[DefDef, Modifiers](
set = (d, m) => DefDef(m, d.name, d.tparams, d.vparamss, d.tpt, d.rhs),
get = (d) => d.mods
)
val defFlags: Lens[DefDef, FlagSet] = defMods andThen modFlags
val templBody = Lens.lensu[Template, List[Tree]](
set = (t, b) => Template(t.parents, t.self, b),
get = (t) => t.body
)
val templParents = Lens.lensu[Template, List[Tree]](
set = (t, p) => Template(p, t.self, t.body),
get = (t) => t.parents
)
val clazzTempl = Lens.lensu[ClassDef, Template](
set = (c, t) => ClassDef(c.mods, c.name, c.tparams, t),
get = (c) => c.impl
)
val clazzName = Lens.lensu[ClassDef, TypeName](
set = (c, n) => ClassDef(c.mods, n, c.tparams, c.impl),
get = (c) => c.name
)
val clazzMods = Lens.lensu[ClassDef, Modifiers](
set = (c, m) => ClassDef(m, c.name, c.tparams, c.impl),
get = (c) => c.mods
)
val clazzFlags = clazzMods andThen modFlags
val clazzBody: Lens[ClassDef, List[Tree]] = clazzTempl andThen templBody
val clazzParents: Lens[ClassDef, List[Tree]] = clazzTempl andThen templParents
}
trait TreeSupport extends TreeLenses { self: BlackboxSupport =>
import c.universe._
def isLiteral(tree: Tree): Boolean = tree match {
case Literal(Constant(_)) => true
case _ => false
}
def typeTree[A: TypeTag] = TypeTree(typeOf[A])
val UnitLiteral = Literal(Constant(()))
val MapSelf: Tree =>? Tree = {
case t => t
}
implicit class TreeOps(self: Tree) {
def findAll[T: ClassTag] : List[T] = self collect { case t: T => t }
}
implicit class ListTreeOps(self: List[Tree]) {
def trees[T: ClassTag] : List[T] = self collect { case t: T => t }
def mapPartial(pf: Tree =>? Tree): List[Tree] =
self.map(pf orElse MapSelf)
}
}
trait TypeSupport { self: BlackboxSupport =>
import c.universe._
def isCaseClass(t: Type): Boolean = {
val sym = t.typeSymbol
sym.isClass && sym.asClass.isCaseClass
}
def assertCaseClass(t: Type): Unit =
if(!isCaseClass(t)) c.abort(c.enclosingPosition, s"${t.typeSymbol} is not a case class")
def primaryConstructor(t: Type): MethodSymbol =
    t.decls.collectFirst { case m: MethodSymbol if m.isPrimaryConstructor => m }
      .getOrElse(c.abort(c.enclosingPosition, "Unable to find primary constructor for product"))
def companionObject(t: Type): Symbol =
t.typeSymbol.companion
def caseFields(t: Type): List[Symbol] =
primaryConstructor(t).paramLists.head
def is(tpe: Type, typeString: String): Option[String] = {
// expected type
val typeSymbol = c.mirror.staticClass(typeString).asType
val typeSymbolParams = typeSymbol.typeParams
// given type
val givenSymboles = typeSymbols(tpe)
if(typeSymbolParams.size != givenSymboles.size) Some(s"Arity does not match; given ${toTypeString(tpe.typeSymbol, givenSymboles)}, but expected ${toTypeString(typeSymbol, typeSymbolParams)}")
else {
val typeCreated = typeSymbol.toType.substituteSymbols(typeSymbolParams, givenSymboles)
if(! (tpe =:= typeCreated)) Some(s"Expected type is $typeCreated but was given $tpe")
else None
}
}
def typeSymbols(tpe: Type): List[Symbol] =
tpe.typeArgs.map(_.typeSymbol.asType)
}
trait SymbolSupport { self: BlackboxSupport =>
import c.universe._
def toTypeString(clazz: Symbol, args: List[Symbol]): String = {
if(args.isEmpty) clazz.toString
else s"$clazz[${args.map(_.name).mkString(",")}]"
}
}
trait ContextSupport { self: BlackboxSupport =>
def abort(msg: String) =
c.abort(c.enclosingPosition, msg)
}
trait BlackboxSupport extends TreeSupport with TypeSupport with SymbolSupport with ContextSupport {
val c: blackbox.Context
}
trait WhiteboxSupport extends BlackboxSupport {
val c: whitebox.Context
}
|
dcapwell/scala-playground
|
macros/src/main/scala/com/github/dcapwell/scala/playground/macros/MacroSupport.scala
|
Scala
|
mit
| 4,371 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.render
import io.gatling.BaseSpec
import io.gatling.recorder.render.RequestElement.extractCharsetFromContentType
class RequestElementSpec extends BaseSpec {
"extractCharsetFromContentType" should "extract unwrapped charset from Content-Type" in {
extractCharsetFromContentType("text/html; charset=utf-8") shouldBe Some("utf-8")
}
it should "extract wrapped charset from Content-Type" in {
extractCharsetFromContentType("text/html; charset=\"utf-8\"") shouldBe Some("utf-8")
}
it should "not extract when Content-Type doesn't have a charset attribute" in {
extractCharsetFromContentType("text/html") shouldBe None
}
}
|
gatling/gatling
|
gatling-recorder/src/test/scala/io/gatling/recorder/render/RequestElementSpec.scala
|
Scala
|
apache-2.0
| 1,293 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.dal.model.provider
import java.net.{InetSocketAddress, Socket, URI}
import java.nio.channels.ClosedChannelException
import java.util.{Collections, Date}
import com.bwsw.sj.common.dal.morphia.MorphiaAnnotations.{IdField, PropertyField}
import com.bwsw.sj.common.utils.{MessageResourceUtils, ProviderLiterals}
import kafka.javaapi.TopicMetadataRequest
import kafka.javaapi.consumer.SimpleConsumer
import org.apache.zookeeper.ZooKeeper
import org.mongodb.morphia.annotations.Entity
import scaldi.Injector
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
/**
 * protected methods and variables are needed for testing purposes
*/
@Entity("providers")
class ProviderDomain(@IdField val name: String,
val description: String,
val hosts: Array[String],
@PropertyField("provider-type") val providerType: String,
val creationDate: Date) {
import ProviderDomain._
def getConcatenatedHosts(separator: String = ","): String = {
hosts.mkString(separator)
}
def checkConnection(zkSessionTimeout: Int)(implicit injector: Injector): ArrayBuffer[String] = {
val errors = ArrayBuffer[String]()
for (host <- this.hosts) {
errors ++= checkProviderConnectionByType(host, this.providerType, zkSessionTimeout)
}
errors
}
protected def checkProviderConnectionByType(host: String, providerType: String, zkSessionTimeout: Int)
(implicit injector: Injector): ArrayBuffer[String] = {
providerType match {
case ProviderLiterals.zookeeperType =>
checkZookeeperConnection(host, zkSessionTimeout)
case ProviderLiterals.kafkaType =>
checkKafkaConnection(host)
case ProviderLiterals.elasticsearchType =>
checkESConnection(host)
case ProviderLiterals.jdbcType =>
checkJdbcConnection(host)
case ProviderLiterals.restType =>
checkRestConnection(host)
case _ =>
throw new Exception(s"Host checking for provider type '$providerType' is not implemented")
}
}
protected def checkZookeeperConnection(address: String, zkSessionTimeout: Int): ArrayBuffer[String] = {
val errors = ArrayBuffer[String]()
Try(new ZooKeeper(address, zkSessionTimeout, null)) match {
case Success(client) =>
val deadline = 1.seconds.fromNow
var connected: Boolean = false
while (!connected && deadline.hasTimeLeft) {
connected = client.getState.isConnected
}
if (!connected) {
errors += messageResourceUtils.createMessage("rest.providers.provider.cannot.connect.zk", address)
}
client.close()
case Failure(_) =>
errors += messageResourceUtils.createMessage("rest.providers.provider.cannot.connect.zk.wrong.host", address)
}
errors
}
protected def checkKafkaConnection(address: String): ArrayBuffer[String] = {
val errors = ArrayBuffer[String]()
val (host, port) = getHostAndPort(address)
val consumer = new SimpleConsumer(host, port, 500, 64 * 1024, "connectionTest")
val topics = Collections.singletonList("test_connection")
val req = new TopicMetadataRequest(topics)
Try(consumer.send(req)) match {
case Success(_) =>
case Failure(_: ClosedChannelException) | Failure(_: java.io.EOFException) =>
errors += messageResourceUtils.createMessage("rest.providers.provider.cannot.connect.kafka", address)
case Failure(_) =>
errors += messageResourceUtils.createMessage("rest.providers.provider.cannot.connect.kafka.wrong.host", address)
}
errors
}
protected def checkESConnection(address: String): ArrayBuffer[String] = ArrayBuffer()
protected def checkJdbcConnection(address: String)(implicit injector: Injector): ArrayBuffer[String] = ArrayBuffer()
protected def checkRestConnection(address: String): ArrayBuffer[String] = {
val (host, port) = getHostAndPort(address)
val socket = new Socket()
val errors = Try {
socket.connect(new InetSocketAddress(host, port), ProviderLiterals.connectTimeoutMillis)
} match {
case Success(_) =>
ArrayBuffer[String]()
case Failure(_) =>
ArrayBuffer[String](messageResourceUtils.createMessage("rest.providers.provider.cannot.connect.rest", address))
}
if (!socket.isClosed) socket.close()
errors
}
protected def getHostAndPort(address: String): (String, Int) = {
val uri = new URI("dummy://" + address)
val host = uri.getHost
val port = uri.getPort
(host, port)
}
}
object ProviderDomain {
protected[provider] val messageResourceUtils = new MessageResourceUtils
}
|
bwsw/sj-platform
|
core/sj-common/src/main/scala/com/bwsw/sj/common/dal/model/provider/ProviderDomain.scala
|
Scala
|
apache-2.0
| 5,592 |
package utils
import com.typesafe.config.ConfigFactory
object ConfigUtils {
val INTERNAL_HOST_PATH = "proxy.internalHttpHost"
val INTERNAL_PORT_PATH = "proxy.internalHttpPort"
val DEFAULT_INTERNAL_HOST = "coordinator.elastic.l4lb.thisdcos.directory"
val DEFAULT_INTERNAL_PORT = 9200
lazy val conf: com.typesafe.config.Config = ConfigFactory.load()
// get the version number from the config
lazy val VERSION: String = {
conf.getString("application.version")
}
// get the proxy's internal host mame from the config
def internalHost: String = {
conf.getStringWithDefault(INTERNAL_HOST_PATH, DEFAULT_INTERNAL_HOST)
}
// get the proxy's internal port number from the config
def internalPort: Int = {
conf.getIntWithDefault(INTERNAL_PORT_PATH, DEFAULT_INTERNAL_PORT)
}
/**
* Helper class to help check optional config parameters
*
* @param underlying the Config
*/
implicit class RichConfig(val underlying: com.typesafe.config.Config) extends AnyVal {
def getStringWithDefault(path: String, defaultValue: String): String = {
if (underlying.hasPath(path)) {
underlying.getString(path)
} else {
defaultValue
}
}
def getBooleanWithDefault(path: String, defaultValue: Boolean): Boolean = {
if (underlying.hasPath(path)) {
underlying.getBoolean(path)
} else {
defaultValue
}
}
def getIntWithDefault(path: String, defaultValue: Int): Int = {
if (underlying.hasPath(path)) {
underlying.getInt(path)
} else {
defaultValue
}
}
}
}
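// Hedged usage sketch (not part of the original file): RichConfig falls back to
// the supplied default when a path is missing from the loaded config. The key
// "proxy.requestTimeoutSeconds" is made up for illustration only.
object ConfigUtilsUsageSketch {
  import ConfigUtils._
  val requestTimeoutSeconds: Int = conf.getIntWithDefault("proxy.requestTimeoutSeconds", 30)
}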
|
amollenkopf/dcos-iot-demo
|
map-webapp/app/Utils/ConfigUtils.scala
|
Scala
|
apache-2.0
| 1,618 |
package com.github.mgoeminne.sitar.parser
/**
* Defines a parser of a particular style of citation.
*/
trait StyleParser
{
/**
* @return a citation parser for inproceedings citations
*/
def inProceedingsParser : CitationParser
/**
* @return a citation parser for article citations
*/
def articleParser : CitationParser
/**
* @return a citation parser for book chapter citations
*/
def bookChapterParser : CitationParser
/**
* @return a citation parser for technical report citations
*/
def technicalReportParser : CitationParser
/**
* @return a citation parser for book citations
*/
def bookParser : CitationParser
/**
    * @return a citation parser for proceedings citations
*/
def proceedingsParser : CitationParser
/**
* @return a citation parser for thesis citations
*/
def thesisParser : CitationParser
/**
* @return a sequence of all parsers
*/
private def parsers: Seq[CitationParser] = Seq( inProceedingsParser, articleParser,
bookChapterParser, technicalReportParser,
                                                     bookParser, proceedingsParser, thesisParser)
/**
* Tries to parse a citation using a parser for a particular style of citation.
* @param line The potential citation string.
* @return The extracted citation, if any.
*/
def parse(line: String): Option[Citation] =
{
parsers.foldLeft(Option.empty[Citation])((previous, p) => previous match {
case s: Some[Citation] => s
case _ => p.parseAll(p.citation, line) match {
case p.Success(result: Citation, _) => Some(result)
case _ => None
}
})
}
}
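// Hedged usage sketch (not part of the original file): scans a list of lines
// with any concrete StyleParser implementation and keeps the first line that
// one of its citation parsers accepts.
object StyleParserUsageSketch {
  def firstCitation(parser: StyleParser, lines: Seq[String]): Option[Citation] =
    lines.view.flatMap(line => parser.parse(line)).headOption
}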
|
mgoeminne/sitar
|
src/main/scala/com/github/mgoeminne/sitar/parser/StyleParser.scala
|
Scala
|
mit
| 1,847 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.controller
import io.prediction.core.BaseDataSource
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import scala.reflect._
/** Base class of a local data source.
*
 * A local data source runs locally within a single machine and returns data
 * that can fit within a single machine.
*
* @tparam TD Training data class.
* @tparam EI Evaluation Info class.
* @tparam Q Input query class.
* @tparam A Actual value class.
* @group Data Source
*/
abstract class LDataSource[TD: ClassTag, EI, Q, A]
extends BaseDataSource[RDD[TD], EI, Q, A] {
def readTrainingBase(sc: SparkContext): RDD[TD] = {
sc.parallelize(Seq(None)).map(_ => readTraining())
}
/** Implement this method to only return training data from a data source */
def readTraining(): TD
def readEvalBase(sc: SparkContext): Seq[(RDD[TD], EI, RDD[(Q, A)])] = {
val localEvalData: Seq[(TD, EI, Seq[(Q, A)])] = readEval()
localEvalData.map { case (td, ei, qaSeq) => {
val tdRDD = sc.parallelize(Seq(None)).map(_ => td)
val qaRDD = sc.parallelize(qaSeq)
(tdRDD, ei, qaRDD)
}}
}
  /** To provide the evaluation feature for your engine, you must override this
* method to return data for evaluation from a data source. Returned data can
* optionally include a sequence of query and actual value pairs for
* evaluation purpose.
*
* The default implementation returns an empty sequence as a stub, so that
* an engine can be compiled without implementing evaluation.
*/
def readEval(): Seq[(TD, EI, Seq[(Q, A)])] = Seq[(TD, EI, Seq[(Q, A)])]()
@deprecated("Use readEval() instead.", "0.9.0")
def read(): Seq[(TD, EI, Seq[(Q, A)])] = readEval()
}
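// Hedged sketch (not part of the original file): the smallest possible local
// data source only implements readTraining; the concrete type parameters used
// here are illustrative.
class InMemoryDataSourceSketch extends LDataSource[Seq[String], Unit, String, String] {
  def readTraining(): Seq[String] = Seq("event-1", "event-2")
}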
|
beni55/PredictionIO
|
core/src/main/scala/io/prediction/controller/LDataSource.scala
|
Scala
|
apache-2.0
| 2,352 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorCriterion
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.tensor.{DenseTensorApply, Tensor, TensorFunc4, TensorFunc6}
import scala.reflect.ClassTag
/**
* Creates a criterion that optimizes a two-class classification hinge loss (margin-based loss)
* between input x (a Tensor of dimension 1) and output y.
*
* @param margin if unspecified, is by default 1.
* @param sizeAverage whether to average the loss
*/
@SerialVersionUID( - 5028892499250398130L)
class MarginCriterion[@specialized(Float, Double) T: ClassTag]
(val margin: Double = 1.0, val sizeAverage: Boolean = true)
(implicit ev: TensorNumeric[T]) extends TensorCriterion[T] {
override def updateOutput(input: Tensor[T], target: Tensor[T]): T = {
var sum: T = ev.fromType(0)
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], index1: Int, data2: Array[T], index2: Int): Unit = {
val z = ev.minus(ev.fromType(margin), ev.times(data1(index1), data2(index2)))
if (ev.isGreater(z, ev.fromType(0))) sum = ev.plus(sum, z)
}
}
DenseTensorApply.apply2[T](input, target, func)
if (sizeAverage) sum = ev.divide(sum, ev.fromType(input.nElement()))
sum
}
override def updateGradInput(input: Tensor[T], target: Tensor[T]): Tensor[T] = {
val norm = ev.fromType(if (sizeAverage) -1.0 / input.nElement() else 1.0)
gradInput.resizeAs(input)
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc6[T] {
override def apply (data1: Array[T], offset1: Int, data2: Array[T],
offset2: Int, data3: Array[T], offset3: Int): Unit = {
if (ev.isGreater(ev.fromType(margin), ev.times(data2(offset2), data3(offset3)))) {
data1(offset1) = ev.times(norm, data3(offset3))
}
}
}
DenseTensorApply.apply3[T](gradInput, input, target, func)
gradInput
}
override def toString(): String = {
s"nn.MarginCriterion($margin)"
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[MarginCriterion[T]]
override def equals(other: Any): Boolean = other match {
case that: MarginCriterion[T] =>
super.equals(that) &&
(that canEqual this) &&
margin == that.margin &&
sizeAverage == that.sizeAverage
case _ => false
}
override def hashCode(): Int = {
def getHashCode(a: Any): Int = if (a == null) 0 else a.hashCode()
val state = Seq(super.hashCode(), margin, sizeAverage)
state.map(getHashCode).foldLeft(0)((a, b) => 31 * a + b)
}
}
object MarginCriterion {
def apply[@specialized(Float, Double) T: ClassTag](
margin: Double = 1.0,
sizeAverage: Boolean = true)(implicit ev: TensorNumeric[T]) : MarginCriterion[T] = {
new MarginCriterion[T](margin, sizeAverage)
}
}
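// Hedged reference sketch (not part of the original file): the same hinge loss
// expressed over plain arrays, mirroring updateOutput above for scores x and
// targets y in {-1, +1}.
object MarginCriterionReferenceSketch {
  def marginLoss(x: Array[Double], y: Array[Double],
                 margin: Double = 1.0, sizeAverage: Boolean = true): Double = {
    require(x.length == y.length, "scores and targets must have the same length")
    val sum = x.indices.map(i => math.max(0.0, margin - x(i) * y(i))).sum
    if (sizeAverage) sum / x.length else sum
  }
}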
|
psyyz10/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/MarginCriterion.scala
|
Scala
|
apache-2.0
| 3,618 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio.ByteBuffer
import kafka.api.ApiUtils._
import kafka.common.ErrorMapping
import kafka.network.{RequestOrResponseSend, RequestChannel}
import kafka.network.RequestChannel.Response
import kafka.utils.Logging
import org.apache.kafka.common.protocol.ApiKeys
import scala.collection.mutable.ListBuffer
object TopicMetadataRequest extends Logging {
val CurrentVersion = 0.shortValue
val DefaultClientId = ""
/**
* TopicMetadataRequest has the following format -
   * number of topics (4 bytes),
   * list of topics (2 bytes + topic.length per topic),
   * detailedMetadata (2 bytes),
   * timestamp (8 bytes),
   * count (4 bytes)
*/
def readFrom(buffer: ByteBuffer): TopicMetadataRequest = {
val versionId = buffer.getShort
val correlationId = buffer.getInt
val clientId = readShortString(buffer)
val numTopics = readIntInRange(buffer, "number of topics", (0, Int.MaxValue))
val topics = new ListBuffer[String]()
for(i <- 0 until numTopics)
topics += readShortString(buffer)
new TopicMetadataRequest(versionId, correlationId, clientId, topics.toList)
}
}
case class TopicMetadataRequest(versionId: Short,
correlationId: Int,
clientId: String,
topics: Seq[String])
extends RequestOrResponse(Some(ApiKeys.METADATA.id)){
def this(topics: Seq[String], correlationId: Int) =
this(TopicMetadataRequest.CurrentVersion, correlationId, TopicMetadataRequest.DefaultClientId, topics)
def writeTo(buffer: ByteBuffer) {
buffer.putShort(versionId)
buffer.putInt(correlationId)
writeShortString(buffer, clientId)
buffer.putInt(topics.size)
topics.foreach(topic => writeShortString(buffer, topic))
}
def sizeInBytes(): Int = {
2 + /* version id */
4 + /* correlation id */
shortStringLength(clientId) + /* client id */
4 + /* number of topics */
topics.foldLeft(0)(_ + shortStringLength(_)) /* topics */
}
override def toString(): String = {
describe(true)
}
override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
val topicMetadata = topics.map {
topic => TopicMetadata(topic, Nil, ErrorMapping.codeFor(e.getClass.asInstanceOf[Class[Throwable]]))
}
val errorResponse = TopicMetadataResponse(Seq(), topicMetadata, correlationId)
requestChannel.sendResponse(new Response(request, new RequestOrResponseSend(request.connectionId, errorResponse)))
}
override def describe(details: Boolean): String = {
val topicMetadataRequest = new StringBuilder
topicMetadataRequest.append("Name: " + this.getClass.getSimpleName)
topicMetadataRequest.append("; Version: " + versionId)
topicMetadataRequest.append("; CorrelationId: " + correlationId)
topicMetadataRequest.append("; ClientId: " + clientId)
if(details)
topicMetadataRequest.append("; Topics: " + topics.mkString(","))
topicMetadataRequest.toString()
}
}
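// Hedged usage sketch (not part of the original file): writeTo and readFrom are
// symmetric, so a request sized with sizeInBytes can be round-tripped through a
// ByteBuffer.
object TopicMetadataRequestRoundTripSketch {
  def roundTrip(request: TopicMetadataRequest): TopicMetadataRequest = {
    val buffer = ByteBuffer.allocate(request.sizeInBytes())
    request.writeTo(buffer)
    buffer.rewind()
    TopicMetadataRequest.readFrom(buffer)
  }
}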
|
prashantbh/kafka
|
core/src/main/scala/kafka/api/TopicMetadataRequest.scala
|
Scala
|
apache-2.0
| 3,835 |
package sbt
import internals.{
DslEntry,
DslSetting,
DslEnablePlugins,
DslDisablePlugins
}
package object dsl {
def enablePlugins(ps: AutoPlugin*): DslEntry = DslEnablePlugins(ps)
def disablePlugins(ps: AutoPlugin*): DslEntry = DslDisablePlugins(ps)
}
|
niktrop/sbt
|
main/src/main/scala/sbt/dsl/package.scala
|
Scala
|
bsd-3-clause
| 265 |
package io.vertx.scala
import scala.concurrent.{Future, Promise}
object FutureHelper {
def futurify[A](x: Promise[A] => _): Future[A] = {
val p = Promise[A]()
x(p)
p.future
}
}
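// Hedged usage sketch (not part of the original file): futurify adapts an API
// that signals completion through a Promise into one returning a Future.
// lookupUser below is a made-up callback-style function.
object FutureHelperUsageSketch {
  def lookupUser(id: Int, done: Promise[String]): Unit = done.success(s"user-$id")
  def userName(id: Int): Future[String] = FutureHelper.futurify[String](p => lookupUser(id, p))
}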
|
campudus/tableaux
|
src/main/scala/io/vertx/scala/FutureHelper.scala
|
Scala
|
apache-2.0
| 196 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras
import com.intel.analytics.bigdl.dllib.nn.LocallyConnected2D
import com.intel.analytics.bigdl.dllib.nn.abstractnn.DataFormat
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.{Shape, TestUtils}
class LocallyConnected2DSpec extends KerasBaseSpec {
"LocallyConnected2D NHWC Float" should "be ok" in {
ifskipTest()
val kerasCode =
"""
|input_tensor = Input(shape=[3,6,2])
|input = np.array([[[[1,2], [2,3], [3,4],[4,5],[5,6],[6,7]],
| [[2,3], [3,4],[4,5],[5,6],[6,7], [1,2]],
| [[1,2], [2,3], [3,4],[4,5],[6,7],[5,6]]]])
|output_tensor = LocallyConnected2D(3, 2, 1, dim_ordering="tf",
|input_shape=(3,6,2))(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val locallyConnected2d =
LocallyConnected2D[Float](2, 6, 3, 3, 1, 2, format = DataFormat.NHWC)
val a = locallyConnected2d.parameters()
val wc = (data: Array[Tensor[Float]]) => {
val out = new Array[Tensor[Float]](data.length)
val d1l: Int = data(0).size(1)
val d2l: Int = data(0).size(2)
val d3l: Int = data(0).size(3)
out(0) = Tensor(d1l, d3l, d2l)
val page: Int = d2l * d3l
for (i <- 0 to d1l * d2l * d3l - 1) {
val d1 = i / page + 1
val d2 = (i % page) / (d3l) + 1
val d3 = (i % page) % d3l + 1
val v = data(0).valueAt(d1, d2, d3)
out(0).setValue(d1, d3, d2, v)
}
if (data.length > 1) {
out(1) = data(1)
}
out
}
checkOutputAndGrad(locallyConnected2d, kerasCode, wc)
}
"LocallyConnected1D computeOutputShape NCHW" should "work properly" in {
val layer = LocallyConnected2D[Float](3, 12, 12, 3, 2, 2, 2, 1)
TestUtils.compareOutputShape(layer, Shape(3, 12, 12)) should be (true)
}
"LocallyConnected2D computeOutputShape NHWC" should "work properly" in {
val layer = LocallyConnected2D[Float](2, 16, 12, 4, 1, 2, format = DataFormat.NHWC)
TestUtils.compareOutputShape(layer, Shape(12, 16, 2)) should be (true)
}
}
|
intel-analytics/BigDL
|
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/keras/LocallyConnected2DSpec.scala
|
Scala
|
apache-2.0
| 2,752 |
package com.typesafe.slick.examples.lifted
import scala.concurrent.{Future, Await}
import scala.concurrent.duration.Duration
//#imports
// Use H2Driver to connect to an H2 database
import scala.slick.driver.H2Driver.api._
//#imports
/**
* A simple example that uses statically typed queries against an in-memory
* H2 database. The example data comes from Oracle's JDBC tutorial at
* http://download.oracle.com/javase/tutorial/jdbc/basics/tables.html.
*/
object GettingStartedOverview extends App {
//#quick-imports
import scala.slick.driver.H2Driver.api._
//#quick-imports
//#quick-schema
class Coffees(tag: Tag) extends Table[(String, Double)](tag, "COFFEES") {
def name = column[String]("COF_NAME", O.PrimaryKey)
def price = column[Double]("PRICE")
def * = (name, price)
}
val coffees = TableQuery[Coffees]
//#quick-schema
val f1 =
//#quick-query
Database.forURL("jdbc:h2:mem:test1", driver = "org.h2.Driver").run(
//#quick-query
coffees.schema.create andThen
//#quick-query
( for( c <- coffees; if c.price < 10.0 ) yield c.name ).result
//#quick-query
andThen
//#quick-query
// or
coffees.filter(_.price < 10.0).map(_.name).result
)
//#quick-query
Await.result(f1, Duration.Inf)
val f2 = Database.forURL("jdbc:h2:mem:test1", driver = "org.h2.Driver").run(
coffees.schema.create andThen {
//#what-is-slick-micro-example
val limit = 10.0
// Your query could look like this:
( for( c <- coffees; if c.price < limit ) yield c.name ).result
//#what-is-slick-micro-example
} andThen {
val limit = 10.0
//#what-is-slick-micro-example
// Or using Plain SQL String Interpolation:
sql"select COF_NAME from COFFEES where PRICE < $limit".as[String]
// Both queries result in SQL equivalent to:
// select COF_NAME from COFFEES where PRICE < 10.0
//#what-is-slick-micro-example
}
)
Await.result(f2, Duration.Inf)
//#features-scala-collections
// Query that only returns the "name" column
coffees.map(_.name)
// Query that does a "where price < 10.0"
coffees.filter(_.price < 10.0)
//#features-scala-collections
{
val db = Database.forURL("jdbc:h2:mem:test1", driver = "org.h2.Driver")
val f4 = {
//#features-type-safe
// The result of "select PRICE from COFFEES" is a Seq of Double
// because of the type safe column definitions
val coffeeNames: Future[Seq[Double]] = db.run(
//#features-type-safe
coffees.schema.create andThen
//#features-type-safe
coffees.map(_.price).result
)
// Query builders are type safe:
coffees.filter(_.price < 10.0)
// Using a string in the filter would result in a compilation error
//#features-type-safe
coffeeNames
}
Await.result(f4, Duration.Inf)
}
//#features-composable
// Create a query for coffee names with a price less than 10, sorted by name
coffees.filter(_.price < 10.0).sortBy(_.name).map(_.name)
// The generated SQL is equivalent to:
// select name from COFFEES where PRICE < 10.0 order by NAME
//#features-composable
}
|
nuodb/slick
|
src/sphinx/code/GettingStartedOverview.scala
|
Scala
|
bsd-2-clause
| 3,143 |
package ecommerce.sales.app
import akka.actor._
import akka.kernel.Bootable
import ecommerce.sales.{Event, ReservationAggregateRoot}
import pl.newicom.dddd.actor.{ActorFactory, DefaultConfig, PassivationConfig}
import pl.newicom.dddd.aggregate._
import pl.newicom.dddd.coordination.ReceptorConfig
import pl.newicom.dddd.monitoring.AggregateRootMonitoring
import pl.newicom.dddd.office.LocalOfficeId
import pl.newicom.dddd.process.CommandReceptorSupport.CommandReception
import pl.newicom.dddd.process.{Receptor, ReceptorActorFactory}
import pl.newicom.eventstore.EventstoreSubscriber
trait SalesBackendConfiguration {
this: Bootable =>
implicit object ReservationARFactory extends AggregateRootActorFactory[ReservationAggregateRoot] {
override def props(pc: PassivationConfig) =
Props(new ReservationAggregateRoot(DefaultConfig(pc, replyWithEvents = false)) with AggregateRootMonitoring with AggregateRootLogger[Event])
}
implicit def commandReceptorActorFactory[A <: CommandReception : LocalOfficeId : ActorFactory]: ReceptorActorFactory[A] = new ReceptorActorFactory[A] {
def receptorFactory: ReceptorFactory = (config: ReceptorConfig) => new Receptor(config) with EventstoreSubscriber
}
}
|
pawelkaczor/ddd-leaven-akka-v2
|
sales/write-back/src/main/scala/ecommerce/sales/app/SalesBackendConfiguration.scala
|
Scala
|
mit
| 1,221 |
package com.typesafe.slick.testkit.tests
import org.junit.Assert._
import com.typesafe.slick.testkit.util.{RelationalTestDB, TestkitTest}
class AggregateTest extends TestkitTest[RelationalTestDB] {
import tdb.profile.simple._
override val reuseInstance = true
def testAggregates {
class T(tag: Tag) extends Table[(Int, Option[Int])](tag, "t2") {
def a = column[Int]("a")
def b = column[Option[Int]]("b")
def * = (a, b)
}
val ts = TableQuery[T]
ts.ddl.create
ts ++= Seq((1, Some(1)), (1, Some(2)), (1, Some(3)))
def q1(i: Int) = for { t <- ts if t.a === i } yield t
def q2(i: Int) = (q1(i).length, q1(i).map(_.a).sum, q1(i).map(_.b).sum, q1(i).map(_.b).avg)
val q2_0 = q2(0).shaped
val q2_1 = q2(1).shaped
println(q2_0.run)
println(q2_1.run)
assertEquals((0, None, None, None), q2_0.run)
assertEquals((3, Some(3), Some(6), Some(2)), q2_1.run)
}
def testGroupBy = {
class T(tag: Tag) extends Table[(Int, Option[Int])](tag, "t3") {
def a = column[Int]("a")
def b = column[Option[Int]]("b")
def * = (a, b)
}
val ts = TableQuery[T]
ts.ddl.create
ts ++= Seq((1, Some(1)), (1, Some(2)), (1, Some(3)))
ts ++= Seq((2, Some(1)), (2, Some(2)), (2, Some(5)))
ts ++= Seq((3, Some(1)), (3, Some(9)))
println("=========================================================== q0")
val q0 = ts.groupBy(_.a)
val q1 = q0.map(_._2.length).sortBy(identity)
val r0 = q1.run
val r0t: Seq[Int] = r0
assertEquals(Vector(2, 3, 3), r0t)
println("=========================================================== q")
val q = (for {
(k, v) <- ts.groupBy(t => t.a)
} yield (k, v.length, v.map(_.a).sum, v.map(_.b).sum)).sortBy(_._1)
val r = q.run
val rt = r: Seq[(Int, Int, Option[Int], Option[Int])]
println(r)
assertEquals(Vector((1, 3, Some(3), Some(6)), (2, 3, Some(6), Some(8)), (3, 2, Some(6), Some(10))), rt)
class U(tag: Tag) extends Table[Int](tag, "u") {
def id = column[Int]("id")
def * = id
}
val us = TableQuery[U]
us.ddl.create
us ++= Seq(1, 2, 3)
println("=========================================================== q2")
val q2 = (for {
u <- us
t <- ts if t.a === u.id
} yield (u, t)).groupBy(_._1.id).map {
case (id, q) => (id, q.length, q.map(_._2.a).sum, q.map(_._2.b).sum)
}
val r2 = q2.run
val r2t = r2: Seq[(Int, Int, Option[Int], Option[Int])]
println(r2)
assertEquals(Set((1, 3, Some(3), Some(6)), (2, 3, Some(6), Some(8)), (3, 2, Some(6), Some(10))), r2.toSet)
println("=========================================================== q3")
val q3 = (for {
(x, q) <- ts.map(t => (t.a + 10, t.b)).groupBy(_._1)
} yield (x, q.map(_._2).sum)).sortBy(_._1)
val r3 = q3.run
val r3t = r3: Seq[(Int, Option[Int])]
println(r3)
assertEquals(Vector((11, Some(6)), (12, Some(8)), (13, Some(10))), r3)
println("=========================================================== q4")
val q4 = (for {
(x, q) <- ts.groupBy(t => (t.a, t.b))
} yield (x, q.length)).sortBy(_._1)
val r4 = q4.run
val r4t = r4: Seq[((Int, Option[Int]), Int)]
println(r4)
assertEquals(Vector( ((1,Some(1)),1), ((1,Some(2)),1), ((1,Some(3)),1),
((2,Some(1)),1), ((2,Some(2)),1), ((2,Some(5)),1),
((3,Some(1)),1), ((3,Some(9)),1)), r4)
println("=========================================================== q5")
val q5 = ts
.filter(_.a === 1)
.map(t => (t.a, t.b))
.sortBy(_._2)
.groupBy(x => (x._1, x._2))
.map { case (a, _) => (a._1, a._2) }
assertEquals(Set((1, Some(1)), (1, Some(2)), (1, Some(3))), q5.run.toSet)
us += 4
println("=========================================================== q6")
val q6 = (for {
(u, t) <- us leftJoin ts on (_.id === _.a)
} yield (u, t)).groupBy(_._1.id).map {
case (id, q) => (id, q.length, q.map(_._1).length, q.map(_._2).length)
}
assertEquals(Set((1, 3, 3, 3), (2, 3, 3, 3), (3, 2, 2, 2), (4, 1, 1, 0)), q6.run.toSet)
println("=========================================================== q7")
val q7 = ts.groupBy(_.a).map { case (a, ts) =>
(a, ts.map(_.b).sum, ts.map(_.b).min, ts.map(_.b).max, ts.map(_.b).avg)
}
assertEquals(Set(
(1, Some(6), Some(1), Some(3), Some(2)),
(2, Some(8), Some(1), Some(5), Some(2)),
(3, Some(10), Some(1), Some(9), Some(5))), q7.run.toSet)
println("=========================================================== q8")
val q8 = us.map( _ => "test").groupBy(x => x).map(_._2.max)
assertEquals((Seq(Some("test"))), q8.run)
val q8b = for( (key, group) <- us.map(_ => "x").groupBy(co => co) )
yield (key, group.map(co => co).max )
assertEquals((Seq(("x", Some("x")))), q8b.run)
val q8c = for( (key, group) <- us.map(_ => 5).groupBy(co => co) )
yield (key, group.map(co => co + co).sum )
assertEquals((Seq((5, Some(40)))), q8c.run)
println("=========================================================== q9")
val res9 = Set(
(1, Some(1)), (1, Some(2)), (1, Some(3)),
(2, Some(1)), (2, Some(2)), (2, Some(5)),
(3, Some(1)), (3, Some(9))
)
val q9 = ts.groupBy(x => x).map(_._1)
assertEquals(res9, q9.run.toSet)
val q9b = ts.map(x => x).groupBy(_.*).map(_._1)
assertEquals(res9, q9b.run.toSet)
val q9c = ts.map(x => x).groupBy(x => x).map(_._1)
assertEquals(res9, q9c.run.toSet)
println("=========================================================== q10")
val q10 = (for {
m <- ts
} yield m) groupBy (_.a) map {
case (id, data) => (id, data.map(_.b.asColumnOf[Option[Double]]).max)
}
assertEquals(Set((2,Some(5.0)), (1,Some(3.0)), (3,Some(9.0))), q10.run.toSet)
case class Pair(a:Int,b:Option[Int])
class T4(tag: Tag) extends Table[Pair](tag, "t4") {
def a = column[Int]("a")
def b = column[Option[Int]]("b")
def * = (a, b) <> (Pair.tupled,Pair.unapply)
}
val t4s = TableQuery[T4]
t4s.ddl.create
t4s ++= Seq(Pair(1, Some(1)), Pair(1, Some(2)))
t4s ++= Seq(Pair(1, Some(1)), Pair(1, Some(2)))
t4s ++= Seq(Pair(1, Some(1)), Pair(1, Some(2)))
println("=========================================================== q11")
val expected11 = Set(
Pair(1, Some(1)), Pair(1, Some(2))
)
val q12 = t4s
val res12 = q12.run
assertEquals(6, res12.size)
assertEquals(expected11, res12.toSet)
val q13 = t4s.map(identity)
val res13 = q13.run
assertEquals(6, res13.size)
assertEquals(expected11, res13.toSet)
val q11 = t4s.groupBy(identity).map(_._1)
val res11 = q11.run
assertEquals(expected11, res11.toSet)
assertEquals(2, res11.size)
}
def testIntLength {
class A(tag: Tag) extends Table[Int](tag, "A_testIntLength") {
def id = column[Int]("ID")
def * = id
}
val as = TableQuery[A]
as.ddl.create
as += 1
val q1 = as.groupBy(_.id).map {
case (_, q) => (q.map(_.id).min, q.length)
}
q1.run
}
def testGroup3 {
case class Tab(col1: String, col2: String, col3: String, col4: Int)
class Tabs(tag: Tag) extends Table[Tab](tag, "TAB_group3") {
def col1 = column[String]("COL1")
def col2 = column[String]("COL2")
def col3 = column[String]("COL3")
def col4 = column[Int]("COL4")
def * = (col1, col2, col3, col4) <> (Tab.tupled, Tab.unapply)
}
val Tabs = TableQuery[Tabs]
Tabs.ddl.create
Tabs ++= Seq(
Tab("foo", "bar", "bat", 1),
Tab("foo", "bar", "bat", 2),
Tab("foo", "quux", "bat", 3),
Tab("baz", "quux", "bat", 4)
)
val q1 = Tabs.groupBy(t => (t.col1, t.col2, t.col3)).map {
case (grp, t) => (grp._1, grp._2, t.map(_.col4).sum)
}
assertEquals(Set(("baz","quux",Some(4)), ("foo","quux",Some(3)), ("foo","bar",Some(3))), q1.run.toSet)
val q2 = Tabs.groupBy(t => ((t.col1, t.col2), t.col3)).map {
case (grp, t) => (grp._1._1, grp._1._2, t.map(_.col4).sum)
}
assertEquals(Set(("baz","quux",Some(4)), ("foo","quux",Some(3)), ("foo","bar",Some(3))), q2.run.toSet)
}
def testMultiMapAggregates {
class B(tag: Tag) extends Table[(Long, String, String)](tag, "b_multimap") {
def id = column[Long]("id", O.PrimaryKey)
def b = column[String]("b")
def d = column[String]("d")
def * = (id, b, d)
}
val bs = TableQuery[B]
class A(tag: Tag) extends Table[(Long, String, Long, Long)](tag, "a_multimap") {
def id = column[Long]("id", O.PrimaryKey)
def a = column[String]("a")
def c = column[Long]("c")
def fkId = column[Long]("fkId")
def * = (id, a, c, fkId)
}
val as = TableQuery[A]
(as.ddl ++ bs.ddl).create
val q1 = as.groupBy(_.id).map(_._2.map(x => x).map(x => x.a).min)
assert(q1.run.toList.isEmpty)
val q2 =
(as leftJoin bs on (_.id === _.id)).map { case (c, s) =>
val name = s.b
(c, s, name)
}.groupBy { prop =>
val c = prop._1
val s = prop._2
val name = prop._3
s.id
}.map { prop =>
val supId = prop._1
val c = prop._2.map(x => x._1)
val s = prop._2.map(x => x._2)
val name = prop._2.map(x => x._3)
(name.min, s.map(_.b).min, supId, c.length)
}
assert(q2.run.isEmpty)
val q4 = as.flatMap { t1 =>
bs.withFilter { t2 =>
t1.fkId === t2.id && t2.d === ""
}.map(t2 => (t1, t2))
}.groupBy { prop =>
val t1 = prop._1
val t2 = prop._2
(t1.a, t2.b)
}.map { prop =>
val a = prop._1._1
val b = prop._1._2
val t1 = prop._2.map(_._1)
val t2 = prop._2.map(_._2)
val c3 = t1.map(_.c).max
scala.Tuple3(a, b, c3)
}
assert(q4.run.isEmpty)
}
}
|
dvinokurov/slick
|
slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/AggregateTest.scala
|
Scala
|
bsd-2-clause
| 9,896 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon.utils
import scala.collection.mutable.{Stack, Set => MSet}
import scala.collection.mutable.Builder
import scala.collection.{Iterable, IterableLike, GenSet}
/** A stack of mutable sets with a set-like API and methods to push and pop */
class IncrementalSet[A] extends IncrementalState
with Iterable[A]
with IterableLike[A, Set[A]]
with Builder[A, IncrementalSet[A]] {
private[this] val stack = new Stack[MSet[A]]()
override def repr = stack.flatten.toSet
/** Removes all the elements */
override def clear(): Unit = {
stack.clear()
}
/** Removes all the elements and creates a new set */
def reset(): Unit = {
clear()
push()
}
/** Creates one more set level */
def push(): Unit = {
stack.push(MSet())
}
/** Removes one set level */
def pop(): Unit = {
stack.pop()
}
/** Returns true if the set contains elem */
def apply(elem: A) = repr.contains(elem)
/** Returns true if the set contains elem */
def contains(elem: A) = repr.contains(elem)
/** Returns an iterator over all the elements */
def iterator = stack.flatten.iterator
/** Add an element to the head set */
def += (elem: A) = { stack.head += elem; this }
/** Removes an element from all stacked sets */
def -= (elem: A) = { stack.foreach(_ -= elem); this }
override def newBuilder = new scala.collection.mutable.SetBuilder(Set.empty[A])
def result = this
push() // By default, creates a new empty mutable set ready to add elements to it.
}
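// Hedged usage sketch (not part of the original file): additions made after a
// push() are discarded by the matching pop(), while earlier levels survive.
object IncrementalSetUsageSketch {
  def demo(): Boolean = {
    val s = new IncrementalSet[Int]
    s += 1    // goes into the level created by the constructor
    s.push()
    s += 2    // goes into the new level
    s.pop()   // drops the level containing 2
    s.contains(1) && !s.contains(2)
  }
}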
|
regb/leon
|
src/main/scala/leon/utils/IncrementalSet.scala
|
Scala
|
gpl-3.0
| 1,610 |
import gruenewa.grid.GridRun
object Exec {
def main(args: Array[String]) {
    val executor = GridRun.startExecutor()
    try {
      printf("16 + 1 = %d\n", executor.apply[Int, Int](1 + _)(16))
} finally {
executor.close()
}
}
}
|
gruenewa/gruenewa-grid
|
samples/Exec.scala
|
Scala
|
gpl-3.0
| 261 |
package pl.szymonmatejczyk.subgraphsampling
import com.twitter.cassovary.graph.Graph
/**
* Created by szymonmatejczyk on 26.03.15.
*/
class MHSampling(graph: Graph) {
// def neighbors(subraphNodes: collection.Set[Int]): Seq[collection.Set[Int]] = {
//
// }
}
|
szymonm/subgraphSampling
|
src/main/scala/pl/szymonmatejczyk/subgraphsampling/MHSampling.scala
|
Scala
|
apache-2.0
| 265 |
package controllers
import javax.inject._
import play.api.mvc._
/**
* This controller creates an `Action` to handle HTTP requests to the
* application's home page.
*/
@Singleton
class HomeController @Inject() extends Controller {
/**
* Create an Action to render an HTML page with a welcome message.
* The configuration in the `routes` file means that this method
* will be called when the application receives a `GET` request with
* a path of `/`.
*/
def index = Action {
Ok("Hello World!")
}
}
|
daniloradenovic/helloworld-rest
|
scala-play/app/controllers/HomeController.scala
|
Scala
|
mit
| 537 |
package net.liftweb.example.snippet
import _root_.org.specs._
import _root_.org.specs.Sugar._
import _root_.org.specs.runner._
import _root_.net.liftweb.example.model._
import _root_.net.liftweb.http.{S, Req, LiftSession}
import _root_.net.liftweb.util.{Full, Empty}
// This file crashes the compiler. TODO: Investigate, report, and fix
// class WikiTest extends JUnit4(WikiSpec)
// object WikiSpec extends Specification with MockEntries {
// "In the following spec, 'pageName' refers to the value of the S parameter 'wiki_page'" +
// "The 'main' function" should { doBefore { createMocks }
// "return all existing entries if pageName is 'all'" in {
// withEntries(WikiEntry.create.name("EntryOne"), WikiEntry.create.name("EntryTwo"))
// userRequests("all")
// inSession {
// wikiMain must \\\\("a", Map("href" -> "/wiki/EntryOne"))
// wikiMain must \\\\("a", Map("href" -> "/wiki/EntryTwo"))
// }
// }
// "return a new page with a form for a 'HomePage' entry if the wiki_page parameter is not specified" in {
// withNoEntries; userRequests("nothing")
// inSession {
// wikiMain must \\\\("form", Map("action" -> "/wiki/HomePage", "method" -> "GET"))
// }
// }
// "return a new page with a form for a 'NewEntry' entry if there is no entry with the name 'NewEntry' in the database" in {
// withNoEntries; userRequests("NewEntry")
// inSession {
// wikiMain must \\\\("form", Map("action" -> "/wiki/NewEntry", "method" -> "GET"))
// }
// }
// "return an existing entry if there is an entry named 'ExistingEntry' in the database" in {
// withEntries(WikiEntry.create.name("ExistingEntry")); userRequests("ExistingEntry")
// inSession {
// wikiMain must \\\\("form", Map("action" -> "/wiki/ExistingEntry", "method" -> "GET"))
// }
// }
// }
// "A newly created entry" should { doBefore { createMocks }
// "be named 'HomePage' if pageName is not specified" in {
// withNoEntries; userRequests("nothing")
// inSession {
// wikiMain.toString must include("Create Entry named HomePage")
// }
// }
// "be named 'pageName' if pageName is specified" in {
// withNoEntries; userRequests("MyPage")
// inSession {
// wikiMain.toString must include("Create Entry named MyPage")
// }
// }
// }
// }
//
// import _root_.net.liftweb.mapper._
// import _root_.net.liftweb.example.model._
// import _root_.net.liftweb.example.snippet._
// trait MockEntries extends MockRequest {
// var wikiEntries: MetaWikiEntry = _
// var requested = "all"
// def wikiMain = {
// trait MockedMetaWikiEntry extends MetaWikiEntry {
// override def find(q: QueryParam[WikiEntry]*) = wikiEntries.find(q:_*)
// override def findAll(q: QueryParam[WikiEntry]*) = wikiEntries.findAll(q:_*)
// override def create = wikiEntries.create
// override def findAll = wikiEntries.findAll
// }
// val wiki = new Wiki with MockedMetaWikiEntry
// wiki.main
// }
// override def createMocks = {
// super.createMocks
// wikiEntries = mock[MetaWikiEntry]
// }
// def userRequests(page: String) {
// if (page == "nothing")
// unsetParameter("wiki_page")
// else
// setParameter("wiki_page", page)
// requested = page
// }
// def withNoEntries = {
// expect {
// 0.atLeastOf(wikiEntries).find(any(classOf[QueryParam[WikiEntry]])).willReturn(Empty)
// 0.atLeastOf(wikiEntries).create.willReturn(new WikiEntry)
// }
// }
// def withEntries(entries: WikiEntry*) = {
// expect {
// if (entries.isEmpty)
// one(wikiEntries).find(any(classOf[QueryParam[WikiEntry]])).willReturn(Empty)
// else if (requested == "all")
// 0.atLeastOf(wikiEntries).findAll willReturn entries.toList
// else
// one(wikiEntries).find(any(classOf[QueryParam[WikiEntry]])).willReturn(Full(entries(0)))
// 0.atLeastOf(wikiEntries).findAll(any(classOf[QueryParam[WikiEntry]])).willReturn(entries.toList)
// }
// }
// }
// import _root_.org.specs.mock._
// import _root_.javax.servlet.http._
// trait MockRequest extends JMocker with ClassMocker {
// var request = mock[Req]
// var httpRequest = mock[HttpServletRequest]
// var session = mock[LiftSession]
// def createMocks = {
// request = mock[Req]
// httpRequest = mock[HttpServletRequest]
// session = mock[LiftSession]
// expect {
// 0.atLeastOf(request).request.willReturn(httpRequest)
// 0.atLeastOf(httpRequest).getCookies
// }
// }
// def inSession(f: => Any) {
// S.init(request, session) {
// f
// }
// }
// def unsetParameter(name: String) {
// expect {
// 0.atLeastOf(request).param(equal(name)).willReturn(None)
// }
// }
// def setParameter(name: String, value: String) {
// expect {
// 0.atLeastOf(request).param(equal(name)).willReturn(Some(value))
// }
// }
// }
|
andreum/liftweb
|
sites/example/src/test/scala/net/liftweb/example/snippet/WikiSpec.scala
|
Scala
|
apache-2.0
| 5,027 |
import sbt._
class UseColor(info: ProjectInfo) extends DefaultProject(info)
{
def ivyCacheDirectory = outputPath / "ivy-cache"
override def updateOptions = CacheDirectory(ivyCacheDirectory) :: super.updateOptions.toList
override def managedStyle = ManagedStyle.Ivy
val repo = Resolver.file("test-repo", ("repo" / "test").asFile)
def color = FileUtilities.readString("color".asFile, log).right.getOrElse(error("No color specified"))
override def libraryDependencies = Set(
"org.scala-tools.sbt" % "test-ivy-extra" %"1.0" extra("e:color" -> color)
)
}
|
sbt/sbt-zero-seven
|
src/sbt-test/dependency-management/extra/changes/UseColor.scala
|
Scala
|
bsd-3-clause
| 561 |
package jp.sf.amateras.scala.nio
import java.io._
object StreamUtils {
/**
* Copy contents of the input stream to the output stream.
   * Neither of the streams is closed in this method.
*
* @param in the input stream
* @param out the output stream
*/
def transfer(in: InputStream, out: OutputStream): Unit = {
val buf = new Array[Byte](1024 * 8)
var length = 0
while(length != -1){
length = in.read(buf)
if(length > 0){
out.write(buf, 0, length)
}
}
}
/**
* Read contents of the input stream as byte array.
* The input stream is not closed in this method.
*
* @param in the input stream
* @return the input stream contents as byte array
*/
def readAsBytes(in: InputStream): Array[Byte] = {
using(new ByteArrayOutputStream()){ out =>
transfer(in, out)
out.toByteArray
}
}
/**
* Read contents of the input stream as string.
* The input stream is not closed in this method.
*
* @param in the input stream
* @param charset the character encoding, default is UTF-8.
* @return the input stream contents as string
*/
def readAsString(in: InputStream, charset: String = "UTF-8"): String =
new String(readAsBytes(in), charset)
/**
   * Process each line of the input stream.
* The input stream is not closed in this method.
*
* @param in the input stream
* @param charset the character encoding, default is UTF-8.
* @param f the function to process lines
*/
def foreachLines(in: InputStream, charset: String = "UTF-8")(f: String => Unit): Unit = {
val reader = new BufferedReader(new InputStreamReader(in, charset))
var line: String = reader.readLine()
while(line != null){
f(line)
line = reader.readLine()
}
}
/**
   * Process the contents of the input stream in chunks of bytes.
   * The input stream is not closed in this method.
   *
   * @param in the input stream
   * @param chunkSize the chunk size (bytes), default is 1024 * 8.
   * @param f the function applied to each chunk of bytes
*/
def foreachBytes(in: InputStream, chunkSize: Int = 1024 * 8)(f: Array[Byte] => Unit): Unit = {
val bytes = new Array[Byte](chunkSize)
var length = in.read(bytes)
while(length != -1){
      // pass only the bytes actually read; the final chunk may be shorter than chunkSize
      f(if (length == chunkSize) bytes else java.util.Arrays.copyOf(bytes, length))
length = in.read(bytes)
}
}
}
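// Hedged usage sketch (not part of the original file): reads a file as UTF-8
// text. `using` is assumed to come from this project's package object, as in
// readAsBytes above.
object StreamUtilsUsageSketch {
  import java.io.FileInputStream
  def readTextFile(path: String): String =
    using(new FileInputStream(path)){ in => StreamUtils.readAsString(in) }
}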
|
takezoe/scala-nio
|
src/main/scala/jp/sf/amateras/scala/nio/StreamUtils.scala
|
Scala
|
apache-2.0
| 2,300 |
package org.scalaide.core.lexical
import org.scalaide.ui.syntax.ScalaSyntaxClass
import org.eclipse.jdt.core.JavaCore
import org.eclipse.jface.preference.IPreferenceStore
import org.eclipse.jface.text.Document
import org.eclipse.jface.text.TextAttribute
import org.eclipse.jface.text.rules.Token
import org.junit.ComparisonFailure
import org.junit.Test
import org.mockito.Mockito._
import org.scalaide.core.internal.lexical.ScaladocTokenScanner
class ScaladocTokenScannerTest {
var scaladocAtt: String = _
var annotationAtt: String = _
var macroAtt: String = _
var taskTagAtt: String = _
/**
* Tokenizes Scaladoc content. The complete input is handled as Scaladoc
* content.
*
   * A sequence of tuples is returned, one per token. The first element is a
   * string identifying the token's text attribute (its syntax class), the
   * second element is the token's offset, and the last element is its length.
*/
def tokenize(str: String): Seq[(String, Int, Int)] =
tokenize(str, 0, str.length())
/**
* Tokenizes Scaladoc content that is placed somewhere in a Scala source code
* snippet, which is passed in `str`. The Scaladoc content can be referenced by
* passing its offset and its length. The offset must start at the `/` sign
* of the Scaladoc starter marker. The length includes start and end tags of
* Scaladoc.
*
   * A sequence of tuples is returned, one per token. The first element is a
   * string identifying the token's text attribute (its syntax class), the
   * second element is the token's offset, and the last element is its length.
*/
def tokenize(str: String, offset: Int, length: Int): Seq[(String, Int, Int)] = {
val scaladocClass = mock(classOf[ScalaSyntaxClass])
val annotationClass = mock(classOf[ScalaSyntaxClass])
val macroClass = mock(classOf[ScalaSyntaxClass])
val taskTagClass = mock(classOf[ScalaSyntaxClass])
val prefStore = mock(classOf[IPreferenceStore])
val sampleTaskTags = "XXX,TODO,@todo,$todo,!todo"
when(prefStore.getString(JavaCore.COMPILER_TASK_TAGS)).thenReturn(sampleTaskTags)
val scaladocAtt = mock(classOf[TextAttribute])
val annotationAtt = mock(classOf[TextAttribute])
val macroAtt = mock(classOf[TextAttribute])
val taskTagAtt = mock(classOf[TextAttribute])
when(scaladocAtt.toString()).thenReturn("scaladocAtt")
when(annotationAtt.toString()).thenReturn("annotationAtt")
when(macroAtt.toString()).thenReturn("macroAtt")
when(taskTagAtt.toString()).thenReturn("taskTagAtt")
this.scaladocAtt = scaladocAtt.toString()
this.annotationAtt = annotationAtt.toString()
this.macroAtt = macroAtt.toString()
this.taskTagAtt = taskTagAtt.toString()
when(scaladocClass.getTextAttribute(prefStore)).thenReturn(scaladocAtt)
when(annotationClass.getTextAttribute(prefStore)).thenReturn(annotationAtt)
when(macroClass.getTextAttribute(prefStore)).thenReturn(macroAtt)
when(taskTagClass.getTextAttribute(prefStore)).thenReturn(taskTagAtt)
val scanner = new ScaladocTokenScanner(
prefStore,
scaladocClass,
annotationClass,
macroClass,
taskTagClass)
val doc = {
val rawInput = str.filterNot(_ == '^')
val doc = new Document(rawInput)
val partitioner = ScalaCodePartitioner.documentPartitioner()
doc.setDocumentPartitioner(partitioner)
partitioner.connect(doc)
doc
}
scanner.setRange(doc, offset, length)
val data = Iterator
.continually((scanner.nextToken(), scanner.getTokenOffset(), scanner.getTokenLength()))
.takeWhile(_._1 != Token.EOF)
.map { case (ta, off, len) => (ta.getData().toString(), off, len) }
.toSeq
/*
     * The scanner returns a token for each character, but we want all consecutive
     * tokens of the same type grouped into a single token.
*/
val groupedToken = (Seq(data.head) /: data.tail) {
case (token, t @ (scc, _, len)) =>
val (sccBefore, offBefore, lenBefore) = token.last
if (sccBefore == scc)
token.init :+ ((scc, offBefore, lenBefore+len))
else
token :+ t
}
groupedToken
}
implicit class Assert_===[A](actual: A) {
def ===(expected: A): Unit = {
if (actual != expected)
throw new ComparisonFailure("actual != expected,",
expected.toString(),
actual.toString())
}
}
@Test
def no_annotation(): Unit = {
val res = tokenize("""/***/""")
res === Seq((scaladocAtt, 0, 5))
}
@Test
def single_annotation(): Unit = {
val res = tokenize("""/**@param world desc*/""")
res === Seq((scaladocAtt, 0, 3), (annotationAtt, 3, 6), (scaladocAtt, 9, 13))
}
@Test
def single_annotation_without_text(): Unit = {
val res = tokenize("""/**@param*/""")
res === Seq((scaladocAtt, 0, 3), (annotationAtt, 3, 6), (scaladocAtt, 9, 2))
}
@Test
def consecutive_annotations_should_not_be_handled_as_single_annotation(): Unit = {
val res = tokenize("""/**@pa@pa*/""")
res === Seq((scaladocAtt, 0, 3), (annotationAtt, 3, 3), (scaladocAtt, 6, 5))
}
@Test
def consecutive_macros_should_not_be_handled_as_single_macro(): Unit = {
val res = tokenize("""/**$pa$pa*/""")
res === Seq((scaladocAtt, 0, 3), (macroAtt, 3, 3), (scaladocAtt, 6, 5))
}
@Test
def identifier_as_name_in_annotation(): Unit = {
val res = tokenize("""/**@azAZ09_*/""")
res === Seq((scaladocAtt, 0, 3), (annotationAtt, 3, 8), (scaladocAtt, 11, 2))
}
@Test
def identifier_as_name_in_macro(): Unit = {
val res = tokenize("""/**$azAZ09_*/""")
res === Seq((scaladocAtt, 0, 3), (macroAtt, 3, 8), (scaladocAtt, 11, 2))
}
@Test
def multiple_annotation(): Unit = {
val res = tokenize("""/**@pa abc @param @param */""")
res === Seq(
(scaladocAtt, 0, 3), (annotationAtt, 3, 3), (scaladocAtt, 6, 5),
(annotationAtt, 11, 6), (scaladocAtt, 17, 1), (annotationAtt, 18, 6),
(scaladocAtt, 24, 3))
}
@Test
def multiple_macros(): Unit = {
val res = tokenize("""/**$def $def text $def*/""")
res === Seq(
(scaladocAtt, 0, 3), (macroAtt, 3, 4), (scaladocAtt, 7, 1),
(macroAtt, 8, 4), (scaladocAtt, 12, 6), (macroAtt, 18, 4),
(scaladocAtt, 22, 2))
}
@Test
def single_line_in_scaladoc_should_not_produce_an_out_of_bound_error(): Unit = {
val res = tokenize(" * @param")
res === Seq((scaladocAtt, 0, 3), (annotationAtt, 3, 6))
}
@Test
def no_highlighting_of_a_single_start_symbol(): Unit = {
val res = tokenize("a @ b $")
res === Seq((scaladocAtt, 0, 7))
}
@Test
def start_symbol_between_identifiers_should_handled_as_scaladoc(): Unit = {
val res = tokenize("a@b a$b")
res === Seq((scaladocAtt, 0, 7))
}
@Test
def no_highlighting_of_start_symbol_before_another_start_symbol(): Unit = {
val res = tokenize("@@ab $$ab")
res === Seq(
(scaladocAtt, 0, 1), (annotationAtt, 1, 3), (scaladocAtt, 4, 2),
(macroAtt, 6, 3))
}
@Test
def no_highlighting_of_start_symbol_after_non_scaladoc(): Unit = {
val res = tokenize("@ab@ $ab$")
res === Seq(
(annotationAtt, 0, 3), (scaladocAtt, 3, 2), (macroAtt, 5, 3),
(scaladocAtt, 8, 1))
}
@Test
def part_of_source_code_snippet(): Unit = {
val res = tokenize("val i = 0; /**@param $def*/ def meth = 0", 11, 16)
res === Seq(
(scaladocAtt, 11, 3), (annotationAtt, 14, 6), (scaladocAtt, 20, 1),
(macroAtt, 21, 4), (scaladocAtt, 25, 2))
}
@Test
def single_task_tag(): Unit = {
val res = tokenize("/**TODO*/")
res === Seq((scaladocAtt, 0, 3), (taskTagAtt, 3, 4), (scaladocAtt, 7, 2))
}
@Test
def braces_macro(): Unit = {
val res = tokenize("/**?{abc}d*/".replace('?', '$'))
res === Seq((scaladocAtt, 0, 3), (macroAtt, 3, 6), (scaladocAtt, 9, 3))
}
@Test
def task_tag_that_starts_with_a_special_sign(): Unit = {
val res = tokenize("/**@todo$todo!todo@param*/")
res === Seq((scaladocAtt, 0, 3), (taskTagAtt, 3, 15), (scaladocAtt, 18, 8))
}
}
|
scala-ide/scala-ide
|
org.scala-ide.sdt.core.tests/src/org/scalaide/core/lexical/ScaladocTokenScannerTest.scala
|
Scala
|
bsd-3-clause
| 8,132 |
package com.olegych.scastie.api
package runtime
import play.api.libs.json.Json
protected[runtime] trait SharedRuntime {
def write(instrumentations: List[Instrumentation]): String = {
if (instrumentations.isEmpty) "" else Json.stringify(Json.toJson(instrumentations))
}
private val maxValueLength = 500
private def show[A](a: A): String =
    if (a == null) "null"
else a.toString
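  // Illustrative example: render(42, "Int") yields Value("42", "Int"); values whose string
  // form exceeds maxValueLength are truncated and suffixed with "...".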
protected[runtime] def render[T](a: T, typeName: String): Render = {
a match {
case html: Html => html
case v =>
val vs = show(v)
val out =
if (vs.size > maxValueLength) vs.take(maxValueLength) + "..."
else vs
Value(out, typeName.replace(Instrumentation.instrumentedObject + ".", ""))
}
}
}
|
scalacenter/scastie
|
runtime-scala/src/main/scala/com.olegych.scastie.api.runtime/SharedRuntime.scala
|
Scala
|
apache-2.0
| 760 |
package WorkingWithLists.P07
object P07 {
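  // Illustrative example: flatten(List(List(1, 1), 2, List(3, List(5, 8)))) == List(1, 1, 2, 3, 5, 8)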
def flatten(l: List[Any]): List[Any] = l.foldRight(List[Any]()) {
case (el: List[Any], lr) => flatten(el):::lr
case (e, lr) => e::lr
}
def flatten_FlatMap(l: List[Any]): List[Any] = l.flatMap {
case el: List[Any] => flatten_FlatMap(el)
case e => List(e)
}
}
|
ihac/Ninety-Nine-Scala-Problems
|
src/main/scala/WorkingWithLists/P07/P07.scala
|
Scala
|
gpl-3.0
| 326 |
package net.quasardb.spark.rdd
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SparkSession, Row, DataFrame}
import org.apache.spark.sql.types._
import org.apache.spark._
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets.UTF_8
import java.sql.Timestamp
import net.quasardb.qdb._
import net.quasardb.qdb.ts.{TimeRange, Timespec}
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
// import net.quasardb.spark.rdd.{AggregateQuery, Util}
import net.quasardb.spark.partitioner._
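/** A single aggregation result for one column of a table over a time range. */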
case class DoubleAggregation(
table: String,
column: String,
  aggregationType: String,
begin: Timestamp,
end: Timestamp,
count: Long,
result: Double
) extends Serializable
class DoubleAggregateRDD(
sc: SparkContext,
val uri: String,
val table: String,
val columns: Seq[String],
val input: Seq[AggregateQuery])(implicit securityOptions : Option[Session.SecurityOptions])
extends RDD[DoubleAggregation](sc, Nil) {
override protected def getPartitions = QdbPartitioner.computePartitions(uri)
override def compute(
split: Partition,
context: TaskContext): Iterator[DoubleAggregation] = {
val aggregate = new QdbDoubleAggregationCollection()
input.foreach {
_ match {
case AggregateQuery(begin, end, operation) => aggregate.add(
new QdbDoubleAggregation(
operation,
new TimeRange(
new Timespec(begin),
new Timespec(end))))
}
}
val partition: QdbPartition = split.asInstanceOf[QdbPartition]
val series: QdbTimeSeries = Util.createCluster(partition.uri).timeSeries(table)
// TODO: limit query to only the Partition
columns.map { column =>
series
.doubleAggregate(column, aggregate)
.toList
.map(DoubleAggregateRDD.fromJava(table, column)) }
.flatten
.iterator
}
def toDataFrame(sparkSession: SparkSession): DataFrame = {
val struct =
StructType(
StructField("table", StringType, true) ::
StructField("column", StringType, true) ::
StructField("aggregationType", StringType, true) ::
StructField("begin", TimestampType, true) ::
StructField("end", TimestampType, true) ::
StructField("count", LongType, true) ::
StructField("result", DoubleType, true) :: Nil)
sparkSession.createDataFrame(map(DoubleAggregateRDD.toRow), struct(Set("table", "column", "aggregationType", "begin", "end", "count", "result")))
}
}
object DoubleAggregateRDD {
def fromJava(table:String, column:String)(row:QdbDoubleAggregation):DoubleAggregation = {
DoubleAggregation(
table,
column,
row.getType.toString,
row.getRange.getBegin.asTimestamp,
row.getRange.getEnd.asTimestamp,
row.getCount,
row.getResult.getValue)
}
def toRow(row : DoubleAggregation): Row = {
Row(
row.table,
row.column,
      row.aggregationType,
row.begin,
row.end,
row.count,
row.result)
}
}
|
bureau14/qdb-spark-connector
|
src/main/scala/net/quasardb/spark/rdd/DoubleAggregateRDD.scala
|
Scala
|
bsd-2-clause
| 3,038 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.views.registration
import play.api.data.FormError
import play.api.i18n.Messages
import play.api.mvc.Call
import org.jsoup.nodes.Document
trait PersonalDetailsViewBehaviour[A] extends RegistrationPageBehaviour[A] {
/**
* Assumes that the Call for the continue button has been set up as CommonBuilder.DefaultCall1.
*/
def personalDetails() = {
"have the correct label for first name" in {
labelShouldBe(doc, "firstName-container", "iht.firstName")
}
"have hint text for first name" in {
labelHelpTextShouldBe(doc, "firstName-container", "iht.firstName.hint")
}
"have a first name field" in {
assertRenderedById(doc, "firstName")
}
"have the correct label for last name" in {
labelShouldBe(doc, "lastName-container", "iht.lastName")
}
"have a last name field" in {
assertRenderedById(doc, "lastName")
}
"have a fieldset with the Id 'date-of-birth'" in {
assertRenderedById(doc, "date-of-birth")
}
"have a 'day' input box" in {
assertRenderedById(doc, "dateOfBirth.day")
}
"have a 'month' input box" in {
assertRenderedById(doc, "dateOfBirth.month")
}
"have a 'year' input box" in {
assertRenderedById(doc, "dateOfBirth.year")
}
"have a form hint for date of birth" in {
messagesShouldBePresent(view, messagesApi("iht.dateExample"))
}
"have the correct label for nino" in {
labelShouldBe(doc, "nino-container", "iht.nationalInsuranceNo")
}
"have hint text for nino" in {
labelHelpTextShouldBe(doc, "nino-container", "iht.ninoExample")
}
"have a nino field" in {
assertRenderedById(doc, "nino")
}
}
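  /**
   * Behaviour for the phone number field; `label` and `hint` are message keys resolved via messagesApi.
   */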
def phoneNumber(label: String, hint: String) = {
"have a phone number field" in {
assertRenderedById(doc, "phoneNo")
}
"have the correct label for phone number" in {
labelShouldBe(doc, "phoneNo-container", label)
}
"have a form hint for phone number" in {
messagesShouldBePresent(view, messagesApi(hint))
}
}
def personalDetailsInEditMode(view: => Document, cancelUrl: => Call) = {
personalDetails()
"have a continue button with correct text" in {
val continueLink = view.getElementById("continue-button")
continueLink.attr("value") mustBe messagesApi("iht.continue")
}
"have a cancel link with correct text" in {
val cancelLink = view.getElementById("cancel-button")
cancelLink.attr("href") mustBe cancelUrl.url
cancelLink.text() mustBe messagesApi("site.link.cancel")
}
}
}
|
hmrc/iht-frontend
|
test/iht/views/registration/PersonalDetailsViewBehaviour.scala
|
Scala
|
apache-2.0
| 3,187 |
/**
* Created by peter_v on 22/02/15.
*/
package csv
import base.{PredicateObject, EventByResource}
import common._
import csv.EventsReader.eventByResourceReader
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class EventsReaderSuite extends FunSuite {
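  // Loads a CSV resource from the test classpath and parses it into an EventByResourceIterator,
  // optionally applying a facts_at timestamp.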
def eventByResourceIterator(filename: String, factsAtOption: Option[String] = None): EventByResourceIterator = {
val file = scala.io.Source.fromURL(getClass.getResource(filename))
eventByResourceReader(file, factsAtOption)
}
test("Object EventsReader can read an empty CSV eventFile") {
assert(eventByResourceIterator("/empty_CSV_file.csv").isEmpty)
}
test("Object EventsReader can read a CSV eventFile with header lines") {
assert(eventByResourceIterator("/event_csv/header.csv").isEmpty)
}
test("Object EventsReader can read a CSV eventFile with 1 data line") {
assertResult(1)(eventByResourceIterator("/event_csv/one_data_line.csv").size)
}
test("Object EventsReader returns Resource and Event with 3 PredicateObjects") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/one_data_line.csv")
val eventByResource_0: EventByResource = iterator.next()
assertResult(36)(eventByResource_0.resource.subject.toString.length)
assertResult(3)(eventByResource_0.event.pos.size)
}
test("Object EventsReader returns Resource and Event with all present PredicateObjects even if one is empty") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/one_data_line_empty_entry.csv")
val eventByResource_0: EventByResource = iterator.next()
assertResult(36)(eventByResource_0.resource.subject.toString.length)
assertResult(2)(eventByResource_0.event.pos.size)
}
test("Object EventsReader skips empty lines") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/one_data_line_empty_entry.csv")
val eventByResource_0: EventByResource = iterator.next()
val eventByResource_1: EventByResource = iterator.next()
assertResult(36)(eventByResource_1.resource.subject.toString.length)
assertResult(3)(eventByResource_1.event.pos.size)
}
test("Object EventsReader returns Event with detailed PredicateObjects") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/one_data_line.csv")
val eventByResource_0: EventByResource = iterator.next()
val event = eventByResource_0.event
val predicateObject: PredicateObject = event.pos.head
assertResult("amd:bar")(predicateObject.predicate)
assertResult("s")(predicateObject.objectType)
assertResult("Bar")(predicateObject.objectValue)
val nextPredicateObject: PredicateObject = event.pos.tail.head
assertResult("amd:foo")(nextPredicateObject.predicate)
assertResult("s")(nextPredicateObject.objectType)
assertResult("Foo")(nextPredicateObject.objectValue)
val nextNextPredicateObject: PredicateObject = event.pos.tail.tail.head
assertResult("amd:int")(nextNextPredicateObject.predicate)
assertResult("i")(nextNextPredicateObject.objectType)
assertResult(42)(nextNextPredicateObject.objectValue.toInt) // TODO return a real Int for "i" objectType
}
test("Object EventsReader returns Events for 2 resources") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/two_events.csv")
val eventByResource_0: EventByResource = iterator.next()
val event = eventByResource_0.event
val predicateObject: PredicateObject = event.pos.head
assertResult("amd:bar")(predicateObject.predicate)
assertResult("s")(predicateObject.objectType)
assertResult("Bar")(predicateObject.objectValue)
val nextPredicateObject: PredicateObject = event.pos.tail.head
assertResult("amd:foo")(nextPredicateObject.predicate)
assertResult("s")(nextPredicateObject.objectType)
assertResult("Foo")(nextPredicateObject.objectValue)
val nextNextPredicateObject: PredicateObject = event.pos.tail.tail.head
assertResult("amd:int")(nextNextPredicateObject.predicate)
assertResult("i")(nextNextPredicateObject.objectType)
assertResult(42)(nextNextPredicateObject.objectValue.toInt) // TODO return a real Int for "i" objectType
val eventByResource_1: EventByResource = iterator.next()
val event_1 = eventByResource_1.event
val predicateObject_1: PredicateObject = event_1.pos.head
assertResult("amd:bar")(predicateObject_1.predicate)
assertResult("s")(predicateObject_1.objectType)
assertResult("Ping")(predicateObject_1.objectValue)
val nextPredicateObject_1: PredicateObject = event_1.pos.tail.head
assertResult("amd:foo")(nextPredicateObject_1.predicate)
assertResult("s")(nextPredicateObject_1.objectType)
assertResult("Pong")(nextPredicateObject_1.objectValue)
val nextNextPredicateObject_1: PredicateObject = event_1.pos.tail.tail.head
assertResult("amd:int")(nextNextPredicateObject_1.predicate)
assertResult("i")(nextNextPredicateObject_1.objectType)
assertResult(37)(nextNextPredicateObject_1.objectValue.toInt) // TODO return a real Int for "i" objectType
}
test("Object EventsReader sets the at timestamp to a value set in context with amd:context:facts_at") {
val iterator: EventByResourceIterator = eventByResourceIterator("/event_csv/one_data_line.csv", Some("2016-04-01"))
val eventByResource_0: EventByResource = iterator.next()
val resource_0 = eventByResource_0.resource
val predicateObject_0 = eventByResource_0.event.pos.head
assertResult("2016-04-01") { predicateObject_0.at.get }
}
}
|
petervandenabeele/AllMyData
|
src/test/scala/csv/EventsReaderSuite.scala
|
Scala
|
mit
| 5,666 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package play.core.server
import java.io.File
import java.net.InetAddress
import akka.Done
import akka.actor.{ ActorSystem, CoordinatedShutdown }
import akka.stream.ActorMaterializer
import com.lightbend.lagom.devmode.ssl.LagomDevModeSSLHolder
import com.typesafe.sslconfig.ssl.FakeKeyStore
import play.api.ApplicationLoader.DevContext
import play.api._
import play.core.{ ApplicationProvider, BuildLink, SourceMapper }
import play.utils.Threads
import scala.collection.JavaConverters._
import scala.concurrent.Future
import scala.util.control.NonFatal
import scala.util.{ Failure, Success, Try }
/**
* Used to start servers in 'dev' mode, a mode where the application
* is reloaded whenever its source changes.
*/
object LagomReloadableDevServerStart {
/**
* A threshold for retrieving the current hostname.
*
   * Resolving the local hostname via InetAddress.getLocalHost can be slow. If it takes longer
   * than this threshold, that is a likely sign of a well-known macOS problem which can cause
   * issues during Lagom startup, so we log a warning.
*/
private val startupWarningThreshold = 1000L
def mainDev(
buildLink: BuildLink,
httpAddress: String,
httpPort: Int,
httpsPort: Int
): ReloadableServer = {
val classLoader = getClass.getClassLoader
Threads.withContextClassLoader(classLoader) {
try {
val process = new RealServerProcess(args = Seq.empty)
val path: File = buildLink.projectPath
val enableSsl = httpsPort > 0
// The pairs play.server.httpx.{address,port} are read from PlayRegisterWithServiceRegistry
// to register the service
val httpsSettings: Map[String, String] =
if (enableSsl) {
// in Dev mode we hardcode the keystoreBaseFolder to File(".").
val keystoreBaseFolder = new File(".")
val sslHolder = new LagomDevModeSSLHolder(keystoreBaseFolder)
Map(
// In dev mode, `play.server.https.address` and `play.server.http.address` are assigned the same value
// but both settings are set in case some code specifically read one config setting or the other.
"play.server.https.address" -> httpAddress, // there's no httpsAddress
"play.server.https.port" -> httpsPort.toString,
// See also com/lightbend/lagom/scaladsl/testkit/ServiceTest.scala
// These configure the server
"play.server.https.keyStore.path" -> sslHolder.keyStoreMetadata.storeFile.getAbsolutePath,
"play.server.https.keyStore.type" -> sslHolder.keyStoreMetadata.storeType,
"play.server.https.keyStore.password" -> String.valueOf(sslHolder.keyStoreMetadata.storePassword),
// These configure the clients (play-ws and akka-grpc)
"ssl-config.loose.disableHostnameVerification" -> "true",
"ssl-config.trustManager.stores.0.path" -> sslHolder.trustStoreMetadata.storeFile.getAbsolutePath,
"ssl-config.trustManager.stores.0.type" -> sslHolder.trustStoreMetadata.storeType,
"ssl-config.trustManager.stores.0.password" -> String.valueOf(sslHolder.trustStoreMetadata.storePassword)
)
} else Map.empty
val httpSettings: Map[String, String] =
Map(
// The pairs play.server.httpx.{address,port} are read from PlayRegisterWithServiceRegistry
// to register the service
"play.server.http.address" -> httpAddress,
"play.server.http.port" -> httpPort.toString
)
        // each user service needs to tune its "play.filters.hosts.allowed" so that Play's
        // AllowedHostFilter (https://www.playframework.com/documentation/2.6.x/AllowedHostsFilter)
        // doesn't block requests whose "Host" header has a value like "localhost:<someport>". The
        // following setting whitelists 'localhost' and 'httpAddress' for both the http and https ports.
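        // Illustrative example: with httpAddress = "10.0.0.1", httpPort = 9000 and httpsPort = 9443,
        // the allowed hosts become 10.0.0.1:9000, localhost:9000, 10.0.0.1:9443 and localhost:9443.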
val allowHostsSetting = "play.filters.hosts.allowed" -> {
val http = List(s"$httpAddress:$httpPort", s"localhost:$httpPort")
val https = if (enableSsl) List(s"$httpAddress:$httpsPort", s"localhost:$httpsPort") else Nil
(http ++ https).asJavaCollection
}
// on dev-mode, we often have more than one cluster on the same jvm
val clusterSettings = "akka.cluster.jmx.multi-mbeans-in-same-jvm" -> "on"
val dirAndDevSettings: Map[String, AnyRef] =
ServerConfig.rootDirConfig(path) ++
buildLink.settings.asScala.toMap ++
httpSettings ++
httpsSettings +
allowHostsSetting +
clusterSettings
// ("play.server.akka.http2.enabled" -> "true") +
// Use plain Java call here in case of scala classloader mess
{
if (System.getProperty("play.debug.classpath") == "true") {
System.out.println("\n---- Current ClassLoader ----\n")
System.out.println(this.getClass.getClassLoader)
System.out.println("\n---- The where is Scala? test ----\n")
System.out.println(this.getClass.getClassLoader.getResource("scala/Predef$.class"))
}
}
val before = System.currentTimeMillis()
val address = InetAddress.getLocalHost
val after = System.currentTimeMillis()
if (after - before > startupWarningThreshold) {
println(play.utils.Colors.red(s"WARNING: Retrieving local host name ${address} took more than ${startupWarningThreshold}ms, this can create problems at startup with Lagom"))
println(play.utils.Colors.red("If you are using macOS, see https://thoeni.io/post/macos-sierra-java/ for a potential solution"))
}
// First delete the default log file for a fresh start (only in Dev Mode)
try {
new File(path, "logs/application.log").delete()
} catch {
case NonFatal(_) =>
}
// Configure the logger for the first time.
// This is usually done by Application itself when it's instantiated, which for other types of ApplicationProviders,
// is usually instantiated along with or before the provider. But in dev mode, no application exists initially, so
// configure it here.
LoggerConfigurator(classLoader) match {
case Some(loggerConfigurator) =>
loggerConfigurator.init(path, Mode.Dev)
case None =>
println("No play.logger.configurator found: logging must be configured entirely by the application.")
}
// Create reloadable ApplicationProvider
val appProvider = new ApplicationProvider {
var lastState: Try[Application] = Failure(new PlayException("Not initialized", "?"))
override def current: Option[Application] = lastState.toOption
def get: Try[Application] = {
synchronized {
val reloaded = buildLink.reload match {
case NonFatal(t) => Failure(t)
case cl: ClassLoader => Success(Some(cl))
case null => Success(None)
}
reloaded.flatMap { maybeClassLoader =>
val maybeApplication: Option[Try[Application]] = maybeClassLoader.map { projectClassloader =>
try {
if (lastState.isSuccess) {
println()
println(play.utils.Colors.magenta("--- (RELOAD) ---"))
println()
}
// First, stop the old application if it exists
lastState.foreach(Play.stop)
// Create the new environment
val environment = Environment(path, projectClassloader, Mode.Dev)
val sourceMapper = new SourceMapper {
def sourceOf(className: String, line: Option[Int]) = {
Option(buildLink.findSource(className, line.map(_.asInstanceOf[java.lang.Integer]).orNull)).flatMap {
case Array(file: java.io.File, null) => Some((file, None))
case Array(file: java.io.File, line: java.lang.Integer) => Some((file, Some(line)))
case _ => None
}
}
}
val newApplication = Threads.withContextClassLoader(projectClassloader) {
val context = ApplicationLoader.Context.create(
environment = environment,
initialSettings = dirAndDevSettings,
devContext = Some(DevContext(sourceMapper, buildLink))
)
val loader = ApplicationLoader(context)
loader.load(context)
}
newApplication.coordinatedShutdown.addTask(CoordinatedShutdown.PhaseBeforeActorSystemTerminate, "force-reload") { () =>
// We'll only force a reload if the reason for shutdown is not an Application.stop
if (!newApplication.coordinatedShutdown.shutdownReason().contains(ApplicationStoppedReason)) {
buildLink.forceReload()
}
Future.successful(Done)
}
Play.start(newApplication)
Success(newApplication)
} catch {
// No binary dependency on play-guice
case e if e.getClass.getName == "com.google.inject.CreationException" =>
lastState = Failure(e)
                  val hint = "Hint: Maybe you have forgotten to enable your service Module class via `play.modules.enabled`? (check your project's application.conf)"
logExceptionAndGetResult(path, e, hint)
lastState
case e: PlayException =>
lastState = Failure(e)
logExceptionAndGetResult(path, e)
lastState
case NonFatal(e) =>
lastState = Failure(UnexpectedException(unexpected = Some(e)))
logExceptionAndGetResult(path, e)
lastState
case e: LinkageError =>
lastState = Failure(UnexpectedException(unexpected = Some(e)))
logExceptionAndGetResult(path, e)
lastState
}
}
maybeApplication.flatMap(_.toOption).foreach { app =>
lastState = Success(app)
}
maybeApplication.getOrElse(lastState)
}
}
}
private def logExceptionAndGetResult(path: File, e: Throwable, hint: String = ""): Unit = {
e.printStackTrace()
println()
println(play.utils.Colors.red(s"Stacktrace caused by project ${path.getName} (filesystem path to project is ${path.getAbsolutePath}).\n${hint}"))
}
override def handleWebCommand(request: play.api.mvc.RequestHeader) = None
}
// Start server with the application
val serverConfig = ServerConfig(
rootDir = path,
port = Some(httpPort),
sslPort = if (httpsPort > 0) Some(httpsPort) else None,
address = httpAddress,
mode = Mode.Dev,
properties = process.properties,
configuration = Configuration.load(classLoader, System.getProperties, dirAndDevSettings, allowMissingApplicationConf = true)
)
// We *must* use a different Akka configuration in dev mode, since loading two actor systems from the same
// config will lead to resource conflicts, for example, if the actor system is configured to open a remote port,
// then both the dev mode and the application actor system will attempt to open that remote port, and one of
// them will fail.
val devModeAkkaConfig = serverConfig.configuration.underlying.getConfig("lagom.akka.dev-mode.config")
val actorSystemName = serverConfig.configuration.underlying.getString("lagom.akka.dev-mode.actor-system.name")
val actorSystem: ActorSystem = ActorSystem(actorSystemName, devModeAkkaConfig)
val serverCoordinatedShutdown = CoordinatedShutdown(actorSystem)
// Registering a task that invokes `Play.stop` is necessary for the scenarios where
// the Application and the Server use separate ActorSystems (e.g. DevMode).
serverCoordinatedShutdown.addTask(CoordinatedShutdown.PhaseServiceStop, "shutdown-application-dev-mode") {
() =>
implicit val ctx = actorSystem.dispatcher
val stoppedApp = appProvider.get.map(Play.stop)
Future.fromTry(stoppedApp).map(_ => Done)
}
val serverContext = ServerProvider.Context(serverConfig, appProvider, actorSystem, ActorMaterializer()(actorSystem), () => Future.successful(()))
val serverProvider = ServerProvider.fromConfiguration(classLoader, serverConfig.configuration)
serverProvider.createServer(serverContext)
} catch {
case e: ExceptionInInitializerError => throw e.getCause
}
}
}
}
|
rstento/lagom
|
dev/reloadable-server/src/main/scala/play/core/server/LagomReloadableDevServerStart.scala
|
Scala
|
apache-2.0
| 13,550 |
/**
* Copyright (c) 2011 ScalaStuff.org (joint venture of Alexander Dvorkovyy and Ruud Diterwich)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalastuff.proto
import value._
import org.scalastuff.scalabeans._
import com.dyuproject.protostuff._
abstract class MutableField[B <: AnyRef](_tag: Int, _propertyDescriptor: PropertyDescriptor)
extends Field[B](_tag, _propertyDescriptor) {
def setDefaultValue(message: B) {
setValue(message, valueHandler.defaultValue)
}
def mergeFrom(input: Input, message: B) {
setValue(message, valueHandler.readFrom(input))
}
protected def setValue(message: B, value: valueHandler.V)
}
abstract class MutableMirrorField[B <: AnyRef](tag: Int, propertyDescriptor: MutablePropertyDescriptor)
extends MutableField[B](tag, propertyDescriptor) {
def getValue(message: B) = propertyDescriptor.get[valueHandler.V](message)
def setValue(message: B, value: valueHandler.V) {
propertyDescriptor.set(message, value)
}
}
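/**
 * Factory that selects a field implementation based on the property's ValueHandler
 * (repeated values, nested beans or plain values). Returns None when the property is
 * not mutable or its type has no ValueHandler.
 */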
object MutableMirrorField {
def apply[B <: AnyRef](tag: Int, prop: PropertyDescriptor) = prop match {
case mutableProperty: MutablePropertyDescriptor =>
for (valueHandler <- ValueHandler(mutableProperty.scalaType))
yield {
valueHandler match {
case repeatedValueHandler: RepeatedValueHandler =>
new MutableMirrorField[B](tag, mutableProperty) with RepeatedField[B] {
val valueHandler = repeatedValueHandler
}
case beanValueHandler: MutableBeanValueHandler =>
new MutableMirrorField[B](tag, mutableProperty) with BeanField[B] {
val valueHandler = beanValueHandler
}
case vh @ _ =>
new MutableMirrorField[B](tag, mutableProperty) {
val valueHandler = vh
}
}
}
case _ => None
}
}
|
scalastuff/scalabeans
|
src/main/scala/org/scalastuff/proto/MutableField.scala
|
Scala
|
apache-2.0
| 2,410 |
package lila.simul
import akka.actor._
import akka.pattern.{ ask, pipe }
import play.api.libs.json.Json
import scala.concurrent.duration._
import chess.Status
import chess.variant.Variant
import lila.common.Debouncer
import lila.db.dsl.Coll
import lila.game.{ Game, GameRepo }
import lila.hub.actorApi.lobby.ReloadSimuls
import lila.hub.actorApi.map.Tell
import lila.hub.actorApi.timeline.{ Propagate, SimulCreate, SimulJoin }
import lila.memo.AsyncCache
import lila.socket.actorApi.SendToFlag
import lila.user.{ User, UserRepo }
import makeTimeout.short
private[simul] final class SimulApi(
system: ActorSystem,
sequencers: ActorRef,
onGameStart: String => Unit,
socketHub: ActorRef,
site: ActorSelection,
renderer: ActorSelection,
timeline: ActorSelection,
userRegister: ActorSelection,
lobby: ActorSelection,
repo: SimulRepo) {
def currentHostIds: Fu[Set[String]] = currentHostIdsCache apply true
private val currentHostIdsCache = AsyncCache.single[Set[String]](
name = "simul.currentHostIds",
f = repo.allStarted map (_ map (_.hostId) toSet),
timeToLive = 10 minutes)
def create(setup: SimulSetup, me: User): Fu[Simul] = {
val simul = Simul.make(
clock = SimulClock(
config = chess.Clock.Config(setup.clockTime * 60, setup.clockIncrement),
hostExtraTime = setup.clockExtra * 60),
variants = setup.variants.flatMap { chess.variant.Variant(_) },
host = me,
color = setup.color)
repo.createdByHostId(me.id) foreach {
_.filter(_.isNotBrandNew).map(_.id).foreach(abort)
}
(repo create simul) >>- publish() >>- {
timeline ! (Propagate(SimulCreate(me.id, simul.id, simul.fullName)) toFollowersOf me.id)
} inject simul
}
def addApplicant(simulId: Simul.ID, user: User, variantKey: String) {
WithSimul(repo.findCreated, simulId) { simul =>
if (simul.nbAccepted >= Game.maxPlayingRealtime) simul
else {
timeline ! (Propagate(SimulJoin(user.id, simul.id, simul.fullName)) toFollowersOf user.id)
Variant(variantKey).filter(simul.variants.contains).fold(simul) { variant =>
simul addApplicant SimulApplicant.make(SimulPlayer.make(user, variant))
}
}
}
}
def removeApplicant(simulId: Simul.ID, user: User) {
WithSimul(repo.findCreated, simulId) { _ removeApplicant user.id }
}
def accept(simulId: Simul.ID, userId: String, v: Boolean) {
UserRepo byId userId foreach {
_ foreach { user =>
WithSimul(repo.findCreated, simulId) { _.accept(user.id, v) }
}
}
}
def start(simulId: Simul.ID) {
Sequence(simulId) {
repo.findCreated(simulId) flatMap {
_ ?? { simul =>
simul.start ?? { started =>
UserRepo byId started.hostId flatten s"No such host: ${simul.hostId}" flatMap { host =>
started.pairings.map(makeGame(started, host)).sequenceFu map { games =>
games.headOption foreach {
case (game, _) => sendTo(simul.id, actorApi.StartSimul(game, simul.hostId))
}
games.foldLeft(started) {
case (s, (g, hostColor)) => s.setPairingHostColor(g.id, hostColor)
}
}
} flatMap update
} >> currentHostIdsCache.clear
}
}
}
}
def onPlayerConnection(game: Game, user: Option[User])(simul: Simul) {
user.filter(_.id == simul.hostId) ifTrue simul.isRunning foreach { host =>
repo.setHostGameId(simul, game.id)
sendTo(simul.id, actorApi.HostIsOn(game.id))
}
}
def abort(simulId: Simul.ID) {
Sequence(simulId) {
repo.findCreated(simulId) flatMap {
_ ?? { simul =>
(repo remove simul) >>- sendTo(simul.id, actorApi.Aborted) >>- publish()
}
}
}
}
def finishGame(game: Game) {
game.simulId foreach { simulId =>
Sequence(simulId) {
repo.findStarted(simulId) flatMap {
_ ?? { simul =>
val simul2 = simul.updatePairing(
game.id,
_.finish(game.status, game.winnerUserId, game.turns)
)
update(simul2) >> currentHostIdsCache.clear >>- {
if (simul2.isFinished) userRegister ! lila.hub.actorApi.SendTo(simul2.hostId,
lila.socket.Socket.makeMessage("simulEnd", Json.obj(
"id" -> simul.id,
"name" -> simul.name
)))
}
}
}
}
}
}
def ejectCheater(userId: String) {
repo.allNotFinished foreach {
_ foreach { oldSimul =>
Sequence(oldSimul.id) {
repo.findCreated(oldSimul.id) flatMap {
_ ?? { simul =>
(simul ejectCheater userId) ?? { simul2 =>
update(simul2).void
}
}
}
}
}
}
}
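  // Builds the game for one pairing: the host plays the simul's host colour, the game is casual,
  // uses the simul clock and the applicant's chosen variant, and is started immediately.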
private def makeGame(simul: Simul, host: User)(pairing: SimulPairing): Fu[(Game, chess.Color)] = for {
user ← UserRepo byId pairing.player.user flatten s"No user with id ${pairing.player.user}"
hostColor = simul.hostColor
whiteUser = hostColor.fold(host, user)
blackUser = hostColor.fold(user, host)
game1 = Game.make(
game = chess.Game(
board = chess.Board init pairing.player.variant,
clock = simul.clock.chessClockOf(hostColor).start.some),
whitePlayer = lila.game.Player.white,
blackPlayer = lila.game.Player.black,
mode = chess.Mode.Casual,
variant = pairing.player.variant,
source = lila.game.Source.Simul,
pgnImport = None)
game2 = game1
.updatePlayer(chess.White, _.withUser(whiteUser.id, lila.game.PerfPicker.mainOrDefault(game1)(whiteUser.perfs)))
.updatePlayer(chess.Black, _.withUser(blackUser.id, lila.game.PerfPicker.mainOrDefault(game1)(blackUser.perfs)))
.withSimulId(simul.id)
.withId(pairing.gameId)
.start
_ ← (GameRepo insertDenormalized game2) >>-
onGameStart(game2.id) >>-
sendTo(simul.id, actorApi.StartGame(game2, simul.hostId))
} yield game2 -> hostColor
private def update(simul: Simul) =
repo.update(simul) >>- socketReload(simul.id) >>- publish()
private def WithSimul(
finding: Simul.ID => Fu[Option[Simul]],
simulId: Simul.ID)(updating: Simul => Simul) {
Sequence(simulId) {
finding(simulId) flatMap {
_ ?? { simul => update(updating(simul)) }
}
}
}
private def Sequence(simulId: Simul.ID)(work: => Funit) {
sequencers ! Tell(simulId, lila.hub.Sequencer work work)
}
private object publish {
private val siteMessage = SendToFlag("simul", Json.obj("t" -> "reload"))
private val debouncer = system.actorOf(Props(new Debouncer(5 seconds, {
(_: Debouncer.Nothing) =>
site ! siteMessage
repo.allCreated foreach { simuls =>
renderer ? actorApi.SimulTable(simuls) map {
case view: play.twirl.api.Html => ReloadSimuls(view.body)
} pipeToSelection lobby
}
})))
def apply() { debouncer ! Debouncer.Nothing }
}
private def sendTo(simulId: Simul.ID, msg: Any) {
socketHub ! Tell(simulId, msg)
}
private def socketReload(simulId: Simul.ID) {
sendTo(simulId, actorApi.Reload)
}
}
|
clarkerubber/lila
|
modules/simul/src/main/SimulApi.scala
|
Scala
|
agpl-3.0
| 7,297 |
/**
* Copyright (c) 2014-2016 Snowplow Analytics Ltd.
* All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache
* License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow
package storage.kinesis.elasticsearch
// AWS Kinesis Connector libs
import com.amazonaws.services.kinesis.connectors.elasticsearch.{
ElasticsearchObject,
ElasticsearchEmitter,
ElasticsearchTransformer
}
import com.amazonaws.services.kinesis.connectors.interfaces.{
IEmitter,
IBuffer,
ITransformer,
IFilter,
IKinesisConnectorPipeline
}
import com.amazonaws.services.kinesis.connectors.KinesisConnectorConfiguration
import com.amazonaws.services.kinesis.connectors.impl.{BasicMemoryBuffer,AllPassFilter}
// This project
import sinks._
import StreamType._
// Tracker
import scalatracker.Tracker
/**
* ElasticsearchPipeline class sets up the Emitter/Buffer/Transformer/Filter
*
* @param streamType the type of stream, good/bad
* @param documentIndex the elasticsearch index name
* @param documentType the elasticsearch index type
* @param goodSink the configured GoodSink
* @param badSink the configured BadSink
* @param tracker a Tracker instance
* @param maxConnectionTime the maximum amount of time
* we can attempt to send to elasticsearch
* @param elasticsearchClientType The type of ES Client to use
*/
class ElasticsearchPipeline(
streamType: StreamType,
documentIndex: String,
documentType: String,
goodSink: Option[ISink],
badSink: ISink,
tracker: Option[Tracker] = None,
maxConnectionTime: Long,
elasticsearchClientType: String
) extends IKinesisConnectorPipeline[ValidatedRecord, EmitterInput] {
override def getEmitter(configuration: KinesisConnectorConfiguration): IEmitter[EmitterInput] =
new SnowplowElasticsearchEmitter(configuration, goodSink, badSink, tracker, maxConnectionTime, elasticsearchClientType)
override def getBuffer(configuration: KinesisConnectorConfiguration) = new BasicMemoryBuffer[ValidatedRecord](configuration)
override def getTransformer(c: KinesisConnectorConfiguration) = streamType match {
case Good => new SnowplowElasticsearchTransformer(documentIndex, documentType)
case Bad => new BadEventTransformer(documentIndex, documentType)
}
override def getFilter(c: KinesisConnectorConfiguration) = new AllPassFilter[ValidatedRecord]()
}
|
bigdecisions/snowplow
|
4-storage/kinesis-elasticsearch-sink/src/main/scala/com.snowplowanalytics.snowplow.storage.kinesis/elasticsearch/ElasticsearchPipeline.scala
|
Scala
|
apache-2.0
| 2,916 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import java.io.ByteArrayOutputStream
import scala.collection.mutable.ArrayBuffer
import scala.util.Try
import com.fasterxml.jackson.core._
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
private[sql] class SparkSQLJsonProcessingException(msg: String) extends RuntimeException(msg)
/**
* Constructs a parser for a given schema that translates a json string to an [[InternalRow]].
*/
class JacksonParser(
schema: StructType,
options: JSONOptions) extends Logging {
import JacksonUtils._
import ParseModes._
import com.fasterxml.jackson.core.JsonToken._
// A `ValueConverter` is responsible for converting a value from `JsonParser`
// to a value in a field for `InternalRow`.
private type ValueConverter = JsonParser => AnyRef
// `ValueConverter`s for the root schema for all fields in the schema
private val rootConverter = makeRootConverter(schema)
private val factory = new JsonFactory()
options.setJacksonOptions(factory)
private val emptyRow: Seq[InternalRow] = Seq(new GenericInternalRow(schema.length))
private val corruptFieldIndex = schema.getFieldIndex(options.columnNameOfCorruptRecord)
corruptFieldIndex.foreach(idx => require(schema(idx).dataType == StringType))
@transient
private[this] var isWarningPrinted: Boolean = false
@transient
private def printWarningForMalformedRecord(record: () => UTF8String): Unit = {
def sampleRecord: String = {
if (options.wholeFile) {
""
} else {
s"Sample record: ${record()}\n"
}
}
def footer: String = {
s"""Code example to print all malformed records (scala):
|===================================================
|// The corrupted record exists in column ${options.columnNameOfCorruptRecord}.
|val parsedJson = spark.read.json("/path/to/json/file/test.json")
|
""".stripMargin
}
if (options.permissive) {
logWarning(
s"""Found at least one malformed record. The JSON reader will replace
|all malformed records with placeholder null in current $PERMISSIVE_MODE parser mode.
|To find out which corrupted records have been replaced with null, please use the
|default inferred schema instead of providing a custom schema.
|
|${sampleRecord ++ footer}
|
""".stripMargin)
} else if (options.dropMalformed) {
logWarning(
s"""Found at least one malformed record. The JSON reader will drop
|all malformed records in current $DROP_MALFORMED_MODE parser mode. To find out which
|corrupted records have been dropped, please switch the parser mode to $PERMISSIVE_MODE
|mode and use the default inferred schema.
|
|${sampleRecord ++ footer}
|
""".stripMargin)
}
}
@transient
private def printWarningIfWholeFile(): Unit = {
if (options.wholeFile && corruptFieldIndex.isDefined) {
logWarning(
s"""Enabling wholeFile mode and defining columnNameOfCorruptRecord may result
|in very large allocations or OutOfMemoryExceptions being raised.
|
""".stripMargin)
}
}
/**
* This function deals with the cases it fails to parse. This function will be called
* when exceptions are caught during converting. This functions also deals with `mode` option.
*/
private def failedRecord(record: () => UTF8String): Seq[InternalRow] = {
corruptFieldIndex match {
case _ if options.failFast =>
if (options.wholeFile) {
throw new SparkSQLJsonProcessingException("Malformed line in FAILFAST mode")
} else {
throw new SparkSQLJsonProcessingException(s"Malformed line in FAILFAST mode: ${record()}")
}
case _ if options.dropMalformed =>
if (!isWarningPrinted) {
printWarningForMalformedRecord(record)
isWarningPrinted = true
}
Nil
case None =>
if (!isWarningPrinted) {
printWarningForMalformedRecord(record)
isWarningPrinted = true
}
emptyRow
case Some(corruptIndex) =>
if (!isWarningPrinted) {
printWarningIfWholeFile()
isWarningPrinted = true
}
val row = new GenericInternalRow(schema.length)
row.update(corruptIndex, record())
Seq(row)
}
}
/**
* Create a converter which converts the JSON documents held by the `JsonParser`
* to a value according to a desired schema. This is a wrapper for the method
* `makeConverter()` to handle a row wrapped with an array.
*/
def makeRootConverter(st: StructType): JsonParser => Seq[InternalRow] = {
val elementConverter = makeConverter(st)
val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
(parser: JsonParser) => parseJsonToken[Seq[InternalRow]](parser, st) {
case START_OBJECT => convertObject(parser, st, fieldConverters) :: Nil
// SPARK-3308: support reading top level JSON arrays and take every element
// in such an array as a row
//
// For example, we support, the JSON data as below:
//
// [{"a":"str_a_1"}]
// [{"a":"str_a_2"}, {"b":"str_b_3"}]
//
// resulting in:
//
// List([str_a_1,null])
// List([str_a_2,null], [null,str_b_3])
//
case START_ARRAY =>
val array = convertArray(parser, elementConverter)
// Here, as we support reading top level JSON arrays and take every element
// in such an array as a row, this case is possible.
if (array.numElements() == 0) {
Nil
} else {
array.toArray[InternalRow](schema).toSeq
}
}
}
/**
* Create a converter which converts the JSON documents held by the `JsonParser`
* to a value according to a desired schema.
*/
def makeConverter(dataType: DataType): ValueConverter = dataType match {
case BooleanType =>
(parser: JsonParser) => parseJsonToken[java.lang.Boolean](parser, dataType) {
case VALUE_TRUE => true
case VALUE_FALSE => false
}
case ByteType =>
(parser: JsonParser) => parseJsonToken[java.lang.Byte](parser, dataType) {
case VALUE_NUMBER_INT => parser.getByteValue
}
case ShortType =>
(parser: JsonParser) => parseJsonToken[java.lang.Short](parser, dataType) {
case VALUE_NUMBER_INT => parser.getShortValue
}
case IntegerType =>
(parser: JsonParser) => parseJsonToken[java.lang.Integer](parser, dataType) {
case VALUE_NUMBER_INT => parser.getIntValue
}
case LongType =>
(parser: JsonParser) => parseJsonToken[java.lang.Long](parser, dataType) {
case VALUE_NUMBER_INT => parser.getLongValue
}
case FloatType =>
(parser: JsonParser) => parseJsonToken[java.lang.Float](parser, dataType) {
case VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT =>
parser.getFloatValue
case VALUE_STRING =>
// Special case handling for NaN and Infinity.
val value = parser.getText
val lowerCaseValue = value.toLowerCase
if (lowerCaseValue.equals("nan") ||
lowerCaseValue.equals("infinity") ||
lowerCaseValue.equals("-infinity") ||
lowerCaseValue.equals("inf") ||
lowerCaseValue.equals("-inf")) {
value.toFloat
} else {
throw new SparkSQLJsonProcessingException(s"Cannot parse $value as FloatType.")
}
}
case DoubleType =>
(parser: JsonParser) => parseJsonToken[java.lang.Double](parser, dataType) {
case VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT =>
parser.getDoubleValue
case VALUE_STRING =>
// Special case handling for NaN and Infinity.
val value = parser.getText
val lowerCaseValue = value.toLowerCase
if (lowerCaseValue.equals("nan") ||
lowerCaseValue.equals("infinity") ||
lowerCaseValue.equals("-infinity") ||
lowerCaseValue.equals("inf") ||
lowerCaseValue.equals("-inf")) {
value.toDouble
} else {
throw new SparkSQLJsonProcessingException(s"Cannot parse $value as DoubleType.")
}
}
case StringType =>
(parser: JsonParser) => parseJsonToken[UTF8String](parser, dataType) {
case VALUE_STRING =>
UTF8String.fromString(parser.getText)
case _ =>
          // Note that this always converts the data to its string form and has no failure case.
val writer = new ByteArrayOutputStream()
Utils.tryWithResource(factory.createGenerator(writer, JsonEncoding.UTF8)) {
generator => generator.copyCurrentStructure(parser)
}
UTF8String.fromBytes(writer.toByteArray)
}
case TimestampType =>
(parser: JsonParser) => parseJsonToken[java.lang.Long](parser, dataType) {
case VALUE_STRING =>
val stringValue = parser.getText
// This one will lose microseconds parts.
// See https://issues.apache.org/jira/browse/SPARK-10681.
Long.box {
Try(options.timestampFormat.parse(stringValue).getTime * 1000L)
.getOrElse {
// If it fails to parse, then tries the way used in 2.0 and 1.x for backwards
// compatibility.
DateTimeUtils.stringToTime(stringValue).getTime * 1000L
}
}
case VALUE_NUMBER_INT =>
parser.getLongValue * 1000000L
}
case DateType =>
(parser: JsonParser) => parseJsonToken[java.lang.Integer](parser, dataType) {
case VALUE_STRING =>
val stringValue = parser.getText
// This one will lose microseconds parts.
          // See https://issues.apache.org/jira/browse/SPARK-10681.
Int.box {
Try(DateTimeUtils.millisToDays(options.dateFormat.parse(stringValue).getTime))
.orElse {
// If it fails to parse, then tries the way used in 2.0 and 1.x for backwards
// compatibility.
Try(DateTimeUtils.millisToDays(DateTimeUtils.stringToTime(stringValue).getTime))
}
.getOrElse {
// In Spark 1.5.0, we store the data as number of days since epoch in string.
// So, we just convert it to Int.
stringValue.toInt
}
}
}
case BinaryType =>
(parser: JsonParser) => parseJsonToken[Array[Byte]](parser, dataType) {
case VALUE_STRING => parser.getBinaryValue
}
case dt: DecimalType =>
(parser: JsonParser) => parseJsonToken[Decimal](parser, dataType) {
case (VALUE_NUMBER_INT | VALUE_NUMBER_FLOAT) =>
Decimal(parser.getDecimalValue, dt.precision, dt.scale)
}
case st: StructType =>
val fieldConverters = st.map(_.dataType).map(makeConverter).toArray
(parser: JsonParser) => parseJsonToken[InternalRow](parser, dataType) {
case START_OBJECT => convertObject(parser, st, fieldConverters)
}
case at: ArrayType =>
val elementConverter = makeConverter(at.elementType)
(parser: JsonParser) => parseJsonToken[ArrayData](parser, dataType) {
case START_ARRAY => convertArray(parser, elementConverter)
}
case mt: MapType =>
val valueConverter = makeConverter(mt.valueType)
(parser: JsonParser) => parseJsonToken[MapData](parser, dataType) {
case START_OBJECT => convertMap(parser, valueConverter)
}
case udt: UserDefinedType[_] =>
makeConverter(udt.sqlType)
case _ =>
(parser: JsonParser) =>
// Here, we pass empty `PartialFunction` so that this case can be
// handled as a failed conversion. It will throw an exception as
// long as the value is not null.
parseJsonToken[AnyRef](parser, dataType)(PartialFunction.empty[JsonToken, AnyRef])
}
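  // For instance (illustrative), makeConverter(StringType) yields a converter that reads a
  // VALUE_STRING token directly and re-serializes any other token back to its JSON text, while
  // makeConverter(ArrayType(LongType)) recursively builds an element converter for LongType.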
/**
* This method skips `FIELD_NAME`s at the beginning, and handles nulls ahead before trying
* to parse the JSON token using given function `f`. If the `f` failed to parse and convert the
* token, call `failedConversion` to handle the token.
*/
private def parseJsonToken[R >: Null](
parser: JsonParser,
dataType: DataType)(f: PartialFunction[JsonToken, R]): R = {
parser.getCurrentToken match {
case FIELD_NAME =>
// There are useless FIELD_NAMEs between START_OBJECT and END_OBJECT tokens
parser.nextToken()
parseJsonToken[R](parser, dataType)(f)
case null | VALUE_NULL => null
case other => f.applyOrElse(other, failedConversion(parser, dataType))
}
}
/**
* This function throws an exception for failed conversion, but returns null for empty string,
* to guard the non string types.
*/
private def failedConversion[R >: Null](
parser: JsonParser,
dataType: DataType): PartialFunction[JsonToken, R] = {
case VALUE_STRING if parser.getTextLength < 1 =>
// If conversion is failed, this produces `null` rather than throwing exception.
// This will protect the mismatch of types.
null
case token =>
// We cannot parse this token based on the given data type. So, we throw a
// SparkSQLJsonProcessingException and this exception will be caught by
// `parse` method.
throw new SparkSQLJsonProcessingException(
s"Failed to parse a value for data type $dataType (current token: $token).")
}
/**
* Parse an object from the token stream into a new Row representing the schema.
* Fields in the json that are not defined in the requested schema will be dropped.
*/
private def convertObject(
parser: JsonParser,
schema: StructType,
fieldConverters: Array[ValueConverter]): InternalRow = {
val row = new GenericInternalRow(schema.length)
while (nextUntil(parser, JsonToken.END_OBJECT)) {
schema.getFieldIndex(parser.getCurrentName) match {
case Some(index) =>
row.update(index, fieldConverters(index).apply(parser))
case None =>
parser.skipChildren()
}
}
row
}
/**
* Parse an object as a Map, preserving all fields.
*/
private def convertMap(
parser: JsonParser,
fieldConverter: ValueConverter): MapData = {
val keys = ArrayBuffer.empty[UTF8String]
val values = ArrayBuffer.empty[Any]
while (nextUntil(parser, JsonToken.END_OBJECT)) {
keys += UTF8String.fromString(parser.getCurrentName)
values += fieldConverter.apply(parser)
}
ArrayBasedMapData(keys.toArray, values.toArray)
}
/**
   * Parse an array from the token stream.
*/
private def convertArray(
parser: JsonParser,
fieldConverter: ValueConverter): ArrayData = {
val values = ArrayBuffer.empty[Any]
while (nextUntil(parser, JsonToken.END_ARRAY)) {
values += fieldConverter.apply(parser)
}
new GenericArrayData(values.toArray)
}
/**
* Parse the JSON input to the set of [[InternalRow]]s.
*
* @param recordLiteral an optional function that will be used to generate
* the corrupt record text instead of record.toString
*/
def parse[T](
record: T,
createParser: (JsonFactory, T) => JsonParser,
recordLiteral: T => UTF8String): Seq[InternalRow] = {
try {
Utils.tryWithResource(createParser(factory, record)) { parser =>
// a null first token is equivalent to testing for input.trim.isEmpty
// but it works on any token stream and not just strings
parser.nextToken() match {
case null => Nil
case _ => rootConverter.apply(parser) match {
case null => throw new SparkSQLJsonProcessingException("Root converter returned null")
case rows => rows
}
}
}
} catch {
case _: JsonProcessingException | _: SparkSQLJsonProcessingException =>
failedRecord(() => recordLiteral(record))
}
}
}
|
SnappyDataInc/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
|
Scala
|
apache-2.0
| 17,245 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package patterns
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.types.{ExistentialClause, InfixType, Type}
/**
* @author Alexander Podkhalyuzin
* Date: 29.02.2008
*/
/*
* TypePattern ::= Type (but it can't be InfixType => Type (because case A => B => C?))
*/
object TypePattern {
def parse(builder: ScalaPsiBuilder): Boolean = {
val typeMarker = builder.mark
builder.getTokenType match {
case ScalaTokenTypes.tLPARENTHESIS =>
val parMarker = builder.mark
builder.advanceLexer() //Ate (
builder.disableNewlines()
builder.getTokenType match {
case ScalaTokenTypes.tFUNTYPE | ScalaTokenTypes.tRPARENTHESIS =>
if (builder.getTokenType == ScalaTokenTypes.tFUNTYPE) {
builder.advanceLexer() //Ate =>
if (!Type.parse(builder, isPattern = true)) {
builder error ScalaBundle.message("wrong.type")
}
}
builder.getTokenType match {
case ScalaTokenTypes.tRPARENTHESIS =>
builder.advanceLexer() //Ate )
case _ =>
builder error ScalaBundle.message("rparenthesis.expected")
}
builder.restoreNewlinesState()
builder.getTokenType match {
case ScalaTokenTypes.tFUNTYPE =>
builder.advanceLexer() //Ate =>
case _ =>
builder error ScalaBundle.message("fun.sign.expected")
}
if (!Type.parse(builder, isPattern = true)) {
builder error ScalaBundle.message("wrong.type")
}
typeMarker.done(ScalaElementTypes.TYPE_PATTERN)
parMarker.drop()
return true
case _ =>
builder.restoreNewlinesState()
parMarker.rollbackTo()
}
case _ =>
}
if (!InfixType.parse(builder, star = false, isPattern = true)) {
typeMarker.drop()
return false
}
builder.getTokenType match {
case ScalaTokenTypes.kFOR_SOME =>
ExistentialClause parse builder
typeMarker.done(ScalaElementTypes.TYPE_PATTERN)
true
case _ =>
typeMarker.done(ScalaElementTypes.TYPE_PATTERN)
true
}
}
}
|
jastice/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/patterns/TypePattern.scala
|
Scala
|
apache-2.0
| 2,492 |
///////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2014 Adobe Systems Incorporated. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////
package com.adobe
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import java.io.{File,PrintWriter}
object ExampleApp {
def main(args: Array[String]) {
val conf = new SparkConf()
.setAppName("ExampleApp")
.setMaster("spark://spark_master_hostname:7077")
.setSparkHome("/usr/lib/spark")
.setJars(Seq("/tmp/ExampleApp.jar"))
.set("spark.executor.memory", "10g")
.set("spark.cores.max", "4")
val sc = new SparkContext(conf)
val nums = sc.parallelize(Seq(1,2,4,8))
    val squares = nums.map(num => num * num)
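    // Illustrative output: "Nums: 1, 2, 4, 8" and "Squares: 1, 4, 16, 64".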
println("Nums: " + nums.collect().mkString(", "))
println("Squares: " + squares.collect().mkString(", "))
sc.stop()
}
}
|
adobe-research/spark-cluster-deployment
|
sample-application/src/main/scala/ExampleApp.scala
|
Scala
|
apache-2.0
| 1,542 |
/*
* Copyright 2013 - 2017 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.morpheus.engine.query
object DefaultMySQLEngines {
val InnoDB = "InnoDB"
val Memory = "MEMORY"
val Heap = "HEAP"
val Merge = "MERGE"
val MrgMyLSAM = "MRG_MYISAM"
val isam = "ISAM"
val MrgISAM = "MRG_ISAM"
val innoBase = "INNOBASE"
val BDB = "BDB"
val BerkleyDB = "BERKELEYDB"
val NDBCluster = "NDBCLUSTER"
val NDB = "NDB"
val Example = "EXAMPLE"
val Archive = "ARCHIVE"
val CSV = "CSV"
val Federated = "FEDERATED"
val BlackHole = "BLACKHOLE"
}
sealed abstract class SQLEngine(val value: String)
/**
 * This is the sequence of storage engines available by default in the MySQL 5.0 specification.
* For the official documentation, @see <a href="http://dev.mysql.com/doc/refman/5.0/en/show-engines.html">the MySQL 5.0 docs</a>.
*
 * More recent versions of MySQL offer far fewer storage engine options. The official list is available on
 * @see <a href="http://dev.mysql.com/doc/refman/5.7/en/show-engines.html">the MySQL 5.7 docs</a> page.
*/
trait DefaultSQLEngines {
case object InnoDB extends SQLEngine(DefaultMySQLEngines.InnoDB)
case object InnoBase extends SQLEngine(DefaultMySQLEngines.innoBase)
case object Memory extends SQLEngine(DefaultMySQLEngines.Memory)
case object Heap extends SQLEngine(DefaultMySQLEngines.Heap)
case object Merge extends SQLEngine(DefaultMySQLEngines.Merge)
case object BDB extends SQLEngine(DefaultMySQLEngines.BDB)
case object BerkleyDB extends SQLEngine(DefaultMySQLEngines.BerkleyDB)
case object NDBCluster extends SQLEngine(DefaultMySQLEngines.NDBCluster)
case object NDB extends SQLEngine(DefaultMySQLEngines.NDB)
case object Example extends SQLEngine(DefaultMySQLEngines.Example)
case object Archive extends SQLEngine(DefaultMySQLEngines.Archive)
case object CSV extends SQLEngine(DefaultMySQLEngines.CSV)
case object Federated extends SQLEngine(DefaultMySQLEngines.Federated)
case object Blackhole extends SQLEngine(DefaultMySQLEngines.BlackHole)
}
trait MySQLEngines extends DefaultSQLEngines {}
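// Editor's note: hedged usage sketch, not part of the original morpheus source.
// It only uses the `value` field and the case objects defined above; `engineClause`
// is an illustrative helper name, not a morpheus API.
object SQLEngineUsageSketch extends DefaultSQLEngines {
  /** Render an engine selection as the trailing clause of a CREATE TABLE statement. */
  def engineClause(engine: SQLEngine): String = s"ENGINE=${engine.value}"
  // engineClause(InnoDB) == "ENGINE=InnoDB"
  // engineClause(Memory) == "ENGINE=MEMORY"
}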
|
websudos/morpheus
|
morpheus-dsl/src/main/scala/com/outworkers/morpheus/engine/query/SQLEngine.scala
|
Scala
|
bsd-2-clause
| 2,631 |
/*
* Copyright 2013 Julian Peeters
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package artisanal.pickle.maker
package types
import tags._
import scala.reflect.internal.pickling._
case class TypeRefTpe_String(currentPosition: Position, thisTpe_scala: ThisTpe_scala, scala: ExtModClassRef_scala, predef: ExtModClassRef_predef) extends Tpe {
var position = 0
var polyTpePosition = 0
var typeNamePosition = 0
var annotPos = 0
val typeName = "String"
def write(myPickleBuffer: PickleBuffer) = {
position = currentPosition.current
TypeRefTpe_nonGeneric(currentPosition, currentPosition.current + 1, currentPosition.current + 4).writeEntry(myPickleBuffer)
SingleTpe(currentPosition, thisTpe_scala.position, currentPosition.current + 1).write(myPickleBuffer)
ExtRef_nested(currentPosition, currentPosition.current + 1, scala.position).write(myPickleBuffer)
TermName(currentPosition, "Predef").write(myPickleBuffer)
ExtRef_nested(currentPosition, currentPosition.current + 1, currentPosition.current + 2).write(myPickleBuffer)
typeNamePosition = currentPosition.current
TypeName(currentPosition, "String").write(myPickleBuffer)
predef.write(currentPosition, myPickleBuffer, scala)
}
}
|
julianpeeters/artisanal-pickle-maker
|
src/main/scala/types/types/StringRef.scala
|
Scala
|
apache-2.0
| 1,757 |
package scala.collection.immutable
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
@RunWith(classOf[JUnit4])
class SetTest {
@Test
def test_SI8346_toSet_soundness(): Unit = {
val any2stringadd = "Disabled string conversions so as not to get confused!"
def any[A](set: Set[A]): Set[Any] = {
val anyset = set.toSet[Any]
assert((anyset + "fish") contains "fish")
anyset
}
// Make sure default immutable Set does not rebuild itself on widening with toSet
// Need to cover 0, 1, 2, 3, 4 elements as special cases
var si = Set.empty[Int]
assert(si eq si.toSet[Any])
for (i <- 1 to 5) {
val s1 = Set(Array.range(1, i+1): _*)
val s2 = si + i
val s1a = any(s1)
val s2a = any(s2)
assert(s1 eq s1a)
assert(s2 eq s2a)
si = s2
}
// Make sure BitSet correctly rebuilds itself on widening with toSet
// Need to cover empty, values 0-63, values 0-127 as special cases
val bitsets = Seq(BitSet.empty, BitSet(23), BitSet(23, 99), BitSet(23, 99, 141))
bitsets.foreach{ b =>
val ba = any(b)
assert(b ne ba)
assertEquals(b, ba)
}
// Make sure HashSet (and by extension, its implementing class HashTrieSet)
// does not rebuild itself on widening by toSet
val hashset = HashSet(1, 3, 5, 7)
val hashseta = any(hashset)
assert(hashset eq hashseta)
// Make sure ListSet does not rebuild itself on widening by toSet
// (Covers Node also, since it subclasses ListSet)
val listset = ListSet(1, 3, 5, 7)
val listseta = any(listset)
assert(listset eq listseta)
// Make sure SortedSets correctly rebuild themselves on widening with toSet
// Covers TreeSet and keySet of SortedMap also
val sortedsets = Seq(
SortedSet.empty[Int], SortedSet(5), SortedSet(1,2,3,5,4),
SortedMap(1 -> "cod", 2 -> "herring").keySet
)
sortedsets.foreach{ set =>
val seta = any(set)
assert(set ne seta)
assertEquals(set, seta)
}
// Make sure ValueSets correctly rebuild themselves on widening with toSet
object WeekDay extends Enumeration {
type WeekDay = Value
val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
}
val valuesa = any(WeekDay.values)
assert(WeekDay.values ne valuesa)
assertEquals(WeekDay.values, valuesa)
// Make sure regular Map keySets do not rebuild themselves on widening with toSet
val mapset = Map(1 -> "cod", 2 -> "herring").keySet
val mapseta = any(mapset)
assert(mapset eq mapseta)
}
}
|
felixmulder/scala
|
test/junit/scala/collection/immutable/SetTest.scala
|
Scala
|
bsd-3-clause
| 2,615 |
/**
* Copyright (C) 2013 Stefan Niederhauser ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package guru.nidi.atlassian.remote.query
import com.atlassian.jira.rpc.soap.beans._
import guru.nidi.atlassian.remote.jira.RemoteIssueExt
/**
*
*/
trait QueryService {
def getProjectsByKey(keys: String*): Seq[RemoteProject]
def getIssue(issueKey: String): RemoteIssueExt
def getIssuesFromJqlSearch(query: String, maxResults: Int): Seq[RemoteIssueExt]
def baseUrl: String
def customField(issue: RemoteIssue, name: String): String
def priorityById(id: String): RemotePriority
def issueTypeById(id: String): RemoteIssueType
def statusById(id: String): RemoteStatus
def resolutionById(id: String): RemoteResolution
}
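// Editor's note: hedged usage sketch, not part of the original source. It assumes
// RemoteIssueExt exposes the standard JIRA SOAP RemoteIssue accessor getKey;
// `issueUrls` is an illustrative helper, not an API of this project.
object QueryServiceUsageSketch {
  /** Build browse URLs for issues matching a JQL query. */
  def issueUrls(service: QueryService, jql: String): Seq[String] =
    service
      .getIssuesFromJqlSearch(jql, maxResults = 10)
      .map(issue => s"${service.baseUrl}/browse/${issue.getKey}")
}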
|
nidi3/simple-remote-atlassian
|
src/main/scala/guru/nidi/atlassian/remote/query/QueryService.scala
|
Scala
|
apache-2.0
| 1,272 |
package org.bone.ircballoon
import org.bone.ircballoon.model._
import org.bone.ircballoon.actor.message._
import I18N.i18n._
import ImageUtil._
import org.eclipse.swt.widgets.{List => SWTList, _}
import org.eclipse.swt.layout._
import org.eclipse.swt.events._
import org.eclipse.swt.graphics._
import org.eclipse.swt.custom.StyledText
import org.eclipse.swt.custom.StackLayout
import org.eclipse.swt.custom.ScrolledComposite
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import org.eclipse.swt._
import scala.collection.mutable.ListBuffer
class VoteWindow(parent: Shell) extends SWTHelper
{
class VoteOption(label: Label, prompt: Text, removeButton: Button) {
def dispose() {
label.dispose()
prompt.dispose()
removeButton.dispose()
}
}
val shell = new Shell(parent, SWT.DIALOG_TRIM|SWT.RESIZE)
val gridLayout = new GridLayout(3, false)
var options: List[String] = List("")
var optionsWidget: List[VoteOption] = Nil
val optionFrame = createGroup(shell, tr("Vote Options"), 3)
var addButton: Button = null
var startButton: Button = null
var spinner: Spinner = null
def createVoteOption(): (List[VoteOption], Button) = {
optionsWidget.foreach(_.dispose())
if (this.addButton != null) {
this.addButton.dispose()
}
val newWidgets = for(i <- 0 until options.size) yield {
val option = options(i)
val label = new Label(optionFrame, SWT.NONE)
val promptText = new Text(optionFrame, SWT.BORDER)
val removeButton = new Button(optionFrame, SWT.PUSH)
val gridData = new GridData(SWT.FILL, SWT.CENTER, true, false)
label.setText(i + ". ")
promptText.setText(option)
promptText.addModifyListener { e: ModifyEvent =>
options = options.updated(i, promptText.getText.toString)
}
removeButton.setToolTipText(tr("Remove this option"))
removeButton.setImage(MyIcon.remove)
removeButton.addSelectionListener { e: SelectionEvent =>
if (options.size > 1) {
val (l1, l2) = options splitAt i
options = l1 ::: (l2 drop 1)
updateOptionArea(createVoteOption())
}
}
promptText.setLayoutData(gridData)
new VoteOption(label, promptText, removeButton)
}
val addButtonData = new GridData(SWT.RIGHT, SWT.CENTER, false, false, 3, 1)
val addButton = new Button(optionFrame, SWT.PUSH)
addButton.setLayoutData(addButtonData)
addButton.setToolTipText(tr("Add vote option"))
addButton.setImage(MyIcon.add)
addButton.addSelectionListener { e: SelectionEvent =>
options = options ++ List("")
updateOptionArea(createVoteOption())
}
shell.layout(true, true)
(newWidgets.toList, addButton)
}
def updateOptionArea(widgets: (List[VoteOption], Button)) {
this.optionsWidget = widgets._1
this.addButton = widgets._2
}
def isAllOptionsNotEmpty = options.forall(!_.isEmpty)
def isIRCConnected = {
try {
implicit val timeout = Timeout(5.seconds)
val status: Future[Boolean] = (MainWindow.controller ? IsConnected).mapTo[Boolean]
Await.result(status, 5.seconds)
} catch {
case e: Exception => false
}
}
def displayError(message: String) {
val messageBox = new MessageBox(shell, SWT.ERROR|SWT.OK)
messageBox.setMessage(message)
messageBox.open()
}
def addStartButton(): Button = {
val gridData = new GridData(SWT.RIGHT, SWT.FILL, true, false)
val button = new Button(shell, SWT.PUSH)
button.setText(tr("Start"))
button.setImage(MyIcon.vote)
button.setLayoutData(gridData)
button.addSelectionListener { e: SelectionEvent =>
if (!isAllOptionsNotEmpty) {
displayError(tr("Vote options cannot have empty value."))
} else if (!isIRCConnected) {
displayError(tr("You need connect to IRC chatroom before start voting."))
} else {
val voteStatusWin = new VoteStatusWin(parent, options, this.spinner.getSelection)
MainWindow.controller ! StartVoting(options, this.spinner.getSelection, voteStatusWin)
shell.dispose()
voteStatusWin.open()
}
}
button
}
def createDurationSpinner(): Spinner =
{
val label = new Label(shell, SWT.LEFT)
val spinner = new Spinner(shell, SWT.NONE)
label.setText(tr("Vote duration (minutes):"))
spinner.setMaximum(180)
spinner.setMinimum(2)
spinner
}
def open()
{
updateOptionArea(createVoteOption())
this.spinner = createDurationSpinner()
this.startButton = addStartButton()
shell.setLayout(gridLayout)
shell.setText(tr("Start Vote"))
shell.setSize(600, 400)
shell.open()
}
}
|
brianhsu/IRCBalloon
|
src/main/scala/ui/VoteWindow.scala
|
Scala
|
gpl-3.0
| 4,811 |
package feh.tec.cvis.common
import feh.tec.cvis.common.ChannelDescriptor.Comparator
import feh.tec.cvis.common.cv.Helper.Array2D
import org.opencv.core.Point
trait ImageDescriptor {
type ADescriptor <: AreaDescriptor
def name : String
def originalImage : Array[Byte]
def sideLength: Int
def descriptorChannels: Int
def interestPoints: Map[Point, ADescriptor]
}
object ImageDescriptor{
@deprecated
trait BetterSearch{
self: ImageDescriptor =>
type PointsGroupDescriptor = Double
def pointsGroups: Map[PointsGroupDescriptor, Set[Point]]
}
}
trait AreaDescriptor{
type Channel <: ChannelDescriptor
def sideLength: Int
def channelsCount: Int
def channels: Seq[Channel]
}
object AreaDescriptor{
implicit class AreaDescriptorCompareWrapper[D <: ChannelDescriptor](ad: AreaDescriptor.SingleChannel{ type Channel = D })
(implicit c: Comparator[D])
{
def canBeEqual(ad2: AreaDescriptor.SingleChannel{ type Channel = D }, precision: Double): Boolean =
implicitly[Comparator[D]].canBeEqual(ad.channel, ad2.channel, precision)
}
// implicit class AreaDescriptorCompareWrapper[D <: AreaDescriptor : Comparator](d: D){
// def canBeEqual(d2: D, precision: Double): Boolean = implicitly[Comparator[D]].canBeEqual(d, d2, precision)
// }
trait SingleChannel extends AreaDescriptor{
def channel: Channel
final def channelsCount = 1
final def channels = channel :: Nil
}
trait HasStatistics extends AreaDescriptor{
type Channel <: ChannelDescriptor with ChannelDescriptor.Statistics
}
// todo
}
trait ChannelDescriptor{
def data : Array[Double]
def byRows: Array2D[Double]
def byCols: Array2D[Double]
}
object ChannelDescriptor{
implicit class ChannelDescriptorCompareWrapper[D <: ChannelDescriptor : Comparator](d: D){
def canBeEqual(d2: D, precision: Double): Boolean = implicitly[Comparator[D]].canBeEqual(d, d2, precision)
}
trait Comparator[D <: ChannelDescriptor]{
def canBeEqual(d1: D, d2: D, precision: Double): Boolean
}
trait Statistics{
self: ChannelDescriptor =>
// for each channel
def mean : Double
def std : Double
def range : Double
/** interquartile range */
def iqr : Double
}
object ByIqrComparator extends Comparator[ChannelDescriptor with Statistics]{
def canBeEqual(d1: ChannelDescriptor with Statistics, d2: ChannelDescriptor with Statistics, precision: Double): Boolean = ???
}
}
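// Editor's note: hedged sketch, not part of the original source. ByIqrComparator above is
// left unimplemented (???); this shows one plausible statistics-based comparison using only
// the members declared in ChannelDescriptor.Statistics. The tolerance semantics are assumed.
object StatisticsComparatorSketch
  extends ChannelDescriptor.Comparator[ChannelDescriptor with ChannelDescriptor.Statistics] {
  def canBeEqual(d1: ChannelDescriptor with ChannelDescriptor.Statistics,
                 d2: ChannelDescriptor with ChannelDescriptor.Statistics,
                 precision: Double): Boolean =
    math.abs(d1.mean - d2.mean) <= precision &&
      math.abs(d1.std - d2.std) <= precision &&
      math.abs(d1.iqr - d2.iqr) <= precision
}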
|
fehu/comp-vis
|
common/src/main/scala/feh/tec/cvis/common/Descriptors.scala
|
Scala
|
mit
| 2,562 |
package com.wavesplatform.settings
import java.io.File
import com.wavesplatform.common.state.ByteStr
case class WalletSettings(file: Option[File], password: Option[String], seed: Option[ByteStr])
|
wavesplatform/Waves
|
node/src/main/scala/com/wavesplatform/settings/WalletSettings.scala
|
Scala
|
mit
| 199 |
package feh.tec.cvis
import java.awt.image._
import java.awt.{Color, Dimension}
import java.nio.ByteBuffer
import java.util.UUID
import feh.dsl.swing2.Var
import feh.tec.cvis.DescriptorsSupport.{ADescriptor, IDescriptor}
import feh.tec.cvis.common.cv.Helper._
import feh.tec.cvis.common.cv.describe.CallHistory.{TypedArgEntry, ArgEntry}
import feh.tec.cvis.common.cv.describe.{ArgDescriptor, CallHistoryContainer, CallHistory}
import feh.tec.cvis.common.cv.{CV, CornerDetection, Drawing}
import feh.tec.cvis.db.{HasDescriptorCache, HasDbConnections}
import feh.tec.cvis.db.SingleChannelDescriptorsWithStats._
import feh.tec.cvis.gui.GenericSimpleApp.DefaultApp
import org.opencv.core._
import slick.driver.H2Driver.api._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.swing.Swing._
object HarrisApp extends DefaultApp("Harris interest points", 300 -> 300, 600 -> 800)
with HarrisSupport
with FeatureDetectionSupport
with KMeansSupport
with GroupingSupport
with DescriptorsSupport
with AdminSupport
with UserSupport
with Drawing
{
CV.loadNative()
def mkSimpleFrame(image: BufferedImage,
frameTitle: String,
defaultSize: Dimension,
regNewFrame: SimpleFrame => Unit,
unregAFrame: SimpleFrame => Unit ) =
new SimpleFrame(image, frameTitle, defaultSize, regNewFrame, unregAFrame)
with ConfigurationsPanelBuilder
with FrameExec
with HistorySupport
with HarrisSupportFrame
with FeatureDetectionSupportFrame
with KMeansSupportFrame
with GroupingSupportFrame
with DescriptorsSupportFrame
with AdminSupportFrame
with UserSupportFrame
with CornerDetection
with MatSupport
with HasDbConnections
with HasDescriptorCache
{
frame =>
def dbAccessTimeout: FiniteDuration = 200.millis
// implicit val db = DbConnection(Database.forConfig("h2harris"))
implicit val db = DbConnection(Database.forConfig("h2harrisDev")); println("using `dev` db")
override def stop(): Unit = {
db.close(dbAccessTimeout)
super.stop()
}
db.tryCreateTables( table.create )
// LayoutDebug = true
protected val distinctInterestPoints: Var[CallHistoryContainer[Set[(Int, Int)]]] =
Var(CallHistoryContainer.empty(Set()))
type Config = SimpleVerticalPanel with PanelExec[_, _]
lazy val configurations: Seq[(String, Config)] = Seq(
"harris" -> HarrisPanel
, "features" -> FeatureDetectionPanel
, "grouping" -> GroupingPanel
, "k-means" -> KMeansPanel
, "distinct" -> DistinctPanel
, "describe" -> DescriptorsPanel
, "admin" -> AdminPanel
, "user" -> UserPanel
)
object FeatureDetectionPanel extends FeatureDetectionPanel{
def getSrc: CallHistoryContainer[Mat] = CallHistoryContainer.empty(originalMat)
}
object GroupingPanel extends GroupingPanel{
def getSrc = harrisFiltered.get
override def drawGroupsCenters(): Unit = {
KMeansPanel.getInitialLabels set groupsCentersWithPoints.get.value.map(_._2) // todo: should be in another place, not in draw
if(repaint_?.get) HarrisPanel.drawHarris()
super.drawGroupsCenters()
}
}
object KMeansPanel extends KMeansPanel {
def getSrc = if(useInitialLabels.get) CallHistoryContainer(harrisFiltered.get.value,
groupsCentersWithPoints.get.history
.asInstanceOf[CallHistory[HarrisFilterd]]
)
else harrisFiltered.get
lazy val getInitialLabels: Var[Seq[Set[Point]]] = Var(Nil)
override def drawClusterCenters() = {
if (repaint_?.get) HarrisPanel.drawHarris()
super.drawClusterCenters()
}
UpperPanel.onError +:= ((_: Throwable) => useInitialLabels.set(false))
}
object DistinctPanel extends GroupingPanel{
sealed trait Source
object Source{
case object None extends Source{ override def toString = "[None]" }
case object Grouping extends Source{ override def toString = "grouping" }
case object KMeans extends Source{ override def toString = "k-means" }
case object Features extends Source{ override def toString = "features detection" }
object Descriptor extends ArgDescriptor[Source]("Source", null)
}
override def params: Set[ArgEntry[_]] = super.params ++ Set(
TypedArgEntry(Source.Descriptor, gcSrc.get)
)
lazy val gcSrc: Var[Source] = Var(Source.None)
protected lazy val sourcesAvailable = Var(Set[Source](Source.None))
groupsCentersWithPoints.onChange(l => if(l.value.isEmpty) sourcesAvailable.affect(_ - Source.Grouping)
else sourcesAvailable.affect(_ + Source.Grouping))
clusteringResult .onChange(l => if(l.value.isEmpty) sourcesAvailable.affect(_ - Source.KMeans)
else sourcesAvailable.affect(_ + Source.KMeans))
featureDetectionResultFiltered.onChange(l => if(l.value.isEmpty) sourcesAvailable.affect(_ - Source.Features)
else sourcesAvailable.affect(_ + Source.Features))
sourcesAvailable.onChange(_ => sourceControl.component.tryUpdate())
lazy val sourceControl = controlForSeq(sourcesAvailable.get.toSeq).dropDownList(gcSrc.set)
override def formBuilders: Seq[(String, (AbstractDSLBuilder, DSLLabelBuilder[_]))] = Seq(
"distinctSourceControl" -> (sourceControl -> label("Source"))
, "maxPairToPairInClusterDistance2" -> maxPairToPairInClusterDistanceControl
)
def getSrc: CallHistoryContainer[List[Point]] = gcSrc.get match {
case Source.Grouping => groupsCentersWithPoints.get.affect(CallHistory.Entry("take group center"))(_.map(_._1))
case Source.KMeans => clusteringResult.get.affect(CallHistory.Entry("take cluster centers"))(_.centers)
case Source.Features => featureDetectionResultFiltered.get.affect(CallHistory.Entry("take the points"))(_.map(_.pt.swap: Point))
case Source.None => CallHistoryContainer.empty(Nil)
}
override def setResult: CallHistoryContainer[List[(Point, Set[Point])]] => Unit = {
res =>
distinctInterestPoints set res.affect(CallHistory.Entry("points to (Int, Int)"))(_.map(_._1.pairInt).toSet)
drawGroupsCenters()
}
override def drawGroupsCenters(): Unit = {
if(repaint_?.get) HarrisPanel.drawHarris()
affectImageMat(img => distinctInterestPoints.get.value.foreach{
p => img.draw.circle(p.swap, maxPairToPairInClusterDistance.toInt, Color.cyan, thickness = 2)
})
repaintImage()
}
}
object DescriptorsPanel extends DescriptorsPanel{
def getSrc: CallHistoryContainer[(Mat, Set[Point])] = distinctInterestPoints.get
.affect(CallHistory.Entry("mk points from pairs"))(pts => originalInGrayScale -> pts.map(x => x: Point))
}
object AdminPanel extends AdminPanel{
def dbAccessTimeout = frame.dbAccessTimeout
def getSrc: (Int, Int, Int, Int, Array[Byte], CallHistoryContainer[Map[Point, ADescriptor]]) =
( originalMat.width()
, originalMat.height()
, originalMat.`type`()
, originalImage.getType
, originalMat.toArray[Byte]
, imageDescriptors.get
)
showHistoryFrameTrigger.component.enabled = false
imageDescriptors.onChange{ h => showHistoryFrameTrigger.enabled = h.value.nonEmpty }
def fetchDbInfo(): Future[Seq[(String, Int)]] = db.run(query.namesAndCounts)
def setResult: (IDescriptor) => Unit = d => db.run(query.insert(d))
}
object UserPanel extends UserPanel{
def getSrc: CallHistoryContainer[Map[Point, ADescriptor]] = imageDescriptors.get
def fetchDescriptor(id: UUID)= Await.result(descriptorCache.get(id), frame.dbAccessTimeout)
def searchDbTimeout = frame.dbAccessTimeout
def searchDb(mean: Option[Double], std: Option[Double], range: Option[Double], iqr: Option[Double], precision: Double): Future[Map[(UUID, String), Seq[(Int, Int)]]] =
query.searchBy(mean, std, range, iqr, precision) map db.run getOrElse Future{ Map() }
}
frame.updateForms()
}
}
|
fehu/comp-vis
|
harris-app/src/main/scala/feh/tec/cvis/HarrisApp.scala
|
Scala
|
mit
| 8,955 |
package com.arcusys.learn.liferay.update.migration
import com.arcusys.learn.liferay.util.Base64Helper
import com.arcusys.valamis.file.FileTableComponent
import com.arcusys.valamis.file.model.FileRecord
import scala.slick.driver.JdbcProfile
import scala.slick.jdbc.{JdbcBackend, StaticQuery}
class FileStorageMigration2303(val db: JdbcBackend#DatabaseDef, val driver: JdbcProfile) extends FileTableComponent {
import driver.simple._
def migrate(): Unit = {
db.withTransaction { implicit session =>
val ids = StaticQuery.queryNA[Long]("SELECT id_ FROM Learn_LFFileStorage").list
for (
id <- ids;
file <- StaticQuery.queryNA[(String, String)](s"SELECT filename, content FROM Learn_LFFileStorage where id_ = ${id}").list
) {
val filename = file._1
Option(file._2).filterNot(_.isEmpty).map(Base64Helper.stringToObject) match {
case Some(content: Array[Byte]) =>
files.filter(_.filename === filename).delete
files.insert(new FileRecord(filename, Some(content)))
case _ =>
}
}
}
}
}
|
ViLPy/Valamis
|
learn-portlet/src/main/scala/com/arcusys/learn/liferay/update/migration/FileStorageMigration2303.scala
|
Scala
|
lgpl-3.0
| 1,103 |
package filodb.coordinator
import scala.util.{Failure, Success, Try}
import akka.actor.{ActorRef, Address}
import com.typesafe.scalalogging.StrictLogging
import filodb.core.DatasetRef
/**
* Each FiloDB dataset is divided into a fixed number of shards for ingestion and distributed in-memory
* querying. The ShardMapper keeps track of the mapping between shards and nodes for a single dataset.
* It also keeps track of the status of each shard.
* - Given a partition hash, find the shard and node coordinator
* - Given a shard key hash and # bits, find the shards and node coordinators to query
* - Given a shard key hash and partition hash, # bits, compute the shard (for ingestion partitioning)
* - Register a node to given shard numbers
*
 * It is not thread-safe for mutations (registrations), but reads should be fine.
*
* The shard finding given a hash needs to be VERY fast, it is in the hot query and ingestion path.
*
* @param numShards number of shards. For this implementation, it needs to be a power of 2.
*
*/
class ShardMapper(val numShards: Int) extends Serializable {
import ShardMapper._
require((numShards & (numShards - 1)) == 0, s"numShards $numShards must be a power of two")
private final val log2NumShards = (scala.math.log10(numShards) / scala.math.log10(2)).round.toInt
private final val shardMap = Array.fill(numShards)(ActorRef.noSender)
private final val statusMap = Array.fill[ShardStatus](numShards)(ShardStatusUnassigned)
private final val log2NumShardsOneBits = (1 << log2NumShards) - 1 // results in log2NumShards one bits
// precomputed mask for shard key bits of shard for each spread value
// lower (log2NumShards-spread) bits of shard are devoted to the shard key and set to 1, rest of bits set to 0
// The spread is the array index.
private final val shardHashMask = Array.tabulate[Int](log2NumShards + 1) { i =>
(1 << (log2NumShards - i)) - 1
}
// precomputed mask for partition hash portion of the shard for each spread value
// upper (spread) bits of the shard are devoted to the partition hash to decide on final shard value
// The spread is the array index. Really it is the inverse of the shardHashMask within those bits.
private final val partHashMask = Array.tabulate[Int](log2NumShards + 1) { i =>
shardHashMask(i) ^ log2NumShardsOneBits
}
def copy(): ShardMapper = {
val shardMapperNew = new ShardMapper(numShards)
shardMap.copyToArray(shardMapperNew.shardMap)
statusMap.copyToArray(shardMapperNew.statusMap)
shardMapperNew
}
override def equals(other: Any): Boolean = other match {
case s: ShardMapper => s.numShards == numShards && s.shardValues == shardValues
case o: Any => false
}
override def hashCode: Int = shardValues.hashCode
override def toString: String = s"ShardMapper ${shardValues.zipWithIndex}"
def shardValues: Seq[(ActorRef, ShardStatus)] = shardMap.zip(statusMap).toBuffer
def statuses: Array[ShardStatus] = statusMap
/**
* Maps a partition hash to a shard number and a NodeCoordinator ActorRef
*/
def partitionToShardNode(partitionHash: Int): ShardAndNode = {
val shard = toShard(partitionHash, numShards) // TODO this is not right. Need to fix
ShardAndNode(shard, shardMap(shard))
}
def coordForShard(shardNum: Int): ActorRef = shardMap(shardNum)
def unassigned(shardNum: Int): Boolean = coordForShard(shardNum) == ActorRef.noSender
def statusForShard(shardNum: Int): ShardStatus = statusMap(shardNum)
def numAssignedCoords: Int = (shardMap.toSet - ActorRef.noSender).size
/**
* Use this function to identify the list of shards to query given the shard key hash.
*
* @param shardKeyHash This is the shard key hash, and is used to identify the shard group
* @param spread This is the 'spread' S assigned for a given appName. The data for every
* metric in the app is spread across 2^S^ shards. Example: if S=2, data
* is spread across 4 shards. If S=0, data is located in 1 shard. Bigger
* apps are assigned bigger S and smaller apps are assigned small S.
* @return The shard numbers that hold data for the given shardKeyHash
*/
def queryShards(shardKeyHash: Int, spread: Int): Seq[Int] = {
validateSpread(spread)
// lower (log2NumShards - spread) bits should go to shardKeyHash
val shardBase = shardKeyHash & shardHashMask(spread)
// create the shard for each possible partHash value portion of shard
val spacing = 1 << (log2NumShards - spread)
(shardBase until numShards by spacing)
}
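  // Worked example (editor's note, derived directly from the masks above): with
  // numShards = 8 (log2NumShards = 3) and spread = 1, shardHashMask(1) = 3 and the
  // spacing between candidate shards is 1 << (3 - 1) = 4. For shardKeyHash = 0x2A
  // (binary 101010), shardBase = 0x2A & 3 = 2, so queryShards(0x2A, 1) returns shards 2 and 6.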
private def validateSpread(spread: Int) = {
require(spread >= 0 && spread <= log2NumShards, s"Invalid spread $spread. log2NumShards is $log2NumShards")
}
/**
* Use this function to calculate the ingestion shard for a fully specified partition id.
* The code logic ingesting data into partitions can use this function to direct data
* to the right partition
*
* @param shardKeyHash This is the shard key hash, and is used to identify the shard group
* @param partitionHash The 32-bit hash of the overall partition or time series key, containing all tags
* @param spread This is the 'spread' S assigned for a given appName. The data for every
* metric in the app is spread across 2^S^ shards. Example: if S=2, data
* is spread across 4 shards. If S=0, data is located in 1 shard. Bigger
* apps are assigned bigger S and smaller apps are assigned small S.
* @return The shard number that contains the partition for the record described by the given
* shardKeyHash and partitionHash
*/
def ingestionShard(shardKeyHash: Int, partitionHash: Int, spread: Int): Int = {
validateSpread(spread)
// explanation for the one-liner:
// shardKeyHash forms the lower n bits of the shard, while partitionHash forms upper (spread) bits
// It is designed this way such that for the same shard key, the rest of the tags spreads out the shard
// across the shard space (thus nodes), ensuring more even distribution
(shardKeyHash & shardHashMask(spread)) | (partitionHash & partHashMask(spread))
}
@deprecated(message = "Use ingestionShard() instead of this method", since = "0.7")
def hashToShard(shardHash: Int, partitionHash: Int, numShardBits: Int): Int = {
ingestionShard(shardHash, partitionHash, log2NumShards - numShardBits)
}
/**
* Returns all shards that match a given address - typically used to compare to cluster.selfAddress
* for that node's own shards
*/
def shardsForAddress(addr: Address): Seq[Int] =
shardMap.toSeq.zipWithIndex.collect {
case (ref, shardNum) if ref != ActorRef.noSender && ref.path.address == addr => shardNum
}
def shardsForCoord(coord: ActorRef): Seq[Int] =
shardMap.toSeq.zipWithIndex.collect {
case (ref, shardNum) if ref == coord => shardNum
}
def unassignShard(shard: Int): Try[Unit] = {
shardMap(shard) = ActorRef.noSender
Success(())
}
/**
* Returns all the shards that have not yet been assigned or in process of being assigned
*/
def unassignedShards: Seq[Int] =
shardMap.toSeq.zipWithIndex.collect { case (ActorRef.noSender, shard) => shard }
def assignedShards: Seq[Int] =
shardMap.toSeq.zipWithIndex.collect { case (ref, shard) if ref != ActorRef.noSender => shard }
def numAssignedShards: Int = numShards - unassignedShards.length
def isAnIngestionState(shard: Int): Boolean = statusMap(shard) match {
case ShardStatusStopped | ShardStatusDown => false
case _ => true
}
/**
* Find out if a shard is active (Normal or Recovery status) or filter a list of shards
*/
def activeShard(shard: Int): Boolean =
statusMap(shard) == ShardStatusActive || statusMap(shard).isInstanceOf[ShardStatusRecovery]
def activeShards(shards: Seq[Int]): Seq[Int] = shards.filter(activeShard)
/**
* Returns a set of unique NodeCoordinator ActorRefs for all assigned shards
*/
def allNodes: Set[ActorRef] = shardMap.toSeq.filter(_ != ActorRef.noSender).toSet
/**
* The main API for updating a ShardMapper.
* If you want to throw if an update does not succeed, call updateFromEvent(ev).get
*/
def updateFromEvent(event: ShardEvent): Try[Unit] = event match {
case e if statusMap.length < e.shard || e.shard < 0 =>
Failure(ShardError(e, s"Invalid shard=${e.shard}, unable to update status."))
case ShardAssignmentStarted(_, shard, node) =>
statusMap(shard) = ShardStatusAssigned
registerNode(Seq(shard), node)
case IngestionStarted(_, shard, node) =>
statusMap(shard) = ShardStatusActive
registerNode(Seq(shard), node)
case RecoveryStarted(_, shard, node, progress) =>
statusMap(shard) = ShardStatusRecovery(progress)
registerNode(Seq(shard), node)
case RecoveryInProgress(_, shard, node, progress) =>
statusMap(shard) = ShardStatusRecovery(progress)
registerNode(Seq(shard), node)
case IngestionError(_, shard, _) =>
statusMap(shard) = ShardStatusError
unassignShard(shard)
case IngestionStopped(_, shard) =>
statusMap(shard) = ShardStatusStopped
Success(())
case ShardDown(_, shard, node) =>
statusMap(shard) = ShardStatusDown
unassignShard(shard)
case _ =>
Success(())
}
/**
* Returns the minimal set of events needed to reconstruct this ShardMapper
*/
def minimalEvents(ref: DatasetRef): Seq[ShardEvent] =
(0 until numShards).flatMap { shard =>
statusMap(shard).minimalEvents(ref, shard, shardMap(shard))
}
/**
* Registers a new node to the given shards. Modifies state in place.
* Idempotent.
*/
private[coordinator] def registerNode(shards: Seq[Int], coordinator: ActorRef): Try[Unit] = {
shards foreach {
case shard =>
        // We always override the mapping. There was code earlier that prevented
        // changing the mapping unless it was explicitly unassigned first.
        // But functional tests uncovered that sometimes the member-down event is not
        // received, and hence assignments were not removed first.
val oldCoord = shardMap(shard)
log.debug(s"Unassigned coordinator $oldCoord for shard=$shard - Reassigning to $coordinator")
shardMap(shard) = coordinator
}
Success(())
}
/**
* Removes a coordinator ref from all shards mapped to it. Resets the shards to no owner and
* returns the shards removed.
*/
private[coordinator] def removeNode(coordinator: ActorRef): Seq[Int] = {
shardMap.toSeq.zipWithIndex.collect {
case (ref, i) if ref == coordinator =>
shardMap(i) = ActorRef.noSender
i
}
}
private[coordinator] def clear(): Unit = {
for { i <- 0 until numShards } { shardMap(i) = ActorRef.noSender }
}
/**
* Gives a pretty grid-view summary of the status of each shard, plus a sorted view of shards owned by each
* coordinator.
*/
def prettyPrint: String = {
val sortedCoords = allNodes.toSeq.sorted
"Status legend: .=Unassigned N=Assigned A=Active E=Error R=Recovery S=Stopped D=Down\n----- Status Map-----\n" +
statusMap.toSeq.grouped(16).zipWithIndex.map { case (statGroup, i) =>
f" ${i * 16}%4d-${Math.min(i * 16 + 15, numShards)}%4d " +
statGroup.grouped(8).map(_.map(statusToLetter).mkString("")).mkString(" ")
}.mkString("\n") +
"\n----- Coordinators -----\n" +
sortedCoords.map { coord =>
f" $coord%40s\t${shardsForCoord(coord).mkString(", ")}"
}.mkString("\n")
}
}
private[filodb] object ShardMapper extends StrictLogging {
val default = new ShardMapper(1)
val log = logger
final case class ShardAndNode(shard: Int, coord: ActorRef)
final def toShard(n: Int, numShards: Int): Int = (((n & 0xffffffffL) * numShards) >> 32).toInt
def copy(orig: ShardMapper, ref: DatasetRef): ShardMapper = {
val newMap = new ShardMapper(orig.numShards)
orig.minimalEvents(ref).foreach(newMap.updateFromEvent)
newMap
}
final case class ShardAlreadyAssigned(shard: Int, status: ShardStatus, assignedTo: ActorRef)
extends Exception(s"Shard [shard=$shard, status=$status, coordinator=$assignedTo] is already assigned.")
final case class ShardError(event: ShardEvent, context: String)
extends Exception(s"$context [shard=${event.shard}, event=$event]")
def statusToLetter(status: ShardStatus): String = status match {
case ShardStatusUnassigned => "."
case ShardStatusAssigned => "N"
case ShardStatusActive => "A"
case ShardStatusError => "E"
case s: ShardStatusRecovery => "R"
case ShardStatusStopped => "S"
case ShardStatusDown => "D"
}
}
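// Editor's note: hedged, self-contained sketch (not part of the original file) showing the
// shard-placement math above in isolation; the hash values are arbitrary.
object ShardMapperMathSketch {
  def main(args: Array[String]): Unit = {
    val mapper = new ShardMapper(8)   // log2NumShards = 3
    val spread = 1
    val shardKeyHash = 0x2A           // 42
    val partitionHash = 0x1D          // 29
    // All shards that may hold data for this shard key: 2 and 6.
    println(mapper.queryShards(shardKeyHash, spread).toList)
    // The single shard this record ingests into: 6 (always one of the shards above).
    println(mapper.ingestionShard(shardKeyHash, partitionHash, spread))
  }
}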
|
tuplejump/FiloDB
|
coordinator/src/main/scala/filodb.coordinator/ShardMapper.scala
|
Scala
|
apache-2.0
| 12,868 |
// code-examples/Rounding/sake.scala
// This chapter contains only script files.
import sake.Project._
// Define some convenient variables.
val libDir = "../lib/"
// If true, don't actually run any commands.
environment.dryRun = false
// If true, show stack traces when a failure happens (doesn't affect some "errors").
showStackTracesOnFailures = false
// Logging level: Info, Notice, Warn, Error, Failure
log.threshold = Level.Info
environment.classpath :::= (files(libDir + "*.jar") -- files(libDir + "*src.jar"))
target('all -> List('clean, 'scripts))
target('scripts) {
// Omits the following:
// specs-script: Demonstrates the Specs DSL, but doesn't run as is.
// try-catch-script: Deliberately exits with -1; would cause a build failure.
// while-script: Runs essentially forever, so it's excluded.
(files("**/*-script.scala") --
files("**/specs-script.scala") --
files("**/try-catch-script.scala") --
files("**/while-script.scala")).foreach { script =>
scala(
'classpath -> environment.classpath,
'opts -> script
)
}
}
target('clean, 'compile, 'spec) {}
|
XClouded/t4f-core
|
scala/src/tmp/Rounding/sake.scala
|
Scala
|
apache-2.0
| 1,140 |
package org.edla.tmdb.shelf
import org.htmlcleaner.{CleanerProperties, DomSerializer, HtmlCleaner, TagNode}
import java.net.URL
import javax.xml.xpath.{XPath, XPathConstants, XPathFactory}
object ImdbInfo {
def getInfo(imdbId: String): (Option[BigDecimal], Option[Boolean]) = {
val url = new URL(s"https://www.imdb.com/title/$imdbId/")
val cleaner = new HtmlCleaner()
val tagNode: TagNode = cleaner.clean(url)
//println("<" + tagNode.getName + ">" + cleaner.getInnerHtml(tagNode) + "</" + tagNode.getName + ">")
val doc: org.w3c.dom.Document = new DomSerializer(new CleanerProperties()).createDOM(tagNode)
val xpath: XPath = XPathFactory.newInstance().newXPath()
val (rawScore, rawIsNotTheatricalFilm) = {
newSite(xpath, doc) match {
case (a, b) if (a.isEmpty && b.isEmpty) => oldSite(xpath, doc)
case (a, b) => (a, b)
}
}
val score =
if (rawScore.isEmpty) {
None
} else {
Some(BigDecimal(rawScore.split("/").head))
}
val isNotTheatricalFilm = Some(
List("TV Movie", "TV Short", "Video", "Episode aired", "TV Series", "TV Special").exists {
rawIsNotTheatricalFilm.contains
}
)
(score, isNotTheatricalFilm)
}
def oldSite(xpath: XPath, doc: org.w3c.dom.Document): (String, String) = {
(
xpath
.evaluate(
"//span[@class='AggregateRatingButton__RatingScore-sc-1ll29m0-1 iTLWoV']",
doc,
XPathConstants.STRING
)
.toString,
xpath
.evaluate("//li[@class='ipc-inline-list__item']", doc, XPathConstants.STRING)
.toString
)
}
def newSite(xpath: XPath, doc: org.w3c.dom.Document): (String, String) = {
(
xpath
.evaluate(
"//div[@id='main_top']//div[@class='imdbRating']/div[@class='ratingValue']/strong",
doc,
XPathConstants.STRING
)
.toString,
xpath.evaluate("//div[@id='main_top']//div[@class='subtext']", doc, XPathConstants.STRING).toString
)
}
def getScoreFromId(imdbId: String): Option[BigDecimal] = {
if (imdbId.isEmpty) {
None
} else {
getInfo(imdbId)._1
}
}
def getInfoFromId(imdbId: String): (Option[BigDecimal], Option[Boolean]) = {
if (imdbId.isEmpty) {
(None, None)
} else {
getInfo(imdbId)
}
}
}
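// Editor's note: hedged usage sketch, not part of the original source. The IMDb id is
// illustrative; a real call performs an HTTP request and scrapes the page, so the result
// depends on IMDb's current markup (which is why getInfo tries two sets of XPaths).
object ImdbInfoUsageSketch {
  def main(args: Array[String]): Unit = {
    val (score, isNotTheatricalFilm) = ImdbInfo.getInfoFromId("tt0133093")
    println(s"score=$score, notTheatricalFilm=$isNotTheatricalFilm")
  }
}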
|
newca12/TMDb-shelf
|
src/main/scala/org/edla/tmdb/shelf/ImdbInfo.scala
|
Scala
|
gpl-3.0
| 2,432 |
package liang.don.dzviewer.log.net
import liang.don.dzviewer.log.{LogLevel, LoggerInterface}
/**
* File Logger using .Net (C#) libraries.
*
* @author Don Liang
 * @version 0.1.2, 16/09/2011
*/
trait FileLogger extends LoggerInterface {
override def log(message: String) {
sys.error("[" + getClass.getName + "#log] Not implemented.")
}
override def log(message: String, logLevel: LogLevel.Value) {
sys.error("[" + getClass.getName + "#log] Not implemented.")
}
override def log(message: String, logLevel: LogLevel.Value, exception: Exception) {
sys.error("[" + getClass.getName + "#log] Not implemented.")
}
}
|
dl2k84/DeepZoomViewer
|
src/liang/don/dzviewer/log/net/FileLogger.scala
|
Scala
|
mit
| 642 |
/* https://www.hackerrank.com/challenges/simple-array-sum */
package com.negrisoli.algorithms.warmup
object SimpleArraySum {
def main(args: Array[String]) {
var it = io.Source.stdin.getLines()
var size = it.next().toInt
println(it.next().split(" ").take(size).map(_.toInt).sum)
}
}
|
rbatista/algorithms
|
challenges/hacker-rank/scala/src/main/scala/com/negrisoli/algorithms/warmup/SimpleArraySum.scala
|
Scala
|
mit
| 301 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.annotation.{DeveloperApi, Experimental, Since}
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared.{HasCheckpointInterval, HasFeaturesCol, HasMaxIter, HasSeed}
import org.apache.spark.ml.util._
import org.apache.spark.mllib.clustering.{DistributedLDAModel => OldDistributedLDAModel,
EMLDAOptimizer => OldEMLDAOptimizer, LDA => OldLDA, LDAModel => OldLDAModel,
LDAOptimizer => OldLDAOptimizer, LocalLDAModel => OldLocalLDAModel,
OnlineLDAOptimizer => OldOnlineLDAOptimizer}
import org.apache.spark.mllib.impl.PeriodicCheckpointer
import org.apache.spark.mllib.linalg.{Matrix, Vector, Vectors, VectorUDT}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.sql.functions.{col, monotonicallyIncreasingId, udf}
import org.apache.spark.sql.types.StructType
private[clustering] trait LDAParams extends Params with HasFeaturesCol with HasMaxIter
with HasSeed with HasCheckpointInterval {
/**
* Param for the number of topics (clusters) to infer. Must be > 1. Default: 10.
*
* @group param
*/
@Since("1.6.0")
final val k = new IntParam(this, "k", "number of topics (clusters) to infer",
ParamValidators.gt(1))
/** @group getParam */
@Since("1.6.0")
def getK: Int = $(k)
/**
* Concentration parameter (commonly named "alpha") for the prior placed on documents'
* distributions over topics ("theta").
*
* This is the parameter to a Dirichlet distribution, where larger values mean more smoothing
* (more regularization).
*
* If not set by the user, then docConcentration is set automatically. If set to
* singleton vector [alpha], then alpha is replicated to a vector of length k in fitting.
* Otherwise, the [[docConcentration]] vector must be length k.
* (default = automatic)
*
* Optimizer-specific parameter settings:
* - EM
* - Currently only supports symmetric distributions, so all values in the vector should be
* the same.
* - Values should be > 1.0
* - default = uniformly (50 / k) + 1, where 50/k is common in LDA libraries and +1 follows
* from Asuncion et al. (2009), who recommend a +1 adjustment for EM.
* - Online
* - Values should be >= 0
* - default = uniformly (1.0 / k), following the implementation from
* [[https://github.com/Blei-Lab/onlineldavb]].
* @group param
*/
@Since("1.6.0")
final val docConcentration = new DoubleArrayParam(this, "docConcentration",
"Concentration parameter (commonly named \\"alpha\\") for the prior placed on documents'" +
" distributions over topics (\\"theta\\").", (alpha: Array[Double]) => alpha.forall(_ >= 0.0))
/** @group getParam */
@Since("1.6.0")
def getDocConcentration: Array[Double] = $(docConcentration)
/** Get docConcentration used by spark.mllib LDA */
protected def getOldDocConcentration: Vector = {
if (isSet(docConcentration)) {
Vectors.dense(getDocConcentration)
} else {
Vectors.dense(-1.0)
}
}
/**
* Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics'
* distributions over terms.
*
* This is the parameter to a symmetric Dirichlet distribution.
*
* Note: The topics' distributions over terms are called "beta" in the original LDA paper
* by Blei et al., but are called "phi" in many later papers such as Asuncion et al., 2009.
*
* If not set by the user, then topicConcentration is set automatically.
* (default = automatic)
*
* Optimizer-specific parameter settings:
* - EM
* - Value should be > 1.0
* - default = 0.1 + 1, where 0.1 gives a small amount of smoothing and +1 follows
* Asuncion et al. (2009), who recommend a +1 adjustment for EM.
* - Online
* - Value should be >= 0
* - default = (1.0 / k), following the implementation from
* [[https://github.com/Blei-Lab/onlineldavb]].
* @group param
*/
@Since("1.6.0")
final val topicConcentration = new DoubleParam(this, "topicConcentration",
"Concentration parameter (commonly named \\"beta\\" or \\"eta\\") for the prior placed on topic'" +
" distributions over terms.", ParamValidators.gtEq(0))
/** @group getParam */
@Since("1.6.0")
def getTopicConcentration: Double = $(topicConcentration)
/** Get topicConcentration used by spark.mllib LDA */
protected def getOldTopicConcentration: Double = {
if (isSet(topicConcentration)) {
getTopicConcentration
} else {
-1.0
}
}
/** Supported values for Param [[optimizer]]. */
@Since("1.6.0")
final val supportedOptimizers: Array[String] = Array("online", "em")
/**
* Optimizer or inference algorithm used to estimate the LDA model.
* Currently supported (case-insensitive):
* - "online": Online Variational Bayes (default)
* - "em": Expectation-Maximization
*
* For details, see the following papers:
* - Online LDA:
* Hoffman, Blei and Bach. "Online Learning for Latent Dirichlet Allocation."
* Neural Information Processing Systems, 2010.
* [[http://www.cs.columbia.edu/~blei/papers/HoffmanBleiBach2010b.pdf]]
* - EM:
* Asuncion et al. "On Smoothing and Inference for Topic Models."
* Uncertainty in Artificial Intelligence, 2009.
* [[http://arxiv.org/pdf/1205.2662.pdf]]
*
* @group param
*/
@Since("1.6.0")
final val optimizer = new Param[String](this, "optimizer", "Optimizer or inference" +
" algorithm used to estimate the LDA model. Supported: " + supportedOptimizers.mkString(", "),
(o: String) => ParamValidators.inArray(supportedOptimizers).apply(o.toLowerCase))
/** @group getParam */
@Since("1.6.0")
def getOptimizer: String = $(optimizer)
/**
* Output column with estimates of the topic mixture distribution for each document (often called
* "theta" in the literature). Returns a vector of zeros for an empty document.
*
* This uses a variational approximation following Hoffman et al. (2010), where the approximate
* distribution is called "gamma." Technically, this method returns this approximation "gamma"
* for each document.
*
* @group param
*/
@Since("1.6.0")
final val topicDistributionCol = new Param[String](this, "topicDistributionCol", "Output column" +
" with estimates of the topic mixture distribution for each document (often called \\"theta\\"" +
" in the literature). Returns a vector of zeros for an empty document.")
setDefault(topicDistributionCol -> "topicDistribution")
/** @group getParam */
@Since("1.6.0")
def getTopicDistributionCol: String = $(topicDistributionCol)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* A (positive) learning parameter that downweights early iterations. Larger values make early
* iterations count less.
* This is called "tau0" in the Online LDA paper (Hoffman et al., 2010)
* Default: 1024, following Hoffman et al.
*
* @group expertParam
*/
@Since("1.6.0")
final val learningOffset = new DoubleParam(this, "learningOffset", "(For online optimizer)" +
" A (positive) learning parameter that downweights early iterations. Larger values make early" +
" iterations count less.",
ParamValidators.gt(0))
/** @group expertGetParam */
@Since("1.6.0")
def getLearningOffset: Double = $(learningOffset)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* Learning rate, set as an exponential decay rate.
* This should be between (0.5, 1.0] to guarantee asymptotic convergence.
* This is called "kappa" in the Online LDA paper (Hoffman et al., 2010).
* Default: 0.51, based on Hoffman et al.
*
* @group expertParam
*/
@Since("1.6.0")
final val learningDecay = new DoubleParam(this, "learningDecay", "(For online optimizer)" +
" Learning rate, set as an exponential decay rate. This should be between (0.5, 1.0] to" +
" guarantee asymptotic convergence.", ParamValidators.gt(0))
/** @group expertGetParam */
@Since("1.6.0")
def getLearningDecay: Double = $(learningDecay)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* Fraction of the corpus to be sampled and used in each iteration of mini-batch gradient descent,
* in range (0, 1].
*
* Note that this should be adjusted in synch with [[LDA.maxIter]]
* so the entire corpus is used. Specifically, set both so that
* maxIterations * miniBatchFraction >= 1.
*
* Note: This is the same as the `miniBatchFraction` parameter in
* [[org.apache.spark.mllib.clustering.OnlineLDAOptimizer]].
*
* Default: 0.05, i.e., 5% of total documents.
*
* @group param
*/
@Since("1.6.0")
final val subsamplingRate = new DoubleParam(this, "subsamplingRate", "(For online optimizer)" +
" Fraction of the corpus to be sampled and used in each iteration of mini-batch" +
" gradient descent, in range (0, 1].",
ParamValidators.inRange(0.0, 1.0, lowerInclusive = false, upperInclusive = true))
/** @group getParam */
@Since("1.6.0")
def getSubsamplingRate: Double = $(subsamplingRate)
/**
* For Online optimizer only (currently): [[optimizer]] = "online".
*
* Indicates whether the docConcentration (Dirichlet parameter for
* document-topic distribution) will be optimized during training.
* Setting this to true will make the model more expressive and fit the training data better.
* Default: false
*
* @group expertParam
*/
@Since("1.6.0")
final val optimizeDocConcentration = new BooleanParam(this, "optimizeDocConcentration",
"(For online optimizer only, currently) Indicates whether the docConcentration" +
" (Dirichlet parameter for document-topic distribution) will be optimized during training.")
/** @group expertGetParam */
@Since("1.6.0")
def getOptimizeDocConcentration: Boolean = $(optimizeDocConcentration)
/**
* For EM optimizer only: [[optimizer]] = "em".
*
* If using checkpointing, this indicates whether to keep the last
* checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can
* cause failures if a data partition is lost, so set this bit with care.
* Note that checkpoints will be cleaned up via reference counting, regardless.
*
* See [[DistributedLDAModel.getCheckpointFiles]] for getting remaining checkpoints and
* [[DistributedLDAModel.deleteCheckpointFiles]] for removing remaining checkpoints.
*
* Default: true
*
* @group expertParam
*/
@Since("2.0.0")
final val keepLastCheckpoint = new BooleanParam(this, "keepLastCheckpoint",
"(For EM optimizer) If using checkpointing, this indicates whether to keep the last" +
" checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can" +
" cause failures if a data partition is lost, so set this bit with care.")
/** @group expertGetParam */
@Since("2.0.0")
def getKeepLastCheckpoint: Boolean = $(keepLastCheckpoint)
/**
* Validates and transforms the input schema.
*
* @param schema input schema
* @return output schema
*/
protected def validateAndTransformSchema(schema: StructType): StructType = {
if (isSet(docConcentration)) {
if (getDocConcentration.length != 1) {
require(getDocConcentration.length == getK, s"LDA docConcentration was of length" +
s" ${getDocConcentration.length}, but k = $getK. docConcentration must be an array of" +
s" length either 1 (scalar) or k (num topics).")
}
getOptimizer match {
case "online" =>
require(getDocConcentration.forall(_ >= 0),
"For Online LDA optimizer, docConcentration values must be >= 0. Found values: " +
getDocConcentration.mkString(","))
case "em" =>
          require(getDocConcentration.forall(_ >= 1),
"For EM optimizer, docConcentration values must be >= 1. Found values: " +
getDocConcentration.mkString(","))
}
}
if (isSet(topicConcentration)) {
getOptimizer match {
case "online" =>
require(getTopicConcentration >= 0, s"For Online LDA optimizer, topicConcentration" +
s" must be >= 0. Found value: $getTopicConcentration")
case "em" =>
          require(getTopicConcentration >= 1, s"For EM optimizer, topicConcentration" +
s" must be >= 1. Found value: $getTopicConcentration")
}
}
SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
SchemaUtils.appendColumn(schema, $(topicDistributionCol), new VectorUDT)
}
private[clustering] def getOldOptimizer: OldLDAOptimizer = getOptimizer match {
case "online" =>
new OldOnlineLDAOptimizer()
.setTau0($(learningOffset))
.setKappa($(learningDecay))
.setMiniBatchFraction($(subsamplingRate))
.setOptimizeDocConcentration($(optimizeDocConcentration))
case "em" =>
new OldEMLDAOptimizer()
.setKeepLastCheckpoint($(keepLastCheckpoint))
}
  }
}
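// Editor's note: hedged usage sketch, not part of the original Spark source. It shows the
// typical spark.ml workflow for the LDA estimator defined later in this file; the toy
// two-document corpus and parameter values are illustrative only.
object LDAUsageSketch {
  def run(spark: SparkSession): Unit = {
    import spark.implicits._
    // Each row: (document id, term-count vector of length vocabSize = 3).
    val docs = Seq(
      (0L, Vectors.dense(1.0, 0.0, 3.0)),
      (1L, Vectors.dense(0.0, 2.0, 1.0))
    ).toDF("id", "features")
    val model = new LDA().setK(2).setMaxIter(10).setOptimizer("online").fit(docs)
    model.describeTopics(3).show()  // top terms per topic
    model.transform(docs).show()    // adds the "topicDistribution" column
  }
}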
/**
* :: Experimental ::
* Model fitted by [[LDA]].
*
* @param vocabSize Vocabulary size (number of terms or words in the vocabulary)
* @param sparkSession Used to construct local DataFrames for returning query results
*/
@Since("1.6.0")
@Experimental
sealed abstract class LDAModel private[ml] (
@Since("1.6.0") override val uid: String,
@Since("1.6.0") val vocabSize: Int,
@Since("1.6.0") @transient private[ml] val sparkSession: SparkSession)
extends Model[LDAModel] with LDAParams with Logging with MLWritable {
// NOTE to developers:
// This abstraction should contain all important functionality for basic LDA usage.
// Specializations of this class can contain expert-only functionality.
/**
* Underlying spark.mllib model.
* If this model was produced by Online LDA, then this is the only model representation.
* If this model was produced by EM, then this local representation may be built lazily.
*/
@Since("1.6.0")
protected def oldLocalModel: OldLocalLDAModel
/** Returns underlying spark.mllib model, which may be local or distributed */
@Since("1.6.0")
protected def getModel: OldLDAModel
/**
* The features for LDA should be a [[Vector]] representing the word counts in a document.
* The vector should be of length vocabSize, with counts for each term (word).
*
* @group setParam
*/
@Since("1.6.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("1.6.0")
def setSeed(value: Long): this.type = set(seed, value)
/**
* Transforms the input dataset.
*
* WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
* is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
* This implementation may be changed in the future.
*/
@Since("2.0.0")
override def transform(dataset: Dataset[_]): DataFrame = {
if ($(topicDistributionCol).nonEmpty) {
val t = udf(oldLocalModel.getTopicDistributionMethod(sparkSession.sparkContext))
dataset.withColumn($(topicDistributionCol), t(col($(featuresCol)))).toDF
} else {
logWarning("LDAModel.transform was called without any output columns. Set an output column" +
" such as topicDistributionCol to produce results.")
dataset.toDF
}
}
@Since("1.6.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
/**
* Value for [[docConcentration]] estimated from data.
* If Online LDA was used and [[optimizeDocConcentration]] was set to false,
* then this returns the fixed (given) value for the [[docConcentration]] parameter.
*/
@Since("1.6.0")
def estimatedDocConcentration: Vector = getModel.docConcentration
/**
* Inferred topics, where each topic is represented by a distribution over terms.
* This is a matrix of size vocabSize x k, where each column is a topic.
* No guarantees are given about the ordering of the topics.
*
* WARNING: If this model is actually a [[DistributedLDAModel]] instance produced by
* the Expectation-Maximization ("em") [[optimizer]], then this method could involve
* collecting a large amount of data to the driver (on the order of vocabSize x k).
*/
@Since("1.6.0")
def topicsMatrix: Matrix = oldLocalModel.topicsMatrix
/** Indicates whether this instance is of type [[DistributedLDAModel]] */
@Since("1.6.0")
def isDistributed: Boolean
/**
* Calculates a lower bound on the log likelihood of the entire corpus.
*
* See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
*
* WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
* is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
* This implementation may be changed in the future.
*
* @param dataset test corpus to use for calculating log likelihood
* @return variational lower bound on the log likelihood of the entire corpus
*/
@Since("2.0.0")
def logLikelihood(dataset: Dataset[_]): Double = {
val oldDataset = LDA.getOldDataset(dataset, $(featuresCol))
oldLocalModel.logLikelihood(oldDataset)
}
/**
   * Calculate an upper bound on perplexity. (Lower is better.)
* See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
*
* WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
* is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
* This implementation may be changed in the future.
*
* @param dataset test corpus to use for calculating perplexity
* @return Variational upper bound on log perplexity per token.
*/
@Since("2.0.0")
def logPerplexity(dataset: Dataset[_]): Double = {
val oldDataset = LDA.getOldDataset(dataset, $(featuresCol))
oldLocalModel.logPerplexity(oldDataset)
}
/**
* Return the topics described by their top-weighted terms.
*
* @param maxTermsPerTopic Maximum number of terms to collect for each topic.
* Default value of 10.
* @return Local DataFrame with one topic per Row, with columns:
* - "topic": IntegerType: topic index
* - "termIndices": ArrayType(IntegerType): term indices, sorted in order of decreasing
* term importance
* - "termWeights": ArrayType(DoubleType): corresponding sorted term weights
*/
@Since("1.6.0")
def describeTopics(maxTermsPerTopic: Int): DataFrame = {
val topics = getModel.describeTopics(maxTermsPerTopic).zipWithIndex.map {
case ((termIndices, termWeights), topic) =>
(topic, termIndices.toSeq, termWeights.toSeq)
}
sparkSession.createDataFrame(topics).toDF("topic", "termIndices", "termWeights")
}
@Since("1.6.0")
def describeTopics(): DataFrame = describeTopics(10)
}
/**
* :: Experimental ::
*
* Local (non-distributed) model fitted by [[LDA]].
*
* This model stores the inferred topics only; it does not store info about the training dataset.
*/
@Since("1.6.0")
@Experimental
class LocalLDAModel private[ml] (
uid: String,
vocabSize: Int,
@Since("1.6.0") override protected val oldLocalModel: OldLocalLDAModel,
sparkSession: SparkSession)
extends LDAModel(uid, vocabSize, sparkSession) {
@Since("1.6.0")
override def copy(extra: ParamMap): LocalLDAModel = {
val copied = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession)
copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel]
}
override protected def getModel: OldLDAModel = oldLocalModel
@Since("1.6.0")
override def isDistributed: Boolean = false
@Since("1.6.0")
override def write: MLWriter = new LocalLDAModel.LocalLDAModelWriter(this)
}
@Since("1.6.0")
object LocalLDAModel extends MLReadable[LocalLDAModel] {
private[LocalLDAModel]
class LocalLDAModelWriter(instance: LocalLDAModel) extends MLWriter {
private case class Data(
vocabSize: Int,
topicsMatrix: Matrix,
docConcentration: Vector,
topicConcentration: Double,
gammaShape: Double)
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val oldModel = instance.oldLocalModel
val data = Data(instance.vocabSize, oldModel.topicsMatrix, oldModel.docConcentration,
oldModel.topicConcentration, oldModel.gammaShape)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class LocalLDAModelReader extends MLReader[LocalLDAModel] {
private val className = classOf[LocalLDAModel].getName
override def load(path: String): LocalLDAModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.parquet(dataPath)
.select("vocabSize", "topicsMatrix", "docConcentration", "topicConcentration",
"gammaShape")
.head()
val vocabSize = data.getAs[Int](0)
val topicsMatrix = data.getAs[Matrix](1)
val docConcentration = data.getAs[Vector](2)
val topicConcentration = data.getAs[Double](3)
val gammaShape = data.getAs[Double](4)
val oldModel = new OldLocalLDAModel(topicsMatrix, docConcentration, topicConcentration,
gammaShape)
val model = new LocalLDAModel(metadata.uid, vocabSize, oldModel, sparkSession)
DefaultParamsReader.getAndSetParams(model, metadata)
model
}
}
@Since("1.6.0")
override def read: MLReader[LocalLDAModel] = new LocalLDAModelReader
@Since("1.6.0")
override def load(path: String): LocalLDAModel = super.load(path)
}
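// The writer/reader pair above follows the standard spark.ml persistence pattern. Below is a minimal,
// hypothetical usage sketch (not part of Spark's API): `countVectors` stands for any DataFrame with a
// word-count "features" column, and the output path is arbitrary. With the default "online" optimizer
// the fitted model is a LocalLDAModel, so the cast is safe in this setting.
object LocalLDAModelPersistenceExample {
  def roundTrip(countVectors: org.apache.spark.sql.DataFrame): LocalLDAModel = {
    val model = new LDA().setK(5).setMaxIter(10).fit(countVectors).asInstanceOf[LocalLDAModel]
    model.write.overwrite().save("/tmp/lda-model")   // serialized by LocalLDAModelWriter above
    LocalLDAModel.load("/tmp/lda-model")             // deserialized by LocalLDAModelReader above
  }
}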
/**
* :: Experimental ::
*
* Distributed model fitted by [[LDA]].
* This type of model is currently only produced by Expectation-Maximization (EM).
*
* This model stores the inferred topics, the full training dataset, and the topic distribution
* for each training document.
*
 * @param oldLocalModelOption Used to implement [[oldLocalModel]] as a lazy val while keeping
 *                            [[copy()]] cheap.
*/
@Since("1.6.0")
@Experimental
class DistributedLDAModel private[ml] (
uid: String,
vocabSize: Int,
private val oldDistributedModel: OldDistributedLDAModel,
sparkSession: SparkSession,
private var oldLocalModelOption: Option[OldLocalLDAModel])
extends LDAModel(uid, vocabSize, sparkSession) {
override protected def oldLocalModel: OldLocalLDAModel = {
if (oldLocalModelOption.isEmpty) {
oldLocalModelOption = Some(oldDistributedModel.toLocal)
}
oldLocalModelOption.get
}
override protected def getModel: OldLDAModel = oldDistributedModel
/**
* Convert this distributed model to a local representation. This discards info about the
* training dataset.
*
* WARNING: This involves collecting a large [[topicsMatrix]] to the driver.
*/
@Since("1.6.0")
def toLocal: LocalLDAModel = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession)
@Since("1.6.0")
override def copy(extra: ParamMap): DistributedLDAModel = {
val copied = new DistributedLDAModel(
uid, vocabSize, oldDistributedModel, sparkSession, oldLocalModelOption)
copyValues(copied, extra).setParent(parent)
copied
}
@Since("1.6.0")
override def isDistributed: Boolean = true
/**
* Log likelihood of the observed tokens in the training set,
* given the current parameter estimates:
* log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)
*
* Notes:
* - This excludes the prior; for that, use [[logPrior]].
* - Even with [[logPrior]], this is NOT the same as the data log likelihood given the
* hyperparameters.
* - This is computed from the topic distributions computed during training. If you call
* [[logLikelihood()]] on the same training dataset, the topic distributions will be computed
* again, possibly giving different results.
*/
@Since("1.6.0")
lazy val trainingLogLikelihood: Double = oldDistributedModel.logLikelihood
/**
* Log probability of the current parameter estimate:
* log P(topics, topic distributions for docs | Dirichlet hyperparameters)
*/
@Since("1.6.0")
lazy val logPrior: Double = oldDistributedModel.logPrior
private var _checkpointFiles: Array[String] = oldDistributedModel.checkpointFiles
/**
* If using checkpointing and [[LDA.keepLastCheckpoint]] is set to true, then there may be
* saved checkpoint files. This method is provided so that users can manage those files.
*
* Note that removing the checkpoints can cause failures if a partition is lost and is needed
* by certain [[DistributedLDAModel]] methods. Reference counting will clean up the checkpoints
* when this model and derivative data go out of scope.
*
* @return Checkpoint files from training
*/
@DeveloperApi
@Since("2.0.0")
def getCheckpointFiles: Array[String] = _checkpointFiles
/**
* Remove any remaining checkpoint files from training.
*
* @see [[getCheckpointFiles]]
*/
@DeveloperApi
@Since("2.0.0")
def deleteCheckpointFiles(): Unit = {
val fs = FileSystem.get(sparkSession.sparkContext.hadoopConfiguration)
_checkpointFiles.foreach(PeriodicCheckpointer.removeCheckpointFile(_, fs))
_checkpointFiles = Array.empty[String]
}
@Since("1.6.0")
override def write: MLWriter = new DistributedLDAModel.DistributedWriter(this)
}
@Since("1.6.0")
object DistributedLDAModel extends MLReadable[DistributedLDAModel] {
private[DistributedLDAModel]
class DistributedWriter(instance: DistributedLDAModel) extends MLWriter {
override protected def saveImpl(path: String): Unit = {
DefaultParamsWriter.saveMetadata(instance, path, sc)
val modelPath = new Path(path, "oldModel").toString
instance.oldDistributedModel.save(sc, modelPath)
}
}
private class DistributedLDAModelReader extends MLReader[DistributedLDAModel] {
private val className = classOf[DistributedLDAModel].getName
override def load(path: String): DistributedLDAModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val modelPath = new Path(path, "oldModel").toString
val oldModel = OldDistributedLDAModel.load(sc, modelPath)
val model = new DistributedLDAModel(
metadata.uid, oldModel.vocabSize, oldModel, sparkSession, None)
DefaultParamsReader.getAndSetParams(model, metadata)
model
}
}
@Since("1.6.0")
override def read: MLReader[DistributedLDAModel] = new DistributedLDAModelReader
@Since("1.6.0")
override def load(path: String): DistributedLDAModel = super.load(path)
}
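// A hedged sketch (hypothetical helper, not part of Spark): it ties together the two maintenance steps
// documented above for models produced by the "em" optimizer -- converting to a local model, which
// collects the topicsMatrix to the driver, and then removing any checkpoint files left from training.
object DistributedLDAModelMaintenanceExample {
  def compactAndClean(model: DistributedLDAModel): LocalLDAModel = {
    val local = model.toLocal        // keeps the topics, discards per-document training info
    model.deleteCheckpointFiles()    // safe once no further DistributedLDAModel methods are needed
    local
  }
}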
/**
* :: Experimental ::
*
* Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
*
* Terminology:
* - "term" = "word": an element of the vocabulary
* - "token": instance of a term appearing in a document
* - "topic": multinomial distribution over terms representing some concept
* - "document": one piece of text, corresponding to one row in the input data
*
* Original LDA paper (journal version):
* Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003.
*
* Input data (featuresCol):
* LDA is given a collection of documents as input data, via the featuresCol parameter.
* Each document is specified as a [[Vector]] of length vocabSize, where each entry is the
* count for the corresponding term (word) in the document. Feature transformers such as
* [[org.apache.spark.ml.feature.Tokenizer]] and [[org.apache.spark.ml.feature.CountVectorizer]]
* can be useful for converting text to word count vectors.
*
* @see [[http://en.wikipedia.org/wiki/Latent_Dirichlet_allocation Latent Dirichlet allocation
* (Wikipedia)]]
*/
@Since("1.6.0")
@Experimental
class LDA @Since("1.6.0") (
@Since("1.6.0") override val uid: String)
extends Estimator[LDAModel] with LDAParams with DefaultParamsWritable {
@Since("1.6.0")
def this() = this(Identifiable.randomUID("lda"))
setDefault(maxIter -> 20, k -> 10, optimizer -> "online", checkpointInterval -> 10,
learningOffset -> 1024, learningDecay -> 0.51, subsamplingRate -> 0.05,
optimizeDocConcentration -> true, keepLastCheckpoint -> true)
/**
* The features for LDA should be a [[Vector]] representing the word counts in a document.
* The vector should be of length vocabSize, with counts for each term (word).
*
* @group setParam
*/
@Since("1.6.0")
def setFeaturesCol(value: String): this.type = set(featuresCol, value)
/** @group setParam */
@Since("1.6.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
/** @group setParam */
@Since("1.6.0")
def setSeed(value: Long): this.type = set(seed, value)
/** @group setParam */
@Since("1.6.0")
def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
/** @group setParam */
@Since("1.6.0")
def setK(value: Int): this.type = set(k, value)
/** @group setParam */
@Since("1.6.0")
def setDocConcentration(value: Array[Double]): this.type = set(docConcentration, value)
/** @group setParam */
@Since("1.6.0")
def setDocConcentration(value: Double): this.type = set(docConcentration, Array(value))
/** @group setParam */
@Since("1.6.0")
def setTopicConcentration(value: Double): this.type = set(topicConcentration, value)
/** @group setParam */
@Since("1.6.0")
def setOptimizer(value: String): this.type = set(optimizer, value)
/** @group setParam */
@Since("1.6.0")
def setTopicDistributionCol(value: String): this.type = set(topicDistributionCol, value)
/** @group expertSetParam */
@Since("1.6.0")
def setLearningOffset(value: Double): this.type = set(learningOffset, value)
/** @group expertSetParam */
@Since("1.6.0")
def setLearningDecay(value: Double): this.type = set(learningDecay, value)
/** @group setParam */
@Since("1.6.0")
def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
/** @group expertSetParam */
@Since("1.6.0")
def setOptimizeDocConcentration(value: Boolean): this.type = set(optimizeDocConcentration, value)
/** @group expertSetParam */
@Since("2.0.0")
def setKeepLastCheckpoint(value: Boolean): this.type = set(keepLastCheckpoint, value)
@Since("1.6.0")
override def copy(extra: ParamMap): LDA = defaultCopy(extra)
@Since("2.0.0")
override def fit(dataset: Dataset[_]): LDAModel = {
transformSchema(dataset.schema, logging = true)
val oldLDA = new OldLDA()
.setK($(k))
.setDocConcentration(getOldDocConcentration)
.setTopicConcentration(getOldTopicConcentration)
.setMaxIterations($(maxIter))
.setSeed($(seed))
.setCheckpointInterval($(checkpointInterval))
.setOptimizer(getOldOptimizer)
// TODO: persist here, or in old LDA?
val oldData = LDA.getOldDataset(dataset, $(featuresCol))
val oldModel = oldLDA.run(oldData)
val newModel = oldModel match {
case m: OldLocalLDAModel =>
new LocalLDAModel(uid, m.vocabSize, m, dataset.sparkSession)
case m: OldDistributedLDAModel =>
new DistributedLDAModel(uid, m.vocabSize, m, dataset.sparkSession, None)
}
copyValues(newModel).setParent(this)
}
@Since("1.6.0")
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
}
private[clustering] object LDA extends DefaultParamsReadable[LDA] {
/** Get dataset for spark.mllib LDA */
def getOldDataset(dataset: Dataset[_], featuresCol: String): RDD[(Long, Vector)] = {
dataset
.withColumn("docId", monotonicallyIncreasingId())
.select("docId", featuresCol)
.rdd
.map { case Row(docId: Long, features: Vector) =>
(docId, features)
}
}
@Since("1.6.0")
override def load(path: String): LDA = super.load(path)
}
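// End-to-end usage sketch for the estimator above, illustrating the input format described in the
// class comment. Everything here is illustrative: the column names, the `docs` DataFrame (assumed to
// carry a string column "text") and the chosen parameter values are assumptions, not part of this file.
object LDAUsageExample {
  import org.apache.spark.ml.feature.{CountVectorizer, Tokenizer}
  import org.apache.spark.sql.DataFrame

  def run(docs: DataFrame): DataFrame = {
    val tokens = new Tokenizer().setInputCol("text").setOutputCol("words").transform(docs)
    val counts = new CountVectorizer().setInputCol("words").setOutputCol("features")
      .fit(tokens).transform(tokens)
    val model = new LDA().setK(10).setMaxIter(20).fit(counts)     // word counts -> topics
    println(s"upper bound on log perplexity: ${model.logPerplexity(counts)}")
    model.describeTopics(5).show(truncate = false)                // top 5 terms per topic
    model.transform(counts)                                       // adds the topicDistribution column
  }
}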
|
xieguobin/Spark_2.0.0_cn1
|
ml/clustering/LDA.scala
|
Scala
|
apache-2.0
| 33,346 |
import scala.io.Source
import java.io.ByteArrayInputStream
object Test extends dotty.runtime.LegacyApp {
val txt = "abcdef"
val in = new ByteArrayInputStream(txt.getBytes());
val source = Source.fromInputStream(in);
println(source.toString) // forces the BufferedSource to look at the head of the input
println(source.mkString) // used to return "bcdef" ...
}
|
yusuke2255/dotty
|
tests/run/t8690.scala
|
Scala
|
bsd-3-clause
| 373 |
/*
* Copyright 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package influxdbreporter.core.collectors
import com.codahale.metrics.Gauge
import influxdbreporter.core.{Field, Tag}
import GaugeCollector.ValueField
sealed class GaugeCollector[T](staticTags: List[Tag] = Nil, fieldFM: Field => Option[Field] = t => Some(t))
extends BaseMetricCollector[Gauge[T], GaugeCollector[T]](staticTags, fieldFM) {
override protected def measurementName: String = "gauge"
override protected def fields(gauge: Gauge[T]): List[Field] = List(Field(ValueField, gauge.getValue))
override def withFieldMapper(mapper: (Field) => Option[Field]): GaugeCollector[T] =
new GaugeCollector[T](staticTags, mapper)
override def withStaticTags(tags: List[Tag]): GaugeCollector[T] =
new GaugeCollector[T](tags, fieldFM)
}
object GaugeCollector {
val ValueField = "value"
def apply[T](): GaugeCollector[T] = new GaugeCollector[T]()
}
|
TouK/influxdb-reporter
|
core/src/main/scala/influxdbreporter/core/collectors/GaugeCollector.scala
|
Scala
|
apache-2.0
| 1,452 |
package com.geeksville.json
import org.json4s.CustomSerializer
import org.json4s.JsonAST._
import java.util.Date
import com.geeksville.util.DateTools
object DateSerializer extends CustomSerializer[Date](format => (
{
case JString(s) =>
DateTools.fromISO8601(s)
},
{
case x: Date =>
JString(DateTools.toISO8601(x))
}))
|
dronekit/dronekit-server
|
src/main/scala/com/geeksville/json/DateSerializer.scala
|
Scala
|
gpl-3.0
| 347 |
package nars.language
import java.util._
import nars.io.Symbols
import nars.storage.Memory
import ImageExt._
import scala.reflect.{BeanProperty, BooleanBeanProperty}
//remove if not needed
import scala.collection.JavaConversions._
import CompoundTerm._
object ImageExt {
/**
* Try to make a new ImageExt. Called by StringParser.
* @return the Term generated from the arguments
* @param argList The list of components
* @param memory Reference to the memory
*/
def make(argList: ArrayList[Term], memory: Memory): Term = {
if (argList.size < 2) {
return null
}
val relation = argList.get(0)
val argument = new ArrayList[Term]()
var index = 0
for (j <- 1 until argList.size) {
if (argList.get(j).getName.charAt(0) == Symbols.IMAGE_PLACE_HOLDER) {
index = j - 1
argument.add(relation)
} else {
argument.add(argList.get(j))
}
}
make(argument, index.toShort, memory)
}
/**
* Try to make an Image from a Product and a relation. Called by the inference rules.
* @param product The product
* @param relation The relation
* @param index The index of the place-holder
* @return A compound generated or a term it reduced to
*/
def make(product: Product,
relation: Term,
index: Short,
memory: Memory): Term = {
if (relation.isInstanceOf[Product]) {
val p2 = relation.asInstanceOf[Product]
if ((product.size == 2) && (p2.size == 2)) {
if ((index == 0) && product.componentAt(1) == p2.componentAt(1)) {
return p2.componentAt(0)
}
if ((index == 1) && product.componentAt(0) == p2.componentAt(0)) {
return p2.componentAt(1)
}
}
}
val argument = product.cloneComponents()
argument.set(index, relation)
make(argument, index, memory)
}
/**
* Try to make an Image from an existing Image and a component. Called by the inference rules.
* @param oldImage The existing Image
* @param component The component to be added into the component list
* @param index The index of the place-holder in the new Image
* @return A compound generated or a term it reduced to
*/
def make(oldImage: ImageExt,
component: Term,
index: Short,
memory: Memory): Term = {
val argList = oldImage.cloneComponents()
val oldIndex = oldImage.getRelationIndex
val relation = argList.get(oldIndex)
argList.set(oldIndex, component)
argList.set(index, relation)
make(argList, index, memory)
}
/**
* Try to make a new compound from a set of components. Called by the public make methods.
* @param argument The argument list
* @param index The index of the place-holder in the new Image
* @return the Term generated from the arguments
*/
def make(argument: ArrayList[Term], index: Short, memory: Memory): Term = {
val name = makeImageName(Symbols.IMAGE_EXT_OPERATOR, argument, index)
val t = memory.nameToListedTerm(name)
if ((t != null)) t else new ImageExt(name, argument, index)
}
}
/**
* An extension image.
* <p>
 * B --> (/,P,A,_) iff (*,A,B) --> P
* <p>
* Internally, it is actually (/,A,P)_1, with an index.
*/
class ImageExt private (n: String, arg: ArrayList[Term], @BeanProperty var relationIndex: Short)
extends CompoundTerm(n, arg) {
/**
* Constructor with full values, called by clone
   * @param name The name of the term
   * @param components Component list
   * @param isConstant Whether the term is constant (contains no open variable)
   * @param complexity Syntactic complexity of the compound
   * @param index The index of relation in the component list
*/
private def this( name: String,
components : ArrayList[Term],
isConstant : Boolean,
complexity: Short,
index: Short) {
this(name, components, index)
this.complexity = complexity
this.isConstant_ = isConstant
// super(n, cs, con, complexity)
}
/**
* Clone an object
* @return A new object, to be casted into an ImageExt
*/
override def clone(): AnyRef = {
new ImageExt(name, cloneList(components).asInstanceOf[ArrayList[Term]], isConstant_, complexity, relationIndex)
}
/**
* Get the relation term in the Image
* @return The term representing a relation
*/
def getRelation(): Term = components.get(relationIndex)
/**
* Get the other term in the Image
* @return The term related
*/
def getTheOtherComponent(): Term = {
if (components.size != 2) {
return null
}
if ((relationIndex == 0)) components.get(1) else components.get(0)
}
/**
* override the default in making the name of the current term from existing fields
* @return the name of the term
*/
override def makeName(): String = {
makeImageName(Symbols.IMAGE_EXT_OPERATOR, components, relationIndex)
}
/**
* get the operator of the term.
* @return the operator of the term
*/
def operator(): String = Symbols.IMAGE_EXT_OPERATOR
}
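// Worked instance of the equivalence in the class comment above (substituting A = acid, B = base,
// P = reaction, purely for illustration): if (*,acid,base) --> reaction holds, then the extensional
// image lets us write base --> (/,reaction,acid,_); relationIndex records the component slot where
// the place-holder stood, now occupied by the relation term (see the make methods above).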
|
automenta/opennars
|
nars_scala/src/main/scala/nars/language/ImageExt.scala
|
Scala
|
gpl-2.0
| 4,965 |
package fpscala.c03
import fpscala.datastructures.{Cons, List => FpList, Nil => FpNil}
import scala.annotation.tailrec
object Exercise05 {
@tailrec
def dropWhile[A](l: FpList[A], p: A => Boolean): FpList[A] = l match {
case FpNil => FpNil
    case Cons(x, _) if !p(x) => l
case Cons(_, xs) => dropWhile(xs, p)
}
}
|
willtaylor/fpscala
|
src/main/scala/fpscala/c03/Exercise05.scala
|
Scala
|
gpl-3.0
| 338 |
package chat.tox.antox.callbacks
import android.content.Context
class AntoxOnGroupJoinRejectedCallback(private var ctx: Context) /* extends GroupJoinRejectedCallback */ {
private var reconnecting = false
/* override def groupJoinRejected(groupNumber: Int, reason: ToxGroupJoinRejected): Unit = {
if (reason == ToxGroupJoinRejected.NICK_TAKEN) {
if (ToxSingleton.tox.getGroupSelfName(groupNumber).length < Constants.MAX_NAME_LENGTH) {
//FIXME
//ToxSingleton.tox.setGroupSelfName(groupNumber, PreferenceManager
// .getDefaultSharedPreferences(ctx)
// .getString("nickname", ""))
if (!reconnecting) {
new Thread(new Runnable {
override def run(): Unit = {
reconnecting = true
Thread.sleep(10000)
ToxSingleton.tox.reconnectGroup(groupNumber)
reconnecting = false
}
}).start()
}
}
} else {
println("Tox Group Join Rejected: " + reason)
}
} */
}
|
gale320/Antox
|
app/src/main/scala/chat/tox/antox/callbacks/AntoxOnGroupJoinRejectedCallback.scala
|
Scala
|
gpl-3.0
| 1,028 |
package org.mentha.tools.archimate.model
import org.mentha.tools.archimate.model.view._
import scala.reflect.ClassTag
/**
* http://pubs.opengroup.org/architecture/archimate3-doc/chap02.html
* A collection of concepts in the context of the ArchiMate language structure.
* Note: The top-level language structure is defined in detail in Section 3.2.
* For a general definition of model, see the TOGAF framework - http://pubs.opengroup.org/architecture/archimate3-doc/front.html#ref4.
* @see
*/
class Model extends IdentifiedArchimateObject with VersionedArchimateObject with NamedArchimateObject {
private[model] val _concepts: Storage[Concept] = Storage.buildStorage
private[model] val _views: Storage[View] = Storage.buildStorage
def concept[T <: Concept](id: Identifiable.ID)(implicit tp: ClassTag[T]): T = _concepts[T](id)
def view(id: Identifiable.ID): View = _views[View](id)
def add(id: Identifiable.ID) = new {
def apply[T <: Concept](concept: T): T = _concepts.store(concept, id)
def apply(view: View): View = _views.store(view, id)
}
def add[T <: Concept](concept: T): T = _concepts.store(concept)
def add(view: View): View = _views.store(view)
def concepts[X <: Concept](implicit tp: ClassTag[X]): Iterable[X] = _concepts.select[X](tp)
def nodes: Iterable[NodeConcept] = concepts[NodeConcept]
def edges: Iterable[EdgeConcept] = concepts[EdgeConcept]
def views: Iterable[View] = _views.select[View]
def findView(path: List[String], name: String): Option[View] =
views.collectFirst { case v if v.path == path && v.name == name => v }
def <<(path: List[String]) = new {
def <<(name: String)(viewpoint: ViewPoint = null): View = findView(path, name)
.map {
case v if (null == viewpoint) || (v.viewpoint == viewpoint) => v
case v => throw new IllegalStateException(s"View @ `${name}` has wrong viewpoint: ${v.viewpoint}")
}
.getOrElse { add(new View(viewpoint) withName name) }
}
}
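// A usage sketch relying only on the members defined above and this file's imports; the folder path
// and view name are arbitrary examples. It mirrors what the `<<` DSL does: fetch an existing view by
// path and name, or register a fresh one (viewpoint left null here, as the DSL's default does).
object ModelUsageExample {
  def ensureView(model: Model, path: List[String], name: String): View =
    model.findView(path, name).getOrElse {
      model.add(new View(null) withName name)
    }
}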
|
zhuj/mentha-web-archimate
|
archimate-model/src/main/scala/org/mentha/tools/archimate/model/Model.scala
|
Scala
|
mit
| 1,989 |
/**********************************************************************************************************************
* *
* Copyright (c) 2013, Reactific Software LLC. All Rights Reserved. *
* *
* Scrupal is free software: you can redistribute it and/or modify it under the terms *
* of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, *
* or (at your option) any later version. *
* *
* Scrupal is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more *
* details. *
* *
* You should have received a copy of the GNU General Public License along with Scrupal. If not, see either: *
* http://www.gnu.org/licenses or http://opensource.org/licenses/GPL-3.0. *
**********************************************************************************************************************/
package com.reactific.slickery
import java.time.Instant
import com.typesafe.config.{Config, ConfigFactory}
import slick.backend.DatabaseConfig
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
/** Abstract Common Database Schema Helper
* This abstract base class provides the common definitions needed to support Slickery based database schemas. It is
* expected that users will
* * This trait allows use to define database components which are simply collections of related tables and the
* various query methods on those tables to provide access to them. Since Components contain Tables and Scrupal requires
* all database entities to have a particular shape, that shape is enforced in the EntityTable class. Note that
* Component extends Sketch which is mixed in to other components but resolved by the Schema class.
*
* @param schemaNamePrototype The name of the schema (will be used in the database)
  * @param configPath The path in the configuration where the database information can be found
* @param config The configuration object in which configPath is to be found
* @param ec An implicit execution context in which futures can be executed
* @param classTag The implicit classtag for the DRVR class
  * @tparam DRVR The kind of SlickeryDriver to use for the schema
*/
abstract class Schema[DRVR <: SlickeryDriver](
schemaNamePrototype: String,
configPath : String,
config : Config = ConfigFactory.load)
(implicit ec: ExecutionContext, classTag : ClassTag[DRVR]) extends SlickeryComponent {
/** The Kind of SupportedDB.
* We look this up first because SupportedDB.forConfig can throw if the configuration is invalid or it is not for
* one of the supported databases. This should be the first thing constructed so we fail fast if there is an issue.
*/
val dbKind : SupportedDB[_] = SupportedDB.forConfig(configPath, config) match {
case Some(sdb) ⇒ sdb
case None ⇒ toss(s"Configuration path '${configPath}' did not yield a viable configuration for a supported DB.")
}
val jdbcDriver : Class[_] = dbKind.jdbcDriverClass
/** Schema Name
* Converts the schemaNamePrototype argument into something that a database might find friendly
*/
val schemaName = schemaNamePrototype.replaceAll("[ $!@#%^&*~`]", "_")
/** The DatabaseConfig.
* This extracts the DatabaseConfig information from the configuration object provided.
*/
val dbConfig = DatabaseConfig.forConfig[DRVR](configPath, config)
/** The SlickeryDriver To use
* This driver is instantiated by DatabaseConfig during processing of the configuration.
*/
val driver = dbConfig.driver
/** The database we will interact with */
val db = dbConfig.db
import driver.api._
import driver.{DriverAction,StreamingDriverAction,ReturningInsertActionComposer,SchemaDescription}
import slick.jdbc.ResultSetAction
import slick.jdbc.meta.MTable
import slick.profile.SqlProfile.ColumnOption.{Nullable, NotNull}
/** The Table Schemas.
* This is the fundamental definition of the Schema, it is composed of a map of table names to the SchemaDescription
* object (generated by Slick's TableQuery.schema method). This is used to validate, create and drop the schema.
*
* @return A Map of table names to SchemaDescriptions
*/
def schemas : Map[String,SchemaDescription]
protected def validateExistingTables( tables: Seq[MTable] ) : Seq[Throwable] = {
val theSchemas = schemas
def checkValidity(mtable: MTable) : Option[Throwable] = {
mtable.name.schema match {
case Some(sName) if sName != schemaName ⇒
Some(mkThrowable(s"Table $sName.${mtable.name.name} is not part of schema $schemaName"))
case Some(sName) ⇒
if (!theSchemas.contains(mtable.name.name)) {
Some(mkThrowable(s"Spurious table ${mtable.name.name} found in schema $schemaName"))
} else {
None
}
case None ⇒
Some(mkThrowable(s"Table ${mtable.name.name} has no schema name"))
}
}
def checkSchema(name: String, sd: SchemaDescription) : Option[Throwable] = {
if (tables.exists { mtable ⇒ mtable.name.name == name})
None
else
Some(mkThrowable(s"Required table $name is missing"))
}
val tableChecks = for (mtable ← tables; error <- checkValidity(mtable)) yield { error }
val schemaChecks = for ((name,sd) ← theSchemas; error ← checkSchema(name,sd)) yield { error }
tableChecks.toSeq ++ schemaChecks
}
final def schemaNames() : Future[Seq[String]] = {
db.run { ResultSetAction[String](_.metaData.getSchemas() ) { r => r.nextString() } }
}
final def metaTables() : Future[Seq[MTable]] = {
db.run {
MTable.getTables(None, Some(schemaName), None, None)
}
}
final def validate() : Future[Seq[Throwable]] = {
metaTables().map { tables : Seq[MTable] =>
validateExistingTables(tables)
}
}
private val nullSD : SchemaDescription = null
final def create() : Future[Unit] = {
db.run {
driver.createSchema(schemaName).flatMap { u ⇒
MTable.getTables(None, Some(schemaName), None, None).flatMap[Unit,NoStream,Effect.Schema] { tables ⇒
log.debug(s"Existing Tables: ${tables.map{_.name}}")
val statements = for (
(name, sd) ← schemas if !tables.exists { mt ⇒ mt.name.name == name }
) yield sd
log.debug(s"Schema creation statements: $statements")
val ddl : SchemaDescription = statements.foldLeft(nullSD) {
case (accum, statement) ⇒
if (accum == null)
statement
else
accum ++ statement
}
if (ddl != null) {
driver.createSchemaActionExtensionMethods(ddl).create
} else {
sqlu"".map { i ⇒ () }
}
}
}
}
}
final def drop() : Future[Unit] = {
val ddl : SchemaDescription = schemas.values.foldLeft(nullSD) {
case (accum, statement) ⇒
if (accum == null)
statement
else
accum ++ statement
}
if (ddl != null) {
db.run {
driver.createSchemaActionExtensionMethods(ddl).drop.flatMap { unit ⇒
driver.dropSchema(schemaName)
}
}
} else {
log.warn(s"No DDL Statements In Schema $schemaName To Drop")
db.run { driver.dropSchema(schemaName) }
}
}
trait CRUDQueries[R,ID, T<:TableRow[R]] { self : TableQuery[T] =>
type CreateResult = DBIOAction[ReturningInsertActionComposer[T,Long]#SingleInsertResult,NoStream,Effect.Write]
type RetrieveResult =
StreamingDriverAction[Seq[T#TableElementType],R,Effect.Read]#ResultAction[Option[R],NoStream,Effect.Read]
type UpdateResult = DBIOAction[Int,NoStream,Effect.Write]
type DeleteResult = DBIOAction[Int,NoStream,Effect.Write]
def create(entity: R) : CreateResult
def retrieve(id : ID) : RetrieveResult
def update(entity: R) : UpdateResult
def delete(id: ID) : DeleteResult
}
type OIDType = Storable.OIDType
abstract class TableRow[S](tag: Tag, tableName: String) extends Table[S](tag, Some(schemaName), tableName) {
def fullName : String = {
val schemaPrefix = { schemaName.map { n => n + "." } getOrElse "" }
s"$schemaPrefix$tableName"
}
protected def nm(columnName: String) : String = s"${fullName}_$columnName"
protected def fkn(foreignTableName: String ) : String = nm( foreignTableName + "_fkey")
protected def idx(name: String) : String = nm(name + "_idx")
}
abstract class StorableRow[S <: Storable](tag: Tag, tableName: String) extends TableRow[S](tag, tableName) {
def oid = column[OIDType](nm("oid"), O.PrimaryKey, O.AutoInc, NotNull)
}
abstract class StorableQuery[S <: Storable, T <:StorableRow[S]](cons : Tag => T)
extends TableQuery[T](cons) with CRUDQueries[S,OIDType,T] {
val query = this
lazy val byIdQuery = { this.findBy(_.oid) }
def byId(idToFind : OIDType) = {
byIdQuery(idToFind).result.headOption
}
override def create(entity: S) : CreateResult = {
(this returning this.map(_.oid)) += entity
}
override def retrieve(id: OIDType) : RetrieveResult = {
byId(id)
}
override def update(entity: S) : UpdateResult = {
byIdQuery(entity.getId).update(entity)
}
override def delete(oid: OIDType) : DeleteResult = {
byIdQuery(oid).delete
}
def runCreate(entity: S) : Future[OIDType] = db.run { this.create(entity) }
def runRetrieve(oid : OIDType): Future[Option[S]] = db.run { this.retrieve(oid) }
def runUpdate(entity: S) : Future[Int] = db.run { this.update(entity) }
def runDelete(oid: OIDType) : Future[Int] = db.run { this.delete(oid) }
}
implicit lazy val instantMapper = driver.instantMapper
implicit lazy val regexMapper = driver.regexMapper
implicit lazy val durationMapper = driver.durationMapper
implicit lazy val symbolMapper = driver.symbolMapper
implicit lazy val jsValueMapper = driver.jsValueMapper
implicit lazy val configMapper = driver.configMapper
trait CreatableRow[S <: Creatable] extends StorableRow[S] {
def created = column[Instant](nm("created"), NotNull)
def created_index = index(idx("created"), created, unique = false)
}
trait CreatableQuery[S <: Creatable, T <:CreatableRow[S]] extends StorableQuery[S,T] {
lazy val createdSinceQuery = Compiled { since : Rep[Instant] => this.filter(_.created >= since) }
def createdSince(since: Instant) = createdSinceQuery(since).result
}
trait ModifiableRow[S <: Modifiable] extends StorableRow[S] {
def modified = column[Instant](nm("modified"), NotNull)
def modified_index = index(idx("modified"), modified, unique = false)
}
trait ModifiableQuery[S <: Modifiable, T <:ModifiableRow[S]] extends StorableQuery[S,T] {
lazy val modifiedSinceQuery = Compiled { since : Rep[Instant] => this.filter(_.modified >= since) }
def modifiedById(id:OIDType) = Compiled { for { c <- this if c.oid === id } yield c.modified }
def modifiedSince(since: Instant) = modifiedSinceQuery(since).result
override def update(entity : S) : UpdateResult = {
super.update(entity).flatMap { x ⇒
modifiedById(entity.getId).update(Instant.now())
}
}
}
trait ExpirableRow[S <: Expirable] extends StorableRow[S] {
def expiresAt = column[Instant](nm("expiresAt"), NotNull)
def expiresAt_index = index(idx("expiresAt"), expiresAt, unique = false)
}
trait ExpirableQuery[S <: Expirable, T <: ExpirableRow[S]] extends StorableQuery[S,T] {
lazy val expiredSinceQuery = Compiled { since : Rep[Instant] => this.filter(_.expiresAt <= since ) }
def expiredSince(since: Instant) = expiredSinceQuery(since).result
}
trait NameableRow[S <: Nameable] extends StorableRow[S] {
def name = column[String](nm("name"), NotNull)
def name_index = index(idx("name"), name, unique = true)
}
trait NameableQuery[S <: Nameable, T <: NameableRow[S]] extends StorableQuery[S,T] {
lazy val byNameQuery = Compiled { aName : Rep[String] => this.filter(_.name === aName) }
def byName(name: String) = byNameQuery(name).result
}
trait DescribableRow[S <: Describable] extends StorableRow[S] {
def description = column[String](nm("description"), NotNull)
}
trait DescribableQuery[S <: Describable, T <: DescribableRow[S]] extends StorableQuery[S,T] {
lazy val byDescriptionQuery = Compiled { desc : Rep[String] => this.filter(_.description === desc) }
def byDescription(name: String) = byDescriptionQuery(name).result
}
abstract class SlickeryRow[S <: Slickery](tag : Tag, name: String) extends StorableRow[S](tag, name)
with CreatableRow[S] with ModifiableRow[S] with NameableRow[S] with DescribableRow[S]
abstract class SlickeryQuery[S <: Slickery, T <: SlickeryRow[S]](cons : Tag => T) extends StorableQuery[S,T](cons)
with CreatableQuery[S,T] with ModifiableQuery[S,T] with NameableQuery[S,T] with DescribableQuery[S,T]
abstract class ExpirableSlickeryRow[S <: Slickery with Expirable](tag : Tag, name : String)
extends SlickeryRow[S](tag, name) with ExpirableRow[S]
abstract class ExpirableSlickeryQuery[S <: Slickery with Expirable, T <: ExpirableSlickeryRow[S]](cons : Tag ⇒ T)
extends SlickeryQuery[S,T](cons) with ExpirableQuery[S,T]
/**
* The base class of all correlation tables.
* This allows many-to-many relationships to be established by simply listing the pairs of IDs
*/
abstract class ManyToManyRow[
A <: Storable, TA <: StorableRow[A],
B <: Storable, TB <: StorableRow[B]](
tag : Tag, tableName:String,
nameA: String, queryA: StorableQuery[A,TA],
nameB: String, queryB: StorableQuery[B,TB]) extends TableRow[(OIDType,OIDType)](tag, tableName) {
def a_id = column[OIDType](nm(nameA + "_id"))
def b_id = column[OIDType](nm(nameB + "_id"))
def a_fkey = foreignKey(fkn(nameA), a_id, queryA.query)(_.oid, onDelete = ForeignKeyAction.Cascade)
def b_fkey = foreignKey(fkn(nameB), b_id, queryB.query)(_.oid, onDelete = ForeignKeyAction.Cascade)
def a_b_uniqueness = index(idx(nameA + "_" + nameB), (a_id, b_id), unique = true)
def * = (a_id, b_id)
}
abstract class ManyToManyQuery[
A <: Storable, TA <: StorableRow[A],
B <: Storable, TB <: StorableRow[B],
T <: ManyToManyRow[A,TA,B,TB]](cons: Tag => T) extends TableQuery[T](cons) {
lazy val findAsQuery = Compiled { bId : Rep[OIDType] => this.filter (_.b_id === bId ) }
lazy val findBsQuery = Compiled { aId : Rep[OIDType] => this.filter (_.a_id === aId ) }
def findAssociatedA(id: OIDType) : DBIOAction[Seq[OIDType], NoStream, Effect.Read] =
findAsQuery(id).result.map { s : Seq[(OIDType,OIDType)] => s.map { p : (OIDType,OIDType) => p._1 } }
def findAssociatedA(b: B) : DBIOAction[Seq[OIDType], NoStream, Effect.Read] = findAssociatedA(b.getId)
def findAssociatedB(id: OIDType) : DBIOAction[Seq[OIDType], NoStream, Effect.Read] =
findBsQuery(id).result.map { s : Seq[(OIDType,OIDType)] => s.map { p : (OIDType,OIDType) => p._2 } }
def findAssociatedB(a: A) : DBIOAction[Seq[OIDType], NoStream, Effect.Read] = findAssociatedB(a.getId)
def associate(a: A, b: B) = this += (a.getId, b.getId)
}
}
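// A lifecycle sketch using only the public API defined above. The concrete schema instance passed in
// and the 30-second timeout are assumptions; any subclass that fills in `schemas` will do.
object SchemaLifecycleExample {
  import scala.concurrent.{Await, Future}
  import scala.concurrent.duration._
  import scala.concurrent.ExecutionContext.Implicits.global

  def bringUp(schema: Schema[_ <: SlickeryDriver]): Unit = {
    // Create whatever tables are missing, then validate what actually exists in the database.
    val ready: Future[Seq[Throwable]] = schema.create().flatMap(_ => schema.validate())
    Await.result(ready, 30.seconds).foreach(t => println(s"schema issue: ${t.getMessage}"))
  }
}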
|
reactific/slickery
|
src/main/scala/com/reactific/slickery/Schema.scala
|
Scala
|
apache-2.0
| 16,193 |
package com.arcusys.valamis.web.servlet.certificate.response
import com.arcusys.valamis.user.model.User
case class CertificateSuccessUsersResponse(
id: Long,
title: String,
shortDescription: String,
description: String,
logo: String,
succeedUsers: Seq[User])
|
igor-borisov/valamis
|
valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/certificate/response/CertificateSuccessUsersResponse.scala
|
Scala
|
gpl-3.0
| 274 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.sql.ResultSet
class SparkMetadataOperationSuite extends HiveThriftJdbcTest {
override def mode: ServerMode.Value = ServerMode.binary
test("Spark's own GetSchemasOperation(SparkGetSchemasOperation)") {
def checkResult(rs: ResultSet, dbNames: Seq[String]): Unit = {
for (i <- dbNames.indices) {
assert(rs.next())
assert(rs.getString("TABLE_SCHEM") === dbNames(i))
}
// Make sure there are no more elements
assert(!rs.next())
}
withDatabase("db1", "db2") { statement =>
Seq("CREATE DATABASE db1", "CREATE DATABASE db2").foreach(statement.execute)
val metaData = statement.getConnection.getMetaData
checkResult(metaData.getSchemas(null, "%"), Seq("db1", "db2", "default", "global_temp"))
checkResult(metaData.getSchemas(null, "db1"), Seq("db1"))
checkResult(metaData.getSchemas(null, "db_not_exist"), Seq.empty)
checkResult(metaData.getSchemas(null, "db*"), Seq("db1", "db2"))
}
}
test("Spark's own GetTablesOperation(SparkGetTablesOperation)") {
def checkResult(rs: ResultSet, tableNames: Seq[String]): Unit = {
for (i <- tableNames.indices) {
assert(rs.next())
assert(rs.getString("TABLE_NAME") === tableNames(i))
}
// Make sure there are no more elements
assert(!rs.next())
}
withJdbcStatement("table1", "table2", "view1") { statement =>
Seq(
"CREATE TABLE table1(key INT, val STRING)",
"CREATE TABLE table2(key INT, val STRING)",
"CREATE VIEW view1 AS SELECT * FROM table2",
"CREATE OR REPLACE GLOBAL TEMPORARY VIEW view_global_temp_1 AS SELECT 1 AS col1",
"CREATE OR REPLACE TEMPORARY VIEW view_temp_1 AS SELECT 1 as col1"
).foreach(statement.execute)
val metaData = statement.getConnection.getMetaData
checkResult(metaData.getTables(null, "%", "%", null),
Seq("table1", "table2", "view1", "view_global_temp_1", "view_temp_1"))
checkResult(metaData.getTables(null, "%", "table1", null), Seq("table1"))
checkResult(metaData.getTables(null, "%", "table_not_exist", null), Seq.empty)
checkResult(metaData.getTables(null, "%", "%", Array("TABLE")),
Seq("table1", "table2"))
checkResult(metaData.getTables(null, "%", "%", Array("VIEW")),
Seq("view1", "view_global_temp_1", "view_temp_1"))
checkResult(metaData.getTables(null, "%", "view_global_temp_1", null),
Seq("view_global_temp_1"))
checkResult(metaData.getTables(null, "%", "view_temp_1", null),
Seq("view_temp_1"))
checkResult(metaData.getTables(null, "%", "%", Array("TABLE", "VIEW")),
Seq("table1", "table2", "view1", "view_global_temp_1", "view_temp_1"))
checkResult(metaData.getTables(null, "%", "table_not_exist", Array("TABLE", "VIEW")),
Seq.empty)
}
}
test("Spark's own GetColumnsOperation(SparkGetColumnsOperation)") {
def checkResult(
rs: ResultSet,
columns: Seq[(String, String, String, String, String)]) : Unit = {
for (i <- columns.indices) {
assert(rs.next())
val col = columns(i)
assert(rs.getString("TABLE_NAME") === col._1)
assert(rs.getString("COLUMN_NAME") === col._2)
assert(rs.getString("DATA_TYPE") === col._3)
assert(rs.getString("TYPE_NAME") === col._4)
assert(rs.getString("REMARKS") === col._5)
}
// Make sure there are no more elements
assert(!rs.next())
}
withJdbcStatement("table1", "table2", "view1") { statement =>
Seq(
"CREATE TABLE table1(key INT comment 'Int column', val STRING comment 'String column')",
"CREATE TABLE table2(key INT, val DECIMAL comment 'Decimal column')",
"CREATE VIEW view1 AS SELECT key FROM table1",
"CREATE OR REPLACE GLOBAL TEMPORARY VIEW view_global_temp_1 AS SELECT 2 AS col2",
"CREATE OR REPLACE TEMPORARY VIEW view_temp_1 AS SELECT 2 as col2"
).foreach(statement.execute)
val metaData = statement.getConnection.getMetaData
checkResult(metaData.getColumns(null, "%", "%", null),
Seq(
("table1", "key", "4", "INT", "Int column"),
("table1", "val", "12", "STRING", "String column"),
("table2", "key", "4", "INT", ""),
("table2", "val", "3", "DECIMAL(10,0)", "Decimal column"),
("view1", "key", "4", "INT", "Int column"),
("view_global_temp_1", "col2", "4", "INT", ""),
("view_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "%", "table1", null),
Seq(
("table1", "key", "4", "INT", "Int column"),
("table1", "val", "12", "STRING", "String column")))
checkResult(metaData.getColumns(null, "%", "table1", "key"),
Seq(("table1", "key", "4", "INT", "Int column")))
checkResult(metaData.getColumns(null, "%", "view%", null),
Seq(
("view1", "key", "4", "INT", "Int column"),
("view_global_temp_1", "col2", "4", "INT", ""),
("view_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "%", "view_global_temp_1", null),
Seq(("view_global_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "%", "view_temp_1", null),
Seq(("view_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "%", "view_temp_1", "col2"),
Seq(("view_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "default", "%", null),
Seq(
("table1", "key", "4", "INT", "Int column"),
("table1", "val", "12", "STRING", "String column"),
("table2", "key", "4", "INT", ""),
("table2", "val", "3", "DECIMAL(10,0)", "Decimal column"),
("view1", "key", "4", "INT", "Int column"),
("view_temp_1", "col2", "4", "INT", "")))
checkResult(metaData.getColumns(null, "%", "table_not_exist", null), Seq.empty)
}
}
test("Spark's own GetTableTypesOperation(SparkGetTableTypesOperation)") {
def checkResult(rs: ResultSet, tableTypes: Seq[String]): Unit = {
for (i <- tableTypes.indices) {
assert(rs.next())
assert(rs.getString("TABLE_TYPE") === tableTypes(i))
}
// Make sure there are no more elements
assert(!rs.next())
}
withJdbcStatement() { statement =>
val metaData = statement.getConnection.getMetaData
checkResult(metaData.getTableTypes, Seq("TABLE", "VIEW"))
}
}
}
|
actuaryzhang/spark
|
sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/SparkMetadataOperationSuite.scala
|
Scala
|
apache-2.0
| 7,390 |
/*
* Copyright 2010-2014 Benjamin Lings
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.codingwell.scalaguice
import com.google.inject.Binder
import com.google.inject.binder._
import com.google.inject.name.Names
import java.lang.annotation.{Annotation => JAnnotation}
import javax.inject.Provider
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
/**
* Extensions for Guice's binding DSL.
* These allow using a type parameter instead of `classOf[Foo]`
* or `new TypeLiteral[Bar[Foo]] {}`. The extra methods are
* named as those in the normal binding DSL suffixed with `Type`.
*
* For example, instead of
* {{{
* binder.bind(new TypeLiteral[Bar[Foo]]{}).to(classOf[FooBarImpl])
* }}}
* use
* {{{
* import BindingExtensions._
* binder.bindType[Bar[Foo]].toType[FooImpl]
* }}}
*
* '''Note''' This syntax allows binding to and from generic types.
* It doesn't currently allow bindings between wildcard types because the
* manifests for wildcard types don't provide access to type bounds.
*/
object BindingExtensions {
implicit class ScalaBinder(val self: Binder) extends AnyVal {
def bindType[T: TypeTag]: AnnotatedBindingBuilder[T] = self.bind(typeLiteral[T])
}
implicit class ScalaScopedBindingBuilder(val self: ScopedBindingBuilder) extends AnyVal {
def inType[TAnn <: JAnnotation : ClassTag](): Unit = self.in(cls[TAnn])
}
implicit class ScalaLinkedBindingBuilder[T](val self: LinkedBindingBuilder[T]) extends AnyVal {
def toType[TImpl <: T : TypeTag]: ScopedBindingBuilder = self.to(typeLiteral[TImpl])
def toProviderType[TProvider <: Provider[_ <: T] : ClassTag]: ScopedBindingBuilder = self.toProvider(cls[TProvider])
}
implicit class ScalaAnnotatedBindingBuilder[T](val self: AnnotatedBindingBuilder[T]) extends AnyVal {
def annotatedWithType[TAnn <: JAnnotation : ClassTag]: LinkedBindingBuilder[T] = self.annotatedWith(cls[TAnn])
}
implicit class ScalaAnnotatedConstantBindingBuilder(val self: AnnotatedConstantBindingBuilder) extends AnyVal {
def annotatedWithType[TAnn <: JAnnotation : ClassTag]: ConstantBindingBuilder = self.annotatedWith(cls[TAnn])
def annotatedWithName(name: String): ConstantBindingBuilder = self.annotatedWith(Names.named(name))
}
implicit class ScalaConstantBindingBuilder(val self: ConstantBindingBuilder) extends AnyVal {
def to[T: ClassTag](): Unit = self.to(cls[T])
}
}
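// A hedged usage sketch with placeholder types (Repo/UserRepo are invented for illustration): it
// shows the extensions above inside a plain Guice AbstractModule, binding a generic trait to an
// implementation without writing a TypeLiteral by hand, then resolving it through a Key.
object BindingExtensionsExample {
  import com.google.inject.{AbstractModule, Guice, Key}
  import net.codingwell.scalaguice.BindingExtensions._

  trait Repo[A]
  class UserRepo extends Repo[String]

  class ExampleModule extends AbstractModule {
    override def configure(): Unit = {
      binder.bindType[Repo[String]].toType[UserRepo]   // instead of bind(new TypeLiteral[Repo[String]] {})
    }
  }

  def main(args: Array[String]): Unit = {
    val injector = Guice.createInjector(new ExampleModule)
    println(injector.getInstance(Key.get(typeLiteral[Repo[String]])))
  }
}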
|
codingwell/scala-guice
|
src/main/scala/net/codingwell/scalaguice/BindingExtensions.scala
|
Scala
|
apache-2.0
| 2,945 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.concurrent.cancelables
import minitest.SimpleTestSuite
import monifu.concurrent.Cancelable
object SingleAssignmentCancelableSuite extends SimpleTestSuite {
test("cancel()") {
var effect = 0
val s = SingleAssignmentCancelable()
val b = BooleanCancelable { effect += 1 }
s() = b
s.cancel()
assert(s.isCanceled)
assert(b.isCanceled)
assert(effect == 1)
s.cancel()
assert(effect == 1)
}
test("cancel on single assignment") {
val s = SingleAssignmentCancelable()
s.cancel()
assert(s.isCanceled)
var effect = 0
val b = BooleanCancelable { effect += 1 }
s() = b
assert(b.isCanceled)
assert(effect == 1)
s.cancel()
assert(effect == 1)
}
test("throw exception on multi assignment") {
val s = SingleAssignmentCancelable()
val b1 = Cancelable()
s() = b1
intercept[IllegalStateException] {
val b2 = Cancelable()
s() = b2
}
}
test("throw exception on multi assignment when canceled") {
val s = SingleAssignmentCancelable()
s.cancel()
val b1 = Cancelable()
s() = b1
intercept[IllegalStateException] {
val b2 = Cancelable()
s() = b2
}
}
}
|
virtualirfan/monifu
|
core/shared/src/test/scala/monifu/concurrent/cancelables/SingleAssignmentCancelableSuite.scala
|
Scala
|
apache-2.0
| 1,901 |
/*
* Copyright 2010-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mapper
import org.specs2.mutable.Specification
import common._
import json._
import util._
import Helpers._
import view._
/**
* Systems under specification for ItemsList.
*/
object ItemsListSpec extends Specification {
"ItemsList Specification".title
sequential
val provider = DbProviders.H2MemoryProvider
def init = {
provider.setupDB
Schemifier.destroyTables_!!(DefaultConnectionIdentifier, Schemifier.neverF _, SampleItem)
Schemifier.schemify(true, Schemifier.neverF _, SampleItem)
new ItemsList[SampleItem] {
def metaMapper = SampleItem
}
}
"ItemsList" should {
"buffer items to save" in {
val il = init
il.add
il.add
il.add
il.current.length must_== 0
il.added.length must_== 3
il.save
SampleItem.count must_== 3
il.current.length must_== 3
}
"correctly handle removing an unsaved item" in {
val il = init
il.add
il.add
il.add
il.save
il.add
il.add
il.add
il.remove(il.added(1))
il.remove(il.added(0))
il.save
SampleItem.count must_== 4
il.added.length must_== 0
il.removed.length must_== 0
}
}
}
class SampleItem extends LongKeyedMapper[SampleItem] with IdPK {
def getSingleton = SampleItem
object field extends MappedInt(this)
}
object SampleItem extends SampleItem with LongKeyedMetaMapper[SampleItem] {
var counter = 0
override def create = {
val x: SampleItem = super.create
x.field(counter)
counter += 1
x
}
}
|
lzpfmh/framework-2
|
persistence/mapper/src/test/scala/net/liftweb/mapper/ItemsListSpec.scala
|
Scala
|
apache-2.0
| 2,208 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.orbeon.apache.xerces.xni.parser
import java.io.IOException
/**
* This interface defines a generic document scanner. This interface
* allows a scanner to be used interchangably in existing parser
* configurations.
*
* If the parser configuration uses a document scanner that implements
* this interface, components should be able to query the scanner
* instance from the component manager using the following property
* identifier:
*
* http://apache.org/xml/properties/internal/document-scanner
*/
trait XMLDocumentScanner extends XMLDocumentSource {
/**
* Sets the input source.
*
* @param inputSource The input source.
*
* @throws IOException Thrown on i/o error.
*/
def setInputSource(inputSource: XMLInputSource): Unit
/**
* Scans a document.
*
 * @param complete True if the scanner should scan the document
 *                 completely, pushing all events to the registered
 *                 document handler. A value of false indicates that
 *                 the scanner should only scan the next portion
* of the document and return. A scanner instance is
* permitted to completely scan a document if it does
* not support this "pull" scanning model.
*
* @return True if there is more to scan, false otherwise.
*/
def scanDocument(complete: Boolean): Boolean
}
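// A sketch of the "pull" scanning loop described in the scanDocument comment above. The scanner and
// input source are assumed to come from a parser configuration (e.g. looked up under the
// document-scanner property identifier); only methods declared by this trait are used.
object XMLDocumentScannerExample {
  def scanAll(scanner: XMLDocumentScanner, source: XMLInputSource): Unit = {
    scanner.setInputSource(source)
    // Scan one portion at a time until the scanner reports that nothing is left.
    while (scanner.scanDocument(false)) {}
  }
}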
|
ebruchez/darius-xml.js
|
xerces/shared/src/main/scala/org/orbeon/apache/xerces/xni/parser/XMLDocumentScanner.scala
|
Scala
|
apache-2.0
| 2,214 |
package dk.gp.cogp.model
import dk.gp.cov.CovFunc
import breeze.linalg.DenseVector
import breeze.linalg.DenseMatrix
import dk.gp.math.invchol
import breeze.linalg.cholesky
import scala.util.Random
import breeze.stats._
import breeze.linalg.InjectNumericOps
import breeze.stats.variance.reduceDouble
import dk.bayes.math.gaussian.MultivariateGaussian
case class CogpGPVar(z: DenseMatrix[Double], u: MultivariateGaussian, covFunc: CovFunc, covFuncParams: DenseVector[Double], covFuncParamsDelta: DenseVector[Double]) {
require(u.v.findAll (_.isNaN).size==0,"Inducing points variance is NaN:" + u.v)
def calckZZ(): DenseMatrix[Double] = covFunc.cov(z, z, covFuncParams) + 1e-10 * DenseMatrix.eye[Double](z.rows)
def calckXZ(x: DenseMatrix[Double]): DenseMatrix[Double] = covFunc.cov(x, z, covFuncParams)
def calcdKzz(): Array[DenseMatrix[Double]] = covFunc.covD(z, z, covFuncParams)
def calcdKxz(x: DenseMatrix[Double]): Array[DenseMatrix[Double]] = covFunc.covD(x, z, covFuncParams)
}
object CogpGPVar {
def apply(y: DenseVector[Double], z: DenseMatrix[Double], covFunc: CovFunc, covFuncParams: DenseVector[Double]): CogpGPVar = {
CogpGPVar(z, getInitialIndVar(y, z), covFunc, covFuncParams, DenseVector.zeros[Double](covFuncParams.size))
}
private def getInitialIndVar(y: DenseVector[Double], z: DenseMatrix[Double]): MultivariateGaussian = {
val m = DenseVector.zeros[Double](z.rows)
val v = 10*variance(y)*DenseMatrix.eye[Double](z.rows)
MultivariateGaussian(m, v)
}
}
|
danielkorzekwa/bayes-scala-gp
|
src/main/scala/dk/gp/cogp/model/CogpGPVar.scala
|
Scala
|
bsd-2-clause
| 1,527 |
package gapt.examples
import gapt.expr._
import gapt.proofs.Sequent
import gapt.proofs.ceres._
import gapt.proofs.context.update.{ PrimitiveRecursiveFunction => PrimRecFun }
import gapt.proofs.context.update.ProofDefinitionDeclaration
import gapt.proofs.context.update.ProofNameDeclaration
import gapt.proofs.gaptic._
import gapt.proofs.lk.LKProof
object OneStrictMonotoneRefutation extends TacticsProof( OneStrictMonotoneSchema.ctx ) {
val SCS: Map[CLS, ( Struct, Set[Var] )] = SchematicStruct( "omega" ).getOrElse( Map() )
val CFPRN = CharFormPRN( SCS )
CharFormPRN.PR( CFPRN )
def sequentForm( input: Expr ) = Viperize( le"omegaSFAF $input" )
ctx += hoc"Top:nat>nat"
ctx += hoc"Seq1Make:nat>nat>nat"
ctx += hoc"Seq2Make:nat>nat>nat"
ctx += hoc"Seq1Make2:nat>nat>nat"
ctx += hoc"Seq2Make2:nat>nat>nat"
ctx += hoc"Next:nat>nat"
ctx += PrimRecFun( hoc"SW:nat>i", "SW 0 = z ", "SW (s y) = (suc (SW y))" )
ctx += PrimRecFun(
hoc"SEQ1:nat>nat>o",
"SEQ1 0 y = ((E y (f (suc (SW 0)))) | (LE (f (SW 0)) y))",
"SEQ1 (s x) y = (( (E y (f (suc (SW (s x))))) | (LE (f (SW (s x))) y) ) & (SEQ1 x y))" )
ctx += PrimRecFun(
hoc"SEQ2:nat>nat>o",
"SEQ2 0 y = ((E y (f (SW 0))) | (LE (f (SW 0)) y))",
"SEQ2 (s x) y = (( (E y (f (SW (s x)))) | (LE (f (SW (s x))) y) ) & (SEQ2 x y))" )
val esTop = Sequent( Seq( hof"omegaSFAF(n)" ), Seq() )
ctx += ProofNameDeclaration( le"Top n", esTop )
val esSeq1Make = Sequent(
Seq( hof" !a ((E(n, f(suc(a))) | LE(f(a), n)))" ),
Seq( hof"SEQ1(k, n)" ) )
ctx += ProofNameDeclaration( le"Seq1Make k n", esSeq1Make )
val esSeq2Make = Sequent(
Seq( hof" !a ((E(n, f(a)) | LE(f(a), n)))" ),
Seq( hof"SEQ2(k, n)" ) )
ctx += ProofNameDeclaration( le"Seq2Make k n", esSeq2Make )
val esSeq1Make2 = Sequent(
Seq(
hof"SEQ1(k, s(n))",
hof"SEQ2(k, s(n))",
hof"E(n, f(suc(SW(k)))) | LE(f(SW(k)), n)",
hof"!b (¬LE(f(suc(b)), s(n)) ∨ (E(n, f(suc(b))) ∨ LE(f(b), n)))",
hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))" ),
Seq( hof"SEQ1(k, n)" ) )
ctx += ProofNameDeclaration( le"Seq1Make2 k n", esSeq1Make2 )
val esSeq2Make2 = Sequent(
Seq(
hof"SEQ1(k, s(n))",
hof"SEQ2(k, s(n))",
hof"!b (¬LE(f(b), s(n)) ∨ (E(n, f(b)) ∨ LE(f(b), n)))",
hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))" ),
Seq( hof"SEQ2(k, n)" ) )
ctx += ProofNameDeclaration( le"Seq2Make2 k n", esSeq2Make2 )
val esNext = Sequent(
Seq( hof"SEQ1(n, n)", hof"SEQ2(n, n)", hof"phiSFAT(n)" ),
Seq() )
ctx += ProofNameDeclaration( le"Next n", esNext )
val esPRBc = Sequent( Seq( "Ant_0" -> hof"omegaSFAF(0)" ), Seq() )
val PRBc: LKProof = Lemma( esPRBc ) {
unfold( "omegaSFAF" ) in "Ant_0"
unfold( "phiSFAT" ) in "Ant_0"
escargot
}
ctx += ProofDefinitionDeclaration( le"Top 0", PRBc )
val esPRSc = Sequent( Seq( "Ant_0" -> hof"omegaSFAF(s(n))" ), Seq() )
val PRSc: LKProof = Lemma( esPRSc ) {
unfold( "omegaSFAF" ) in "Ant_0"
andL
andL
cut( "cut", hof"SEQ1(s(n),s(n)) & SEQ2(s(n),s(n))" )
andR
ref( "Seq1Make" )
ref( "Seq2Make" )
andL
ref( "Next" )
}
ctx += ProofDefinitionDeclaration( le"Top (s n)", PRSc )
val esNextBc = Sequent( Seq(
"Ant_0" -> hof"phiSFAT(0)",
"Ant_1" -> hof"SEQ1(0, 0)",
"Ant_2" -> hof"SEQ2(0,0)" ), Seq() )
val NextBc: LKProof = Lemma( esNextBc ) {
unfold( "phiSFAT" ) in "Ant_0"
unfold( "SEQ1" ) in "Ant_1"
unfold( "SEQ2" ) in "Ant_2"
unfold( "SW" ) in "Ant_1"
unfold( "SW" ) in "Ant_2"
escargot
}
ctx += ProofDefinitionDeclaration( le"Next 0", NextBc )
val esNextSc = Sequent( Seq(
"Ant_0" -> hof"phiSFAT(s(n))",
"Ant_1" -> hof"SEQ1(s(n),s(n))",
"Ant_2" -> hof"SEQ2(s(n),s(n))" ), Seq() )
val NextSc: LKProof = Lemma( esNextSc ) {
unfold( "phiSFAT" ) in "Ant_0"
andL
andL
andL
cut( "cut", hof"SEQ1(n,n) & SEQ2(n,n)" )
andR
unfold( "SEQ1" ) in "Ant_1"
andL
unfold( "SEQ2" ) in "Ant_2"
andL
allL( "Ant_0_0_0", le"(SW (s n))" )
orL( "Ant_2_0" )
orL( "Ant_1_0" )
orL( "Ant_0_0_0_0" )
negL
trivial
negL
trivial
unfold( "SW" ) in "Ant_1_0"
allL( "Ant_0_0_1_1", le"(SW n)" )
orL( "Ant_0_0_1_1_0" )
negL
trivial
ref( "Seq1Make2" )
unfold( "SW" ) in "Ant_2_0"
allL( "Ant_0_0_1_1", le"(SW n)" )
orL( "Ant_0_0_1_1_0" )
negL
trivial
ref( "Seq1Make2" )
unfold( "SEQ1" ) in "Ant_1"
andL
unfold( "SEQ2" ) in "Ant_2"
andL
allL( "Ant_0_0_0", le"(SW (s n))" )
orL( "Ant_2_0" )
orL( "Ant_1_0" )
orL( "Ant_0_0_0_0" )
negL
trivial
negL
trivial
ref( "Seq2Make2" )
ref( "Seq2Make2" )
andL
ref( "Next" )
}
ctx += ProofDefinitionDeclaration( le"Next (s n)", NextSc )
val esSeq2Make2Bc = Sequent( Seq(
"Ant_0" -> hof"SEQ1(0, s(n))",
"Ant_1" -> hof"SEQ2(0, s(n))",
"Ant_2" -> hof"!b (¬LE(f(b), s(n)) ∨ (E(n, f(b)) ∨ LE(f(b), n)))",
"Ant_3" -> hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))" ), Seq(
"Suc_0" -> hof"SEQ2(0, n)" ) )
val Seq2Make2Bc: LKProof = Lemma( esSeq2Make2Bc ) {
unfold( "SEQ1" ) in "Ant_0"
unfold( "SEQ2" ) in "Ant_1"
unfold( "SEQ2" ) in "Suc_0"
unfold( "SW" ) in "Ant_0"
unfold( "SW" ) in "Ant_1"
unfold( "SW" ) in "Suc_0"
escargot
}
ctx += ProofDefinitionDeclaration( le"Seq2Make2 0 n", Seq2Make2Bc )
val esSeq2Make2Sc = Sequent( Seq(
"Ant_0" -> hof"SEQ1(s(k), s(n))",
"Ant_1" -> hof"SEQ2(s(k), s(n))",
"Ant_2" -> hof"!b (¬LE(f(b), s(n)) ∨ (E(n, f(b)) ∨ LE(f(b), n)))",
"Ant_3" -> hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))" ), Seq(
"Suc_0" -> hof"SEQ2(s(k), n)" ) )
val Seq2Make2Sc: LKProof = Lemma( esSeq2Make2Sc ) {
unfold( "SEQ1" ) in "Ant_0"
unfold( "SW" ) in "Ant_0"
andL
unfold( "SEQ2" ) in "Ant_1"
unfold( "SW" ) in "Ant_1"
andL
unfold( "SEQ2" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
andR
orL( "Ant_1_0" )
orL( "Ant_0_0" )
allL( "Ant_3", le"(suc (SW k))" )
orL
negL
trivial
negL
trivial
allL( "Ant_2", le"(suc (SW k))" )
orL
negL
trivial
orR
orL
trivial
trivial
allL( "Ant_2", le"(suc (SW k))" )
orL( "Ant_2_0" )
negL
trivial
orR
orL( "Ant_2_0" )
trivial
trivial
ref( "Seq2Make2" )
}
ctx += ProofDefinitionDeclaration( le"Seq2Make2 (s k) n", Seq2Make2Sc )
val esSeq1Make2Bc = Sequent( Seq(
"Ant_0" -> hof"SEQ1(0, s(n))",
"Ant_1" -> hof"SEQ2(0, s(n))",
"Ant_2" -> hof"!b (¬LE(f(suc(b)), s(n)) ∨ (E(n, f(suc(b))) ∨ LE(f(b), n)))",
"Ant_3" -> hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))",
"Ant_4" -> hof"E(n, f(suc(SW(0)))) | LE(f(SW(0)), n)" ), Seq(
"Suc_0" -> hof"SEQ1(0, n)" ) )
val Seq1Make2Bc: LKProof = Lemma( esSeq1Make2Bc ) {
unfold( "SEQ1" ) in "Ant_0"
unfold( "SEQ2" ) in "Ant_1"
unfold( "SEQ1" ) in "Suc_0"
unfold( "SW" ) in "Ant_0"
unfold( "SW" ) in "Ant_1"
unfold( "SW" ) in "Suc_0"
unfold( "SW" ) in "Ant_4"
escargot
}
ctx += ProofDefinitionDeclaration( le"Seq1Make2 0 n", Seq1Make2Bc )
val esSeq1Make2Sc = Sequent( Seq(
"Ant_0" -> hof"SEQ1(s(k), s(n))",
"Ant_1" -> hof"SEQ2(s(k), s(n))",
"Ant_2" -> hof"!b (¬LE(f(suc(b)), s(n)) ∨ (E(n, f(suc(b))) ∨ LE(f(b), n)))",
"Ant_3" -> hof"!a (¬E(s(n), f(a)) ∨ ¬E(s(n), f(suc(a))))",
"Ant_4" -> hof"E(n, f(suc(SW(s(k))))) | LE(f(SW(s(k))), n)" ), Seq(
"Suc_0" -> hof"SEQ1(s(k), n)" ) )
val Seq1Make2Sc: LKProof = Lemma( esSeq1Make2Sc ) {
unfold( "SW" ) in "Ant_4"
unfold( "SEQ1" ) in "Ant_0"
unfold( "SW" ) in "Ant_0"
andL
unfold( "SEQ2" ) in "Ant_1"
unfold( "SW" ) in "Ant_1"
andL
unfold( "SEQ1" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
andR
orR
orL( "Ant_4" )
trivial
trivial
orL( "Ant_1_0" )
orL( "Ant_0_0" )
allL( "Ant_3", le"(suc(SW k))" )
orL( "Ant_3_0" )
negL
trivial
negL
trivial
allL( "Ant_2", le"(SW k)" )
orL( "Ant_2_0" )
negL
trivial
allL( "Ant_2", le"(SW k)" )
orL( "Ant_2_1" )
negL
trivial
ref( "Seq1Make2" )
allL( "Ant_2", le"(SW k)" )
orL( "Ant_2_0" )
negL
trivial
ref( "Seq1Make2" )
}
ctx += ProofDefinitionDeclaration( le"Seq1Make2 (s k) n", Seq1Make2Sc )
val esSeq1MakeBc = Sequent(
Seq( "Ant_0" -> hof" !a ((E(n, f(suc(a))) | LE(f(a), n)))" ), Seq(
"Suc_0" -> hof"SEQ1(0, n)" ) )
val Seq1MakeBc: LKProof = Lemma( esSeq1MakeBc ) {
unfold( "SEQ1" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
escargot
}
ctx += ProofDefinitionDeclaration( le"Seq1Make 0 n ", Seq1MakeBc )
val esSeq1MakeSc = Sequent(
Seq( "Ant_0" -> hof" !a ((E(n, f(suc(a))) | LE(f(a), n)))" ), Seq(
"Suc_0" -> hof"SEQ1(s(k), n)" ) )
val Seq1MakeSc: LKProof = Lemma( esSeq1MakeSc ) {
unfold( "SEQ1" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
andR
allL( le"(suc (SW k))" )
orR
orL
trivial
trivial
ref( "Seq1Make" )
}
ctx += ProofDefinitionDeclaration( le"Seq1Make (s k) n", Seq1MakeSc )
val esSeq2MakeBc = Sequent(
Seq( "Ant_0" -> hof" !a ((E(n, f(a)) | LE(f(a), n)))" ), Seq(
"Suc_0" -> hof"SEQ2(0, n)" ) )
val Seq2MakeBc: LKProof = Lemma( esSeq2MakeBc ) {
unfold( "SEQ2" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
escargot
}
ctx += ProofDefinitionDeclaration( le"Seq2Make 0 n ", Seq2MakeBc )
val esSeq2MakeSc = Sequent(
Seq( "Ant_0" -> hof" !a ((E(n, f(a)) | LE(f(a), n)))" ), Seq(
"Suc_0" -> hof"SEQ2(s(k), n)" ) )
val Seq2MakeSc: LKProof = Lemma( esSeq2MakeSc ) {
unfold( "SEQ2" ) in "Suc_0"
unfold( "SW" ) in "Suc_0"
andR
allL( le"(suc (SW k))" )
orR
orL
trivial
trivial
ref( "Seq2Make" )
}
ctx += ProofDefinitionDeclaration( le"Seq2Make (s k) n", Seq2MakeSc )
}
|
gapt/gapt
|
examples/schema/OneStrictMonotoneRefutation.scala
|
Scala
|
gpl-3.0
| 9,954 |
object Sample {
def foo(x: Int) {
"stop here"
}
def main(args: Array[String]) {
val x = 0
foo(x + 1)
}
}
|
consulo/consulo-scala
|
testdata/debugger/ScalaLocalVariablesEvaluation/param/src/Sample.scala
|
Scala
|
apache-2.0
| 125 |
/**
* This file is part of SensApp [ http://sensapp.modelbased.net ]
*
* Copyright (C) 2011- SINTEF ICT
* Contact: SINTEF ICT <[email protected]>
*
* Module: net.modelbased.sensapp
*
* SensApp is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* SensApp is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General
* Public License along with SensApp. If not, see
* <http://www.gnu.org/licenses/>.
*/
package net.modelbased.sensapp.system.sample
import net.modelbased.sensapp.service.sample.{Service => SampleService}
import net.modelbased.sensapp.library.system._
import net.modelbased.sensapp.service.rrd._
import akka.actor.ActorSystem
class Boot(override val system: ActorSystem) extends System {
// "injection of dependency" to propagate the current actorSystem
trait iod {
lazy val partners = new Monolith { implicit val actorSystem = system }
implicit def actorSystem = system
}
def services = {
List(new RRDTemplateService with iod {},
new RRDBaseService with iod {} )
}
}
|
SINTEF-9012/sensapp
|
net.modelbased.sensapp.system.rrdtest/src/main/scala/net/modelbased/sensapp/system/sample/Boot.scala
|
Scala
|
lgpl-3.0
| 1,473 |
import scala.util.{ Failure, Success }
import scala.concurrent._
import reactivemongo.api._
import reactivemongo.bson._
import reactivemongo.core.errors.GenericDatabaseException
object BSONCollectionSpec extends org.specs2.mutable.Specification {
"BSON collection" title
import reactivemongo.api.commands.bson.DefaultBSONCommandError
import reactivemongo.api.collections.bson._
import Common._
sequential
lazy val collection = db("somecollection_bsoncollectionspec")
case class Person(name: String, age: Int)
case class CustomException(msg: String) extends Exception(msg)
object BuggyPersonWriter extends BSONDocumentWriter[Person] {
def write(p: Person): BSONDocument =
throw CustomException("PersonWrite error")
}
object BuggyPersonReader extends BSONDocumentReader[Person] {
def read(doc: BSONDocument): Person = throw CustomException("hey hey hey")
}
class SometimesBuggyPersonReader extends BSONDocumentReader[Person] {
var i = 0
def read(doc: BSONDocument): Person = {
i += 1
if (i % 4 == 0)
throw CustomException("hey hey hey")
else Person(doc.getAs[String]("name").get, doc.getAs[Int]("age").get)
}
}
object PersonWriter extends BSONDocumentWriter[Person] {
def write(p: Person): BSONDocument =
BSONDocument("age" -> p.age, "name" -> p.name)
}
object PersonReader extends BSONDocumentReader[Person] {
def read(doc: BSONDocument): Person = Person(doc.getAs[String]("name").get, doc.getAs[Int]("age").get)
}
val person = Person("Jack", 25)
val person2 = Person("James", 16)
val person3 = Person("John", 34)
val person4 = Person("Jane", 24)
val person5 = Person("Joline", 34)
"BSON collection" should {
"write five docs with success" >> {
sequential
implicit val writer = PersonWriter
"with insert" in {
Await.result(collection.insert(person), timeout).ok must beTrue and (
Await.result(collection.insert(person2), timeout).ok must beTrue)
}
"with bulkInsert" in {
val persons =
Seq[collection.ImplicitlyDocumentProducer](person3, person4, person5)
/* OR
val persons = Seq(person3, person4, person5).
map(implicitly[collection.ImplicitlyDocumentProducer](_))
*/
collection.bulkInsert(true)(persons: _*).map(_.ok).
aka("insertion") must beTrue.await(timeoutMillis)
}
}
"read empty cursor" >> {
@inline def cursor: Cursor[BSONDocument] =
collection.find(BSONDocument("plop" -> "plop")).cursor[BSONDocument]()
"with success using collect" in {
val list = cursor.collect[Vector](10)
Await.result(list, timeout).length mustEqual 0
}
"read empty cursor with success using collect" in {
collection.find(
BSONDocument("age" -> 25), BSONDocument("name" -> 1)).
one[BSONDocument] must beSome[BSONDocument].like({
case doc =>
doc.elements.size must_== 2 /* _id+name */ and (
doc.getAs[String]("name") aka "name" must beSome("Jack"))
}).await(5000)
}
"explain query result" >> {
"when MongoDB > 2.6" in {
collection.find(BSONDocument.empty).explain().one[BSONDocument].
aka("explanation") must beSome[BSONDocument].which { result =>
result.getAs[BSONDocument]("queryPlanner").
aka("queryPlanner") must beSome and (
result.getAs[BSONDocument]("executionStats").
aka("stats") must beSome) and (
result.getAs[BSONDocument]("serverInfo").
aka("serverInfo") must beSome)
}.await(timeoutMillis)
} tag ("mongo3", "not_mongo26")
"when MongoDB = 2.6" in {
collection.find(BSONDocument.empty).explain().one[BSONDocument].
aka("explanation") must beSome[BSONDocument].which { result =>
result.getAs[List[BSONDocument]]("allPlans").
aka("plans") must beSome[List[BSONDocument]] and (
result.getAs[String]("server").
aka("server") must beSome[String])
}.await(timeoutMillis)
} tag ("mongo2", "mongo26")
}
"with success using foldResponses" in {
cursor.foldResponses(0)(
(i, _) => Cursor.Cont(i + 1), (_, e) => Cursor.Fail(e)).
aka("result") must beEqualTo(1 /* one empty response */ ).
await(timeoutMillis)
}
"with success using foldBulks" in {
cursor.foldBulks(0)(
(i, _) => Cursor.Cont(i + 1), (_, e) => Cursor.Fail(e)).
aka("result") must beEqualTo(1 /* one empty response */ ).
await(timeoutMillis)
}
"with success using foldWhile" in {
cursor.foldWhile(0)(
(i, _) => Cursor.Cont(i + 1), (_, e) => Cursor.Fail(e)).
aka("result") must beEqualTo(0).await(timeoutMillis)
}
"with success as option" in {
cursor.headOption must beNone.await(timeoutMillis)
}
}
"read a document with success" in {
implicit val reader = PersonReader
Await.result(collection.find(BSONDocument()).one[Person], timeout).get mustEqual person
}
"read all with success" >> {
implicit val reader = PersonReader
@inline def cursor = collection.find(BSONDocument()).cursor[Person]()
val persons = Seq(person, person2, person3, person4, person5)
"as list" in {
(cursor.collect[List]() must beEqualTo(persons).await(timeoutMillis)).
and(cursor.headOption must beSome(person).await(timeoutMillis))
}
"using foldResponses" in {
cursor.foldResponses(0)({ (s, _) => Cursor.Cont(s + 1) },
(_, e) => Cursor.Fail(e)) must beEqualTo(1).await(timeoutMillis)
}
"using foldBulks" in {
cursor.foldBulks(1)({ (s, _) => Cursor.Cont(s + 1) },
(_, e) => Cursor.Fail(e)) must beEqualTo(2).await(timeoutMillis)
}
"using foldWhile" in {
cursor.foldWhile(Nil: Seq[Person])((s, p) => Cursor.Cont(s :+ p),
(_, e) => Cursor.Fail(e)) must beEqualTo(persons).await(timeoutMillis)
}
}
"read until John" in {
implicit val reader = PersonReader
@inline def cursor = collection.find(BSONDocument()).cursor[Person]()
val persons = Seq(person, person2, person3)
cursor.foldWhile(Nil: Seq[Person])({ (s, p) =>
if (p.name == "John") Cursor.Done(s :+ p)
else Cursor.Cont(s :+ p)
}, (_, e) => Cursor.Fail(e)) must beEqualTo(persons).await(timeoutMillis)
}
"read a document with error" in {
implicit val reader = BuggyPersonReader
val future = collection.find(BSONDocument()).one[Person].map(_ => 0).recover {
case e if e.getMessage == "hey hey hey" => -1
case e =>
/* e.printStackTrace() */ -2
}
future must beEqualTo(-1).await(timeoutMillis)
}
"read documents with error" >> {
implicit val reader = new SometimesBuggyPersonReader
@inline def cursor = collection.find(BSONDocument()).cursor[Person]()
"using collect" in {
val collect = cursor.collect[Vector]().map(_.size).recover {
case e if e.getMessage == "hey hey hey" => -1
case e =>
/* e.printStackTrace() */ -2
}
collect aka "first collect" must not(throwA[Exception]).
await(timeoutMillis) and (collect must beEqualTo(-1).
await(timeoutMillis))
}
"using foldWhile" in {
Await.result(cursor.foldWhile(0)((i, _) => Cursor.Cont(i + 1),
(_, e) => Cursor.Fail(e)), timeout) must throwA[CustomException]
}
"fallbacking to final value using foldWhile" in {
cursor.foldWhile(0)((i, _) => Cursor.Cont(i + 1),
(_, e) => Cursor.Done(-1)) must beEqualTo(-1).await(timeoutMillis)
}
"skiping failure using foldWhile" in {
cursor.foldWhile(0)((i, _) => Cursor.Cont(i + 1),
(_, e) => Cursor.Cont(-3)) must beEqualTo(-2).await(timeoutMillis)
}
}
"read docs skipping errors using collect" in {
implicit val reader = new SometimesBuggyPersonReader
val result = Await.result(collection.find(BSONDocument()).
cursor[Person]().collect[Vector](stopOnError = false), timeout)
//println(s"(read docs skipping errors using collect) got result $result")
result.length mustEqual 4
}
"write a doc with error" in {
implicit val writer = BuggyPersonWriter
collection.insert(person).map { lastError =>
//println(s"person write succeed?? $lastError")
0
}.recover {
case ce: CustomException => -1
case e =>
e.printStackTrace()
-2
} aka "write result" must beEqualTo(-1).await(timeoutMillis)
}
"write a JavaScript value" in {
collection.insert(BSONDocument("age" -> 101,
"name" -> BSONJavaScript("db.getName()"))).flatMap { _ =>
implicit val reader = PersonReader
collection.find(BSONDocument("age" -> 101)).one[BSONDocument].map(
_.flatMap(_.getAs[BSONJavaScript]("name")).map(_.value))
} aka "inserted" must beSome("db.getName()").await(timeoutMillis)
}
"find and update" >> {
implicit val reader = PersonReader
implicit val writer = PersonWriter
"by updating age of 'Joline', & returns the old document" in {
val updateOp = collection.updateModifier(
BSONDocument("$set" -> BSONDocument("age" -> 35)))
collection.findAndModify(BSONDocument("name" -> "Joline"), updateOp).
map(_.result[Person]) must beSome(person5).await(timeoutMillis)
}
"by updating age of 'James', & returns the updated document" in {
collection.findAndUpdate(
BSONDocument("name" -> "James"), person2.copy(age = 17),
fetchNewObject = true).map(_.result[Person]).
aka("result") must beSome(person2.copy(age = 17)).await(timeoutMillis)
}
"by inserting a new 'Foo' person (with upsert = true)" in {
val fooPerson = Person("Foo", -1)
collection.findAndUpdate(fooPerson, fooPerson,
fetchNewObject = true, upsert = true).
map(_.result[Person]) must beSome(fooPerson).await(timeoutMillis)
}
}
"find and remove" >> {
implicit val reader = PersonReader
"'Joline' using findAndModify" in {
collection.findAndModify(BSONDocument("name" -> "Joline"),
collection.removeModifier).map(_.result[Person]).
aka("removed person") must beSome(person5.copy(age = 35)).
await(timeoutMillis)
}
"'Foo' using findAndRemove" in {
collection.findAndRemove(BSONDocument("name" -> "Foo")).
map(_.result[Person]) aka "removed" must beSome(Person("Foo", -1)).
await(timeoutMillis)
}
}
"be renamed" >> {
"with failure" in {
db(s"foo_${System identityHashCode collection}").
rename("renamed").map(_ => false).recover({
case DefaultBSONCommandError(Some(13), Some(msg), _) if (
msg contains "renameCollection ") => true
case _ => false
}) must beTrue.await(timeoutMillis)
}
}
"be dropped" >> {
"successfully if exists (deprecated)" in {
val col = db(s"foo_${System identityHashCode collection}")
col.create().flatMap(_ => col.drop(false)).
aka("legacy drop") must beTrue.await(timeoutMillis)
}
"with failure if doesn't exist (deprecated)" in {
val col = db(s"foo_${System identityHashCode collection}")
Await.result(col.drop(), timeout).
aka("legacy drop") must throwA[Exception].like {
case GenericDatabaseException(_, Some(26)) => ok
}
}
"successfully if exist" in {
val col = db(s"foo_${System identityHashCode collection}")
col.create().flatMap(_ => col.drop(false)).
aka("drop") must beFalse.await(timeoutMillis)
}
"successfully if doesn't exist" in {
val col = db(s"foo_${System identityHashCode collection}")
col.drop(false) aka "drop" must beFalse.await(timeoutMillis)
}
}
}
"Index" should {
import reactivemongo.api.indexes._
val col = db(s"indexed_col_${hashCode}")
"be first created" in {
col.indexesManager.ensure(Index(
Seq("token" -> IndexType.Ascending), unique = true)).
aka("index creation") must beTrue.await(timeoutMillis)
}
"not be created if already exists" in {
col.indexesManager.ensure(Index(
Seq("token" -> IndexType.Ascending), unique = true)).
aka("index creation") must beFalse.await(timeoutMillis)
}
}
}
|
charleskubicek/ReactiveMongo
|
driver/src/test/scala/BSONCollectionSpec.scala
|
Scala
|
apache-2.0
| 12,878 |
package org.denigma.kappa.model
object Change extends Enumeration {
type Change = Value
val Removed, Added, Unchanged, Updated = Value
}
|
antonkulaga/kappa-notebook
|
kappa-model/shared/src/main/scala/org/denigma/kappa/model/Change.scala
|
Scala
|
mpl-2.0
| 142 |
import stainless.annotation._
object LastDynAssert {
@extern
def dynAssert(cond: Boolean): Unit = {
(??? : Unit)
} ensuring(cond)
def f() = {
dynAssert(false)
(dynAssert(false), 0)
assert(false)
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/verification/valid/LastDynAssert.scala
|
Scala
|
apache-2.0
| 228 |
package aia.faulttolerance
import java.io.File
import java.util.UUID
import akka.actor._
import akka.actor.SupervisorStrategy.{ Stop, Resume, Restart, Escalate }
import akka.actor.OneForOneStrategy
import scala.concurrent.duration._
import language.postfixOps
package dbstrategy2 {
object LogProcessingApp extends App {
val sources = Vector("file:///source1/", "file:///source2/")
val system = ActorSystem("logprocessing")
val databaseUrls = Vector(
"http://mydatabase1",
"http://mydatabase2",
"http://mydatabase3"
)
system.actorOf(
LogProcessingSupervisor.props(sources, databaseUrls),
LogProcessingSupervisor.name
)
}
object LogProcessingSupervisor {
def props(sources: Vector[String], databaseUrls: Vector[String]) =
Props(new LogProcessingSupervisor(sources, databaseUrls))
def name = "file-watcher-supervisor"
}
class LogProcessingSupervisor(
sources: Vector[String],
databaseUrls: Vector[String]
) extends Actor with ActorLogging {
var fileWatchers: Vector[ActorRef] = sources.map { source =>
val fileWatcher = context.actorOf(
Props(new FileWatcher(source, databaseUrls))
)
context.watch(fileWatcher)
fileWatcher
}
override def supervisorStrategy = AllForOneStrategy() {
case _: DiskError => Stop
}
def receive = {
case Terminated(fileWatcher) =>
fileWatchers = fileWatchers.filterNot(_ == fileWatcher)
if (fileWatchers.isEmpty) {
log.info("Shutting down, all file watchers have failed.")
context.system.terminate()
}
}
}
object FileWatcher {
case class NewFile(file: File, timeAdded: Long)
case class SourceAbandoned(uri: String)
}
class FileWatcher(source: String,
databaseUrls: Vector[String])
extends Actor with ActorLogging with FileWatchingAbilities {
register(source)
override def supervisorStrategy = OneForOneStrategy() {
case _: CorruptedFileException => Resume
}
val logProcessor = context.actorOf(
LogProcessor.props(databaseUrls),
LogProcessor.name
)
context.watch(logProcessor)
import FileWatcher._
def receive = {
case NewFile(file, _) =>
logProcessor ! LogProcessor.LogFile(file)
case SourceAbandoned(uri) if uri == source =>
log.info(s"$uri abandoned, stopping file watcher.")
self ! PoisonPill
case Terminated(`logProcessor`) =>
log.info(s"Log processor terminated, stopping file watcher.")
self ! PoisonPill
}
}
object LogProcessor {
def props(databaseUrls: Vector[String]) =
Props(new LogProcessor(databaseUrls))
def name = s"log_processor_${UUID.randomUUID.toString}"
// represents a new log file
case class LogFile(file: File)
}
class LogProcessor(databaseUrls: Vector[String])
extends Actor with ActorLogging with LogParsing {
require(databaseUrls.nonEmpty)
val initialDatabaseUrl = databaseUrls.head
var alternateDatabases = databaseUrls.tail
override def supervisorStrategy = OneForOneStrategy() {
case _: DbBrokenConnectionException => Restart
case _: DbNodeDownException => Stop
}
var dbWriter = context.actorOf(
DbWriter.props(initialDatabaseUrl),
DbWriter.name(initialDatabaseUrl)
)
context.watch(dbWriter)
import LogProcessor._
def receive = {
case LogFile(file) =>
val lines: Vector[DbWriter.Line] = parse(file)
lines.foreach(dbWriter ! _)
case Terminated(_) =>
if(alternateDatabases.nonEmpty) {
val newDatabaseUrl = alternateDatabases.head
alternateDatabases = alternateDatabases.tail
dbWriter = context.actorOf(
DbWriter.props(newDatabaseUrl),
DbWriter.name(newDatabaseUrl)
)
context.watch(dbWriter)
} else {
log.error("All Db nodes broken, stopping.")
self ! PoisonPill
}
}
}
object DbWriter {
def props(databaseUrl: String) =
Props(new DbWriter(databaseUrl))
def name(databaseUrl: String) =
s"""db-writer-${databaseUrl.split("/").last}"""
// A line in the log file parsed by the LogProcessor Actor
case class Line(time: Long, message: String, messageType: String)
}
class DbWriter(databaseUrl: String) extends Actor {
val connection = new DbCon(databaseUrl)
import DbWriter._
def receive = {
case Line(time, message, messageType) =>
connection.write(Map('time -> time,
'message -> message,
'messageType -> messageType))
}
override def postStop(): Unit = {
connection.close()
}
}
class DbCon(url: String) {
/**
* Writes a map to a database.
* @param map the map to write to the database.
* @throws DbBrokenConnectionException when the connection is broken. It might be back later
* @throws DbNodeDownException when the database Node has been removed from the database cluster. It will never work again.
*/
def write(map: Map[Symbol, Any]): Unit = {
//
}
def close(): Unit = {
//
}
}
@SerialVersionUID(1L)
class DiskError(msg: String)
extends Error(msg) with Serializable
@SerialVersionUID(1L)
class CorruptedFileException(msg: String, val file: File)
extends Exception(msg) with Serializable
@SerialVersionUID(1L)
class DbBrokenConnectionException(msg: String)
extends Exception(msg) with Serializable
@SerialVersionUID(1L)
class DbNodeDownException(msg: String)
extends Exception(msg) with Serializable
trait LogParsing {
import DbWriter._
    // Parses log files. Creates Line objects from the lines in the log file.
    // If the file is corrupt, a CorruptedFileException is thrown.
    def parse(file: File): Vector[Line] = {
      // implement the parser here; for now just return a dummy value
Vector.empty[Line]
}
}
trait FileWatchingAbilities {
def register(uri: String): Unit = {
}
}
}
|
RayRoestenburg/akka-in-action
|
chapter-fault-tolerance/src/main/scala/aia/faulttolerance/LogProcessing2.scala
|
Scala
|
mit
| 6,134 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.DefaultReadWriteTest
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.sql.{DataFrame, Row}
class BinarizerSuite extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {
@transient var data: Array[Double] = _
override def beforeAll(): Unit = {
super.beforeAll()
data = Array(0.1, -0.5, 0.2, -0.3, 0.8, 0.7, -0.1, -0.4)
}
test("params") {
ParamsSuite.checkParams(new Binarizer)
}
test("Binarize continuous features with default parameter") {
val defaultBinarized: Array[Double] = data.map(x => if (x > 0.0) 1.0 else 0.0)
val dataFrame: DataFrame = spark.createDataFrame(
data.zip(defaultBinarized)).toDF("feature", "expected")
val binarizer: Binarizer = new Binarizer()
.setInputCol("feature")
.setOutputCol("binarized_feature")
binarizer.transform(dataFrame).select("binarized_feature", "expected").collect().foreach {
case Row(x: Double, y: Double) =>
assert(x === y, "The feature value is not correct after binarization.")
}
}
test("Binarize continuous features with setter") {
val threshold: Double = 0.2
val thresholdBinarized: Array[Double] = data.map(x => if (x > threshold) 1.0 else 0.0)
val dataFrame: DataFrame = spark.createDataFrame(
data.zip(thresholdBinarized)).toDF("feature", "expected")
val binarizer: Binarizer = new Binarizer()
.setInputCol("feature")
.setOutputCol("binarized_feature")
.setThreshold(threshold)
binarizer.transform(dataFrame).select("binarized_feature", "expected").collect().foreach {
case Row(x: Double, y: Double) =>
assert(x === y, "The feature value is not correct after binarization.")
}
}
test("Binarize vector of continuous features with default parameter") {
val defaultBinarized: Array[Double] = data.map(x => if (x > 0.0) 1.0 else 0.0)
val dataFrame: DataFrame = spark.createDataFrame(Seq(
(Vectors.dense(data), Vectors.dense(defaultBinarized))
)).toDF("feature", "expected")
val binarizer: Binarizer = new Binarizer()
.setInputCol("feature")
.setOutputCol("binarized_feature")
binarizer.transform(dataFrame).select("binarized_feature", "expected").collect().foreach {
case Row(x: Vector, y: Vector) =>
assert(x == y, "The feature value is not correct after binarization.")
}
}
test("Binarize vector of continuous features with setter") {
val threshold: Double = 0.2
val defaultBinarized: Array[Double] = data.map(x => if (x > threshold) 1.0 else 0.0)
val dataFrame: DataFrame = spark.createDataFrame(Seq(
(Vectors.dense(data), Vectors.dense(defaultBinarized))
)).toDF("feature", "expected")
val binarizer: Binarizer = new Binarizer()
.setInputCol("feature")
.setOutputCol("binarized_feature")
.setThreshold(threshold)
binarizer.transform(dataFrame).select("binarized_feature", "expected").collect().foreach {
case Row(x: Vector, y: Vector) =>
assert(x == y, "The feature value is not correct after binarization.")
}
}
test("read/write") {
val t = new Binarizer()
.setInputCol("myInputCol")
.setOutputCol("myOutputCol")
.setThreshold(0.1)
testDefaultReadWrite(t)
}
}
|
gioenn/xSpark
|
mllib/src/test/scala/org/apache/spark/ml/feature/BinarizerSuite.scala
|
Scala
|
apache-2.0
| 4,294 |
package gh2011.models
import net.liftweb.json.JsonAST.JValue
case class IssueCommentEventPayload(repo: String, actor: String, issue_id: Long, comment_id: Long,
actor_gravatar: String)
object IssueCommentEventPayload
{
def apply(json: JValue): Option[IssueCommentEventPayload] =
{
val n2s = gh3.node2String(json)(_)
val n2l = gh3.node2Long(json)(_)
val n2os = gh3.node2OptionString(json)(_)
val repo = n2s("repo")
val actor = n2s("actor")
val issue_id = n2l("issue_id")
val comment_id = n2l("comment_id")
val actor_gravatar = n2s("actor_gravatar")
val params = Seq(repo, actor, issue_id, comment_id, actor_gravatar)
if(params.forall(_.isDefined))
Some(IssueCommentEventPayload(repo.get, actor.get, issue_id.get, comment_id.get, actor_gravatar.get))
else None
}
}
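// Rough usage sketch (the JSON literal and object name below are assumptions for
// illustration only): `apply` yields Some(payload) when every expected field parses,
// and None otherwise.
object IssueCommentEventPayloadSketch {
  import net.liftweb.json.parse

  def main(args: Array[String]): Unit = {
    val json = parse(
      """{"repo":"octo/repo","actor":"octocat","issue_id":1,"comment_id":2,"actor_gravatar":"deadbeef"}""")
    println(IssueCommentEventPayload(json))
  }
}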
|
mgoeminne/github_etl
|
src/main/scala/gh2011/models/IssueCommentEventPayload.scala
|
Scala
|
mit
| 888 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.feature
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.distributed.RowMatrix
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.util.TestingUtils._
class PCASuite extends SparkFunSuite with MLlibTestSparkContext {
private val data = Array(
Vectors.sparse(5, Seq((1, 1.0), (3, 7.0))),
Vectors.dense(2.0, 0.0, 3.0, 4.0, 5.0),
Vectors.dense(4.0, 0.0, 0.0, 6.0, 7.0)
)
private lazy val dataRDD = sc.parallelize(data, 2)
test("Correct computing use a PCA wrapper") {
val k = dataRDD.count().toInt
val pca = new PCA(k).fit(dataRDD)
val mat = new RowMatrix(dataRDD)
val (pc, explainedVariance) = mat.computePrincipalComponentsAndExplainedVariance(k)
val pca_transform = pca.transform(dataRDD).collect()
val mat_multiply = mat.multiply(pc).rows.collect()
pca_transform.zip(mat_multiply).foreach { case (calculated, expected) =>
assert(calculated ~== expected relTol 1e-8)
}
assert(pca.explainedVariance ~== explainedVariance relTol 1e-8)
}
test("memory cost computation") {
assert(PCAUtil.memoryCost(10, 100) < Int.MaxValue)
// check overflowing
assert(PCAUtil.memoryCost(40000, 60000) > Int.MaxValue)
}
test("number of features more than 65535") {
val data1 = sc.parallelize(Array(
Vectors.dense((1 to 100000).map(_ => 2.0).to[scala.Vector].toArray),
Vectors.dense((1 to 100000).map(_ => 0.0).to[scala.Vector].toArray)
), 2)
val pca = new PCA(2).fit(data1)
// Eigen values should not be negative
assert(pca.explainedVariance.values.forall(_ >= 0))
// Norm of the principal component should be 1.0
assert(Math.sqrt(pca.pc.values.slice(0, 100000)
.map(Math.pow(_, 2)).sum) ~== 1.0 relTol 1e-8)
// Leading explainedVariance is 1.0
assert(pca.explainedVariance(0) ~== 1.0 relTol 1e-12)
// Leading principal component is '1' vector
val firstValue = pca.pc.values(0)
pca.pc.values.slice(0, 100000).map(values =>
assert(values ~== firstValue relTol 1e-12))
}
}
|
WindCanDie/spark
|
mllib/src/test/scala/org/apache/spark/mllib/feature/PCASuite.scala
|
Scala
|
apache-2.0
| 2,957 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.utils
import java.util
import java.util.Collections
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.{CompletableFuture, ExecutorService, Executors}
import java.util.function.{Consumer, Supplier}
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.api.{TableEnvironment, TableSchema}
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE
import org.apache.flink.table.descriptors.DescriptorProperties
import org.apache.flink.table.descriptors.Schema.SCHEMA
import org.apache.flink.table.factories.TableSourceFactory
import org.apache.flink.table.functions.{AsyncTableFunction, FunctionContext, TableFunction}
import org.apache.flink.table.planner.runtime.utils.InMemoryLookupableTableSource.{InMemoryAsyncLookupFunction, InMemoryLookupFunction, RESOURCE_COUNTER}
import org.apache.flink.table.sources._
import org.apache.flink.table.types.DataType
import org.apache.flink.table.utils.EncodingUtils
import org.apache.flink.types.Row
import org.apache.flink.util.Preconditions
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.mutable
/**
* A [[LookupableTableSource]] which stores table in memory, this is mainly used for testing.
*/
class InMemoryLookupableTableSource(
schema: TableSchema,
data: List[Row],
asyncEnabled: Boolean,
bounded: Boolean = false)
extends LookupableTableSource[Row]
with StreamTableSource[Row] {
override def getLookupFunction(lookupKeys: Array[String]): TableFunction[Row] = {
new InMemoryLookupFunction(convertDataToMap(lookupKeys), RESOURCE_COUNTER)
}
override def getAsyncLookupFunction(lookupKeys: Array[String]): AsyncTableFunction[Row] = {
new InMemoryAsyncLookupFunction(convertDataToMap(lookupKeys), RESOURCE_COUNTER)
}
private def convertDataToMap(lookupKeys: Array[String]): Map[Row, List[Row]] = {
val lookupFieldIndexes = lookupKeys.map(schema.getFieldNames.indexOf(_))
val map = mutable.HashMap[Row, List[Row]]()
data.foreach { row =>
val key = Row.of(lookupFieldIndexes.map(row.getField): _*)
val oldValue = map.get(key)
if (oldValue.isDefined) {
map.put(key, oldValue.get ++ List(row))
} else {
map.put(key, List(row))
}
}
map.toMap
}
override def getDataStream(execEnv: StreamExecutionEnvironment): DataStream[Row] = {
null
}
override def isAsyncEnabled: Boolean = asyncEnabled
override def getProducedDataType: DataType = schema.toRowDataType
override def getTableSchema: TableSchema = schema
override def isBounded: Boolean = bounded
}
class InMemoryLookupableTableFactory extends TableSourceFactory[Row] {
override def createTableSource(properties: util.Map[String, String]): TableSource[Row] = {
val dp = new DescriptorProperties
dp.putProperties(properties)
val tableSchema = dp.getTableSchema(SCHEMA)
val serializedData = dp.getString("data")
val data = EncodingUtils.decodeStringToObject(serializedData, classOf[List[Row]])
val asyncEnabled = dp.getOptionalBoolean("is-async").orElse(false)
val bounded = dp.getOptionalBoolean("is-bounded").orElse(false)
new InMemoryLookupableTableSource(tableSchema, data, asyncEnabled, bounded)
}
override def requiredContext(): util.Map[String, String] = {
val context = new util.HashMap[String, String]()
context.put(CONNECTOR_TYPE, "InMemoryLookupableTable")
context
}
override def supportedProperties(): util.List[String] = {
val supported = new util.ArrayList[String]()
supported.add("*")
supported
}
}
object InMemoryLookupableTableSource {
val RESOURCE_COUNTER = new AtomicInteger()
def createTemporaryTable(
tEnv: TableEnvironment,
isAsync: Boolean,
data: List[Row],
schema: TableSchema,
tableName: String,
isBounded: Boolean = false): Unit = {
val source = new InMemoryLookupableTableSource(schema, data, isAsync, isBounded)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(tableName, source)
}
/**
* A lookup function which find matched rows with the given fields.
*/
class InMemoryLookupFunction(
data: Map[Row, List[Row]],
resourceCounter: AtomicInteger)
extends TableFunction[Row] {
override def open(context: FunctionContext): Unit = {
resourceCounter.incrementAndGet()
}
@varargs
def eval(inputs: AnyRef*): Unit = {
val key = Row.of(inputs: _*)
Preconditions.checkArgument(!inputs.contains(null),
s"Lookup key %s contains null value, which would not happen.", key)
data.get(key) match {
case Some(list) => list.foreach(result => collect(result))
case None => // do nothing
}
}
override def close(): Unit = {
resourceCounter.decrementAndGet()
}
}
/**
* An async lookup function which find matched rows with the given fields.
*/
@SerialVersionUID(1L)
class InMemoryAsyncLookupFunction(
data: Map[Row, List[Row]],
resourceCounter: AtomicInteger,
delayedReturn: Int = 0)
extends AsyncTableFunction[Row] {
@transient
var executor: ExecutorService = _
override def open(context: FunctionContext): Unit = {
resourceCounter.incrementAndGet()
executor = Executors.newSingleThreadExecutor()
}
@varargs
def eval(resultFuture: CompletableFuture[util.Collection[Row]], inputs: AnyRef*): Unit = {
val key = Row.of(inputs: _*)
Preconditions.checkArgument(!inputs.contains(null),
s"Lookup key %s contains null value, which would not happen.", key)
CompletableFuture
.supplyAsync(new CollectionSupplier(data, key), executor)
.thenAccept(new CollectionConsumer(resultFuture))
}
override def close(): Unit = {
resourceCounter.decrementAndGet()
if (null != executor && !executor.isShutdown) {
executor.shutdown()
}
}
private class CollectionSupplier(data: Map[Row, List[Row]], key: Row)
extends Supplier[util.Collection[Row]] {
override def get(): util.Collection[Row] = {
val list = data.get(key)
if (list.isDefined && list.get.nonEmpty) {
list.get.asJavaCollection
} else {
Collections.emptyList()
}
}
}
private class CollectionConsumer(resultFuture: CompletableFuture[util.Collection[Row]])
extends Consumer[util.Collection[Row]] {
override def accept(results: util.Collection[Row]): Unit = {
resultFuture.complete(results)
}
}
}
}
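// Hedged usage sketch (the schema, rows, and table name are made up for illustration):
// registering a small in-memory lookup table for a planner test.
object InMemoryLookupableTableSourceSketch {
  def register(tEnv: TableEnvironment): Unit = {
    val schema = TableSchema.builder()
      .field("id", org.apache.flink.table.api.DataTypes.INT())
      .field("name", org.apache.flink.table.api.DataTypes.STRING())
      .build()
    val rows = List(Row.of(Int.box(1), "Julian"), Row.of(Int.box(2), "Hello"))
    InMemoryLookupableTableSource.createTemporaryTable(
      tEnv, isAsync = false, data = rows, schema = schema, tableName = "LookupT")
  }
}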
|
wwjiang007/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/utils/InMemoryLookupableTableSource.scala
|
Scala
|
apache-2.0
| 7,654 |
package chapter02
/*
Implement a SyncVar class with the following interface:
class SyncVar[T] {
def get(): T = ???
def put(x: T): Unit = ???
}
A SyncVar object is used to exchange values between two or more threads. When created, the SyncVar object is empty:
* Calling get throws an exception
* Calling put adds a value to the SyncVar object
After a value is added to a SyncVar object, we can say that it is non-empty:
* Calling get returns the current value, and changes the state to empty
* Calling put throws an exception
*/
object Exercise03 {
class SyncVar[T] {
private var value: Option[T] = None
def get(): T = {
if (value.isEmpty) throw new IllegalStateException()
else {
val result = value.get
value = None
result
}
}
def put(x: T): Unit = {
if (value.isEmpty) value = Some(x)
else throw new IllegalStateException()
}
}
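  // Illustrative single-threaded sketch (not part of the original exercise): put fills
  // the empty SyncVar and get empties it again, so the two calls must alternate.
  def demo(): Unit = {
    val sv = new SyncVar[Int]
    sv.put(42)        // the SyncVar is now non-empty
    println(sv.get()) // prints 42; the SyncVar is empty again
    sv.put(43)        // putting is allowed again after get()
  }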
}
|
vsuharnikov/books-exercises
|
scala/learning-concurrent-programming-in-scala/src/main/scala/chapter02/Exercise03.scala
|
Scala
|
mit
| 922 |
package jp.co.bizreach.play2stub
import java.io.File
import com.typesafe.config.{ConfigRenderOptions, ConfigFactory}
import play.api.Configuration
import play.api.test.FakeApplication
import play.api.test.Helpers._
import scala.collection.JavaConverters._
trait FakePlayHelper {
def PlayApp(configs:(String, Any)*) = {
val configFromFile = ConfigFactory.parseFile(
new File(this.getClass.getResource("/conf/application.conf").toURI))
.entrySet.asScala.map(entry => entry.getKey -> entry.getValue.render(ConfigRenderOptions.concise())).toMap
FakeApplication(
path = new File(this.getClass.getResource("/").toURI),
additionalPlugins = Seq(
"jp.co.bizreach.play2handlebars.HandlebarsPlugin",
"jp.co.bizreach.play2stub.StubPlugin"
),
additionalConfiguration = //configFromFile ++
configs.toSet.toMap
//+ ("play2stub.view-root" -> "/views")
//+ ("play2stub.data-root" -> "/data")
)}
def runApp[T](app: FakeApplication)(block: FakeApplication => T): T = {
running(app) {
block(app)
}
}
def parseConfigFile(path: String):Map[String, String] = {
new Configuration(ConfigFactory.load(path))
.entrySet.map(entry => entry._1 -> entry._2.render()).toMap
}
}
|
bizreach/play2-stub
|
src/test/scala/jp/co/bizreach/play2stub/FakePlayHelper.scala
|
Scala
|
apache-2.0
| 1,276 |
package com.twitter.diffy.proxy
import java.net.InetSocketAddress
import com.twitter.util.Duration
case class Settings(
datacenter: String,
  servicePort: InetSocketAddress,
candidate: Target,
primary: Target,
secondary: Target,
protocol: String,
clientId: String,
pathToThriftJar: String,
serviceClass: String,
serviceName: String,
apiRoot: String,
enableThriftMux: Boolean,
relativeThreshold: Double,
absoluteThreshold: Double,
teamEmail: String,
emailDelay: Duration,
rootUrl: String,
allowHttpSideEffects: Boolean,
excludeHttpHeadersComparison: Boolean,
skipEmailsWhenNoErrors: Boolean)
case class Target(path: String)
|
shaunstanislaus/diffy
|
src/main/scala/com/twitter/diffy/proxy/Settings.scala
|
Scala
|
apache-2.0
| 664 |
package org.jetbrains.plugins.scala.lang.completion
import com.intellij.codeInsight.completion._
import com.intellij.codeInsight.lookup.{LookupElement, LookupElementWeigher}
import com.intellij.patterns.PlatformPatterns
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.util.{Consumer, ProcessingContext}
import org.jetbrains.plugins.scala.lang.completion.lookups.ScalaLookupItem
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDefinition
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
/**
* Created by kate
* on 1/29/16
*/
class ScalaCaseClassParametersNameContributer extends ScalaCompletionContributor {
extend(CompletionType.BASIC, PlatformPatterns.psiElement(), new CompletionProvider[CompletionParameters] {
def addCompletions(parameters: CompletionParameters, context: ProcessingContext, _result: CompletionResultSet) {
val position = positionFromParameters(parameters)
val constructorPattern = PsiTreeUtil.getContextOfType(position, classOf[ScConstructorPattern])
if (constructorPattern == null) return
val classRef = constructorPattern.asInstanceOf[ScConstructorPattern].ref
val caseClassParams = classRef.resolve() match {
case funcDef: ScFunctionDefinition if funcDef.syntheticCaseClass.isDefined =>
funcDef.syntheticCaseClass.get.parameters
case fundef: ScFunctionDefinition
if fundef.getName == "unapply" || fundef.getName == "unapplySeq" =>
fundef.getParameterList.params
case _ => return
}
if (caseClassParams.isEmpty) return
val parameterWithPosition = getCorrespondedParameterForPosition(position, caseClassParams)
      val correspondedParameter = parameterWithPosition.parameter
val myPosition = parameterWithPosition.position
val result = addByOrderSorter(parameters, _result, myPosition, caseClassParams)
byClassParamCompletionsItems(caseClassParams, result)
      byTypeCompletionsItems(position, correspondedParameter, result)
}
def byTypeCompletionsItems(position: PsiElement, parameter: Option[ScParameter], result: CompletionResultSet) = {
position.getContext match {
case pattern: ScPattern if pattern.expectedType.isDefined && parameter.isDefined =>
val lookups =
NameSuggester.suggestNamesByType(pattern.expectedType.get).map(name => new ScalaLookupItem(parameter.get, name))
lookups.foreach(l => addLocalScalaLookUpItem(result, l))
case _ =>
}
}
def byClassParamCompletionsItems(params: Seq[ScParameter], result: CompletionResultSet): Unit = {
params.map(p => new ScalaLookupItem(p, p.name)).foreach(l => addLocalScalaLookUpItem(result, l))
}
def addByOrderSorter(parameters: CompletionParameters, result: CompletionResultSet,
currentPosition: Int, classParams: Seq[ScParameter]): CompletionResultSet = {
class PreferByParamsOrder extends LookupElementWeigher("orderByPosition") {
override def weigh(item: LookupElement): Comparable[_] = {
ScalaLookupItem.original(item) match {
case s: ScalaLookupItem =>
s.element match {
case param: ScParameter if param.name == s.name /*not equals when name computed by type*/ =>
val positionInClassParameters = classParams.indexOf(param)
if (currentPosition == positionInClassParameters) -1
else math.abs(currentPosition - positionInClassParameters)
case _ => 0
}
case _ => null
}
}
}
var sorter = CompletionSorter.defaultSorter(parameters, result.getPrefixMatcher)
sorter = sorter.weighAfter("prefix", new PreferByParamsOrder())
result.withRelevanceSorter(sorter)
}
private def addLocalScalaLookUpItem(result: CompletionResultSet, lookupElement: ScalaLookupItem): Unit = {
lookupElement.isLocalVariable = true
result.addElement(lookupElement)
}
private def getCorrespondedParameterForPosition(position: PsiElement, classParams: Seq[ScParameter]): ParameterWithPosition = {
val me = PsiTreeUtil.getContextOfType(position, classOf[ScPattern])
if (me == null) return ParameterWithPosition(None, -1)
val patterns = Option(PsiTreeUtil.getContextOfType(position, classOf[ScPatternArgumentList])).map(_.patterns)
if (patterns.isEmpty || (patterns.isDefined && patterns.get.length > classParams.length))
        return ParameterWithPosition(None, -1) // trying to type more params than the class has
val myPosition = patterns.get.indexOf(me)
      val correspondedParameter =
if ((myPosition >= 0) && (myPosition != classParams.length)) Some(classParams.apply(myPosition)) else None
      ParameterWithPosition(correspondedParameter, myPosition)
}
case class ParameterWithPosition(parameter: Option[ScParameter], position: Int)
})
/**
    * Enables completion for objects with unapply/unapplySeq methods at the case label position.
    * A case label starting with a lowercase letter is treated as a ScReferencePattern and is not handled by
    * ScalaBasicCompletionContributor, so this handler adds opening and closing brackets to treat the element
    * as a ScCodeReferenceElement and runs ScalaBasicCompletionContributor.
*/
extend(CompletionType.BASIC, PlatformPatterns.psiElement().withParent(classOf[ScReferencePattern]).withSuperParent(2, classOf[ScCaseClause]), new CompletionProvider[CompletionParameters] {
override def addCompletions(parameters: CompletionParameters, context: ProcessingContext, result: CompletionResultSet): Unit = {
def typeIdentifierIn(element: Option[ScPattern]): Option[PsiElement] =
element.flatMap(_.depthFirst.find(_.getNode.getElementType == ScalaTokenTypes.tIDENTIFIER))
def createCaseClassPatern(text: String, pattern: PsiElement): Option[ScPattern] = {
Option(ScalaPsiElementFactory.createCaseClauseFromTextWithContext(text + "()",
pattern.getContext.getContext, pattern.getContext, pattern.getManager)).flatMap(_.pattern)
}
class MyConsumer(resultSet: CompletionResultSet) extends Consumer[CompletionResult] {
override def consume(completionResult: CompletionResult): Unit = {
completionResult.getLookupElement.getPsiElement match {
case obj: ScObject =>
obj.members.foreach {
case fundef: ScFunctionDefinition if fundef.getName == "unapply" || fundef.getName == "unapplySeq" =>
resultSet.consume(completionResult.getLookupElement)
case _ =>
}
case _ =>
}
}
}
def handleCompletionForLowerLetterObject(pattern: PsiElement, result: CompletionResultSet, completionParameters: CompletionParameters): Unit = {
val currentPrefix = result.getPrefixMatcher.getPrefix
val element = createCaseClassPatern(currentPrefix, pattern)
val typeIdentifierInElement = typeIdentifierIn(element)
typeIdentifierInElement.foreach { psiElement =>
val identifier = completionParameters.withPosition(psiElement, psiElement.getTextRange.getEndOffset)
result.runRemainingContributors(identifier, new MyConsumer(result), true)
}
}
val position = positionFromParameters(parameters)
handleCompletionForLowerLetterObject(position, result, parameters)
}
})
}
|
katejim/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/completion/ScalaCaseClassParametersNameContributer.scala
|
Scala
|
apache-2.0
| 7,868 |
class i0 {
private var main(i1: String): Unit = {
class i2 extends AnyRef {
def i3 = 0
}
def this() = { this(i4); i3 }
}
}
|
som-snytt/dotty
|
tests/fuzzy/comment3.scala
|
Scala
|
apache-2.0
| 123 |
/*
* Copyright 2016 Branislav Lazic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io
import java.time.LocalDate
import com.google.common.util.concurrent.{ FutureCallback, Futures, ListenableFuture }
import io.circe.Encoder
import scala.concurrent.{ Future, Promise }
import scala.reflect.{ ClassTag, classTag }
package object akkabot {
type Traversable[+A] = scala.collection.immutable.Traversable[A]
type Iterable[+A] = scala.collection.immutable.Iterable[A]
type Seq[+A] = scala.collection.immutable.Seq[A]
type IndexedSeq[+A] = scala.collection.immutable.IndexedSeq[A]
implicit class RichResultSetFuture[ResultSet](rsf: ListenableFuture[ResultSet]) {
def toFuture: Future[ResultSet] = {
val promise = Promise[ResultSet]()
Futures.addCallback(rsf, new FutureCallback[ResultSet] {
def onFailure(t: Throwable): Unit = promise.failure(t)
def onSuccess(result: ResultSet): Unit = promise.success(result)
})
promise.future
}
}
def className[A: ClassTag]: String = classTag[A].runtimeClass.getName
implicit val localDateEncoder: Encoder[LocalDate] = Encoder.encodeString.contramap[LocalDate](_.toString)
}
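// Illustrative sketch (Futures.immediateFuture is Guava's standard helper; the object
// name and usage are assumptions): RichResultSetFuture bridges a ListenableFuture into
// a scala.concurrent.Future that can be awaited or composed.
object RichResultSetFutureSketch {
  import scala.concurrent.Await
  import scala.concurrent.duration._
  import akkabot._

  def main(args: Array[String]): Unit = {
    val bridged: Future[String] = Futures.immediateFuture("done").toFuture
    println(Await.result(bridged, 1.second)) // prints "done"
  }
}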
|
BranislavLazic/akkabot
|
akkabot-ui/src/main/scala/io/akkabot/package.scala
|
Scala
|
apache-2.0
| 1,712 |
package com.nyavro.manythanks.ws.contact
case class Contact(id: Long, phone: String)
|
nyavro/manythanks
|
webService/src/main/scala/com/nyavro/manythanks/ws/contact/Contact.scala
|
Scala
|
apache-2.0
| 84 |
package lila.coach
import lila.common.paginator.Paginator
final class CoachPager(api: CoachApi) {
val maxPerPage = 10
import CoachPager._
def apply(order: Order, page: Int): Fu[Paginator[Coach.WithUser]] =
api.listedWithUserList.map { all =>
Paginator.fromList(
list = all sortWith order.predicate,
currentPage = page,
maxPerPage = maxPerPage)
}
}
object CoachPager {
sealed abstract class Order(
val key: String,
val name: String,
val predicate: (Coach.WithUser, Coach.WithUser) => Boolean)
object Order {
case object Login extends Order("login", "Last login",
(a, b) => a.user.timeNoSee < b.user.timeNoSee)
case object LichessRating extends Order("rating", "Lichess rating",
(a, b) => a.user.perfs.bestStandardRating > b.user.perfs.bestStandardRating)
case object NbReview extends Order("review", "User reviews",
(a, b) => a.coach.nbReviews > b.coach.nbReviews)
case object Alphabetical extends Order("alphabetical", "Alphabetical",
(a, b) => a.user.username < b.user.username)
val default = Login
val all = List(Login, LichessRating, NbReview, Alphabetical)
def apply(key: String): Order = all.find(_.key == key) | default
}
}
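// Illustrative sketch (object name is an assumption): Order.apply falls back to the
// default ordering for unknown keys, so a bad query parameter can never break sorting.
object CoachPagerOrderSketch {
  def main(args: Array[String]): Unit = {
    println(CoachPager.Order("rating").name)  // "Lichess rating"
    println(CoachPager.Order("unknown").name) // "Last login" (the default)
  }
}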
|
clarkerubber/lila
|
modules/coach/src/main/CoachPager.scala
|
Scala
|
agpl-3.0
| 1,253 |
package reshapes.figures
import java.awt.Color
import java.awt.Graphics2D
import java.awt.Point
import reshapes.drawing.DrawingSpaceState
class Freedraw(
drawingSpaceState: DrawingSpaceState,
strokeWidth: Int = 1,
color: Color = Color.BLACK,
current: Int = 0,
path: List[Point] = List.empty)
extends Shape(drawingSpaceState, strokeWidth, color, current, path) with Movable with Resizable {
override def doDraw(g: Graphics2D) =
for ((a, b) <- toLines)
g.drawLine(a.x, a.y, b.x, b.y)
override def toLines() =
path zip path.tail
override def copy(
drawingSpaceState: DrawingSpaceState,
strokeWidth: Int,
color: Color,
current: Int,
path: List[Point]) =
new Freedraw(drawingSpaceState, strokeWidth, color, current, path)
}
|
volkc/REScala
|
Examples/Shapes/src/main/scala/reshapes/figures/Freedraw.scala
|
Scala
|
apache-2.0
| 807 |
package de.leanovate.swaggercheck.shrinkable
import com.fasterxml.jackson.core.JsonGenerator
import org.scalacheck.Shrink
import scala.collection.immutable.Stream._
/**
* Json object.
*
* @param required optional set of required fields for shrinking
* @param order optional order of fields
* @param fields the fields of the object
*/
case class CheckJsObject(
required: Set[String],
order: Option[Seq[String]],
fields: Map[String, CheckJsValue]
) extends CheckJsValue {
override def generate(json: JsonGenerator): Unit = {
json.writeStartObject()
order match {
case Some(fieldNames) => fieldNames.foreach {
name =>
fields.get(name).foreach {
value =>
json.writeFieldName(name)
value.generate(json)
}
}
case None =>
fields.foreach {
case (key, value) =>
json.writeFieldName(key)
value.generate(json)
}
}
json.writeEndObject()
}
override def shrink: Stream[CheckJsObject] = {
removeChunks(fields.keySet -- required).map {
removeFields =>
CheckJsObject(required, order, fields -- removeFields)
}.append(shrinkOne(fields))
}
def removeChunks(names: Traversable[String]): Stream[Traversable[String]] = {
if (names.isEmpty)
empty
else if (names.tail.isEmpty)
Stream(names)
else {
val half = names.size / 2
val left = names.take(half)
val right = names.drop(half)
cons(names, removeChunks(left).append(removeChunks(right)))
}
}
private def shrinkOne(remaining: Map[String, CheckJsValue]): Stream[CheckJsObject] =
if (remaining.isEmpty)
empty
else {
val head = remaining.head
val tail = remaining.tail
val headShrink = Shrink.shrink[CheckJsValue](head._2).map(v => CheckJsObject(required, order, fields.updated(head._1, v)))
headShrink.append(shrinkOne(tail))
}
}
object CheckJsObject {
def empty: CheckJsObject = CheckJsObject(Set.empty, None, Map.empty)
/**
* Create a fixed json object that will not shrink.
*/
def fixed(fields: Seq[(String, CheckJsValue)]): CheckJsObject =
CheckJsObject(fields.map(_._1).toSet, Some(fields.map(_._1)), fields.toMap)
implicit lazy val shrinkJsValue: Shrink[CheckJsObject] = Shrink[CheckJsObject](_.shrink)
}
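// Minimal usage sketch (field name and object name are assumptions): a `fixed` object
// marks every field as required, so shrinking never drops fields and only recurses
// into the field values.
object CheckJsObjectSketch {
  def main(args: Array[String]): Unit = {
    val fixed = CheckJsObject.fixed(Seq("payload" -> CheckJsObject.empty))
    println(fixed.shrink.take(3).toList)
  }
}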
|
leanovate/swagger-check
|
json-schema-gen/src/main/scala/de/leanovate/swaggercheck/shrinkable/CheckJsObject.scala
|
Scala
|
mit
| 2,465 |
import compiletime._
import compiletime.ops.int._
object Test {
type TupleTypeIndex[T <: Tuple, C] <: Int = T match {
case C *: t => 0
case h *: t => S[TupleTypeIndex[t, C]]
}
trait TupleExtractor[TT <: Tuple, C] {
def get(t: TT): C
}
given [T <: Tuple, C, EV <: TupleTypeIndex[T, C]]: TupleExtractor[T, C] with {
def get(t: T): C = t.toArray.apply(toIntC[TupleTypeIndex[T, C]]).asInstanceOf[C] // error
}
transparent inline def toIntC[N <: Int]: Int =
inline constValue[N] match
case 0 => 0
case _: S[n1] => 1 + toIntC[n1]
}
|
dotty-staging/dotty
|
tests/neg/i11985.scala
|
Scala
|
apache-2.0
| 585 |