code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M)
---|---|---|---|---|---
package com.twitter.finagle.partitioning
import com.twitter.conversions.DurationOps._
import com.twitter.finagle
import com.twitter.finagle.addr.WeightedAddress
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.partitioning.PartitionNodeManager.NoPartitionException
import com.twitter.finagle.partitioning.zk.ZkMetadata
import com.twitter.finagle.server.utils.StringServer
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle._
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.util.{Activity, Await, Awaitable, Duration, Future, Time, Var}
import java.net.{InetAddress, InetSocketAddress}
import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.immutable
import org.scalatest.funsuite.AnyFunSuite
class PartitionNodeManagerTest extends AnyFunSuite {
def await[T](a: Awaitable[T], d: Duration = 5.seconds): T =
Await.result(a, d)
def newAddress(inet: InetSocketAddress, weight: Int): Address = {
val shardId = inet.getPort
val md = ZkMetadata.toAddrMetadata(ZkMetadata(Some(shardId)))
val addr = new Address.Inet(inet, md) {
override def toString: String = s"Address(${inet.getPort})-($shardId)"
}
WeightedAddress(addr, weight)
}
class Ctx(addressSize: Int) {
val stringService = new Service[String, String] {
def apply(request: String): Future[String] = Future.value("service")
val isOpen = new AtomicBoolean(true)
override def status: Status = if (isOpen.get()) Status.Open else Status.Closed
override def close(deadline: Time): Future[Unit] = {
isOpen.set(false)
Future.Done
}
}
val inetAddresses =
(0 until addressSize).map(_ => new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
// assign ports to the localhost addresses
val fakeServers = inetAddresses.map { inet => StringServer.server.serve(inet, stringService) }
val fixedInetAddresses = fakeServers.map(_.boundAddress.asInstanceOf[InetSocketAddress])
val weightedAddress = fixedInetAddresses.map(newAddress(_, 1))
val varAddr = Var(Addr.Bound(weightedAddress: _*))
val factory: Stackable[ServiceFactory[String, String]] =
new Stack.Module1[LoadBalancerFactory.Dest, ServiceFactory[String, String]] {
val role = LoadBalancerFactory.role
val description: String = "mock the Stack[ServiceFactory[Req, Rep] for node manager"
def make(
param: LoadBalancerFactory.Dest,
next: ServiceFactory[String, String]
): ServiceFactory[String, String] = ServiceFactory.const(stringService)
}
val stack = new StackBuilder[ServiceFactory[String, String]](nilStack[String, String])
.push(factory)
.result
val sr = new InMemoryStatsReceiver
val defaultParams =
Stack.Params.empty + LoadBalancerFactory.Dest(varAddr) + finagle.param.Stats(sr)
// p0(0), p1(1,2), p2(3,4,5), p3(6,...)
// use port number to mock shardId
def getLogicalPartition(varAddresses: Var[Seq[InetSocketAddress]]): Int => Seq[Int] = {
replica =>
val addresses = varAddresses.sample()
require(addresses.size >= 7)
val partitionPositions = List(0.to(0), 1.to(2), 3.to(5), 6.until(addresses.size))
val position = addresses.indexWhere(_.getPort == replica)
val partitionId = partitionPositions.indexWhere(range => range.contains(position))
scala.Predef.assert(partitionId > -1)
Seq(partitionId)
}
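// Worked example (added for illustration, not part of the original test): with 7
// addresses, partitionPositions is List(0 to 0, 1 to 2, 3 to 5, 6 until 7); a replica
// whose port sits at position 4 falls in the range 3 to 5, so indexWhere maps it to
// logical partition 2, and the port at position 6 maps to partition 3.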
}
def noReshardingManager(
stack: Stack[ServiceFactory[String, String]],
params: Stack.Params,
getLogicalPartition: Int => Seq[Int] = Seq(_)
): PartitionNodeManager[String, String, Unit, PartialFunction[String, Future[String]]] = {
new PartitionNodeManager(
stack,
Activity.value(()),
_ => { case s => Future.value(s) },
_ => getLogicalPartition,
params
)
}
def reshardingManager(
stack: Stack[ServiceFactory[String, String]],
params: Stack.Params,
pfMaker: Int => PartialFunction[String, Future[String]] = _ => { case s => Future.value(s) },
getLogicalPartition: Int => Int => Seq[Int] = _ => Seq(_),
observable: Activity[Int]
): PartitionNodeManager[String, String, Int, PartialFunction[String, Future[String]]] = {
new PartitionNodeManager(
stack,
observable,
pfMaker,
getLogicalPartition,
params
)
}
test("Remove a partition, each node is a partition") {
new Ctx(addressSize = 5) {
val nodeManager = noReshardingManager(stack, defaultParams)
val svc0 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
varAddr.update(Addr.Bound(weightedAddress.drop(1): _*))
intercept[NoPartitionException] {
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
}
val svc1 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(1).getPort))
assert(await(svc1("any")) == "service")
}
}
test("Add a partition, each node is a partition") {
new Ctx(addressSize = 5) {
val nodeManager = noReshardingManager(stack, defaultParams)
assert(sr.gauges(Seq("partitioner", "nodes"))() == 5)
val inet = new InetSocketAddress(InetAddress.getLoopbackAddress, 0)
// to get the port
val newIsa =
StringServer.server.serve(inet, stringService).boundAddress.asInstanceOf[InetSocketAddress]
intercept[NoPartitionException] {
await(nodeManager.snapshotSharder().getServiceByPartitionId(newIsa.getPort))
}
varAddr.update(Addr.Bound((weightedAddress :+ newAddress(newIsa, 1)): _*))
assert(sr.gauges(Seq("partitioner", "nodes"))() == 6)
await(nodeManager.snapshotSharder().getServiceByPartitionId(newIsa.getPort))
}
}
test("replicas belong to the same logical partition") {
new Ctx(addressSize = 7) {
val logicalPartition = getLogicalPartition(Var(fixedInetAddresses))
val nodeManager = noReshardingManager(stack, defaultParams, logicalPartition)
assert(sr.gauges(Seq("partitioner", "nodes"))() == 4)
val svc00 = await(nodeManager.snapshotSharder().getServiceByPartitionId(0))
val svc0 =
await(
nodeManager
.snapshotSharder().getServiceByPartitionId(
logicalPartition(fixedInetAddresses(0).getPort).head))
val svc10 = await(nodeManager.snapshotSharder().getServiceByPartitionId(1))
val svc11 =
await(
nodeManager
.snapshotSharder().getServiceByPartitionId(
logicalPartition(fixedInetAddresses(1).getPort).head))
val svc12 =
await(
nodeManager
.snapshotSharder().getServiceByPartitionId(
logicalPartition(fixedInetAddresses(2).getPort).head))
assert(svc00 eq svc0)
assert((svc10 eq svc11) && (svc10 eq svc12))
assert(svc00 ne svc10)
}
}
test("Add a node to an existing logical partition") {
new Ctx(addressSize = 7) {
val varInetAddress = Var(fixedInetAddresses)
val logicalPartition = getLogicalPartition(varInetAddress)
val nodeManager = noReshardingManager(stack, defaultParams, logicalPartition)
val inet = new InetSocketAddress(InetAddress.getLoopbackAddress, 0)
// to get the port
val newIsa =
StringServer.server.serve(inet, stringService).boundAddress.asInstanceOf[InetSocketAddress]
// before adding, cannot find the logical partition
intercept[AssertionError] {
await(
nodeManager
.snapshotSharder().getServiceByPartitionId(logicalPartition(newIsa.getPort).head))
}
val newAddresses = fixedInetAddresses :+ newIsa
varInetAddress.update(newAddresses)
varAddr.update(Addr.Bound(newAddresses.map(newAddress(_, 1)): _*))
// not throwing an exception here verifies that the service exists
await(nodeManager.snapshotSharder().getServiceByPartitionId(3))
assert(3 == logicalPartition(newIsa.getPort).head)
}
}
test("Remove a node from a partition has == 1 node") {
new Ctx(addressSize = 7) {
val logicalPartition = getLogicalPartition(Var(fixedInetAddresses))
val nodeManager = noReshardingManager(stack, defaultParams, logicalPartition)
await(nodeManager.snapshotSharder().getServiceByPartitionId(0))
// topology: p0(0), p1(1,2), p2(3,4,5), p3(6)
// partition 0 has one address, drop it
varAddr.update(Addr.Bound(weightedAddress.drop(1): _*))
val e = intercept[NoPartitionException] {
await(nodeManager.snapshotSharder().getServiceByPartitionId(0))
}
assert(e.getMessage.contains("No partition: 0 found in the node manager"))
}
}
test("Remove a node from a partition has > 1 nodes") {
new Ctx(addressSize = 8) {
val nodeManager = noReshardingManager(
stack,
defaultParams,
getLogicalPartition(Var(fixedInetAddresses))
)
assert(sr.gauges(Seq("partitioner", "nodes"))() == 4)
await(nodeManager.snapshotSharder().getServiceByPartitionId(0))
// topology: p0(0), p1(1,2), p2(3,4,5), p3(6,7)
// partition 3 has two addresses, remove one.
varAddr.update(Addr.Bound(weightedAddress.dropRight(1): _*))
await(nodeManager.snapshotSharder().getServiceByPartitionId(3))
// remove both
varAddr.update(Addr.Bound(weightedAddress.dropRight(2): _*))
val e = intercept[NoPartitionException] {
await(nodeManager.snapshotSharder().getServiceByPartitionId(3))
}
assert(e.getMessage.contains("No partition: 3 found in the node manager"))
}
}
test("Node manager listens to weight changes") {
new Ctx(addressSize = 8) {
val varInetAddressHelper = Var(fixedInetAddresses)
val nodeManager = noReshardingManager(
stack,
defaultParams,
getLogicalPartition(varInetAddressHelper)
)
val newWeightedAddress =
weightedAddress.dropRight(1) :+ newAddress(fixedInetAddresses.last, 2)
// for testing purposes, we want to see whether weight changes can trigger the node manager
// to rebuild the partition map. If it rebuilds, it will run into a failing getLogicalPartition
// and log errors.
varInetAddressHelper.update(immutable.IndexedSeq.empty)
// we should log exceptions here
varAddr.update(Addr.Bound(newWeightedAddress: _*))
}
}
test("Addresses refresh, each node is a partition") {
new Ctx(addressSize = 3) {
val nodeManager = noReshardingManager(stack, defaultParams)
assert(sr.gauges(Seq("partitioner", "nodes"))() == 3)
val svc0 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
// wiping out the addresses won't trigger a rebuild
varAddr.update(Addr.Bound(Set.empty[Address]))
val svc1 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
assert(svc0 eq svc1)
// rebuild
varAddr.update(Addr.Bound(fixedInetAddresses.map(newAddress(_, 2)): _*))
val svc2 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
assert(svc0 ne svc2)
// Neg won't trigger rebuild
varAddr.update(Addr.Neg)
val svc3 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
assert(svc2 eq svc3)
// rebuild
varAddr.update(Addr.Bound(fixedInetAddresses.map(newAddress(_, 3)): _*))
val svc4 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
assert(svc4 ne svc0)
}
}
test("close the node manager will close all ServiceFactories") {
new Ctx(addressSize = 5) {
val nodeManager = noReshardingManager(stack, defaultParams)
val svc0 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(0).getPort))
assert(svc0.status == Status.Open)
await(nodeManager.close())
val svc1 =
await(nodeManager.snapshotSharder().getServiceByPartitionId(fixedInetAddresses(1).getPort))
assert(svc0.status == Status.Closed)
assert(svc1.status == Status.Closed)
}
}
test("log errors when getLogicalPartition throws exceptions for certain shards") {
new Ctx(addressSize = 8) {
def getLogicalPartition: Int => Seq[Int] = {
case even if even % 2 == 0 => Seq(0)
case odd => throw new Exception("failed")
}
val nodeManager = noReshardingManager(
stack,
defaultParams,
getLogicalPartition
)
val succeedPort = fixedInetAddresses.map(_.getPort).filter(_ % 2 == 0)
if (succeedPort.nonEmpty) {
val svc0 = await(nodeManager.snapshotSharder().getServiceByPartitionId(0))
assert(await(svc0("any")) == "service")
}
}
}
test("Reshard based on the state that's passed in safely") {
new Ctx(addressSize = 7) {
val varInetAddress = Var(fixedInetAddresses)
val logicalPartition: Int => Int => Seq[Int] = { observed =>
if (observed % 2 == 0) {
getLogicalPartition(varInetAddress).andThen(_.map(_ % 3))
} else {
getLogicalPartition(varInetAddress)
}
}
val observed = Var(0)
val observable = Activity(observed.map(Activity.Ok(_)))
val oldPf: PartialFunction[String, Future[String]] = { case s => Future.value(s) }
val newPf: PartialFunction[String, Future[String]] = { case s => Future.value(s.reverse) }
val nodeManager = reshardingManager(
stack,
defaultParams,
{
case 0 => oldPf
case 1 => newPf
},
logicalPartition,
observable
)
val inet = new InetSocketAddress(InetAddress.getLoopbackAddress, 0)
// to get the port
val newIsa =
StringServer.server.serve(inet, stringService).boundAddress.asInstanceOf[InetSocketAddress]
val oldSnap = nodeManager.snapshotSharder()
// before adding, cannot find the logical partition
intercept[PartitionNodeManager.NoPartitionException] {
await(oldSnap.getServiceByPartitionId(3))
}
assert(oldSnap.partitionFunction eq oldPf)
observed() = 1
val newSnap = nodeManager.snapshotSharder()
// not throwing an exception here verifies that the service exists
await(newSnap.getServiceByPartitionId(3)) // the one we couldn't find before
assert(newSnap.partitionFunction eq newPf)
}
}
test("Empty serverset means we can't find a shard") {
new Ctx(addressSize = 0) {
val nodeManager = noReshardingManager(stack, defaultParams)
val snap = nodeManager.snapshotSharder()
intercept[NoPartitionException] {
await(snap.getServiceByPartitionId(0))
}
}
}
}
| twitter/finagle | finagle-partitioning/src/test/scala/com/twitter/finagle/partitioning/PartitionNodeManagerTest.scala | Scala | apache-2.0 | 15,020 |
/*
* This file is part of eCobertura.
*
* Copyright (c) 2009, 2010 Joachim Hofer
* All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package ecobertura.core.log
import org.junit.Assert._
import org.mockito.Mockito._
import org.eclipse.core.runtime._
import org.junit._
import org.mockito.ArgumentCaptor
class EclipseLoggerTest {
private var ilog: ILog = null
private var status: ArgumentCaptor[IStatus] = null
@Before
def setUp = {
ilog = mock(classOf[ILog])
status = ArgumentCaptor forClass classOf[IStatus]
EclipseLogger logFor ilog
}
@Test
def testInfoString = {
EclipseLogger info "hello"
assertLogMessageSeverity("hello", IStatus.INFO);
}
@Test
def testWarnString = {
EclipseLogger warn "hello"
assertLogMessageSeverity("hello", IStatus.WARNING);
}
@Test
def testErrorString = {
EclipseLogger error "hello"
assertLogMessageSeverity("hello", IStatus.ERROR);
}
private def assertLogMessageSeverity(message: String, severity: Int) = {
verify(ilog, times(1)) log status.capture
assertEquals(message, status.getValue.getMessage)
assertEquals(severity, status.getValue.getSeverity)
assertNull(status.getValue.getException)
}
@Test
def testInfoStringThrowable = {
EclipseLogger info ("hello", new Exception("hello exc"))
assertLogMessageSeverityException("hello", IStatus.INFO, new Exception("hello exc"))
}
@Test
def testWarnStringThrowable = {
EclipseLogger warn ("hello", new Exception("hello exc"))
assertLogMessageSeverityException("hello", IStatus.WARNING, new Exception("hello exc"))
}
@Test
def testErrorStringThrowable = {
EclipseLogger error ("hello", new Exception("hello exc"))
assertLogMessageSeverityException("hello", IStatus.ERROR, new Exception("hello exc"))
}
@Test
def testInfoThrowable = {
EclipseLogger info (new Exception("hello"))
assertLogMessageSeverityException("hello", IStatus.INFO, new Exception("hello"))
}
@Test
def testWarnThrowable = {
EclipseLogger warn (new Exception("hello"))
assertLogMessageSeverityException("hello", IStatus.WARNING, new Exception("hello"))
}
@Test
def testErrorThrowable = {
EclipseLogger error (new Exception("hello"))
assertLogMessageSeverityException("hello", IStatus.ERROR, new Exception("hello"))
}
private def assertLogMessageSeverityException(
message: String, severity: Int, throwable: Throwable) = {
verify(ilog, times(1)) log status.capture
assertEquals(message, status.getValue.getMessage)
assertEquals(severity, status.getValue.getSeverity)
assertEquals(throwable.getClass, status.getValue.getException.getClass)
assertEquals(throwable.getMessage, status.getValue.getException.getMessage)
}
}
| jmhofer/eCobertura | ecobertura.core/src/test/scala/ecobertura/core/log/EclipseLoggerTest.scala | Scala | epl-1.0 | 2,991 |
/**
* For copyright information see the LICENSE document.
*/
package entice.server.world
import entice.server.utils._
import entice.protocol._
import scala.concurrent.duration._
sealed trait Event extends Typeable
// concerning scheduling:
// tick is server internal, push update is for the server-gamestate update
case class Tick () extends Event
case class PushUpdate () extends Event
case class Schedule (event: Event, after: FiniteDuration) extends Event
// by the world:
case class Spawned (entity: RichEntity) extends Event
case class Despawned (world: World,
entity: Entity,
components: TypedSet[Component]) extends Event
// grouping only:
case class GroupInvite (sender: RichEntity, recipient: RichEntity) extends Event
case class GroupDecline (sender: RichEntity, recipient: RichEntity) extends Event
case class GroupAccept (sender: RichEntity, recipient: RichEntity) extends Event
case class GroupLeave (sender: RichEntity) extends Event
case class GroupKick (sender: RichEntity, recipient: RichEntity) extends Event
// entity actions:
case class Move (entity: RichEntity) extends Event
case class Chat (entity: RichEntity,
text: String,
channel: ChatChannels.Value) extends Event
case class Announcement (text: String) extends Event
case class Animate (entity: RichEntity, anim: String) extends Event
| entice/old-server | src/main/scala/entice/server/world/Events.scala | Scala | bsd-3-clause | 1,840 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.resource
import java.util.{Map => JMap}
import java.util.concurrent.atomic.AtomicInteger
import javax.annotation.concurrent.GuardedBy
import scala.collection.JavaConverters._
import org.apache.spark.SparkConf
import org.apache.spark.annotation.Evolving
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Python.PYSPARK_EXECUTOR_MEMORY
/**
* Resource profile to associate with an RDD. A ResourceProfile allows the user to
* specify executor and task requirements for an RDD that will get applied during a
* stage. This allows the user to change the resource requirements between stages.
* This is meant to be immutable so user can't change it after building.
*/
@Evolving
class ResourceProfile(
val executorResources: Map[String, ExecutorResourceRequest],
val taskResources: Map[String, TaskResourceRequest]) extends Serializable with Logging {
// _id is only a var for testing purposes
private var _id = ResourceProfile.getNextProfileId
def id: Int = _id
/**
* (Java-specific) gets a Java Map of resources to TaskResourceRequest
*/
def taskResourcesJMap: JMap[String, TaskResourceRequest] = taskResources.asJava
/**
* (Java-specific) gets a Java Map of resources to ExecutorResourceRequest
*/
def executorResourcesJMap: JMap[String, ExecutorResourceRequest] = {
executorResources.asJava
}
// Note that some cluster managers don't set the executor cores explicitly so
// be sure to check the Option as required
private[spark] def getExecutorCores: Option[Int] = {
executorResources.get(ResourceProfile.CORES).map(_.amount.toInt)
}
private[spark] def getTaskCpus: Option[Int] = {
taskResources.get(ResourceProfile.CPUS).map(_.amount.toInt)
}
// testing only
private[spark] def setToDefaultProfile(): Unit = {
_id = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID
}
override def equals(obj: Any): Boolean = {
obj match {
case that: ResourceProfile =>
that.getClass == this.getClass && that.id == _id &&
that.taskResources == taskResources && that.executorResources == executorResources
case _ =>
false
}
}
override def hashCode(): Int = Seq(taskResources, executorResources).hashCode()
override def toString(): String = {
s"Profile: id = ${_id}, executor resources: ${executorResources.mkString(",")}, " +
s"task resources: ${taskResources.mkString(",")}"
}
}
object ResourceProfile extends Logging {
// task resources
val CPUS = "cpus"
// Executor resources
val CORES = "cores"
val MEMORY = "memory"
val OVERHEAD_MEM = "memoryOverhead"
val PYSPARK_MEM = "pyspark.memory"
// all supported spark executor resources (minus the custom resources like GPUs/FPGAs)
val allSupportedExecutorResources = Seq(CORES, MEMORY, OVERHEAD_MEM, PYSPARK_MEM)
val UNKNOWN_RESOURCE_PROFILE_ID = -1
val DEFAULT_RESOURCE_PROFILE_ID = 0
private lazy val nextProfileId = new AtomicInteger(0)
private val DEFAULT_PROFILE_LOCK = new Object()
// The default resource profile uses the application level configs.
// var so that it can be reset for testing purposes.
@GuardedBy("DEFAULT_PROFILE_LOCK")
private var defaultProfile: Option[ResourceProfile] = None
private[spark] def getNextProfileId: Int = nextProfileId.getAndIncrement()
private[spark] def getOrCreateDefaultProfile(conf: SparkConf): ResourceProfile = {
DEFAULT_PROFILE_LOCK.synchronized {
defaultProfile match {
case Some(prof) => prof
case None =>
val taskResources = getDefaultTaskResources(conf)
val executorResources = getDefaultExecutorResources(conf)
val defProf = new ResourceProfile(executorResources, taskResources)
defProf.setToDefaultProfile
defaultProfile = Some(defProf)
logInfo("Default ResourceProfile created, executor resources: " +
s"${defProf.executorResources}, task resources: " +
s"${defProf.taskResources}")
defProf
}
}
}
private def getDefaultTaskResources(conf: SparkConf): Map[String, TaskResourceRequest] = {
val cpusPerTask = conf.get(CPUS_PER_TASK)
val treqs = new TaskResourceRequests().cpus(cpusPerTask)
ResourceUtils.addTaskResourceRequests(conf, treqs)
treqs.requests
}
private def getDefaultExecutorResources(conf: SparkConf): Map[String, ExecutorResourceRequest] = {
val ereqs = new ExecutorResourceRequests()
ereqs.cores(conf.get(EXECUTOR_CORES))
ereqs.memory(conf.get(EXECUTOR_MEMORY).toString)
conf.get(EXECUTOR_MEMORY_OVERHEAD).map(mem => ereqs.memoryOverhead(mem.toString))
conf.get(PYSPARK_EXECUTOR_MEMORY).map(mem => ereqs.pysparkMemory(mem.toString))
val execReq = ResourceUtils.parseAllResourceRequests(conf, SPARK_EXECUTOR_PREFIX)
execReq.foreach { req =>
val name = req.id.resourceName
ereqs.resource(name, req.amount, req.discoveryScript.getOrElse(""),
req.vendor.getOrElse(""))
}
ereqs.requests
}
// for testing only
private[spark] def reInitDefaultProfile(conf: SparkConf): Unit = {
clearDefaultProfile
// force recreate it after clearing
getOrCreateDefaultProfile(conf)
}
// for testing only
private[spark] def clearDefaultProfile: Unit = {
DEFAULT_PROFILE_LOCK.synchronized {
defaultProfile = None
}
}
private[spark] def getCustomTaskResources(
rp: ResourceProfile): Map[String, TaskResourceRequest] = {
rp.taskResources.filterKeys(k => !k.equals(ResourceProfile.CPUS))
}
private[spark] def getCustomExecutorResources(
rp: ResourceProfile): Map[String, ExecutorResourceRequest] = {
rp.executorResources.filterKeys(k => !ResourceProfile.allSupportedExecutorResources.contains(k))
}
}
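// A minimal usage sketch, not part of the original file: ResourceProfileUsageSketch and
// the literal amounts are illustrative. It builds the resource maps the same way
// getDefaultExecutorResources/getDefaultTaskResources do above, then wraps them in a
// ResourceProfile.
object ResourceProfileUsageSketch {
  def example(): ResourceProfile = {
    val ereqs = new ExecutorResourceRequests()
    ereqs.cores(4)
    ereqs.memory("4096") // memory amount as a string, mirroring getDefaultExecutorResources
    val treqs = new TaskResourceRequests().cpus(2)
    new ResourceProfile(ereqs.requests, treqs.requests)
  }
}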
| ptkool/spark | core/src/main/scala/org/apache/spark/resource/ResourceProfile.scala | Scala | apache-2.0 | 6,646 |
package scala.collection
import scala.collection.generic.{CanBuildFrom, Subtractable}
import scala.collection.immutable.RedBlackTree
/** A template trait for bag collections of type `Bag[A]`.
*
* Note: This trait replaces every method that uses `break` in
* `TraversableLike` by an iterator version.
*
* @author Nicolas Stucki
* @tparam A the element type of the scala.collection
* @tparam This the type of the actual scala.collection containing the elements.
*
* @define Coll Bag
* @define coll bag scala.collection
*/
trait BagLike[A, +This <: BagLike[A, This] with Bag[A]]
extends IterableLike[A, This]
with GenBagLike[A, This]
with (A => This)
with Subtractable[A, This] {
self =>
def empty: This
/** Creates a new builder for this bag type.
*/
override protected[this] def newBuilder: mutable.BagBuilder[A, This] = mutable.BagBuilder(empty)
def apply(elem: A): This = equivalentTo(elem)
/**
* Returns the sub-bag that has only elements equivalent to elem
* @param elem element
* @return A sub-bag with elements equivalent to elem
*/
def equivalentTo(elem: A): This = getBucket(elem) match {
case Some(bucket) => (newBuilder addBucket bucket).result()
case None => empty
}
def contains(elem: A): Boolean = repr.multiplicity(elem) > 0
def mostCommon: This = {
if (isEmpty) return empty
val maxSize = bucketsIterator.map(_.size).max
val b = newBuilder
for (bucket <- bucketsIterator if maxSize == bucket.size) {
b addBucket bucket
}
b.result()
}
override def leastCommon: This = {
if (isEmpty) return empty
val minM = bucketsIterator.map(_.size).min
val b = newBuilder
for (bucket <- bucketsIterator if minM == bucket.size) {
b addBucket bucket
}
b.result()
}
def distinct: This = {
val b = newBuilder
for (bucket <- bucketsIterator; elem <- bucket.distinctIterator)
b += elem
b.result()
}
def added(elem: A, count: Int): This = this.addedBucket(bagConfiguration.bucketFrom(elem, count))
def ++(elems: GenTraversableOnce[A]): This = {
val b = newBuilder
this.bucketsIterator.foreach(b addBucket _)
elems.foreach(b += _)
b.result()
}
def getBucket(elem: A): Option[BagBucket] = bucketsIterator.find(bucket => bagConfiguration.equiv(elem, bucket.sentinel))
def addedBucket(bucket: scala.collection.BagBucket[A]): This = getBucket(bucket.sentinel) match {
case Some(bucket2) => updatedBucket(bagConfiguration.bucketFrom(bucket, bucket2))
case None => updatedBucket(bagConfiguration.bucketFrom(bucket))
}
/**
* Put or replace the bucket associated with the new bucket.
* The bucket must be compatible with the bag (i.e. it was generated by the `bagConfiguration` instance)
* @param bucket new bucket
* @return the bag with the new bucket in place
*/
protected def updatedBucket(bucket: BagBucket): This
def union(that: GenBag[A]): This = {
val b = newBuilder
this.bucketsIterator.foreach(b addBucket _)
that.bucketsIterator.foreach(b addBucket _)
b.result()
}
def ++(that: GenBag[A]) = this union that
def diff(that: GenBag[A]): This = {
val b = newBuilder
for (bucket <- bucketsIterator) {
that.getBucket(bucket.sentinel) match {
case Some(bucket2) =>
val diffBucket = bucket diff bucket2
if (diffBucket.nonEmpty)
b addBucket diffBucket
case None =>
b addBucket bucket
}
}
b.result()
}
def --(that: GenBag[A]): This = this diff that
def maxUnion(that: GenBag[A]): This = {
val b = newBuilder
val seen = mutable.Set.empty[A]
for (bucket <- this.bucketsIterator; elem <- bucket.distinctIterator) {
b.add(elem, Math.max(bucket.multiplicity(elem), that.multiplicity(elem)))
seen += elem
}
for (bucket <- that.bucketsIterator; elem <- bucket.distinctIterator) {
if (!seen(elem)) {
b.add(elem, bucket.multiplicity(elem))
seen += elem
}
}
b.result()
}
override def intersect(that: GenBag[A]): This = {
val b = newBuilder
for (bucket <- bucketsIterator) {
that.getBucket(bucket.sentinel) match {
case Some(bucket2) =>
val intersectionBucket = bucket intersect bucket2
if (intersectionBucket.nonEmpty)
b addBucket intersectionBucket
case None =>
}
}
b.result()
}
// Removed elements
def -(elem: A): This = removed(elem, 1)
def -(elemCount: (A, Int)): This = removed(elemCount._1, elemCount._2)
def removed(elem: A, count: Int): This = {
val b = newBuilder
for (bucket <- bucketsIterator) {
if (bagConfiguration.equiv(bucket.sentinel, elem)) {
val bucket2 = bucket.removed(elem, count)
if (bucket2.nonEmpty)
b addBucket bucket2
} else {
b addBucket bucket
}
}
b.result()
}
def removedAll(elem: A): This = removedBucket(elem)
def removedBucket(elem: A): This = {
val b = newBuilder
for (bucket <- bucketsIterator if !bagConfiguration.equiv(bucket.sentinel, elem)) {
b addBucket bucket
}
b.result()
}
/**
* Returns a mapping of the multiplicities of the bag
* @return map containing the multiplicities of the bag
*/
def toMap: Map[A, Int] = new Multiplicities(repr)
/**
* Returns a bag with an updated multiplicity of some element
* @param elem element that will have its multiplicity changed
* @param count number of times the element will be repeated in the bag
* @return bag with the new multiplicity
*/
def withMultiplicity(elem: A, count: Int): This = (this removedAll elem).added(elem, count)
override def forall(p: (A) => Boolean): Boolean = bucketsIterator.forall(_.forall(p))
override def exists(p: (A) => Boolean): Boolean = bucketsIterator.exists(_.exists(p))
override def find(p: (A) => Boolean): Option[A] = {
val it = bucketsIterator
while (it.hasNext) {
it.next().find(p) match {
case value@Some(_) => return value
case None =>
}
}
None
}
override def foldLeft[B](z: B)(op: (B, A) => B): B = {
var result = z
this.bucketsIterator foreach (bucket => result = bucket.foldLeft[B](result)(op))
result
}
override def take(n: Int): This = {
val b = newBuilder
var taken = 0
val it = bucketsIterator
while (it.hasNext && taken < n) {
val bucket = it.next()
val m = bucket.size
if (taken + m <= n) {
b.addBucket(bucket)
taken += m
} else {
// TODO implement BagBucketLike to have bucket.take and use it here
bucket.take(n - taken).foreach(b += _)
taken = n
}
}
b.result()
}
override def map[B, That](f: (A) => B)(implicit bf: CanBuildFrom[This, B, That]): That = super.map(f)(bf)
override def sum[B >: A](implicit num: Numeric[B]): B = bucketsIterator.map(_.sum(num)).foldLeft(num.zero)(num.plus)
override def product[B >: A](implicit num: Numeric[B]): B = bucketsIterator.map(_.product(num)).foldLeft(num.one)(num.times)
override def min[B >: A](implicit cmp: Ordering[B]): A = bucketsIterator.map(_.min(cmp)).min(cmp)
override def max[B >: A](implicit cmp: Ordering[B]): A = bucketsIterator.map(_.max(cmp)).max(cmp)
override def reduceLeft[B >: A](op: (B, A) => B): B = {
if (isEmpty)
throw new UnsupportedOperationException("empty.reduceLeft")
var first = true
var acc: B = 0.asInstanceOf[B]
for (bucket <- bucketsIterator if bucket.nonEmpty) {
if (first) {
acc = bucket.reduceLeft(op)
first = false
}
else acc = bucket.foldLeft(acc)(op)
}
acc
}
override def count(p: (A) => Boolean): Int = {
var cnt = 0
for (bucket <- bucketsIterator)
cnt += bucket.count(p)
cnt
}
}
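// Illustrative semantics note, not part of the original file; `Bag(...)` below is a
// hypothetical factory for a concrete bag implementation from this library:
//   val b = Bag(1, 1, 2)
//   b.multiplicity(1)          // 2
//   b.equivalentTo(1)          // Bag(1, 1)
//   b.withMultiplicity(2, 3)   // Bag(1, 1, 2, 2, 2)
//   b.toMap                    // Map(1 -> 2, 2 -> 1)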
| sageserpent-open/multisets | src/main/scala/scala/collection/BagLike.scala | Scala | bsd-3-clause | 7,939 |
package com.twitter.io
import com.twitter.concurrent.AsyncMutex
import com.twitter.util.{Closable, CloseAwaitably, Future, FuturePool, Time}
import java.io.InputStream
/**
* Provides the Reader API for an InputStream
*/
class InputStreamReader(inputStream: InputStream, maxBufferSize: Int)
extends Reader with Closable with CloseAwaitably {
private[this] val mutex = new AsyncMutex()
@volatile private[this] var discarded = false
/**
* Asynchronously read at most min(`n`, `maxBufferSize`) bytes from
* the InputStream. The returned future represents the results of
* the read operation. Any failure indicates an error; a buffer
* value of [[com.twitter.io.Buf.Eof]] indicates that the stream has
* completed.
*/
def read(n: Int): Future[Buf] = {
if (discarded)
return Future.exception(new Reader.ReaderDiscarded())
if (n == 0)
return Future.value(Buf.Empty)
mutex.acquire() flatMap { permit =>
FuturePool.interruptibleUnboundedPool {
try {
if (discarded)
throw new Reader.ReaderDiscarded()
val size = n min maxBufferSize
val buffer = new Array[Byte](size)
val c = inputStream.read(buffer, 0, size)
if (c == -1)
Buf.Eof
else
Buf.ByteArray(buffer, 0, c)
} catch { case exc: InterruptedException =>
discarded = true
throw exc
}
} ensure {
permit.release()
}
}
}
/**
* Discard this reader: its output is no longer required.
*/
def discard() { discarded = true }
/**
* Discards this Reader and closes the underlying InputStream
*/
def close(deadline: Time) = closeAwaitably {
discard()
FuturePool.unboundedPool { inputStream.close() }
}
}
object InputStreamReader {
val DefaultMaxBufferSize = 4096
def apply(inputStream: InputStream, maxBufferSize: Int = DefaultMaxBufferSize) =
new InputStreamReader(inputStream, maxBufferSize)
}
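// A minimal usage sketch, not part of the original file: InputStreamReaderUsageSketch is
// illustrative and only uses members shown above (apply, read, close) plus Await from
// com.twitter.util; Buf.Eof signals end-of-stream as documented in read().
object InputStreamReaderUsageSketch {
  import com.twitter.util.Await
  import java.io.ByteArrayInputStream

  def drain(bytes: Array[Byte]): Int = {
    val reader = InputStreamReader(new ByteArrayInputStream(bytes))
    // read up to 4096 bytes at a time until the reader reports end-of-stream
    def loop(acc: Int): Int = {
      val buf = Await.result(reader.read(4096))
      if (buf == Buf.Eof) acc else loop(acc + buf.length)
    }
    val total = loop(0)
    Await.result(reader.close())
    total
  }
}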
| mosesn/util | util-core/src/main/scala/com/twitter/io/InputStreamReader.scala | Scala | apache-2.0 | 2,005 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.security.auth
import kafka.common.KafkaException
import org.apache.kafka.common.acl.AclPermissionType
import org.junit.Assert.assertEquals
import org.junit.Test
import org.scalatest.junit.JUnitSuite
class PermissionTypeTest extends JUnitSuite {
@Test
def testFromString(): Unit = {
val permissionType = PermissionType.fromString("Allow")
assertEquals(Allow, permissionType)
try {
PermissionType.fromString("badName")
fail("Expected exception on invalid PermissionType name.")
} catch {
case _: KafkaException => // expected
}
}
/**
* Test round trip conversions between org.apache.kafka.common.acl.AclPermissionType and
* kafka.security.auth.PermissionType.
*/
@Test
def testJavaConversions(): Unit = {
AclPermissionType.values().foreach {
case AclPermissionType.UNKNOWN | AclPermissionType.ANY =>
case aclPerm =>
val perm = PermissionType.fromJava(aclPerm)
val aclPerm2 = perm.toJava
assertEquals(aclPerm, aclPerm2)
}
}
}
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/test/scala/unit/kafka/security/auth/PermissionTypeTest.scala | Scala | apache-2.0 | 1,849 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage.jdbc
import java.sql.{DriverManager, ResultSet}
import com.github.nscala_time.time.Imports._
import org.apache.predictionio.data.storage.{
DataMap, Event, PEvents, StorageClientConfig}
import org.apache.predictionio.data.SparkVersionDependent
import org.apache.spark.SparkContext
import org.apache.spark.rdd.{JdbcRDD, RDD}
import org.apache.spark.sql.SaveMode
import org.json4s.JObject
import org.json4s.native.Serialization
import scalikejdbc._
/** JDBC implementation of [[PEvents]] */
class JDBCPEvents(client: String, config: StorageClientConfig, namespace: String) extends PEvents {
@transient private implicit lazy val formats = org.json4s.DefaultFormats
def find(
appId: Int,
channelId: Option[Int] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
entityType: Option[String] = None,
entityId: Option[String] = None,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None)(sc: SparkContext): RDD[Event] = {
val lower = startTime.map(_.getMillis).getOrElse(0.toLong)
/** Change the default upper bound from +100 years to +1 year because MySQL's
* FROM_UNIXTIME(t) will return NULL if we use +100 years.
*/
val upper = untilTime.map(_.getMillis).getOrElse((DateTime.now + 1.years).getMillis)
val par = scala.math.min(
new Duration(upper - lower).getStandardDays,
config.properties.getOrElse("PARTITIONS", "4").toLong).toInt
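// Worked example (added for illustration): with startTime and untilTime 30 days apart
// and no PARTITIONS property set, par = min(30, 4) = 4, so the JdbcRDD below splits the
// [lower, upper] epoch-second range into four partitions.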
val entityTypeClause = entityType.map(x => s"and entityType = '$x'").getOrElse("")
val entityIdClause = entityId.map(x => s"and entityId = '$x'").getOrElse("")
val eventNamesClause =
eventNames.map("and (" + _.map(y => s"event = '$y'").mkString(" or ") + ")").getOrElse("")
val targetEntityTypeClause = targetEntityType.map(
_.map(x => s"and targetEntityType = '$x'"
).getOrElse("and targetEntityType is null")).getOrElse("")
val targetEntityIdClause = targetEntityId.map(
_.map(x => s"and targetEntityId = '$x'"
).getOrElse("and targetEntityId is null")).getOrElse("")
val q = s"""
select
id,
event,
entityType,
entityId,
targetEntityType,
targetEntityId,
properties,
eventTime,
eventTimeZone,
tags,
prId,
creationTime,
creationTimeZone
from ${JDBCUtils.eventTableName(namespace, appId, channelId)}
where
eventTime >= ${JDBCUtils.timestampFunction(client)}(?) and
eventTime < ${JDBCUtils.timestampFunction(client)}(?)
$entityTypeClause
$entityIdClause
$eventNamesClause
$targetEntityTypeClause
$targetEntityIdClause
""".replace("\\n", " ")
new JdbcRDD(
sc,
() => {
DriverManager.getConnection(
client,
config.properties("USERNAME"),
config.properties("PASSWORD"))
},
q,
lower / 1000,
upper / 1000,
par,
(r: ResultSet) => {
Event(
eventId = Option(r.getString("id")),
event = r.getString("event"),
entityType = r.getString("entityType"),
entityId = r.getString("entityId"),
targetEntityType = Option(r.getString("targetEntityType")),
targetEntityId = Option(r.getString("targetEntityId")),
properties = Option(r.getString("properties")).map(x =>
DataMap(Serialization.read[JObject](x))).getOrElse(DataMap()),
eventTime = new DateTime(r.getTimestamp("eventTime").getTime,
DateTimeZone.forID(r.getString("eventTimeZone"))),
tags = Option(r.getString("tags")).map(x =>
x.split(",").toList).getOrElse(Nil),
prId = Option(r.getString("prId")),
creationTime = new DateTime(r.getTimestamp("creationTime").getTime,
DateTimeZone.forID(r.getString("creationTimeZone"))))
}).cache()
}
def write(events: RDD[Event], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
val sqlSession = SparkVersionDependent.sqlSession(sc)
import sqlSession.implicits._
val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
val eventsColumnNamesInDF = Seq[String](
"id"
, "event"
, "entityType"
, "entityId"
, "targetEntityType"
, "targetEntityId"
, "properties"
, "eventTime"
, "eventTimeZone"
, "tags"
, "prId"
, "creationTime"
, "creationTimeZone")
// Necessary for handling postgres "case-sensitivity"
val eventsColumnNamesInSQL = JDBCUtils.driverType(client) match {
case "postgresql" => eventsColumnNamesInDF.map(_.toLowerCase)
case _ => eventsColumnNamesInDF
}
val eventDF = events.map { event =>
(event.eventId.getOrElse(JDBCUtils.generateId)
, event.event
, event.entityType
, event.entityId
, event.targetEntityType.orNull
, event.targetEntityId.orNull
, if (!event.properties.isEmpty) Serialization.write(event.properties.toJObject) else null
, new java.sql.Timestamp(event.eventTime.getMillis)
, event.eventTime.getZone.getID
, if (event.tags.nonEmpty) Some(event.tags.mkString(",")) else null
, event.prId
, new java.sql.Timestamp(event.creationTime.getMillis)
, event.creationTime.getZone.getID)
}.toDF(eventsColumnNamesInSQL:_*)
val prop = new java.util.Properties
prop.setProperty("user", config.properties("USERNAME"))
prop.setProperty("password", config.properties("PASSWORD"))
eventDF.write.mode(SaveMode.Append).jdbc(client, tableName, prop)
}
def delete(eventIds: RDD[String], appId: Int, channelId: Option[Int])(sc: SparkContext): Unit = {
eventIds.foreachPartition{ iter =>
iter.foreach { eventId =>
DB localTx { implicit session =>
val tableName = JDBCUtils.eventTableName(namespace, appId, channelId)
val table = SQLSyntax.createUnsafely(tableName)
sql"""
delete from $table where id = $eventId
""".update().apply()
true
}
}
}
}
}
| mars/incubator-predictionio | storage/jdbc/src/main/scala/org/apache/predictionio/data/storage/jdbc/JDBCPEvents.scala | Scala | apache-2.0 | 7,054 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.mysqls
import org.apache.s2graph.core.GraphExceptions.ModelNotFoundException
import org.apache.s2graph.core.Management.JsonModel.{Index, Prop}
import org.apache.s2graph.core.utils.logger
import org.apache.s2graph.core.{GraphExceptions, GraphUtil, JSONParser}
import play.api.libs.json.Json
import scalikejdbc._
object Label extends Model[Label] {
val maxHBaseTableNames = 2
def apply(rs: WrappedResultSet): Label = {
Label(Option(rs.int("id")), rs.string("label"),
rs.int("src_service_id"), rs.string("src_column_name"), rs.string("src_column_type"),
rs.int("tgt_service_id"), rs.string("tgt_column_name"), rs.string("tgt_column_type"),
rs.boolean("is_directed"), rs.string("service_name"), rs.int("service_id"), rs.string("consistency_level"),
rs.string("hbase_table_name"), rs.intOpt("hbase_table_ttl"), rs.string("schema_version"), rs.boolean("is_async"), rs.string("compressionAlgorithm"))
}
def deleteAll(label: Label)(implicit session: DBSession) = {
val id = label.id
LabelMeta.findAllByLabelId(id.get, false).foreach { x => LabelMeta.delete(x.id.get) }
LabelIndex.findByLabelIdAll(id.get, false).foreach { x => LabelIndex.delete(x.id.get) }
Label.delete(id.get)
}
def findByName(labelName: String, useCache: Boolean = true)(implicit session: DBSession = AutoSession): Option[Label] = {
val cacheKey = "label=" + labelName
lazy val labelOpt =
sql"""
select *
from labels
where label = ${labelName}""".map { rs => Label(rs) }.single.apply()
if (useCache) withCache(cacheKey)(labelOpt)
else labelOpt
}
def insert(label: String,
srcServiceId: Int,
srcColumnName: String,
srcColumnType: String,
tgtServiceId: Int,
tgtColumnName: String,
tgtColumnType: String,
isDirected: Boolean,
serviceName: String,
serviceId: Int,
consistencyLevel: String,
hTableName: String,
hTableTTL: Option[Int],
schemaVersion: String,
isAsync: Boolean,
compressionAlgorithm: String)(implicit session: DBSession = AutoSession) = {
sql"""
insert into labels(label,
src_service_id, src_column_name, src_column_type,
tgt_service_id, tgt_column_name, tgt_column_type,
is_directed, service_name, service_id, consistency_level, hbase_table_name, hbase_table_ttl, schema_version, is_async, compressionAlgorithm)
values (${label},
${srcServiceId}, ${srcColumnName}, ${srcColumnType},
${tgtServiceId}, ${tgtColumnName}, ${tgtColumnType},
${isDirected}, ${serviceName}, ${serviceId}, ${consistencyLevel}, ${hTableName}, ${hTableTTL},
${schemaVersion}, ${isAsync}, ${compressionAlgorithm})
"""
.updateAndReturnGeneratedKey.apply()
}
def findByIdOpt(id: Int)(implicit session: DBSession = AutoSession): Option[Label] = {
val cacheKey = "id=" + id
withCache(cacheKey)(
sql"""
select *
from labels
where id = ${id}"""
.map { rs => Label(rs) }.single.apply())
}
def findById(id: Int)(implicit session: DBSession = AutoSession): Label = {
val cacheKey = "id=" + id
withCache(cacheKey)(
sql"""
select *
from labels
where id = ${id}"""
.map { rs => Label(rs) }.single.apply()).get
}
def findByTgtColumnId(columnId: Int)(implicit session: DBSession = AutoSession): List[Label] = {
val cacheKey = "tgtColumnId=" + columnId
val col = ServiceColumn.findById(columnId)
withCaches(cacheKey)(
sql"""
select *
from labels
where tgt_column_name = ${col.columnName}
and service_id = ${col.serviceId}
""".map { rs => Label(rs) }.list().apply())
}
def findBySrcColumnId(columnId: Int)(implicit session: DBSession = AutoSession): List[Label] = {
val cacheKey = "srcColumnId=" + columnId
val col = ServiceColumn.findById(columnId)
withCaches(cacheKey)(
sql"""
select *
from labels
where src_column_name = ${col.columnName}
and service_id = ${col.serviceId}
""".map { rs => Label(rs) }.list().apply())
}
def findBySrcServiceId(serviceId: Int)(implicit session: DBSession = AutoSession): List[Label] = {
val cacheKey = "srcServiceId=" + serviceId
withCaches(cacheKey)(
sql"""select * from labels where src_service_id = ${serviceId}""".map { rs => Label(rs) }.list().apply
)
}
def findByTgtServiceId(serviceId: Int)(implicit session: DBSession = AutoSession): List[Label] = {
val cacheKey = "tgtServiceId=" + serviceId
withCaches(cacheKey)(
sql"""select * from labels where tgt_service_id = ${serviceId}""".map { rs => Label(rs) }.list().apply
)
}
def insertAll(labelName: String, srcServiceName: String, srcColumnName: String, srcColumnType: String,
tgtServiceName: String, tgtColumnName: String, tgtColumnType: String,
isDirected: Boolean = true,
serviceName: String,
indices: Seq[Index],
metaProps: Seq[Prop],
consistencyLevel: String,
hTableName: Option[String],
hTableTTL: Option[Int],
schemaVersion: String,
isAsync: Boolean,
compressionAlgorithm: String)(implicit session: DBSession = AutoSession): Label = {
val srcServiceOpt = Service.findByName(srcServiceName, useCache = false)
val tgtServiceOpt = Service.findByName(tgtServiceName, useCache = false)
val serviceOpt = Service.findByName(serviceName, useCache = false)
if (srcServiceOpt.isEmpty) throw new RuntimeException(s"source service $srcServiceName is not created.")
if (tgtServiceOpt.isEmpty) throw new RuntimeException(s"target service $tgtServiceName is not created.")
if (serviceOpt.isEmpty) throw new RuntimeException(s"service $serviceName is not created.")
val newLabel = for {
srcService <- srcServiceOpt
tgtService <- tgtServiceOpt
service <- serviceOpt
} yield {
val srcServiceId = srcService.id.get
val tgtServiceId = tgtService.id.get
val serviceId = service.id.get
/* insert serviceColumn */
val srcCol = ServiceColumn.findOrInsert(srcServiceId, srcColumnName, Some(srcColumnType), schemaVersion)
val tgtCol = ServiceColumn.findOrInsert(tgtServiceId, tgtColumnName, Some(tgtColumnType), schemaVersion)
if (srcCol.columnType != srcColumnType) throw new RuntimeException(s"source service column type not matched ${srcCol.columnType} != ${srcColumnType}")
if (tgtCol.columnType != tgtColumnType) throw new RuntimeException(s"target service column type not matched ${tgtCol.columnType} != ${tgtColumnType}")
/* create label */
Label.findByName(labelName, useCache = false).getOrElse {
val createdId = insert(labelName, srcServiceId, srcColumnName, srcColumnType,
tgtServiceId, tgtColumnName, tgtColumnType, isDirected, serviceName, serviceId, consistencyLevel,
hTableName.getOrElse(service.hTableName), hTableTTL.orElse(service.hTableTTL), schemaVersion, isAsync, compressionAlgorithm).toInt
val labelMetaMap = metaProps.map { case Prop(propName, defaultValue, dataType) =>
val labelMeta = LabelMeta.findOrInsert(createdId, propName, defaultValue, dataType)
(propName -> labelMeta.seq)
}.toMap ++ LabelMeta.reservedMetas.map (labelMeta => labelMeta.name -> labelMeta.seq).toMap
if (indices.isEmpty) {
// make default index with _PK, _timestamp, 0
LabelIndex.findOrInsert(createdId, LabelIndex.DefaultName, LabelIndex.DefaultMetaSeqs.toList, "none")
} else {
indices.foreach { index =>
val metaSeq = index.propNames.map { name => labelMetaMap(name) }
LabelIndex.findOrInsert(createdId, index.name, metaSeq.toList, "none")
}
}
val cacheKeys = List(s"id=$createdId", s"label=$labelName")
val ret = findByName(labelName, useCache = false).get
putsToCache(cacheKeys.map(k => k -> ret))
ret
}
}
newLabel.getOrElse(throw new RuntimeException("failed to create label"))
}
def findAll()(implicit session: DBSession = AutoSession) = {
val ls = sql"""select * from labels""".map { rs => Label(rs) }.list().apply()
putsToCache(ls.map { x =>
val cacheKey = s"id=${x.id.get}"
(cacheKey -> x)
})
putsToCache(ls.map { x =>
val cacheKey = s"label=${x.label}"
(cacheKey -> x)
})
}
def updateName(oldName: String, newName: String)(implicit session: DBSession = AutoSession) = {
logger.info(s"rename label: $oldName -> $newName")
sql"""update labels set label = ${newName} where label = ${oldName}""".update.apply()
}
def updateHTableName(labelName: String, newHTableName: String)(implicit session: DBSession = AutoSession) = {
logger.info(s"update HTable of label $labelName to $newHTableName")
val cnt = sql"""update labels set hbase_table_name = $newHTableName where label = $labelName""".update().apply()
val label = Label.findByName(labelName, useCache = false).get
val cacheKeys = List(s"id=${label.id}", s"label=${label.label}")
cacheKeys.foreach { key =>
expireCache(key)
expireCaches(key)
}
cnt
}
def delete(id: Int)(implicit session: DBSession = AutoSession) = {
val label = findById(id)
logger.info(s"delete label: $label")
val cnt = sql"""delete from labels where id = ${label.id.get}""".update().apply()
val cacheKeys = List(s"id=$id", s"label=${label.label}")
cacheKeys.foreach { key =>
expireCache(key)
expireCaches(key)
}
cnt
}
}
case class Label(id: Option[Int], label: String,
srcServiceId: Int, srcColumnName: String, srcColumnType: String,
tgtServiceId: Int, tgtColumnName: String, tgtColumnType: String,
isDirected: Boolean = true, serviceName: String, serviceId: Int, consistencyLevel: String = "strong",
hTableName: String, hTableTTL: Option[Int],
schemaVersion: String, isAsync: Boolean = false,
compressionAlgorithm: String) extends JSONParser {
def metas = LabelMeta.findAllByLabelId(id.get)
def metaSeqsToNames = metas.map(x => (x.seq, x.name)) toMap
// lazy val firstHBaseTableName = hbaseTableName.split(",").headOption.getOrElse(Config.HBASE_TABLE_NAME)
lazy val srcService = Service.findById(srcServiceId)
lazy val tgtService = Service.findById(tgtServiceId)
lazy val service = Service.findById(serviceId)
/**
* TODO
* change this to apply hbase table from target serviceName
*/
// lazy val (hbaseZkAddr, hbaseTableName) = (service.cluster, service.tableName.split(",").headOption.getOrElse(Config.HBASE_TABLE_NAME))
// lazy val (hbaseZkAddr, hbaseTableName) = (Config.HBASE_ZOOKEEPER_QUORUM, hTableName.split(",").headOption.getOrElse(Config.HBASE_TABLE_NAME))
// lazy val (hbaseZkAddr, hbaseTableName) = (service.cluster, hTableName.split(",").headOption.getOrElse(GraphConnection.getConfVal("hbase.table.name")))
lazy val (hbaseZkAddr, hbaseTableName) = (service.cluster, hTableName.split(",").head)
lazy val srcColumn = ServiceColumn.find(srcServiceId, srcColumnName).getOrElse(throw ModelNotFoundException("Source column not found"))
lazy val tgtColumn = ServiceColumn.find(tgtServiceId, tgtColumnName).getOrElse(throw ModelNotFoundException("Target column not found"))
lazy val direction = if (isDirected) "out" else "undirected"
lazy val defaultIndex = LabelIndex.findByLabelIdAndSeq(id.get, LabelIndex.DefaultSeq)
//TODO: Make sure this is correct
lazy val indices = LabelIndex.findByLabelIdAll(id.get, useCache = true)
lazy val indicesMap = indices.map(idx => (idx.seq, idx)) toMap
lazy val indexSeqsMap = indices.map(idx => (idx.metaSeqs, idx)) toMap
lazy val indexNameMap = indices.map(idx => (idx.name, idx)) toMap
lazy val extraIndices = indices.filter(idx => defaultIndex.isDefined && idx.id.get != defaultIndex.get.id.get)
// indices filterNot (_.id.get == defaultIndex.get.id.get)
lazy val extraIndicesMap = extraIndices.map(idx => (idx.seq, idx)) toMap
lazy val metaProps = LabelMeta.reservedMetas.map { m =>
if (m == LabelMeta.to) m.copy(dataType = tgtColumnType)
else if (m == LabelMeta.from) m.copy(dataType = srcColumnType)
else m
} ::: LabelMeta.findAllByLabelId(id.get, useCache = true)
lazy val metaPropsInner = LabelMeta.reservedMetasInner.map { m =>
if (m == LabelMeta.to) m.copy(dataType = tgtColumnType)
else if (m == LabelMeta.from) m.copy(dataType = srcColumnType)
else m
} ::: LabelMeta.findAllByLabelId(id.get, useCache = true)
lazy val metaPropsMap = metaProps.map(x => (x.seq, x)).toMap
lazy val metaPropsInvMap = metaProps.map(x => (x.name, x)).toMap
lazy val metaPropNames = metaProps.map(x => x.name)
lazy val metaPropNamesMap = metaProps.map(x => (x.seq, x.name)) toMap
/** this is used only by edgeToProps */
lazy val metaPropsDefaultMap = (for {
prop <- metaProps if LabelMeta.isValidSeq(prop.seq)
jsValue <- innerValToJsValue(toInnerVal(prop.defaultValue, prop.dataType, schemaVersion), prop.dataType)
} yield prop.name -> jsValue).toMap
lazy val metaPropsDefaultMapInner = (for {
prop <- metaPropsInner if LabelMeta.isValidSeq(prop.seq)
jsValue <- innerValToJsValue(toInnerVal(prop.defaultValue, prop.dataType, schemaVersion), prop.dataType)
} yield prop.name -> jsValue).toMap
def srcColumnWithDir(dir: Int) = {
if (dir == GraphUtil.directions("out")) srcColumn else tgtColumn
}
def tgtColumnWithDir(dir: Int) = {
if (dir == GraphUtil.directions("out")) tgtColumn else srcColumn
}
def srcTgtColumn(dir: Int) =
if (isDirected) {
(srcColumnWithDir(dir), tgtColumnWithDir(dir))
} else {
if (dir == GraphUtil.directions("in")) {
(tgtColumn, srcColumn)
} else {
(srcColumn, tgtColumn)
}
}
def init() = {
metas
metaSeqsToNames
service
srcColumn
tgtColumn
defaultIndex
indices
metaProps
}
// def srcColumnInnerVal(jsValue: JsValue) = {
// jsValueToInnerVal(jsValue, srcColumnType, version)
// }
// def tgtColumnInnerVal(jsValue: JsValue) = {
// jsValueToInnerVal(jsValue, tgtColumnType, version)
// }
override def toString(): String = {
val orderByKeys = LabelMeta.findAllByLabelId(id.get)
super.toString() + orderByKeys.toString()
}
// def findLabelIndexSeq(scoring: List[(Byte, Double)]): Byte = {
// if (scoring.isEmpty) LabelIndex.defaultSeq
// else {
// LabelIndex.findByLabelIdAndSeqs(id.get, scoring.map(_._1).sorted).map(_.seq).getOrElse(LabelIndex.defaultSeq)
//
//// LabelIndex.findByLabelIdAndSeqs(id.get, scoring.map(_._1).sorted).map(_.seq).getOrElse(LabelIndex.defaultSeq)
// }
// }
lazy val toJson = Json.obj("labelName" -> label,
"from" -> srcColumn.toJson, "to" -> tgtColumn.toJson,
"isDirected" -> isDirected,
"serviceName" -> serviceName,
"consistencyLevel" -> consistencyLevel,
"schemaVersion" -> schemaVersion,
"isAsync" -> isAsync,
"compressionAlgorithm" -> compressionAlgorithm,
"defaultIndex" -> defaultIndex.map(x => x.toJson),
"extraIndex" -> extraIndices.map(exIdx => exIdx.toJson),
"metaProps" -> metaProps.filter { labelMeta => LabelMeta.isValidSeqForAdmin(labelMeta.seq) }.map(_.toJson)
)
}
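// A minimal usage sketch, not part of the original file: LabelUsageSketch and the argument
// values are illustrative, and it assumes the referenced service and columns already exist
// (otherwise insertAll above throws). Prop comes from Management.JsonModel, already imported
// at the top of this file; an empty indices Seq makes insertAll create the default index.
object LabelUsageSketch {
  def createFriendsLabel(): Label =
    Label.insertAll(
      labelName = "friends",
      srcServiceName = "s1", srcColumnName = "user_id", srcColumnType = "long",
      tgtServiceName = "s1", tgtColumnName = "user_id", tgtColumnType = "long",
      serviceName = "s1",
      indices = Seq.empty,
      metaProps = Seq(Prop("since", "0", "long")),
      consistencyLevel = "strong",
      hTableName = None,
      hTableTTL = None,
      schemaVersion = "v3",
      isAsync = false,
      compressionAlgorithm = "gz")
}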
| jongwook/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/mysqls/Label.scala | Scala | apache-2.0 | 16,580 |
package com.sksamuel.elastic4s.requests.searches
import com.sksamuel.elastic4s.{ElasticError, HitReader}
import scala.util.Try
case class MultisearchResponseItem(index: Int, status: Int, response: Either[ElasticError, SearchResponse])
case class MultiSearchResponse(items: Seq[MultisearchResponseItem]) {
def size: Int = items.size
def failures: Seq[ElasticError] = items.map(_.response).collect {
case left: Left[ElasticError, SearchResponse] => left.left.get
}
def successes: Seq[SearchResponse] = items.map(_.response).collect {
case right: Right[ElasticError, SearchResponse] => right.right.get
}
def to[T: HitReader]: IndexedSeq[T] = successes.flatMap(_.hits.hits).map(_.to[T]).toIndexedSeq
def safeTo[T: HitReader]: IndexedSeq[Try[T]] = successes.flatMap(_.hits.hits).map(_.safeTo[T]).toIndexedSeq
}
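// Illustrative note, not part of the original file: for a MultiSearchResponse whose three
// items hold Left(error), Right(respA), Right(respB), `failures` returns the single
// ElasticError, `successes` returns Seq(respA, respB), and `to[T]` flattens the hits of the
// successful responses through the implicit HitReader[T].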
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/MultiSearchResponse.scala | Scala | apache-2.0 | 837 |
package pokescala.model
import java.time.LocalDateTime
class Move(
val name : String,
val description : String,
val power : Int,
val accuracy : Int,
val category : String,
val pp : Int,
val id : Int,
val resourceURI : String,
val created : LocalDateTime,
val modified : LocalDateTime) extends Model[Move] {
  val registry = MoveRegistry
  registry.register(this)
  def loadAdjacent = Vector()
  override def toString = s"$name; $description; $power; $accuracy; $category; $pp; " + super.toString
}
object Move {
  object Category {
    def apply(raw : String) : String =
      if (raw == physical) physical
      else if (raw == special) special
      else other
    val physical = "physical"
    val special = "special"
    val other = "other"
}
}
object MoveRegistry extends ModelRegistry[Move] {
}
|
haferflocken/PokeScala
|
PokeScala/src/pokescala/model/Move.scala
|
Scala
|
apache-2.0
| 922 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.io
import java.io.InputStream
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
object ByteBuffers {
class ExpandingByteBuffer(capacity: Int) {
private var bb = ByteBuffer.allocate(capacity)
    private def ensureRemaining(count: Int): Unit = {
      if (bb.remaining < count) {
        // double the capacity until it can hold the bytes already written plus `count` more
        var capacity = bb.capacity() * 2
        while (capacity < bb.position() + count) {
          capacity *= 2
        }
        val expanded = ByteBuffer.allocate(capacity)
        bb.flip()
        expanded.put(bb)
        bb = expanded
      }
    }
def putString(string: String): Unit = {
if (string == null) { putInt(-1) } else {
putBytes(string.getBytes(StandardCharsets.UTF_8))
}
}
def getString: String = {
val length = getInt
if (length == -1) { null } else {
val bytes = Array.ofDim[Byte](length)
bb.get(bytes)
new String(bytes, StandardCharsets.UTF_8)
}
}
def putBytes(bytes: Array[Byte]): Unit = { ensureRemaining(bytes.length + 4); bb.putBytes(bytes) }
def getBytes: Array[Byte] = bb.getBytes
def putBool(bool: Boolean): Unit = { ensureRemaining(1); bb.putBool(bool) }
def getBool: Boolean = bb.getBool
def toArray: Array[Byte] = bb.toArray
def put(b: Byte): ByteBuffer = { ensureRemaining(1); bb.put(b) }
def get(): Byte = bb.get()
def put(src: Array[Byte]): ByteBuffer = { ensureRemaining(src.length); bb.put(src) }
def get(dst: Array[Byte]): ByteBuffer = bb.get(dst)
def putChar(value: Char): ByteBuffer = { ensureRemaining(2); bb.putChar(value) }
def getChar: Char = bb.getChar
def putShort(value: Short): ByteBuffer = { ensureRemaining(2); bb.putShort(value) }
def getShort: Short = bb.getShort
def putInt(value: Int): ByteBuffer = { ensureRemaining(4); bb.putInt(value) }
def getInt: Int = bb.getInt
def putLong(value: Long): ByteBuffer = { ensureRemaining(8); bb.putLong(value) }
def getLong: Long = bb.getLong
def putFloat(value: Float): ByteBuffer = { ensureRemaining(4); bb.putFloat(value) }
def getFloat: Float = bb.getFloat
def putDouble(value: Double): ByteBuffer = { ensureRemaining(8); bb.putDouble(value) }
def getDouble: Double = bb.getDouble
}
implicit class RichByteBuffer(val bb: ByteBuffer) extends AnyVal {
def toInputStream: InputStream = new ByteBufferInputStream(bb)
def putBytes(bytes: Array[Byte]): Unit = {
bb.putInt(bytes.length)
bb.put(bytes)
}
def getBytes: Array[Byte] = {
val bytes = Array.ofDim[Byte](bb.getInt())
bb.get(bytes)
bytes
}
def putString(string: String): Unit = {
if (string == null) { bb.putInt(-1) } else {
putBytes(string.getBytes(StandardCharsets.UTF_8))
}
}
def getString: String = {
val length = bb.getInt
if (length == -1) { null } else {
val bytes = Array.ofDim[Byte](length)
bb.get(bytes)
new String(bytes, StandardCharsets.UTF_8)
}
}
def putBool(bool: Boolean): Unit = bb.put(if (bool) { 1.toByte } else { 0.toByte })
def getBool: Boolean = bb.get == 1
def toArray: Array[Byte] = {
bb.flip()
val bytes = Array.ofDim[Byte](bb.remaining)
bb.get(bytes)
bytes
}
}
class ByteBufferInputStream(buffer: ByteBuffer) extends InputStream {
override def read(): Int = {
if (!buffer.hasRemaining) { -1 } else {
buffer.get() & 0xFF
}
}
override def read(bytes: Array[Byte], offset: Int, length: Int): Int = {
if (!buffer.hasRemaining) { -1 } else {
val read = math.min(length, buffer.remaining)
buffer.get(bytes, offset, read)
read
}
}
}
}
|
ddseapy/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/io/ByteBuffers.scala
|
Scala
|
apache-2.0
| 4,130 |
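A small round-trip sketch for the helpers above; the buffer size and strings are arbitrary, and only members defined in this file (ExpandingByteBuffer.putString/toArray and the RichByteBuffer implicit) are used.

import java.nio.ByteBuffer
import org.locationtech.geomesa.utils.io.ByteBuffers
import org.locationtech.geomesa.utils.io.ByteBuffers.RichByteBuffer

object ByteBuffersRoundTrip extends App {
  // Write two length-prefixed strings into an expanding buffer (deliberately
  // small so it has to grow), then read them back through RichByteBuffer.
  val out = new ByteBuffers.ExpandingByteBuffer(8)
  out.putString("hello")
  out.putString("world")

  val in: ByteBuffer = ByteBuffer.wrap(out.toArray)
  println(in.getString) // hello
  println(in.getString) // world
}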
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.kafka
import org.junit.Test
import org.junit.Assert._
import org.apache.mesos.Protos.{ContainerInfo, Offer, TaskID, TaskState, TaskStatus, Volume}
import java.util.{Date, UUID}
import java.util.concurrent.TimeUnit
import ly.stealth.mesos.kafka.Broker.{Container, ContainerType, ExecutionOptions, Mount, MountMode}
import ly.stealth.mesos.kafka.executor.{Executor, LaunchConfig}
import ly.stealth.mesos.kafka.json.JsonUtil
import ly.stealth.mesos.kafka.scheduler.BrokerState
import ly.stealth.mesos.kafka.scheduler.mesos.{OfferManager, OfferResult}
import net.elodina.mesos.util.Period
import net.elodina.mesos.util.Strings.parseMap
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.concurrent.duration.Duration
class SchedulerTest extends KafkaMesosTestCase {
@Test
def newTask {
val broker = new Broker(1)
broker.options = parseMap("a=1").toMap
broker.log4jOptions = parseMap("b=2").toMap
broker.cpus = 0.5
broker.mem = 256
broker.heap = 512
broker.executionOptions = ExecutionOptions(jvmOptions = "-Xms64m")
val offer = this.offer("id", "fw-id", "slave", "host", s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000", "")
val reservation = broker.getReservation(offer)
val task = registry.taskFactory.newTask(broker, offer, reservation)
assertEquals("slave", task.getSlaveId.getValue)
assertNotNull(task.getExecutor)
// executor
val command = task.getExecutor.getCommand
assertTrue(command.getUrisCount > 0)
val cmd: String = command.getValue
assertTrue(cmd, cmd.contains("-Xmx" + broker.heap + "m"))
assertTrue(cmd, cmd.contains(broker.executionOptions.jvmOptions))
assertTrue(cmd, cmd.contains(Executor.getClass.getName.replace("$", "")))
// resources
assertEquals(resources(s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000"), task.getResourcesList)
// data
val launchConfig = JsonUtil.fromJson[LaunchConfig](task.getData.toByteArray)
assertEquals(broker.id, launchConfig.id)
assertEquals(broker.options, launchConfig.options)
assertEquals(broker.log4jOptions, launchConfig.log4jOptions)
val defaults = launchConfig.interpolatedOptions
assertEquals(broker.id.toString, defaults("broker.id"))
assertEquals("" + 1000, defaults("port"))
assertEquals(Config.zk, defaults("zookeeper.connect"))
assertEquals("kafka-logs", defaults("log.dirs"))
assertEquals(offer.getHostname, defaults("host.name"))
}
private def volume(host: String, container: String, mode: MountMode) =
Volume.newBuilder()
.setHostPath(host)
.setContainerPath(container)
.setMode(mode match {
case MountMode.ReadWrite => Volume.Mode.RW
case MountMode.ReadOnly => Volume.Mode.RO
})
.build()
@Test
def newDockerTask: Unit = {
val broker = new Broker(1)
broker.options = parseMap("a=1").toMap
broker.log4jOptions = parseMap("b=2").toMap
broker.cpus = 0.5
broker.mem = 256
broker.heap = 512
broker.executionOptions = ExecutionOptions(
container = Some(Container(
ctype = ContainerType.Docker,
name = "test",
mounts = Seq(Mount("/a", "/b", MountMode.ReadWrite))
)),
jvmOptions = "-Xms64m"
)
val offer = this.offer("id", "fw-id", "slave", "host", s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000", "")
val reservation = broker.getReservation(offer)
val task = registry.taskFactory.newTask(broker, offer, reservation)
assertEquals("test", task.getExecutor.getContainer.getDocker.getImage)
assertEquals(ContainerInfo.Type.DOCKER, task.getExecutor.getContainer.getType)
assertEquals(
Seq(volume("/a", "/b", MountMode.ReadWrite)).asJava,
task.getExecutor.getContainer.getVolumesList)
}
@Test
def newMesosImageTask: Unit = {
val broker = new Broker(1)
broker.options = parseMap("a=1").toMap
broker.log4jOptions = parseMap("b=2").toMap
broker.cpus = 0.5
broker.mem = 256
broker.heap = 512
broker.executionOptions = ExecutionOptions(
container = Some(Container(
ctype = ContainerType.Mesos,
name = "test",
mounts = Seq(Mount("/a", "/b", MountMode.ReadWrite))
)),
jvmOptions = "-Xms64m"
)
val offer = this.offer("id", "fw-id", "slave", "host", s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000", "")
val reservation = broker.getReservation(offer)
val task = registry.taskFactory.newTask(broker, offer, reservation)
assertEquals(ContainerInfo.Type.MESOS, task.getExecutor.getContainer.getType)
assertEquals("test", task.getExecutor.getContainer.getMesos.getImage.getDocker.getName)
assertEquals(
Seq(volume("/a", "/b", MountMode.ReadWrite)).asJava,
task.getExecutor.getContainer.getVolumesList)
}
@Test
def syncBrokers {
val broker = registry.cluster.addBroker(new Broker())
val offer = this.offer(s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000")
// broker !active
assertFalse(registry.scheduler.tryLaunchBrokers(Seq(offer)))
assertEquals(0, schedulerDriver.launchedTasks.size())
// broker active
broker.active = true
assertTrue(registry.scheduler.tryLaunchBrokers(Seq(offer)))
assertEquals(1, schedulerDriver.launchedTasks.size())
assertEquals(0, schedulerDriver.killedTasks.size())
// broker !active
broker.task = Broker.Task(id = "1")
broker.task.state = Broker.State.RUNNING
registry.brokerLifecycleManager.tryTransition(broker, BrokerState.Inactive())
assertTrue(broker.task.stopping)
assertEquals(1, schedulerDriver.launchedTasks.size())
assertEquals(1, schedulerDriver.killedTasks.size())
}
@Test
def acceptOffer {
val broker = registry.cluster.addBroker(new Broker())
broker.active = true
broker.task = null
var theOffer = offer(s"cpus:0.4; mem:${broker.mem}")
assertEquals(
Right(Seq(OfferResult.neverMatch(theOffer, broker, s"cpus < ${broker.cpus}"))),
registry.offerManager.tryAcceptOffer(theOffer, Seq(broker)))
theOffer = offer(s"cpus:${broker.cpus}; mem:99")
assertEquals(
Right(Seq(OfferResult.neverMatch(theOffer, broker, s"mem < ${broker.mem}"))),
registry.offerManager.tryAcceptOffer(theOffer, Seq(broker)))
theOffer = offer(s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000")
assertTrue(registry.scheduler.tryLaunchBrokers(Seq(theOffer)))
assertEquals(1, schedulerDriver.launchedTasks.size())
theOffer = offer(s"cpus:${broker.cpus}; mem:${broker.mem}")
assertEquals(
Right(Seq(OfferResult.NoWork(theOffer))),
registry.offerManager.tryAcceptOffer(theOffer, Seq(broker)))
}
@Test
def acceptMultipleOffers: Unit = {
val broker = registry.cluster.addBroker(new Broker())
broker.active = true
broker.task = null
val o1 = offer(s"cpus:1; mem: ${broker.mem}; ports:1000")
val o2 = offer(s"cpus:1; mem: ${broker.mem}; ports:1000")
assertTrue(registry.scheduler.tryLaunchBrokers(Seq(o1, o2)))
assertEquals(1, schedulerDriver.acceptedOffers.size())
assertEquals(1, schedulerDriver.declinedOffers.size())
assertEquals(1, schedulerDriver.launchedTasks.size())
assertEquals(broker.task.id, schedulerDriver.launchedTasks.get(0).getTaskId.getValue)
}
@Test
def launchMultipleBrokers = {
val b1 = registry.cluster.addBroker(new Broker(1))
val b2 = registry.cluster.addBroker(new Broker(2))
b1.active = true
b2.active = true
val o1 = offer("host1", s"cpus:1; mem: ${b1.mem}; ports:1000")
val o2 = offer("host2", s"cpus:1; mem: ${b2.mem}; ports:1000")
assertTrue(registry.scheduler.tryLaunchBrokers(Seq(o1, o2)))
assertEquals(2, schedulerDriver.acceptedOffers.distinct.size)
assertEquals(2, schedulerDriver.launchedTasks.map(_.getTaskId.getValue).size())
assertNotEquals(b1.task.hostname, b2.task.hostname)
}
@Test
def onBrokerStatus {
val broker = registry.cluster.addBroker(new Broker())
broker.active = true
broker.task = Broker.Task(Broker.nextTaskId(broker), "slave", "executor", "host")
assertEquals(Broker.State.PENDING, broker.task.state)
// broker started
registry.brokerLifecycleManager.tryTransition(taskStatus(broker.task.id, TaskState.TASK_STARTING))
registry.brokerLifecycleManager.tryTransition(taskStatus(broker.task.id, TaskState.TASK_RUNNING, "localhost:9092"))
assertEquals(Broker.State.RUNNING, broker.task.state)
assertEquals("localhost:9092", "" + broker.task.endpoint)
registry.brokerLifecycleManager.tryTransition(broker, BrokerState.Inactive())
// broker finished
registry.brokerLifecycleManager.tryTransition(taskStatus(broker.task.id, TaskState.TASK_FINISHED))
assertNull(broker.task)
assertEquals(0, broker.failover.failures)
}
@Test
def onBrokerStarted {
val broker = registry.cluster.addBroker(new Broker())
broker.active = true
broker.task = Broker.Task(id = "0-" + UUID.randomUUID())
assertEquals(Broker.State.PENDING, broker.task.state)
registry.brokerLifecycleManager.tryTransition(taskStatus(broker.task.id, TaskState.TASK_STARTING))
assertEquals(Broker.State.STARTING, broker.task.state)
registry.brokerLifecycleManager.tryTransition(taskStatus(broker.task.id, TaskState.TASK_RUNNING, "localhost:9092"))
assertEquals("localhost:9092", "" + broker.task.endpoint)
}
@Test
def onBrokerStopped {
val broker = registry.cluster.addBroker(new Broker())
broker.active = true
val task = Broker.Task(id = "0-" + UUID.randomUUID())
task.state = Broker.State.RUNNING
// finished
broker.task = task
broker.needsRestart = true
registry.brokerLifecycleManager.tryTransition(broker, BrokerState.Inactive(false))
registry.brokerLifecycleManager.tryTransition(taskStatus(task.id, TaskState.TASK_FINISHED))
assertNull(broker.task)
assertEquals(0, broker.failover.failures)
assertFalse(broker.needsRestart)
// failed
broker.active = true
broker.task = task
broker.needsRestart = true
MockWallClock.overrideNow(Some(new Date(0)))
broker.task.state = Broker.State.RUNNING
registry.brokerLifecycleManager.tryTransition(taskStatus(task.id, TaskState.TASK_FAILED)) //, new Date(0))
assertNull(broker.task)
assertEquals(1, broker.failover.failures)
assertEquals(new Date(0), broker.failover.failureTime)
assertFalse(broker.needsRestart)
// failed maxRetries exceeded
broker.failover.maxTries = 2
broker.task = task
MockWallClock.overrideNow(Some(new Date(1)))
registry.brokerLifecycleManager.tryTransition(taskStatus(task.id, TaskState.TASK_FAILED)) //, new Date(1))
assertNull(broker.task)
assertEquals(2, broker.failover.failures)
assertEquals(new Date(1), broker.failover.failureTime)
assertTrue(broker.failover.isMaxTriesExceeded)
assertFalse(broker.active)
}
@Test
def declineFailedBroker: Unit = {
val broker = registry.cluster.addBroker(new Broker(0))
}
@Test
def launchTask {
val broker = registry.cluster.addBroker(new Broker(100))
val offer = this.offer("id", "fw-id", "slave-id", "host", s"cpus:${broker.cpus}; mem:${broker.mem}; ports:1000", "a=1,b=2")
broker.needsRestart = true
broker.active = true
assertTrue(registry.scheduler.tryLaunchBrokers(Seq(offer)))
assertEquals(1, schedulerDriver.launchedTasks.size())
assertFalse(broker.needsRestart)
assertNotNull(broker.task)
assertEquals(Broker.State.PENDING, broker.task.state)
assertEquals(parseMap("a=1,b=2").toMap, broker.task.attributes)
val task = schedulerDriver.launchedTasks.get(0)
assertEquals(task.getTaskId.getValue, broker.task.id)
val startingUpdate = TaskStatus.newBuilder()
.setTaskId(task.getTaskId)
.setState(TaskState.TASK_STARTING)
.build()
registry.brokerLifecycleManager.tryTransition(startingUpdate)
assertEquals(Broker.State.STARTING, broker.task.state)
val runningUpdate = TaskStatus.newBuilder()
.setTaskId(task.getTaskId)
.setState(TaskState.TASK_RUNNING)
.build()
registry.brokerLifecycleManager.tryTransition(runningUpdate)
assertEquals(Broker.State.RUNNING, broker.task.state)
}
@Test
def reconcileTasksIfRequired {
val broker0 = registry.cluster.addBroker(new Broker(0))
val broker1 = registry.cluster.addBroker(new Broker(1))
broker1.task = Broker.Task(id = "1")
broker1.task.state = Broker.State.RUNNING
val broker2 = registry.cluster.addBroker(new Broker(2))
broker2.task = Broker.Task(id = "2")
MockWallClock.overrideNow(Some(new Date(0)))
registry.taskReconciler.start()
Thread.sleep(100)
assertEquals(1, registry.taskReconciler.attempts)
assertEquals(new Date(0), registry.taskReconciler.lastReconcile)
assertNull(broker0.task)
assertEquals(Broker.State.RECONCILING, broker1.task.state)
assertEquals(Broker.State.RECONCILING, broker2.task.state)
for (i <- 2 until Config.reconciliationAttempts + 1) {
registry.taskReconciler.asInstanceOf[{def retryReconciliation()}].retryReconciliation()
assertEquals(i, registry.taskReconciler.attempts)
assertEquals(Broker.State.RECONCILING, broker1.task.state)
}
assertEquals(0, schedulerDriver.killedTasks.size())
// last reconcile should stop broker
registry.taskReconciler.asInstanceOf[{def retryReconciliation()}].retryReconciliation()
assertEquals(Broker.State.STOPPING, broker1.task.state)
assertEquals(2, schedulerDriver.killedTasks.size())
}
@Test
def reconciliationFullRun = {
Config.reconciliationTimeout = new Period("1ms")
val mockRegistry = registry
val broker0 = registry.cluster.addBroker(new Broker(0))
val broker1 = registry.cluster.addBroker(new Broker(1))
broker1.task = Broker.Task(id = "1")
broker1.task.state = Broker.State.RUNNING
val broker2 = registry.cluster.addBroker(new Broker(2))
broker2.task = Broker.Task(id = "2")
registry.taskReconciler.start().get
while(registry.taskReconciler.isReconciling) {
Thread.sleep(10)
}
assertEquals(Broker.State.STOPPING, broker1.task.state)
assertEquals(2, schedulerDriver.killedTasks.size())
}
@Test
def reconciliationSucceeds: Unit = {
Config.reconciliationTimeout = new Period("100ms")
val broker0 = registry.cluster.addBroker(new Broker(0))
val broker1 = registry.cluster.addBroker(new Broker(1))
broker1.active = true
broker1.task = Broker.Task(id = "1")
broker1.task.state = Broker.State.RUNNING
registry.taskReconciler.start().get()
val status = TaskStatus.newBuilder()
.setState(TaskState.TASK_RUNNING)
.setTaskId(TaskID.newBuilder().setValue("1"))
.setReason(TaskStatus.Reason.REASON_RECONCILIATION)
registry.brokerLifecycleManager.tryTransition(status.build())
while(registry.taskReconciler.isReconciling) {
Thread.sleep(10)
}
assertFalse(broker1.task.reconciling)
// Starting again should reset the attempts
registry.taskReconciler.start()
assertEquals(1, registry.taskReconciler.attempts)
}
@Test
def otherTasksAttributes {
val broker0 = registry.cluster.addBroker(new Broker(0))
broker0.task = Broker.Task(hostname = "host0", attributes = parseMap("a=1,b=2").toMap)
val broker1 = registry.cluster.addBroker(new Broker(1))
broker1.task = Broker.Task(hostname = "host1", attributes = parseMap("b=3").toMap)
val brokers = Seq(broker0, broker1)
assertEquals(Seq("host0", "host1"), OfferManager.otherTasksAttributes("hostname", brokers))
assertEquals(Seq("1"), OfferManager.otherTasksAttributes("a", brokers))
assertEquals(Seq("2", "3"), OfferManager.otherTasksAttributes("b", brokers))
}
@Test
def onFrameworkMessage = {
val broker0 = registry.cluster.addBroker(new Broker(0))
broker0.active = true
val broker1 = registry.cluster.addBroker(new Broker(1))
broker1.active = true
val metrics0 = new Broker.Metrics(Map[String, Number](
"underReplicatedPartitions" -> 2,
"offlinePartitionsCount" -> 3,
"activeControllerCount" -> 1
), System.currentTimeMillis())
val data = JsonUtil.toJsonBytes(FrameworkMessage(metrics = Some(metrics0)))
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker0)), slaveId(), data)
// metrics updated for corresponding broker
assertNotNull(broker0.metrics)
def assertMetricsEquals(expected: Broker.Metrics, actual: Broker.Metrics): Unit = {
assertEquals(expected("underReplicatedPartitions"), actual("underReplicatedPartitions"))
assertEquals(expected("offlinePartitionsCount"), actual("offlinePartitionsCount"))
assertEquals(expected("activeControllerCount"), actual("activeControllerCount"))
assertEquals(expected.timestamp, actual.timestamp)
}
assertMetricsEquals(metrics0, broker0.metrics)
// metrics updated only for active brokers
broker1.active = false
val metrics1 = new Broker.Metrics(Map(
"offlinePartitionsCount" -> 1),
System.currentTimeMillis()
)
val data1 = JsonUtil.toJsonBytes(FrameworkMessage(metrics=Some(metrics1)))
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker1)), slaveId(), data1)
}
@Test
def sendReceiveBrokerLog = {
val broker = registry.cluster.addBroker(new Broker(0))
broker.task = new Broker.Task("task-id", "slave-id", "executor-id")
// driver connected
val future = registry.scheduler.requestBrokerLog(broker, "stdout", 111, Duration(1, TimeUnit.SECONDS))
assertEquals(1, schedulerDriver.sentFrameworkMessages.size())
val message = schedulerDriver.sentFrameworkMessages.get(0)
val messageData = LogRequest.parse(new String(message.data))
val requestId = messageData.requestId
assertEquals(broker.task.executorId, message.executorId)
assertEquals(broker.task.slaveId, message.slaveId)
assertEquals(LogRequest(requestId, 111, "stdout").toString, new String(message.data))
val content = "1\n2\n3\n"
val data = JsonUtil.toJsonBytes(FrameworkMessage(log = Some(LogResponse(requestId, content))))
// skip log response when broker is null
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(new Broker(100))), slaveId(), data)
assertFalse(future.isCompleted)
// skip log response when not active
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker)), slaveId(), data)
assertFalse(future.isCompleted)
// skip log response when no task
broker.active = true
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker)), slaveId(), data)
assertFalse(future.isCompleted)
    // skip log response when the broker has a task but the task is not running
broker.task = Broker.Task()
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker)), slaveId(), data)
assertFalse(future.isCompleted)
    // broker has to be active and its task has to be running
broker.task = Broker.Task()
broker.task.state = Broker.State.RUNNING
registry.scheduler.frameworkMessage(schedulerDriver, executorId(Broker.nextExecutorId(broker)), slaveId(), data)
assertTrue(future.isCompleted)
}
}
|
tc-dc/kafka-mesos
|
src/test/ly/stealth/mesos/kafka/SchedulerTest.scala
|
Scala
|
apache-2.0
| 20,260 |
package com.zobot.client.packet.definitions.clientbound.play
import com.zobot.client.packet.Packet
case class EntityMetadata(entityId: Int, metadata: Any) extends Packet {
override lazy val packetId = 0x3C
override lazy val packetData: Array[Byte] =
fromVarInt(entityId) ++
fromAny(metadata)
}
|
BecauseNoReason/zobot
|
src/main/scala/com/zobot/client/packet/definitions/clientbound/play/EntityMetadata.scala
|
Scala
|
mit
| 309 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.stats
import akka.actor.{Actor, ActorLogging, ActorRef, ActorSystem, Props}
import akka.pattern.ask
import java.io._
import org.mashupbots.socko.events.HttpRequestEvent
import org.mashupbots.socko.routes._
import org.mashupbots.socko.infrastructure.Logger
import org.mashupbots.socko.webserver.{WebServer, WebServerConfig}
import scala.collection.mutable.Buffer
import scala.concurrent.Await
import scala.concurrent.duration._
import tdb.Constants._
import tdb.messages._
import tdb.worker.WorkerInfo
object Stats {
var registeredWorkers = Buffer[WorkerInfo]()
val imgSrc = "http://thomasdb.cs.cmu.edu/wordpress/wp-content/uploads/2014/08/thomasdb-white.png"
val tickFrequency = 100.milliseconds
var statsActor: ActorRef = null
def launch(system: ActorSystem, mode: String, host: String, port: Int) {
// We should only launch one stats actor per JVM.
assert(statsActor == null)
statsActor =
if (mode == "master")
system.actorOf(Props(classOf[MasterStats]))
else
system.actorOf(Props(classOf[WorkerStats]))
val routes = Routes({
case HttpRequest(request) => request match {
case GET(request) =>
statsActor ! request
}
})
val webServer =
new WebServer(
WebServerConfig(hostname = host, port = port),
routes,
system)
webServer.start()
import system.dispatcher
val cancellable =
system.scheduler.schedule(
0.milliseconds,
tickFrequency,
statsActor,
"tick")
}
def createPage(title: String, body: String): String =
s"""
<!DOCTYPE html>
<html>
<head>
<title>$title</title>
<style>
body {
font-family: calibri;
color: #333333;
}
body a {
text-decoration: none;
}
body a:hover {
text-decoration: underline;
}
#header {
background-color: #990000;
color: #000000;
font-size: 24pt;
padding: 0px;
}
#mainTable tr td {
padding: 10px;
}
#workerTable {
        font-family: \"Trebuchet MS\", Arial, Helvetica, sans-serif;
width: 100%;
border-collapse: collapse;
}
#workerTable td, #workerTable th {
font-size: 1em;
border: 1px solid #333333;
padding: 3px 7px 2px 7px;
}
#workerTable th {
font-size: 1.1em;
text-align: left;
padding-top: 5px;
padding-bottom: 4px;
background-color: #333333;
color: #ffffff;
}
</style>
      <script src=\"Chart.js\"></script>
      <script type=\"text/javascript\">
// Get the context of the canvas element we want to select
        //var ctx = document.getElementById(\"myChart\").getContext(\"2d\");
//var myNewChart = new Chart(ctx).PolarArea(data);
</script>
</head>
<body>
      <table id=\"mainTable\">
        <tr id=\"header\">
<td>
            <img src=\"$imgSrc\" width=\"50px\">
</td>
<td>$title</td>
</tr>
<tr>
<td colspan=2>$body
            <canvas id=\"myChart\" width=\"400\" height=\"400\"></canvas>
</td>
</tr>
</table>
</body>
</html>"""
def clear() {
if (statsActor != null) {
Await.result(statsActor ? ClearMessage, DURATION)
}
}
}
|
twmarshall/tdb
|
core/src/main/scala/tdb/stats/Stats.scala
|
Scala
|
apache-2.0
| 4,317 |
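A minimal launch sketch for the stats endpoint above; the actor system name, host and port are arbitrary assumptions, and only Stats.launch and Stats.clear from this file are used.

import akka.actor.ActorSystem
import tdb.stats.Stats

object StatsLaunchExample extends App {
  val system = ActorSystem("tdb-example") // system name is an arbitrary choice
  Stats.launch(system, mode = "master", host = "127.0.0.1", port = 8889)
  // ... later, reset any collected statistics
  Stats.clear()
}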
package synereo.client.modalpopups
import diode.{ModelR, ModelRO}
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.GlobalStyles
import synereo.client.css.SynereoCommanStylesCSS
import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import synereo.client.components._
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}
import scala.scalajs.js
/**
* Created by mandar.k on 4/13/2016.
*/
//scalastyle:off
object LoginErrorModal {
@inline private def bss = GlobalStyles.bootstrapStyles
case class Props(submitHandler: (Boolean) => Callback, loginError: String = "")
case class State(showLogin: Boolean = false,
lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)
class LoginErrorBackend(t: BackendScope[Props, State]) {
def closeForm = {
jQuery(t.getDOMNode()).modal("hide")
t.modState(s => s.copy(showLogin = true))
}
def modalClosed(state: LoginErrorModal.State, props: LoginErrorModal.Props): Callback = {
props.submitHandler(state.showLogin)
}
def mounted(): Callback = Callback {
SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))
}
def updateLang(reader: ModelRO[js.Dynamic]) = {
t.modState(s => s.copy(lang = reader.value)).runNow()
}
}
private val component = ReactComponentB[Props]("ErrorModal")
.initialState_P(p => State())
.backend(new LoginErrorBackend(_))
.renderPS((t, props, state) => {
val headerText = state.lang.selectDynamic("ERROR").toString
Modal(
Modal.Props(
// header contains a cancel button (X)
header = hide => <.span(<.h4(headerText)),
closed = () => t.backend.modalClosed(state, props)
),
<.div(^.className := "container-fluid")(
<.div(^.className := "row")(
<.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
<.div(^.className := "row")(
<.div()(
<.h3(SynereoCommanStylesCSS.Style.loginErrorHeading)(props.loginError)
),
<.div(bss.modal.footer, SynereoCommanStylesCSS.Style.errorModalFooter)(
<.div(^.className := "row")(
<.div(^.className := "col-md-12 text-center")(
<.div()(
<.h5(state.lang.selectDynamic("API_HOST_UNREACHABLE").toString),
<.button(^.tpe := "button", ^.className := "btn btn-default",
^.onClick --> t.backend.closeForm)(state.lang.selectDynamic("CLOSE").toString)
)
)
)
)
)
)
)
)
)
})
.componentDidMount(scope => scope.backend.mounted())
.build
def apply(props: Props) = component(props)
}
|
LivelyGig/ProductWebUI
|
sclient/src/main/scala/synereo/client/modalpopups/LoginErrorModal.scala
|
Scala
|
apache-2.0
| 2,983 |
package scala.forthe.impatient.ch7
/*
Scala for the Impatient
chapter 7. Packages and Imports
exercise 9.
Write a program that imports the java.lang.System class,
reads the user name from the user.name system property,
reads a password from the Console object,
and prints a message to the standard error stream if the password is not "secret".
Otherwise, print a greeting to the standard output stream.
Do not use any other imports, and do not use any qualified names (with dots)
*/
object ch7ex9 extends App {
import java.lang.{System => jls}
val uname = jls.getProperty("user.name", "John/Jane Doe")
println(s"your name is $uname and your password is (it's a question):")
val pwd = jls.console().readPassword().mkString("")
if (pwd == "secret") jls.out.print(s"Hello $uname, welcome back!")
else jls.err.print(s"$uname, you should run away now.")
}
object ch7ex9v2 extends App {
import java.lang.System._
val uname = getProperty("user.name", "John/Jane Doe")
println(s"your name is $uname and your password is (it's a question):")
val pwd = console().readPassword().mkString("")
if (pwd == "secret") out.print(s"Hello $uname, welcome back!")
else err.print(s"$uname, you should run away now.")
}
|
vasnake/scala-for-the-impatient
|
src/main/scala/worksheets_1st_edition/scalaImpatient7_9.scala
|
Scala
|
gpl-3.0
| 1,242 |
/*
* Copyright 2013 - 2017 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.morpheus.schema
import org.scalatest.{Matchers, FlatSpec}
import com.outworkers.morpheus.tables.ZeroFillTable
class ZeroFillColumnsSerialisationTest extends FlatSpec with Matchers {
it should "serialise a ZEROFILL UNSIGNED NOT NULL column" in {
ZeroFillTable.tinyInt.qb.queryString shouldEqual "tinyInt TINYINT ZEROFILL UNSIGNED NOT NULL"
}
it should "serialise a ZEROFILL NOT NULL column" in {
ZeroFillTable.tinyIntLimited.qb.queryString shouldEqual "tinyIntLimited TINYINT(5) ZEROFILL NOT NULL"
}
it should "serialise a ZEROFILL UNSIGNED column" in {
ZeroFillTable.smallInt.qb.queryString shouldEqual "smallInt SMALLINT ZEROFILL UNSIGNED"
}
}
|
websudos/morpheus
|
morpheus-dsl/src/test/scala/com/outworkers/morpheus/schema/ZeroFillColumnsSerialisationTest.scala
|
Scala
|
bsd-2-clause
| 1,305 |
package org.automanlang.core.policy.timeout
import org.automanlang.core.question.Question
import org.automanlang.core.scheduler.Task
class FixedTimeoutPolicy(question: Question) extends TimeoutPolicy(question) {
/**
* Calculate the task timeout given a question and the
* worker timeout.
* @param worker_timeout_in_s The worker timeout.
* @return The new task timeout, in seconds
*/
override def calculateTaskTimeout(worker_timeout_in_s: Int): Int = (question.question_timeout_multiplier * worker_timeout_in_s).toInt
/**
* Calculate the worker timeout given a question, the
* tasks so far, and the round.
* @param tasks The Tasks so far.
* @param currentRound The current round.
* @return The new worker timeout, in seconds.
*/
override def calculateWorkerTimeout(tasks: List[Task], currentRound: Int, had_timeout: Boolean): Int = {
question.initial_worker_timeout_in_s
}
}
|
dbarowy/AutoMan
|
libautoman/src/main/scala/org/automanlang/core/policy/timeout/FixedTimeoutPolicy.scala
|
Scala
|
gpl-2.0
| 924 |
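To make the relationship concrete with purely hypothetical numbers: if question_timeout_multiplier is 1.5 and initial_worker_timeout_in_s is 60, then calculateWorkerTimeout always returns 60, and calculateTaskTimeout(60) returns (1.5 * 60).toInt = 90 seconds for the task.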
///////////////////////////////////////////////////////////////
// © Primetalk Ltd. (ООО «Праймтолк»), 2011-2013              //
// All rights reserved by Primetalk Ltd.                      //
///////////////////////////////////////////////////////////////
/**
* SynapseGrid
* © Primetalk Ltd., 2013.
* All rights reserved.
* Authors: A.Zhizhelev, A.Nehaev, P. Popov
* (2-clause BSD license) See LICENSE
*
* Created: 28.06.13, zhizhelev
*/
package ru.primetalk.synapse.core.components
/**
* Named is used to store graph specific information - label or name.
*/
trait Named {
def name: String
override def toString =
    getClass.getSimpleName + "(\"" + name + "\")"
}
/**
* Stateful elements of the system.
*/
trait Stateful[State] {
type StateType = State
/**
* The initial state of the element.
*/
val s0: State
}
/** An outer description of a system.
* Actual description is deferred to descendants.
* See also [[Link]]s
*/
trait Component extends Named {
val inputContacts: Set[Contact[_]]
val outputContacts: Set[Contact[_]]
}
/** A component that has single input and single output.
*/
trait TwoPoleComponent[T1, T2] extends Component {
def from: Contact[T1]
def to: Contact[T2]
lazy val inputContacts : Set[Contact[_]] = Set(from)
lazy val outputContacts : Set[Contact[_]] = Set(to)
}
/** Transparent component whose internal structure can be represented as a StaticSystem.*/
trait ComponentWithInternalStructure extends Component with WithStaticSystem
/** The system that can be embedded into some other static system.
* It has specially processed state:
* @param s structure of the system
* @param stateHandle the handle within parent system that holds internal system's state. The handle points to the map (stateHandle -> value)
* @param sharedStateHandles a few state handles that are shared between the parent system and child.
* During runtime processing current values from parent are copied to child state
* before processing any signals and copied back afterwards.
*/
case class InnerSystemComponent(
s: StaticSystem,
/** main state handle that will hold private state of the subsystem. */
stateHandle: StateHandle[Map[Contact[_], Any]],
/** State handles to be shared with parent */
sharedStateHandles: List[StateHandle[_]] = Nil) extends Component with ComponentWithInternalStructure {
val inputContacts = s.inputContacts
val outputContacts = s.outputContacts
def name = s.name
def toStaticSystem: StaticSystem = s
}
/**
* Special component that atomically updates state. It doesn't have any output contact.
*/
case class StateUpdate[S, T2](
from: Contact[T2],
stateHandle: StateHandle[S],
override val name: String,
f: (S, T2) ⇒ S) // = (s : S, t : T2) ⇒ t)
extends Component {
lazy val inputContacts = Set(from): Set[Contact[_]]
lazy val outputContacts = Set[Contact[_]]() //stateHolder)
}
object StateUpdate {
def replace[S, T2 <: S](s: S, t: T2) = t
}
|
GitOutATown/SynapseGrid
|
synapse-grid-core/src/main/scala/ru/primetalk/synapse/core/components/Components.scala
|
Scala
|
bsd-2-clause
| 3,336 |
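A small wiring sketch for StateUpdate above; the Contact and StateHandle values are assumed to be built elsewhere in synapse-grid, and the import path for them is an assumption (they are referenced unqualified in this file).

import ru.primetalk.synapse.core.components.{Contact, StateHandle, StateUpdate}

object StateWiring {
  // Route every value arriving on `from` into `handle`, atomically replacing
  // the stored state with the incoming value (via StateUpdate.replace).
  def replaceState[T](from: Contact[T], handle: StateHandle[T]): StateUpdate[T, T] =
    StateUpdate(from, handle, s"replace $handle", StateUpdate.replace[T, T] _)
}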
package com.stellmangreene.pbprdf
import org.eclipse.rdf4j.model._
import org.eclipse.rdf4j.model.vocabulary._
import org.eclipse.rdf4j.repository.Repository
import better.files._
import com.stellmangreene.pbprdf.model.EntityIriFactory
import com.stellmangreene.pbprdf.model.Ontology
import com.stellmangreene.pbprdf.plays._
import com.stellmangreene.pbprdf.util.RdfOperations._
import com.typesafe.scalalogging.LazyLogging
/**
* A play-by-play event that can be parsed into RDF triples
*
* @param gameIri
* IRI of the game
* @param eventNumber
* Sequential number of this event
* @param period
* Period this occurred in (overtime starts with period 5)
* @param time
* Time of the event
* @param description
* Description of the event (eg. "Washington full timeout")
* @param gamePeriodInfo
* Period information
* @param team
* Name of the team (to include in the text file contents for the play-by-play)
 * @param score
* Score for the event (to include in the text file contents for the play-by-play)
* @param play
* Trimmed text of the play (to include in the file contents for the play-by-play)
*
* @author andrewstellman
*/
case class Event(gameIri: IRI, eventNumber: Int, period: Int, time: String, description: String)(val gamePeriodInfo: GamePeriodInfo, val team: String, val score: String, play: String)
extends LazyLogging {
override def toString = "Period " + period + " " + time + " - " + description
/** IRI of this event for RDF */
val eventIri = EntityIriFactory.getEventIri(gameIri, eventNumber)
/**
* Add this event to an RDF repository
*
* @param rep
* rdf4j repository to add the events to
*/
def addRdf(rep: Repository) = {
val valueFactory = rep.getValueFactory
rep.addTriples(eventTriples(valueFactory))
rep.addTriples(secondsIntoGameTriple(valueFactory))
rep.addTriples(scoreTriples(valueFactory))
}
/** Generates the type, period, time, and label triples that every event must have */
private def eventTriples(valueFactory: ValueFactory): Set[(Resource, IRI, Value)] = {
Set(
(eventIri, RDF.TYPE, Ontology.EVENT),
(eventIri, Ontology.IN_GAME, gameIri),
(eventIri, Ontology.PERIOD, valueFactory.createLiteral(period)),
(eventIri, Ontology.TIME, valueFactory.createLiteral(time)),
(eventIri, RDFS.LABEL, valueFactory.createLiteral(description)))
}
/** Generate the pbprdf:secondsIntoGame and pbprdf:secondsLeftInPeriod triples */
private def secondsIntoGameTriple(valueFactory: ValueFactory): Set[(Resource, IRI, Value)] = {
val secondsLeft = gamePeriodInfo.clockToSecondsLeft(period, time)
secondsLeft.map(eventTimes => {
Set[(Resource, IRI, Value)](
(eventIri, Ontology.SECONDS_INTO_GAME, valueFactory.createLiteral(eventTimes.secondsIntoGame)),
(eventIri, Ontology.SECONDS_LEFT_IN_PERIOD, valueFactory.createLiteral(eventTimes.secondsLeftInPeriod)))
})
.getOrElse(Set[(Resource, IRI, Value)]())
}
/** Generate the score triples */
private def scoreTriples(valueFactory: ValueFactory): Set[(Resource, IRI, Value)] = {
    val scoreRegex = """\s*(\d+)\s*-\s*(\d+)\s*""".r
score match {
case scoreRegex(awayScore, homeScore) => Set(
(eventIri, Ontology.AWAY_SCORE, valueFactory.createLiteral(awayScore.toInt)),
(eventIri, Ontology.HOME_SCORE, valueFactory.createLiteral(homeScore.toInt)))
case _ => {
logger.warn(s"Unable to parse score in play: $play")
Set()
}
}
}
/** Returns a text description of this event */
def getText: String = {
    s"$team\t$period\t$time\t$score\t${play.replaceAll("\t", " ")}"
}
}
/**
* Factory to create Play objects, choosing the subclass based on the play description
*
* @author andrewstellman
*/
object Event extends LazyLogging {
/**
* Create an instance of a play class, choosing the specific class based on the play description
*
   * @param gameIri
   *          IRI of the game
   * @param filename
   *          Name of the source play-by-play file (used in warning messages)
* @param eventNumber
* Sequential number of this event
* @param period
* Period this occurred in (overtime starts with period 5)
* @param team
* Name of the team
* @param play
* Description of the play (eg. "Alyssa Thomas makes free throw 2 of 2")
* @param score
* Game score ("10-4")
* @param gamePeriodInfo
* Period length in minutes
*
* @author andrewstellman
*/
def apply(gameIri: IRI, filename: String, eventNumber: Int, period: Int, time: String, team: String, play: String, score: String, gamePeriodInfo: GamePeriodInfo): Event = {
val trimmedPlay = play.trim.replaceAll(" +", " ")
trimmedPlay match {
case trimmedPlay if BlockPlay.matches(trimmedPlay) => new BlockPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if DelayOfGamePlay.matches(trimmedPlay) => new DelayOfGamePlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if EnterPlay.matches(trimmedPlay) => new EnterPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if FoulPlay.matches(trimmedPlay) => new FoulPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if JumpBallPlay.matches(trimmedPlay) => new JumpBallPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if ReboundPlay.matches(trimmedPlay) => new ReboundPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if ShotPlay.matches(trimmedPlay) => new ShotPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if DoubleTechnicalFoulPlay.matches(trimmedPlay) => new DoubleTechnicalFoulPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if TechnicalFoulPlay.matches(trimmedPlay) => new TechnicalFoulPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if ThreeSecondViolationPlay.matches(trimmedPlay) => new ThreeSecondViolationPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if FiveSecondViolationPlay.matches(trimmedPlay) => new FiveSecondViolationPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if TurnoverPlay.matches(trimmedPlay) => new TurnoverPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if TimeoutPlay.matches(trimmedPlay) => new TimeoutPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if EndOfPlay.matches(trimmedPlay) => new EndOfPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay if EjectionPlay.matches(trimmedPlay) => new EjectionPlay(gameIri, eventNumber, period, time, team, trimmedPlay, score, gamePeriodInfo)
case trimmedPlay => {
logger.warn(s"Could not match play description in ${filename}: ${trimmedPlay}")
Event(gameIri, eventNumber, period, time, trimmedPlay)(gamePeriodInfo, team, score, play)
}
}
}
/**
* Adds pbprdf:nextEvent and pbprdf:previousEvent triples to a list of events
*/
def addPreviousAndNextTriples(rep: Repository, events: Seq[Event]) = {
events
.sortBy(_.eventNumber)
.zipWithIndex.foreach(e => {
val (event, index) = e
rep.addTriple(event.eventIri, Ontology.EVENT_NUMBER, rep.getValueFactory.createLiteral(index + 1))
if (index + 1 < events.size) {
val nextEvent = events(index + 1)
rep.addTriple(event.eventIri, Ontology.NEXT_EVENT, nextEvent.eventIri)
if (nextEvent.period == event.period) {
val eventSecondsLeft = event.gamePeriodInfo.clockToSecondsLeft(nextEvent.period, nextEvent.time)
val nextEventSecondsLeft = event.gamePeriodInfo.clockToSecondsLeft(event.period, event.time)
if (nextEventSecondsLeft.isDefined && eventSecondsLeft.isDefined) {
val secondsUntilNextEvent = nextEventSecondsLeft.get.secondsLeftInPeriod - eventSecondsLeft.get.secondsLeftInPeriod
rep.addTriple(event.eventIri, Ontology.SECONDS_UNTIL_NEXT_EVENT, rep.getValueFactory.createLiteral(secondsUntilNextEvent))
}
}
}
if (index > 0) {
val previousEvent = events(index - 1)
rep.addTriple(event.eventIri, Ontology.PREVIOUS_EVENT, previousEvent.eventIri)
if (previousEvent.period == event.period) {
val eventSecondsLeft = event.gamePeriodInfo.clockToSecondsLeft(previousEvent.period, previousEvent.time)
val previousEventSecondsLeft = event.gamePeriodInfo.clockToSecondsLeft(event.period, event.time)
if (previousEventSecondsLeft.isDefined && eventSecondsLeft.isDefined) {
val secondsSincePreviousEvent = eventSecondsLeft.get.secondsLeftInPeriod - previousEventSecondsLeft.get.secondsLeftInPeriod
rep.addTriple(event.eventIri, Ontology.SECONDS_SINCE_PREVIOUS_EVENT, rep.getValueFactory.createLiteral(secondsSincePreviousEvent))
}
}
}
})
}
}
|
andrewstellman/pbprdf
|
src/main/scala/com/stellmangreene/pbprdf/Event.scala
|
Scala
|
mit
| 9,638 |
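A minimal sketch of the factory and addRdf above; the play text, team, score and file name are made-up values, and the game IRI, repository and GamePeriodInfo are assumed to be provided by the caller.

import org.eclipse.rdf4j.model.IRI
import org.eclipse.rdf4j.repository.Repository
import com.stellmangreene.pbprdf.{Event, GamePeriodInfo}

object AddOnePlay {
  // Parse a single (made-up) play-by-play line into an Event and write its triples.
  def addPlay(rep: Repository, gameIri: IRI, periodInfo: GamePeriodInfo): Event = {
    val event = Event(gameIri, "game.csv", eventNumber = 1, period = 1, time = "9:48",
      team = "Mystics", play = "Emma Meesseman makes layup", score = "2-0",
      gamePeriodInfo = periodInfo)
    event.addRdf(rep)
    event
  }
}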
package com.landoop.streamreactor.connect.hive.formats
import com.landoop.streamreactor.connect.hive.parquet.ParquetSinkConfig
import com.landoop.streamreactor.connect.hive.Serde
import com.typesafe.scalalogging.slf4j.StrictLogging
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.{Schema, Struct}
object ParquetHiveFormat extends HiveFormat with StrictLogging {
override def serde = Serde(
"org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe",
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat",
"org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat",
Map("serialization.format" -> "1")
)
override def writer(path: Path, schema: Schema)
(implicit fs: FileSystem): HiveWriter = new HiveWriter {
logger.debug(s"Creating parquet writer at $path")
val writer = com.landoop.streamreactor.connect.hive.parquet.parquetWriter(path, schema, ParquetSinkConfig(overwrite = true))
var count = 0
override def write(struct: Struct): Long = {
writer.write(struct)
count = count + 1
count
}
override def close(): Unit = {
logger.debug(s"Closing writer at path $path")
writer.close()
}
}
override def reader(path: Path, startAt: Int, schema: Schema)
(implicit fs: FileSystem): HiveReader = new HiveReader {
logger.debug(s"Creating parquet reader for $path with offset $startAt")
val reader = com.landoop.streamreactor.connect.hive.parquet.parquetReader(path)
var offset = startAt
override def iterator: Iterator[Record] = Iterator.continually(reader.read).takeWhile(_ != null).drop(startAt).map { struct =>
val record = Record(struct, path, offset)
offset = offset + 1
record
}
override def close(): Unit = reader.close()
}
}
|
CodeSmell/stream-reactor
|
kafka-connect-hive/connector/src/main/scala/com/landoop/streamreactor/connect/hive/formats/ParquetHiveFormat.scala
|
Scala
|
apache-2.0
| 1,859 |
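A round-trip sketch for the format above; the Path, Schema, Structs and FileSystem are assumed to come from the caller, and the `struct` field name on Record is inferred from its constructor in this file rather than confirmed.

import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.kafka.connect.data.{Schema, Struct}
import com.landoop.streamreactor.connect.hive.formats.ParquetHiveFormat

object ParquetRoundTrip {
  // Write a batch of Structs with ParquetHiveFormat and read them back.
  def roundTrip(path: Path, schema: Schema, rows: Seq[Struct])(implicit fs: FileSystem): Seq[Struct] = {
    val writer = ParquetHiveFormat.writer(path, schema)
    rows.foreach(writer.write)
    writer.close()

    val reader = ParquetHiveFormat.reader(path, 0, schema)
    try reader.iterator.map(_.struct).toVector
    finally reader.close()
  }
}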
package net.particlez.gui
import java.awt.image.BufferedImage
import java.awt.Color
import java.awt.Dimension
import java.awt.Graphics2D
import scala.swing.event.MouseClicked
import scala.swing.Panel
import net.particlez.Pos
import net.particlez.PosConfiguration
import scala.swing.event.Event
class DrawingPanel(val pm: ParticleManager, val w: Int = 200, val h: Int = 200) extends Panel {
preferredSize = new Dimension(w, h)
opaque = true
var repaints = 0
val bufferedImage = new BufferedImage(w, h, BufferedImage.TYPE_INT_BGR)
var cellSizeX: Int = 0
var cellSizeY: Int = 0
tooltip = "Current configuration"
listenTo(mouse.clicks)
reactions += {
case e: MouseClicked => publish(LocationSelected(e.point.x / cellSizeX, e.point.y / cellSizeY))
}
override def paintComponent(g: Graphics2D) {
super.paintComponent(g)
g.drawImage(bufferedImage, null, 0, 0)
//println("repaints#" + repaints)
repaints = repaints + 1
}
def drawConfiguration(c: PosConfiguration) = {
val g = bufferedImage.createGraphics();
assert(c.limits.size == 2)
g.setColor(Color.white)
    g.fillRect(0, 0, w, h)
cellSizeX = (w / c.limits(0)).toInt
cellSizeY = (h / c.limits(1)).toInt
for (x <- 0 until c.limits(0).toInt; y <- 0 until c.limits(1).toInt) {
val particle = c.content(Pos(x, y))
pm.particleIcon(particle, cellSizeX, cellSizeY).paintIcon(null, g, cellSizeX * x, cellSizeY * y)
}
super.repaint()
}
}
case class LocationSelected(x: Int, y: Int) extends Event
|
bbiletskyy/particlez
|
src/main/scala/net/particlez/gui/DrawingPanel.scala
|
Scala
|
apache-2.0
| 1,530 |
package mesosphere.marathon.io
import java.net.URL
import java.util.UUID
import mesosphere.marathon.CanceledActionException
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.util.{ Logging, ThreadPoolContext }
import scala.concurrent.Future
/**
* Download given url to given path of given storage provider.
* Note: the url stream is persisted in a temporary file in the same location as the path.
* The temporary file is moved, when the download is complete.
* @param url the url to download
* @param provider the storage provider
* @param path the path inside the storage, to store the content of the url stream.
*/
final class CancelableDownload(val url: URL, val provider: StorageProvider, val path: String) extends Logging {
val tempItem = provider.item(path + UUID.randomUUID().toString)
  @volatile var canceled = false
def cancel(): Unit = { canceled = true }
lazy val get: Future[CancelableDownload] = Future {
log.info(s"Download started from $url to path $path")
IO.using(url.openStream()) { in =>
tempItem.store { out => IO.transfer(in, out, close = false, !canceled) }
}
if (!canceled) {
log.info(s"Download finished from $url to path $path")
tempItem.moveTo(path)
}
else {
log.info(s"Cancel download of $url. Remove temporary storage item $tempItem")
tempItem.delete()
throw new CanceledActionException(s"Download of $path from $url has been canceled")
}
this
}(ThreadPoolContext.ioContext)
override def hashCode(): Int = url.hashCode()
override def equals(other: Any): Boolean = other match {
case c: CancelableDownload => (c.url == this.url) && (c.path == path)
case _ => false
}
}
|
ss75710541/marathon
|
src/main/scala/mesosphere/marathon/io/CancelableDownload.scala
|
Scala
|
apache-2.0
| 1,741 |
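A usage sketch for the download above; the provider, URL, target path and timeout are assumptions, and only the `get` and `cancel` members defined in this file are used.

import java.net.URL
import java.util.concurrent.TimeoutException
import scala.concurrent.Await
import scala.concurrent.duration._
import mesosphere.marathon.io.CancelableDownload
import mesosphere.marathon.io.storage.StorageProvider

object DownloadWithTimeout {
  // Start a download and cancel it if it does not finish within 30 seconds.
  def fetchOrCancel(provider: StorageProvider): Unit = {
    val download = new CancelableDownload(
      new URL("http://example.com/app.tgz"), provider, "downloads/app.tgz")
    try Await.result(download.get, 30.seconds)
    catch {
      case _: TimeoutException => download.cancel() // the transfer loop checks `canceled`
    }
  }
}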
package houseprices.search
import org.scalatest.Matchers
import org.scalatest.WordSpec
import akka.actor.ActorSystem
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.http.scaladsl.model.HttpMethods
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.MediaTypes.`application/json`
import houseprices.search.model.Query
import scala.concurrent.Future
import houseprices.search.model.SearchResult
import houseprices.search.model.PricePaidData
import spray.json._
class HousePriceSearchServerSpec extends WordSpec
with Matchers with ScalatestRouteTest with HousePriceSearchService {
implicit val ec = system.dispatcher
val searchClient = new SearchClient {
def search(qry: Query) = Future { SearchResult(1, List(PricePaidData(5, "today", "address1"))) }
}
"HousePriceSearchServer" when {
"searching" should {
"return search result" in {
val getRequest = HttpRequest(HttpMethods.GET, uri = "/search/some+query")
getRequest ~> routes ~> check {
status.isSuccess() shouldEqual true
responseEntity shouldEqual HttpEntity(`application/json`,
SearchResult(1, List(PricePaidData(5, "today", "address1"))).toJson.prettyPrint)
}
}
}
}
}
|
ayubmalik/houseprices
|
houseprices-rest-api/src/test/scala/houseprices/search/HousePriceSearchServerSpec.scala
|
Scala
|
mit
| 1,356 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import scala.language.postfixOps
import sbt._
import sbt.Keys._
import net.virtualvoid.sbt.graph.Plugin.graphSettings
import com.scalapenos.sbt.prompt.SbtPrompt.autoImport._
object Settings extends Build {
lazy val buildSettings = Seq(
name := "KillrWeather",
normalizedName := "killrweather",
organization := "com.datastax.killrweather",
organizationHomepage := Some(url("http://www.github.com/killrweather/killrweather")),
scalaVersion := Versions.Scala,
homepage := Some(url("https://github.com/killrweather/killrweather")),
licenses := Seq(("Apache License, Version 2.0", url("http://www.apache.org/licenses/LICENSE-2.0"))),
promptTheme := ScalapenosTheme
)
override lazy val settings = super.settings ++ buildSettings
val parentSettings = buildSettings ++ Seq(
publishArtifact := false,
publish := {}
)
lazy val defaultSettings = testSettings ++ graphSettings ++ sigarSettings ++ Seq(
autoCompilerPlugins := true,
// removed "-Xfatal-warnings" as temporary workaround for log4j fatal error.
scalacOptions ++= Seq("-encoding", "UTF-8", s"-target:jvm-${Versions.JDK}", "-feature", "-language:_", "-deprecation", "-unchecked", "-Xlint"),
javacOptions in Compile ++= Seq("-encoding", "UTF-8", "-source", Versions.JDK, "-target", Versions.JDK, "-Xlint:deprecation", "-Xlint:unchecked"),
run in Compile <<= Defaults.runTask(fullClasspath in Compile, mainClass in (Compile, run), runner in (Compile, run)),
ivyLoggingLevel in ThisBuild := UpdateLogging.Quiet,
parallelExecution in ThisBuild := false,
parallelExecution in Global := false/*,
ivyXML := <dependencies>
<exclude org="org.slf4j" module="slf4j-log4j12"/>
</dependencies>*/
)
val tests = inConfig(Test)(Defaults.testTasks) ++ inConfig(IntegrationTest)(Defaults.itSettings)
val testOptionSettings = Seq(
Tests.Argument(TestFrameworks.ScalaTest, "-oDF")
)
lazy val testSettings = tests ++ Seq(
parallelExecution in Test := false,
parallelExecution in IntegrationTest := false,
testOptions in Test ++= testOptionSettings,
testOptions in IntegrationTest ++= testOptionSettings,
baseDirectory in Test := baseDirectory.value.getParentFile(),
fork in Test := true,
fork in IntegrationTest := true,
(compile in IntegrationTest) <<= (compile in Test, compile in IntegrationTest) map { (_, c) => c },
managedClasspath in IntegrationTest <<= Classpaths.concat(managedClasspath in IntegrationTest, exportedProducts in Test)
)
lazy val sigarSettings = Seq(
unmanagedSourceDirectories in (Compile,run) += baseDirectory.value.getParentFile / "sigar",
javaOptions in run ++= {
System.setProperty("java.library.path", file("./sigar").getAbsolutePath)
Seq("-Xms128m", "-Xmx1024m")
})
}
|
arjunmantri/killrweather
|
project/Settings.scala
|
Scala
|
apache-2.0
| 3,612 |
package eu.ace_design.island.game.actions
/**
* Keywords to be used in JSON actions
*/
object Actions {
final val LAND = "land"
final val EXPLORE = "explore"
final val MOVE_TO = "move_to"
final val SCOUT = "scout"
final val EXPLOIT = "exploit"
final val STOP = "stop"
final val GLIMPSE = "glimpse"
final val TRANSFORM = "transform"
final val FLY = "fly"
final val HEADING = "heading"
final val ECHO = "echo"
final val SCAN = "scan"
}
|
ace-design/island
|
engine/src/main/scala/eu/ace_design/island/game/actions/Actions.scala
|
Scala
|
lgpl-3.0
| 501 |
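The constants above are the verbs that appear in the JSON exchanged with the engine. A hypothetical payload built around the LAND keyword could look like the following sketch; the parameter names ("creek", "people") are illustrative assumptions, not defined in this file.

import eu.ace_design.island.game.actions.Actions

object LandActionExample {
  // Only the action keyword comes from Actions; the parameters are made up.
  val landAction: String =
    s"""{ "action": "${Actions.LAND}", "parameters": { "creek": "id", "people": 30 } }"""
}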
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.workflowexecutor.partialexecution
import io.deepsense.commons.exception.FailureDescription
import io.deepsense.commons.models.Entity
import io.deepsense.deeplang.DOperable
import io.deepsense.deeplang.inference.InferContext
import io.deepsense.graph.DeeplangGraph.DeeplangNode
import io.deepsense.graph.Node.Id
import io.deepsense.graph._
import io.deepsense.models.workflows.{ExecutionReport, NodeStateWithResults}
import io.deepsense.reportlib.model.ReportContent
object Execution {
def empty: IdleExecution = IdleExecution(StatefulGraph(), Set.empty[Node.Id])
def apply(graph: StatefulGraph, selectedNodes: Set[Node.Id] = Set.empty): IdleExecution = {
IdleExecution(graph, selectedNodes)
}
def selectedNodes(directedGraph: DeeplangGraph, nodes: Seq[Id]): Set[Id] = {
val graphNodeIds = directedGraph.nodes.map(_.id)
val filteredNodes = nodes.filter(graphNodeIds.contains).toSet
filteredNodes
}
def defaultExecutionFactory(graph: StatefulGraph): Execution = {
Execution(graph)
}
}
sealed abstract class Execution {
final def node(id: Node.Id): DeeplangNode = graph.node(id)
def executionReport: ExecutionReport = {
ExecutionReport(graph.states.mapValues(_.nodeState), graph.executionFailure)
}
type NodeStates = Map[Node.Id, NodeStateWithResults]
def graph: StatefulGraph
def nodeStarted(id: Node.Id): Execution
def nodeFailed(id: Node.Id, cause: Exception): Execution
def nodeFinished(
id: Node.Id,
resultsIds: Seq[Entity.Id],
reports: Map[Entity.Id, ReportContent],
dOperables: Map[Entity.Id, DOperable]): Execution
def enqueue: Execution
def inferAndApplyKnowledge(inferContext: InferContext): Execution
def abort: Execution
}
case class IdleExecution(
override val graph: StatefulGraph,
selectedNodes: Set[Node.Id] = Set.empty)
extends Execution {
require(graph.readyNodes.isEmpty, "Idle executor must not have ready nodes")
override def nodeFinished(
id: Node.Id,
resultsIds: Seq[Entity.Id],
reports: Map[Entity.Id, ReportContent],
dOperables: Map[Entity.Id, DOperable]): Execution = {
throw new IllegalStateException("A node cannot finish in IdleExecution")
}
override def nodeFailed(id: Id, cause: Exception): Execution = {
throw new IllegalStateException("A node cannot fail in IdleExecution")
}
override def nodeStarted(id: Id): Execution = {
throw new IllegalStateException("A node cannot start in IdleExecution")
}
def updateStructure(
newStructure: DeeplangGraph,
nodes: Set[Id] = Set.empty): IdleExecution = {
val selected = Execution.selectedNodes(newStructure, nodes.toSeq)
val substructure = newStructure.subgraph(selected)
val newStates = findStates(newStructure, substructure, selected)
val graph = StatefulGraph(newStructure, newStates, None)
IdleExecution(graph, selected)
}
override def enqueue: Execution = {
val (selected: Set[Id], subgraph: StatefulGraph) = selectedSubgraph
val enqueuedSubgraph: StatefulGraph = subgraph.enqueueDraft
if (enqueuedSubgraph.isRunning) {
RunningExecution(graph, enqueuedSubgraph, selected)
} else {
this
}
}
override def inferAndApplyKnowledge(inferContext: InferContext): IdleExecution = {
val (_, subgraph: StatefulGraph) = selectedSubgraph
val inferred = subgraph.inferAndApplyKnowledge(inferContext)
copy(graph = graph.updateStates(inferred))
}
override def abort: Execution = {
throw new IllegalStateException("IdleExecution cannot be aborted!")
}
private def selectedSubgraph: (Set[Id], StatefulGraph) = {
val selected = Execution.selectedNodes(graph.directedGraph, selectedNodes.toSeq)
val subgraph = graph.subgraph(selected)
(selected, subgraph)
}
private def operationParamsChanged(
newNode: DeeplangGraph.DeeplangNode,
graph: StatefulGraph): Boolean = {
graph.nodes.find(_.id == newNode.id) match {
case Some(oldNode) =>
!newNode.value.sameAs(oldNode.value)
case None => true
}
}
private def findStates(
newStructure: DeeplangGraph,
substructure: DeeplangGraph,
nodes: Set[Node.Id]): NodeStates = {
val noMissingStates = newStructure.nodes.map {
case Node(id, _) => id -> graph.states.getOrElse(id, NodeStateWithResults.draft)
}.toMap
if (newStructure.containsCycle) {
noMissingStates.mapValues(_.draft.clearKnowledge)
} else {
val wholeGraph = StatefulGraph(newStructure, noMissingStates, None)
val newNodes = newStructure.nodes.map(_.id).diff(graph.directedGraph.nodes.map(_.id))
val nodesToExecute = substructure.nodes.filter { case Node(id, _) =>
nodes.contains(id) || !wholeGraph.states(id).isCompleted
}.map(_.id)
val predecessorsChangedNodes = newStructure.nodes.map(_.id).diff(newNodes).filter {
id => newStructure.predecessors(id) != graph.predecessors(id)
}
val changedParameters = newStructure.nodes.collect {
case node if operationParamsChanged(node, graph) => node.id
}
val changedNodes = predecessorsChangedNodes ++ changedParameters
val nodesNeedingDrafting = newNodes ++ nodesToExecute ++ changedNodes
val transformGraph = draftNodes(nodesNeedingDrafting) _ andThen
clearNodesKnowledge(changedNodes)
transformGraph(wholeGraph).states
}
}
private def draftNodes(
nodesNeedingDrafting: Set[Node.Id])(
graph: StatefulGraph): StatefulGraph = {
nodesNeedingDrafting.foldLeft(graph) {
case (g, id) => g.draft(id)
}
}
private def clearNodesKnowledge(
nodesToClear: Set[Node.Id])(
graph: StatefulGraph): StatefulGraph = {
nodesToClear.foldLeft(graph) {
case (g, id) => g.clearKnowledge(id)
}
}
}
abstract class StartedExecution(
fullGraph: StatefulGraph,
runningPart: StatefulGraph,
selectedNodes: Set[Node.Id])
extends Execution {
override def graph: StatefulGraph = {
val mergedStates = fullGraph.states ++ runningPart.states
// Assumes runningPart is subgraph of fullGraph
StatefulGraph(fullGraph.directedGraph, mergedStates, runningPart.executionFailure)
}
override def nodeFinished(
id: Node.Id,
resultsIds: Seq[Entity.Id],
reports: Map[Entity.Id, ReportContent],
dOperables: Map[Entity.Id, DOperable]): Execution = {
withRunningPartUpdated(_.nodeFinished(id, resultsIds, reports, dOperables))
}
override def nodeFailed(id: Id, cause: Exception): Execution =
withRunningPartUpdated(_.nodeFailed(id, cause))
override def enqueue: Execution = {
throw new IllegalStateException("An Execution that is not idle cannot be enqueued!")
}
override def inferAndApplyKnowledge(inferContext: InferContext): RunningExecution = {
throw new IllegalStateException("An Execution that is not idle cannot infer knowledge!")
}
final def withRunningPartUpdated(
update: (StatefulGraph) => StatefulGraph): Execution = {
val updatedRunningPart = update(runningPart)
val updatedFullGraph = fullGraph.updateStates(updatedRunningPart)
if (updatedRunningPart.isRunning) {
copyGraphs(updatedRunningPart, updatedFullGraph)
} else {
IdleExecution(updatedFullGraph, selectedNodes)
}
}
protected def copyGraphs(
updatedRunningPart: StatefulGraph,
updatedFullGraph: StatefulGraph): Execution
}
case class RunningExecution(
fullGraph: StatefulGraph,
runningPart: StatefulGraph,
selectedNodes: Set[Node.Id])
extends StartedExecution(fullGraph, runningPart, selectedNodes) {
override def nodeStarted(id: Id): RunningExecution = {
val updatedRunningPart = runningPart.nodeStarted(id)
val updatedFullGraph = fullGraph.updateStates(updatedRunningPart)
copy(fullGraph = updatedFullGraph, runningPart = updatedRunningPart)
}
override def abort: Execution = {
AbortedExecution(graph, runningPart.abortQueued, selectedNodes)
}
override protected def copyGraphs(
updatedRunningPart: StatefulGraph,
updatedGraph: StatefulGraph): Execution = {
RunningExecution(updatedGraph, updatedRunningPart, selectedNodes)
}
}
case class AbortedExecution(
val fullGraph: StatefulGraph,
runningPart: StatefulGraph,
selectedNodes: Set[Node.Id])
extends StartedExecution(fullGraph, runningPart, selectedNodes) {
override def nodeStarted(id: Id): AbortedExecution = {
throw new IllegalStateException("A node cannot be started when execution is Aborted!")
}
override def abort: Execution = {
throw new IllegalStateException("Once aborted execution cannot be aborted again!")
}
override protected def copyGraphs(
updatedRunningPart: StatefulGraph,
updatedFullGraph: StatefulGraph): Execution = {
AbortedExecution(updatedFullGraph, updatedRunningPart, selectedNodes)
}
}
|
deepsense-io/seahorse-workflow-executor
|
workflowexecutor/src/main/scala/io/deepsense/workflowexecutor/partialexecution/Execution.scala
|
Scala
|
apache-2.0
| 9,489 |
package edu.umass.cs.iesl.scalacommons
import collection.mutable
import java.util.Locale
import java.text.DateFormatSymbols
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
object DateUtils {
lazy val zeroBasedMonthsByName: Map[String, Int] = {
val result: mutable.Map[String, Int] = mutable.Map[String, Int]()
for (l <- Locale.getAvailableLocales) {
val months = new DateFormatSymbols(l).getMonths
for (i <- 0 until months.length) {
result += months(i) -> i
}
val shortmonths = new DateFormatSymbols(l).getShortMonths
for (i <- 0 until shortmonths.length) {
result += shortmonths(i) -> i
}
for (i <- 1 to 12) {
result += i.toString -> (i - 1)
}
for (i <- 1 to 9) {
result += ("0" + i.toString) -> (i - 1)
}
// a reasonable approximation
result += "Winter" -> 0
result += "Spring" -> 3
result += "Summer" -> 6
result += "Fall" -> 9
result += "Christmas" -> 12
}
val r = result.toMap
r ++ r.map({
case (a, b) => (a.toLowerCase, b)
}) ++ r.map({
case (a, b) => (a.toUpperCase, b)
})
}
def parseMonthZeroBased(s: String): Option[Int] = zeroBasedMonthsByName.get(s.trim)
def parseMonthOneBased(s: String): Option[Int] = zeroBasedMonthsByName.get(s.trim).map(_ + 1)
def formatInterval(x: Option[DateTime], y: Option[DateTime]): String = {
val yearFormat = DateTimeFormat.forPattern("yyyy")
//val monthAndYearFormat = DateTimeFormat.forPattern("MMM yyyy")
val fullFormat = DateTimeFormat.forPattern("MMM dd, yyyy")
//val monthformat = DateTimeFormat.forPattern("dd MMM")
val monthformat = DateTimeFormat.forPattern("MMM")
val dayformat = DateTimeFormat.forPattern("dd")
(x, y) match {
case (None, None) => ""
case (Some(x), None) => fullFormat.print(x)
case (None, Some(y)) => fullFormat.print(y)
case (Some(x), Some(y)) => {
if (x == y) fullFormat.print(x)
else {
if (x.year != y.year) {
fullFormat.print(x) + " - " + fullFormat.print(y)
}
else if (x.monthOfYear() != y.monthOfYear()) {
monthformat.print(x) + " " + dayformat.print(x) + " - " + monthformat.print(y) + " " + dayformat.print(y) + ", " + yearFormat.print(x)
}
else if (x.dayOfMonth() != y.dayOfMonth()) {
monthformat.print(x) + " " + dayformat.print(x) + " - " + dayformat.print(y) + ", " + yearFormat.print(x)
}
else fullFormat.print(x)
}
}
}
}
}
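// Hypothetical usage sketch, not part of the original file: illustrates the interval
// formats produced by formatInterval above (exact month names depend on the default locale).
object DateUtilsExample extends App {
  val start = new DateTime(2015, 3, 10, 0, 0)
  val end   = new DateTime(2015, 3, 12, 0, 0)
  println(DateUtils.formatInterval(Some(start), Some(end))) // e.g. "Mar 10 - 12, 2015"
  println(DateUtils.parseMonthOneBased("March"))            // Some(3) when an English locale is available
}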
|
iesl/scalacommons
|
src/main/scala/edu/umass/cs/iesl/scalacommons/DateUtils.scala
|
Scala
|
apache-2.0
| 2,617 |
package scorex.crypto.authds
import com.google.common.primitives.Longs
import scorex.crypto.TestingCommons
import scorex.crypto.authds.avltree.batch.{Modification, Update}
import scorex.crypto.hash.Digest
import scala.util.Success
trait TwoPartyTests extends TestingCommons {
def genUpd(key: ADKey) = Update(key, ADValue @@ key.take(8))
def profileTree(tree: TwoPartyDictionary, elements: Seq[ADKey], inDigest: ADDigest): Seq[Float] = {
var digest = inDigest
val (insertTime: Float, proofs) = time(elements.map(e => tree.run(genUpd(e)).get))
val (verifyTime: Float, _) = time {
proofs.foreach { p =>
digest = p.verify(digest, genUpd(p.key).updateFn).get
}
}
val m: scala.collection.mutable.Map[Int, Float] =
scala.collection.mutable.Map(0 -> 0, 1 -> 0, 2 -> 0, 3 -> 0, 4 -> 0, 5 -> 0)
proofs.foreach { p =>
p.proofSeq.foreach {
case a: ProofLevel => m(0) = m(0) + 1
case a: ProofRightLabel => m(1) = m(1) + 1
case a: ProofLeftLabel => m(1) = m(1) + 1
case a: ProofKey => m(2) = m(2) + 1
case a: ProofNextLeafKey => m(2) = m(2) + 1
case a: ProofValue => m(3) = m(3) + 1
case a: ProofBalance => m(4) = m(4) + 1
case a: ProofDirection => m(5) = m(5) + 1
}
}
val pl: Float = proofs.length
val proofSize = proofs.foldLeft(0) { (a, b) =>
a + b.proofSeq.map(_.bytes.length).sum
} / elements.length
Seq(insertTime, verifyTime, proofSize, m(0) / pl, m(1) / pl, m(2) / pl, m(3) / pl, m(4) / pl, m(5) / pl)
}
case class Append(key: ADKey, value: ADValue) extends Modification {
override def updateFn: UpdateFunction = {
oldOpt: Option[ADValue] => Success(Some(ADValue @@ oldOpt.map(_ ++ value).getOrElse(value)))
}: UpdateFunction
}
case class TransactionUpdate(key: ADKey, amount: Long) extends Modification {
override def updateFn: UpdateFunction = {
oldOpt: Option[ADValue] =>
Success(Some(ADValue @@ Longs.toByteArray(oldOpt.map(v => Longs.fromByteArray(v) + amount).getOrElse(amount))))
}: UpdateFunction
}
}
|
ScorexProject/scrypto
|
src/test/scala/scorex/crypto/authds/TwoPartyTests.scala
|
Scala
|
cc0-1.0
| 2,117 |
package julienrf.forms.presenters
import julienrf.forms._
import julienrf.forms.codecs.Codec
import julienrf.forms.codecs.Codec._
import julienrf.forms.codecs.Constraint.{Constrainable, And, GreaterOrEqual}
/**
* Defines common HTML controls (input and select).
*/
abstract class Control[Out] {
def input[A : Mandatory : InputType]: Presenter[A, Out] = inputAttrs[A]()
def inputAttrs[A : Mandatory : InputType](additionalAttrs: (String, String)*): Presenter[A, Out]
def options(data: Seq[(String, String)])(field: Field[_]): Out
def select[A : Mandatory : Multiple](opts: Field[A] => Out): Presenter[A, Out]
// TODO Do not add the empty first choice in the case of a multiple select
def enumOptions[A](values: Set[A], keys: A => String, labels: A => String): Seq[(String, String)] =
("" -> "") +: (values.to[Seq] map (a => keys(a) -> labels(a)))
val checkbox: Presenter[Boolean, Out] = checkboxAttrs()
def checkboxAttrs(additionalAttrs: (String, String)*): Presenter[Boolean, Out]
}
object Control {
def validationAttrs[A: Mandatory](codec: Codec[_, A]): Map[String, String] =
if (Mandatory[A].value) validationAttrsFromCodecs(codec) + ("required" -> "required")
else validationAttrsFromCodecs(codec)
// TODO Make this extensible
def validationAttrsFromCodecs(codec: Codec[_, _]): Map[String, String] =
codec match {
case AndThen(lhs, rhs) => validationAttrsFromCodecs(lhs) ++ validationAttrsFromCodecs(rhs)
case And(lhs, rhs) => validationAttrsFromCodecs(lhs) ++ validationAttrsFromCodecs(rhs)
case GreaterOrEqual(num) => Map("min" -> num.toString)
case Opt(codec) => validationAttrsFromCodecs(codec)
case Head | ToInt | ToBigDecimal | ToBoolean | OrElse(_, _) | OneOf(_) | SeveralOf(_) | _: Codecable[x, y] | _: Constrainable[z] | ToLocalDate(_) => Map.empty
}
}
|
julienrf/play-forms
|
play-forms/src/main/scala/julienrf/forms/presenters/Control.scala
|
Scala
|
mit
| 1,852 |
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.http
import fm.common.Implicits._
import fm.common.{InputStreamResource, Logging, MultiUseResource, Resource}
import java.io.{File, InputStream}
import java.nio.charset.Charset
import io.netty.buffer.ByteBufInputStream
import io.netty.handler.codec.http.{multipart => netty}
object PostData extends Logging {
def apply(data: netty.InterfaceHttpData): PostData = data match {
case d: netty.MemoryAttribute => MemoryPostAttribute(d)
case d: netty.DiskAttribute => DiskPostAttribute(d)
case d: netty.MemoryFileUpload => MemoryFileUpload(d)
case d: netty.DiskFileUpload => DiskFileUpload(d)
case d: netty.MixedAttribute => if (d.isInMemory) MemoryPostAttribute(d) else DiskPostAttribute(d)
case d: netty.MixedFileUpload => if (d.isInMemory) MemoryFileUpload(d) else DiskFileUpload(d)
case _ =>
logger.error("Unknown PostData type: "+data.getClass.getName+" "+data)
throw new MatchError(data)
}
}
sealed trait PostData {
protected def self: netty.HttpData
/** The POST name for this data */
final def name: String = Option(self.getName()).getOrElse("")
/** The specified Charset */
final def charset: Option[Charset] = Option(self.getCharset())
/** The length (in bytes) of this data */
final def length: Long = self.length()
/** The InputStreamResource for reading this data */
def inputStreamResource(autoDecompress: Boolean = true, autoBuffer: Boolean = true): InputStreamResource
final def value: String = self.getString()
/** Force this data to disk (if it's not already there) */
//def toDiskPostData: DiskPostData
}
sealed trait MemoryPostData extends PostData {
// Note: ByteBufInputStream will increase the readerIndex of the ByteBuf instance so we need to duplicate() it first
// if we want the MultiUseResource to work with multiple reads.
protected final def resource: Resource[InputStream] = MultiUseResource{ new ByteBufInputStream(self.getByteBuf().duplicate()) }
final def inputStreamResource(autoDecompress: Boolean = true, autoBuffer: Boolean = true): InputStreamResource = InputStreamResource(resource, autoDecompress = autoDecompress, autoBuffer = autoBuffer)
}
sealed trait DiskPostData extends PostData {
final def file: File = self.getFile()
}
sealed trait PostAttribute extends PostData {
protected def self: netty.Attribute
}
sealed trait FileUpload extends PostData {
protected def self: netty.FileUpload
final def fileName: Option[String] = self.getFilename().toBlankOption
final def contentType: Option[String] = self.getContentType().toBlankOption
final def contentTransferEncoding: Option[String] = self.getContentTransferEncoding().toBlankOption
}
final case class MemoryPostAttribute(protected val self: netty.Attribute) extends PostAttribute with MemoryPostData {
require(self.isInMemory, "Can't use an isInMemory=false instance of MemoryAttribute with MemoryPostAttribute")
}
final case class DiskPostAttribute(protected val self: netty.Attribute) extends PostAttribute with DiskPostData {
require(!self.isInMemory, "Can't use an isInMemory=true instance of DiskAttribute with DiskPostAttribute")
def inputStreamResource(autoDecompress: Boolean = true, autoBuffer: Boolean = true): InputStreamResource = {
InputStreamResource.forFile(file, autoDecompress = autoDecompress, autoBuffer = autoBuffer)
}
}
final case class MemoryFileUpload(protected val self: netty.FileUpload) extends FileUpload with MemoryPostData {
require(self.isInMemory, "Can't use an isInMemory=false instance of FileUpload with MemoryFileUpload")
}
final case class DiskFileUpload(protected val self: netty.FileUpload) extends FileUpload with DiskPostData {
require(!self.isInMemory, "Can't use an isInMemory=true instance of FileUpload with DiskFileUpload")
def inputStreamResource(autoDecompress: Boolean = true, autoBuffer: Boolean = true): InputStreamResource = {
InputStreamResource.forFile(file, fileName.getOrElse(""), autoDecompress = autoDecompress, autoBuffer = autoBuffer)
}
}
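// Hypothetical helper, not part of the original file: a sketch of how the sealed
// PostData hierarchy is typically consumed via pattern matching.
object PostDataExample {
  def describe(data: PostData): String = data match {
    case upload: FileUpload  => s"file upload '${upload.fileName.getOrElse("")}' (${upload.length} bytes)"
    case attr: PostAttribute => s"form field '${attr.name}' = '${attr.value}'"
    case other               => s"post data '${other.name}' (${other.length} bytes)"
  }
}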
|
frugalmechanic/fm-http
|
src/main/scala/fm/http/PostData.scala
|
Scala
|
apache-2.0
| 4,696 |
package skinny.micro
import skinny.micro.async.{ AsyncBeforeAfterDsl, AsyncSupport }
import skinny.micro.routing.AsyncRoutingDsl
/**
* Built-in features in SkinnyMicroFilter/SkinnyMicroServlet.
* These traits should not be mixed in SkinnyMicroBase.
*/
trait AsyncFeatures
extends AsyncSupport
with AsyncRoutingDsl
with AsyncBeforeAfterDsl { self: SkinnyMicroBase =>
}
|
xerial/skinny-micro
|
micro/src/main/scala/skinny/micro/AsyncFeatures.scala
|
Scala
|
bsd-2-clause
| 386 |
//package io.skysail.restlet
//
//import org.restlet.resource.ServerResource
//import io.skysail.core.app.SkysailApplication
//import io.skysail.core.utils.ReflectionUtils
//
//abstract class SkysailServerResource extends ServerResource {
//
// // DateTimeConverter dateConverter = new DateConverter(null);
// // dateConverter.setPattern("yyyy-MM-dd");
// // dateConverter.setUseLocaleFormat(true);
// // ConvertUtils.deregister(Date.class);
// // ConvertUtils.register(dateConverter, Date.class);
//
// // defaultMediaTypes.add("xml");
// // defaultMediaTypes.add("json");
// // defaultMediaTypes.add("x-yaml");
// // defaultMediaTypes.add("csv");
// // defaultMediaTypes.add("mailto");
//
// //val app = getApplication();
//
// def getMetricsCollector() = getApplication().asInstanceOf[SkysailApplication].getMetricsCollector()
//
// def getParameterizedType() = ReflectionUtils.getParameterizedType(getClass());
//}
|
evandor/skysail
|
skysail.restlet/src/io/skysail/restlet/SkysailServerResource.scala
|
Scala
|
apache-2.0
| 996 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.spray.directive
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.authentication.CIDRRules
import org.scalatest.FunSuite
import spray.http.HttpHeaders._
import spray.http.StatusCodes
import spray.testkit.ScalatestRouteTest
class CIDRNoIPSpec extends FunSuite with ScalatestRouteTest with CIDRDirectives {
implicit var cidrRules:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.allowConf))
test("reject when there is no source ip") {
Get("/good") ~> {
cidrFilter {
complete("good")
}
} ~> check {
status === StatusCodes.NotFound
}
}
}
class CIDRAllowSpec extends FunSuite with ScalatestRouteTest with CIDRDirectives {
implicit var cidrRules:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.allowConf))
test("accept a defined address using 'allow'") {
Get("/good") ~> addHeader(`Remote-Address`("127.0.0.1")) ~> {
cidrFilter {
complete("good")
}
} ~> check {
status === StatusCodes.OK
}
}
test("accept a defined second address using 'allow'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.allowConf))
Get("/good") ~> addHeader(`Remote-Address`("10.88.16.32")) ~> {
cidrFilter {
complete("good")
}
} ~> check {
status === StatusCodes.OK
}
}
test("reject an un-defined address using 'allow'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.allowConf))
Get("/bad") ~> addHeader(`Remote-Address`("216.64.169.240")) ~> {
cidrFilter {
complete("bad")
}
} ~> check {
status === StatusCodes.NotFound
}
}
}
class CIDRDenySpec extends FunSuite with ScalatestRouteTest with CIDRDirectives {
implicit var cidrRules:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.denyConf))
test("accept a defined address using 'deny'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.denyConf))
Get("/good") ~> addHeader(`Remote-Address`("127.0.0.1")) ~> {
cidrFilter {
complete("good")
}
} ~> check {
status === StatusCodes.OK
}
}
test("reject a defined address using 'deny'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.denyConf))
Get("/bad") ~> addHeader(`Remote-Address`("10.88.16.32")) ~> {
cidrFilter {
complete("bad")
}
} ~> check {
status === StatusCodes.NotFound
}
}
}
class CIDRMixSpec extends FunSuite with ScalatestRouteTest with CIDRDirectives {
implicit var cidrRules:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.mixConf))
test("accept a defined address using 'mix'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.mixConf))
Get("/good") ~> addHeader(`Remote-Address`("127.0.0.1")) ~> {
cidrFilter {
complete("good")
}
} ~> check {
status === StatusCodes.OK
}
}
test("reject an un-defined address using 'mix'") {
implicit var cidrRules:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.mixConf))
Get("/good") ~> addHeader(`Remote-Address`("216.64.169.240")) ~> {
cidrFilter {
complete("bad")
}
} ~> check {
status === StatusCodes.NotFound
}
}
test("reject a defined second address using 'mix'") {
implicit var settings:Option[CIDRRules] = Some(CIDRRules(CIDRConfig.mixConf))
Get("/bad") ~> addHeader(`Remote-Address`("10.88.16.32")) ~> {
cidrFilter {
complete("bad")
}
} ~> check {
status === StatusCodes.NotFound
}
}
}
object CIDRConfig {
val allowConf: Config = ConfigFactory.parseString( """
cidr-rules {
allow=["127.0.0.1/30", "10.0.0.0/8"]
deny=[]
}
""")
val denyConf: Config = ConfigFactory.parseString( """
cidr-rules {
allow=[]
deny=["10.0.0.0/8"]
}
""")
val mixConf: Config = ConfigFactory.parseString( """
cidr-rules {
allow=["127.0.0.1/30"]
deny=["10.0.0.0/8"]
}
""")
}
|
Webtrends/wookiee-spray
|
src/test/scala/com/webtrends/harness/component/spray/directive/CIDRDirectiveSpec.scala
|
Scala
|
apache-2.0
| 4,835 |
package scalaDemo.threadConcurrency
import java.util.{Timer, TimerTask}
import scala.concurrent._
/** Create Future[T] instances which will be completed after a delay.
 * Models asynchronous events.
*/
object TimedEvent {
  // the Scala code uses a java.util.Timer to schedule java.util.TimerTasks to run after a delay
val timer = new Timer
  /**
   * delayedSuccess sets up a task that, when run, successfully completes a Scala Future[T],
   * and returns that future to the caller.
   * delayedFailure returns a future of the same type, but uses a task that fails the future
   * with an IllegalArgumentException when it fires.
   */
/** Return a Future which completes successfully with the supplied value after secs seconds. */
def delayedSuccess[T](secs: Int, value: T): Future[T] = {
    val result = Promise[T] // the Promise is handed to the task, which uses it to mark completion or failure
    // the java.util.TimerTask runs after the delay; each TimerTask completes its associated future when it runs
timer.schedule(new TimerTask() {
def run() = {
result.success(value)
}
}, secs * 1000)
    // a Future represents the result of an asynchronous task that may not have completed yet;
    // callbacks can be attached to it to react when the task succeeds or fails
result.future
}
/** Return a Future which completes failing with an IllegalArgumentException after secs
* seconds. */
def delayedFailure(secs: Int, msg: String): Future[Int] = {
    val result = Promise[Int] // the Promise is handed to the task, which uses it to mark completion or failure
timer.schedule(new TimerTask() {
def run() = {
result.failure(new IllegalArgumentException(msg))
}
}, secs * 1000)
    // a Future represents the result of an asynchronous task that may not have completed yet;
    // callbacks can be attached to it to react when the task succeeds or fails
result.future
}
}
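// Hypothetical usage sketch, not part of the original file: the future completes
// roughly one second after it is created.
object TimedEventDemo extends App {
  import scala.concurrent.duration._
  val f = TimedEvent.delayedSuccess(1, 42)
  println(Await.result(f, 2.seconds)) // prints 42 after about one second
  TimedEvent.timer.cancel()           // stop the (non-daemon) timer thread so the JVM can exit
}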
|
tophua/spark1.52
|
examples/src/main/scala/scalaDemo/threadConcurrency/TimedEvent.scala
|
Scala
|
apache-2.0
| 2,012 |
package pureconfig.module.joda
import org.joda.time._
import org.joda.time.format.{DateTimeFormat, DateTimeFormatter}
import pureconfig.BaseSuite
import pureconfig.module.joda.arbitrary._
class JodaSuite extends BaseSuite {
checkArbitrary[Instant]
checkArbitrary[Interval]
checkArbitrary[Duration]
checkArbitrary[DateTimeZone]
checkReadString[DateTimeFormatter](
"yyyy-MM-dd'T'HH:mm:ss.SSSZZZ" -> DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZZZ")
)
}
|
pureconfig/pureconfig
|
modules/joda/src/test/scala/pureconfig/module/joda/JodaSuite.scala
|
Scala
|
mpl-2.0
| 483 |
/**
* Copyright 2016 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.confluent.examples.streams
import java.lang.{Long => JLong}
import java.util.Properties
import io.confluent.examples.streams.kafka.EmbeddedSingleNodeKafkaCluster
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization._
import org.apache.kafka.streams.kstream.{KStream, KStreamBuilder}
import org.apache.kafka.streams.{KafkaStreams, KeyValue, StreamsConfig}
import org.apache.kafka.test.TestUtils
import org.assertj.core.api.Assertions.assertThat
import org.junit._
import org.scalatest.junit.AssertionsForJUnit
/**
* End-to-end integration test based on [[WordCountLambdaExample]], using an embedded Kafka cluster.
*
* See [[WordCountLambdaExample]] for further documentation.
*
* See [[WordCountLambdaIntegrationTest]] for the equivalent Java example.
*
* Note: We intentionally use JUnit4 (wrapped by ScalaTest) for implementing this Scala integration
* test so it is easier to compare this Scala code with the equivalent Java code at
* StreamToTableJoinIntegrationTest. One difference is that, to simplify the Scala/Junit integration, we
* switched from BeforeClass (which must be `static`) to Before as well as from @ClassRule (which
* must be `static` and `public`) to a workaround combination of `@Rule def` and a `private val`.
*/
class WordCountScalaIntegrationTest extends AssertionsForJUnit {
private val privateCluster: EmbeddedSingleNodeKafkaCluster = new EmbeddedSingleNodeKafkaCluster
@Rule def cluster: EmbeddedSingleNodeKafkaCluster = privateCluster
private val inputTopic = "inputTopic"
private val outputTopic = "output-topic"
@Before
def startKafkaCluster() {
cluster.createTopic(inputTopic)
cluster.createTopic(outputTopic)
}
@Test
def shouldCountWords() {
// To convert between Scala's `Tuple2` and Streams' `KeyValue`.
import KeyValueImplicits._
val inputTextLines: Seq[String] = Seq(
"Hello Kafka Streams",
"All streams lead to Kafka",
"Join Kafka Summit"
)
val expectedWordCounts: Seq[KeyValue[String, Long]] = Seq(
("hello", 1L),
("all", 1L),
("streams", 2L),
("lead", 1L),
("to", 1L),
("join", 1L),
("kafka", 3L),
("summit", 1L)
)
//
// Step 1: Configure and start the processor topology.
//
val streamsConfiguration: Properties = {
val p = new Properties()
p.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-scala-integration-test")
p.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers())
p.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String.getClass.getName)
p.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String.getClass.getName)
// The commit interval for flushing records to state stores and downstream must be lower than
// this integration test's timeout (30 secs) to ensure we observe the expected processing results.
p.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, "10000")
p.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
// Use a temporary directory for storing state, which will be automatically removed after the test.
p.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory.getAbsolutePath)
p
}
val stringSerde: Serde[String] = Serdes.String()
val longSerde: Serde[JLong] = Serdes.Long()
val builder: KStreamBuilder = new KStreamBuilder()
// Construct a `KStream` from the input topic, where message values represent lines of text (for
// the sake of this example, we ignore whatever may be stored in the message keys).
val textLines: KStream[String, String] = builder.stream(inputTopic)
// Scala-Java interoperability: to convert `scala.collection.Iterable` to `java.util.Iterable`
// in `flatMapValues()` below.
import collection.JavaConverters.asJavaIterableConverter
val wordCounts: KStream[String, JLong] = textLines
.flatMapValues(value => value.toLowerCase.split("\\W+").toIterable.asJava)
// no need to specify explicit serdes because the resulting key and value types match our default serde settings
.groupBy((_, word) => word)
.count("Counts")
.toStream()
wordCounts.to(stringSerde, longSerde, outputTopic)
val streams: KafkaStreams = new KafkaStreams(builder, streamsConfiguration)
streams.start()
//
// Step 2: Publish some input text lines.
//
val producerConfig: Properties = {
val p = new Properties()
p.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers())
p.put(ProducerConfig.ACKS_CONFIG, "all")
p.put(ProducerConfig.RETRIES_CONFIG, "0")
p.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
p.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
p
}
import collection.JavaConverters._
IntegrationTestUtils.produceValuesSynchronously(inputTopic, inputTextLines.asJava, producerConfig)
//
// Step 3: Verify the application's output data.
//
val consumerConfig = {
val p = new Properties()
p.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.bootstrapServers())
p.put(ConsumerConfig.GROUP_ID_CONFIG, "wordcount-scala-integration-test-standard-consumer")
p.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
p.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer])
p.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[LongDeserializer])
p
}
val actualWordCounts: java.util.List[KeyValue[String, Long]] =
IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerConfig, outputTopic, expectedWordCounts.size)
streams.close()
assertThat(actualWordCounts).containsExactlyElementsOf(expectedWordCounts.asJava)
}
}
|
randyzingle/tools
|
gradle-streams/src/test/scala/io/confluent/examples/streams/WordCountScalaIntegrationTest.scala
|
Scala
|
apache-2.0
| 6,541 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.classes.HeroicCharacterClass
import io.truthencode.ddo.model.classes.HeroicCharacterClass.Warlock
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, GrantsToClass, RequiresAllOfClass, RequiresAllOfFeat}
/**
* +5 MRR, +25% Fortification
*/
protected[feats] trait EntropicWard
extends FeatRequisiteImpl with Passive with RequiresAllOfClass with RequiresAllOfFeat
with GrantsToClass {
self: ClassFeat =>
override def grantToClass: Seq[(HeroicCharacterClass, Int)] = Seq((Warlock, 6))
override def allOfFeats: Seq[Feat] = Seq(ClassFeat.PactGreatOldOne)
}
|
adarro/ddo-calc
|
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/EntropicWard.scala
|
Scala
|
apache-2.0
| 1,330 |
package picfg
import java.net.{DatagramPacket, DatagramSocket, InetSocketAddress}
import sodium.StreamSink
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
trait Scanner {
val findings = new StreamSink[Pi]
def scanNetwork(port: Int, timeout: Duration)(implicit ec: ExecutionContext): Future[Unit]
}
object Scanner extends Scanner {
def scanNetwork(port: Int, timeout: Duration)(implicit ec: ExecutionContext): Future[Unit] = Future {
// setup socket
val socket = new DatagramSocket()
socket.setBroadcast(true)
socket.setSoTimeout(timeout.toMillis.toInt)
// send
val msg = "picfg-ping".getBytes
//FIXME: broadcast address: 255.255.255.255 doesn't work under FreeBSD - why?
val packet = new DatagramPacket(msg, msg.length, new InetSocketAddress("255.255.255.255", port))
socket.send(packet)
// receive
while (true) {
val buf = new Array[Byte](256)
val rec = new DatagramPacket(buf, buf.length)
socket.receive(rec)
val msg = new String(buf, 0, rec.getLength)
val Array(name, ip) = msg.split(":")
findings.send(Pi(name, ip))
}
}
}
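// Hypothetical usage sketch, not part of the original file: the port and timeouts are
// illustrative values. Discovered Pis are pushed to `Scanner.findings`; the returned
// Future fails with a SocketTimeoutException once replies stop arriving, so we only
// wait for it to settle here.
object ScannerDemo extends App {
  import scala.concurrent.Await
  import scala.concurrent.ExecutionContext.Implicits.global

  Await.ready(Scanner.scanNetwork(port = 8888, timeout = 3.seconds), 10.seconds)
}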
|
j-keck/picfg
|
src/main/scala/picfg/Scanner.scala
|
Scala
|
mit
| 1,167 |
package psyco.scala.basic
/**
* Created by lipeng on 15/8/6.
*/
object Curry extends App {
/**
   * Higher-order function: sums f(a) + f(a+1) + ... + f(b) over the curried bounds.
*/
def sum(f: Int => Int)(a: Int)(b: Int): Int =
if (a > b) 0 else f(a) + sum(f)(a + 1)(b)
println(sum(x => x * x * x)(1)(10))
println(sum(x => x)(1)(10))
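  // Added sketch, not in the original: partially applying the curried `sum` fixes the
  // summand and yields a reusable function of the bounds.
  val sumOfSquares = sum(x => x * x) _
  println(sumOfSquares(1)(10)) // 385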
}
|
psyco4j/projects
|
src/main/scala/psyco/scala/basic/Curry.scala
|
Scala
|
mit
| 296 |
package org.jetbrains.plugins.scala.testingSupport.specs2.specs2_2_11_2_4_15
import org.jetbrains.plugins.scala.testingSupport.specs2.Specs2RegExpTestNameTest
/**
* @author Roman.Shein
* @since 03.07.2015.
*/
class Specs2_2_11_2_4_15_RegExpTest extends Specs2RegExpTestNameTest with Specs2_2_11_2_4_15_Base {
}
|
katejim/intellij-scala
|
test/org/jetbrains/plugins/scala/testingSupport/specs2/specs2_2_11_2_4_15/Specs2_2_11_2_4_15_RegExpTest.scala
|
Scala
|
apache-2.0
| 321 |
/* sbt -- Simple Build Tool
* Copyright 2009 Mark Harrah
*/
package sbt.classfile
import sbt._
import scala.collection.mutable
import mutable.{ArrayBuffer, Buffer}
import java.io.File
private[sbt] object Analyze
{
def apply[T](basePath: Path, outputDirectory: Path, sources: Iterable[Path], roots: Iterable[Path], log: Logger)
(allProducts: => scala.collection.Set[Path], analysis: AnalysisCallback, loader: ClassLoader)
(compile: => Option[String]): Option[String] =
{
val sourceSet = Set(sources.toSeq : _*)
val classesFinder = outputDirectory ** GlobFilter("*.class")
val existingClasses = classesFinder.get
// runs after compilation
def analyze()
{
val allClasses = Set(classesFinder.get.toSeq : _*)
val newClasses = allClasses -- existingClasses -- allProducts
val productToSource = new mutable.HashMap[Path, Path]
val sourceToClassFiles = new mutable.HashMap[Path, Buffer[ClassFile]]
// parse class files and assign classes to sources. This must be done before dependencies, since the information comes
// as class->class dependencies that must be mapped back to source->class dependencies using the source+class assignment
for(newClass <- newClasses;
path <- Path.relativize(outputDirectory, newClass);
classFile = Parser(newClass.asFile, log);
sourceFile <- classFile.sourceFile;
source <- guessSourcePath(sourceSet, roots, classFile, log))
{
analysis.beginSource(source)
analysis.generatedClass(source, path)
productToSource(path) = source
sourceToClassFiles.getOrElseUpdate(source, new ArrayBuffer[ClassFile]) += classFile
}
// get class to class dependencies and map back to source to class dependencies
for( (source, classFiles) <- sourceToClassFiles )
{
for(classFile <- classFiles if isTopLevel(classFile);
method <- classFile.methods; if method.isMain)
analysis.foundApplication(source, classFile.className)
def processDependency(tpe: String)
{
Control.trapAndLog(log)
{
val clazz = Class.forName(tpe, false, loader)
for(file <- Control.convertException(FileUtilities.classLocationFile(clazz)).right)
{
if(file.isDirectory)
{
val resolved = resolveClassFile(file, tpe)
assume(resolved.exists, "Resolved class file " + resolved + " from " + source + " did not exist")
val resolvedPath = Path.fromFile(resolved)
if(Path.fromFile(file) == outputDirectory)
{
productToSource.get(resolvedPath) match
{
case Some(dependsOn) => analysis.sourceDependency(dependsOn, source)
case None => analysis.productDependency(resolvedPath, source)
}
}
else
analysis.classDependency(resolved, source)
}
else
analysis.jarDependency(file, source)
}
}
}
classFiles.flatMap(_.types).foreach(processDependency)
analysis.endSource(source)
}
}
compile orElse Control.convertErrorMessage(log)(analyze()).left.toOption
}
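	// Maps a fully qualified class name to its class file under `file`,
	// e.g. resolveClassFile(new File("out"), "a.b.C") -> out/a/b/C.class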
private def resolveClassFile(file: File, className: String): File = (file /: (className.replace('.','/') + ".class").split("/"))(new File(_, _))
private def guessSourcePath(sources: scala.collection.Set[Path], roots: Iterable[Path], classFile: ClassFile, log: Logger) =
{
		val classNameParts = classFile.className.split("""\.""")
val lastIndex = classNameParts.length - 1
val pkg = classNameParts.take(lastIndex)
val simpleClassName = classNameParts(lastIndex)
val sourceFileName = classFile.sourceFile.getOrElse(simpleClassName.takeWhile(_ != '$').mkString("", "", ".java"))
val relativeSourceFile = (pkg ++ (sourceFileName :: Nil)).mkString("/")
val candidates = roots.map(root => Path.fromString(root, relativeSourceFile)).filter(sources.contains).toList
candidates match
{
case Nil => log.warn("Could not determine source for class " + classFile.className)
case head :: Nil => ()
case _ =>log.warn("Multiple sources matched for class " + classFile.className + ": " + candidates.mkString(", "))
}
candidates
}
private def isTopLevel(classFile: ClassFile) = classFile.className.indexOf('$') < 0
}
|
matheshar/simple-build-tool
|
src/main/scala/sbt/classfile/Analyze.scala
|
Scala
|
bsd-3-clause
| 4,183 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.manager.utils
import java.util.Properties
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.KeeperException.NodeExistsException
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.util.Random
/**
* Borrowed from kafka 0.8.1.1, adapted to use curator framework
* https://git-wip-us.apache.org/repos/asf?p=kafka.git;a=blob;f=core/src/main/scala/kafka/admin/AdminUtils.scala
*/
object AdminUtils {
private[this] lazy val logger = LoggerFactory.getLogger(this.getClass)
val rand = new Random
val TopicConfigChangeZnodePrefix = "config_change_"
/**
* There are 2 goals of replica assignment:
* 1. Spread the replicas evenly among brokers.
* 2. For partitions assigned to a particular broker, their other replicas are spread over the other brokers.
*
* To achieve this goal, we:
* 1. Assign the first replica of each partition by round-robin, starting from a random position in the broker list.
* 2. Assign the remaining replicas of each partition with an increasing shift.
*
* Here is an example of assigning
* broker-0 broker-1 broker-2 broker-3 broker-4
* p0 p1 p2 p3 p4 (1st replica)
* p5 p6 p7 p8 p9 (1st replica)
* p4 p0 p1 p2 p3 (2nd replica)
* p8 p9 p5 p6 p7 (2nd replica)
   * p3 p4 p0 p1 p2 (3rd replica)
   * p7 p8 p9 p5 p6 (3rd replica)
*/
def assignReplicasToBrokers(brokerList: Seq[Int],
nPartitions: Int,
replicationFactor: Int,
fixedStartIndex: Int = -1,
startPartitionId: Int = -1)
: Map[Int, Seq[Int]] = {
checkCondition(nPartitions > 0,TopicErrors.PartitionsGreaterThanZero)
checkCondition(replicationFactor > 0,TopicErrors.ReplicationGreaterThanZero)
checkCondition(replicationFactor <= brokerList.size,
TopicErrors.ReplicationGreaterThanNumBrokers(replicationFactor, brokerList.size))
val ret = new mutable.HashMap[Int, List[Int]]()
val startIndex = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
var currentPartitionId = if (startPartitionId >= 0) startPartitionId else 0
var nextReplicaShift = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
for (i <- 0 until nPartitions) {
if (currentPartitionId > 0 && (currentPartitionId % brokerList.size == 0))
nextReplicaShift += 1
val firstReplicaIndex = (currentPartitionId + startIndex) % brokerList.size
var replicaList = List(brokerList(firstReplicaIndex))
for (j <- 0 until replicationFactor - 1)
replicaList ::= brokerList(replicaIndex(firstReplicaIndex, nextReplicaShift, j, brokerList.size))
ret.put(currentPartitionId, replicaList.reverse)
currentPartitionId = currentPartitionId + 1
}
ret.toMap
}
private def replicaIndex(firstReplicaIndex: Int, secondReplicaShift: Int, replicaIndex: Int, nBrokers: Int): Int = {
val shift = 1 + (secondReplicaShift + replicaIndex) % (nBrokers - 1)
(firstReplicaIndex + shift) % nBrokers
}
def deleteTopic(curator: CuratorFramework, topic: String): Unit = {
ZkUtils.createPersistentPath(curator,ZkUtils.getDeleteTopicPath(topic))
}
def createTopic(curator: CuratorFramework,
brokers: Seq[Int],
topic: String,
partitions: Int,
replicationFactor: Int,
topicConfig: Properties = new Properties): Unit = {
val replicaAssignment = assignReplicasToBrokers(brokers,partitions,replicationFactor)
createOrUpdateTopicPartitionAssignmentPathInZK(curator, topic, replicaAssignment, topicConfig)
}
def createOrUpdateTopicPartitionAssignmentPathInZK(curator: CuratorFramework,
topic: String,
partitionReplicaAssignment: Map[Int, Seq[Int]],
config: Properties = new Properties,
update: Boolean = false) {
// validate arguments
Topic.validate(topic)
LogConfig.validate(config)
checkCondition(partitionReplicaAssignment.values.map(_.size).toSet.size == 1, TopicErrors.InconsistentPartitionReplicas)
val topicPath = ZkUtils.getTopicPath(topic)
if(!update ) {
checkCondition(curator.checkExists().forPath(topicPath) == null,TopicErrors.TopicAlreadyExists(topic))
}
partitionReplicaAssignment.foreach {
case (part,reps) => checkCondition(reps.size == reps.toSet.size, TopicErrors.DuplicateReplicAssignment(topic,part,reps))
}
// write out the config if there is any, this isn't transactional with the partition assignments
writeTopicConfig(curator, topic, config)
// create the partition assignment
writeTopicPartitionAssignment(curator, topic, partitionReplicaAssignment, update)
}
/**
* Write out the topic config to zk, if there is any
*/
private def writeTopicConfig(curator: CuratorFramework, topic: String, config: Properties) {
val configMap: mutable.Map[String, String] = {
import scala.collection.JavaConverters._
config.asScala
}
val map : Map[String, Any] = Map("version" -> 1, "config" -> configMap)
ZkUtils.updatePersistentPath(curator, ZkUtils.getTopicConfigPath(topic), toJson(map))
}
private def writeTopicPartitionAssignment(curator: CuratorFramework, topic: String, replicaAssignment: Map[Int, Seq[Int]], update: Boolean) {
try {
val zkPath = ZkUtils.getTopicPath(topic)
val jsonPartitionData = ZkUtils.replicaAssignmentZkData(replicaAssignment.map(e => (e._1.toString -> e._2)))
if (!update) {
logger.info("Topic creation {}", jsonPartitionData.toString)
ZkUtils.createPersistentPath(curator, zkPath, jsonPartitionData)
} else {
logger.info("Topic update {}", jsonPartitionData.toString)
ZkUtils.updatePersistentPath(curator, zkPath, jsonPartitionData)
}
logger.debug("Updated path %s with %s for replica assignment".format(zkPath, jsonPartitionData))
} catch {
case e: NodeExistsException => throw new IllegalArgumentException("topic %s already exists".format(topic))
case e2: Throwable => throw new IllegalArgumentException(e2.toString)
}
}
}
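// Hypothetical usage sketch, not part of the original file: with fixed start indices the
// assignment is deterministic and mirrors the round-robin example in the scaladoc above.
object AdminUtilsExample extends App {
  val assignment: Map[Int, Seq[Int]] = AdminUtils.assignReplicasToBrokers(
    brokerList = Seq(0, 1, 2, 3, 4),
    nPartitions = 10,
    replicationFactor = 3,
    fixedStartIndex = 0,
    startPartitionId = 0
  )
  assignment.toSeq.sortBy(_._1).foreach { case (partition, replicas) =>
    println(s"partition $partition -> brokers ${replicas.mkString(", ")}")
  }
}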
|
patricklucas/kafka-manager
|
app/kafka/manager/utils/AdminUtils.scala
|
Scala
|
apache-2.0
| 7,423 |
package main.importedjs
import scala.scalajs.js
package object Meter extends js.GlobalScope {
def onVolumeChanged(volume: Int): Nothing = js.native
}
|
jaapmengers/OneMinuteChanges
|
src/main/scala/main/importedjs/VolumeMeter.scala
|
Scala
|
apache-2.0
| 154 |
package scala.models
import io.apibuilder.generator.v0.models.{File, InvocationForm}
import io.apibuilder.spec.v0.models.Service
import models.TestHelper._
import scala.util.matching.Regex
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
class ScalaCheckGeneratorSpec extends AnyFunSpec with Matchers {
def fileContent(service: Service): File =
ScalaCheckGenerator.invoke(InvocationForm(service))
.fold(
        { msgs => Left(new Throwable(s"Generated errors: ${msgs.mkString("\n - ", "\n - ", "")}")) },
{
case one :: Nil => Right(one)
case _ :: _ => Left(new Throwable(s"Generated too many files"))
case Nil => Left(new Throwable(s"Generated no files"))
}
)
.fold(throwable => throw throwable, identity)
def count(regex: Regex, service: Service): Int = {
val contents = fileContent(service).contents
regex.findAllIn(contents).length
}
def arbitraryCount(service: Service): Int = count("def arbitrary".r, service)
def genCount(service: Service): Int = count("def gen".r, service)
describe("for all services") {
List(
collectionJsonDefaultsService,
referenceApiService,
referenceWithImportsApiService,
generatorApiService,
apidocApiService,
dateTimeService,
builtInTypesService,
generatorApiServiceWithUnionAndDescriminator,
generatorApiServiceWithUnionWithoutDescriminator,
emptyService,
).zipWithIndex.foreach { case (service, index) =>
describe(s"for services ${index}") {
val elementCount = service.enums.size +
service.models.size +
service.unions.size +
2 // org.joda.time.DateTime + play.api.libs.json.JsObject;
val abstractArbitraryCount = 2 // org.joda.time.DateTime + play.api.libs.json.JsObject
it("generates all arbitraries") {
assert(arbitraryCount(service) == elementCount + abstractArbitraryCount)
}
it("generates all gens") {
assert(genCount(service) == elementCount)
}
}
}
}
}
|
mbryzek/apidoc-generator
|
scala-generator/src/test/scala/models/ScalaCheckGeneratorSpec.scala
|
Scala
|
mit
| 2,116 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.blueeyes
import org.specs2.mutable.Specification
class CacheDirectiveSpec extends Specification{
"Cache-Directive: Should parse a cache directive with a field correctly" in {
    val testString1 = "private=\"this\", no-cache, max-age=10, no-transform"
HttpHeaders.`Cache-Control`(CacheDirectives.parseCacheDirectives(testString1): _*).value mustEqual testString1
}
"Cache-Directive: Should parse a cache-directive with a delta " in {
val testString2 = "private, no-cache, max-stale=590, no-transform"
HttpHeaders.`Cache-Control`(CacheDirectives.parseCacheDirectives(testString2): _*).value mustEqual testString2
}
"Cache-Directive: Should return empty array on bad input" in {
val testString3 = "amnamzimmeram"
CacheDirectives.parseCacheDirectives(testString3).length mustEqual 0
}
}
|
drostron/quasar
|
blueeyes/src/test/scala/quasar/blueeyes/CacheDirectiveSpec.scala
|
Scala
|
apache-2.0
| 1,443 |
package gplume.scala.context
import java.nio.charset.Charset
import java.util.{Date, Calendar, TimeZone}
import com.caibowen.gplume.context.{XMLAssembler, IBeanAssembler}
import com.caibowen.gplume.event.Broadcaster
/**
* @author BowenCai
* @since 12/12/2014.
*/
object AppContext {
object defaults {
var timeZone = TimeZone.getTimeZone("GMT")
private[context] val calendar = Calendar.getInstance(timeZone)
val charSet = Charset.forName("UTF-8")
}
/**
* thread local variables
*/
val currentCalendar: ThreadLocal[Calendar] =
new ThreadLocal[Calendar] {
protected override def initialValue: Calendar = {
return Calendar.getInstance(defaults.timeZone)
}
}
/**
* config file location, written in web.xml
*/
val MANIFEST = "manifest"
val LOCALE = "locale"
val TIME_ZONE = "timezone"
val beanAssembler: IBeanAssembler = new XMLAssembler(new gplume.scala.context.SBeanBuilder)
val broadcaster = new Broadcaster
def now = defaults.calendar.getTime
}
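// Hypothetical usage sketch, not part of the original file: each thread lazily gets its
// own Calendar in the configured default time zone.
object AppContextExample extends App {
  val cal = AppContext.currentCalendar.get()
  println(cal.getTimeZone.getID) // "GMT" unless defaults.timeZone was changed beforehand
  println(AppContext.now)        // the Date held by the shared default calendar
}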
|
xkommando/Gplume
|
gplume-scala/src/main/scala/gplume/scala/context/AppContext.scala
|
Scala
|
apache-2.0
| 1,029 |
package io.github.tailhq.dynaml.tensorflow.dynamics
import io.github.tailhq.dynaml.pipes._
import io.github.tailhq.dynaml.DynaMLPipe._
import io.github.tailhq.dynaml.tensorflow.models.TFModel
import io.github.tailhq.dynaml.tensorflow._
import io.github.tailhq.dynaml.tensorflow.layers.{
PDEQuadrature,
Regularization,
L2Regularization,
L1Regularization
}
import io.github.tailhq.dynaml.tensorflow.data._
import org.platanios.tensorflow.api.learn.Mode
import org.platanios.tensorflow.api.learn.layers.Layer
import org.platanios.tensorflow.api._
//TODO: Use variable contexts for accessing system sources/epistemic quantities
/**
*
* <h3>Dynamical Systems</h3>
* <h4>Forecasting, System Identification and Design</h4>
*
* @param function A map containing quantity names and their associated function approximations.
* @param dynamics A system of differential operators, representing the system dynamics
* @param input_shape The data type and shape of the input tensor.
* @param target_shape The data types and shapes of the outputs and the latent variables.
* @param data_loss The loss/error measure between the neural surrogate output and
* the observations.
* @param quadrature_nodes The co-location points in the input domain, packaged as a tensor.
* @param quadrature_weights The weights associated with each co-location point.
* @param quadrature_loss_weightage The weight attached to the quadrature constructed
* for approximating the integrated error between the
* surrogate and the dynamical system.
* @param graphInstance An optional TensorFlow graph instance to create model in.
*
* */
private[dynaml] class PDESystem[
T: TF: IsDecimal,
U: TF: IsDecimal,
L: TF: IsFloatOrDouble
](val function: Layer[Output[T], Output[U]],
val dynamics: DifferentialOperator[Output[T], Output[U]],
val input_shape: Shape,
val target_shape: Shape,
val data_loss: Layer[(Output[U], Output[U]), Output[L]],
quadrature_nodes: Tensor[T],
quadrature_weights: Tensor[U],
quadrature_loss_weightage: Tensor[U],
graphInstance: Option[Graph],
val reg_f: Option[Regularization[L]] = None,
val reg_v: Option[Regularization[L]] = None,
name: String = "Output",
system_name: Option[String] = None) {
type P = (Tensor[T], Tensor[U])
type HandleOps[Patt] = TFModel.TFDataHandleOps[
Patt,
P,
PDESystem.ModelOutputsT[U],
(Output[T], Output[U])
]
type Config = TFModel.Config[(Output[T], Output[U])]
protected val observational_error: Layer[(Output[U], Output[U]), Output[L]] =
PDESystem.error[U, L]("ExtObsError", data_loss)
protected val projection
: Layer[(PDESystem.ModelOutputs[U], Output[U]), (Output[U], Output[U])] =
PDESystem.projectOutputs[U]("ProjectOutputs")
val system_outputs: PDESystem.ModelLayer[T, U] = function
val system_residuals: PDESystem.ModelLayer[T, U] = dynamics(system_outputs)
val system_variables: PDESystem.VarMap[T, U] =
dynamics.variables.map(kv => (kv._1, kv._2(system_outputs)))
private val var_scope =
DataPipe[String, String](dtfutils.get_scope(system_residuals)) >
DataPipe[String, String](dtfutils.process_scope)
val variable_scopes: Seq[String] =
system_variables.values.toSeq
.map(v => s"${var_scope(v.name)}${v.name}")
protected val quadrature: PDEQuadrature[T, U, L] =
PDEQuadrature(
"ColocationError",
system_residuals,
quadrature_nodes,
quadrature_weights,
quadrature_loss_weightage
)
private val dTypeTag = TF[T]
private val dTypeTagO = TF[U]
private val data_handles = (
tf.learn.Input[Output[T], DataType[T], Shape](
dTypeTag.dataType,
Shape(-1) ++ input_shape,
name = "Input"
),
tf.learn.Input[Output[U], DataType[U], Shape](
dTypeTagO.dataType,
Shape(-1) ++ target_shape,
name = "Outputs"
)
)
val system_variables_mapping: Layer[Output[T], Map[String, Output[U]]] =
dtflearn.scoped_map_layer("MapVars", system_variables, variable_scopes)
val model_architecture: Layer[Output[T], PDESystem.ModelOutputs[U]] =
dtflearn.bifurcation_layer(
if (system_name.isDefined) system_name.get else "CombinedOutputsAndVars",
system_outputs,
system_variables_mapping
)
private val primary_loss = projection >> observational_error >> quadrature
private val regularization_f =
PDESystem.regularization(model_architecture, reg_f)
private val regularization_v =
PDESystem.regularization(variable_scopes, reg_v)
protected val system_loss
: Layer[(PDESystem.ModelOutputs[U], Output[U]), Output[L]] =
primary_loss >>
regularization_f >>
regularization_v >>
tf.learn.Mean[L](name = "Loss/Mean") >>
tf.learn.ScalarSummary[L](name = "Loss/Summary", tag = "Loss")
private val tensors_to_symbolic =
DataPipe[(Tensor[T], Tensor[U]), (Output[T], Output[U])](
c => (c._1.toOutput, c._2.toOutput)
)
val pattern_to_tensor = DataPipe(
(ds: Seq[(Tensor[T], Tensor[U])]) => {
val (xs, ys) = ds.unzip
(
dtfpipe.EagerStack[T](axis = 0).run(xs),
dtfpipe.EagerStack[U](axis = 0).run(ys)
)
}
)
/**
* Train a neural net based approximation for the
* dynamical system.
*
* @param data Training data, a sequence of supervised/labeled data
* sets the length of the sequence must equal the number
* of governing equations.
* @param trainConfig Training configuration, of type [[TFModel.Config]]
   * @param tf_handle_ops TensorFlow data handle operations, of type [[TFModel.TFDataHandleOps]]
* @param inMemory Set to true if model is to be kept entirely in memory, defaults to false.
* @return A [[PDESystem.Model]] which encapsulates a predictive model of type [[TFModel]]
* */
def solve(
data: DataSet[P],
trainConfig: Config,
tf_handle_ops: HandleOps[P] = TFModel.tf_data_handle_ops(patternToTensor =
Some(pattern_to_tensor)),
inMemory: Boolean = false
): PDESystem.Model[T, U, L] = {
val model =
dtflearn.model[Output[T], Output[U], PDESystem.ModelOutputs[U], L, Tensor[
T
], DataType[T], Shape, Tensor[U], DataType[U], Shape, PDESystem.ModelOutputsT[
U
], (DataType[U], Map[String, DataType[U]]), (Shape, Map[String, Shape])](
model_architecture,
(dTypeTag.dataType, input_shape),
(dTypeTagO.dataType, target_shape),
system_loss,
inMemory,
graphInstance,
Some(data_handles)
)
model.train(data, trainConfig, tf_handle_ops)
PDESystem.model(model, name, system_variables.keys.toSeq)
}
}
object PDESystem {
type ModelLayer[T, U] = Layer[Output[T], Output[U]]
type VarMap[T, U] = Map[String, ModelLayer[T, U]]
type ModelOutputs[T] = (Output[T], Map[String, Output[T]])
type ModelOutputsT[T] = (Tensor[T], Map[String, Tensor[T]])
def modify_reg[L: TF: IsFloatOrDouble](
model_architecture: Layer[_, _],
reg: Regularization[L]
) = reg match {
case l2reg: L2Regularization[L] =>
l2reg.copy[L](
scopes = l2reg.names
.map(n => dtfutils.get_scope(model_architecture)(n.split("/").head))
)
case l1reg: L1Regularization[L] =>
l1reg.copy[L](
scopes = l1reg.names
.map(n => dtfutils.get_scope(model_architecture)(n.split("/").head))
)
}
def modify_reg[L: TF: IsFloatOrDouble](
scopes: Seq[String],
reg: Regularization[L]
) = reg match {
case l2reg: L2Regularization[L] =>
l2reg.copy[L](scopes = scopes)
case l1reg: L1Regularization[L] =>
l1reg.copy[L](scopes = scopes)
}
def regularization[L: TF: IsFloatOrDouble](
s: Seq[String],
reg: Option[Regularization[L]]
): Layer[Output[L], Output[L]] = reg match {
case None => dtflearn.identity[Output[L]]("Id")
case Some(r) => modify_reg(s, r)
}
def regularization[L: TF: IsFloatOrDouble](
arch: Layer[_, _],
reg: Option[Regularization[L]]
): Layer[Output[L], Output[L]] = reg match {
case None => dtflearn.identity[Output[L]]("Id")
case Some(r) => modify_reg(arch, r)
}
protected case class ObservationalError[
T: TF: IsDecimal,
L: TF: IsFloatOrDouble
](override val name: String,
error_measure: Layer[(Output[T], Output[T]), Output[L]])
extends Layer[(Output[T], Output[T]), Output[L]](name) {
override val layerType: String = "ObservationalError"
override def forwardWithoutContext(
input: (Output[T], Output[T])
)(
implicit mode: Mode
  ): Output[L] = error_measure.forwardWithoutContext((input._1, input._2))
}
protected case class ProjectOutputs[T: TF: IsDecimal](
override val name: String)
extends Layer[(ModelOutputs[T], Output[T]), (Output[T], Output[T])] {
override val layerType: String = "ProjectOutputs"
override def forwardWithoutContext(
input: (ModelOutputs[T], Output[T])
)(
implicit mode: Mode
): (Output[T], Output[T]) =
(input._1._1, input._2)
}
/**
* Neural-net based predictive model for dynamical systems.
*
* @param tfModel A DynaML TensorFlow model [[TFModel]]
*
* @param outputs The model quantities of interest which are
* governed by the system dynamics.
*
* @param variables The unobserved quantities of the system.
*
* */
case class Model[T: TF: IsDecimal, U: TF: IsDecimal, L: TF: IsFloatOrDouble](
tfModel: TFModel[Output[T], Output[U], PDESystem.ModelOutputs[U], L, Tensor[
T
], DataType[T], Shape, Tensor[U], DataType[U], Shape, PDESystem.ModelOutputsT[
U
], (DataType[U], Map[String, DataType[U]]), (Shape, Map[String, Shape])],
outputs: String,
variables: Seq[String]) {
private val model_quantities = Seq(outputs) ++ variables
protected def predict(input: Tensor[T]): Map[String, Tensor[U]] = {
val model_preds = tfModel.predict(input)
Map(outputs -> model_preds._1) ++ model_preds._2
}
def predict(quantities: String*)(input: Tensor[T]): Seq[Tensor[U]] = {
require(
quantities.forall(model_quantities.contains),
"Each provided quantity should be in the list of model quantities," +
" either as an output or as an inferred variable"
)
val outputs = predict(input)
quantities.map(outputs(_))
}
}
val error: ObservationalError.type = ObservationalError
val model: Model.type = Model
val projectOutputs: ProjectOutputs.type = ProjectOutputs
def apply[T: TF: IsDecimal, U: TF: IsDecimal, L: TF: IsFloatOrDouble](
quantities: Layer[Output[T], Output[U]],
dynamics: DifferentialOperator[Output[T], Output[U]],
input: Shape,
target: Shape,
data_loss: Layer[(Output[U], Output[U]), Output[L]],
quadrature_nodes: Tensor[T],
weights: Tensor[U],
loss_weightage: Tensor[U],
graphInstance: Option[Graph] = None,
reg_f: Option[Regularization[L]] = None,
reg_v: Option[Regularization[L]] = None,
name: String = "Output",
system_name: Option[String] = None
): PDESystem[T, U, L] =
new PDESystem[T, U, L](
quantities,
dynamics,
input,
target,
data_loss,
quadrature_nodes,
weights,
loss_weightage,
graphInstance,
reg_f,
reg_v,
name,
system_name
)
}
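// A minimal usage sketch of the API defined above. The layer `u_net`, the operator
// `heat_op`, the tensors and the training config are hypothetical placeholders, not
// part of this file; only the PDESystem/Model signatures themselves are assumed.
//
//   val system = PDESystem[Float, Float, Float](
//     quantities       = u_net,            // Layer[Output[Float], Output[Float]] approximating u(x, t)
//     dynamics         = heat_op,          // DifferentialOperator encoding the governing equation
//     input            = Shape(2),         // (x, t)
//     target           = Shape(1),         // u
//     data_loss        = obs_loss,         // Layer[(Output[Float], Output[Float]), Output[Float]]
//     quadrature_nodes = nodes,            // Tensor[Float] of collocation points
//     weights          = node_weights,     // Tensor[Float] of quadrature weights
//     loss_weightage   = penalty_weight    // Tensor[Float], relative weight of the collocation penalty
//   )
//   val trained      = system.solve(observations, train_config)
//   val Seq(u_pred)  = trained.predict("Output")(test_inputs)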
|
mandar2812/DynaML
|
dynaml-tensorflow/src/main/scala/io/github/tailhq/dynaml/tensorflow/dynamics/PDESystem.scala
|
Scala
|
apache-2.0
| 11,529 |
/**
* Copyright (C) 2011 Typesafe Inc. <http://typesafe.com>
*/
package com.typesafe.config.impl
import org.junit.Assert._
import org.junit._
import com.typesafe.config.ConfigValue
import com.typesafe.config.ConfigException
import com.typesafe.config.ConfigResolveOptions
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
class ConfigSubstitutionTest extends TestUtils {
private def resolveWithoutFallbacks(v: AbstractConfigObject) = {
val options = ConfigResolveOptions.noSystem()
ResolveContext.resolve(v, v, options).asInstanceOf[AbstractConfigObject].toConfig
}
private def resolveWithoutFallbacks(s: AbstractConfigValue, root: AbstractConfigObject) = {
val options = ConfigResolveOptions.noSystem()
ResolveContext.resolve(s, root, options)
}
private def resolve(v: AbstractConfigObject) = {
val options = ConfigResolveOptions.defaults()
ResolveContext.resolve(v, v, options).asInstanceOf[AbstractConfigObject].toConfig
}
private def resolve(s: AbstractConfigValue, root: AbstractConfigObject) = {
val options = ConfigResolveOptions.defaults()
ResolveContext.resolve(s, root, options)
}
private val simpleObject = {
parseObject("""
{
"foo" : 42,
"bar" : {
"int" : 43,
"bool" : true,
"null" : null,
"string" : "hello",
"double" : 3.14
}
}
""")
}
@Test
def resolveTrivialKey() {
val s = subst("foo")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(intValue(42), v)
}
@Test
def resolveTrivialPath() {
val s = subst("bar.int")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(intValue(43), v)
}
@Test
def resolveInt() {
val s = subst("bar.int")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(intValue(43), v)
}
@Test
def resolveBool() {
val s = subst("bar.bool")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(boolValue(true), v)
}
@Test
def resolveNull() {
val s = subst("bar.null")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(nullValue(), v)
}
@Test
def resolveString() {
val s = subst("bar.string")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("hello"), v)
}
@Test
def resolveDouble() {
val s = subst("bar.double")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(doubleValue(3.14), v)
}
@Test
def resolveMissingThrows() {
val e = intercept[ConfigException.UnresolvedSubstitution] {
val s = subst("bar.missing")
val v = resolveWithoutFallbacks(s, simpleObject)
}
assertTrue("wrong exception: " + e.getMessage,
!e.getMessage.contains("cycle"))
}
@Test
def resolveIntInString() {
val s = substInString("bar.int")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("start<43>end"), v)
}
@Test
def resolveNullInString() {
val s = substInString("bar.null")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("start<null>end"), v)
// when null is NOT a subst, it should also not become empty
val o = parseConfig("""{ "a" : null foo bar }""")
assertEquals("null foo bar", o.getString("a"))
}
@Test
def resolveMissingInString() {
val s = substInString("bar.missing", true /* optional */ )
val v = resolveWithoutFallbacks(s, simpleObject)
// absent object becomes empty string
assertEquals(stringValue("start<>end"), v)
intercept[ConfigException.UnresolvedSubstitution] {
val s2 = substInString("bar.missing", false /* optional */ )
resolveWithoutFallbacks(s2, simpleObject)
}
}
@Test
def resolveBoolInString() {
val s = substInString("bar.bool")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("start<true>end"), v)
}
@Test
def resolveStringInString() {
val s = substInString("bar.string")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("start<hello>end"), v)
}
@Test
def resolveDoubleInString() {
val s = substInString("bar.double")
val v = resolveWithoutFallbacks(s, simpleObject)
assertEquals(stringValue("start<3.14>end"), v)
}
@Test
def missingInArray() {
import scala.collection.JavaConverters._
val obj = parseObject("""
a : [ ${?missing}, ${?also.missing} ]
""")
val resolved = resolve(obj)
assertEquals(Seq(), resolved.getList("a").asScala)
}
@Test
def missingInObject() {
import scala.collection.JavaConverters._
val obj = parseObject("""
a : ${?missing}, b : ${?also.missing}, c : ${?b}, d : ${?c}
""")
val resolved = resolve(obj)
assertTrue(resolved.isEmpty())
}
private val substChainObject = {
parseObject("""
{
"foo" : ${bar},
"bar" : ${a.b.c},
"a" : { "b" : { "c" : 57 } }
}
""")
}
@Test
def chainSubstitutions() {
val s = subst("foo")
val v = resolveWithoutFallbacks(s, substChainObject)
assertEquals(intValue(57), v)
}
@Test
def substitutionsLookForward() {
val obj = parseObject("""a=1,b=${a},a=2""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("b"))
}
@Test
def throwOnIncrediblyTrivialCycle() {
val s = subst("a")
val e = intercept[ConfigException.UnresolvedSubstitution] {
val v = resolveWithoutFallbacks(s, parseObject("a: ${a}"))
}
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("cycle"))
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("${a}"))
}
private val substCycleObject = {
parseObject("""
{
"foo" : ${bar},
"bar" : ${a.b.c},
"a" : { "b" : { "c" : ${foo} } }
}
""")
}
@Test
def throwOnCycles() {
val s = subst("foo")
val e = intercept[ConfigException.UnresolvedSubstitution] {
val v = resolveWithoutFallbacks(s, substCycleObject)
}
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("cycle"))
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("${foo}, ${bar}, ${a.b.c}, ${foo}"))
}
@Test
def throwOnOptionalReferenceToNonOptionalCycle() {
// we look up ${?foo}, but the cycle has hard
// non-optional links in it so still has to throw.
val s = subst("foo", optional = true)
val e = intercept[ConfigException.UnresolvedSubstitution] {
val v = resolveWithoutFallbacks(s, substCycleObject)
}
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("cycle"))
}
// ALL the links have to be optional here for the cycle to be ignored
private val substCycleObjectOptionalLink = {
parseObject("""
{
"foo" : ${?bar},
"bar" : ${?a.b.c},
"a" : { "b" : { "c" : ${?foo} } }
}
""")
}
@Test
def optionalLinkCyclesActLikeUndefined() {
val s = subst("foo", optional = true)
val v = resolveWithoutFallbacks(s, substCycleObjectOptionalLink)
assertNull("Cycle with optional links in it resolves to null if it's a cycle", v)
}
@Test
def throwOnTwoKeyCycle() {
val obj = parseObject("""a:${b},b:${a}""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("cycle"))
}
@Test
def throwOnFourKeyCycle() {
val obj = parseObject("""a:${b},b:${c},c:${d},d:${a}""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("Wrong exception: " + e.getMessage, e.getMessage().contains("cycle"))
}
@Test
def resolveObject() {
val resolved = resolveWithoutFallbacks(substChainObject)
assertEquals(57, resolved.getInt("foo"))
assertEquals(57, resolved.getInt("bar"))
assertEquals(57, resolved.getInt("a.b.c"))
}
private val substSideEffectCycle = {
parseObject("""
{
"foo" : ${a.b.c},
"a" : { "b" : { "c" : 42, "cycle" : ${foo} }, "cycle" : ${foo} }
}
""")
}
@Test
def avoidSideEffectCycles() {
// The point of this test is that in traversing objects
// to resolve a path, we need to avoid resolving
// substitutions that are in the traversed objects but
// are not directly required to resolve the path.
// i.e. there should not be a cycle in this test.
val resolved = resolveWithoutFallbacks(substSideEffectCycle)
assertEquals(42, resolved.getInt("foo"))
assertEquals(42, resolved.getInt("a.b.cycle"))
assertEquals(42, resolved.getInt("a.cycle"))
}
@Test
def ignoreHiddenUndefinedSubst() {
// if a substitution is overridden then it shouldn't matter that it's undefined
val obj = parseObject("""a=${nonexistent},a=42""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("a"))
}
@Test
def objectDoesNotHideUndefinedSubst() {
// if a substitution is overridden by an object we still need to
// evaluate the substitution
val obj = parseObject("""a=${nonexistent},a={ b : 42 }""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("Could not resolve"))
}
@Test
def ignoreHiddenCircularSubst() {
// if a substitution is overridden then it shouldn't matter that it's circular
val obj = parseObject("""a=${a},a=42""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("a"))
}
private val delayedMergeObjectResolveProblem1 = {
parseObject("""
defaults {
a = 1
b = 2
}
// make item1 into a ConfigDelayedMergeObject
item1 = ${defaults}
// note that we'll resolve to a non-object value
// so item1.b will ignoreFallbacks and not depend on
// ${defaults}
item1.b = 3
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem1() {
assertTrue(delayedMergeObjectResolveProblem1.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem1)
assertEquals(3, resolved.getInt("item1.b"))
assertEquals(3, resolved.getInt("item2.b"))
}
private val delayedMergeObjectResolveProblem2 = {
parseObject("""
defaults {
a = 1
b = 2
}
// make item1 into a ConfigDelayedMergeObject
item1 = ${defaults}
// note that we'll resolve to an object value
// so item1.b will depend on also looking up ${defaults}
item1.b = { c : 43 }
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem2() {
assertTrue(delayedMergeObjectResolveProblem2.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem2)
assertEquals(parseObject("{ c : 43 }"), resolved.getObject("item1.b"))
assertEquals(43, resolved.getInt("item1.b.c"))
assertEquals(43, resolved.getInt("item2.b.c"))
}
// in this case, item1 is self-referential because
// it refers to ${defaults} which refers back to
// ${item1}. When self-referencing, only the
// value of ${item1} "looking back" should be
// visible. This is really a test of the
// self-referencing semantics.
private val delayedMergeObjectResolveProblem3 = {
parseObject("""
item1.b.c = 100
defaults {
// we depend on item1.b.c
a = ${item1.b.c}
b = 2
}
// make item1 into a ConfigDelayedMergeObject
item1 = ${defaults}
// the ${item1.b.c} above in ${defaults} should ignore
// this because it only looks back
item1.b = { c : 43 }
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem3() {
assertTrue(delayedMergeObjectResolveProblem3.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem3)
assertEquals(parseObject("{ c : 43 }"), resolved.getObject("item1.b"))
assertEquals(43, resolved.getInt("item1.b.c"))
assertEquals(43, resolved.getInt("item2.b.c"))
assertEquals(100, resolved.getInt("defaults.a"))
}
private val delayedMergeObjectResolveProblem4 = {
parseObject("""
defaults {
a = 1
b = 2
}
item1.b = 7
// make item1 into a ConfigDelayedMerge
item1 = ${defaults}
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem4() {
// in this case we have a ConfigDelayedMerge not a ConfigDelayedMergeObject
assertTrue(delayedMergeObjectResolveProblem4.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMerge])
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem4)
assertEquals(2, resolved.getInt("item1.b"))
assertEquals(2, resolved.getInt("item2.b"))
}
private val delayedMergeObjectResolveProblem5 = {
parseObject("""
defaults {
a = ${item1.b} // tricky cycle - we won't see ${defaults}
// as we resolve this
b = 2
}
item1.b = 7
// make item1 into a ConfigDelayedMerge
item1 = ${defaults}
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem5() {
// in this case we have a ConfigDelayedMerge not a ConfigDelayedMergeObject
assertTrue(delayedMergeObjectResolveProblem5.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMerge])
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem5)
assertEquals("item1.b", 2, resolved.getInt("item1.b"))
assertEquals("item2.b", 2, resolved.getInt("item2.b"))
assertEquals("defaults.a", 7, resolved.getInt("defaults.a"))
}
private val delayedMergeObjectResolveProblem6 = {
parseObject("""
z = 15
defaults-defaults-defaults {
m = ${z}
n.o.p = ${z}
}
defaults-defaults {
x = 10
y = 11
asdf = ${z}
}
defaults {
a = 1
b = 2
}
defaults-alias = ${defaults}
// make item1 into a ConfigDelayedMergeObject several layers deep
// that will NOT become resolved just because we resolve one path
// through it.
item1 = 345
item1 = ${?NONEXISTENT}
item1 = ${defaults-defaults-defaults}
item1 = {}
item1 = ${defaults-defaults}
item1 = ${defaults-alias}
item1 = ${defaults}
item1.b = { c : 43 }
item1.xyz = 101
// be sure we can resolve a substitution to a value in
// a delayed-merge object.
item2.b = ${item1.b}
""")
}
@Test
def avoidDelayedMergeObjectResolveProblem6() {
assertTrue(delayedMergeObjectResolveProblem6.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
// should be able to attemptPeekWithPartialResolve() a known non-object without resolving
assertEquals(101, delayedMergeObjectResolveProblem6.toConfig().getObject("item1").attemptPeekWithPartialResolve("xyz").unwrapped())
val resolved = resolveWithoutFallbacks(delayedMergeObjectResolveProblem6)
assertEquals(parseObject("{ c : 43 }"), resolved.getObject("item1.b"))
assertEquals(43, resolved.getInt("item1.b.c"))
assertEquals(43, resolved.getInt("item2.b.c"))
assertEquals(15, resolved.getInt("item1.n.o.p"))
}
private val delayedMergeObjectWithKnownValue = {
parseObject("""
defaults {
a = 1
b = 2
}
// make item1 into a ConfigDelayedMergeObject
item1 = ${defaults}
// note that we'll resolve to a non-object value
// so item1.b will ignoreFallbacks and not depend on
// ${defaults}
item1.b = 3
""")
}
@Test
def fetchKnownValueFromDelayedMergeObject() {
assertTrue(delayedMergeObjectWithKnownValue.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
assertEquals(3, delayedMergeObjectWithKnownValue.toConfig.getConfig("item1").getInt("b"))
}
private val delayedMergeObjectNeedsFullResolve = {
parseObject("""
defaults {
a = 1
b = { c : 31 }
}
item1 = ${defaults}
// because b is an object, fetching it requires resolving ${defaults} above
// to see if there are more keys to merge with b.
item1.b = { c : 41 }
""")
}
@Test
def failToFetchFromDelayedMergeObjectNeedsFullResolve() {
assertTrue(delayedMergeObjectWithKnownValue.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
val e = intercept[ConfigException.NotResolved] {
delayedMergeObjectNeedsFullResolve.toConfig().getObject("item1.b")
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("item1.b"))
}
// objects that mutually refer to each other
private val delayedMergeObjectEmbrace = {
parseObject("""
defaults {
a = 1
b = 2
}
item1 = ${defaults}
// item1.c refers to a field in item2 that refers to item1
item1.c = ${item2.d}
// item1.x refers to a field in item2 that doesn't go back to item1
item1.x = ${item2.y}
item2 = ${defaults}
// item2.d refers to a field in item1
item2.d = ${item1.a}
item2.y = 15
""")
}
@Test
def resolveDelayedMergeObjectEmbrace() {
assertTrue(delayedMergeObjectEmbrace.attemptPeekWithPartialResolve("item1").isInstanceOf[ConfigDelayedMergeObject])
assertTrue(delayedMergeObjectEmbrace.attemptPeekWithPartialResolve("item2").isInstanceOf[ConfigDelayedMergeObject])
val resolved = delayedMergeObjectEmbrace.toConfig.resolve()
assertEquals(1, resolved.getInt("item1.c"))
assertEquals(1, resolved.getInt("item2.d"))
assertEquals(15, resolved.getInt("item1.x"))
}
// objects that mutually refer to each other
private val plainObjectEmbrace = {
parseObject("""
item1.a = 10
item1.b = ${item2.d}
item2.c = 12
item2.d = 14
item2.e = ${item1.a}
item2.f = ${item1.b} // item1.b goes back to item2
item2.g = ${item2.f} // goes back to ourselves
""")
}
@Test
def resolvePlainObjectEmbrace() {
assertTrue(plainObjectEmbrace.attemptPeekWithPartialResolve("item1").isInstanceOf[SimpleConfigObject])
assertTrue(plainObjectEmbrace.attemptPeekWithPartialResolve("item2").isInstanceOf[SimpleConfigObject])
val resolved = plainObjectEmbrace.toConfig.resolve()
assertEquals(14, resolved.getInt("item1.b"))
assertEquals(10, resolved.getInt("item2.e"))
assertEquals(14, resolved.getInt("item2.f"))
assertEquals(14, resolved.getInt("item2.g"))
}
@Test
def useRelativeToSameFileWhenRelativized() {
val child = parseObject("""foo=in child,bar=${foo}""")
val values = new java.util.HashMap[String, AbstractConfigValue]()
values.put("a", child.relativized(new Path("a")))
// this "foo" should NOT be used.
values.put("foo", stringValue("in parent"));
val resolved = resolve(new SimpleConfigObject(fakeOrigin(), values));
assertEquals("in child", resolved.getString("a.bar"))
}
@Test
def useRelativeToRootWhenRelativized() {
// here, "foo" is not defined in the child
val child = parseObject("""bar=${foo}""")
val values = new java.util.HashMap[String, AbstractConfigValue]()
values.put("a", child.relativized(new Path("a")))
// so this "foo" SHOULD be used
values.put("foo", stringValue("in parent"));
val resolved = resolve(new SimpleConfigObject(fakeOrigin(), values));
assertEquals("in parent", resolved.getString("a.bar"))
}
private val substComplexObject = {
parseObject("""
{
"foo" : ${bar},
"bar" : ${a.b.c},
"a" : { "b" : { "c" : 57, "d" : ${foo}, "e" : { "f" : ${foo} } } },
"objA" : ${a},
"objB" : ${a.b},
"objE" : ${a.b.e},
"foo.bar" : 37,
"arr" : [ ${foo}, ${a.b.c}, ${"foo.bar"}, ${objB.d}, ${objA.b.e.f}, ${objE.f} ],
"ptrToArr" : ${arr},
"x" : { "y" : { "ptrToPtrToArr" : ${ptrToArr} } }
}
""")
}
@Test
def complexResolve() {
import scala.collection.JavaConverters._
val resolved = resolveWithoutFallbacks(substComplexObject)
assertEquals(57, resolved.getInt("foo"))
assertEquals(57, resolved.getInt("bar"))
assertEquals(57, resolved.getInt("a.b.c"))
assertEquals(57, resolved.getInt("a.b.d"))
assertEquals(57, resolved.getInt("objB.d"))
assertEquals(Seq(57, 57, 37, 57, 57, 57), resolved.getIntList("arr").asScala)
assertEquals(Seq(57, 57, 37, 57, 57, 57), resolved.getIntList("ptrToArr").asScala)
assertEquals(Seq(57, 57, 37, 57, 57, 57), resolved.getIntList("x.y.ptrToPtrToArr").asScala)
}
private val substSystemPropsObject = {
parseObject("""
{
"a" : ${configtest.a},
"b" : ${configtest.b}
}
""")
}
@Test
def doNotSerializeUnresolvedObject() {
checkNotSerializable(substComplexObject)
}
  // this is a weird test; it used to test fallback to system props, which made more sense.
// Now it just tests that if you override with system props, you can use system props
// in substitutions.
@Test
def overrideWithSystemProps() {
System.setProperty("configtest.a", "1234")
System.setProperty("configtest.b", "5678")
ConfigImpl.reloadSystemPropertiesConfig()
val resolved = resolve(ConfigFactory.systemProperties().withFallback(substSystemPropsObject).root.asInstanceOf[AbstractConfigObject])
assertEquals("1234", resolved.getString("a"))
assertEquals("5678", resolved.getString("b"))
}
private val substEnvVarObject = {
parseObject("""
{
"home" : ${?HOME},
"pwd" : ${?PWD},
"shell" : ${?SHELL},
"lang" : ${?LANG},
"path" : ${?PATH},
"not_here" : ${?NOT_HERE}
}
""")
}
@Test
def fallbackToEnv() {
import scala.collection.JavaConverters._
val resolved = resolve(substEnvVarObject)
var existed = 0
for (k <- resolved.root.keySet().asScala) {
val e = System.getenv(k.toUpperCase());
if (e != null) {
existed += 1
assertEquals(e, resolved.getString(k))
} else {
assertNull(resolved.root.get(k))
}
}
if (existed == 0) {
throw new Exception("None of the env vars we tried to use for testing were set")
}
}
@Test
def noFallbackToEnvIfValuesAreNull() {
import scala.collection.JavaConverters._
// create a fallback object with all the env var names
// set to null. we want to be sure this blocks
// lookup in the environment. i.e. if there is a
// { HOME : null } then ${HOME} should be null.
val nullsMap = new java.util.HashMap[String, Object]
for (k <- substEnvVarObject.keySet().asScala) {
nullsMap.put(k.toUpperCase(), null);
}
val nulls = ConfigFactory.parseMap(nullsMap, "nulls map")
val resolved = resolve(substEnvVarObject.withFallback(nulls))
for (k <- resolved.root.keySet().asScala) {
assertNotNull(resolved.root.get(k))
assertEquals(nullValue, resolved.root.get(k))
}
}
@Test
def fallbackToEnvWhenRelativized() {
import scala.collection.JavaConverters._
val values = new java.util.HashMap[String, AbstractConfigValue]()
values.put("a", substEnvVarObject.relativized(new Path("a")))
val resolved = resolve(new SimpleConfigObject(fakeOrigin(), values));
var existed = 0
for (k <- resolved.getObject("a").keySet().asScala) {
val e = System.getenv(k.toUpperCase());
if (e != null) {
existed += 1
assertEquals(e, resolved.getConfig("a").getString(k))
} else {
assertNull(resolved.getObject("a").get(k))
}
}
if (existed == 0) {
throw new Exception("None of the env vars we tried to use for testing were set")
}
}
@Test
def throwWhenEnvNotFound() {
val obj = parseObject("""{ a : ${NOT_HERE} }""")
intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
}
@Test
def optionalOverrideNotProvided() {
val obj = parseObject("""{ a: 42, a : ${?NOT_HERE} }""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("a"))
}
@Test
def optionalOverrideProvided() {
val obj = parseObject("""{ HERE : 43, a: 42, a : ${?HERE} }""")
val resolved = resolve(obj)
assertEquals(43, resolved.getInt("a"))
}
@Test
def optionalOverrideOfObjectNotProvided() {
val obj = parseObject("""{ a: { b : 42 }, a : ${?NOT_HERE} }""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("a.b"))
}
@Test
def optionalOverrideOfObjectProvided() {
val obj = parseObject("""{ HERE : 43, a: { b : 42 }, a : ${?HERE} }""")
val resolved = resolve(obj)
assertEquals(43, resolved.getInt("a"))
assertFalse(resolved.hasPath("a.b"))
}
@Test
def optionalVanishesFromArray() {
import scala.collection.JavaConverters._
val obj = parseObject("""{ a : [ 1, 2, 3, ${?NOT_HERE} ] }""")
val resolved = resolve(obj)
assertEquals(Seq(1, 2, 3), resolved.getIntList("a").asScala)
}
@Test
def optionalUsedInArray() {
import scala.collection.JavaConverters._
val obj = parseObject("""{ HERE: 4, a : [ 1, 2, 3, ${?HERE} ] }""")
val resolved = resolve(obj)
assertEquals(Seq(1, 2, 3, 4), resolved.getIntList("a").asScala)
}
@Test
def substSelfReference() {
val obj = parseObject("""a=1, a=${a}""")
val resolved = resolve(obj)
assertEquals(1, resolved.getInt("a"))
}
@Test
def substSelfReferenceUndefined() {
val obj = parseObject("""a=${a}""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("cycle"))
}
@Test
def substSelfReferenceOptional() {
val obj = parseObject("""a=${?a}""")
val resolved = resolve(obj)
assertEquals("optional self reference disappears", 0, resolved.root.size)
}
@Test
def substSelfReferenceAlongPath() {
val obj = parseObject("""a.b=1, a.b=${a.b}""")
val resolved = resolve(obj)
assertEquals(1, resolved.getInt("a.b"))
}
@Test
def substSelfReferenceAlongLongerPath() {
val obj = parseObject("""a.b.c=1, a.b.c=${a.b.c}""")
val resolved = resolve(obj)
assertEquals(1, resolved.getInt("a.b.c"))
}
@Test
def substSelfReferenceAlongPathMoreComplex() {
// this is an example from the spec
val obj = parseObject("""
foo : { a : { c : 1 } }
foo : ${foo.a}
foo : { a : 2 }
""")
val resolved = resolve(obj)
assertEquals(1, resolved.getInt("foo.c"))
assertEquals(2, resolved.getInt("foo.a"))
}
@Test
def substSelfReferenceIndirect() {
val obj = parseObject("""a=1, b=${a}, a=${b}""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("cycle"))
}
@Test
def substSelfReferenceDoubleIndirect() {
val obj = parseObject("""a=1, b=${c}, c=${a}, a=${b}""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage, e.getMessage.contains("cycle"))
}
@Test
def substSelfReferenceIndirectStackCycle() {
// this situation is undefined, depends on
// whether we resolve a or b first.
val obj = parseObject("""a=1, b={c=5}, b=${a}, a=${b}""")
val resolved = resolve(obj)
val option1 = parseObject(""" b={c=5}, a={c=5} """).toConfig()
val option2 = parseObject(""" b=1, a=1 """).toConfig()
assertTrue("not an expected possibility: " + resolved +
" expected 1: " + option1 + " or 2: " + option2,
resolved == option1 || resolved == option2)
}
@Test
def substSelfReferenceObject() {
val obj = parseObject("""a={b=5}, a=${a}""")
val resolved = resolve(obj)
assertEquals(5, resolved.getInt("a.b"))
}
@Test
def substSelfReferenceObjectAlongPath() {
val obj = parseObject("""a.b={c=5}, a.b=${a.b}""")
val resolved = resolve(obj)
assertEquals(5, resolved.getInt("a.b.c"))
}
@Test
def substSelfReferenceInConcat() {
val obj = parseObject("""a=1, a=${a}foo""")
val resolved = resolve(obj)
assertEquals("1foo", resolved.getString("a"))
}
@Test
def substSelfReferenceIndirectInConcat() {
// this situation is undefined, depends on
// whether we resolve a or b first. If b first
// then there's an error because ${a} is undefined.
// if a first then b=1foo and a=1foo.
val obj = parseObject("""a=1, b=${a}foo, a=${b}""")
val either = try {
Left(resolve(obj))
} catch {
case e: ConfigException.UnresolvedSubstitution =>
Right(e)
}
val option1 = Left(parseObject("""a:1foo,b:1foo""").toConfig)
assertTrue("not an expected possibility: " + either +
" expected value " + option1 + " or an exception",
either == option1 || either.isRight)
}
@Test
def substOptionalSelfReferenceInConcat() {
val obj = parseObject("""a=${?a}foo""")
val resolved = resolve(obj)
assertEquals("foo", resolved.getString("a"))
}
@Test
def substOptionalIndirectSelfReferenceInConcat() {
val obj = parseObject("""a=${?b}foo,b=${a}""")
val resolved = resolve(obj)
assertEquals("foo", resolved.getString("a"))
}
@Test
def substTwoOptionalSelfReferencesInConcat() {
val obj = parseObject("""a=${?a}foo${?a}""")
val resolved = resolve(obj)
assertEquals("foo", resolved.getString("a"))
}
@Test
def substTwoOptionalSelfReferencesInConcatWithPriorValue() {
val obj = parseObject("""a=1,a=${?a}foo${?a}""")
val resolved = resolve(obj)
assertEquals("1foo1", resolved.getString("a"))
}
@Test
def substSelfReferenceMiddleOfStack() {
val obj = parseObject("""a=1, a=${a}, a=2""")
val resolved = resolve(obj)
// the substitution would be 1, but then 2 overrides
assertEquals(2, resolved.getInt("a"))
}
@Test
def substSelfReferenceObjectMiddleOfStack() {
val obj = parseObject("""a={b=5}, a=${a}, a={c=6}""")
val resolved = resolve(obj)
assertEquals(5, resolved.getInt("a.b"))
assertEquals(6, resolved.getInt("a.c"))
}
@Test
def substOptionalSelfReferenceMiddleOfStack() {
val obj = parseObject("""a=1, a=${?a}, a=2""")
val resolved = resolve(obj)
// the substitution would be 1, but then 2 overrides
assertEquals(2, resolved.getInt("a"))
}
@Test
def substSelfReferenceBottomOfStack() {
// self-reference should just be ignored since it's
// overridden
val obj = parseObject("""a=${a}, a=1, a=2""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("a"))
}
@Test
def substOptionalSelfReferenceBottomOfStack() {
val obj = parseObject("""a=${?a}, a=1, a=2""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("a"))
}
@Test
def substSelfReferenceTopOfStack() {
val obj = parseObject("""a=1, a=2, a=${a}""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("a"))
}
@Test
def substOptionalSelfReferenceTopOfStack() {
val obj = parseObject("""a=1, a=2, a=${?a}""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("a"))
}
@Test
def substSelfReferenceAlongAPath() {
// ${a} in the middle of the stack means "${a} in the stack
// below us" and so ${a.b} means b inside the "${a} below us"
// not b inside the final "${a}"
val obj = parseObject("""a={b={c=5}}, a=${a.b}, a={b=2}""")
val resolved = resolve(obj)
assertEquals(5, resolved.getInt("a.c"))
}
@Test
def substSelfReferenceAlongAPathInsideObject() {
// if the ${a.b} is _inside_ a field value instead of
// _being_ the field value, it does not look backward.
val obj = parseObject("""a={b={c=5}}, a={ x : ${a.b} }, a={b=2}""")
val resolved = resolve(obj)
assertEquals(2, resolved.getInt("a.x"))
}
@Test
def substInChildFieldNotASelfReference1() {
// here, ${bar.foo} is not a self reference because
// it's the value of a child field of bar, not bar
// itself; so we use bar's current value, rather than
// looking back in the merge stack
val obj = parseObject("""
bar : { foo : 42,
baz : ${bar.foo}
}
""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("bar.baz"))
assertEquals(42, resolved.getInt("bar.foo"))
}
@Test
def substInChildFieldNotASelfReference2() {
// checking that having bar.foo later in the stack
// doesn't break the behavior
val obj = parseObject("""
bar : { foo : 42,
baz : ${bar.foo}
}
bar : { foo : 43 }
""")
val resolved = resolve(obj)
assertEquals(43, resolved.getInt("bar.baz"))
assertEquals(43, resolved.getInt("bar.foo"))
}
@Test
def substInChildFieldNotASelfReference3() {
// checking that having bar.foo earlier in the merge
// stack doesn't break the behavior.
val obj = parseObject("""
bar : { foo : 43 }
bar : { foo : 42,
baz : ${bar.foo}
}
""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("bar.baz"))
assertEquals(42, resolved.getInt("bar.foo"))
}
@Test
def substInChildFieldNotASelfReference4() {
// checking that having bar set to non-object earlier
// doesn't break the behavior.
val obj = parseObject("""
bar : 101
bar : { foo : 42,
baz : ${bar.foo}
}
""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("bar.baz"))
assertEquals(42, resolved.getInt("bar.foo"))
}
@Test
def substInChildFieldNotASelfReference5() {
// checking that having bar set to unresolved array earlier
// doesn't break the behavior.
val obj = parseObject("""
x : 0
bar : [ ${x}, 1, 2, 3 ]
bar : { foo : 42,
baz : ${bar.foo}
}
""")
val resolved = resolve(obj)
assertEquals(42, resolved.getInt("bar.baz"))
assertEquals(42, resolved.getInt("bar.foo"))
}
@Test
def mutuallyReferringNotASelfReference() {
val obj = parseObject("""
// bar.a should end up as 4
bar : { a : ${foo.d}, b : 1 }
bar.b = 3
// foo.c should end up as 3
foo : { c : ${bar.b}, d : 2 }
foo.d = 4
""")
val resolved = resolve(obj)
assertEquals(4, resolved.getInt("bar.a"))
assertEquals(3, resolved.getInt("foo.c"))
}
@Test
def substSelfReferenceMultipleTimes() {
val obj = parseObject("""a=1,a=${a},a=${a},a=${a}""")
val resolved = resolve(obj)
assertEquals(1, resolved.getInt("a"))
}
@Test
def substSelfReferenceInConcatMultipleTimes() {
val obj = parseObject("""a=1,a=${a}x,a=${a}y,a=${a}z""")
val resolved = resolve(obj)
assertEquals("1xyz", resolved.getString("a"))
}
@Test
def substSelfReferenceInArray() {
// never "look back" from "inside" an array
val obj = parseObject("""a=1,a=[${a}, 2]""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage,
e.getMessage.contains("cycle") && e.getMessage.contains("${a}"))
}
@Test
def substSelfReferenceInObject() {
// never "look back" from "inside" an object
val obj = parseObject("""a=1,a={ x : ${a} }""")
val e = intercept[ConfigException.UnresolvedSubstitution] {
resolve(obj)
}
assertTrue("wrong exception: " + e.getMessage,
e.getMessage.contains("cycle") && e.getMessage.contains("${a}"))
}
@Test
def selfReferentialObjectNotAffectedByOverriding() {
// this is testing that we can still refer to another
// field in the same object, even though we are overriding
// an earlier object.
val obj = parseObject("""a={ x : 42, y : ${a.x} }""")
val resolved = resolve(obj)
assertEquals(parseObject("{ x : 42, y : 42 }"), resolved.getConfig("a").root)
// this is expected because if adding "a=1" here affects the outcome,
// it would be flat-out bizarre.
val obj2 = parseObject("""a=1, a={ x : 42, y : ${a.x} }""")
val resolved2 = resolve(obj2)
assertEquals(parseObject("{ x : 42, y : 42 }"), resolved2.getConfig("a").root)
}
}
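// A minimal application-level sketch of the substitution behaviour exercised above,
// using only the public ConfigFactory/Config API (the keys are illustrative):
//
//   val conf = ConfigFactory.parseString("a = 1, b = ${a}, c = ${?MISSING}").resolve()
//   conf.getInt("b")   // 1
//   conf.hasPath("c")  // false: an optional substitution to an undefined value vanishes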
|
zeq9069/config
|
config/src/test/scala/com/typesafe/config/impl/ConfigSubstitutionTest.scala
|
Scala
|
apache-2.0
| 39,188 |
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io._
object GenerateInput {
def main(args : Array[String]) {
if (args.length != 6) {
println("usage: GenerateInput output-links-dir " +
"n-output-links-files ndocs mean-nlinks nlinks-range output-docs-file")
return;
}
val outputLinksDir = args(0)
val nOutputLinksFiles = args(1).toInt
val nDocs = args(2).toInt
val meanNLinks = args(3).toInt
val rangeNLinks = args(4).toInt
val outputDocsFile = args(5)
val r = new scala.util.Random(1)
val docRanks = new Array[Double](nDocs)
val docLinks = new Array[Int](nDocs)
var countLinks = 0
val docsWriter = new PrintWriter(outputDocsFile)
for (i <- 0 until nDocs) {
docRanks(i) = r.nextDouble * 100.0
docLinks(i) = meanNLinks + (r.nextInt(2 * rangeNLinks) - rangeNLinks)
assert(docLinks(i) > 0)
countLinks += docLinks(i)
      docsWriter.write(docRanks(i) + " " + docLinks(i) + "\n")
}
docsWriter.close()
val linkSource = new Array[Int](countLinks)
val linkDest = new Array[Int](countLinks)
var count = 0
for (i <- 0 until nDocs) {
for (j <- 0 until docLinks(i)) {
linkSource(count) = i
linkDest(count) = r.nextInt(nDocs)
count += 1
}
}
val linksPerFile = (countLinks + nOutputLinksFiles - 1) / nOutputLinksFiles
for (f <- 0 until nOutputLinksFiles) {
val writer = new PrintWriter(new File(outputLinksDir + "/input." + f))
val startLink = f * linksPerFile
var endLink = (f + 1) * linksPerFile
if (endLink > countLinks) {
endLink = countLinks
}
for (l <- startLink until endLink) {
        writer.write(linkSource(l) + " " + linkDest(l) + "\n")
}
writer.close
}
}
}
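// Example invocation (hypothetical paths and sizes, for illustration only):
//
//   scala GenerateInput /tmp/pagerank-links 4 100000 20 5 /tmp/pagerank-docs.txt
//
// This writes 100000 "rank nlinks" records to the docs file and spreads the generated
// links across 4 files named input.0 ... input.3 under the links directory.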
|
agrippa/spark-swat
|
functional-tests/pagerank/src/main/scala/sparkpagerank/GenerateInput.scala
|
Scala
|
bsd-3-clause
| 3,522 |
package com.twitter
import com.twitter.app.GlobalFlag
import com.twitter.finagle.{Addr, Dtab, NameTree, Namer, Resolver, Path, Name}
import com.twitter.util.{Var, Activity}
object newZk extends GlobalFlag(
true,
"If set to true, the new zk2 com.twitter.finagle.Resolver is used. Otherwise, " +
"an older, less reliable zookeeper client is used."
)
/**
* A namer for serverset paths of the form /zk-hosts/path... where zk-hosts is
* a zk connect string like 'zk.foo.com:2181'. Naming is performed by way of a
* Resolver. The specific zk resolver implementation is controlled by the
* `com.twitter.newZk` flag.
*/
private[twitter] trait BaseServersetNamer extends Namer {
private[this] val scheme = if (newZk()) "zk2" else "zk"
/** Resolve a resolver string to a Var[Addr]. */
protected[this] def resolve(spec: String): Var[Addr] = Resolver.eval(spec) match {
case Name.Bound(addr) => addr
case _ => Var.value(Addr.Neg)
}
protected[this] def resolveServerset(hosts: String, path: String) =
resolve(s"$scheme!$hosts!$path")
protected[this] def resolveServerset(hosts: String, path: String, endpoint: String) =
resolve(s"$scheme!$hosts!$path!$endpoint")
/** Bind a name. */
protected[this] def bind(path: Path): Option[Name.Bound]
// We have to involve a serverset roundtrip here to return a tree. We run the
// risk of invalidating an otherwise valid tree when there is a bad serverset
// on an Alt branch that would never be taken. A potential solution to this
// conundrum is to introduce some form of lazy evaluation of name trees.
def lookup(path: Path): Activity[NameTree[Name]] = bind(path) match {
case Some(name) =>
// We have to bind the name ourselves in order to know whether
// it resolves negatively.
Activity(name.addr map {
case Addr.Bound(_, _) => Activity.Ok(NameTree.Leaf(name))
case Addr.Neg => Activity.Ok(NameTree.Neg)
case Addr.Pending => Activity.Pending
case Addr.Failed(exc) => Activity.Failed(exc)
})
case None => Activity.value(NameTree.Neg)
}
}
/**
* The serverset namer takes [[com.twitter.finagle.Path Paths]] of the form
*
* {{{
* hosts/path...
* }}}
*
 * and returns a dynamic representation of the resolution of the path into a
* tree of [[com.twitter.finagle.Name Names]].
*
* The namer synthesizes nodes for each endpoint in the serverset.
* Endpoint names are delimited by the ':' character. For example
*
* {{{
* /$/com.twitter.serverset/sdzookeeper.local.twitter.com:2181/twitter/service/cuckoo/prod/read:http
* }}}
*
* is the endpoint `http` of serverset `/twitter/service/cuckoo/prod/read` on
* the ensemble `sdzookeeper.local.twitter.com:2181`.
*/
class serverset extends BaseServersetNamer {
private[this] val idPrefix = Path.Utf8("$", "com.twitter.serverset")
protected[this] def bind(path: Path): Option[Name.Bound] = path match {
case Path.Utf8(hosts, rest@_*) =>
val addr = if (rest.nonEmpty && (rest.last contains ":")) {
val Array(name, endpoint) = rest.last.split(":", 2)
val zkPath = (rest.init :+ name).mkString("/", "/", "")
resolveServerset(hosts, zkPath, endpoint)
} else {
val zkPath = rest.mkString("/", "/", "")
resolveServerset(hosts, zkPath)
}
// Clients may depend on Name.Bound ids being Paths which resolve
// back to the same Name.Bound
val id = idPrefix ++ path
Some(Name.Bound(addr, id))
case _ => None
}
}
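// For reference, `bind` above turns a serverset path into a resolver spec. With the
// zk2 resolver enabled, the path from the class documentation (relative to the
// /$/com.twitter.serverset prefix)
//
//   /sdzookeeper.local.twitter.com:2181/twitter/service/cuckoo/prod/read:http
//
// is resolved via
//
//   zk2!sdzookeeper.local.twitter.com:2181!/twitter/service/cuckoo/prod/read!http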
|
lysu/finagle
|
finagle-serversets/src/main/scala/com/twitter/serverset.scala
|
Scala
|
apache-2.0
| 3,518 |
package io.github.finagle.smtp
import java.io.File
import java.nio.charset.Charset
import org.jboss.netty.util.CharsetUtil
/**
* A trait for general MIME messages.
*/
sealed trait Mime{
val version = "1.0"
/**
* Headers not including MIME-Version.
*/
val headers: Map[String, String]
/**
* Gets all MIME headers of the message (including MIME-Version)
* in the form of a Map.
*/
def allHeaders: Map[String, String] = headers.updated("MIME-Version", version)
/**
* Gets string representations of all MIME headers of the message,
* including MIME-Version, ensuring that the latter is the first header.
*/
def getMimeHeaders: Seq[String] = {
allHeaders map {
case (k, v) => "%s: %s".format(k, v)
}
}.toSeq.sortWith((s1, s2) => s1 == ("MIME-Version: %s" format version))
/**
* Returns the body of the message that should be sent after headers
* as a String. Meant to be used mainly for test purposes.
*/
def message(): String
/**
* The size of the whole message in bytes.
*/
def size: Int = {
val headersSize = getMimeHeaders map { _.length } reduceLeft { _ + _ }
val contentSize = message().length
headersSize + contentSize
}
/**
* Gets content transfer encoding of the message.
*/
def contentTransferEncoding: String = headers.getOrElse("Content-Transfer-Encoding", TransferEncoding.default.value)
/**
* Gets content disposition of the message.
*/
def contentDisposition: String = headers.getOrElse("Content-Disposition", ContentDisposition.default.value)
/**
* Gets content type of the message.
*/
def contentType: String = headers.getOrElse("Content-Type", ContentType.default.value)
/**
* Adds a header to the message.
*
* @param key Header name
* @param value Header value
*/
def addHeader(key: String, value: String): Mime
/**
* Adds a header to the message.
*
* @param header [[io.github.finagle.smtp.MimeHeader]] representing the header.
*/
def addHeader(header: MimeHeader): Mime
/**
* Adds several headers to the message.
*
* @param newHeaders Map from header name to header value
*/
def addHeaders(newHeaders: Map[String, String]): Mime
/**
* Adds several headers to the message.
*
* @param newHeaders Sequence of [[io.github.finagle.smtp.MimeHeader MimeHeaders]]
*/
def addHeaders(newHeaders: Seq[MimeHeader]): Mime
}
object MimeTypes {
def withTypes(types: Seq[String]) = default ++ types
val default = Seq (
"text/html html htm",
"text/plain txt text",
"text/richtext rtf",
"image/gif gif GIF",
"image/png png",
"image/jpeg jpeg jpg jpe JPG",
"image/tiff tiff tif",
"audio/midi midi mid",
"audio/aifc aifc",
"audio/aiff aif aiff",
"audio/mpeg mpeg mpg",
"audio/wav wav",
"video/mpeg mpeg mpg mpe",
"video/quicktime qt mov",
"video/avi avi",
"application/zip zip"
)
}
object Mime {
val empty = MimePart.empty
protected val mimeTypeMap = new javax.activation.MimetypesFileTypeMap
MimeTypes.default foreach mimeTypeMap.addMimeTypes
private def textContent(text: String, subtype: String, enc: Charset) ={
val basic = MimePart(text.getBytes(enc))
val withHeaders =
if (enc != CharsetUtil.US_ASCII) {
basic
.setContentType(ContentType("text", subtype, Map("charset" -> enc.displayName())))
.setContentTransferEncoding(TransferEncoding.EightBit)
}
else basic.setContentType(ContentType("text", subtype))
withHeaders
}
/**
* Creates a plain text [[io.github.finagle.smtp.MimePart]] encoded in a given charset.
*
* @param text The text of the part
* @param enc The charset in which the text should be encoded
*/
def plainText(text: String, enc: Charset): MimePart = textContent(text, "plain", enc)
/**
* Creates a plain text [[io.github.finagle.smtp.MimePart]] encoded in a given charset.
*
* @param text The text of the part
* @param encName The name of the charset in which the text should be encoded
*/
def plainText(text: String, encName: String): MimePart = plainText(text, Charset.forName(encName))
/**
* Creates a plain text [[io.github.finagle.smtp.MimePart]] encoded in US-ASCII charset.
*
* @param text The text of the part
*/
def plainText(text: String): MimePart = plainText(text, Charset.forName("US-ASCII"))
/**
* Creates an HTML [[io.github.finagle.smtp.MimePart]] encoded in given charset
*
* @param text The HTML text
* @param enc The charset in which the text should be encoded
*/
def html(text: String, enc: Charset): MimePart = textContent(text, "html", enc)
/**
* Creates an HTML [[io.github.finagle.smtp.MimePart]] encoded in given charset
*
* @param text The HTML text
* @param encName The name of the charset in which the text should be encoded
*/
def html(text: String, encName: String): MimePart = html(text, Charset.forName(encName))
/**
* Creates an HTML [[io.github.finagle.smtp.MimePart]] encoded in given charset
*
* @param text The HTML text
*/
def html(text: String): MimePart = html(text, Charset.forName("US-ASCII"))
/**
* Creates a [[io.github.finagle.smtp.MimePart]] with contents from given file.
*
* @param path The path to the file
*/
def fromFile(path: String): MimePart = {
val file = new File(path)
val contents = com.twitter.io.Files.readBytes(file)
val probe = mimeTypeMap.getContentType(path)
val ct = if (probe == "application/octet-stream") ContentType.default
else ContentType parse probe
MimePart(contents, Map("Content-Type" -> ct.value))
}
}
/**
* A simple MIME message with some content.
*/
case class MimePart(content: Array[Byte], headers: Map[String, String] = Map.empty) extends Mime {
def message(): String = new String(content, "US-ASCII")
def addHeader(key: String, value: String): MimePart =
copy(headers = this.headers.updated(key, value))
def addHeader(header: MimeHeader): MimePart =
copy(headers = this.headers.updated(header.name, header.value))
def addHeaders(newHeaders: Seq[MimeHeader]): MimePart =
copy(headers = this.headers ++ newHeaders.map(h => (h.name, h.value)))
def addHeaders(newHeaders: Map[String, String]): MimePart =
copy(headers = this.headers ++ newHeaders)
/**
* Sets the ''Content-Type'' header of the message.
*/
def setContentType(ct: ContentType): MimePart = addHeader(ct)
/**
* Sets the charset of the message if its content type is text.
*/
def setCharset(charset: String): MimePart = {
val ct = ContentType parse this.contentType
if (ct.mediatype == "text")
setContentType(ct.copy(params = ct.params.updated("charset", charset)))
else this
}
/**
* Sets the ''Content-Transfer-Encoding'' header of the message.
*/
def setContentTransferEncoding(te: TransferEncoding): MimePart = addHeader(te)
/**
* Sets the ''Content-Disposition'' header of the message.
*/
def setContentDisposition(cd: ContentDisposition): MimePart = addHeader(cd)
}
object MimePart {
/**
* An empty [[io.github.finagle.smtp.MimePart]].
*/
val empty: MimePart = MimePart(Array.empty)
}
/**
* A multipart MIME message.
*
* @param parts The parts of the message
* @param headers The headers of this whole multipart message
* @param boundary The boundary that will be used to separate the parts and
* indicate the end of the message.
*/
case class MimeMultipart(parts: Seq[MimePart], headers: Map[String, String] = Map.empty)
(implicit val boundary: String = "d5f6s8asdkfh3") extends Mime {
// The content type of the whole multipart message
private val multiContentType = ContentType("multipart", "mixed", Map("boundary" -> boundary))
/**
* The delimiter used to separate the parts
*/
def delimiter: String = "--%s" format boundary
/**
* The delimiter used to indicate the end of the message
*/
def closingDelimiter: String = "--%s--" format boundary
override def allHeaders: Map[String, String] = super.allHeaders.updated("Content-Type", multiContentType.value)
def addHeader(key: String, value: String): MimeMultipart =
copy(headers = this.headers.updated(key, value))
def addHeader(header: MimeHeader): MimeMultipart =
copy(headers = this.headers.updated(header.name, header.value))
def addHeaders(newHeaders: Seq[MimeHeader]): MimeMultipart =
copy(headers = this.headers ++ newHeaders.map(h => (h.name, h.value)))
def addHeaders(newHeaders: Map[String, String]): MimeMultipart =
copy(headers = this.headers ++ newHeaders)
def message(): String = {
val partHeaders = parts map {_.getMimeHeaders.filter(!_.startsWith("MIME-Version")) mkString "\r\n" }
val partMessages = parts map { _.message() }
val partStrings = (partHeaders zip partMessages) map { case (h, m) => "\r\n%s\r\n\r\n%s\r\n".format(h, m)}
partStrings.mkString(delimiter, delimiter, closingDelimiter)
}
/**
* Adds given [[io.github.finagle.smtp.MimePart]] to the message.
*/
def addPart(part: MimePart) = copy(parts = this.parts :+ part)
/**
* Adds given sequence of [[io.github.finagle.smtp.MimePart]] to the message.
*/
def addParts(newparts: Seq[MimePart]) = copy(parts = this.parts ++ newparts)
/**
* Syntactic sugar for [[io.github.finagle.smtp.MimeMultipart.addPart()]]
*/
def + (part: MimePart) = addPart(part)
/**
* Sets the boundary of the message.
*/
def setBoundary(bnd: String) = copy()(bnd)
}
object MimeMultipart {
/**
* An empty [[io.github.finagle.smtp.MimeMultipart]].
*/
val empty: MimeMultipart = MimeMultipart(Seq.empty)
/**
* Creates a [[io.github.finagle.smtp.MimeMultipart]] containing
* only given [[io.github.finagle.smtp.MimePart]].
*/
def wrap(part: MimePart): MimeMultipart = MimeMultipart(Seq(part))
}
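// A short usage sketch; the attachment path is hypothetical, everything else uses the
// API defined in this file:
//
//   val body       = Mime.plainText("Hello from finagle-smtp", "UTF-8")
//   val attachment = Mime.fromFile("/tmp/report.txt")   // hypothetical path
//   val msg        = MimeMultipart.wrap(body) + attachment
//   msg.message()                                        // parts joined by the boundary delimiters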
|
finagle/finagle-smtp
|
src/main/scala/io/github/finagle/smtp/Mime.scala
|
Scala
|
apache-2.0
| 9,958 |
/*
Facsimile: A Discrete-Event Simulation Library
Copyright © 2004-2020, Michael J Allen.
This file is part of Facsimile.
Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
version.
Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see
http://www.gnu.org/licenses/lgpl.
The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
project home page at:
http://facsim.org/
Thank you for your interest in the Facsimile project!
IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for inclusion
as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If your code
fails to comply with the standard, then your patches will be rejected. For further information, please visit the coding
standards at:
http://facsim.org/Documentation/CodingStandards/
========================================================================================================================
Scala source file from the org.facsim.anim.cell package.
*/
package org.facsim.anim.cell
import org.facsim.LibResource
import org.facsim.anim.{Mesh, Point3D}
/**
Class representing ''[[http://www.automod.com/ AutoMod®]] cell conic frustum''
primitives.
@see
[[http://facsim.org/Documentation/Resources/AutoModCellFile/ConicFrustums.html
Conic Frustums]] for further information.
@constructor Construct a new conic frustum primitive from the data stream.
@param scene Reference to the CellScene of which this cell is a part.
@param parent Parent set of this cell primitive. If this value is `None`, then
this cell is the scene's root cell.
@throws org.facsim.anim.cell.IncorrectFormatException if the file supplied is
not an ''AutoMod® cell'' file.
@throws org.facsim.anim.cell.ParsingErrorException if errors are encountered
during parsing of the file.
@see
[[http://facsim.org/Documentation/Resources/AutoModCellFile/ConicFrustums.html
Conic Frustums]] for further information.
*/
private[cell] final class ConicFrustum(scene: CellScene,
parent: Option[Set])
extends Mesh3D(scene, parent) {
/**
Conic frustum base radius.
Base radius, measured on the X-Y plane.
*/
private val baseRadius = scene.readDouble(_ >= 0.0, LibResource
(ConicFrustum.ReadDimKey, 0))
/**
Conic frustum top radius.
Top radius, measured on the X-Y plane.
*/
private val topRadius = scene.readDouble(_ >= 0.0, LibResource
(ConicFrustum.ReadDimKey, 1))
/**
ConicFrustum height (Z-dimension).
Height of the conic frustum measured along the Z-Axis.
*/
private val height = scene.readDouble(_ >= 0.0, LibResource
(ConicFrustum.ReadDimKey, 2))
/**
ConicFrustum top X-axis offset.
*/
private val xOffset = scene.readDouble(LibResource
(ConicFrustum.ReadOffsetKey, 0))
/**
ConicFrustum top Y-axis offset.
*/
private val yOffset = scene.readDouble(LibResource
(ConicFrustum.ReadOffsetKey, 1))
/**
@inheritdoc
@note The origin of the conic frustum is at the center of its base.
*/
protected[cell] override def cellMesh: Mesh =
Mesh.conicFrustum(Point3D.Origin, baseRadius,
Point3D(xOffset, yOffset, height), topRadius, ConicFrustum.Divisions)
}
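/*
For reference, the conic frustum record is read from the cell stream in the order the
fields are declared above: base radius, top radius, height, X offset, Y offset (all
doubles; the first three must be non-negative).
*/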
/**
ConicFrustum companion object.
*/
private object ConicFrustum {
/**
Read dimension string resource key.
*/
val ReadDimKey = "anim.cell.ConicFrustum.readDim"
/**
Read offset string resource key.
*/
val ReadOffsetKey = "anim.cell.ConicFrustum.readOffset"
/**
Number of divisions per conic frustum.
The number of divisions for a fine conic frustum in AutoMod is 16, and for a
coarse conic frustum it's 8. For simplicity, we'll convert all conic frustums
to have 16 divisions.
*/
val Divisions = 16
}
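A minimal usage sketch, assuming the Mesh.conicFrustum signature used by cellMesh above is accessible to client code; the dimensions are hypothetical stand-ins for values normally parsed from a cell data stream.
import org.facsim.anim.{Mesh, Point3D}
object ConicFrustumMeshSketch {
// Hypothetical dimensions; a real ConicFrustum reads these from the CellScene stream.
val baseRadius = 2.0
val topRadius = 1.0
val height = 3.0
val (xOffset, yOffset) = (0.5, 0.0)
// Same call shape as ConicFrustum.cellMesh, using the fixed 16 divisions.
val mesh: Mesh = Mesh.conicFrustum(Point3D.Origin, baseRadius,
Point3D(xOffset, yOffset, height), topRadius, 16)
}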
|
MichaelJAllen/facsimile
|
core/src/main/scala/org/facsim/anim/cell/ConicFrustum.scala
|
Scala
|
lgpl-3.0
| 4,183 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.serving.http
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.module.SimpleModule
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.zoo.serving.TestUtils
import com.intel.analytics.zoo.serving.serialization
import com.intel.analytics.zoo.serving.utils.ConfigParser
import org.scalatest.{FlatSpec, Matchers}
class JsonInputDeserializerSpec extends FlatSpec with Matchers with Supportive {
val configPath = getClass.getClassLoader.getResource("serving").getPath + "/config-test.yaml"
val configParser = new ConfigParser(configPath)
"read json string" should "work" in {
val mapper = new ObjectMapper()
val module = new SimpleModule()
module.addDeserializer(classOf[Activity], new serialization.JsonInputDeserializer())
mapper.registerModule(module)
val jsonStr = """{
"instances" : [ {
"intTensor" : [ 7756, 9549, 1094, 9808, 4959, 3831, 3926, 6578, 1870, 1741 ],
"floatTensor" : [ 0.6804766, 0.30136853, 0.17394465, 0.44770062, 0.20275897 ],
"intTensor2" : [ [ 1, 2 ], [ 3, 4 ], [ 5, 6 ] ],
"floatTensor2" : [ [ [ 0.2, 0.3 ], [ 0.5, 0.6 ] ], [ [ 0.2, 0.3 ], [ 0.5, 0.6 ] ] ],
"intScalar" : 12345,
"floatScalar" : 3.14159
} ]
}"""
val tinyJsonStr = """{
"instances" : [ {
"intScalar" : 12345
} ]
}"""
timing("decode")() {
(0 to 100).foreach(_ => {
timing("decode once")() {
mapper.readValue(jsonStr, classOf[Activity])
}
timing("decode tiny string")() {
mapper.readValue(tinyJsonStr, classOf[Activity])
}
})
}
val a = mapper.readValue(jsonStr, classOf[Activity])
val b = mapper.readValue(tinyJsonStr, classOf[Activity])
a
}
"read dien string" should "work" in {
val mapper = new ObjectMapper()
val module = new SimpleModule()
module.addDeserializer(classOf[Activity], new serialization.JsonInputDeserializer())
mapper.registerModule(module)
val jsonStr = TestUtils.getStrFromResourceFile("dien_json_str.json")
val a = mapper.readValue(jsonStr, classOf[Activity])
a
}
}
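A minimal sketch of the same deserializer wiring outside the test harness, assuming the serving classpath above; the payload mirrors the tinyJsonStr shape exercised in the spec.
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.module.SimpleModule
import com.intel.analytics.bigdl.nn.abstractnn.Activity
import com.intel.analytics.zoo.serving.serialization
object JsonInputDecodeSketch {
// Build a mapper that routes Activity deserialization through the custom deserializer.
def decode(json: String): Activity = {
val mapper = new ObjectMapper()
val module = new SimpleModule()
module.addDeserializer(classOf[Activity], new serialization.JsonInputDeserializer())
mapper.registerModule(module)
mapper.readValue(json, classOf[Activity])
}
// Same "instances" shape the spec exercises, reduced to a single scalar.
val activity: Activity = decode("""{ "instances" : [ { "intScalar" : 12345 } ] }""")
}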
|
intel-analytics/analytics-zoo
|
zoo/src/test/scala/com/intel/analytics/zoo/serving/http/ServingFrontendSerializerSpec.scala
|
Scala
|
apache-2.0
| 2,771 |
package de.flapdoodle.server.stats
import de.flapdoodle.server.Instrumented
import java.lang.management.ManagementFactory
import org.json4s.JsonAST.JObject
import scala.util.Try
import java.lang.reflect.Method
/**
* JVM Stats
* -> taken from https://github.com/twitter/twitter-server
*/
protected[server] object JvmStats extends Instrumented{
import scala.collection.JavaConverters._
def register() = {
val mem = ManagementFactory.getMemoryMXBean()
def heap = mem.getHeapMemoryUsage()
metrics.gauge("heap.committed") { heap.getCommitted() }
metrics.gauge("heap.max") { heap.getMax() }
metrics.gauge("heap.used") { heap.getUsed() }
def nonHeap = mem.getNonHeapMemoryUsage()
metrics.gauge("nonheap.committed") { nonHeap.getCommitted() }
metrics.gauge("nonheap.max") { nonHeap.getMax() }
metrics.gauge("nonheap.used") { nonHeap.getUsed() }
val threads = ManagementFactory.getThreadMXBean()
metrics.gauge("thread.daemon_count") { threads.getDaemonThreadCount().toLong }
metrics.gauge("thread.count") { threads.getThreadCount().toLong }
metrics.gauge("thread.peak_count") { threads.getPeakThreadCount().toLong }
val runtime = ManagementFactory.getRuntimeMXBean()
metrics.gauge("start_time") { runtime.getStartTime() }
metrics.gauge("uptime") { runtime.getUptime() }
val os = ManagementFactory.getOperatingSystemMXBean()
metrics.gauge("num_cpus") { os.getAvailableProcessors().toLong }
os match {
case unix: com.sun.management.UnixOperatingSystemMXBean =>
metrics.gauge("fd_count") { unix.getOpenFileDescriptorCount }
metrics.gauge("fd_limit") { unix.getMaxFileDescriptorCount }
case _ =>
}
val compilation = ManagementFactory.getCompilationMXBean()
metrics.gauge("compilation.time_msec") { compilation.getTotalCompilationTime() }
val classes = ManagementFactory.getClassLoadingMXBean()
metrics.gauge("classes.total_loaded") { classes.getTotalLoadedClassCount() }
metrics.gauge("classes.total_unloaded") { classes.getUnloadedClassCount() }
metrics.gauge("classes.current_loaded") { classes.getLoadedClassCount().toLong }
val memPool = ManagementFactory.getMemoryPoolMXBeans.asScala
memPool foreach {
pool =>
val poolName: String = pool.getName().replaceAll("\\s","_")
if (pool.getCollectionUsage != null) {
def usage = pool.getCollectionUsage // this is a snapshot, we can't reuse the value
metrics.gauge(s"mem.postGC.${poolName}.used") { usage.getUsed }
metrics.gauge(s"mem.postGC.${poolName}.max") { usage.getMax }
}
if (pool.getUsage != null) {
def usage = pool.getUsage // this is a snapshot, we can't reuse the value
metrics.gauge(s"mem.current.${poolName}.used") { usage.getUsed }
metrics.gauge(s"mem.current.${poolName}.max") { usage.getMax }
}
}
metrics.gauge("mem.postGC.used") {
memPool flatMap(p => Option(p.getCollectionUsage)) map(_.getUsed) sum
}
metrics.gauge("mem.current.used") {
memPool flatMap(p => Option(p.getUsage)) map(_.getUsed) sum
}
// `BufferPoolMXBean` and `ManagementFactory.getPlatformMXBeans` are introduced in Java 1.7.
// Use reflection to add these gauges so we can still compile with 1.6
for {
bufferPoolMXBean <- Try[Class[_]] {
ClassLoader.getSystemClassLoader.loadClass("java.lang.management.BufferPoolMXBean")
}
getPlatformMXBeans <- classOf[ManagementFactory].getMethods.find { m =>
m.getName == "getPlatformMXBeans" && m.getParameterTypes.length == 1
}
pool <- getPlatformMXBeans.invoke(null /* static method */, bufferPoolMXBean)
.asInstanceOf[java.util.List[_]].asScala
} {
val name = bufferPoolMXBean.getMethod("getName").invoke(pool).asInstanceOf[String].replaceAll("\\s","_")
val getCount: Method = bufferPoolMXBean.getMethod("getCount")
metrics.gauge(s"buffer.${name}.count") { getCount.invoke(pool).asInstanceOf[Long] }
val getMemoryUsed: Method = bufferPoolMXBean.getMethod("getMemoryUsed")
metrics.gauge(s"buffer.${name}.used") { getMemoryUsed.invoke(pool).asInstanceOf[Long] }
val getTotalCapacity: Method = bufferPoolMXBean.getMethod("getTotalCapacity")
metrics.gauge(s"buffer.${name}.max") { getTotalCapacity.invoke(pool).asInstanceOf[Long] }
}
val gcPool = ManagementFactory.getGarbageCollectorMXBeans.asScala
gcPool foreach { gc =>
val name = gc.getName.replaceAll("\\s","_")
metrics.gauge(s"gc.${name}.cycles") { gc.getCollectionCount }
metrics.gauge(s"gc.${name}.msec") { gc.getCollectionTime }
}
// note, these could be -1 if the collector doesn't have support for it.
metrics.gauge(s"gc.cycles") { gcPool map(_.getCollectionCount) filter(_ > 0) sum }
metrics.gauge(s"gc.msec") { gcPool map(_.getCollectionTime) filter(_ > 0) sum }
}
}
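Usage is a single registration call at startup; this sketch assumes the Instrumented trait above wires the gauges into whatever metrics registry the server exposes.
package de.flapdoodle.server
object JvmStatsBootSketch {
def init(): Unit = {
// Registers heap, non-heap, thread, GC, class-loading and buffer-pool gauges once at boot.
stats.JvmStats.register()
}
}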
|
flapdoodle-oss/de.flapdoodle.server
|
src/main/scala/de.flapdoodle.server/stats/JvmStats.scala
|
Scala
|
apache-2.0
| 4,935 |
package controller
import skinny._
import skinny.filter.TxPerRequestFilter
import model.Company
import org.joda.time.DateTime
object ErrorController extends ApplicationController with TxPerRequestFilter with Routes {
def runtime = {
throw new RuntimeException
}
val runtimeUrl = get("/error/runtime")(runtime).as(Symbol("errorPage"))
get("/error/rollback") {
Company.createWithAttributes(Symbol("name") -> "Typesafe", Symbol("createdAt") -> DateTime.now)
rollbackTxPerRequest
logger.info("Transaction should be rolled back.")
}.as(Symbol("rollbackPage"))
}
|
skinny-framework/skinny-framework
|
example/src/main/scala/controller/ErrorController.scala
|
Scala
|
mit
| 591 |
/* Compile with
dotc implicits2.scala -Xprint:typer -Xprint-types -verbose
and verify that the inserted wrapString comes from Predef. You should see
val x: <root>.scala.collection.immutable.WrappedString =
<
<scala.Predef.wrapString:
((s: java.lang.String)scala.collection.immutable.WrappedString)
>
(<"abc":java.lang.String("abc")>):scala.collection.immutable.WrappedString
>
*/
object implicits2 {
val x: scala.collection.immutable.WrappedString = "abc"
implicit val (xx: String, y: Int) = ("a", 22)
def main(args: Array[String]) = {
println(implicitly[String])
println(implicitly[Int])
}
}
|
dotty-staging/dotty
|
tests/pos/test-implicits2.scala
|
Scala
|
apache-2.0
| 673 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qsu
import slamdata.Predef._
import quasar.{IdStatus, RenderTree, RenderTreeT, RenderedTree}
import quasar.common.{JoinType, SortDir}
import quasar.contrib.iota._
import quasar.contrib.matryoshka._
import quasar.contrib.pathy.AFile
import quasar.contrib.std.errorNotImplemented
import quasar.ejson.{EJson, Fixed}
import quasar.ejson.implicits._
import quasar.fp.ski.{ι, κ}
import quasar.fp._
import quasar.qscript._
import quasar.qscript.RecFreeS._
import quasar.qscript.provenance.JoinKeys
import matryoshka.{Hole => _, birecursiveIso => _, _} // {delayEqual, equalTEqual, delayShow, showTShow, BirecursiveT, Delay, Embed, EqualT, ShowT}
import matryoshka.data._
import matryoshka.patterns.{CoEnv, EnvT}
import monocle.{Iso, PTraversal, Prism}
import pathy.Path
import scalaz.{Applicative, Bitraverse, Cofree, Enum, Equal, Forall, Free, Functor, Id, Order, Scalaz, Show, Traverse, \/, \/-, NonEmptyList => NEL}
import scalaz.std.anyVal._
import scalaz.std.list._
import scalaz.std.tuple._
import scalaz.syntax.equal._
import scalaz.syntax.show._
import scalaz.syntax.std.option._
sealed trait QScriptUniform[T[_[_]], A] extends Product with Serializable
object QScriptUniform {
implicit def traverse[T[_[_]]]: Traverse[QScriptUniform[T, ?]] = new Traverse[QScriptUniform[T, ?]] {
// we need both apply and traverse syntax, which conflict
import Scalaz._
def traverseImpl[G[_]: Applicative, A, B](qsu: QScriptUniform[T, A])(f: A => G[B])
: G[QScriptUniform[T, B]] = qsu match {
case AutoJoin2(left, right, combiner) =>
(f(left) |@| f(right))(AutoJoin2(_, _, combiner))
case AutoJoin3(left, center, right, combiner) =>
(f(left) |@| f(center) |@| f(right))(AutoJoin3(_, _, _, combiner))
case QSAutoJoin(left, right, keys, combiner) =>
(f(left) |@| f(right))(QSAutoJoin(_, _, keys, combiner))
case GroupBy(left, right) =>
(f(left) |@| f(right))(GroupBy(_, _))
case DimEdit(source, dtrans) =>
f(source).map(DimEdit(_, dtrans))
case LPJoin(left, right, condition, joinType, leftRef, rightRef) =>
(f(left) |@| f(right) |@| f(condition))(LPJoin(_, _, _, joinType, leftRef, rightRef))
case ThetaJoin(left, right, condition, joinType, combiner) =>
(f(left) |@| f(right))(ThetaJoin(_, _, condition, joinType, combiner))
case Unary(source, mf) =>
f(source).map(Unary(_, mf))
case Map(source, fm) =>
f(source).map(Map(_, fm))
case Read(path, idStatus) =>
(Read(path, idStatus): QScriptUniform[T, B]).point[G]
case Transpose(source, retain, rotations) =>
f(source).map(Transpose(_, retain, rotations))
case LeftShift(source, struct, idStatus, onUndefined, repair, rot) =>
f(source).map(LeftShift(_, struct, idStatus, onUndefined, repair, rot))
case MultiLeftShift(source, shifts, onUndefined, repair) =>
f(source).map(MultiLeftShift(_, shifts, onUndefined, repair))
case LPReduce(source, reduce) =>
f(source).map(LPReduce(_, reduce))
case QSReduce(source, buckets, reducers, repair) =>
f(source).map(QSReduce(_, buckets, reducers, repair))
case Distinct(source) =>
f(source).map(Distinct(_))
case LPSort(source, order) =>
val T = Bitraverse[(?, ?)].leftTraverse[SortDir]
val source2G = f(source)
val orders2G = order.traverse(p => T.traverse(p)(f))
(source2G |@| orders2G)(LPSort(_, _))
case QSSort(source, buckets, order) =>
f(source).map(QSSort(_, buckets, order))
case Union(left, right) =>
(f(left) |@| f(right))(Union(_, _))
case Subset(from, op, count) =>
(f(from) |@| f(count))(Subset(_, op, _))
case LPFilter(source, predicate) =>
(f(source) |@| f(predicate))(LPFilter(_, _))
case QSFilter(source, predicate) =>
f(source).map(QSFilter(_, predicate))
case JoinSideRef(id) => (JoinSideRef(id): QScriptUniform[T, B]).point[G]
case Unreferenced() => (Unreferenced(): QScriptUniform[T, B]).point[G]
}
}
implicit def show[T[_[_]]: ShowT]: Delay[Show, QScriptUniform[T, ?]] =
new Delay[Show, QScriptUniform[T, ?]] {
def apply[A](a: Show[A]) = {
implicit val showA = a
Show shows {
case AutoJoin2(left, right, combiner) =>
s"AutoJoin2(${left.shows}, ${right.shows}, ${combiner.shows})"
case AutoJoin3(left, center, right, combiner) =>
s"AutoJoin3(${left.shows}, ${center.shows}, ${right.shows}, ${combiner.shows})"
case QSAutoJoin(left, right, keys, combiner) =>
s"QSAutoJoin(${left.shows}, ${right.shows}, ${keys.shows}, ${combiner.shows})"
case GroupBy(left, right) =>
s"GroupBy(${left.shows}, ${right.shows})"
case DimEdit(source, dtrans) =>
s"DimEdit(${source.shows}, ${dtrans.shows})"
case LPJoin(left, right, condition, joinType, leftRef, rightRef) =>
s"LPJoin(${left.shows}, ${right.shows}, ${condition.shows}, ${joinType.shows}, ${leftRef.shows}, ${rightRef.shows})"
case ThetaJoin(left, right, condition, joinType, combiner) =>
s"ThetaJoin(${left.shows}, ${right.shows}, ${condition.shows}, ${joinType.shows}, ${combiner.shows})"
case Unary(source, mf) =>
s"Unary(${source.shows}, ${mf.shows})"
case Map(source, fm) =>
s"Map(${source.shows}, ${fm.shows})"
case Read(path, idStatus) =>
s"Read(${Path.posixCodec.printPath(path)}, ${idStatus.shows})"
case Transpose(source, retain, rotations) =>
s"Transpose(${source.shows}, ${retain.shows}, ${rotations.shows})"
case LeftShift(source, struct, idStatus, onUndefined, repair, rot) =>
s"LeftShift(${source.shows}, ${struct.linearize.shows}, ${idStatus.shows}, ${onUndefined.shows}, ${repair.shows}, ${rot.shows})"
case MultiLeftShift(source, shifts, onUndefined, repair) =>
s"MultiLeftShift(${source.shows}, ${shifts.shows}, ${onUndefined.shows}, ${repair.shows})"
case LPReduce(source, reduce) =>
s"LPReduce(${source.shows}, ${reduce.shows})"
case QSReduce(source, buckets, reducers, repair) =>
s"QSReduce(${source.shows}, ${buckets.shows}, ${reducers.shows}, ${repair.shows})"
case Distinct(source) =>
s"Distinct(${source.shows})"
case LPSort(source, order) =>
s"LPSort(${source.shows}, ${order.shows})"
case QSSort(source, buckets, order) =>
s"QSSort(${source.shows}, ${buckets.shows}, ${order.shows})"
case Union(left, right) =>
s"Union(${left.shows}, ${right.shows})"
case Subset(from, op, count) =>
s"Subset(${from.shows}, ${op.shows}, ${count.shows})"
case LPFilter(source, predicate) =>
s"LPFilter(${source.shows}, ${predicate.shows})"
case QSFilter(source, predicate) =>
s"QSFilter(${source.shows}, ${predicate.shows})"
case JoinSideRef(id) =>
s"JoinSideRef(${id.shows})"
case Unreferenced() =>
"⊥"
}
}
}
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
implicit def renderTree[T[_[_]]: RenderTreeT: ShowT]
: Delay[RenderTree, QScriptUniform[T, ?]] = errorNotImplemented
@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
implicit def equal[T[_[_]]: BirecursiveT: EqualT]
: Delay[Equal, QScriptUniform[T, ?]] = errorNotImplemented
final case class AutoJoin2[T[_[_]], A](
left: A,
right: A,
combiner: FreeMapA[T, JoinSide]) extends QScriptUniform[T, A]
final case class AutoJoin3[T[_[_]], A](
left: A,
center: A,
right: A,
combiner: FreeMapA[T, JoinSide3]) extends QScriptUniform[T, A]
final case class QSAutoJoin[T[_[_]], A](
left: A,
right: A,
keys: JoinKeys[IdAccess],
combiner: JoinFunc[T]) extends QScriptUniform[T, A]
final case class GroupBy[T[_[_]], A](
left: A,
right: A) extends QScriptUniform[T, A]
final case class DimEdit[T[_[_]], A](
source: A,
trans: DTrans[T]) extends QScriptUniform[T, A]
sealed trait DTrans[T[_[_]]] extends Product with Serializable
object DTrans {
final case class Squash[T[_[_]]]() extends DTrans[T]
final case class Group[T[_[_]]](getKey: FreeMap[T]) extends DTrans[T]
implicit def show[T[_[_]]: ShowT]: Show[DTrans[T]] =
Show.shows[DTrans[T]] {
case Squash() => "Squash"
case Group(k) => s"Group(${k.shows})"
}
}
// LPish
final case class LPJoin[T[_[_]], A](
left: A,
right: A,
condition: A,
joinType: JoinType,
leftRef: Symbol,
rightRef: Symbol) extends QScriptUniform[T, A]
// QScriptish
final case class ThetaJoin[T[_[_]], A](
left: A,
right: A,
condition: JoinFunc[T],
joinType: JoinType,
combiner: JoinFunc[T]) extends QScriptUniform[T, A]
/**
* This is a non-free (as in monad) variant of Map. We need it
* in ReadLP so that graph compaction is defined, which is required
* because compaction utilizes an `SMap[QSU[Symbol], Symbol]`, which
* isn't valid when the `QSU`s inside the keys are libre.
*/
final case class Unary[T[_[_]], A](
source: A,
mf: MapFunc[T, Hole]) extends QScriptUniform[T, A]
final case class Map[T[_[_]], A](
source: A,
fm: RecFreeMap[T]) extends QScriptUniform[T, A]
final case class Read[T[_[_]], A](
path: AFile,
idStatus: IdStatus) extends QScriptUniform[T, A]
// LPish
final case class Transpose[T[_[_]], A](
source: A,
retain: Retain,
rotations: Rotation) extends QScriptUniform[T, A]
sealed trait Retain extends Product with Serializable {
def fold[A](ids: => A, vals: => A): A = this match {
case Retain.Identities => ids
case Retain.Values => vals
}
}
object Retain {
case object Identities extends Retain
case object Values extends Retain
implicit val enum: Enum[Retain] =
new Enum[Retain] {
def succ(r: Retain) =
r match {
case Identities => Values
case Values => Identities
}
def pred(r: Retain) =
r match {
case Identities => Values
case Values => Identities
}
override val min = Some(Identities)
override val max = Some(Values)
def order(x: Retain, y: Retain) =
Order[Int].order(toInt(x), toInt(y))
val toInt: Retain => Int = {
case Identities => 0
case Values => 1
}
}
implicit val show: Show[Retain] =
Show.showFromToString
}
sealed trait Rotation extends Product with Serializable
object Rotation {
case object FlattenArray extends Rotation
case object ShiftArray extends Rotation
case object FlattenMap extends Rotation
case object ShiftMap extends Rotation
implicit val enum: Enum[Rotation] =
new Enum[Rotation] {
def succ(r: Rotation) =
r match {
case FlattenArray => ShiftArray
case ShiftArray => FlattenMap
case FlattenMap => ShiftMap
case ShiftMap => FlattenArray
}
def pred(r: Rotation) =
r match {
case FlattenArray => ShiftMap
case ShiftArray => FlattenArray
case FlattenMap => ShiftArray
case ShiftMap => FlattenMap
}
override val min = Some(FlattenArray)
override val max = Some(ShiftMap)
def order(x: Rotation, y: Rotation) =
Order[Int].order(toInt(x), toInt(y))
val toInt: Rotation => Int = {
case FlattenArray => 0
case ShiftArray => 1
case FlattenMap => 2
case ShiftMap => 3
}
}
implicit val show: Show[Rotation] =
Show.showFromToString
}
sealed trait ShiftTarget extends Product with Serializable
object ShiftTarget {
case object LeftTarget extends ShiftTarget
case object RightTarget extends ShiftTarget
final case class AccessLeftTarget(access: Access[Hole]) extends ShiftTarget
implicit val equalShiftTarget: Equal[ShiftTarget] = Equal.equal {
case (AccessLeftTarget(access1), AccessLeftTarget(access2)) => access1 ≟ access2
case (LeftTarget, LeftTarget) => true
case (RightTarget, RightTarget) => true
case _ => false
}
implicit val showShiftTarget: Show[ShiftTarget] = Show.shows {
case LeftTarget => "LeftTarget"
case RightTarget => "RightTarget"
case AccessLeftTarget(access) => s"AccessLeftTarget(${access.shows})"
}
implicit val renderShiftTarget: RenderTree[ShiftTarget] = RenderTree.make {
case LeftTarget =>
RenderedTree("ShiftTarget" :: Nil, "LeftTarget".some, Nil)
case RightTarget =>
RenderedTree("ShiftTarget" :: Nil, "RightTarget".some, Nil)
case AccessLeftTarget(access) =>
RenderedTree("ShiftTarget" :: Nil, "AccessLeftTarget".some, RenderTree[Access[Hole]].render(access) :: Nil)
}
}
// QScriptish
final case class LeftShift[T[_[_]], A](
source: A,
struct: RecFreeMap[T],
idStatus: IdStatus,
onUndefined: OnUndefined,
repair: FreeMapA[T, ShiftTarget],
rot: Rotation) extends QScriptUniform[T, A]
// shifting multiple structs on the same source;
// horizontal composition of LeftShifts
final case class MultiLeftShift[T[_[_]], A](
source: A,
// TODO: NEL
shifts: List[(FreeMap[T], IdStatus, Rotation)],
onUndefined: OnUndefined,
repair: FreeMapA[T, Access[Hole] \/ Int]) extends QScriptUniform[T, A]
// LPish
final case class LPReduce[T[_[_]], A](
source: A,
reduce: ReduceFunc[Unit]) extends QScriptUniform[T, A]
// QScriptish
final case class QSReduce[T[_[_]], A](
source: A,
buckets: List[FreeMapA[T, Access[Hole]]],
// TODO: NEL
reducers: List[ReduceFunc[FreeMap[T]]],
repair: FreeMapA[T, ReduceIndex]) extends QScriptUniform[T, A]
final case class Distinct[T[_[_]], A](source: A) extends QScriptUniform[T, A]
// LPish
final case class LPSort[T[_[_]], A](
source: A,
order: NEL[(A, SortDir)]) extends QScriptUniform[T, A]
// QScriptish
final case class QSSort[T[_[_]], A](
source: A,
buckets: List[FreeMapA[T, Access[Hole]]],
order: NEL[(FreeMap[T], SortDir)]) extends QScriptUniform[T, A]
final case class Union[T[_[_]], A](left: A, right: A) extends QScriptUniform[T, A]
final case class Subset[T[_[_]], A](
from: A,
op: SelectionOp,
count: A) extends QScriptUniform[T, A]
// LPish
final case class LPFilter[T[_[_]], A](
source: A,
predicate: A) extends QScriptUniform[T, A]
// QScriptish
final case class QSFilter[T[_[_]], A](
source: A,
predicate: RecFreeMap[T]) extends QScriptUniform[T, A]
final case class Unreferenced[T[_[_]], A]() extends QScriptUniform[T, A]
final case class JoinSideRef[T[_[_]], A](id: Symbol) extends QScriptUniform[T, A]
final class Optics[T[_[_]]] private () extends QSUTTypes[T] {
def autojoin2[A]: Prism[QScriptUniform[A], (A, A, FreeMapA[JoinSide])] =
Prism.partial[QScriptUniform[A], (A, A, FreeMapA[JoinSide])] {
case AutoJoin2(left, right, func) => (left, right, func)
} { case (left, right, func) => AutoJoin2(left, right, func) }
def autojoin3[A]: Prism[QScriptUniform[A], (A, A, A, FreeMapA[JoinSide3])] =
Prism.partial[QScriptUniform[A], (A, A, A, FreeMapA[JoinSide3])] {
case AutoJoin3(left, center, right, func) => (left, center, right, func)
} { case (left, center, right, func) => AutoJoin3(left, center, right, func) }
def dimEdit[A]: Prism[QScriptUniform[A], (A, DTrans[T])] =
Prism.partial[QScriptUniform[A], (A, DTrans[T])] {
case DimEdit(a, dt) => (a, dt)
} { case (a, dt) => DimEdit(a, dt) }
def distinct[A]: Prism[QScriptUniform[A], A] =
Prism.partial[QScriptUniform[A], A] {
case Distinct(a) => a
} (Distinct(_))
def groupBy[A]: Prism[QScriptUniform[A], (A, A)] =
Prism.partial[QScriptUniform[A], (A, A)] {
case GroupBy(l, r) => (l, r)
} { case (l, r) => GroupBy(l, r) }
def joinSideRef[A]: Prism[QScriptUniform[A], Symbol] =
Prism.partial[QScriptUniform[A], Symbol] {
case JoinSideRef(s) => s
} (JoinSideRef(_))
def leftShift[A]: Prism[QScriptUniform[A], (A, RecFreeMap, IdStatus, OnUndefined, FreeMapA[ShiftTarget], Rotation)] =
Prism.partial[QScriptUniform[A], (A, RecFreeMap, IdStatus, OnUndefined, FreeMapA[ShiftTarget], Rotation)] {
case LeftShift(s, fm, ids, ou, jf, rot) => (s, fm, ids, ou, jf, rot)
} { case (s, fm, ids, ou, jf, rot) => LeftShift(s, fm, ids, ou, jf, rot) }
def multiLeftShift[A]: Prism[QScriptUniform[A], (A, List[(FreeMap, IdStatus, Rotation)], OnUndefined, FreeMapA[Access[Hole] \/ Int])] =
Prism.partial[QScriptUniform[A], (A, List[(FreeMap, IdStatus, Rotation)], OnUndefined, FreeMapA[Access[Hole] \/ Int])] {
case MultiLeftShift(s, ss, ou, map) => (s, ss, ou, map)
} { case (s, ss, ou, map) => MultiLeftShift(s, ss, ou, map) }
def lpFilter[A]: Prism[QScriptUniform[A], (A, A)] =
Prism.partial[QScriptUniform[A], (A, A)] {
case LPFilter(s, p) => (s, p)
} { case (s, p) => LPFilter(s, p) }
def lpJoin[A]: Prism[QScriptUniform[A], (A, A, A, JoinType, Symbol, Symbol)] =
Prism.partial[QScriptUniform[A], (A, A, A, JoinType, Symbol, Symbol)] {
case LPJoin(l, r, c, t, lr, rr) => (l, r, c, t, lr, rr)
} { case (l, r, c, t, lr, rr) => LPJoin(l, r, c, t, lr, rr) }
def lpReduce[A]: Prism[QScriptUniform[A], (A, ReduceFunc[Unit])] =
Prism.partial[QScriptUniform[A], (A, ReduceFunc[Unit])] {
case LPReduce(a, rf) => (a, rf)
} { case (a, rf) => LPReduce(a, rf) }
def lpSort[A]: Prism[QScriptUniform[A], (A, NEL[(A, SortDir)])] =
Prism.partial[QScriptUniform[A], (A, NEL[(A, SortDir)])] {
case LPSort(a, keys) => (a, keys)
} { case (a, keys) => LPSort(a, keys) }
def unary[A]: Prism[QScriptUniform[A], (A, MapFunc[Hole])] =
Prism.partial[QScriptUniform[A], (A, MapFunc[Hole])] {
case Unary(a, mf) => (a, mf)
} { case (a, mf) => Unary(a, mf) }
def map[A]: Prism[QScriptUniform[A], (A, RecFreeMap)] =
Prism.partial[QScriptUniform[A], (A, RecFreeMap)] {
case Map(a, fm) => (a, fm)
} { case (a, fm) => Map(a, fm) }
def qsAutoJoin[A]: Prism[QScriptUniform[A], (A, A, JoinKeys[IdAccess], JoinFunc)] =
Prism.partial[QScriptUniform[A], (A, A, JoinKeys[IdAccess], JoinFunc)] {
case QSAutoJoin(l, r, ks, c) => (l, r, ks, c)
} { case (l, r, ks, c) => QSAutoJoin(l, r, ks, c) }
def qsFilter[A]: Prism[QScriptUniform[A], (A, RecFreeMap)] =
Prism.partial[QScriptUniform[A], (A, RecFreeMap)] {
case QSFilter(a, p) => (a, p)
} { case (a, p) => QSFilter(a, p) }
def qsReduce[A]: Prism[QScriptUniform[A], (A, List[FreeAccess[Hole]], List[ReduceFunc[FreeMap]], FreeMapA[ReduceIndex])] =
Prism.partial[QScriptUniform[A], (A, List[FreeAccess[Hole]], List[ReduceFunc[FreeMap]], FreeMapA[ReduceIndex])] {
case QSReduce(a, bs, rfs, rep) => (a, bs, rfs, rep)
} { case (a, bs, rfs, rep) => QSReduce(a, bs, rfs, rep) }
def qsSort[A]: Prism[QScriptUniform[A], (A, List[FreeAccess[Hole]], NEL[(FreeMap, SortDir)])] =
Prism.partial[QScriptUniform[A], (A, List[FreeAccess[Hole]], NEL[(FreeMap, SortDir)])] {
case QSSort(a, buckets, keys) => (a, buckets, keys)
} { case (a, buckets, keys) => QSSort(a, buckets, keys) }
def read[A]: Prism[QScriptUniform[A], (AFile, IdStatus)] =
Prism.partial[QScriptUniform[A], (AFile, IdStatus)] {
case Read(f, s) => (f, s)
} { case (f, s) => Read(f, s) }
def subset[A]: Prism[QScriptUniform[A], (A, SelectionOp, A)] =
Prism.partial[QScriptUniform[A], (A, SelectionOp, A)] {
case Subset(f, op, c) => (f, op, c)
} { case (f, op, c) => Subset(f, op, c) }
def thetaJoin[A]: Prism[QScriptUniform[A], (A, A, JoinFunc, JoinType, JoinFunc)] =
Prism.partial[QScriptUniform[A], (A, A, JoinFunc, JoinType, JoinFunc)] {
case ThetaJoin(l, r, c, t, b) => (l, r, c, t, b)
} { case (l, r, c, t, b) => ThetaJoin(l, r, c, t, b) }
def transpose[A]: Prism[QScriptUniform[A], (A, Retain, Rotation)] =
Prism.partial[QScriptUniform[A], (A, Retain, Rotation)] {
case Transpose(a, ret, rot) => (a, ret, rot)
} { case (a, ret, rot) => Transpose(a, ret, rot) }
def union[A]: Prism[QScriptUniform[A], (A, A)] =
Prism.partial[QScriptUniform[A], (A, A)] {
case Union(l, r) => (l, r)
} { case (l, r) => Union(l, r) }
def unreferenced[A]: Prism[QScriptUniform[A], Unit] =
Prism.partial[QScriptUniform[A], Unit] {
case Unreferenced() => ()
} (κ(Unreferenced()))
def holes[A, B]: PTraversal[QScriptUniform[A], QScriptUniform[B], A, B] =
PTraversal.fromTraverse[QScriptUniform, A, B]
}
object Optics {
def apply[T[_[_]]]: Optics[T] = new Optics[T]
}
sealed abstract class Dsl[T[_[_]]: BirecursiveT, F[_]: Functor, A] extends QSUTTypes[T] {
import Scalaz._
val iso: Iso[A, F[QScriptUniform[A]]]
def lifting[S, A]: Prism[S, A] => Prism[F[S], F[A]]
val recFunc = construction.RecFunc[T]
type Bin[A] = (A, A) => Binary[T, A]
type Tri[A] = (A, A, A) => Ternary[T, A]
private val O = Optics[T]
def mfc[A] = PrismNT.injectCopK[MapFuncCore, MapFunc].asPrism[A]
private def composeLifting[G[_]](optic: Prism[QScriptUniform[A], G[A]]) =
iso composePrism lifting[QScriptUniform[A], G[A]](optic)
def _autojoin2: Prism[A, F[(A, A, FreeMapA[JoinSide])]] = {
type G[A] = (A, A, FreeMapA[JoinSide])
composeLifting[G](O.autojoin2[A])
}
def _autojoin3: Prism[A, F[(A, A, A, FreeMapA[JoinSide3])]] = {
type G[A] = (A, A, A, FreeMapA[JoinSide3])
composeLifting[G](O.autojoin3[A])
}
def autojoin2(input: F[(A, A, Forall.CPS[Bin])]): A =
_autojoin2(input.map {
case (left, right, combiner) =>
(left, right,
Free.liftF(mfc(Forall[Bin](combiner)[JoinSide](LeftSide, RightSide))))
})
def autojoin3(input: F[(A, A, A, Forall.CPS[Tri])]): A =
_autojoin3(input.map {
case (left, center, right, combiner) =>
(left, center, right,
Free.liftF(mfc(Forall[Tri](combiner)[JoinSide3](LeftSide3, Center, RightSide3))))
})
def dimEdit: Prism[A, F[(A, DTrans[T])]] =
composeLifting[(?, DTrans[T])](O.dimEdit[A])
def distinct: Prism[A, F[A]] =
composeLifting[Id](O.distinct[A])
def groupBy: Prism[A, F[(A, A)]] = {
type G[A] = (A, A)
composeLifting[G](O.groupBy[A])
}
def joinSideRef: Prism[A, F[Symbol]] = {
type G[A] = Symbol
composeLifting[G](O.joinSideRef[A])
}
def leftShift: Prism[A, F[(A, RecFreeMap, IdStatus, OnUndefined, FreeMapA[ShiftTarget], Rotation)]] = {
composeLifting[(?, RecFreeMap, IdStatus, OnUndefined, FreeMapA[ShiftTarget], Rotation)](O.leftShift[A])
}
def multiLeftShift: Prism[A, F[(A, List[(FreeMap, IdStatus, Rotation)], OnUndefined, FreeMapA[Access[Hole] \/ Int])]] = {
composeLifting[(?, List[(FreeMap, IdStatus, Rotation)], OnUndefined, FreeMapA[Access[Hole] \/ Int])](O.multiLeftShift[A])
}
def lpFilter: Prism[A, F[(A, A)]] = {
type G[A] = (A, A)
composeLifting[G](O.lpFilter[A])
}
def lpJoin: Prism[A, F[(A, A, A, JoinType, Symbol, Symbol)]] = {
type G[A] = (A, A, A, JoinType, Symbol, Symbol)
composeLifting[G](O.lpJoin[A])
}
def lpReduce: Prism[A, F[(A, ReduceFunc[Unit])]] =
composeLifting[(?, ReduceFunc[Unit])](O.lpReduce[A])
def lpSort: Prism[A, F[(A, NEL[(A, SortDir)])]] = {
type G[A] = (A, NEL[(A, SortDir)])
composeLifting[G](O.lpSort[A])
}
def unary: Prism[A, F[(A, MapFunc[Hole])]] =
composeLifting[(?, MapFunc[Hole])](O.unary[A])
def map: Prism[A, F[(A, RecFreeMap)]] =
composeLifting[(?, RecFreeMap)](O.map[A])
def map1(pair: F[(A, MapFuncCore[Hole])]): A =
map(pair.map {
case(src, f) => (src, RecFreeS.roll(mfc(f.as(recFunc.Hole))))
})
def qsAutoJoin: Prism[A, F[(A, A, JoinKeys[IdAccess], JoinFunc)]] = {
type G[A] = (A, A, JoinKeys[IdAccess], JoinFunc)
composeLifting[G](O.qsAutoJoin[A])
}
def qsFilter: Prism[A, F[(A, RecFreeMap)]] =
composeLifting[(?, RecFreeMap)](O.qsFilter[A])
def qsReduce: Prism[A, F[(A, List[FreeAccess[Hole]], List[ReduceFunc[FreeMap]], FreeMapA[ReduceIndex])]] =
composeLifting[(?, List[FreeAccess[Hole]], List[ReduceFunc[FreeMap]], FreeMapA[ReduceIndex])](O.qsReduce[A])
def qsSort: Prism[A, F[(A, List[FreeAccess[Hole]], NEL[(FreeMap, SortDir)])]] =
composeLifting[(?, List[FreeAccess[Hole]], NEL[(FreeMap, SortDir)])](O.qsSort[A])
def read: Prism[A, F[(AFile, IdStatus)]] = {
type G[_] = (AFile, IdStatus)
composeLifting[G](O.read[A])
}
def subset: Prism[A, F[(A, SelectionOp, A)]] = {
type G[A] = (A, SelectionOp, A)
composeLifting[G](O.subset[A])
}
def thetaJoin: Prism[A, F[(A, A, JoinFunc, JoinType, JoinFunc)]] = {
type G[A] = (A, A, JoinFunc, JoinType, JoinFunc)
composeLifting[G](O.thetaJoin[A])
}
def transpose: Prism[A, F[(A, Retain, Rotation)]] =
composeLifting[(?, Retain, Rotation)](O.transpose[A])
def union: Prism[A, F[(A, A)]] = {
type G[A] = (A, A)
composeLifting[G](O.union[A])
}
def unreferenced: Prism[A, F[Unit]] = {
type G[_] = Unit
composeLifting[G](O.unreferenced[A])
}
}
sealed abstract class DslT[T[_[_]]: BirecursiveT] private () extends Dsl[T, Id.Id, T[QScriptUniform[T, ?]]] {
type QSU[A] = QScriptUniform[A]
private val J = Fixed[T[EJson]]
// read
def tread(file: AFile): T[QSU] =
read((file, IdStatus.ExcludeId))
def tread1(name: String): T[QSU] =
tread(Path.rootDir </> Path.file(name))
// undefined
val undefined: Prism[T[QSU], Unit] =
Prism[T[QSU], Unit](map.getOption(_) collect {
case (Unreferenced(), Embed(CoEnv(\/-(Suspend(MFC(MapFuncsCore.Undefined())))))) => ()
})(_ => map(unreferenced(), recFunc.Undefined[Hole]))
// constants
val constant: Prism[T[QSU], T[EJson]] =
Prism[T[QSU], T[EJson]](map.getOption(_) collect {
case (Unreferenced(), Embed(CoEnv(\/-(Suspend(MFC(MapFuncsCore.Constant(ejs))))))) => ejs
})(ejs => map(unreferenced(), recFunc.Constant[Hole](ejs)))
val carr: Prism[T[QSU], List[T[EJson]]] =
constant composePrism J.arr
val cbool: Prism[T[QSU], Boolean] =
constant composePrism J.bool
val cchar: Prism[T[QSU], Char] =
constant composePrism J.char
val cdec: Prism[T[QSU], BigDecimal] =
constant composePrism J.dec
val cint: Prism[T[QSU], BigInt] =
constant composePrism J.int
val cmap: Prism[T[QSU], List[(T[EJson], T[EJson])]] =
constant composePrism J.map
val cmeta: Prism[T[QSU], (T[EJson], T[EJson])] =
constant composePrism J.meta
val cnull: Prism[T[QSU], Unit] =
constant composePrism J.nul
val cstr: Prism[T[QSU], String] =
constant composePrism J.str
}
object DslT {
def apply[T[_[_]]: BirecursiveT]: DslT[T] =
new DslT {
val iso: Iso[T[QSU], QSU[T[QSU]]] = birecursiveIso[T[QSU], QSU]
def lifting[S, A]: Prism[S, A] => Prism[S, A] = ι
}
}
object AnnotatedDsl {
import Scalaz._
def apply[T[_[_]]: BirecursiveT, A]
: Dsl[T, (A, ?), Cofree[QScriptUniform[T, ?], A]] = {
type QSU[B] = QScriptUniform[T, B]
type CoQSU = Cofree[QSU, A]
new Dsl[T, (A, ?), CoQSU] {
val iso: Iso[CoQSU, (A, QSU[CoQSU])] =
birecursiveIso[CoQSU, EnvT[A, QSU, ?]]
.composeIso(envTIso[A, QSU, CoQSU])
def lifting[S, B]: Prism[S, B] => Prism[(A, S), (A, B)] =
_.second[A]
}
}
}
}
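A small sketch of the Optics constructors defined above, assuming matryoshka's Fix as the fixed-point type: it builds a GroupBy node over plain Int child positions and matches it back out through the same prism.
import matryoshka.data.Fix
import quasar.qsu.QScriptUniform
object QSUOpticsSketch {
private val O = QScriptUniform.Optics[Fix]
// Construct a node whose recursive positions are plain Ints...
val node: QScriptUniform[Fix, Int] = O.groupBy[Int].reverseGet((1, 2))
// ...and take it apart again with the same prism; yields Some((1, 2)).
val parts: Option[(Int, Int)] = O.groupBy[Int].getOption(node)
}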
|
slamdata/slamengine
|
qsu/src/main/scala/quasar/qsu/QScriptUniform.scala
|
Scala
|
apache-2.0
| 29,125 |
import stainless.lang._
import stainless.annotation._
object MutateInside13 {
case class Mut[@mutable T](var t: T)
case class Thing[@mutable T](var field: T)
def change_thing[@mutable T](mut: Mut[Thing[T]], v: T) = {
mut.t = Thing(freshCopy(v))
}
def main() = {
val thing = Thing(123)
{
val mut = Mut(thing)
change_thing(mut, 789)
mut.t.field = 500
}
assert(thing.field == 123)
}
}
|
epfl-lara/stainless
|
frontends/benchmarks/extraction/invalid/MutateInside13.scala
|
Scala
|
apache-2.0
| 437 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package complete
import jline.console.ConsoleReader
import jline.console.completer.{ Completer, CompletionHandler }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
object JLineCompletion {
def installCustomCompletor(reader: ConsoleReader, parser: Parser[_]): Unit =
installCustomCompletor(reader)(parserAsCompletor(parser))
def installCustomCompletor(reader: ConsoleReader)(
complete: (String, Int) => (Seq[String], Seq[String])
): Unit =
installCustomCompletor(customCompletor(complete), reader)
def installCustomCompletor(
complete: (ConsoleReader, Int) => Boolean,
reader: ConsoleReader
): Unit = {
reader.removeCompleter(DummyCompletor)
reader.addCompleter(DummyCompletor)
reader.setCompletionHandler(new CustomHandler(complete))
}
private[this] final class CustomHandler(completeImpl: (ConsoleReader, Int) => Boolean)
extends CompletionHandler {
private[this] var previous: Option[(String, Int)] = None
private[this] var level: Int = 1
override def complete(
reader: ConsoleReader,
candidates: java.util.List[CharSequence],
position: Int
) = {
val current = Some(bufferSnapshot(reader))
level = if (current == previous) level + 1 else 1
previous = current
try completeImpl(reader, level)
catch {
case e: Exception =>
reader.print("\nException occurred while determining completions.")
e.printStackTrace()
false
}
}
}
// always provides dummy completions so that the custom completion handler gets called
// (ConsoleReader doesn't call the handler if there aren't any completions)
// the custom handler will then throw away the candidates and call the custom function
private[this] final object DummyCompletor extends Completer {
override def complete(
buffer: String,
cursor: Int,
candidates: java.util.List[CharSequence]
): Int = {
candidates.asInstanceOf[java.util.List[String]] add "dummy"
0
}
}
def parserAsCompletor(p: Parser[_]): (String, Int) => (Seq[String], Seq[String]) =
(str, level) => convertCompletions(Parser.completions(p, str, level))
def convertCompletions(c: Completions): (Seq[String], Seq[String]) = {
val cs = c.get
if (cs.isEmpty)
(Nil, "{invalid input}" :: Nil)
else
convertCompletions(cs)
}
def convertCompletions(cs: Set[Completion]): (Seq[String], Seq[String]) = {
val (insert, display) =
cs.foldLeft((Set.empty[String], Set.empty[String])) {
case (t @ (insert, display), comp) =>
if (comp.isEmpty) t
else (appendNonEmpty(insert, comp.append), appendNonEmpty(display, comp.display))
}
(insert.toSeq, display.toSeq.sorted)
}
def appendNonEmpty(set: Set[String], add: String) = if (add.trim.isEmpty) set else set + add
def customCompletor(
f: (String, Int) => (Seq[String], Seq[String])
): (ConsoleReader, Int) => Boolean =
(reader, level) => {
val success = complete(beforeCursor(reader), string => f(string, level), reader)
reader.flush()
success
}
def bufferSnapshot(reader: ConsoleReader): (String, Int) = {
val b = reader.getCursorBuffer
(b.buffer.toString, b.cursor)
}
def beforeCursor(reader: ConsoleReader): String = {
val b = reader.getCursorBuffer
b.buffer.substring(0, b.cursor)
}
// returns false if there was nothing to insert and nothing to display
def complete(
beforeCursor: String,
completions: String => (Seq[String], Seq[String]),
reader: ConsoleReader
): Boolean = {
val (insert, display) = completions(beforeCursor)
val common = commonPrefix(insert)
if (common.isEmpty)
if (display.isEmpty)
()
else
showCompletions(display, reader)
else
appendCompletion(common, reader)
!(common.isEmpty && display.isEmpty)
}
def appendCompletion(common: String, reader: ConsoleReader): Unit = {
reader.getCursorBuffer.write(common)
reader.redrawLine()
}
/**
* `display` is assumed to be the exact strings requested to be displayed.
* In particular, duplicates should have been removed already.
*/
def showCompletions(display: Seq[String], reader: ConsoleReader): Unit = {
printCompletions(display, reader)
reader.drawLine()
}
def printCompletions(cs: Seq[String], reader: ConsoleReader): Unit = {
val print = shouldPrint(cs, reader)
reader.println()
if (print) printLinesAndColumns(cs, reader)
}
def printLinesAndColumns(cs: Seq[String], reader: ConsoleReader): Unit = {
val (lines, columns) = cs partition hasNewline
for (line <- lines) {
reader.print(line)
if (line.charAt(line.length - 1) != '\n')
reader.println()
}
reader.printColumns(columns.map(_.trim).asJava)
}
def hasNewline(s: String): Boolean = s.indexOf('\n') >= 0
def shouldPrint(cs: Seq[String], reader: ConsoleReader): Boolean = {
val size = cs.size
(size <= reader.getAutoprintThreshold) ||
confirm("Display all %d possibilities? (y or n) ".format(size), 'y', 'n', reader)
}
def confirm(prompt: String, trueC: Char, falseC: Char, reader: ConsoleReader): Boolean = {
reader.println()
reader.print(prompt)
reader.flush()
reader.readCharacter(trueC, falseC) == trueC
}
def commonPrefix(s: Seq[String]): String = if (s.isEmpty) "" else s reduceLeft commonPrefix
def commonPrefix(a: String, b: String): String = {
val len = scala.math.min(a.length, b.length)
@tailrec def loop(i: Int): Int = if (i >= len) len else if (a(i) != b(i)) i else loop(i + 1)
a.substring(0, loop(0))
}
}
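A sketch of wiring the custom completor into a JLine ConsoleReader with a hand-rolled completion function (the parser-backed path would go through parserAsCompletor instead); the candidate words are illustrative only.
import jline.console.ConsoleReader
import sbt.internal.util.complete.JLineCompletion
object CompletorSketch {
def main(args: Array[String]): Unit = {
val reader = new ConsoleReader()
val words = Seq("clean", "compile", "console", "test")
// Insertions are the remaining suffixes; display strings are the full candidates.
JLineCompletion.installCustomCompletor(reader) { (buffer, _) =>
val matching = words.filter(_.startsWith(buffer))
(matching.map(_.stripPrefix(buffer)), matching)
}
println("read: " + reader.readLine("sketch> "))
}
}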
|
sbt/sbt
|
internal/util-complete/src/main/scala/sbt/internal/util/complete/JLineCompletion.scala
|
Scala
|
apache-2.0
| 5,914 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.internal.util
package appmacro
import scala.reflect._
import macros._
import ContextUtil.{ DynamicDependencyError, DynamicReferenceError }
object ContextUtil {
final val DynamicDependencyError = "Illegal dynamic dependency"
final val DynamicReferenceError = "Illegal dynamic reference"
/**
* Constructs an object with utility methods for operating in the provided macro context `c`.
* Callers should explicitly specify the type parameter as `c.type` in order to preserve the path dependent types.
*/
def apply[C <: blackbox.Context with Singleton](c: C): ContextUtil[C] = new ContextUtil(c: C)
/**
* Helper for implementing a no-argument macro that is introduced via an implicit.
* This method removes the implicit conversion and evaluates the function `f` on the target of the conversion.
*
* Given `myImplicitConversion(someValue).extensionMethod`, where `extensionMethod` is a macro that uses this
* method, the result of this method is `f(<Tree of someValue>)`.
*/
def selectMacroImpl[T: c.WeakTypeTag](
c: blackbox.Context
)(f: (c.Expr[Any], c.Position) => c.Expr[T]): c.Expr[T] = {
import c.universe._
c.macroApplication match {
case s @ Select(Apply(_, t :: Nil), _) => f(c.Expr[Any](t), s.pos)
case a @ Apply(_, t :: Nil) => f(c.Expr[Any](t), a.pos)
case x => unexpectedTree(x)
}
}
def unexpectedTree[C <: blackbox.Context](tree: C#Tree): Nothing =
sys.error("Unexpected macro application tree (" + tree.getClass + "): " + tree)
}
/**
* Utility methods for macros. Several methods assume that the context's universe is a full compiler
* (`scala.tools.nsc.Global`).
* This is not thread safe due to the underlying Context and related data structures not being thread safe.
* Use `ContextUtil[c.type](c)` to construct.
*/
final class ContextUtil[C <: blackbox.Context](val ctx: C) {
import ctx.universe.{ Apply => ApplyTree, _ }
import internal.decorators._
val powerContext = ctx.asInstanceOf[reflect.macros.runtime.Context]
val global: powerContext.universe.type = powerContext.universe
def callsiteTyper: global.analyzer.Typer = powerContext.callsiteTyper
val initialOwner: Symbol = callsiteTyper.context.owner.asInstanceOf[ctx.universe.Symbol]
lazy val alistType = ctx.typeOf[AList[KList]]
lazy val alist: Symbol = alistType.typeSymbol.companion
lazy val alistTC: Type = alistType.typeConstructor
/** Modifiers for a local val.*/
lazy val localModifiers = Modifiers(NoFlags)
def getPos(sym: Symbol) = if (sym eq null) NoPosition else sym.pos
/**
* Constructs a unique term name with the given prefix within this Context.
* (The current implementation uses Context.freshName, which increments a counter to generate a unique suffix.)
*/
def freshTermName(prefix: String) = TermName(ctx.freshName("$" + prefix))
/**
* Constructs a new, synthetic, local ValDef with Type `tpe`, a unique name,
* Position `pos`, an empty implementation (no rhs), and owned by `owner`.
*/
def freshValDef(tpe: Type, pos: Position, owner: Symbol): ValDef = {
val SYNTHETIC = (1 << 21).toLong.asInstanceOf[FlagSet]
val sym = owner.newTermSymbol(freshTermName("q"), pos, SYNTHETIC)
setInfo(sym, tpe)
val vd = internal.valDef(sym, EmptyTree)
vd.setPos(pos)
vd
}
lazy val parameterModifiers = Modifiers(Flag.PARAM)
/**
* Collects all definitions in the tree for use in checkReferences.
* This excludes definitions in wrapped expressions because checkReferences won't allow nested dereferencing anyway.
*/
def collectDefs(
tree: Tree,
isWrapper: (String, Type, Tree) => Boolean
): collection.Set[Symbol] = {
val defs = new collection.mutable.HashSet[Symbol]
// adds the symbols for all non-Ident subtrees to `defs`.
val process = new Traverser {
override def traverse(t: Tree) = t match {
case _: Ident => ()
case ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil)
if isWrapper(nme.decodedName.toString, tpe.tpe, qual) =>
()
case tree =>
if (tree.symbol ne null) {
defs += tree.symbol
()
}
super.traverse(tree)
}
}
process.traverse(tree)
defs
}
/**
* A reference is illegal if it is to an M instance defined within the scope of the macro call.
* As an approximation, disallow referenced to any local definitions `defs`.
*/
def illegalReference(defs: collection.Set[Symbol], sym: Symbol, mType: Type): Boolean =
sym != null && sym != NoSymbol && defs.contains(sym) && {
sym match {
case m: MethodSymbol => m.returnType.erasure <:< mType
case _ => sym.typeSignature <:< mType
}
}
/**
* A reference is illegal if it is to an M instance defined within the scope of the macro call.
* As an approximation, disallow referenced to any local definitions `defs`.
*/
def illegalReference(defs: collection.Set[Symbol], sym: Symbol): Boolean =
illegalReference(defs, sym, weakTypeOf[Any])
type PropertyChecker = (String, Type, Tree) => Boolean
/**
* A function that checks the provided tree for illegal references to M instances defined in the
* expression passed to the macro and for illegal dereferencing of M instances.
*/
def checkReferences(
defs: collection.Set[Symbol],
isWrapper: PropertyChecker,
mType: Type
): Tree => Unit = {
case s @ ApplyTree(TypeApply(Select(_, nme), tpe :: Nil), qual :: Nil) =>
if (isWrapper(nme.decodedName.toString, tpe.tpe, qual)) {
ctx.error(s.pos, DynamicDependencyError)
}
case id @ Ident(name) if illegalReference(defs, id.symbol, mType) =>
ctx.error(id.pos, DynamicReferenceError + ": " + name)
case _ => ()
}
@deprecated("Use the variant that specifies the M instance types to exclude", since = "1.3.0")
/**
* A function that checks the provided tree for illegal references to M instances defined in the
* expression passed to the macro and for illegal dereferencing of M instances.
*/
def checkReferences(defs: collection.Set[Symbol], isWrapper: PropertyChecker): Tree => Unit =
checkReferences(defs, isWrapper, weakTypeOf[Any])
/** Constructs a ValDef with a parameter modifier, a unique name, with the provided Type and with an empty rhs. */
def freshMethodParameter(tpe: Type): ValDef =
ValDef(parameterModifiers, freshTermName("p"), TypeTree(tpe), EmptyTree)
/** Constructs a ValDef with local modifiers and a unique name. */
def localValDef(tpt: Tree, rhs: Tree): ValDef =
ValDef(localModifiers, freshTermName("q"), tpt, rhs)
/** Constructs a tuple value of the right TupleN type from the provided inputs.*/
def mkTuple(args: List[Tree]): Tree =
global.gen.mkTuple(args.asInstanceOf[List[global.Tree]]).asInstanceOf[ctx.universe.Tree]
def setSymbol[_Tree](t: _Tree, sym: Symbol): Unit = {
t.asInstanceOf[global.Tree].setSymbol(sym.asInstanceOf[global.Symbol])
()
}
def setInfo(sym: Symbol, tpe: Type): Unit = {
sym.asInstanceOf[global.Symbol].setInfo(tpe.asInstanceOf[global.Type])
()
}
/** Creates a new, synthetic type variable with the specified `owner`. */
def newTypeVariable(owner: Symbol, prefix: String = "T0"): TypeSymbol =
owner
.asInstanceOf[global.Symbol]
.newSyntheticTypeParam(prefix, 0L)
.asInstanceOf[ctx.universe.TypeSymbol]
/** The type representing the type constructor `[X] X` */
lazy val idTC: Type = {
val tvar = newTypeVariable(NoSymbol)
internal.polyType(tvar :: Nil, refVar(tvar))
}
/** A Type that references the given type variable. */
def refVar(variable: TypeSymbol): Type = variable.toTypeConstructor
/** Constructs a new, synthetic type variable that is a type constructor. For example, in type Y[L[x]], L is such a type variable. */
def newTCVariable(owner: Symbol): TypeSymbol = {
val tc = newTypeVariable(owner)
val arg = newTypeVariable(tc, "x");
tc.setInfo(internal.polyType(arg :: Nil, emptyTypeBounds))
tc
}
/** >: Nothing <: Any */
def emptyTypeBounds: TypeBounds =
internal.typeBounds(definitions.NothingClass.toType, definitions.AnyClass.toType)
/** Creates a new anonymous function symbol with Position `pos`. */
def functionSymbol(pos: Position): Symbol =
callsiteTyper.context.owner
.newAnonymousFunctionValue(pos.asInstanceOf[global.Position])
.asInstanceOf[ctx.universe.Symbol]
def functionType(args: List[Type], result: Type): Type = {
val tpe = global.definitions
.functionType(args.asInstanceOf[List[global.Type]], result.asInstanceOf[global.Type])
tpe.asInstanceOf[Type]
}
/** Create a Tree that references the `val` represented by `vd`, copying attributes from `replaced`. */
def refVal(replaced: Tree, vd: ValDef): Tree =
treeCopy.Ident(replaced, vd.name).setSymbol(vd.symbol)
/** Creates a Function tree using `functionSym` as the Symbol and changing `initialOwner` to `functionSym` in `body`.*/
def createFunction(params: List[ValDef], body: Tree, functionSym: Symbol): Tree = {
changeOwner(body, initialOwner, functionSym)
val f = Function(params, body)
setSymbol(f, functionSym)
f
}
def changeOwner(tree: Tree, prev: Symbol, next: Symbol): Unit =
new ChangeOwnerAndModuleClassTraverser(
prev.asInstanceOf[global.Symbol],
next.asInstanceOf[global.Symbol]
).traverse(tree.asInstanceOf[global.Tree])
// Workaround copied from scala/async: can be removed once https://github.com/scala/scala/pull/3179 is merged.
private[this] class ChangeOwnerAndModuleClassTraverser(
oldowner: global.Symbol,
newowner: global.Symbol
) extends global.ChangeOwnerTraverser(oldowner, newowner) {
override def traverse(tree: global.Tree): Unit = {
tree match {
case _: global.DefTree => change(tree.symbol.moduleClass)
case _ =>
}
super.traverse(tree)
}
}
/** Returns the Symbol that references the statically accessible singleton `i`. */
def singleton[T <: AnyRef with Singleton](i: T)(implicit it: ctx.TypeTag[i.type]): Symbol =
it.tpe match {
case SingleType(_, sym) if !sym.isFreeTerm && sym.isStatic => sym
case x => sys.error("Instance must be static (was " + x + ").")
}
def select(t: Tree, name: String): Tree = Select(t, TermName(name))
/** Returns the symbol for the non-private method named `name` for the class/module `obj`. */
def method(obj: Symbol, name: String): Symbol = {
val ts: Type = obj.typeSignature
val m: global.Symbol = ts.asInstanceOf[global.Type].nonPrivateMember(global.newTermName(name))
m.asInstanceOf[Symbol]
}
/**
* Returns a Type representing the type constructor tcp.<name>. For example, given
* `object Demo { type M[x] = List[x] }`, the call `extractTC(Demo, "M")` will return a type representing
* the type constructor `[x] List[x]`.
*/
def extractTC(tcp: AnyRef with Singleton, name: String)(
implicit it: ctx.TypeTag[tcp.type]
): ctx.Type = {
val itTpe = it.tpe.asInstanceOf[global.Type]
val m = itTpe.nonPrivateMember(global.newTypeName(name))
val tc = itTpe.memberInfo(m).asInstanceOf[ctx.universe.Type]
assert(tc != NoType && tc.takesTypeArgs, "Invalid type constructor: " + tc)
tc
}
/**
* Substitutes wrappers in tree `t` with the result of `subWrapper`.
* A wrapper is a Tree of the form `f[T](v)` for which isWrapper(<Tree of f>, <Underlying Type>, <qual>.target) returns true.
* Typically, `f` is a `Select` or `Ident`.
* The wrapper is replaced with the result of `subWrapper(<Type of T>, <Tree of v>, <wrapper Tree>)`
*/
def transformWrappers(
t: Tree,
subWrapper: (String, Type, Tree, Tree) => Converted[ctx.type]
): Tree = {
// the main tree transformer that replaces calls to InputWrapper.wrap(x) with
// plain Idents that reference the actual input value
object appTransformer extends Transformer {
override def transform(tree: Tree): Tree =
tree match {
case ApplyTree(TypeApply(Select(_, nme), targ :: Nil), qual :: Nil) =>
subWrapper(nme.decodedName.toString, targ.tpe, qual, tree) match {
case Converted.Success(t, finalTx) =>
changeOwner(qual, currentOwner, initialOwner) // Fixes https://github.com/sbt/sbt/issues/1150
finalTx(t)
case Converted.Failure(p, m) => ctx.abort(p, m)
case _: Converted.NotApplicable[_] => super.transform(tree)
}
case _ => super.transform(tree)
}
}
appTransformer.atOwner(initialOwner) {
appTransformer.transform(t)
}
}
}
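A skeleton showing how a blackbox macro implementation might obtain the utility, following the `ContextUtil[c.type](c)` guidance in the class comment; the macro body is deliberately trivial.
import scala.reflect.macros.blackbox
import sbt.internal.util.appmacro.ContextUtil
object ContextUtilSketch {
// Identity macro implementation that only demonstrates constructing the utility.
def traceImpl(c: blackbox.Context)(expr: c.Expr[Any]): c.Expr[Any] = {
val util = ContextUtil[c.type](c)
// util now exposes helpers such as freshTermName, collectDefs and transformWrappers.
val name = util.freshTermName("trace")
c.info(c.enclosingPosition, s"generated name: $name", force = false)
expr
}
}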
|
sbt/sbt
|
core-macros/src/main/scala/sbt/internal/util/appmacro/ContextUtil.scala
|
Scala
|
apache-2.0
| 13,019 |
package com.twitter.cache.guava
import com.google.common.cache.{CacheLoader, CacheBuilder}
import com.twitter.cache.AbstractFutureCacheTest
import com.twitter.util.{Future, Promise}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class GuavaCacheTest extends AbstractFutureCacheTest {
def name: String = "GuavaCache"
def mkCtx(): Ctx = new Ctx {
val guava = CacheBuilder.newBuilder().build[String, Future[String]]()
val cache = new GuavaCache[String, String](guava)
}
def mkCache() =
CacheBuilder
.newBuilder()
.build(
new CacheLoader[String, Future[Int]] {
override def load(k: String): Future[Int] = new Promise[Int]
}
)
test("GuavaCache#fromLoadingCache is interrupt safe") {
val fCache = GuavaCache.fromLoadingCache(mkCache())
interruptSafe(fCache)
}
test("GuavaCache#fromCache is interrupt safe") {
val fCache = GuavaCache.fromCache((_:String) => new Promise[Int], mkCache())
interruptSafe(fCache)
}
def interruptSafe(fCache: (String => Future[Int])) {
val f = fCache("key")
val exn = new Exception
f.raise(exn)
val f2 = fCache("key")
val p = new Promise[Int]
p.become(f2)
assert(p.isInterrupted === None)
}
}
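A sketch of the adapters under test used directly, assuming an inexpensive stand-in for the lookup being memoized; the names are illustrative.
import com.google.common.cache.CacheBuilder
import com.twitter.cache.guava.GuavaCache
import com.twitter.util.Future
object GuavaCacheSketch {
// The lookup to memoize (illustrative stand-in for a remote call).
def lookup(key: String): Future[Int] = Future.value(key.length)
// Wrap a plain Guava cache so repeated keys reuse the cached Future.
val memoized = GuavaCache.fromCache(lookup _, CacheBuilder.newBuilder().build[String, Future[Int]]())
val first = memoized("hello")  // computes and caches
val again = memoized("hello")  // served from the cache
}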
|
travisbrown/util
|
util-cache/src/test/scala/com/twitter/cache/guava/GuavaCacheTest.scala
|
Scala
|
apache-2.0
| 1,298 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.akkastream.example
import akka.actor.{Actor, ActorSystem, Props}
import akka.stream.scaladsl.{Sink, Source}
import org.apache.gearpump.akkastream.GearpumpMaterializer
import org.apache.gearpump.akkastream.graph.GraphPartitioner
import org.apache.gearpump.cluster.main.ArgumentsParser
import org.apache.gearpump.util.AkkaApp
import scala.concurrent.Await
import scala.concurrent.duration._
/**
* Source and Sink are materialized locally.
* Remaining GraphStages are materialized remotely:
* statefulMap, filter, fold, flatMap
*/
object Test extends AkkaApp with ArgumentsParser {
// scalastyle:off println
override def main(akkaConf: Config, args: Array[String]): Unit = {
implicit val system = ActorSystem("Test", akkaConf)
implicit val materializer = GearpumpMaterializer(GraphPartitioner.AllRemoteStrategy)
val echo = system.actorOf(Props(new Echo()))
val sink = Sink.actorRef(echo, "COMPLETE")
Source(
List("red hat", "yellow sweater", "blue jack", "red apple", "green plant", "blue sky")
).filter(_.startsWith("red")).fold("Items:") {(a, b) =>
a + "|" + b
}.map("I want to order item: " + _).runWith(sink)
Await.result(system.whenTerminated, 60.minutes)
}
class Echo extends Actor {
def receive: Receive = {
case any: AnyRef =>
println("Confirm received: " + any)
}
}
// scalastyle:on println
}
|
manuzhang/incubator-gearpump
|
experiments/akkastream/src/main/scala/org/apache/gearpump/akkastream/example/Test.scala
|
Scala
|
apache-2.0
| 2,226 |
package http
sealed trait GeneralHeader {
val asString: String
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9">§</a>
*/
final case object CacheControl extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = "Cache-Control"
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.10">§</a>
*/
final case object Connection extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.18">§</a>
*/
final case object Date extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.32">§</a>
*/
final case object Pragma extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.40">§</a>
*/
final case object Trailer extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.41">§</a>
*/
final case object TransferEncoding extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = "Transfer-Encoding"
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.42">§</a>
*/
final case object Upgrade extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.45">§</a>
*/
final case object Via extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
/**
* <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.46">§</a>
*/
final case object Warning extends GeneralHeader {
/**
* A string representation of this general header.
*/
override val asString = toString
}
object GeneralHeader {
def from(s: String) = s.toLowerCase match {
case "cache-control" => Some(CacheControl)
case "connection" => Some(Connection)
case "date" => Some(Date)
case "pragma" => Some(Pragma)
case "trailer" => Some(Trailer)
case "transfer-encoding" => Some(TransferEncoding)
case "upgrade" => Some(Upgrade)
case "via" => Some(Via)
case "warning" => Some(Warning)
case _ => None
}
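  // For example:
  //   GeneralHeader.from("transfer-encoding") // Some(TransferEncoding)
  //   GeneralHeader.from("x-custom-header")   // None (unknown names are not modelled)
  // Matching is case-insensitive, while asString always yields the canonical
  // header name, e.g. TransferEncoding.asString == "Transfer-Encoding".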
}
|
gpampara/scalaz-http-client
|
src/main/scala/http/GeneralHeader.scala
|
Scala
|
bsd-3-clause
| 2,766 |
package controllers
import java.util.Date
import play.api.libs.ws.WS
import play.api.mvc._
import play.api.libs.json._
import play.modules.reactivemongo._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.modules.reactivemongo.json.collection.JSONCollection
import plex.API
import security.Secured
import utils.SubtitlesUtils
import scala.concurrent.Future
object MovieController extends Controller with Secured with MongoController {
def collection: JSONCollection = db.collection[JSONCollection]("movies")
def index = withAuth { token => implicit request =>
val movies = API.getMovies(token)
Ok(views.html.movies(movies, token))
}
def movie(movieId: String) = withUserFuture { (user, token) => implicit request =>
API.getMovie(movieId, token) match {
case Some(movie) =>
val findOffset = collection.find(Json.obj(
"uid" -> user.uid,
"movieId" -> movieId
)).one[JsObject]
findOffset.map { opt =>
val offset = opt.map { obj =>
(obj \\ "offset").toString().toDouble
}
Ok(views.html.movie(movie, offset, token))
}
case None => Future.apply(NotFound)
}
}
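  // `upsert = true` makes the update insert a fresh progress document when none
  // exists for this (uid, movieId) pair and overwrite it otherwise, so repeated
  // watch() calls keep a single record per user and movie.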
def watch(movieId: String, state: String, offset: Double) = withUserFuture { (user, token) => implicit request =>
val futureUpdate = collection.update(
Json.obj(
"movieId" -> movieId,
"uid" -> user.uid
),
Json.obj(
"movieId" -> movieId,
"uid" -> user.uid,
"offset" -> JsNumber(offset),
"date" -> new Date
),
upsert = true
)
futureUpdate.map { r =>
Ok(Json.obj("success" -> r.ok))
}
}
def subtitles(movieId: String, lang: String) = withAuthFuture { token => implicit request =>
API.getMovie(movieId, token) match {
case Some(movie) =>
import play.api.Play.current
WS.url(movie.subtitles.find(_.languageCode == lang).head.url(token).toString()).get().map { res =>
Ok(SubtitlesUtils.convertSRTToVVT(res.body))
}
case None => Future.apply(NotFound)
}
}
}
|
tomasharkema/Plex.Scala
|
app/controllers/MovieController.scala
|
Scala
|
mit
| 2,135 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import java.time.{Duration, Period}
import org.apache.spark.sql.{QueryTest, Row}
/**
* This base suite contains unified tests for the `ALTER TABLE .. ADD COLUMNS` command that
* check V1 and V2 table catalogs. The tests that cannot run for all supported catalogs are
* located in more specific test suites:
*
* - V2 table catalog tests:
* `org.apache.spark.sql.execution.command.v2.AlterTableAddColumnsSuite`
* - V1 table catalog tests:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddColumnsSuiteBase`
* - V1 In-Memory catalog:
* `org.apache.spark.sql.execution.command.v1.AlterTableAddColumnsSuite`
* - V1 Hive External catalog:
* `org.apache.spark.sql.hive.execution.command.AlterTableAddColumnsSuite`
*/
trait AlterTableAddColumnsSuiteBase extends QueryTest with DDLCommandTestUtils {
override val command = "ALTER TABLE .. ADD COLUMNS"
test("add an ANSI interval columns") {
assume(!catalogVersion.contains("Hive")) // Hive catalog doesn't support the interval types
withNamespaceAndTable("ns", "tbl") { t =>
sql(s"CREATE TABLE $t (id bigint) $defaultUsing")
sql(s"ALTER TABLE $t ADD COLUMNS (ym INTERVAL YEAR, dt INTERVAL HOUR)")
sql(s"INSERT INTO $t SELECT 0, INTERVAL '100' YEAR, INTERVAL '10' HOUR")
checkAnswer(
sql(s"SELECT id, ym, dt data FROM $t"),
Seq(Row(0, Period.ofYears(100), Duration.ofHours(10))))
}
}
}
|
ueshin/apache-spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddColumnsSuiteBase.scala
|
Scala
|
apache-2.0
| 2,291 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import java.io._
import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, Path}
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce._
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
import org.apache.orc.{OrcUtils => _, _}
import org.apache.orc.OrcConf.{COMPRESS, MAPRED_OUTPUT_SCHEMA}
import org.apache.orc.mapred.OrcStruct
import org.apache.orc.mapreduce._
import org.apache.spark.TaskContext
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.util.{SerializableConfiguration, Utils}
private[sql] object OrcFileFormat {
private def checkFieldName(name: String): Unit = {
try {
TypeDescription.fromString(s"struct<$name:int>")
} catch {
case _: IllegalArgumentException =>
throw new AnalysisException(
s"""Column name "$name" contains invalid character(s).
|Please use alias to rename it.
""".stripMargin.split("\n").mkString(" ").trim)
}
}
def checkFieldNames(names: Seq[String]): Unit = {
names.foreach(checkFieldName)
}
def getQuotedSchemaString(dataType: DataType): String = dataType match {
case _: AtomicType => dataType.catalogString
case StructType(fields) =>
fields.map(f => s"`${f.name}`:${getQuotedSchemaString(f.dataType)}")
.mkString("struct<", ",", ">")
case ArrayType(elementType, _) =>
s"array<${getQuotedSchemaString(elementType)}>"
case MapType(keyType, valueType, _) =>
s"map<${getQuotedSchemaString(keyType)},${getQuotedSchemaString(valueType)}>"
case _ => // UDT and others
dataType.catalogString
}
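  // For example, a struct with an int field `a` and an array-of-string field `b`
  // is rendered as: struct<`a`:int,`b`:array<string>>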
}
/**
* New ORC File Format based on Apache ORC.
*/
class OrcFileFormat
extends FileFormat
with DataSourceRegister
with Serializable {
override def shortName(): String = "orc"
override def toString: String = "ORC"
override def hashCode(): Int = getClass.hashCode()
override def equals(other: Any): Boolean = other.isInstanceOf[OrcFileFormat]
override def inferSchema(
sparkSession: SparkSession,
options: Map[String, String],
files: Seq[FileStatus]): Option[StructType] = {
OrcUtils.inferSchema(sparkSession, files, options)
}
override def prepareWrite(
sparkSession: SparkSession,
job: Job,
options: Map[String, String],
dataSchema: StructType): OutputWriterFactory = {
val orcOptions = new OrcOptions(options, sparkSession.sessionState.conf)
val conf = job.getConfiguration
conf.set(MAPRED_OUTPUT_SCHEMA.getAttribute, OrcFileFormat.getQuotedSchemaString(dataSchema))
conf.set(COMPRESS.getAttribute, orcOptions.compressionCodec)
conf.asInstanceOf[JobConf]
.setOutputFormat(classOf[org.apache.orc.mapred.OrcOutputFormat[OrcStruct]])
new OutputWriterFactory {
override def newInstance(
path: String,
dataSchema: StructType,
context: TaskAttemptContext): OutputWriter = {
new OrcOutputWriter(path, dataSchema, context)
}
override def getFileExtension(context: TaskAttemptContext): String = {
val compressionExtension: String = {
val name = context.getConfiguration.get(COMPRESS.getAttribute)
OrcUtils.extensionsForCompressionCodecNames.getOrElse(name, "")
}
compressionExtension + ".orc"
}
}
}
override def supportBatch(sparkSession: SparkSession, schema: StructType): Boolean = {
val conf = sparkSession.sessionState.conf
conf.orcVectorizedReaderEnabled && conf.wholeStageEnabled &&
schema.length <= conf.wholeStageMaxNumFields &&
schema.forall(_.dataType.isInstanceOf[AtomicType])
}
override def isSplitable(
sparkSession: SparkSession,
options: Map[String, String],
path: Path): Boolean = {
true
}
override def buildReaderWithPartitionValues(
sparkSession: SparkSession,
dataSchema: StructType,
partitionSchema: StructType,
requiredSchema: StructType,
filters: Seq[Filter],
options: Map[String, String],
hadoopConf: Configuration): (PartitionedFile) => Iterator[InternalRow] = {
if (sparkSession.sessionState.conf.orcFilterPushDown) {
OrcFilters.createFilter(dataSchema, filters).foreach { f =>
OrcInputFormat.setSearchArgument(hadoopConf, f, dataSchema.fieldNames)
}
}
val resultSchema = StructType(requiredSchema.fields ++ partitionSchema.fields)
val sqlConf = sparkSession.sessionState.conf
val enableVectorizedReader = supportBatch(sparkSession, resultSchema)
val capacity = sqlConf.orcVectorizedReaderBatchSize
val resultSchemaString = OrcUtils.orcTypeDescriptionString(resultSchema)
OrcConf.MAPRED_INPUT_SCHEMA.setString(hadoopConf, resultSchemaString)
OrcConf.IS_SCHEMA_EVOLUTION_CASE_SENSITIVE.setBoolean(hadoopConf, sqlConf.caseSensitiveAnalysis)
val broadcastedConf =
sparkSession.sparkContext.broadcast(new SerializableConfiguration(hadoopConf))
val isCaseSensitive = sparkSession.sessionState.conf.caseSensitiveAnalysis
(file: PartitionedFile) => {
val conf = broadcastedConf.value.value
val filePath = new Path(new URI(file.filePath))
val fs = filePath.getFileSystem(conf)
val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
val requestedColIdsOrEmptyFile =
Utils.tryWithResource(OrcFile.createReader(filePath, readerOptions)) { reader =>
OrcUtils.requestedColumnIds(
isCaseSensitive, dataSchema, requiredSchema, reader, conf)
}
if (requestedColIdsOrEmptyFile.isEmpty) {
Iterator.empty
} else {
val requestedColIds = requestedColIdsOrEmptyFile.get
assert(requestedColIds.length == requiredSchema.length,
"[BUG] requested column IDs do not match required schema")
val taskConf = new Configuration(conf)
val fileSplit = new FileSplit(filePath, file.start, file.length, Array.empty)
val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0)
val taskAttemptContext = new TaskAttemptContextImpl(taskConf, attemptId)
if (enableVectorizedReader) {
val batchReader = new OrcColumnarBatchReader(capacity)
// SPARK-23399 Register a task completion listener first to call `close()` in all cases.
// There is a possibility that `initialize` and `initBatch` hit some errors (like OOM)
// after opening a file.
val iter = new RecordReaderIterator(batchReader)
Option(TaskContext.get()).foreach(_.addTaskCompletionListener[Unit](_ => iter.close()))
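          // A column id of -1 marks a position in the result schema that is not read
          // from the ORC file: partition columns are filled from file.partitionValues,
          // while data columns keep their real ids and are read from the file.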
val requestedDataColIds = requestedColIds ++ Array.fill(partitionSchema.length)(-1)
val requestedPartitionColIds =
Array.fill(requiredSchema.length)(-1) ++ Range(0, partitionSchema.length)
batchReader.initialize(fileSplit, taskAttemptContext)
batchReader.initBatch(
TypeDescription.fromString(resultSchemaString),
resultSchema.fields,
requestedDataColIds,
requestedPartitionColIds,
file.partitionValues)
iter.asInstanceOf[Iterator[InternalRow]]
} else {
val orcRecordReader = new OrcInputFormat[OrcStruct]
.createRecordReader(fileSplit, taskAttemptContext)
val iter = new RecordReaderIterator[OrcStruct](orcRecordReader)
Option(TaskContext.get()).foreach(_.addTaskCompletionListener[Unit](_ => iter.close()))
val fullSchema = requiredSchema.toAttributes ++ partitionSchema.toAttributes
val unsafeProjection = GenerateUnsafeProjection.generate(fullSchema, fullSchema)
val deserializer = new OrcDeserializer(dataSchema, requiredSchema, requestedColIds)
if (partitionSchema.length == 0) {
iter.map(value => unsafeProjection(deserializer.deserialize(value)))
} else {
val joinedRow = new JoinedRow()
iter.map(value =>
unsafeProjection(joinedRow(deserializer.deserialize(value), file.partitionValues)))
}
}
}
}
}
override def supportDataType(dataType: DataType): Boolean = dataType match {
case _: AtomicType => true
case st: StructType => st.forall { f => supportDataType(f.dataType) }
case ArrayType(elementType, _) => supportDataType(elementType)
case MapType(keyType, valueType, _) =>
supportDataType(keyType) && supportDataType(valueType)
case udt: UserDefinedType[_] => supportDataType(udt.sqlType)
case _ => false
}
}
|
goldmedal/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileFormat.scala
|
Scala
|
apache-2.0
| 9,890 |
package com.gilt.gfc.time
import collection.mutable.ArrayBuffer
import org.scalatest.{FunSuite, Matchers}
import scala.concurrent.duration._
import scala.concurrent.{Promise, Await, Future}
import java.util.concurrent.atomic.AtomicInteger
/**
* Tests the Timer trait & object.
*/
class TimerTest extends FunSuite with Matchers {
class AutoAdvancingClock(incr: Long) {
private var seed = 0L
def apply(): Long = {
val next = seed
seed += incr
next
}
}
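  // With incr = 1, consecutive nanoClock() reads return 0, 1, 2, ... so a block
  // timed between two reads always measures exactly 1 ns; the assertions below
  // rely on that deterministic delta.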
test("Basics") {
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
val times = new ArrayBuffer[Long]()
timer.time(times.append(_))(Unit)
times.head should equal(1)
}
test("Logging works") {
// just tests that the api is kind of reasonable
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
import timer.{time, pretty}
var msg = ""
val result = time(delta => msg = "Operation took %s".format(pretty(delta))) {
"lalalalalalalala"
}
result should equal("lalalalalalalala")
msg should equal("Operation took 1 ns")
}
test("timePretty") {
// just tests that the api is kind of reasonable
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
import timer.timePretty
var msg = ""
timePretty(msg = _)("lalalalalalalala") should equal("lalalalalalalala")
msg should equal("1 ns")
}
test("TimePrettyFormat") {
// just tests that the api is kind of reasonable
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
import timer.timePrettyFormat
var msg = ""
timePrettyFormat("This took %s", msg = _)("lalalalalalalala") should equal("lalalalalalalala")
msg should equal("This took 1 ns")
}
test("TimeFuture") {
// just test the side effect is executed only once
import scala.concurrent.ExecutionContext.Implicits.global
val sideEffectsCount = new AtomicInteger()
def task() = Future.successful {
sideEffectsCount.incrementAndGet()
"lalalalalalala"
}
Await.result(Timer.timeFuture(_ => Unit)(task), Duration.Inf) should be("lalalalalalala")
sideEffectsCount.intValue() should equal(1)
}
test("TimeFuturePretty") {
// just tests that the api is kind of reasonable
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
import timer.timeFuturePretty
import scala.concurrent.ExecutionContext.Implicits.global
val prom = Promise[String]
Await.result(timeFuturePretty(prom.success(_))(Future.successful("lalalalalalala")), Duration.Inf) should be ("lalalalalalala")
Await.result(prom.future, 1.second) should equal("1 ns")
}
test("TimeFuturePrettyFormat") {
// just tests that the api is kind of reasonable
val timer = new Timer {
val clock = new AutoAdvancingClock(1)
def nanoClock() = clock()
}
import timer.timeFuturePrettyFormat
import scala.concurrent.ExecutionContext.Implicits.global
val prom = Promise[String]
Await.result(timeFuturePrettyFormat("This took %s", prom.success(_))(Future.successful("lalalalalalalala")), Duration.Inf) should equal("lalalalalalalala")
Await.result(prom.future, 1.second) should equal("This took 1 ns")
}
test("Format") {
Timer.pretty(1) should equal ("1 ns")
Timer.pretty(100) should equal ("100 ns")
Timer.pretty(1000) should equal ("1 us")
Timer.pretty(1001) should equal ("1.001 us")
Timer.pretty(1101) should equal ("1.101 us")
Timer.pretty(999999) should equal ("999.999 us")
Timer.pretty(1000000) should equal ("1 ms")
Timer.pretty(1001000) should equal ("1.001 ms")
Timer.pretty(999001000) should equal ("999.001 ms")
Timer.pretty(1000000000) should equal ("1 s")
Timer.pretty(1000000001) should equal ("1 s")
Timer.pretty(1100000001) should equal ("1.100 s")
Timer.pretty(3600000000000L) should equal ("01:00:00")
Timer.pretty(3920101100000001L) should equal ("45 days 08:55:01")
Timer.pretty(Long.MaxValue) should equal ("106751 days 23:47:16")
Timer.pretty(0) should equal ("0 ns")
Timer.pretty(-1) should equal("-1 ns")
Timer.pretty(-3920101100000001L) should equal ("-3920101.100 s")
}
test("Real timing") {
// should be within 10% of really sleeping 1 second
    // Disabled the check due to intermittent failures in Jenkins; instead we print an exception stack trace.
import Timer.{time, pretty}
time(duration =>
//assert(math.abs(duration - 1e9) < 1e8, duration + " -> " + pretty(duration))
if (math.abs(duration - 1e9) > 1e8) {
new java.lang.Exception("FAILED TimerTest.realTiming: duration=%s".format(pretty(duration))).printStackTrace
}
) {
Thread.sleep(1000)
}
}
}
|
gilt/gfc-time
|
src/test/scala/com/gilt/gfc/time/TimerTest.scala
|
Scala
|
apache-2.0
| 4,949 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.examples
import org.apache.spark.streaming.{Seconds, StreamingContext}
import StreamingContext._
import org.apache.spark.SparkContext._
import org.apache.spark.streaming.twitter._
/**
* Calculates popular hashtags (topics) over sliding 10 and 60 second windows from a Twitter
 * stream. The stream is instantiated with credentials and, optionally, filters supplied by the
* command line arguments.
*
*/
object TwitterPopularTags {
def main(args: Array[String]) {
if (args.length < 1) {
System.err.println("Usage: TwitterPopularTags <master>" +
" [filter1] [filter2] ... [filter n]")
System.exit(1)
}
StreamingExamples.setStreamingLogLevels()
val (master, filters) = (args.head, args.tail)
val ssc = new StreamingContext(master, "TwitterPopularTags", Seconds(2),
System.getenv("SPARK_HOME"), StreamingContext.jarOfClass(this.getClass).toSeq)
val stream = TwitterUtils.createStream(ssc, None, filters)
val hashTags = stream.flatMap(status => status.getText.split(" ").filter(_.startsWith("#")))
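    // Count each hashtag over a sliding window, then swap to (count, tag) pairs so
    // sortByKey(false) ranks topics by frequency in descending order.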
val topCounts60 = hashTags.map((_, 1)).reduceByKeyAndWindow(_ + _, Seconds(60))
.map{case (topic, count) => (count, topic)}
.transform(_.sortByKey(false))
val topCounts10 = hashTags.map((_, 1)).reduceByKeyAndWindow(_ + _, Seconds(10))
.map{case (topic, count) => (count, topic)}
.transform(_.sortByKey(false))
// Print popular hashtags
topCounts60.foreachRDD(rdd => {
val topList = rdd.take(5)
println("\\nPopular topics in last 60 seconds (%s total):".format(rdd.count()))
topList.foreach{case (count, tag) => println("%s (%s tweets)".format(tag, count))}
})
topCounts10.foreachRDD(rdd => {
val topList = rdd.take(5)
println("\\nPopular topics in last 10 seconds (%s total):".format(rdd.count()))
topList.foreach{case (count, tag) => println("%s (%s tweets)".format(tag, count))}
})
ssc.start()
ssc.awaitTermination()
}
}
|
zhangjunfang/eclipse-dir
|
spark/examples/src/main/scala/org/apache/spark/streaming/examples/TwitterPopularTags.scala
|
Scala
|
bsd-2-clause
| 2,879 |
package geotrellis.vectortile
import collection.mutable.ListBuffer
import geotrellis.vector._
/** Interprets the commands from a VectorTile and converts them into
* Geometries.
*/
object Command {
val MoveTo: Int = 1
val LineTo: Int = 2
val ClosePath: Int = 7
type GeomType = VectorTile.GeomType
val POINT = VectorTile.POINT
val LINESTRING = VectorTile.LINESTRING
val POLYGON = VectorTile.POLYGON
case class UnsupportedGeomType(message: String) extends Exception(message)
case class UnsupportedCommand(message: String) extends Exception(message)
case class TooFewCommandArgs(message: String) extends Exception(message)
case class NoGeometryToExtend(message: String) extends Exception(message)
/** Interprets the commands, converts the resulting data into a geometry,
* then returns the geometry.
*
* @param geomType the type of geometry to expect
* @param extent the extent of the geometry
* @param commands the list of commands and arguments to interpret
* @return the geometry that was described
*/
def parse(geomType: GeomType, extent: Int, commands: Seq[Int]): Geometry = {
val scale: Double = extent / 256.0
var point_lists: List[List[(Double, Double)]] =
interpret_commands(scale, commands)
return geomType match {
case POINT =>
if (point_lists.length == 1) {
Point(point_lists.head.head)
} else {
MultiPoint(point_lists.map(
(pt: List[(Double, Double)]) => Point(pt.head)))
}
case LINESTRING =>
if (point_lists.length == 1) {
Line(point_lists.head)
} else {
point_lists = point_lists.filter(_.length >= 2)
MultiLine(point_lists.map(
(ln: List[(Double, Double)]) => Line(ln)))
}
case POLYGON =>
if (point_lists.length == 1) {
Polygon(point_lists.head)
} else {
MultiPolygon(point_lists.map(
(pg: List[(Double, Double)]) => Polygon(pg)))
}
case _ =>
throw UnsupportedGeomType(s"Geometry ${geomType} not supported.")
}
}
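  // Worked example (illustrative values): with extent = 256 (so scale = 1.0),
  // parse(LINESTRING, 256, Seq(9, 4, 4, 18, 0, 2, 2, 0)) decodes as
  //   9  = MoveTo | (count 1 << 3), zig-zag args (4, 4) -> move by (+2, +2) to (2, 2)
  //   18 = LineTo | (count 2 << 3), args (0, 2), (2, 0) -> (2, 3) then (3, 3)
  // and therefore yields Line((2.0, 2.0), (2.0, 3.0), (3.0, 3.0)).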
/** A helper function for parse. Builds lists of points out of the
* commands and arguments.
*
* @param scale the scale of the geometry
 * @param commands the commands to interpret
* @return a list of lists of points
*/
private def interpret_commands(scale: Double, commands: Seq[Int]):
List[List[(Double, Double)]] = {
val point_lists: ListBuffer[List[(Double, Double)]] =
ListBuffer.empty[List[(Double, Double)]]
val point_list: ListBuffer[(Double, Double)] =
ListBuffer.empty[(Double, Double)]
var (x: Int, y: Int) = (0, 0)
var idx: Int = 0
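    // Zig-zag decoding maps the unsigned wire values back to signed deltas:
    // 0 -> 0, 1 -> -1, 2 -> 1, 3 -> -2, 4 -> 2, ...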
def zigZagDecode(n: Int) = ((n >> 1) ^ (-(n & 1)))
while(idx < commands.length) {
var command = commands(idx)
var (id, count) = (command & 0x7, command >> 3)
idx += 1
id match {
case MoveTo =>
for(_ <- 0 until count) {
if (!point_list.isEmpty) {
point_lists += point_list.toList
point_list.clear
}
if (idx + 2 > commands.length) {
throw TooFewCommandArgs("Source: MoveTo command.")
}
x += zigZagDecode(commands(idx))
y += zigZagDecode(commands(idx+1))
point_list += ((x / scale, y / scale))
idx += 2
}
case LineTo =>
for(_ <- 0 until count) {
if (point_list.isEmpty) {
throw NoGeometryToExtend("Source: LineTo")
}
if (idx + 2 > commands.length) {
throw TooFewCommandArgs("Source: MoveTo command.")
}
val dx = zigZagDecode(commands(idx))
val dy = zigZagDecode(commands(idx+1))
x += dx
y += dy
if (dx != 0 || dy != 0)
point_list += ((x / scale, y / scale))
idx += 2
}
case ClosePath =>
for(_ <- 0 until count) {
if (point_list.isEmpty) {
throw NoGeometryToExtend("Source: ClosePath")
} else {
if (point_list.head != point_list.last) {
point_list += point_list.head
}
point_lists += point_list.toList
point_list.clear
}
}
case _ =>
throw UnsupportedCommand(
s"Unsupported Command ID: ${id}")
}
}
if (!point_list.isEmpty) { point_lists += point_list.toList }
return point_lists.toList
}
}
|
geotrellis/geotrellis-osm-elevation
|
scala-vector-tile/src/main/scala/geotrellis/vectortile/Command.scala
|
Scala
|
apache-2.0
| 5,562 |
package dbpedia.destinations
/**
* The quads generated by the DBpedia framework are organized in a number of datasets.
 * TODO: remove this class? It has no real purpose; it's just a string holder.
*/
class Dataset(val name: String, val description: String)
{
def this(name: String)
{
this(name, null)
}
override def toString = name
override def hashCode = name.hashCode
override def equals(other : Any) = other match {
case that: Dataset => (this.name == that.name)
case _ => false
}
}
|
FnOio/dbpedia-parsing-functions-scala
|
src/main/scala/dbpedia/destinations/Dataset.scala
|
Scala
|
gpl-2.0
| 523 |
package com.overviewdocs.csv
import java.nio.{ByteBuffer,CharBuffer}
import java.nio.charset.{CharsetDecoder,CoderResult,CodingErrorAction}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import com.overviewdocs.database.{HasDatabase,LargeObject,TreeIdGenerator}
import com.overviewdocs.metadata.{MetadataField,MetadataFieldType,MetadataSchema}
import com.overviewdocs.models.{CsvImport,DocumentProcessingError,Tree}
import com.overviewdocs.models.tables.{CsvImports,Documents,DocumentProcessingErrors,DocumentSets,Tags,Trees}
import com.overviewdocs.util.AddDocumentsCommon
/** Processes a CSV Import. */
class CsvImporter(
csvImport: CsvImport,
addDocumentsCommon: AddDocumentsCommon = AddDocumentsCommon,
/** Number of bytes to process at a time.
*
* A larger buffer makes processing faster (up to a point); a smaller buffer
* makes Overview update progress and check for cancellation more frequently.
* Aim for a progress report every 1s at the longest.
*/
bufferSize: Int = 5 * 1024 * 1024
) extends HasDatabase {
private var nBytesProcessed: Long = 0
private var nDocumentsWritten: Int = 0
private var unparsedBytes: Array[Byte] = Array[Byte]()
private val decoder: CharsetDecoder = csvImport.charset.newDecoder
.onMalformedInput(CodingErrorAction.REPLACE)
.onUnmappableCharacter(CodingErrorAction.REPLACE)
private val csvParser = new CsvParser
private val csvDocumentProducer = new CsvDocumentProducer
private lazy val csvDocumentWriterFuture: Future[CsvDocumentWriter] = {
import database.api._
for {
maxDocumentId <- database.run(Documents.filter(_.documentSetId === csvImport.documentSetId).map(_.id).max.result)
existingTags <- database.seq(Tags.filter(_.documentSetId === csvImport.documentSetId))
} yield {
new CsvDocumentWriter(csvImport.documentSetId, maxDocumentId, existingTags)
}
}
private[csv] sealed trait NextStep
private[csv] object NextStep {
/** Run another step(). */
case object Continue extends NextStep
/** Delete the CsvImport, and optionally create a DocumentProcessingError.
*/
case class Finish(error: Option[String]) extends NextStep
}
/** All logic.
*
* At the start, the `csv_import` in the database may be anything. At the end
* of processing, we guarantee:
*
* * The large object will be deleted.
* * `n_bytes_processed` will equal `n_bytes`
* * We will add documents to the database.
* * We may add a document processing error to the database.
*/
def run: Future[Unit] = {
for {
_ <- addDocumentsCommon.beforeAddDocuments(csvImport.documentSetId)
_ <- continue
} yield ()
}
private def continue: Future[Unit] = {
step(bufferSize).flatMap(_ match {
case NextStep.Continue => continue
case NextStep.Finish(error) => finish(error)
})
}
/** Update document set counts and metadata schema; delete CSV from database.
*/
private def finish(error: Option[String]): Future[Unit] = {
for {
_ <- addDocumentsCommon.afterAddDocuments(csvImport.documentSetId)
_ <- writeMetadataSchema
_ <- deleteCsvImport(error)
} yield ()
}
/** Mark stuff in the database so we never resume this import. */
private def cancel: Future[Unit] = {
Future.unit
}
/** Process some bytes from the blob, write some documents to the database,
 * report progress to the database, and check for cancellation.
*
* @param bufferSize Number of bytes to read.
* @return What to do next.
*/
private[csv] def step(bufferSize: Int): Future[NextStep] = {
for {
bytes <- readBytes(bufferSize)
_ <- processBytes(bytes)
result <- reportProgressAndDecideWhatsNext
} yield result
}
/** Writes all the documents (and Tags and DocumentTags) we've read so far
 * to the database; updates nBytesProcessed and nDocumentsWritten.
*/
private def processBytes(bytes: Array[Byte]): Future[Unit] = {
val allBytes = unparsedBytes ++ bytes
val byteBuffer = ByteBuffer.wrap(allBytes)
// We know the number of chars is <= the number of bytes
val chars = new Array[Char](byteBuffer.limit)
val charBuffer = CharBuffer.wrap(chars)
var isLastBatch: Boolean = nBytesProcessed + bytes.length >= csvImport.nBytes
val coderResult1 = decoder.decode(byteBuffer, charBuffer, isLastBatch)
assert(coderResult1 == CoderResult.UNDERFLOW)
if (isLastBatch) {
val coderResult2 = decoder.flush(charBuffer)
assert(coderResult2 == CoderResult.UNDERFLOW)
}
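    // Any bytes the decoder could not consume (e.g. a multi-byte character split
    // across two chunks) are saved so the next call can prepend and finish them.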
val nUnparsedBytes = byteBuffer.remaining
unparsedBytes = new Array[Byte](nUnparsedBytes)
byteBuffer.get(unparsedBytes)
nBytesProcessed += bytes.length
csvParser.write(chars, 0, charBuffer.position)
if (isLastBatch) csvParser.end
csvParser.getParsedRows.foreach(csvDocumentProducer.addCsvRow)
csvParser.clearParsedRows
val newCsvDocuments = csvDocumentProducer.getProducedDocuments
.drop(Math.max(0, csvImport.nDocuments - nDocumentsWritten))
nDocumentsWritten += csvDocumentProducer.getProducedDocuments.length
csvDocumentProducer.clearProducedDocuments
for {
csvDocumentWriter <- csvDocumentWriterFuture
_ <- { newCsvDocuments.foreach(csvDocumentWriter.add); csvDocumentWriter.flush }
} yield ()
}
/** Returns the next buffer full of bytes from the database.
*/
private def readBytes(bufferSize: Int): Future[Array[Byte]] = {
import database.api._
database.run((for {
lo <- database.largeObjectManager.open(csvImport.loid, LargeObject.Mode.Read)
_ <- lo.seek(nBytesProcessed)
bytes <- lo.read(bufferSize)
} yield bytes).transactionally)
}
/** Writes current progress to the database; returns what we should do next.
*/
private def reportProgressAndDecideWhatsNext: Future[NextStep] = {
import database.api._
val q = sql"""
UPDATE csv_import
SET n_bytes_processed = $nBytesProcessed, n_documents = $nDocumentsWritten
WHERE id = ${csvImport.id}
RETURNING cancelled
""".as[Boolean]
database.option(q).map(_ match {
case Some(false) if csvImport.nBytes > nBytesProcessed => NextStep.Continue
case Some(true) => NextStep.Finish(Some("Overview stopped adding documents because you cancelled processing this CSV"))
case _ if csvParser.isFullyParsed => NextStep.Finish(None)
case _ => NextStep.Finish(Some("Overview stopped adding documents because this is not a valid CSV"))
})
}
private def updateDocumentSetCount: Future[Unit] = {
import database.api._
database.runUnit(sqlu"""
UPDATE document_set
SET document_count = (SELECT COUNT(*) FROM document WHERE document_set_id = document_set.id)
WHERE id = ${csvImport.documentSetId}
""")
}
private lazy val byId = {
import database.api._
Compiled { csvImportId: Rep[Long] =>
CsvImports.filter(_.id === csvImportId)
}
}
private def maybeCreateDocumentProcessingError(maybeError: Option[String]) = {
import database.api._
DocumentProcessingErrors.map(_.createAttributes) ++= maybeError.toSeq.map { error =>
DocumentProcessingError.CreateAttributes(
csvImport.documentSetId,
None,
csvImport.filename,
error,
None,
None
)
}
}
private def deleteCsvImport(error: Option[String]): Future[Unit] = {
import database.api._
database.runUnit((for {
_ <- maybeCreateDocumentProcessingError(error)
_ <- database.largeObjectManager.unlink(csvImport.loid)
_ <- byId(csvImport.id).delete
} yield ()).transactionally)
}
private def writeMetadataSchema: Future[Unit] = {
import database.api._
val metadataSchema = MetadataSchema(1,
csvDocumentProducer.metadataColumnNames.map { name =>
MetadataField(name, MetadataFieldType.String)
}
)
database.runUnit(
DocumentSets
.filter(_.id === csvImport.documentSetId)
.map(_.metadataSchema)
.update(metadataSchema)
)
}
}
|
overview/overview-server
|
worker/src/main/scala/com/overviewdocs/csv/CsvImporter.scala
|
Scala
|
agpl-3.0
| 8,159 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import scala.util.Random
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.plans.logical.OneRowRelation
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{withDefaultTimeZone, UTC}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
/**
* Test suite for functions in [[org.apache.spark.sql.functions]].
*/
class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("array with column name") {
val df = Seq((0, 1)).toDF("a", "b")
val row = df.select(array("a", "b")).first()
val expectedType = ArrayType(IntegerType, containsNull = false)
assert(row.schema(0).dataType === expectedType)
assert(row.getAs[Seq[Int]](0) === Seq(0, 1))
}
test("array with column expression") {
val df = Seq((0, 1)).toDF("a", "b")
val row = df.select(array(col("a"), col("b") + col("b"))).first()
val expectedType = ArrayType(IntegerType, containsNull = false)
assert(row.schema(0).dataType === expectedType)
assert(row.getSeq[Int](0) === Seq(0, 2))
}
test("map with column expressions") {
val df = Seq(1 -> "a").toDF("a", "b")
val row = df.select(map($"a" + 1, $"b")).first()
val expectedType = MapType(IntegerType, StringType, valueContainsNull = true)
assert(row.schema(0).dataType === expectedType)
assert(row.getMap[Int, String](0) === Map(2 -> "a"))
}
test("map with arrays") {
val df1 = Seq((Seq(1, 2), Seq("a", "b"))).toDF("k", "v")
val expectedType = MapType(IntegerType, StringType, valueContainsNull = true)
val row = df1.select(map_from_arrays($"k", $"v")).first()
assert(row.schema(0).dataType === expectedType)
assert(row.getMap[Int, String](0) === Map(1 -> "a", 2 -> "b"))
checkAnswer(df1.select(map_from_arrays($"k", $"v")), Seq(Row(Map(1 -> "a", 2 -> "b"))))
val df2 = Seq((Seq(1, 2), Seq(null, "b"))).toDF("k", "v")
checkAnswer(df2.select(map_from_arrays($"k", $"v")), Seq(Row(Map(1 -> null, 2 -> "b"))))
val df3 = Seq((null, null)).toDF("k", "v")
checkAnswer(df3.select(map_from_arrays($"k", $"v")), Seq(Row(null)))
val df4 = Seq((1, "a")).toDF("k", "v")
intercept[AnalysisException] {
df4.select(map_from_arrays($"k", $"v"))
}
val df5 = Seq((Seq("a", null), Seq(1, 2))).toDF("k", "v")
val msg1 = intercept[Exception] {
df5.select(map_from_arrays($"k", $"v")).collect
}.getMessage
assert(msg1.contains("Cannot use null as map key"))
val df6 = Seq((Seq(1, 2), Seq("a"))).toDF("k", "v")
val msg2 = intercept[Exception] {
df6.select(map_from_arrays($"k", $"v")).collect
}.getMessage
assert(msg2.contains("The key array and value array of MapData must have the same length"))
}
test("struct with column name") {
val df = Seq((1, "str")).toDF("a", "b")
val row = df.select(struct("a", "b")).first()
val expectedType = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType)
))
assert(row.schema(0).dataType === expectedType)
assert(row.getAs[Row](0) === Row(1, "str"))
}
test("struct with column expression") {
val df = Seq((1, "str")).toDF("a", "b")
val row = df.select(struct((col("a") * 2).as("c"), col("b"))).first()
val expectedType = StructType(Seq(
StructField("c", IntegerType, nullable = false),
StructField("b", StringType)
))
assert(row.schema(0).dataType === expectedType)
assert(row.getAs[Row](0) === Row(2, "str"))
}
test("struct with column expression to be automatically named") {
val df = Seq((1, "str")).toDF("a", "b")
val result = df.select(struct((col("a") * 2), col("b")))
val expectedType = StructType(Seq(
StructField("col1", IntegerType, nullable = false),
StructField("b", StringType)
))
assert(result.first.schema(0).dataType === expectedType)
checkAnswer(result, Row(Row(2, "str")))
}
test("struct with literal columns") {
val df = Seq((1, "str1"), (2, "str2")).toDF("a", "b")
val result = df.select(struct((col("a") * 2), lit(5.0)))
val expectedType = StructType(Seq(
StructField("col1", IntegerType, nullable = false),
StructField("col2", DoubleType, nullable = false)
))
assert(result.first.schema(0).dataType === expectedType)
checkAnswer(result, Seq(Row(Row(2, 5.0)), Row(Row(4, 5.0))))
}
test("struct with all literal columns") {
val df = Seq((1, "str1"), (2, "str2")).toDF("a", "b")
val result = df.select(struct(lit("v"), lit(5.0)))
val expectedType = StructType(Seq(
StructField("col1", StringType, nullable = false),
StructField("col2", DoubleType, nullable = false)
))
assert(result.first.schema(0).dataType === expectedType)
checkAnswer(result, Seq(Row(Row("v", 5.0)), Row(Row("v", 5.0))))
}
test("constant functions") {
checkAnswer(
sql("SELECT E()"),
Row(scala.math.E)
)
checkAnswer(
sql("SELECT PI()"),
Row(scala.math.Pi)
)
}
test("bitwiseNOT") {
checkAnswer(
testData2.select(bitwiseNOT($"a")),
testData2.collect().toSeq.map(r => Row(~r.getInt(0))))
}
test("bin") {
val df = Seq[(Integer, Integer)]((12, null)).toDF("a", "b")
checkAnswer(
df.select(bin("a"), bin("b")),
Row("1100", null))
checkAnswer(
df.selectExpr("bin(a)", "bin(b)"),
Row("1100", null))
}
test("if function") {
val df = Seq((1, 2)).toDF("a", "b")
checkAnswer(
df.selectExpr("if(a = 1, 'one', 'not_one')", "if(b = 1, 'one', 'not_one')"),
Row("one", "not_one"))
}
test("misc md5 function") {
val df = Seq(("ABC", Array[Byte](1, 2, 3, 4, 5, 6))).toDF("a", "b")
checkAnswer(
df.select(md5($"a"), md5($"b")),
Row("902fbdd2b1df0c4f70b4a5d23525e932", "6ac1e56bc78f031059be7be854522c4c"))
checkAnswer(
df.selectExpr("md5(a)", "md5(b)"),
Row("902fbdd2b1df0c4f70b4a5d23525e932", "6ac1e56bc78f031059be7be854522c4c"))
}
test("misc sha1 function") {
val df = Seq(("ABC", "ABC".getBytes(StandardCharsets.UTF_8))).toDF("a", "b")
checkAnswer(
df.select(sha1($"a"), sha1($"b")),
Row("3c01bdbb26f358bab27f267924aa2c9a03fcfdb8", "3c01bdbb26f358bab27f267924aa2c9a03fcfdb8"))
val dfEmpty = Seq(("", "".getBytes(StandardCharsets.UTF_8))).toDF("a", "b")
checkAnswer(
dfEmpty.selectExpr("sha1(a)", "sha1(b)"),
Row("da39a3ee5e6b4b0d3255bfef95601890afd80709", "da39a3ee5e6b4b0d3255bfef95601890afd80709"))
}
test("misc sha2 function") {
val df = Seq(("ABC", Array[Byte](1, 2, 3, 4, 5, 6))).toDF("a", "b")
checkAnswer(
df.select(sha2($"a", 256), sha2($"b", 256)),
Row("b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78",
"7192385c3c0605de55bb9476ce1d90748190ecb32a8eed7f5207b30cf6a1fe89"))
checkAnswer(
df.selectExpr("sha2(a, 256)", "sha2(b, 256)"),
Row("b5d4045c3f466fa91fe2cc6abe79232a1a57cdf104f7a26e716e0a1e2789df78",
"7192385c3c0605de55bb9476ce1d90748190ecb32a8eed7f5207b30cf6a1fe89"))
intercept[IllegalArgumentException] {
df.select(sha2($"a", 1024))
}
}
test("misc crc32 function") {
val df = Seq(("ABC", Array[Byte](1, 2, 3, 4, 5, 6))).toDF("a", "b")
checkAnswer(
df.select(crc32($"a"), crc32($"b")),
Row(2743272264L, 2180413220L))
checkAnswer(
df.selectExpr("crc32(a)", "crc32(b)"),
Row(2743272264L, 2180413220L))
}
test("string function find_in_set") {
val df = Seq(("abc,b,ab,c,def", "abc,b,ab,c,def")).toDF("a", "b")
checkAnswer(
df.selectExpr("find_in_set('ab', a)", "find_in_set('x', b)"),
Row(3, 0))
}
test("conditional function: least") {
checkAnswer(
testData2.select(least(lit(-1), lit(0), col("a"), col("b"))).limit(1),
Row(-1)
)
checkAnswer(
sql("SELECT least(a, 2) as l from testData2 order by l"),
Seq(Row(1), Row(1), Row(2), Row(2), Row(2), Row(2))
)
}
test("conditional function: greatest") {
checkAnswer(
testData2.select(greatest(lit(2), lit(3), col("a"), col("b"))).limit(1),
Row(3)
)
checkAnswer(
sql("SELECT greatest(a, 2) as g from testData2 order by g"),
Seq(Row(2), Row(2), Row(2), Row(2), Row(3), Row(3))
)
}
test("pmod") {
val intData = Seq((7, 3), (-7, 3)).toDF("a", "b")
checkAnswer(
intData.select(pmod($"a", $"b")),
Seq(Row(1), Row(2))
)
checkAnswer(
intData.select(pmod($"a", lit(3))),
Seq(Row(1), Row(2))
)
checkAnswer(
intData.select(pmod(lit(-7), $"b")),
Seq(Row(2), Row(2))
)
checkAnswer(
intData.selectExpr("pmod(a, b)"),
Seq(Row(1), Row(2))
)
checkAnswer(
intData.selectExpr("pmod(a, 3)"),
Seq(Row(1), Row(2))
)
checkAnswer(
intData.selectExpr("pmod(-7, b)"),
Seq(Row(2), Row(2))
)
val doubleData = Seq((7.2, 4.1)).toDF("a", "b")
checkAnswer(
doubleData.select(pmod($"a", $"b")),
Seq(Row(3.1000000000000005)) // same as hive
)
checkAnswer(
doubleData.select(pmod(lit(2), lit(Int.MaxValue))),
Seq(Row(2))
)
}
test("array_sort with lambda functions") {
spark.udf.register("fAsc", (x: Int, y: Int) => {
if (x < y) -1
else if (x == y) 0
else 1
})
spark.udf.register("fDesc", (x: Int, y: Int) => {
if (x < y) 1
else if (x == y) 0
else -1
})
spark.udf.register("fString", (x: String, y: String) => {
if (x == null && y == null) 0
else if (x == null) 1
else if (y == null) -1
else if (x < y) 1
else if (x == y) 0
else -1
})
spark.udf.register("fStringLength", (x: String, y: String) => {
if (x == null && y == null) 0
else if (x == null) 1
else if (y == null) -1
else if (x.length < y.length) -1
else if (x.length == y.length) 0
else 1
})
val df1 = Seq(Array[Int](3, 2, 5, 1, 2)).toDF("a")
checkAnswer(
df1.selectExpr("array_sort(a, (x, y) -> fAsc(x, y))"),
Seq(
Row(Seq(1, 2, 2, 3, 5)))
)
checkAnswer(
df1.selectExpr("array_sort(a, (x, y) -> fDesc(x, y))"),
Seq(
Row(Seq(5, 3, 2, 2, 1)))
)
val df2 = Seq(Array[String]("bc", "ab", "dc")).toDF("a")
checkAnswer(
df2.selectExpr("array_sort(a, (x, y) -> fString(x, y))"),
Seq(
Row(Seq("dc", "bc", "ab")))
)
val df3 = Seq(Array[String]("a", "abcd", "abc")).toDF("a")
checkAnswer(
df3.selectExpr("array_sort(a, (x, y) -> fStringLength(x, y))"),
Seq(
Row(Seq("a", "abc", "abcd")))
)
val df4 = Seq((Array[Array[Int]](Array(2, 3, 1), Array(4, 2, 1, 4),
Array(1, 2)), "x")).toDF("a", "b")
checkAnswer(
df4.selectExpr("array_sort(a, (x, y) -> fAsc(cardinality(x), cardinality(y)))"),
Seq(
Row(Seq[Seq[Int]](Seq(1, 2), Seq(2, 3, 1), Seq(4, 2, 1, 4))))
)
val df5 = Seq(Array[String]("bc", null, "ab", "dc")).toDF("a")
checkAnswer(
df5.selectExpr("array_sort(a, (x, y) -> fString(x, y))"),
Seq(
Row(Seq("dc", "bc", "ab", null)))
)
spark.sql("drop temporary function fAsc")
spark.sql("drop temporary function fDesc")
spark.sql("drop temporary function fString")
spark.sql("drop temporary function fStringLength")
}
test("sort_array/array_sort functions") {
val df = Seq(
(Array[Int](2, 1, 3), Array("b", "c", "a")),
(Array.empty[Int], Array.empty[String]),
(null, null)
).toDF("a", "b")
checkAnswer(
df.select(sort_array($"a"), sort_array($"b")),
Seq(
Row(Seq(1, 2, 3), Seq("a", "b", "c")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df.select(sort_array($"a", false), sort_array($"b", false)),
Seq(
Row(Seq(3, 2, 1), Seq("c", "b", "a")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df.selectExpr("sort_array(a)", "sort_array(b)"),
Seq(
Row(Seq(1, 2, 3), Seq("a", "b", "c")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df.selectExpr("sort_array(a, true)", "sort_array(b, false)"),
Seq(
Row(Seq(1, 2, 3), Seq("c", "b", "a")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
val df2 = Seq((Array[Array[Int]](Array(2), Array(1), Array(2, 4), null), "x")).toDF("a", "b")
checkAnswer(
df2.selectExpr("sort_array(a, true)", "sort_array(a, false)"),
Seq(
Row(
Seq[Seq[Int]](null, Seq(1), Seq(2), Seq(2, 4)),
Seq[Seq[Int]](Seq(2, 4), Seq(2), Seq(1), null)))
)
val df3 = Seq(("xxx", "x")).toDF("a", "b")
assert(intercept[AnalysisException] {
df3.selectExpr("sort_array(a)").collect()
}.getMessage().contains("only supports array input"))
checkAnswer(
df.select(array_sort($"a"), array_sort($"b")),
Seq(
Row(Seq(1, 2, 3), Seq("a", "b", "c")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df.selectExpr("array_sort(a)", "array_sort(b)"),
Seq(
Row(Seq(1, 2, 3), Seq("a", "b", "c")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df2.selectExpr("array_sort(a)"),
Seq(Row(Seq[Seq[Int]](Seq(1), Seq(2), Seq(2, 4), null)))
)
assert(intercept[AnalysisException] {
df3.selectExpr("array_sort(a)").collect()
}.getMessage().contains("argument 1 requires array type, however, '`a`' is of string type"))
}
def testSizeOfArray(sizeOfNull: Any): Unit = {
val df = Seq(
(Seq[Int](1, 2), "x"),
(Seq[Int](), "y"),
(Seq[Int](1, 2, 3), "z"),
(null, "empty")
).toDF("a", "b")
checkAnswer(df.select(size($"a")), Seq(Row(2), Row(0), Row(3), Row(sizeOfNull)))
checkAnswer(df.selectExpr("size(a)"), Seq(Row(2), Row(0), Row(3), Row(sizeOfNull)))
checkAnswer(df.selectExpr("cardinality(a)"), Seq(Row(2L), Row(0L), Row(3L), Row(sizeOfNull)))
}
test("array size function - legacy") {
withSQLConf(SQLConf.LEGACY_SIZE_OF_NULL.key -> "true") {
testSizeOfArray(sizeOfNull = -1)
}
}
test("array size function") {
withSQLConf(SQLConf.LEGACY_SIZE_OF_NULL.key -> "false") {
testSizeOfArray(sizeOfNull = null)
}
// size(null) should return null under ansi mode.
withSQLConf(
SQLConf.LEGACY_SIZE_OF_NULL.key -> "true",
SQLConf.ANSI_ENABLED.key -> "true") {
testSizeOfArray(sizeOfNull = null)
}
}
test("dataframe arrays_zip function") {
val df1 = Seq((Seq(9001, 9002, 9003), Seq(4, 5, 6))).toDF("val1", "val2")
val df2 = Seq((Seq("a", "b"), Seq(true, false), Seq(10, 11))).toDF("val1", "val2", "val3")
val df3 = Seq((Seq("a", "b"), Seq(4, 5, 6))).toDF("val1", "val2")
val df4 = Seq((Seq("a", "b", null), Seq(4L))).toDF("val1", "val2")
val df5 = Seq((Seq(-1), Seq(null), Seq(), Seq(null, null))).toDF("val1", "val2", "val3", "val4")
val df6 = Seq((Seq(192.toByte, 256.toByte), Seq(1.1), Seq(), Seq(null, null)))
.toDF("v1", "v2", "v3", "v4")
val df7 = Seq((Seq(Seq(1, 2, 3), Seq(4, 5)), Seq(1.1, 2.2))).toDF("v1", "v2")
val df8 = Seq((Seq(Array[Byte](1.toByte, 5.toByte)), Seq(null))).toDF("v1", "v2")
val expectedValue1 = Row(Seq(Row(9001, 4), Row(9002, 5), Row(9003, 6)))
checkAnswer(df1.select(arrays_zip($"val1", $"val2")), expectedValue1)
checkAnswer(df1.selectExpr("arrays_zip(val1, val2)"), expectedValue1)
val expectedValue2 = Row(Seq(Row("a", true, 10), Row("b", false, 11)))
checkAnswer(df2.select(arrays_zip($"val1", $"val2", $"val3")), expectedValue2)
checkAnswer(df2.selectExpr("arrays_zip(val1, val2, val3)"), expectedValue2)
val expectedValue3 = Row(Seq(Row("a", 4), Row("b", 5), Row(null, 6)))
checkAnswer(df3.select(arrays_zip($"val1", $"val2")), expectedValue3)
checkAnswer(df3.selectExpr("arrays_zip(val1, val2)"), expectedValue3)
val expectedValue4 = Row(Seq(Row("a", 4L), Row("b", null), Row(null, null)))
checkAnswer(df4.select(arrays_zip($"val1", $"val2")), expectedValue4)
checkAnswer(df4.selectExpr("arrays_zip(val1, val2)"), expectedValue4)
val expectedValue5 = Row(Seq(Row(-1, null, null, null), Row(null, null, null, null)))
checkAnswer(df5.select(arrays_zip($"val1", $"val2", $"val3", $"val4")), expectedValue5)
checkAnswer(df5.selectExpr("arrays_zip(val1, val2, val3, val4)"), expectedValue5)
val expectedValue6 = Row(Seq(
Row(192.toByte, 1.1, null, null), Row(256.toByte, null, null, null)))
checkAnswer(df6.select(arrays_zip($"v1", $"v2", $"v3", $"v4")), expectedValue6)
checkAnswer(df6.selectExpr("arrays_zip(v1, v2, v3, v4)"), expectedValue6)
val expectedValue7 = Row(Seq(
Row(Seq(1, 2, 3), 1.1), Row(Seq(4, 5), 2.2)))
checkAnswer(df7.select(arrays_zip($"v1", $"v2")), expectedValue7)
checkAnswer(df7.selectExpr("arrays_zip(v1, v2)"), expectedValue7)
val expectedValue8 = Row(Seq(
Row(Array[Byte](1.toByte, 5.toByte), null)))
checkAnswer(df8.select(arrays_zip($"v1", $"v2")), expectedValue8)
checkAnswer(df8.selectExpr("arrays_zip(v1, v2)"), expectedValue8)
}
testWithWholeStageCodegenOnAndOff("SPARK-24633: arrays_zip splits input " +
"processing correctly") { _ =>
val df = spark.range(1)
val exprs = (0 to 5).map(x => array($"id" + lit(x)))
checkAnswer(df.select(arrays_zip(exprs: _*)),
Row(Seq(Row(0, 1, 2, 3, 4, 5))))
}
def testSizeOfMap(sizeOfNull: Any): Unit = {
val df = Seq(
(Map[Int, Int](1 -> 1, 2 -> 2), "x"),
(Map[Int, Int](), "y"),
(Map[Int, Int](1 -> 1, 2 -> 2, 3 -> 3), "z"),
(null, "empty")
).toDF("a", "b")
checkAnswer(df.select(size($"a")), Seq(Row(2), Row(0), Row(3), Row(sizeOfNull)))
checkAnswer(df.selectExpr("size(a)"), Seq(Row(2), Row(0), Row(3), Row(sizeOfNull)))
}
test("map size function - legacy") {
withSQLConf(SQLConf.LEGACY_SIZE_OF_NULL.key -> "true") {
testSizeOfMap(sizeOfNull = -1: Int)
}
}
test("map size function") {
withSQLConf(SQLConf.LEGACY_SIZE_OF_NULL.key -> "false") {
testSizeOfMap(sizeOfNull = null)
}
// size(null) should return null under ansi mode.
withSQLConf(
SQLConf.LEGACY_SIZE_OF_NULL.key -> "true",
SQLConf.ANSI_ENABLED.key -> "true") {
testSizeOfMap(sizeOfNull = null)
}
}
test("map_keys/map_values function") {
val df = Seq(
(Map[Int, Int](1 -> 100, 2 -> 200), "x"),
(Map[Int, Int](), "y"),
(Map[Int, Int](1 -> 100, 2 -> 200, 3 -> 300), "z")
).toDF("a", "b")
checkAnswer(
df.selectExpr("map_keys(a)"),
Seq(Row(Seq(1, 2)), Row(Seq.empty), Row(Seq(1, 2, 3)))
)
checkAnswer(
df.selectExpr("map_values(a)"),
Seq(Row(Seq(100, 200)), Row(Seq.empty), Row(Seq(100, 200, 300)))
)
}
test("map_entries") {
// Primitive-type elements
val idf = Seq(
Map[Int, Int](1 -> 100, 2 -> 200, 3 -> 300),
Map[Int, Int](),
null
).toDF("m")
val iExpected = Seq(
Row(Seq(Row(1, 100), Row(2, 200), Row(3, 300))),
Row(Seq.empty),
Row(null)
)
def testPrimitiveType(): Unit = {
checkAnswer(idf.select(map_entries($"m")), iExpected)
checkAnswer(idf.selectExpr("map_entries(m)"), iExpected)
checkAnswer(idf.selectExpr("map_entries(map(1, null, 2, null))"),
Seq.fill(iExpected.length)(Row(Seq(Row(1, null), Row(2, null)))))
}
// Test with local relation, the Project will be evaluated without codegen
testPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
idf.cache()
testPrimitiveType()
// Non-primitive-type elements
val sdf = Seq(
Map[String, String]("a" -> "f", "b" -> "o", "c" -> "o"),
Map[String, String]("a" -> null, "b" -> null),
Map[String, String](),
null
).toDF("m")
val sExpected = Seq(
Row(Seq(Row("a", "f"), Row("b", "o"), Row("c", "o"))),
Row(Seq(Row("a", null), Row("b", null))),
Row(Seq.empty),
Row(null)
)
def testNonPrimitiveType(): Unit = {
checkAnswer(sdf.select(map_entries($"m")), sExpected)
checkAnswer(sdf.selectExpr("map_entries(m)"), sExpected)
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
sdf.cache()
testNonPrimitiveType()
}
test("map_concat function") {
val df1 = Seq(
(Map[Int, Int](1 -> 100, 2 -> 200), Map[Int, Int](3 -> 300, 4 -> 400)),
(Map[Int, Int](1 -> 100, 2 -> 200), Map[Int, Int](3 -> 300, 1 -> 400)),
(null, Map[Int, Int](3 -> 300, 4 -> 400))
).toDF("map1", "map2")
val expected1a = Seq(
Row(Map(1 -> 100, 2 -> 200, 3 -> 300, 4 -> 400)),
Row(Map(1 -> 400, 2 -> 200, 3 -> 300)),
Row(null)
)
intercept[SparkException](df1.selectExpr("map_concat(map1, map2)").collect())
intercept[SparkException](df1.select(map_concat($"map1", $"map2")).collect())
withSQLConf(SQLConf.MAP_KEY_DEDUP_POLICY.key -> SQLConf.MapKeyDedupPolicy.LAST_WIN.toString) {
checkAnswer(df1.selectExpr("map_concat(map1, map2)"), expected1a)
checkAnswer(df1.select(map_concat($"map1", $"map2")), expected1a)
}
val expected1b = Seq(
Row(Map(1 -> 100, 2 -> 200)),
Row(Map(1 -> 100, 2 -> 200)),
Row(null)
)
checkAnswer(df1.selectExpr("map_concat(map1)"), expected1b)
checkAnswer(df1.select(map_concat($"map1")), expected1b)
val df2 = Seq(
(
Map[Array[Int], Int](Array(1) -> 100, Array(2) -> 200),
Map[String, Int]("3" -> 300, "4" -> 400)
)
).toDF("map1", "map2")
val expected2 = Seq(Row(Map()))
checkAnswer(df2.selectExpr("map_concat()"), expected2)
checkAnswer(df2.select(map_concat()), expected2)
val df3 = {
val schema = StructType(
StructField("map1", MapType(StringType, IntegerType, true), false) ::
StructField("map2", MapType(StringType, IntegerType, false), false) :: Nil
)
val data = Seq(
Row(Map[String, Any]("a" -> 1, "b" -> null), Map[String, Any]("c" -> 3, "d" -> 4)),
Row(Map[String, Any]("a" -> 1, "b" -> 2), Map[String, Any]("c" -> 3, "d" -> 4))
)
spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
}
val expected3 = Seq(
Row(Map[String, Any]("a" -> 1, "b" -> null, "c" -> 3, "d" -> 4)),
Row(Map[String, Any]("a" -> 1, "b" -> 2, "c" -> 3, "d" -> 4))
)
checkAnswer(df3.selectExpr("map_concat(map1, map2)"), expected3)
checkAnswer(df3.select(map_concat($"map1", $"map2")), expected3)
val expectedMessage1 = "input to function map_concat should all be the same type"
assert(intercept[AnalysisException] {
df2.selectExpr("map_concat(map1, map2)").collect()
}.getMessage().contains(expectedMessage1))
assert(intercept[AnalysisException] {
df2.select(map_concat($"map1", $"map2")).collect()
}.getMessage().contains(expectedMessage1))
val expectedMessage2 = "input to function map_concat should all be of type map"
assert(intercept[AnalysisException] {
df2.selectExpr("map_concat(map1, 12)").collect()
}.getMessage().contains(expectedMessage2))
assert(intercept[AnalysisException] {
df2.select(map_concat($"map1", lit(12))).collect()
}.getMessage().contains(expectedMessage2))
}
test("map_from_entries function") {
// Test cases with primitive-type keys and values
val idf = Seq(
Seq((1, 10), (2, 20), (3, 10)),
Seq((1, 10), null, (2, 20)),
Seq.empty,
null
).toDF("a")
val iExpected = Seq(
Row(Map(1 -> 10, 2 -> 20, 3 -> 10)),
Row(null),
Row(Map.empty),
Row(null))
def testPrimitiveType(): Unit = {
checkAnswer(idf.select(map_from_entries($"a")), iExpected)
checkAnswer(idf.selectExpr("map_from_entries(a)"), iExpected)
checkAnswer(idf.selectExpr("map_from_entries(array(struct(1, null), struct(2, null)))"),
Seq.fill(iExpected.length)(Row(Map(1 -> null, 2 -> null))))
}
// Test with local relation, the Project will be evaluated without codegen
testPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
idf.cache()
testPrimitiveType()
// Test cases with non-primitive-type keys and values
val sdf = Seq(
Seq(("a", "aa"), ("b", "bb"), ("c", "aa")),
Seq(("a", "aa"), null, ("b", "bb")),
Seq(("a", null), ("b", null)),
Seq.empty,
null
).toDF("a")
val sExpected = Seq(
Row(Map("a" -> "aa", "b" -> "bb", "c" -> "aa")),
Row(null),
Row(Map("a" -> null, "b" -> null)),
Row(Map.empty),
Row(null))
def testNonPrimitiveType(): Unit = {
checkAnswer(sdf.select(map_from_entries($"a")), sExpected)
checkAnswer(sdf.selectExpr("map_from_entries(a)"), sExpected)
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
sdf.cache()
testNonPrimitiveType()
}
test("array contains function") {
val df = Seq(
(Seq[Int](1, 2), "x", 1),
(Seq[Int](), "x", 1)
).toDF("a", "b", "c")
// Simple test cases
checkAnswer(
df.select(array_contains(df("a"), 1)),
Seq(Row(true), Row(false))
)
checkAnswer(
df.selectExpr("array_contains(a, 1)"),
Seq(Row(true), Row(false))
)
checkAnswer(
df.select(array_contains(df("a"), df("c"))),
Seq(Row(true), Row(false))
)
checkAnswer(
df.selectExpr("array_contains(a, c)"),
Seq(Row(true), Row(false))
)
    // In Hive, this errors because null has no type information
intercept[AnalysisException] {
df.select(array_contains(df("a"), null))
}
intercept[AnalysisException] {
df.selectExpr("array_contains(a, null)")
}
intercept[AnalysisException] {
df.selectExpr("array_contains(null, 1)")
}
checkAnswer(
df.selectExpr("array_contains(array(array(1), null)[0], 1)"),
Seq(Row(true), Row(true))
)
checkAnswer(
df.selectExpr("array_contains(array(1, null), array(1, null)[0])"),
Seq(Row(true), Row(true))
)
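    // Numeric element and search value types are coerced to a common type before comparison,
    // so an int array can be searched with a double (1 matches 1.0D but not 1.23D).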
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(1), 1.23D)"),
Seq(Row(false))
)
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(1), 1.0D)"),
Seq(Row(true))
)
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(1.0D), 1)"),
Seq(Row(true))
)
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(1.23D), 1)"),
Seq(Row(false))
)
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(array(1)), array(1.0D))"),
Seq(Row(true))
)
checkAnswer(
OneRowRelation().selectExpr("array_contains(array(array(1)), array(1.23D))"),
Seq(Row(false))
)
val e1 = intercept[AnalysisException] {
OneRowRelation().selectExpr("array_contains(array(1), .01234567890123456790123456780)")
}
val errorMsg1 =
s"""
|Input to function array_contains should have been array followed by a
|value with same element type, but it's [array<int>, decimal(38,29)].
""".stripMargin.replace("\\n", " ").trim()
assert(e1.message.contains(errorMsg1))
val e2 = intercept[AnalysisException] {
OneRowRelation().selectExpr("array_contains(array(1), 'foo')")
}
val errorMsg2 =
s"""
|Input to function array_contains should have been array followed by a
|value with same element type, but it's [array<int>, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e2.message.contains(errorMsg2))
}
test("SPARK-29600: ArrayContains function may return incorrect result for DecimalType") {
checkAnswer(
sql("select array_contains(array(1.10), 1.1)"),
Seq(Row(true))
)
checkAnswer(
sql("SELECT array_contains(array(1.1), 1.10)"),
Seq(Row(true))
)
checkAnswer(
sql("SELECT array_contains(array(1.11), 1.1)"),
Seq(Row(false))
)
}
test("arrays_overlap function") {
val df = Seq(
(Seq[Option[Int]](Some(1), Some(2)), Seq[Option[Int]](Some(-1), Some(10))),
(Seq[Option[Int]](Some(1), Some(2)), Seq[Option[Int]](Some(-1), None)),
(Seq[Option[Int]](Some(3), Some(2)), Seq[Option[Int]](Some(1), Some(2)))
).toDF("a", "b")
val answer = Seq(Row(false), Row(null), Row(true))
checkAnswer(df.select(arrays_overlap(df("a"), df("b"))), answer)
checkAnswer(df.selectExpr("arrays_overlap(a, b)"), answer)
checkAnswer(
Seq((Seq(1, 2, 3), Seq(2.0, 2.5))).toDF("a", "b").selectExpr("arrays_overlap(a, b)"),
Row(true))
intercept[AnalysisException] {
sql("select arrays_overlap(array(1, 2, 3), array('a', 'b', 'c'))")
}
intercept[AnalysisException] {
sql("select arrays_overlap(null, null)")
}
intercept[AnalysisException] {
sql("select arrays_overlap(map(1, 2), map(3, 4))")
}
}
test("slice function") {
val df = Seq(
Seq(1, 2, 3),
Seq(4, 5)
).toDF("x")
val answer = Seq(Row(Seq(2, 3)), Row(Seq(5)))
checkAnswer(df.select(slice(df("x"), 2, 2)), answer)
checkAnswer(df.selectExpr("slice(x, 2, 2)"), answer)
val answerNegative = Seq(Row(Seq(3)), Row(Seq(5)))
checkAnswer(df.select(slice(df("x"), -1, 1)), answerNegative)
checkAnswer(df.selectExpr("slice(x, -1, 1)"), answerNegative)
}
test("array_join function") {
val df = Seq(
(Seq[String]("a", "b"), ","),
(Seq[String]("a", null, "b"), ","),
(Seq.empty[String], ",")
).toDF("x", "delimiter")
checkAnswer(
df.select(array_join(df("x"), ";")),
Seq(Row("a;b"), Row("a;b"), Row(""))
)
checkAnswer(
df.select(array_join(df("x"), ";", "NULL")),
Seq(Row("a;b"), Row("a;NULL;b"), Row(""))
)
checkAnswer(
df.selectExpr("array_join(x, delimiter)"),
Seq(Row("a,b"), Row("a,b"), Row("")))
checkAnswer(
df.selectExpr("array_join(x, delimiter, 'NULL')"),
Seq(Row("a,b"), Row("a,NULL,b"), Row("")))
val idf = Seq(Seq(1, 2, 3)).toDF("x")
checkAnswer(
idf.select(array_join(idf("x"), ", ")),
Seq(Row("1, 2, 3"))
)
checkAnswer(
idf.selectExpr("array_join(x, ', ')"),
Seq(Row("1, 2, 3"))
)
intercept[AnalysisException] {
idf.selectExpr("array_join(x, 1)")
}
intercept[AnalysisException] {
idf.selectExpr("array_join(x, ', ', 1)")
}
}
test("array_min function") {
val df = Seq(
Seq[Option[Int]](Some(1), Some(3), Some(2)),
Seq.empty[Option[Int]],
Seq[Option[Int]](None),
Seq[Option[Int]](None, Some(1), Some(-100))
).toDF("a")
val answer = Seq(Row(1), Row(null), Row(null), Row(-100))
checkAnswer(df.select(array_min(df("a"))), answer)
checkAnswer(df.selectExpr("array_min(a)"), answer)
}
test("array_max function") {
val df = Seq(
Seq[Option[Int]](Some(1), Some(3), Some(2)),
Seq.empty[Option[Int]],
Seq[Option[Int]](None),
Seq[Option[Int]](None, Some(1), Some(-100))
).toDF("a")
val answer = Seq(Row(3), Row(null), Row(null), Row(1))
checkAnswer(df.select(array_max(df("a"))), answer)
checkAnswer(df.selectExpr("array_max(a)"), answer)
}
test("sequence") {
checkAnswer(Seq((-2, 2)).toDF().select(sequence($"_1", $"_2")),
Seq(Row(Array(-2, -1, 0, 1, 2))))
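    // A negative step yields a descending sequence.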
checkAnswer(Seq((7, 2, -2)).toDF().select(sequence($"_1", $"_2", $"_3")),
Seq(Row(Array(7, 5, 3))))
checkAnswer(
spark.sql("select sequence(" +
" cast('2018-01-01 00:00:00' as timestamp)" +
", cast('2018-01-02 00:00:00' as timestamp)" +
", interval 12 hours)"),
Seq(Row(Array(
Timestamp.valueOf("2018-01-01 00:00:00"),
Timestamp.valueOf("2018-01-01 12:00:00"),
Timestamp.valueOf("2018-01-02 00:00:00")))))
withDefaultTimeZone(UTC) {
checkAnswer(
spark.sql("select sequence(" +
" cast('2018-01-01' as date)" +
", cast('2018-03-01' as date)" +
", interval 1 month)"),
Seq(Row(Array(
Date.valueOf("2018-01-01"),
Date.valueOf("2018-02-01"),
Date.valueOf("2018-03-01")))))
}
// test type coercion
checkAnswer(
Seq((1.toByte, 3L, 1)).toDF().select(sequence($"_1", $"_2", $"_3")),
Seq(Row(Array(1L, 2L, 3L))))
checkAnswer(
spark.sql("select sequence(" +
" cast('2018-01-01' as date)" +
", cast('2018-01-02 00:00:00' as timestamp)" +
", interval 12 hours)"),
Seq(Row(Array(
Timestamp.valueOf("2018-01-01 00:00:00"),
Timestamp.valueOf("2018-01-01 12:00:00"),
Timestamp.valueOf("2018-01-02 00:00:00")))))
// test invalid data types
intercept[AnalysisException] {
Seq((true, false)).toDF().selectExpr("sequence(_1, _2)")
}
intercept[AnalysisException] {
Seq((true, false, 42)).toDF().selectExpr("sequence(_1, _2, _3)")
}
intercept[AnalysisException] {
Seq((1, 2, 0.5)).toDF().selectExpr("sequence(_1, _2, _3)")
}
}
test("reverse function - string") {
val oneRowDF = Seq(("Spark", 3215)).toDF("s", "i")
def testString(): Unit = {
checkAnswer(oneRowDF.select(reverse($"s")), Seq(Row("krapS")))
checkAnswer(oneRowDF.selectExpr("reverse(s)"), Seq(Row("krapS")))
checkAnswer(oneRowDF.select(reverse($"i")), Seq(Row("5123")))
checkAnswer(oneRowDF.selectExpr("reverse(i)"), Seq(Row("5123")))
checkAnswer(oneRowDF.selectExpr("reverse(null)"), Seq(Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testString()
// Test with cached relation, the Project will be evaluated with codegen
oneRowDF.cache()
testString()
}
test("reverse function - array for primitive type not containing null") {
val idfNotContainsNull = Seq(
Seq(1, 9, 8, 7),
Seq(5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(
idfNotContainsNull.select(reverse($"i")),
Seq(Row(Seq(7, 8, 9, 1)), Row(Seq(2, 7, 9, 8, 5)), Row(Seq.empty), Row(null))
)
checkAnswer(
idfNotContainsNull.selectExpr("reverse(i)"),
Seq(Row(Seq(7, 8, 9, 1)), Row(Seq(2, 7, 9, 8, 5)), Row(Seq.empty), Row(null))
)
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
idfNotContainsNull.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("reverse function - array for primitive type containing null") {
val idfContainsNull = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(null, 5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(
idfContainsNull.select(reverse($"i")),
Seq(Row(Seq(7, null, 8, 9, 1)), Row(Seq(2, 7, 9, 8, 5, null)), Row(Seq.empty), Row(null))
)
checkAnswer(
idfContainsNull.selectExpr("reverse(i)"),
Seq(Row(Seq(7, null, 8, 9, 1)), Row(Seq(2, 7, 9, 8, 5, null)), Row(Seq.empty), Row(null))
)
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
idfContainsNull.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("reverse function - array for non-primitive type") {
val sdf = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testArrayOfNonPrimitiveType(): Unit = {
checkAnswer(
sdf.select(reverse($"s")),
Seq(Row(Seq("b", "a", "c")), Row(Seq(null, "c", null, "b")), Row(Seq.empty), Row(null))
)
checkAnswer(
sdf.selectExpr("reverse(s)"),
Seq(Row(Seq("b", "a", "c")), Row(Seq(null, "c", null, "b")), Row(Seq.empty), Row(null))
)
checkAnswer(
sdf.selectExpr("reverse(array(array(1, 2), array(3, 4)))"),
Seq.fill(sdf.count().toInt)(Row(Seq(Seq(3, 4), Seq(1, 2))))
)
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
sdf.cache()
testArrayOfNonPrimitiveType()
}
test("reverse function - data type mismatch") {
val ex1 = intercept[AnalysisException] {
sql("select reverse(struct(1, 'a'))")
}
assert(ex1.getMessage.contains("data type mismatch"))
val ex2 = intercept[AnalysisException] {
sql("select reverse(map(1, 'a'))")
}
assert(ex2.getMessage.contains("data type mismatch"))
}
test("array position function") {
val df = Seq(
(Seq[Int](1, 2), "x", 1),
(Seq[Int](), "x", 1)
).toDF("a", "b", "c")
checkAnswer(
df.select(array_position(df("a"), 1)),
Seq(Row(1L), Row(0L))
)
checkAnswer(
df.selectExpr("array_position(a, 1)"),
Seq(Row(1L), Row(0L))
)
checkAnswer(
df.selectExpr("array_position(a, c)"),
Seq(Row(1L), Row(0L))
)
checkAnswer(
df.select(array_position(df("a"), df("c"))),
Seq(Row(1L), Row(0L))
)
checkAnswer(
df.select(array_position(df("a"), null)),
Seq(Row(null), Row(null))
)
checkAnswer(
df.selectExpr("array_position(a, null)"),
Seq(Row(null), Row(null))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(1), 1.23D)"),
Seq(Row(0L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(1), 1.0D)"),
Seq(Row(1L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(1.D), 1)"),
Seq(Row(1L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(1.23D), 1)"),
Seq(Row(0L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(array(1)), array(1.0D))"),
Seq(Row(1L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(array(1)), array(1.23D))"),
Seq(Row(0L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(array(1), null)[0], 1)"),
Seq(Row(1L))
)
checkAnswer(
OneRowRelation().selectExpr("array_position(array(1, null), array(1, null)[0])"),
Seq(Row(1L))
)
val e1 = intercept[AnalysisException] {
Seq(("a string element", "a")).toDF().selectExpr("array_position(_1, _2)")
}
val errorMsg1 =
s"""
|Input to function array_position should have been array followed by a
|value with same element type, but it's [string, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e1.message.contains(errorMsg1))
val e2 = intercept[AnalysisException] {
OneRowRelation().selectExpr("array_position(array(1), '1')")
}
val errorMsg2 =
s"""
|Input to function array_position should have been array followed by a
|value with same element type, but it's [array<int>, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e2.message.contains(errorMsg2))
}
test("element_at function") {
val df = Seq(
(Seq[String]("1", "2", "3"), 1),
(Seq[String](null, ""), -1),
(Seq[String](), 2)
).toDF("a", "b")
    assert(intercept[Exception] {
      checkAnswer(
        df.select(element_at(df("a"), 0)),
        Seq(Row(null), Row(null), Row(null))
      )
    }.getMessage.contains("SQL array indices start at 1"))
intercept[Exception] {
checkAnswer(
df.select(element_at(df("a"), 1.1)),
Seq(Row(null), Row(null), Row(null))
)
}
checkAnswer(
df.select(element_at(df("a"), 4)),
Seq(Row(null), Row(null), Row(null))
)
checkAnswer(
df.select(element_at(df("a"), df("b"))),
Seq(Row("1"), Row(""), Row(null))
)
checkAnswer(
df.selectExpr("element_at(a, b)"),
Seq(Row("1"), Row(""), Row(null))
)
checkAnswer(
df.select(element_at(df("a"), 1)),
Seq(Row("1"), Row(null), Row(null))
)
checkAnswer(
df.select(element_at(df("a"), -1)),
Seq(Row("3"), Row(""), Row(null))
)
checkAnswer(
df.selectExpr("element_at(a, 4)"),
Seq(Row(null), Row(null), Row(null))
)
checkAnswer(
df.selectExpr("element_at(a, 1)"),
Seq(Row("1"), Row(null), Row(null))
)
checkAnswer(
df.selectExpr("element_at(a, -1)"),
Seq(Row("3"), Row(""), Row(null))
)
val e1 = intercept[AnalysisException] {
Seq(("a string element", 1)).toDF().selectExpr("element_at(_1, _2)")
}
val errorMsg1 =
s"""
|The first argument to function element_at should have been array or map type, but
|its string type.
""".stripMargin.replace("\\n", " ").trim()
assert(e1.message.contains(errorMsg1))
checkAnswer(
OneRowRelation().selectExpr("element_at(array(2, 1), 2S)"),
Seq(Row(1))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(array('a', 'b'), 1Y)"),
Seq(Row("a"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(array(1, 2, 3), 3)"),
Seq(Row(3))
)
val e2 = intercept[AnalysisException] {
OneRowRelation().selectExpr("element_at(array('a', 'b'), 1L)")
}
val errorMsg2 =
s"""
|Input to function element_at should have been array followed by a int, but it's
|[array<string>, bigint].
""".stripMargin.replace("\\n", " ").trim()
assert(e2.message.contains(errorMsg2))
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 2Y)"),
Seq(Row("b"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 1S)"),
Seq(Row("a"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 2)"),
Seq(Row("b"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 2L)"),
Seq(Row("b"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 1.0D)"),
Seq(Row("a"))
)
checkAnswer(
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), 1.23D)"),
Seq(Row(null))
)
val e3 = intercept[AnalysisException] {
OneRowRelation().selectExpr("element_at(map(1, 'a', 2, 'b'), '1')")
}
val errorMsg3 =
s"""
|Input to function element_at should have been map followed by a value of same
|key type, but it's [map<int,string>, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e3.message.contains(errorMsg3))
}
test("array_union functions") {
val df1 = Seq((Array(1, 2, 3), Array(4, 2))).toDF("a", "b")
val ans1 = Row(Seq(1, 2, 3, 4))
checkAnswer(df1.select(array_union($"a", $"b")), ans1)
checkAnswer(df1.selectExpr("array_union(a, b)"), ans1)
val df2 = Seq((Array[Integer](1, 2, null, 4, 5), Array(-5, 4, -3, 2, -1))).toDF("a", "b")
val ans2 = Row(Seq(1, 2, null, 4, 5, -5, -3, -1))
checkAnswer(df2.select(array_union($"a", $"b")), ans2)
checkAnswer(df2.selectExpr("array_union(a, b)"), ans2)
val df3 = Seq((Array(1L, 2L, 3L), Array(4L, 2L))).toDF("a", "b")
val ans3 = Row(Seq(1L, 2L, 3L, 4L))
checkAnswer(df3.select(array_union($"a", $"b")), ans3)
checkAnswer(df3.selectExpr("array_union(a, b)"), ans3)
val df4 = Seq((Array[java.lang.Long](1L, 2L, null, 4L, 5L), Array(-5L, 4L, -3L, 2L, -1L)))
.toDF("a", "b")
val ans4 = Row(Seq(1L, 2L, null, 4L, 5L, -5L, -3L, -1L))
checkAnswer(df4.select(array_union($"a", $"b")), ans4)
checkAnswer(df4.selectExpr("array_union(a, b)"), ans4)
val df5 = Seq((Array("b", "a", "c"), Array("b", null, "a", "g"))).toDF("a", "b")
val ans5 = Row(Seq("b", "a", "c", null, "g"))
checkAnswer(df5.select(array_union($"a", $"b")), ans5)
checkAnswer(df5.selectExpr("array_union(a, b)"), ans5)
val df6 = Seq((null, Array("a"))).toDF("a", "b")
assert(intercept[AnalysisException] {
df6.select(array_union($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df6.selectExpr("array_union(a, b)")
}.getMessage.contains("data type mismatch"))
val df7 = Seq((null, null)).toDF("a", "b")
assert(intercept[AnalysisException] {
df7.select(array_union($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df7.selectExpr("array_union(a, b)")
}.getMessage.contains("data type mismatch"))
val df8 = Seq((Array(Array(1)), Array("a"))).toDF("a", "b")
assert(intercept[AnalysisException] {
df8.select(array_union($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df8.selectExpr("array_union(a, b)")
}.getMessage.contains("data type mismatch"))
}
test("concat function - arrays") {
val nseqi : Seq[Int] = null
val nseqs : Seq[String] = null
val df = Seq(
(Seq(1), Seq(2, 3), Seq(5L, 6L), nseqi, Seq("a", "b", "c"), Seq("d", "e"), Seq("f"), nseqs),
(Seq(1, 0), Seq.empty[Int], Seq(2L), nseqi, Seq("a"), Seq.empty[String], Seq(null), nseqs)
).toDF("i1", "i2", "i3", "in", "s1", "s2", "s3", "sn")
// Simple test cases
def simpleTest(): Unit = {
checkAnswer (
df.select(concat($"i1", $"s1")),
Seq(Row(Seq("1", "a", "b", "c")), Row(Seq("1", "0", "a")))
)
checkAnswer(
df.select(concat($"i1", $"i2", $"i3")),
Seq(Row(Seq(1, 2, 3, 5, 6)), Row(Seq(1, 0, 2)))
)
checkAnswer(
df.selectExpr("concat(array(1, null), i2, i3)"),
Seq(Row(Seq(1, null, 2, 3, 5, 6)), Row(Seq(1, null, 2)))
)
checkAnswer(
df.select(concat($"s1", $"s2", $"s3")),
Seq(Row(Seq("a", "b", "c", "d", "e", "f")), Row(Seq("a", null)))
)
checkAnswer(
df.selectExpr("concat(s1, s2, s3)"),
Seq(Row(Seq("a", "b", "c", "d", "e", "f")), Row(Seq("a", null)))
)
}
// Test with local relation, the Project will be evaluated without codegen
simpleTest()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
simpleTest()
// Null test cases
def nullTest(): Unit = {
checkAnswer(
df.select(concat($"i1", $"in")),
Seq(Row(null), Row(null))
)
checkAnswer(
df.select(concat($"in", $"i1")),
Seq(Row(null), Row(null))
)
checkAnswer(
df.select(concat($"s1", $"sn")),
Seq(Row(null), Row(null))
)
checkAnswer(
df.select(concat($"sn", $"s1")),
Seq(Row(null), Row(null))
)
}
// Test with local relation, the Project will be evaluated without codegen
df.unpersist(blocking = true)
nullTest()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
nullTest()
// Type error test cases
intercept[AnalysisException] {
df.selectExpr("concat(i1, i2, null)")
}
intercept[AnalysisException] {
df.selectExpr("concat(i1, array(i1, i2))")
}
val e = intercept[AnalysisException] {
df.selectExpr("concat(map(1, 2), map(3, 4))")
}
assert(e.getMessage.contains("string, binary or array"))
}
test("SPARK-31227: Non-nullable null type should not coerce to nullable type in concat") {
val actual = spark.range(1).selectExpr("concat(array(), array(1)) as arr")
val expected = spark.range(1).selectExpr("array(1) as arr")
checkAnswer(actual, expected)
assert(actual.schema === expected.schema)
}
test("flatten function") {
// Test cases with a primitive type
val intDF = Seq(
(Seq(Seq(1, 2, 3), Seq(4, 5), Seq(6))),
(Seq(Seq(1, 2))),
(Seq(Seq(1), Seq.empty)),
(Seq(Seq.empty, Seq(1))),
(Seq(Seq.empty, Seq.empty)),
(Seq(Seq(1), null)),
(Seq(null, Seq(1))),
(Seq(null, null))
).toDF("i")
val intDFResult = Seq(
Row(Seq(1, 2, 3, 4, 5, 6)),
Row(Seq(1, 2)),
Row(Seq(1)),
Row(Seq(1)),
Row(Seq.empty),
Row(null),
Row(null),
Row(null))
def testInt(): Unit = {
checkAnswer(intDF.select(flatten($"i")), intDFResult)
checkAnswer(intDF.selectExpr("flatten(i)"), intDFResult)
}
// Test with local relation, the Project will be evaluated without codegen
testInt()
// Test with cached relation, the Project will be evaluated with codegen
intDF.cache()
testInt()
// Test cases with non-primitive types
val strDF = Seq(
(Seq(Seq("a", "b"), Seq("c"), Seq("d", "e", "f"))),
(Seq(Seq("a", "b"))),
(Seq(Seq("a", null), Seq(null, "b"), Seq(null, null))),
(Seq(Seq("a"), Seq.empty)),
(Seq(Seq.empty, Seq("a"))),
(Seq(Seq.empty, Seq.empty)),
(Seq(Seq("a"), null)),
(Seq(null, Seq("a"))),
(Seq(null, null))
).toDF("s")
val strDFResult = Seq(
Row(Seq("a", "b", "c", "d", "e", "f")),
Row(Seq("a", "b")),
Row(Seq("a", null, null, "b", null, null)),
Row(Seq("a")),
Row(Seq("a")),
Row(Seq.empty),
Row(null),
Row(null),
Row(null))
def testString(): Unit = {
checkAnswer(strDF.select(flatten($"s")), strDFResult)
checkAnswer(strDF.selectExpr("flatten(s)"), strDFResult)
}
// Test with local relation, the Project will be evaluated without codegen
testString()
// Test with cached relation, the Project will be evaluated with codegen
strDF.cache()
testString()
val arrDF = Seq((1, "a", Seq(1, 2, 3))).toDF("i", "s", "arr")
def testArray(): Unit = {
checkAnswer(
arrDF.selectExpr("flatten(array(arr, array(null, 5), array(6, null)))"),
Seq(Row(Seq(1, 2, 3, null, 5, 6, null))))
checkAnswer(
arrDF.selectExpr("flatten(array(array(arr, arr), array(arr)))"),
Seq(Row(Seq(Seq(1, 2, 3), Seq(1, 2, 3), Seq(1, 2, 3)))))
}
// Test with local relation, the Project will be evaluated without codegen
testArray()
// Test with cached relation, the Project will be evaluated with codegen
arrDF.cache()
testArray()
// Error test cases
val oneRowDF = Seq((1, "a", Seq(1, 2, 3))).toDF("i", "s", "arr")
intercept[AnalysisException] {
oneRowDF.select(flatten($"arr"))
}
intercept[AnalysisException] {
oneRowDF.select(flatten($"i"))
}
intercept[AnalysisException] {
oneRowDF.select(flatten($"s"))
}
intercept[AnalysisException] {
oneRowDF.selectExpr("flatten(null)")
}
}
test("array_repeat function") {
val strDF = Seq(
("hi", 2),
(null, 2)
).toDF("a", "b")
val strDFTwiceResult = Seq(
Row(Seq("hi", "hi")),
Row(Seq(null, null))
)
def testString(): Unit = {
checkAnswer(strDF.select(array_repeat($"a", 2)), strDFTwiceResult)
checkAnswer(strDF.select(array_repeat($"a", $"b")), strDFTwiceResult)
checkAnswer(strDF.selectExpr("array_repeat(a, 2)"), strDFTwiceResult)
checkAnswer(strDF.selectExpr("array_repeat(a, b)"), strDFTwiceResult)
}
// Test with local relation, the Project will be evaluated without codegen
testString()
// Test with cached relation, the Project will be evaluated with codegen
strDF.cache()
testString()
val intDF = {
val schema = StructType(Seq(
StructField("a", IntegerType),
StructField("b", IntegerType)))
val data = Seq(
Row(3, 2),
Row(null, 2)
)
spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
}
val intDFTwiceResult = Seq(
Row(Seq(3, 3)),
Row(Seq(null, null))
)
def testInt(): Unit = {
checkAnswer(intDF.select(array_repeat($"a", 2)), intDFTwiceResult)
checkAnswer(intDF.select(array_repeat($"a", $"b")), intDFTwiceResult)
checkAnswer(intDF.selectExpr("array_repeat(a, 2)"), intDFTwiceResult)
checkAnswer(intDF.selectExpr("array_repeat(a, b)"), intDFTwiceResult)
}
// Test with local relation, the Project will be evaluated without codegen
testInt()
// Test with cached relation, the Project will be evaluated with codegen
intDF.cache()
testInt()
val nullCountDF = {
val schema = StructType(Seq(
StructField("a", StringType),
StructField("b", IntegerType)))
val data = Seq(
Row("hi", null),
Row(null, null)
)
spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
}
def testNull(): Unit = {
checkAnswer(
nullCountDF.select(array_repeat($"a", $"b")),
Seq(Row(null), Row(null))
)
}
// Test with local relation, the Project will be evaluated without codegen
testNull()
// Test with cached relation, the Project will be evaluated with codegen
nullCountDF.cache()
testNull()
// Error test cases
val invalidTypeDF = Seq(("hi", "1")).toDF("a", "b")
intercept[AnalysisException] {
invalidTypeDF.select(array_repeat($"a", $"b"))
}
intercept[AnalysisException] {
invalidTypeDF.select(array_repeat($"a", lit("1")))
}
intercept[AnalysisException] {
invalidTypeDF.selectExpr("array_repeat(a, 1.0)")
}
}
test("array remove") {
val df = Seq(
(Array[Int](2, 1, 2, 3), Array("a", "b", "c", "a"), Array("", ""), 2),
(Array.empty[Int], Array.empty[String], Array.empty[String], 2),
(null, null, null, 2)
).toDF("a", "b", "c", "d")
checkAnswer(
df.select(array_remove($"a", 2), array_remove($"b", "a"), array_remove($"c", "")),
Seq(
Row(Seq(1, 3), Seq("b", "c"), Seq.empty[String]),
Row(Seq.empty[Int], Seq.empty[String], Seq.empty[String]),
Row(null, null, null))
)
checkAnswer(
df.select(array_remove($"a", $"d")),
Seq(
Row(Seq(1, 3)),
Row(Seq.empty[Int]),
Row(null))
)
checkAnswer(
df.selectExpr("array_remove(a, d)"),
Seq(
Row(Seq(1, 3)),
Row(Seq.empty[Int]),
Row(null))
)
checkAnswer(
OneRowRelation().selectExpr("array_remove(array(1, 2), 1.23D)"),
Seq(
Row(Seq(1.0, 2.0))
)
)
checkAnswer(
OneRowRelation().selectExpr("array_remove(array(1, 2), 1.0D)"),
Seq(
Row(Seq(2.0))
)
)
checkAnswer(
OneRowRelation().selectExpr("array_remove(array(1.0D, 2.0D), 2)"),
Seq(
Row(Seq(1.0))
)
)
checkAnswer(
OneRowRelation().selectExpr("array_remove(array(1.1D, 1.2D), 1)"),
Seq(
Row(Seq(1.1, 1.2))
)
)
checkAnswer(
df.selectExpr("array_remove(a, 2)", "array_remove(b, \\"a\\")",
"array_remove(c, \\"\\")"),
Seq(
Row(Seq(1, 3), Seq("b", "c"), Seq.empty[String]),
Row(Seq.empty[Int], Seq.empty[String], Seq.empty[String]),
Row(null, null, null))
)
val e1 = intercept[AnalysisException] {
Seq(("a string element", "a")).toDF().selectExpr("array_remove(_1, _2)")
}
val errorMsg1 =
s"""
|Input to function array_remove should have been array followed by a
|value with same element type, but it's [string, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e1.message.contains(errorMsg1))
val e2 = intercept[AnalysisException] {
OneRowRelation().selectExpr("array_remove(array(1, 2), '1')")
}
val errorMsg2 =
s"""
|Input to function array_remove should have been array followed by a
|value with same element type, but it's [array<int>, string].
""".stripMargin.replace("\\n", " ").trim()
assert(e2.message.contains(errorMsg2))
}
test("array_distinct functions") {
val df = Seq(
(Array[Int](2, 1, 3, 4, 3, 5), Array("b", "c", "a", "c", "b", "", "")),
(Array.empty[Int], Array.empty[String]),
(null, null)
).toDF("a", "b")
checkAnswer(
df.select(array_distinct($"a"), array_distinct($"b")),
Seq(
Row(Seq(2, 1, 3, 4, 5), Seq("b", "c", "a", "")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
checkAnswer(
df.selectExpr("array_distinct(a)", "array_distinct(b)"),
Seq(
Row(Seq(2, 1, 3, 4, 5), Seq("b", "c", "a", "")),
Row(Seq.empty[Int], Seq.empty[String]),
Row(null, null))
)
}
  // Shuffle expressions should produce the same results on retries within the same DataFrame.
private def checkShuffleResult(df: DataFrame): Unit = {
checkAnswer(df, df.collect())
}
test("shuffle function - array for primitive type not containing null") {
val idfNotContainsNull = Seq(
Seq(1, 9, 8, 7),
Seq(5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkShuffleResult(idfNotContainsNull.select(shuffle($"i")))
checkShuffleResult(idfNotContainsNull.selectExpr("shuffle(i)"))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
idfNotContainsNull.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("shuffle function - array for primitive type containing null") {
val idfContainsNull = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(null, 5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkShuffleResult(idfContainsNull.select(shuffle($"i")))
checkShuffleResult(idfContainsNull.selectExpr("shuffle(i)"))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
idfContainsNull.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("shuffle function - array for non-primitive type") {
val sdf = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testNonPrimitiveType(): Unit = {
checkShuffleResult(sdf.select(shuffle($"s")))
checkShuffleResult(sdf.selectExpr("shuffle(s)"))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
sdf.cache()
testNonPrimitiveType()
}
test("array_except functions") {
val df1 = Seq((Array(1, 2, 4), Array(4, 2))).toDF("a", "b")
val ans1 = Row(Seq(1))
checkAnswer(df1.select(array_except($"a", $"b")), ans1)
checkAnswer(df1.selectExpr("array_except(a, b)"), ans1)
val df2 = Seq((Array[Integer](1, 2, null, 4, 5), Array[Integer](-5, 4, null, 2, -1)))
.toDF("a", "b")
val ans2 = Row(Seq(1, 5))
checkAnswer(df2.select(array_except($"a", $"b")), ans2)
checkAnswer(df2.selectExpr("array_except(a, b)"), ans2)
val df3 = Seq((Array(1L, 2L, 4L), Array(4L, 2L))).toDF("a", "b")
val ans3 = Row(Seq(1L))
checkAnswer(df3.select(array_except($"a", $"b")), ans3)
checkAnswer(df3.selectExpr("array_except(a, b)"), ans3)
val df4 = Seq(
(Array[java.lang.Long](1L, 2L, null, 4L, 5L), Array[java.lang.Long](-5L, 4L, null, 2L, -1L)))
.toDF("a", "b")
val ans4 = Row(Seq(1L, 5L))
checkAnswer(df4.select(array_except($"a", $"b")), ans4)
checkAnswer(df4.selectExpr("array_except(a, b)"), ans4)
val df5 = Seq((Array("c", null, "a", "f"), Array("b", null, "a", "g"))).toDF("a", "b")
val ans5 = Row(Seq("c", "f"))
checkAnswer(df5.select(array_except($"a", $"b")), ans5)
checkAnswer(df5.selectExpr("array_except(a, b)"), ans5)
val df6 = Seq((null, null)).toDF("a", "b")
intercept[AnalysisException] {
df6.select(array_except($"a", $"b"))
}
intercept[AnalysisException] {
df6.selectExpr("array_except(a, b)")
}
val df7 = Seq((Array(1), Array("a"))).toDF("a", "b")
intercept[AnalysisException] {
df7.select(array_except($"a", $"b"))
}
intercept[AnalysisException] {
df7.selectExpr("array_except(a, b)")
}
val df8 = Seq((Array("a"), null)).toDF("a", "b")
intercept[AnalysisException] {
df8.select(array_except($"a", $"b"))
}
intercept[AnalysisException] {
df8.selectExpr("array_except(a, b)")
}
val df9 = Seq((null, Array("a"))).toDF("a", "b")
intercept[AnalysisException] {
df9.select(array_except($"a", $"b"))
}
intercept[AnalysisException] {
df9.selectExpr("array_except(a, b)")
}
val df10 = Seq(
(Array[Integer](1, 2), Array[Integer](2)),
(Array[Integer](1, 2), Array[Integer](1, null)),
(Array[Integer](1, null, 3), Array[Integer](1, 2)),
(Array[Integer](1, null), Array[Integer](2, null))
).toDF("a", "b")
val result10 = df10.select(array_except($"a", $"b"))
val expectedType10 = ArrayType(IntegerType, containsNull = true)
assert(result10.first.schema(0).dataType === expectedType10)
}
test("array_intersect functions") {
val df1 = Seq((Array(1, 2, 4), Array(4, 2))).toDF("a", "b")
val ans1 = Row(Seq(2, 4))
checkAnswer(df1.select(array_intersect($"a", $"b")), ans1)
checkAnswer(df1.selectExpr("array_intersect(a, b)"), ans1)
val df2 = Seq((Array[Integer](1, 2, null, 4, 5), Array[Integer](-5, 4, null, 2, -1)))
.toDF("a", "b")
val ans2 = Row(Seq(2, null, 4))
checkAnswer(df2.select(array_intersect($"a", $"b")), ans2)
checkAnswer(df2.selectExpr("array_intersect(a, b)"), ans2)
val df3 = Seq((Array(1L, 2L, 4L), Array(4L, 2L))).toDF("a", "b")
val ans3 = Row(Seq(2L, 4L))
checkAnswer(df3.select(array_intersect($"a", $"b")), ans3)
checkAnswer(df3.selectExpr("array_intersect(a, b)"), ans3)
val df4 = Seq(
(Array[java.lang.Long](1L, 2L, null, 4L, 5L), Array[java.lang.Long](-5L, 4L, null, 2L, -1L)))
.toDF("a", "b")
val ans4 = Row(Seq(2L, null, 4L))
checkAnswer(df4.select(array_intersect($"a", $"b")), ans4)
checkAnswer(df4.selectExpr("array_intersect(a, b)"), ans4)
val df5 = Seq((Array("c", null, "a", "f"), Array("b", "a", null, "g"))).toDF("a", "b")
val ans5 = Row(Seq(null, "a"))
checkAnswer(df5.select(array_intersect($"a", $"b")), ans5)
checkAnswer(df5.selectExpr("array_intersect(a, b)"), ans5)
val df6 = Seq((null, null)).toDF("a", "b")
assert(intercept[AnalysisException] {
df6.select(array_intersect($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df6.selectExpr("array_intersect(a, b)")
}.getMessage.contains("data type mismatch"))
val df7 = Seq((Array(1), Array("a"))).toDF("a", "b")
assert(intercept[AnalysisException] {
df7.select(array_intersect($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df7.selectExpr("array_intersect(a, b)")
}.getMessage.contains("data type mismatch"))
val df8 = Seq((null, Array("a"))).toDF("a", "b")
assert(intercept[AnalysisException] {
df8.select(array_intersect($"a", $"b"))
}.getMessage.contains("data type mismatch"))
assert(intercept[AnalysisException] {
df8.selectExpr("array_intersect(a, b)")
}.getMessage.contains("data type mismatch"))
}
test("transform function - array for primitive type not containing null") {
val df = Seq(
Seq(1, 9, 8, 7),
Seq(5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(df.selectExpr("transform(i, x -> x + 1)"),
Seq(
Row(Seq(2, 10, 9, 8)),
Row(Seq(6, 9, 10, 8, 3)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.selectExpr("transform(i, (x, i) -> x + i)"),
Seq(
Row(Seq(1, 10, 10, 10)),
Row(Seq(5, 9, 11, 10, 6)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("i"), x => x + 1)),
Seq(
Row(Seq(2, 10, 9, 8)),
Row(Seq(6, 9, 10, 8, 3)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("i"), (x, i) => x + i)),
Seq(
Row(Seq(1, 10, 10, 10)),
Row(Seq(5, 9, 11, 10, 6)),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("transform function - array for primitive type containing null") {
val df = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(5, null, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(df.selectExpr("transform(i, x -> x + 1)"),
Seq(
Row(Seq(2, 10, 9, null, 8)),
Row(Seq(6, null, 9, 10, 8, 3)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.selectExpr("transform(i, (x, i) -> x + i)"),
Seq(
Row(Seq(1, 10, 10, null, 11)),
Row(Seq(5, null, 10, 12, 11, 7)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("i"), x => x + 1)),
Seq(
Row(Seq(2, 10, 9, null, 8)),
Row(Seq(6, null, 9, 10, 8, 3)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("i"), (x, i) => x + i)),
Seq(
Row(Seq(1, 10, 10, null, 11)),
Row(Seq(5, null, 10, 12, 11, 7)),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("transform function - array for non-primitive type") {
val df = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testNonPrimitiveType(): Unit = {
checkAnswer(df.selectExpr("transform(s, x -> concat(x, x))"),
Seq(
Row(Seq("cc", "aa", "bb")),
Row(Seq("bb", null, "cc", null)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.selectExpr("transform(s, (x, i) -> concat(x, i))"),
Seq(
Row(Seq("c0", "a1", "b2")),
Row(Seq("b0", null, "c2", null)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("s"), x => concat(x, x))),
Seq(
Row(Seq("cc", "aa", "bb")),
Row(Seq("bb", null, "cc", null)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("s"), (x, i) => concat(x, i))),
Seq(
Row(Seq("c0", "a1", "b2")),
Row(Seq("b0", null, "c2", null)),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testNonPrimitiveType()
}
test("transform function - special cases") {
val df = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("arg")
def testSpecialCases(): Unit = {
checkAnswer(df.selectExpr("transform(arg, arg -> arg)"),
Seq(
Row(Seq("c", "a", "b")),
Row(Seq("b", null, "c", null)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.selectExpr("transform(arg, arg)"),
Seq(
Row(Seq(Seq("c", "a", "b"), Seq("c", "a", "b"), Seq("c", "a", "b"))),
Row(Seq(
Seq("b", null, "c", null),
Seq("b", null, "c", null),
Seq("b", null, "c", null),
Seq("b", null, "c", null))),
Row(Seq.empty),
Row(null)))
checkAnswer(df.selectExpr("transform(arg, x -> concat(arg, array(x)))"),
Seq(
Row(Seq(Seq("c", "a", "b", "c"), Seq("c", "a", "b", "a"), Seq("c", "a", "b", "b"))),
Row(Seq(
Seq("b", null, "c", null, "b"),
Seq("b", null, "c", null, null),
Seq("b", null, "c", null, "c"),
Seq("b", null, "c", null, null))),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("arg"), arg => arg)),
Seq(
Row(Seq("c", "a", "b")),
Row(Seq("b", null, "c", null)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("arg"), _ => col("arg"))),
Seq(
Row(Seq(Seq("c", "a", "b"), Seq("c", "a", "b"), Seq("c", "a", "b"))),
Row(Seq(
Seq("b", null, "c", null),
Seq("b", null, "c", null),
Seq("b", null, "c", null),
Seq("b", null, "c", null))),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(transform(col("arg"), x => concat(col("arg"), array(x)))),
Seq(
Row(Seq(Seq("c", "a", "b", "c"), Seq("c", "a", "b", "a"), Seq("c", "a", "b", "b"))),
Row(Seq(
Seq("b", null, "c", null, "b"),
Seq("b", null, "c", null, null),
Seq("b", null, "c", null, "c"),
Seq("b", null, "c", null, null))),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testSpecialCases()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testSpecialCases()
}
test("transform function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), 1),
(Seq("b", null, "c", null), 2),
(Seq.empty, 3),
(null, 4)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("transform(s, (x, y, z) -> x + y + z)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '3' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("transform(i, x -> x)")
}
assert(ex2.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("transform(a, x -> x)")
}
assert(ex3.getMessage.contains("cannot resolve '`a`'"))
}
test("map_filter") {
val dfInts = Seq(
Map(1 -> 10, 2 -> 20, 3 -> 30),
Map(1 -> -1, 2 -> -2, 3 -> -3),
Map(1 -> 10, 2 -> 5, 3 -> -3)).toDF("m")
checkAnswer(dfInts.selectExpr(
"map_filter(m, (k, v) -> k * 10 = v)", "map_filter(m, (k, v) -> k = -v)"),
Seq(
Row(Map(1 -> 10, 2 -> 20, 3 -> 30), Map()),
Row(Map(), Map(1 -> -1, 2 -> -2, 3 -> -3)),
Row(Map(1 -> 10), Map(3 -> -3))))
checkAnswer(dfInts.select(
map_filter(col("m"), (k, v) => k * 10 === v),
map_filter(col("m"), (k, v) => k === (v * -1))),
Seq(
Row(Map(1 -> 10, 2 -> 20, 3 -> 30), Map()),
Row(Map(), Map(1 -> -1, 2 -> -2, 3 -> -3)),
Row(Map(1 -> 10), Map(3 -> -3))))
val dfComplex = Seq(
Map(1 -> Seq(Some(1)), 2 -> Seq(Some(1), Some(2)), 3 -> Seq(Some(1), Some(2), Some(3))),
Map(1 -> null, 2 -> Seq(Some(-2), Some(-2)), 3 -> Seq[Option[Int]](None))).toDF("m")
checkAnswer(dfComplex.selectExpr(
"map_filter(m, (k, v) -> k = v[0])", "map_filter(m, (k, v) -> k = size(v))"),
Seq(
Row(Map(1 -> Seq(1)), Map(1 -> Seq(1), 2 -> Seq(1, 2), 3 -> Seq(1, 2, 3))),
Row(Map(), Map(2 -> Seq(-2, -2)))))
checkAnswer(dfComplex.select(
map_filter(col("m"), (k, v) => k === element_at(v, 1)),
map_filter(col("m"), (k, v) => k === size(v))),
Seq(
Row(Map(1 -> Seq(1)), Map(1 -> Seq(1), 2 -> Seq(1, 2), 3 -> Seq(1, 2, 3))),
Row(Map(), Map(2 -> Seq(-2, -2)))))
// Invalid use cases
val df = Seq(
(Map(1 -> "a"), 1),
(Map.empty[Int, String], 2),
(null, 3)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("map_filter(s, (x, y, z) -> x + y + z)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '3' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("map_filter(s, x -> x)")
}
assert(ex2.getMessage.contains("The number of lambda function arguments '1' does not match"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("map_filter(i, (k, v) -> k > v)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 1 requires map type"))
val ex3a = intercept[AnalysisException] {
df.select(map_filter(col("i"), (k, v) => k > v))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 1 requires map type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("map_filter(a, (k, v) -> k > v)")
}
assert(ex4.getMessage.contains("cannot resolve '`a`'"))
}
test("filter function - array for primitive type not containing null") {
val df = Seq(
Seq(1, 9, 8, 7),
Seq(5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(df.selectExpr("filter(i, x -> x % 2 == 0)"),
Seq(
Row(Seq(8)),
Row(Seq(8, 2)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(filter(col("i"), _ % 2 === 0)),
Seq(
Row(Seq(8)),
Row(Seq(8, 2)),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("filter function - array for primitive type containing null") {
val df = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(5, null, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(df.selectExpr("filter(i, x -> x % 2 == 0)"),
Seq(
Row(Seq(8)),
Row(Seq(8, 2)),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(filter(col("i"), _ % 2 === 0)),
Seq(
Row(Seq(8)),
Row(Seq(8, 2)),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("filter function - array for non-primitive type") {
val df = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testNonPrimitiveType(): Unit = {
checkAnswer(df.selectExpr("filter(s, x -> x is not null)"),
Seq(
Row(Seq("c", "a", "b")),
Row(Seq("b", "c")),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(filter(col("s"), x => x.isNotNull)),
Seq(
Row(Seq("c", "a", "b")),
Row(Seq("b", "c")),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testNonPrimitiveType()
}
test("filter function - index argument") {
val df = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testIndexArgument(): Unit = {
checkAnswer(df.selectExpr("filter(s, (x, i) -> i % 2 == 0)"),
Seq(
Row(Seq("c", "b")),
Row(Seq("b", "c")),
Row(Seq.empty),
Row(null)))
checkAnswer(df.select(filter(col("s"), (x, i) => i % 2 === 0)),
Seq(
Row(Seq("c", "b")),
Row(Seq("b", "c")),
Row(Seq.empty),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testIndexArgument()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testIndexArgument()
}
test("filter function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), 1),
(Seq("b", null, "c", null), 2),
(Seq.empty, 3),
(null, 4)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("filter(s, (x, y, z) -> x + y)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '3' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("filter(i, x -> x)")
}
assert(ex2.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex2a = intercept[AnalysisException] {
df.select(filter(col("i"), x => x))
}
assert(ex2a.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("filter(s, x -> x)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex3a = intercept[AnalysisException] {
df.select(filter(col("s"), x => x))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("filter(a, x -> x)")
}
assert(ex4.getMessage.contains("cannot resolve '`a`'"))
}
test("exists function - array for primitive type not containing null") {
val df = Seq(
Seq(1, 9, 8, 7),
Seq(5, 9, 7),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(df.selectExpr("exists(i, x -> x % 2 == 0)"),
Seq(
Row(true),
Row(false),
Row(false),
Row(null)))
checkAnswer(df.select(exists(col("i"), _ % 2 === 0)),
Seq(
Row(true),
Row(false),
Row(false),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("exists function - array for primitive type containing null") {
val df = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(1, 3, 5),
Seq(5, null, null, 9, 7, null),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(df.selectExpr("exists(i, x -> x % 2 == 0)"),
Seq(
Row(true),
Row(false),
Row(null),
Row(false),
Row(null)))
checkAnswer(df.select(exists(col("i"), _ % 2 === 0)),
Seq(
Row(true),
Row(false),
Row(null),
Row(false),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("exists function - array for non-primitive type") {
val df = Seq(
Seq("c", "a", "b"),
Seq("b", null, "c", null),
Seq.empty,
null
).toDF("s")
def testNonPrimitiveType(): Unit = {
checkAnswer(df.selectExpr("exists(s, x -> x is null)"),
Seq(
Row(false),
Row(true),
Row(false),
Row(null)))
checkAnswer(df.select(exists(col("s"), x => x.isNull)),
Seq(
Row(false),
Row(true),
Row(false),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testNonPrimitiveType()
}
test("exists function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), 1),
(Seq("b", null, "c", null), 2),
(Seq.empty, 3),
(null, 4)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("exists(s, (x, y) -> x + y)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '2' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("exists(i, x -> x)")
}
assert(ex2.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex2a = intercept[AnalysisException] {
df.select(exists(col("i"), x => x))
}
    assert(ex2a.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("exists(s, x -> x)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex3a = intercept[AnalysisException] {
df.select(exists(df("s"), x => x))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("exists(a, x -> x)")
}
assert(ex4.getMessage.contains("cannot resolve '`a`'"))
}
test("forall function - array for primitive type not containing null") {
val df = Seq(
Seq(1, 9, 8, 7),
Seq(2, 4, 6),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(df.selectExpr("forall(i, x -> x % 2 == 0)"),
Seq(
Row(false),
Row(true),
Row(true),
Row(null)))
checkAnswer(df.select(forall(col("i"), x => x % 2 === 0)),
Seq(
Row(false),
Row(true),
Row(true),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("forall function - array for primitive type containing null") {
val df = Seq[Seq[Integer]](
Seq(1, 9, 8, null, 7),
Seq(2, null, null, 4, 6, null),
Seq(2, 4, 6, 8),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(df.selectExpr("forall(i, x -> x % 2 == 0 or x is null)"),
Seq(
Row(false),
Row(true),
Row(true),
Row(true),
Row(null)))
checkAnswer(df.select(forall(col("i"), x => (x % 2 === 0) || x.isNull)),
Seq(
Row(false),
Row(true),
Row(true),
Row(true),
Row(null)))
checkAnswer(df.selectExpr("forall(i, x -> x % 2 == 0)"),
Seq(
Row(false),
Row(null),
Row(true),
Row(true),
Row(null)))
checkAnswer(df.select(forall(col("i"), x => x % 2 === 0)),
Seq(
Row(false),
Row(null),
Row(true),
Row(true),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("forall function - array for non-primitive type") {
val df = Seq(
Seq("c", "a", "b"),
Seq[String](null, null, null, null),
Seq.empty,
null
).toDF("s")
def testNonPrimitiveType(): Unit = {
checkAnswer(df.selectExpr("forall(s, x -> x is null)"),
Seq(
Row(false),
Row(true),
Row(true),
Row(null)))
checkAnswer(df.select(forall(col("s"), _.isNull)),
Seq(
Row(false),
Row(true),
Row(true),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testNonPrimitiveType()
}
test("forall function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), 1),
(Seq("b", null, "c", null), 2),
(Seq.empty, 3),
(null, 4)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("forall(s, (x, y) -> x + y)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '2' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("forall(i, x -> x)")
}
assert(ex2.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex2a = intercept[AnalysisException] {
df.select(forall(col("i"), x => x))
}
assert(ex2a.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("forall(s, x -> x)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex3a = intercept[AnalysisException] {
df.select(forall(col("s"), x => x))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 2 requires boolean type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("forall(a, x -> x)")
}
assert(ex4.getMessage.contains("cannot resolve '`a`'"))
val ex4a = intercept[AnalysisException] {
df.select(forall(col("a"), x => x))
}
assert(ex4a.getMessage.contains("cannot resolve '`a`'"))
}
test("aggregate function - array for primitive type not containing null") {
val df = Seq(
Seq(1, 9, 8, 7),
Seq(5, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeNotContainsNull(): Unit = {
checkAnswer(df.selectExpr("aggregate(i, 0, (acc, x) -> acc + x)"),
Seq(
Row(25),
Row(31),
Row(0),
Row(null)))
checkAnswer(df.selectExpr("aggregate(i, 0, (acc, x) -> acc + x, acc -> acc * 10)"),
Seq(
Row(250),
Row(310),
Row(0),
Row(null)))
checkAnswer(df.select(aggregate(col("i"), lit(0), (acc, x) => acc + x)),
Seq(
Row(25),
Row(31),
Row(0),
Row(null)))
checkAnswer(df.select(aggregate(col("i"), lit(0), (acc, x) => acc + x, _ * 10)),
Seq(
Row(250),
Row(310),
Row(0),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeNotContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeNotContainsNull()
}
test("aggregate function - array for primitive type containing null") {
val df = Seq[Seq[Integer]](
Seq(1, 9, 8, 7),
Seq(5, null, 8, 9, 7, 2),
Seq.empty,
null
).toDF("i")
def testArrayOfPrimitiveTypeContainsNull(): Unit = {
checkAnswer(df.selectExpr("aggregate(i, 0, (acc, x) -> acc + x)"),
Seq(
Row(25),
Row(null),
Row(0),
Row(null)))
checkAnswer(
df.selectExpr("aggregate(i, 0, (acc, x) -> acc + x, acc -> coalesce(acc, 0) * 10)"),
Seq(
Row(250),
Row(0),
Row(0),
Row(null)))
checkAnswer(df.select(aggregate(col("i"), lit(0), (acc, x) => acc + x)),
Seq(
Row(25),
Row(null),
Row(0),
Row(null)))
checkAnswer(
df.select(
aggregate(col("i"), lit(0), (acc, x) => acc + x, acc => coalesce(acc, lit(0)) * 10)),
Seq(
Row(250),
Row(0),
Row(0),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testArrayOfPrimitiveTypeContainsNull()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testArrayOfPrimitiveTypeContainsNull()
}
test("aggregate function - array for non-primitive type") {
val df = Seq(
(Seq("c", "a", "b"), "a"),
(Seq("b", null, "c", null), "b"),
(Seq.empty, "c"),
(null, "d")
).toDF("ss", "s")
def testNonPrimitiveType(): Unit = {
checkAnswer(df.selectExpr("aggregate(ss, s, (acc, x) -> concat(acc, x))"),
Seq(
Row("acab"),
Row(null),
Row("c"),
Row(null)))
checkAnswer(
df.selectExpr("aggregate(ss, s, (acc, x) -> concat(acc, x), acc -> coalesce(acc , ''))"),
Seq(
Row("acab"),
Row(""),
Row("c"),
Row(null)))
checkAnswer(df.select(aggregate(col("ss"), col("s"), (acc, x) => concat(acc, x))),
Seq(
Row("acab"),
Row(null),
Row("c"),
Row(null)))
checkAnswer(
df.select(
aggregate(col("ss"), col("s"), (acc, x) => concat(acc, x),
acc => coalesce(acc, lit("")))),
Seq(
Row("acab"),
Row(""),
Row("c"),
Row(null)))
}
// Test with local relation, the Project will be evaluated without codegen
testNonPrimitiveType()
// Test with cached relation, the Project will be evaluated with codegen
df.cache()
testNonPrimitiveType()
}
test("aggregate function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), 1),
(Seq("b", null, "c", null), 2),
(Seq.empty, 3),
(null, 4)
).toDF("s", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("aggregate(s, '', x -> x)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '1' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("aggregate(s, '', (acc, x) -> x, (acc, x) -> x)")
}
assert(ex2.getMessage.contains("The number of lambda function arguments '2' does not match"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("aggregate(i, 0, (acc, x) -> x)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3a = intercept[AnalysisException] {
df.select(aggregate(col("i"), lit(0), (acc, x) => x))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("aggregate(s, 0, (acc, x) -> x)")
}
assert(ex4.getMessage.contains("data type mismatch: argument 3 requires int type"))
val ex4a = intercept[AnalysisException] {
df.select(aggregate(col("s"), lit(0), (acc, x) => x))
}
assert(ex4a.getMessage.contains("data type mismatch: argument 3 requires int type"))
val ex5 = intercept[AnalysisException] {
df.selectExpr("aggregate(a, 0, (acc, x) -> x)")
}
assert(ex5.getMessage.contains("cannot resolve '`a`'"))
}
test("map_zip_with function - map of primitive types") {
val df = Seq(
(Map(8 -> 6L, 3 -> 5L, 6 -> 2L), Map[Integer, Integer]((6, 4), (8, 2), (3, 2))),
(Map(10 -> 6L, 8 -> 3L), Map[Integer, Integer]((8, 4), (4, null))),
(Map.empty[Int, Long], Map[Integer, Integer]((5, 1))),
(Map(5 -> 1L), null)
).toDF("m1", "m2")
checkAnswer(df.selectExpr("map_zip_with(m1, m2, (k, v1, v2) -> k == v1 + v2)"),
Seq(
Row(Map(8 -> true, 3 -> false, 6 -> true)),
Row(Map(10 -> null, 8 -> false, 4 -> null)),
Row(Map(5 -> null)),
Row(null)))
checkAnswer(df.select(map_zip_with(df("m1"), df("m2"), (k, v1, v2) => k === v1 + v2)),
Seq(
Row(Map(8 -> true, 3 -> false, 6 -> true)),
Row(Map(10 -> null, 8 -> false, 4 -> null)),
Row(Map(5 -> null)),
Row(null)))
}
test("map_zip_with function - map of non-primitive types") {
val df = Seq(
(Map("z" -> "a", "y" -> "b", "x" -> "c"), Map("x" -> "a", "z" -> "c")),
(Map("b" -> "a", "c" -> "d"), Map("c" -> "a", "b" -> null, "d" -> "k")),
(Map("a" -> "d"), Map.empty[String, String]),
(Map("a" -> "d"), null)
).toDF("m1", "m2")
checkAnswer(df.selectExpr("map_zip_with(m1, m2, (k, v1, v2) -> (v1, v2))"),
Seq(
Row(Map("z" -> Row("a", "c"), "y" -> Row("b", null), "x" -> Row("c", "a"))),
Row(Map("b" -> Row("a", null), "c" -> Row("d", "a"), "d" -> Row(null, "k"))),
Row(Map("a" -> Row("d", null))),
Row(null)))
checkAnswer(df.select(map_zip_with(col("m1"), col("m2"), (k, v1, v2) => struct(v1, v2))),
Seq(
Row(Map("z" -> Row("a", "c"), "y" -> Row("b", null), "x" -> Row("c", "a"))),
Row(Map("b" -> Row("a", null), "c" -> Row("d", "a"), "d" -> Row(null, "k"))),
Row(Map("a" -> Row("d", null))),
Row(null)))
}
test("map_zip_with function - invalid") {
val df = Seq(
(Map(1 -> 2), Map(1 -> "a"), Map("a" -> "b"), Map(Map(1 -> 2) -> 2), 1)
).toDF("mii", "mis", "mss", "mmi", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("map_zip_with(mii, mis, (x, y) -> x + y)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '2' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("map_zip_with(mis, mmi, (x, y, z) -> concat(x, y, z))")
}
assert(ex2.getMessage.contains("The input to function map_zip_with should have " +
"been two maps with compatible key types"))
val ex2a = intercept[AnalysisException] {
df.select(map_zip_with(df("mis"), col("mmi"), (x, y, z) => concat(x, y, z)))
}
assert(ex2a.getMessage.contains("The input to function map_zip_with should have " +
"been two maps with compatible key types"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("map_zip_with(i, mis, (x, y, z) -> concat(x, y, z))")
}
assert(ex3.getMessage.contains("type mismatch: argument 1 requires map type"))
val ex3a = intercept[AnalysisException] {
df.select(map_zip_with(col("i"), col("mis"), (x, y, z) => concat(x, y, z)))
}
assert(ex3a.getMessage.contains("type mismatch: argument 1 requires map type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("map_zip_with(mis, i, (x, y, z) -> concat(x, y, z))")
}
assert(ex4.getMessage.contains("type mismatch: argument 2 requires map type"))
val ex4a = intercept[AnalysisException] {
df.select(map_zip_with(col("mis"), col("i"), (x, y, z) => concat(x, y, z)))
}
assert(ex4a.getMessage.contains("type mismatch: argument 2 requires map type"))
val ex5 = intercept[AnalysisException] {
df.selectExpr("map_zip_with(mmi, mmi, (x, y, z) -> x)")
}
assert(ex5.getMessage.contains("function map_zip_with does not support ordering on type map"))
}
test("transform keys function - primitive data types") {
val dfExample1 = Seq(
Map[Int, Int](1 -> 1, 9 -> 9, 8 -> 8, 7 -> 7)
).toDF("i")
val dfExample2 = Seq(
Map[Int, Double](1 -> 1.0, 2 -> 1.40, 3 -> 1.70)
).toDF("j")
val dfExample3 = Seq(
Map[Int, Boolean](25 -> true, 26 -> false)
).toDF("x")
val dfExample4 = Seq(
Map[Array[Int], Boolean](Array(1, 2) -> false)
).toDF("y")
def testMapOfPrimitiveTypesCombination(): Unit = {
checkAnswer(dfExample1.selectExpr("transform_keys(i, (k, v) -> k + v)"),
Seq(Row(Map(2 -> 1, 18 -> 9, 16 -> 8, 14 -> 7))))
checkAnswer(dfExample1.select(transform_keys(col("i"), (k, v) => k + v)),
Seq(Row(Map(2 -> 1, 18 -> 9, 16 -> 8, 14 -> 7))))
checkAnswer(dfExample2.selectExpr("transform_keys(j, " +
"(k, v) -> map_from_arrays(ARRAY(1, 2, 3), ARRAY('one', 'two', 'three'))[k])"),
Seq(Row(Map("one" -> 1.0, "two" -> 1.4, "three" -> 1.7))))
checkAnswer(dfExample2.select(
transform_keys(
col("j"),
(k, v) => element_at(
map_from_arrays(
array(lit(1), lit(2), lit(3)),
array(lit("one"), lit("two"), lit("three"))
),
k
)
)
),
Seq(Row(Map("one" -> 1.0, "two" -> 1.4, "three" -> 1.7))))
checkAnswer(dfExample2.selectExpr("transform_keys(j, (k, v) -> CAST(v * 2 AS BIGINT) + k)"),
Seq(Row(Map(3 -> 1.0, 4 -> 1.4, 6 -> 1.7))))
checkAnswer(dfExample2.select(transform_keys(col("j"),
(k, v) => (v * 2).cast("bigint") + k)),
Seq(Row(Map(3 -> 1.0, 4 -> 1.4, 6 -> 1.7))))
checkAnswer(dfExample2.selectExpr("transform_keys(j, (k, v) -> k + v)"),
Seq(Row(Map(2.0 -> 1.0, 3.4 -> 1.4, 4.7 -> 1.7))))
checkAnswer(dfExample2.select(transform_keys(col("j"), (k, v) => k + v)),
Seq(Row(Map(2.0 -> 1.0, 3.4 -> 1.4, 4.7 -> 1.7))))
intercept[SparkException] {
dfExample3.selectExpr("transform_keys(x, (k, v) -> k % 2 = 0 OR v)").collect()
}
intercept[SparkException] {
dfExample3.select(transform_keys(col("x"), (k, v) => k % 2 === 0 || v)).collect()
}
withSQLConf(SQLConf.MAP_KEY_DEDUP_POLICY.key -> SQLConf.MapKeyDedupPolicy.LAST_WIN.toString) {
checkAnswer(dfExample3.selectExpr("transform_keys(x, (k, v) -> k % 2 = 0 OR v)"),
Seq(Row(Map(true -> true, true -> false))))
checkAnswer(dfExample3.select(transform_keys(col("x"), (k, v) => k % 2 === 0 || v)),
Seq(Row(Map(true -> true, true -> false))))
}
checkAnswer(dfExample3.selectExpr("transform_keys(x, (k, v) -> if(v, 2 * k, 3 * k))"),
Seq(Row(Map(50 -> true, 78 -> false))))
checkAnswer(dfExample3.select(transform_keys(col("x"),
(k, v) => when(v, k * 2).otherwise(k * 3))),
Seq(Row(Map(50 -> true, 78 -> false))))
checkAnswer(dfExample4.selectExpr("transform_keys(y, (k, v) -> array_contains(k, 3) AND v)"),
Seq(Row(Map(false -> false))))
checkAnswer(dfExample4.select(transform_keys(col("y"),
(k, v) => array_contains(k, lit(3)) && v)),
Seq(Row(Map(false -> false))))
}
// Test with local relation, the Project will be evaluated without codegen
testMapOfPrimitiveTypesCombination()
dfExample1.cache()
dfExample2.cache()
dfExample3.cache()
dfExample4.cache()
// Test with cached relation, the Project will be evaluated with codegen
testMapOfPrimitiveTypesCombination()
}
test("transform keys function - Invalid lambda functions and exceptions") {
val dfExample1 = Seq(
Map[String, String]("a" -> null)
).toDF("i")
val dfExample2 = Seq(
Seq(1, 2, 3, 4)
).toDF("j")
val ex1 = intercept[AnalysisException] {
dfExample1.selectExpr("transform_keys(i, k -> k)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '1' does not match"))
val ex2 = intercept[AnalysisException] {
dfExample1.selectExpr("transform_keys(i, (k, v, x) -> k + 1)")
}
assert(ex2.getMessage.contains(
"The number of lambda function arguments '3' does not match"))
val ex3 = intercept[Exception] {
dfExample1.selectExpr("transform_keys(i, (k, v) -> v)").show()
}
assert(ex3.getMessage.contains("Cannot use null as map key"))
val ex3a = intercept[Exception] {
dfExample1.select(transform_keys(col("i"), (k, v) => v)).show()
}
assert(ex3a.getMessage.contains("Cannot use null as map key"))
val ex4 = intercept[AnalysisException] {
dfExample2.selectExpr("transform_keys(j, (k, v) -> k + 1)")
}
assert(ex4.getMessage.contains(
"data type mismatch: argument 1 requires map type"))
}
test("transform values function - test primitive data types") {
val dfExample1 = Seq(
Map[Int, Int](1 -> 1, 9 -> 9, 8 -> 8, 7 -> 7)
).toDF("i")
val dfExample2 = Seq(
Map[Boolean, String](false -> "abc", true -> "def")
).toDF("x")
val dfExample3 = Seq(
Map[String, Int]("a" -> 1, "b" -> 2, "c" -> 3)
).toDF("y")
val dfExample4 = Seq(
Map[Int, Double](1 -> 1.0, 2 -> 1.40, 3 -> 1.70)
).toDF("z")
val dfExample5 = Seq(
Map[Int, Array[Int]](1 -> Array(1, 2))
).toDF("c")
def testMapOfPrimitiveTypesCombination(): Unit = {
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> k + v)"),
Seq(Row(Map(1 -> 2, 9 -> 18, 8 -> 16, 7 -> 14))))
checkAnswer(dfExample2.selectExpr(
"transform_values(x, (k, v) -> if(k, v, CAST(k AS String)))"),
Seq(Row(Map(false -> "false", true -> "def"))))
checkAnswer(dfExample2.selectExpr("transform_values(x, (k, v) -> NOT k AND v = 'abc')"),
Seq(Row(Map(false -> true, true -> false))))
checkAnswer(dfExample3.selectExpr("transform_values(y, (k, v) -> v * v)"),
Seq(Row(Map("a" -> 1, "b" -> 4, "c" -> 9))))
checkAnswer(dfExample3.selectExpr(
"transform_values(y, (k, v) -> k || ':' || CAST(v as String))"),
Seq(Row(Map("a" -> "a:1", "b" -> "b:2", "c" -> "c:3"))))
checkAnswer(
dfExample3.selectExpr("transform_values(y, (k, v) -> concat(k, cast(v as String)))"),
Seq(Row(Map("a" -> "a1", "b" -> "b2", "c" -> "c3"))))
checkAnswer(
dfExample4.selectExpr(
"transform_values(" +
"z,(k, v) -> map_from_arrays(ARRAY(1, 2, 3), " +
"ARRAY('one', 'two', 'three'))[k] || '_' || CAST(v AS String))"),
Seq(Row(Map(1 -> "one_1.0", 2 -> "two_1.4", 3 ->"three_1.7"))))
checkAnswer(
dfExample4.selectExpr("transform_values(z, (k, v) -> k-v)"),
Seq(Row(Map(1 -> 0.0, 2 -> 0.6000000000000001, 3 -> 1.3))))
checkAnswer(
dfExample5.selectExpr("transform_values(c, (k, v) -> k + cardinality(v))"),
Seq(Row(Map(1 -> 3))))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => k + v)),
Seq(Row(Map(1 -> 2, 9 -> 18, 8 -> 16, 7 -> 14))))
checkAnswer(dfExample2.select(
transform_values(col("x"), (k, v) => when(k, v).otherwise(k.cast("string")))),
Seq(Row(Map(false -> "false", true -> "def"))))
checkAnswer(dfExample2.select(transform_values(col("x"),
(k, v) => (!k) && v === "abc")),
Seq(Row(Map(false -> true, true -> false))))
checkAnswer(dfExample3.select(transform_values(col("y"), (k, v) => v * v)),
Seq(Row(Map("a" -> 1, "b" -> 4, "c" -> 9))))
checkAnswer(dfExample3.select(
transform_values(col("y"), (k, v) => concat(k, lit(":"), v.cast("string")))),
Seq(Row(Map("a" -> "a:1", "b" -> "b:2", "c" -> "c:3"))))
checkAnswer(
dfExample3.select(transform_values(col("y"), (k, v) => concat(k, v.cast("string")))),
Seq(Row(Map("a" -> "a1", "b" -> "b2", "c" -> "c3"))))
val testMap = map_from_arrays(
array(lit(1), lit(2), lit(3)),
array(lit("one"), lit("two"), lit("three"))
)
checkAnswer(
dfExample4.select(transform_values(col("z"),
(k, v) => concat(element_at(testMap, k), lit("_"), v.cast("string")))),
Seq(Row(Map(1 -> "one_1.0", 2 -> "two_1.4", 3 ->"three_1.7"))))
checkAnswer(
dfExample4.select(transform_values(col("z"), (k, v) => k - v)),
Seq(Row(Map(1 -> 0.0, 2 -> 0.6000000000000001, 3 -> 1.3))))
checkAnswer(
dfExample5.select(transform_values(col("c"), (k, v) => k + size(v))),
Seq(Row(Map(1 -> 3))))
}
// Test with local relation, the Project will be evaluated without codegen
testMapOfPrimitiveTypesCombination()
dfExample1.cache()
dfExample2.cache()
dfExample3.cache()
dfExample4.cache()
dfExample5.cache()
// Test with cached relation, the Project will be evaluated with codegen
testMapOfPrimitiveTypesCombination()
}
test("transform values function - test empty") {
val dfExample1 = Seq(
Map.empty[Integer, Integer]
).toDF("i")
val dfExample2 = Seq(
Map.empty[BigInt, String]
).toDF("j")
def testEmpty(): Unit = {
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> NULL)"),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> k)"),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> v)"),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> 0)"),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> 'value')"),
Seq(Row(Map.empty[Integer, String])))
checkAnswer(dfExample1.selectExpr("transform_values(i, (k, v) -> true)"),
Seq(Row(Map.empty[Integer, Boolean])))
checkAnswer(dfExample2.selectExpr("transform_values(j, (k, v) -> k + cast(v as BIGINT))"),
Seq(Row(Map.empty[BigInt, BigInt])))
checkAnswer(dfExample1.select(transform_values(col("i"),
(k, v) => lit(null).cast("int"))),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => k)),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => v)),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => lit(0))),
Seq(Row(Map.empty[Integer, Integer])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => lit("value"))),
Seq(Row(Map.empty[Integer, String])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => lit(true))),
Seq(Row(Map.empty[Integer, Boolean])))
checkAnswer(dfExample1.select(transform_values(col("i"), (k, v) => v.cast("bigint"))),
Seq(Row(Map.empty[BigInt, BigInt])))
}
testEmpty()
dfExample1.cache()
dfExample2.cache()
testEmpty()
}
test("transform values function - test null values") {
val dfExample1 = Seq(
Map[Int, Integer](1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4)
).toDF("a")
val dfExample2 = Seq(
Map[Int, String](1 -> "a", 2 -> "b", 3 -> null)
).toDF("b")
def testNullValue(): Unit = {
checkAnswer(dfExample1.selectExpr("transform_values(a, (k, v) -> null)"),
Seq(Row(Map[Int, Integer](1 -> null, 2 -> null, 3 -> null, 4 -> null))))
checkAnswer(dfExample2.selectExpr(
"transform_values(b, (k, v) -> IF(v IS NULL, k + 1, k + 2))"),
Seq(Row(Map(1 -> 3, 2 -> 4, 3 -> 4))))
checkAnswer(dfExample1.select(transform_values(col("a"),
(k, v) => lit(null).cast("int"))),
Seq(Row(Map[Int, Integer](1 -> null, 2 -> null, 3 -> null, 4 -> null))))
checkAnswer(dfExample2.select(
transform_values(col("b"), (k, v) => when(v.isNull, k + 1).otherwise(k + 2))
),
Seq(Row(Map(1 -> 3, 2 -> 4, 3 -> 4))))
}
testNullValue()
dfExample1.cache()
dfExample2.cache()
testNullValue()
}
test("transform values function - test invalid functions") {
val dfExample1 = Seq(
Map[Int, Int](1 -> 1, 9 -> 9, 8 -> 8, 7 -> 7)
).toDF("i")
val dfExample2 = Seq(
Map[String, String]("a" -> "b")
).toDF("j")
val dfExample3 = Seq(
Seq(1, 2, 3, 4)
).toDF("x")
def testInvalidLambdaFunctions(): Unit = {
val ex1 = intercept[AnalysisException] {
dfExample1.selectExpr("transform_values(i, k -> k)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '1' does not match"))
val ex2 = intercept[AnalysisException] {
dfExample2.selectExpr("transform_values(j, (k, v, x) -> k + 1)")
}
assert(ex2.getMessage.contains("The number of lambda function arguments '3' does not match"))
val ex3 = intercept[AnalysisException] {
dfExample3.selectExpr("transform_values(x, (k, v) -> k + 1)")
}
assert(ex3.getMessage.contains(
"data type mismatch: argument 1 requires map type"))
val ex3a = intercept[AnalysisException] {
dfExample3.select(transform_values(col("x"), (k, v) => k + 1))
}
assert(ex3a.getMessage.contains(
"data type mismatch: argument 1 requires map type"))
}
testInvalidLambdaFunctions()
dfExample1.cache()
dfExample2.cache()
dfExample3.cache()
testInvalidLambdaFunctions()
}
test("arrays zip_with function - for primitive types") {
val df1 = Seq[(Seq[Integer], Seq[Integer])](
(Seq(9001, 9002, 9003), Seq(4, 5, 6)),
(Seq(1, 2), Seq(3, 4)),
(Seq.empty, Seq.empty),
(null, null)
).toDF("val1", "val2")
val df2 = Seq[(Seq[Integer], Seq[Long])](
(Seq(1, null, 3), Seq(1L, 2L)),
(Seq(1, 2, 3), Seq(4L, 11L))
).toDF("val1", "val2")
val expectedValue1 = Seq(
Row(Seq(9005, 9007, 9009)),
Row(Seq(4, 6)),
Row(Seq.empty),
Row(null))
checkAnswer(df1.selectExpr("zip_with(val1, val2, (x, y) -> x + y)"), expectedValue1)
checkAnswer(df1.select(zip_with(df1("val1"), df1("val2"), (x, y) => x + y)), expectedValue1)
val expectedValue2 = Seq(
Row(Seq(Row(1L, 1), Row(2L, null), Row(null, 3))),
Row(Seq(Row(4L, 1), Row(11L, 2), Row(null, 3))))
checkAnswer(df2.selectExpr("zip_with(val1, val2, (x, y) -> (y, x))"), expectedValue2)
checkAnswer(
df2.select(zip_with(df2("val1"), df2("val2"), (x, y) => struct(y, x))),
expectedValue2
)
}
test("arrays zip_with function - for non-primitive types") {
val df = Seq(
(Seq("a"), Seq("x", "y", "z")),
(Seq("a", null), Seq("x", "y")),
(Seq.empty[String], Seq.empty[String]),
(Seq("a", "b", "c"), null)
).toDF("val1", "val2")
val expectedValue1 = Seq(
Row(Seq(Row("x", "a"), Row("y", null), Row("z", null))),
Row(Seq(Row("x", "a"), Row("y", null))),
Row(Seq.empty),
Row(null))
checkAnswer(
df.selectExpr("zip_with(val1, val2, (x, y) -> (y, x))"),
expectedValue1
)
checkAnswer(
df.select(zip_with(col("val1"), col("val2"), (x, y) => struct(y, x))),
expectedValue1
)
}
test("arrays zip_with function - invalid") {
val df = Seq(
(Seq("c", "a", "b"), Seq("x", "y", "z"), 1),
(Seq("b", null, "c", null), Seq("x"), 2),
(Seq.empty, Seq("x", "z"), 3),
(null, Seq("x", "z"), 4)
).toDF("a1", "a2", "i")
val ex1 = intercept[AnalysisException] {
df.selectExpr("zip_with(a1, a2, x -> x)")
}
assert(ex1.getMessage.contains("The number of lambda function arguments '1' does not match"))
val ex2 = intercept[AnalysisException] {
df.selectExpr("zip_with(a1, a2, (acc, x) -> x, (acc, x) -> x)")
}
assert(ex2.getMessage.contains("Invalid number of arguments for function zip_with"))
val ex3 = intercept[AnalysisException] {
df.selectExpr("zip_with(i, a2, (acc, x) -> x)")
}
assert(ex3.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex3a = intercept[AnalysisException] {
df.select(zip_with(df("i"), df("a2"), (acc, x) => x))
}
assert(ex3a.getMessage.contains("data type mismatch: argument 1 requires array type"))
val ex4 = intercept[AnalysisException] {
df.selectExpr("zip_with(a1, a, (acc, x) -> x)")
}
assert(ex4.getMessage.contains("cannot resolve '`a`'"))
}
private def assertValuesDoNotChangeAfterCoalesceOrUnion(v: Column): Unit = {
import DataFrameFunctionsSuite.CodegenFallbackExpr
for ((codegenFallback, wholeStage) <- Seq((true, false), (false, false), (false, true))) {
val c = if (codegenFallback) {
Column(CodegenFallbackExpr(v.expr))
} else {
v
}
withSQLConf(
(SQLConf.CODEGEN_FALLBACK.key, codegenFallback.toString),
(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key, wholeStage.toString)) {
val df = spark.range(0, 4, 1, 4).withColumn("c", c)
val rows = df.collect()
val rowsAfterCoalesce = df.coalesce(2).collect()
assert(rows === rowsAfterCoalesce, "Values changed after coalesce when " +
s"codegenFallback=$codegenFallback and wholeStage=$wholeStage.")
val df1 = spark.range(0, 2, 1, 2).withColumn("c", c)
val rows1 = df1.collect()
val df2 = spark.range(2, 4, 1, 2).withColumn("c", c)
val rows2 = df2.collect()
val rowsAfterUnion = df1.union(df2).collect()
assert(rowsAfterUnion === rows1 ++ rows2, "Values changed after union when " +
s"codegenFallback=$codegenFallback and wholeStage=$wholeStage.")
}
}
}
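  // The helper above evaluates the non-deterministic column under three codegen configurations
  // (fallback on / whole-stage off, fallback off / whole-stage off, fallback off / whole-stage on)
  // and checks that the collected values are identical before and after coalesce/union, i.e. that
  // the expression is not re-evaluated by those operations.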
test("SPARK-14393: values generated by non-deterministic functions shouldn't change after " +
"coalesce or union") {
Seq(
monotonically_increasing_id(), spark_partition_id(),
rand(Random.nextLong()), randn(Random.nextLong())
).foreach(assertValuesDoNotChangeAfterCoalesceOrUnion(_))
}
test("SPARK-21281 fails if functions have no argument") {
val df = Seq(1).toDF("a")
val funcsMustHaveAtLeastOneArg =
("coalesce", (df: DataFrame) => df.select(coalesce())) ::
("coalesce", (df: DataFrame) => df.selectExpr("coalesce()")) ::
("hash", (df: DataFrame) => df.select(hash())) ::
("hash", (df: DataFrame) => df.selectExpr("hash()")) ::
("xxhash64", (df: DataFrame) => df.select(xxhash64())) ::
("xxhash64", (df: DataFrame) => df.selectExpr("xxhash64()")) :: Nil
funcsMustHaveAtLeastOneArg.foreach { case (name, func) =>
val errMsg = intercept[AnalysisException] { func(df) }.getMessage
assert(errMsg.contains(s"input to function $name requires at least one argument"))
}
val funcsMustHaveAtLeastTwoArgs =
("greatest", (df: DataFrame) => df.select(greatest())) ::
("greatest", (df: DataFrame) => df.selectExpr("greatest()")) ::
("least", (df: DataFrame) => df.select(least())) ::
("least", (df: DataFrame) => df.selectExpr("least()")) :: Nil
funcsMustHaveAtLeastTwoArgs.foreach { case (name, func) =>
val errMsg = intercept[AnalysisException] { func(df) }.getMessage
assert(errMsg.contains(s"input to function $name requires at least two arguments"))
}
}
test("SPARK-24734: Fix containsNull of Concat for array type") {
val df = Seq((Seq(1), Seq[Integer](null), Seq("a", "b"))).toDF("k1", "k2", "v")
val ex = intercept[Exception] {
df.select(map_from_arrays(concat($"k1", $"k2"), $"v")).show()
}
assert(ex.getMessage.contains("Cannot use null as map key"))
}
test("SPARK-26370: Fix resolution of higher-order function for the same identifier") {
val df = Seq(
(Seq(1, 9, 8, 7), 1, 2),
(Seq(5, 9, 7), 2, 2),
(Seq.empty, 3, 2),
(null, 4, 2)
).toDF("i", "x", "d")
checkAnswer(df.selectExpr("x", "exists(i, x -> x % d == 0)"),
Seq(
Row(1, true),
Row(2, false),
Row(3, false),
Row(4, null)))
checkAnswer(df.filter("exists(i, x -> x % d == 0)"),
Seq(Row(Seq(1, 9, 8, 7), 1, 2)))
checkAnswer(df.select("x").filter("exists(i, x -> x % d == 0)"),
Seq(Row(1)))
}
test("SPARK-29462: Empty array of NullType for array function with no arguments") {
Seq((true, StringType), (false, NullType)).foreach {
case (arrayDefaultToString, expectedType) =>
withSQLConf(SQLConf.LEGACY_CREATE_EMPTY_COLLECTION_USING_STRING_TYPE.key ->
arrayDefaultToString.toString) {
val schema = spark.range(1).select(array()).schema
assert(schema.nonEmpty && schema.head.dataType.isInstanceOf[ArrayType])
val actualType = schema.head.dataType.asInstanceOf[ArrayType].elementType
assert(actualType === expectedType)
}
}
}
test("SPARK-30790: Empty map with NullType as key/value type for map function with no argument") {
Seq((true, StringType), (false, NullType)).foreach {
case (mapDefaultToString, expectedType) =>
withSQLConf(SQLConf.LEGACY_CREATE_EMPTY_COLLECTION_USING_STRING_TYPE.key ->
mapDefaultToString.toString) {
val schema = spark.range(1).select(map()).schema
assert(schema.nonEmpty && schema.head.dataType.isInstanceOf[MapType])
val actualKeyType = schema.head.dataType.asInstanceOf[MapType].keyType
val actualValueType = schema.head.dataType.asInstanceOf[MapType].valueType
assert(actualKeyType === expectedType)
assert(actualValueType === expectedType)
}
}
}
test("SPARK-26071: convert map to array and use as map key") {
val df = Seq(Map(1 -> "a")).toDF("m")
intercept[AnalysisException](df.select(map($"m", lit(1))))
checkAnswer(
df.select(map(map_entries($"m"), lit(1))),
Row(Map(Seq(Row(1, "a")) -> 1)))
}
}
object DataFrameFunctionsSuite {
case class CodegenFallbackExpr(child: Expression) extends Expression with CodegenFallback {
override def children: Seq[Expression] = Seq(child)
override def nullable: Boolean = child.nullable
override def dataType: DataType = child.dataType
override lazy val resolved = true
override def eval(input: InternalRow): Any = child.eval(input)
}
}
| spark-test/spark | sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala | Scala | apache-2.0 | 120,068 |
package org.http4s
package client
import cats.effect._
import cats.implicits._
import fs2._
import java.net.InetSocketAddress
import javax.servlet.ServletOutputStream
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import org.http4s.client.testroutes.GetRoutes
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.dsl.io._
import org.specs2.specification.core.Fragments
import scala.concurrent.duration._
abstract class ClientRouteTestBattery(name: String, client: Client[IO])
extends Http4sSpec
with Http4sClientDsl[IO] {
val timeout = 20.seconds
val jettyServ = new JettyScaffold(1)
var address: InetSocketAddress = null
def testServlet = new HttpServlet {
override def doGet(req: HttpServletRequest, srv: HttpServletResponse): Unit =
GetRoutes.getPaths.get(req.getRequestURI) match {
case Some(r) => renderResponse(srv, r)
case None => srv.sendError(404)
}
override def doPost(req: HttpServletRequest, srv: HttpServletResponse): Unit = {
srv.setStatus(200)
val s = scala.io.Source.fromInputStream(req.getInputStream).mkString
srv.getOutputStream.print(s)
srv.getOutputStream.flush()
}
}
Fragments.foreach(GetRoutes.getPaths.toSeq) {
case (path, expected) =>
s"Execute GET: $path" in {
val name = address.getHostName
val port = address.getPort
val req = Request[IO](uri = Uri.fromString(s"http://$name:$port$path").yolo)
client
.fetch(req)(resp => IO(checkResponse(resp, expected)))
.unsafeRunTimed(timeout)
.get
}
}
name should {
"Strip fragments from URI" in {
skipped("Can only reproduce against external resource. Help wanted.")
val uri = Uri.uri("https://en.wikipedia.org/wiki/Buckethead_discography#Studio_albums")
val body = client.fetch(Request[IO](uri = uri))(e => IO.pure(e.status))
body must returnValue(Ok)
}
"Repeat a simple request" in {
val path = GetRoutes.SimplePath
def fetchBody = client.toKleisli(_.as[String]).local { uri: Uri =>
Request(uri = uri)
}
val url = Uri.fromString(s"http://${address.getHostName}:${address.getPort}$path").yolo
async
.parallelTraverse((0 until 10).toVector)(_ => fetchBody.run(url).map(_.length))
.unsafeRunTimed(timeout)
.forall(_ mustNotEqual 0)
}
"POST an empty body" in {
val uri = Uri.fromString(s"http://${address.getHostName}:${address.getPort}/echo").yolo
val req = POST(uri)
val body = client.expect[String](req)
body must returnValue("")
}
"POST a normal body" in {
val uri = Uri.fromString(s"http://${address.getHostName}:${address.getPort}/echo").yolo
val req = POST(uri, "This is normal.")
val body = client.expect[String](req)
body must returnValue("This is normal.")
}
"POST a chunked body" in {
val uri = Uri.fromString(s"http://${address.getHostName}:${address.getPort}/echo").yolo
val req = POST(uri, Stream("This is chunked.").covary[IO])
val body = client.expect[String](req)
body must returnValue("This is chunked.")
}
}
override def map(fs: => Fragments): Fragments =
super.map(
step {
jettyServ.startServers(testServlet)
address = jettyServ.addresses.head
} ^ fs ^ step {
client.shutdown.unsafeRunSync()
jettyServ.stopServers()
}
)
private def checkResponse(rec: Response[IO], expected: Response[IO]) = {
val hs = rec.headers.toSeq
rec.status must be_==(expected.status)
collectBody(rec.body) must be_==(collectBody(expected.body))
expected.headers.foreach(h => h must beOneOf(hs: _*))
rec.httpVersion must be_==(expected.httpVersion)
}
private def renderResponse(srv: HttpServletResponse, resp: Response[IO]): Unit = {
srv.setStatus(resp.status.code)
resp.headers.foreach { h =>
srv.addHeader(h.name.toString, h.value)
}
val os: ServletOutputStream = srv.getOutputStream
val writeBody: IO[Unit] = resp.body
.evalMap { byte =>
IO(os.write(Array(byte)))
}
.compile
.drain
val flushOutputStream: IO[Unit] = IO(os.flush())
(writeBody *> flushOutputStream).unsafeRunSync()
}
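  // Note (sketch): the body is written one byte at a time via evalMap, which is simple but slow;
  // that is acceptable here because this test scaffold only serves tiny response bodies.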
private def collectBody(body: EntityBody[IO]): Array[Byte] =
body.compile.toVector.unsafeRunSync().toArray
}
| reactormonk/http4s | client/src/test/scala/org/http4s/client/ClientRouteTestBattery.scala | Scala | apache-2.0 | 4,446 |
package provingground
import HoTT._
object MereProposition {
def isPropn[U <: Term with Subs[U]](typ: Typ[U]) = {
val x = typ.Var
val y = typ.Var
pi(x)(pi(y)(x =:= y))
}
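  // Reading the definition (sketch): isPropn(A) is the type Pi (x y : A). x = y, i.e. the type of
  // witnesses that any two inhabitants of A are equal, which is the usual HoTT sense in which A is
  // a mere proposition.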
// Should refine to type of unit
case class Truncation[U <: Term with Subs[U]](base: Typ[U])
extends Typ[Term] {
lazy val typ = base.typ
def subs(x: Term, y: Term) = Truncation(base.replace(x, y))
def newobj = Truncation(base.newobj)
def variable(name: AnySym) = SymbObj(name, this)
type Obj = Term
lazy val propWitness = isPropn(this)
}
case class Quotient[U <: Term with Subs[U]](base: Typ[U])
extends Func[U, Term] {
lazy val dom = base
lazy val codom = Truncation(base)
lazy val typ = dom ->: codom
def act(arg: U): provingground.HoTT.Term =
codom.symbObj(ApplnSym(this, arg))
def subs(x: provingground.HoTT.Term, y: provingground.HoTT.Term) =
Quotient(base.replace(x, y))
def newobj = Quotient(base.newobj)
}
case class Factorize[U <: Term with Subs[U], V <: Term with Subs[V]](
A: Typ[U],
B: Typ[V])
extends Func[Term, Func[Func[U, V], Func[Term, V]]]
with Subs[Factorize[U, V]] {
lazy val dom = isPropn(B)
lazy val codom = (A ->: B) ->: (Truncation(A) ->: B)
lazy val typ = (dom: Typ[Term]) ->: codom
def subs(x: Term, y: Term) = Factorize(A.replace(x, y), B.replace(x, y))
def newobj = Factorize(A.newobj, B.newobj)
def act(arg: Term) = codom.symbObj(ApplnSym(this, arg))
}
//mainly for testing
def mere[U <: Term with Subs[U], V <: Term with Subs[V]](fn: Func[U, V]) = {
val A = fn.dom
val B = fn.codom
Factorize(A, Truncation(B))(Truncation(B).propWitness)(fn)
}
}
| siddhartha-gadgil/ProvingGround | core/src/main/scala/provingground/MereProposition.scala | Scala | mit | 1,735 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.util.{List => JList}
import java.util.concurrent.CountDownLatch
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import com.google.common.base.Splitter
import org.apache.mesos.{MesosSchedulerDriver, Protos, Scheduler, SchedulerDriver}
import org.apache.mesos.Protos._
import org.apache.mesos.protobuf.{ByteString, GeneratedMessage}
import org.apache.spark.{SparkConf, SparkContext, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* Shared trait for implementing a Mesos Scheduler. This holds common state and helper
 * methods that the Mesos scheduler will use.
*/
private[mesos] trait MesosSchedulerUtils extends Logging {
  // Latch used to wait for the scheduler to be registered
private final val registerLatch = new CountDownLatch(1)
// Driver for talking to Mesos
protected var mesosDriver: SchedulerDriver = null
/**
* Creates a new MesosSchedulerDriver that communicates to the Mesos master.
* @param masterUrl The url to connect to Mesos master
* @param scheduler the scheduler class to receive scheduler callbacks
* @param sparkUser User to impersonate with when running tasks
* @param appName The framework name to display on the Mesos UI
* @param conf Spark configuration
* @param webuiUrl The WebUI url to link from Mesos UI
* @param checkpoint Option to checkpoint tasks for failover
   * @param failoverTimeout How long the Mesos master waits for the scheduler to reconnect
   *                        after a disconnect before failing the framework over
* @param frameworkId The id of the new framework
*/
protected def createSchedulerDriver(
masterUrl: String,
scheduler: Scheduler,
sparkUser: String,
appName: String,
conf: SparkConf,
webuiUrl: Option[String] = None,
checkpoint: Option[Boolean] = None,
failoverTimeout: Option[Double] = None,
frameworkId: Option[String] = None): SchedulerDriver = {
val fwInfoBuilder = FrameworkInfo.newBuilder().setUser(sparkUser).setName(appName)
val credBuilder = Credential.newBuilder()
webuiUrl.foreach { url => fwInfoBuilder.setWebuiUrl(url) }
checkpoint.foreach { checkpoint => fwInfoBuilder.setCheckpoint(checkpoint) }
failoverTimeout.foreach { timeout => fwInfoBuilder.setFailoverTimeout(timeout) }
frameworkId.foreach { id =>
fwInfoBuilder.setId(FrameworkID.newBuilder().setValue(id).build())
}
conf.getOption("spark.mesos.principal").foreach { principal =>
fwInfoBuilder.setPrincipal(principal)
credBuilder.setPrincipal(principal)
}
conf.getOption("spark.mesos.secret").foreach { secret =>
credBuilder.setSecret(ByteString.copyFromUtf8(secret))
}
if (credBuilder.hasSecret && !fwInfoBuilder.hasPrincipal) {
throw new SparkException(
"spark.mesos.principal must be configured when spark.mesos.secret is set")
}
conf.getOption("spark.mesos.role").foreach { role =>
fwInfoBuilder.setRole(role)
}
if (credBuilder.hasPrincipal) {
new MesosSchedulerDriver(
scheduler, fwInfoBuilder.build(), masterUrl, credBuilder.build())
} else {
new MesosSchedulerDriver(scheduler, fwInfoBuilder.build(), masterUrl)
}
}
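  // Usage sketch (all values below are illustrative, not taken from this codebase): a scheduler
  // backend would typically build its driver once during start-up, e.g.
  //   val driver = createSchedulerDriver(
  //     "mesos://master:5050", this, sc.sparkUser, sc.appName, sc.conf,
  //     webuiUrl = Some("http://driver-host:4040"), checkpoint = Some(false))
  // and then hand it to startScheduler(driver) below.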
/**
   * Starts the given MesosSchedulerDriver and stores it as the currently running driver.
   * The driver passed in is expected not to be running yet.
* This method returns only after the scheduler has registered with Mesos.
*/
def startScheduler(newDriver: SchedulerDriver): Unit = {
synchronized {
if (mesosDriver != null) {
registerLatch.await()
return
}
@volatile
var error: Option[Exception] = None
// We create a new thread that will block inside `mesosDriver.run`
      // until the scheduler exits
new Thread(Utils.getFormattedClassName(this) + "-mesos-driver") {
setDaemon(true)
override def run() {
try {
mesosDriver = newDriver
val ret = mesosDriver.run()
logInfo("driver.run() returned with code " + ret)
if (ret != null && ret.equals(Status.DRIVER_ABORTED)) {
error = Some(new SparkException("Error starting driver, DRIVER_ABORTED"))
markErr()
}
} catch {
case e: Exception =>
logError("driver.run() failed", e)
error = Some(e)
markErr()
}
}
}.start()
registerLatch.await()
// propagate any error to the calling thread. This ensures that SparkContext creation fails
// without leaving a broken context that won't be able to schedule any tasks
error.foreach(throw _)
}
}
def getResource(res: JList[Resource], name: String): Double = {
// A resource can have multiple values in the offer since it can either be from
// a specific role or wildcard.
res.asScala.filter(_.getName == name).map(_.getScalar.getValue).sum
}
/**
* Signal that the scheduler has registered with Mesos.
*/
protected def markRegistered(): Unit = {
registerLatch.countDown()
}
protected def markErr(): Unit = {
registerLatch.countDown()
}
def createResource(name: String, amount: Double, role: Option[String] = None): Resource = {
val builder = Resource.newBuilder()
.setName(name)
.setType(Value.Type.SCALAR)
.setScalar(Value.Scalar.newBuilder().setValue(amount).build())
role.foreach { r => builder.setRole(r) }
builder.build()
}
/**
* Partition the existing set of resources into two groups, those remaining to be
* scheduled and those requested to be used for a new task.
* @param resources The full list of available resources
* @param resourceName The name of the resource to take from the available resources
* @param amountToUse The amount of resources to take from the available resources
* @return The remaining resources list and the used resources list.
*/
def partitionResources(
resources: JList[Resource],
resourceName: String,
amountToUse: Double): (List[Resource], List[Resource]) = {
var remain = amountToUse
var requestedResources = new ArrayBuffer[Resource]
val remainingResources = resources.asScala.map {
case r =>
if (remain > 0 &&
r.getType == Value.Type.SCALAR &&
r.getScalar.getValue > 0.0 &&
r.getName == resourceName) {
val usage = Math.min(remain, r.getScalar.getValue)
requestedResources += createResource(resourceName, usage, Some(r.getRole))
remain -= usage
createResource(resourceName, r.getScalar.getValue - usage, Some(r.getRole))
} else {
r
}
}
// Filter any resource that has depleted.
val filteredResources =
remainingResources.filter(r => r.getType != Value.Type.SCALAR || r.getScalar.getValue > 0.0)
(filteredResources.toList, requestedResources.toList)
}
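  // Worked example (illustrative values): given offers cpus=4.0 for role "*" and cpus=2.0 for role
  // "spark", partitionResources(resources, "cpus", 5.0) takes 4.0 from the first entry and 1.0 from
  // the second, returning remaining = List(cpus 1.0 @ "spark") (the depleted "*" entry is filtered
  // out) and used = List(cpus 4.0 @ "*", cpus 1.0 @ "spark").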
/** Helper method to get the key,value-set pair for a Mesos Attribute protobuf */
protected def getAttribute(attr: Attribute): (String, Set[String]) = {
(attr.getName, attr.getText.getValue.split(',').toSet)
}
/** Build a Mesos resource protobuf object */
protected def createResource(resourceName: String, quantity: Double): Protos.Resource = {
Resource.newBuilder()
.setName(resourceName)
.setType(Value.Type.SCALAR)
.setScalar(Value.Scalar.newBuilder().setValue(quantity).build())
.build()
}
/**
   * Converts the attributes from the resource offer into a Map of name -> attribute value.
   * The attribute values keep their native Mesos types (scalar, ranges, set, or text).
   * @param offerAttributes the attributes attached to a resource offer
   * @return a map from attribute name to its typed value
*/
protected def toAttributeMap(offerAttributes: JList[Attribute]): Map[String, GeneratedMessage] = {
offerAttributes.asScala.map { attr =>
val attrValue = attr.getType match {
case Value.Type.SCALAR => attr.getScalar
case Value.Type.RANGES => attr.getRanges
case Value.Type.SET => attr.getSet
case Value.Type.TEXT => attr.getText
}
(attr.getName, attrValue)
}.toMap
}
/**
* Match the requirements (if any) to the offer attributes.
   * If attribute requirements are not specified, return true.
   * If an attribute is required but no values are given, only attribute presence is checked.
   * If an attribute name and values are specified, a subset match is performed against the
   * slave's attributes.
*/
def matchesAttributeRequirements(
slaveOfferConstraints: Map[String, Set[String]],
offerAttributes: Map[String, GeneratedMessage]): Boolean = {
slaveOfferConstraints.forall {
// offer has the required attribute and subsumes the required values for that attribute
case (name, requiredValues) =>
offerAttributes.get(name) match {
case None => false
case Some(_) if requiredValues.isEmpty => true // empty value matches presence
case Some(scalarValue: Value.Scalar) =>
            // check if any of the required values is less than or equal to the offered scalar value
requiredValues.map(_.toDouble).exists(_ <= scalarValue.getValue)
case Some(rangeValue: Value.Range) =>
val offerRange = rangeValue.getBegin to rangeValue.getEnd
// Check if there is some required value that is between the ranges specified
// Note: We only support the ability to specify discrete values, in the future
// we may expand it to subsume ranges specified with a XX..YY value or something
// similar to that.
requiredValues.map(_.toLong).exists(offerRange.contains(_))
case Some(offeredValue: Value.Set) =>
            // check if the specified required values are a subset of the offered set
requiredValues.subsetOf(offeredValue.getItemList.asScala.toSet)
case Some(textValue: Value.Text) =>
            // check if the offered text value equals one of the specified values; if multiple
            // values are specified, we succeed when any of them matches.
requiredValues.contains(textValue.getValue)
}
}
}
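  // Example (illustrative): with slaveOfferConstraints = Map("zone" -> Set("us-east-1a", "us-east-1b"))
  // and an offer whose "zone" attribute is the text value "us-east-1a", the Value.Text branch matches
  // and this returns true; an offer that advertises no "zone" attribute at all returns false.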
/**
* Parses the attributes constraints provided to spark and build a matching data struct:
* Map[<attribute-name>, Set[values-to-match]]
* The constraints are specified as ';' separated key-value pairs where keys and values
* are separated by ':'. The ':' implies equality (for singular values) and "is one of" for
* multiple values (comma separated). For example:
* {{{
* parseConstraintString("os:centos7;zone:us-east-1a,us-east-1b")
* // would result in
* <code>
* Map(
* "os" -> Set("centos7"),
* "zone": -> Set("us-east-1a", "us-east-1b")
* )
* }}}
*
* Mesos documentation: http://mesos.apache.org/documentation/attributes-resources/
* https://github.com/apache/mesos/blob/master/src/common/values.cpp
* https://github.com/apache/mesos/blob/master/src/common/attributes.cpp
*
   * @param constraintsVal constraints string consisting of ';' separated key-value pairs (separated
* by ':')
* @return Map of constraints to match resources offers.
*/
def parseConstraintString(constraintsVal: String): Map[String, Set[String]] = {
/*
Based on mesos docs:
attributes : attribute ( ";" attribute )*
attribute : labelString ":" ( labelString | "," )+
labelString : [a-zA-Z0-9_/.-]
*/
val splitter = Splitter.on(';').trimResults().withKeyValueSeparator(':')
// kv splitter
if (constraintsVal.isEmpty) {
Map()
} else {
try {
splitter.split(constraintsVal).asScala.toMap.mapValues(v =>
if (v == null || v.isEmpty) {
Set[String]()
} else {
v.split(',').toSet
}
)
} catch {
case NonFatal(e) =>
throw new IllegalArgumentException(s"Bad constraint string: $constraintsVal", e)
}
}
}
// These defaults copied from YARN
private val MEMORY_OVERHEAD_FRACTION = 0.10
private val MEMORY_OVERHEAD_MINIMUM = 384
/**
* Return the amount of memory to allocate to each executor, taking into account
* container overheads.
* @param sc SparkContext to use to get `spark.mesos.executor.memoryOverhead` value
   * @return the executor memory plus the overhead, where the overhead is the configured
   *         `spark.mesos.executor.memoryOverhead`, defaulting to
   *         max(0.10 * executorMemory, MEMORY_OVERHEAD_MINIMUM)
*/
def executorMemory(sc: SparkContext): Int = {
sc.conf.getInt("spark.mesos.executor.memoryOverhead",
math.max(MEMORY_OVERHEAD_FRACTION * sc.executorMemory, MEMORY_OVERHEAD_MINIMUM).toInt) +
sc.executorMemory
}
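  // Worked example (illustrative): with spark.executor.memory = 4096 MB and no explicit
  // spark.mesos.executor.memoryOverhead, the overhead defaults to max(0.10 * 4096, 384).toInt = 409,
  // so executorMemory(sc) returns 4096 + 409 = 4505 MB.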
def setupUris(uris: String, builder: CommandInfo.Builder): Unit = {
uris.split(",").foreach { uri =>
builder.addUris(CommandInfo.URI.newBuilder().setValue(uri.trim()))
}
}
protected def getRejectOfferDurationForUnmetConstraints(sc: SparkContext): Long = {
sc.conf.getTimeAsSeconds("spark.mesos.rejectOfferDurationForUnmetConstraints", "120s")
}
protected def getRejectOfferDurationForReachedMaxCores(sc: SparkContext): Long = {
sc.conf.getTimeAsSeconds("spark.mesos.rejectOfferDurationForReachedMaxCores", "120s")
}
}
| gioenn/xSpark | core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala | Scala | apache-2.0 | 14,160 |
package forms
import javax.inject.Inject
import models.Boards
import play.api.data.validation.Constraints._
import play.api.data.validation.{Constraint, Invalid, Valid}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import play.api.data.{Form, Mapping}
import play.api.data.Forms._
case class NewBoardFormData(key: String, name: String, mobile_name: String, order: Int, search_flag: Boolean)
// @Singleton
class BoardForms @Inject()(val boards: Boards) {
private[this] def form() = Form(
mapping(
"key" -> text,
"name" -> text,
"mobile_name" -> text,
"order" -> number,
"search_flag" -> boolean
)(NewBoardFormData.apply)(NewBoardFormData.unapply)
)
val newForm = form()
// val registerProfile = profileForm()
}
| ygpark2/play-ain-board | app/forms/Board.scala | Scala | bsd-3-clause | 792 |
package cs220.queue3
/** A "faster" functional queue.
*
* This functional queue uses a functional [List] as its internal
* implementation of a queue.
*
 * In this implementation we keep two lists: one for the leading
 * elements, which we dequeue from, and one for the trailing
 * elements, which represent the end of the queue.
*/
class FasterQueue[T] private [queue3] (
// We use the val syntax and private in the constructor to create
// member variables representing the leading and trailing elements
// as well as private for information hiding:
// CHANGE: change leading/trailing to vars:
private var leading: List[T],
private var trailing: List[T]
) extends Queue[T] {
// mirror returns a queue with leading elements.
//
// In particular, if we have no leading elements we create a new
// queue with its leading elements being the reverse of the trailing
// elements and Nil as its trailing elements. If we do have leading
// elements then we do nothing.
//
// We use private to hide this method from outside this class.
// CHANGE: introduce vars so we only need to update leading and
// trailing once:
private def mirror() =
if (leading.isEmpty) {
while (!trailing.isEmpty) {
leading = trailing.head :: leading
trailing = trailing.tail
}
}
// The head of the queue is the head of the mirror of this queue.
//
// i-clicker: In what circumstance is this method not efficient?
//
// A) When leading contains elements.
// B) When trailing is empty.
// C) When head is called repeatedly without a call to tail.
// D) When tail is called repeatedly without a call to head.
// E) This method is always efficent.
//
// CHANGE: call mirror() for side-effect and return head.
def head = {
mirror()
leading.head
}
// The tail of the queue is a new queue where the leading elements
// are the tail of the mirror of this queue and its trailing elements
// are the trailing elements of the mirror.
// CHANGE: call mirror() for side-effect and return new queue.
def tail = {
mirror()
new FasterQueue(leading.tail, trailing)
}
// enqueue create a new queue with the leading elements being the
// leading elements of the new queue and the trailing elements being
// a new list constructed by consing the new element x to the
// trailing elements.
def enqueue[U >: T](x: U) =
new FasterQueue[U](leading, x :: trailing)
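  // Worked example (sketch): starting from an empty queue, enqueue(1).enqueue(2).enqueue(3) yields
  // leading = Nil and trailing = List(3, 2, 1). The first call to head (or tail) runs mirror(),
  // leaving leading = List(1, 2, 3) and trailing = Nil, so the reversal cost is paid at most once
  // per element and subsequent head calls are constant time.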
// We add a toString method to print it out nicely:
override def toString = {
val xs = leading ::: trailing.reverse
"Queue(" + xs.mkString(",") + ")"
}
def isEmpty: Boolean = leading.isEmpty && trailing.isEmpty
}
| umass-cs-220/week-09-libraries | code/types/src/main/scala/cs220/queue3/FasterQueue.scala | Scala | apache-2.0 | 2,687 |
/*
Copyright (C) 2016 Mauricio Bustos ([email protected]) & 338.oakland creations
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com._338oaklandcreations.fabric.machinery
object WindflowersPlacement extends LedPlacement {
val templateFactor = 5.0
def circularPositions(count: Int, offset: Double): List[(Double, Double)] = {
(0 to count - 1).map(x => {
(offset * Math.sin(2.0 * Math.PI / count * x), offset * Math.cos(2.0 * Math.PI / count * x))
}).toList
}
val template = List(
(0.0, 0.0), (0.0, 5.0), (0.0, 10.0), (0.0, 15.0),
(10.0, -10.0), (10.0, -5.0), (10.0, 0.0), (10.0, 5.0), (10.0, 10.0), (10.0, 15.0), (10.0, 20.0), (10.0, 25.0),
(20.0, -25.0), (20.0, -20.0), (20.0, -15.0), (20.0, -10.0), (20.0, -5.0), (20.0, 0.0), (20.0, 5.0), (20.0, 10.0), (20.0, 15.0), (20.0, 20.0), (20.0, 25.0), (20.0, 30.0), (20.0, 35.0), (20.0, 40.0), (20.0, 45.0),
(30.0, -25.0), (30.0, -20.0), (30.0, -15.0), (30.0, -10.0), (30.0, -5.0), (30.0, 0.0), (30.0, 5.0), (30.0, 10.0), (30.0, 15.0), (30.0, 20.0), (30.0, 25.0), (30.0, 30.0), (30.0, 35.0), (30.0, 40.0), (30.0, 45.0),
(40.0, -10.0), (40.0, -5.0), (40.0, 0.0), (40.0, 5.0), (40.0, 10.0), (40.0, 15.0), (40.0, 20.0), (40.0, 25.0),
(50.0, 0.0), (50.0, 5.0), (50.0, 10.0), (50.0, 15.0))
def offset(start: List[(Double, Double)], offsetPoint: (Int, Int)): List[(Double, Double)] = {
start.map(point => (point._1 / templateFactor + offsetPoint._1.toDouble, point._2 / templateFactor + offsetPoint._2.toDouble))
}
def tails(offset: Int, count: Int): List[(Double, Double)] = {
(1 to count).map({ x =>
(offset.toDouble, (x * 2 + 10).toDouble)
}).toList
}
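  // Layout sketch: each flower is the `template` point cloud scaled down by `templateFactor` and
  // shifted by an (x, y) offset via `offset`, while `tails(x, count)` adds a short vertical run of
  // LEDs in column x. The `positions` value below stitches five such flowers and their tails together.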
override val positions = {
tails(20, 4) ++ offset(template, (20, 0)) ++ tails(30, 4) ++
tails(35, 3) ++ offset(template, (35, 15)) ++ tails(45, 3) ++
tails(50, 2) ++ offset(template, (50, 0)) ++ tails(60, 2) ++
tails(65, 3) ++ offset(template, (65, 15)) ++ tails(75, 3) ++
tails(80, 2) ++ offset(template, (80, 0)) ++ tails(90, 2)
}
override val layoutWidth = 100
writePositions("windflowersPlacement.txt")
}
| 338oaklandcreations-fabric/machinery | src/main/scala/com/338oaklandcreations/fabric/machinery/WindflowersPlacement.scala | Scala | gpl-3.0 | 2,739 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.jobs
import scala.collection.mutable.{HashMap, HashSet, ListBuffer}
import org.apache.spark._
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.ui.jobs.UIData._
/**
* :: DeveloperApi ::
* Tracks task-level information to be displayed in the UI.
*
* All access to the data structures in this class must be synchronized on the
* class, since the UI thread and the EventBus loop may otherwise be reading and
* updating the internal data structures concurrently.
*/
@DeveloperApi
class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
import JobProgressListener._
// Define a handful of type aliases so that data structures' types can serve as documentation.
// These type aliases are public because they're used in the types of public fields:
type JobId = Int
type StageId = Int
type StageAttemptId = Int
type PoolName = String
type ExecutorId = String
// Jobs:
val activeJobs = new HashMap[JobId, JobUIData]
val completedJobs = ListBuffer[JobUIData]()
val failedJobs = ListBuffer[JobUIData]()
val jobIdToData = new HashMap[JobId, JobUIData]
// Stages:
val pendingStages = new HashMap[StageId, StageInfo]
val activeStages = new HashMap[StageId, StageInfo]
val completedStages = ListBuffer[StageInfo]()
val skippedStages = ListBuffer[StageInfo]()
val failedStages = ListBuffer[StageInfo]()
val stageIdToData = new HashMap[(StageId, StageAttemptId), StageUIData]
val stageIdToInfo = new HashMap[StageId, StageInfo]
val stageIdToActiveJobIds = new HashMap[StageId, HashSet[JobId]]
val poolToActiveStages = HashMap[PoolName, HashMap[StageId, StageInfo]]()
// Total of completed and failed stages that have ever been run. These may be greater than
// `completedStages.size` and `failedStages.size` if we have run more stages or jobs than
// JobProgressListener's retention limits.
var numCompletedStages = 0
var numFailedStages = 0
// Misc:
val executorIdToBlockManagerId = HashMap[ExecutorId, BlockManagerId]()
def blockManagerIds = executorIdToBlockManagerId.values.toSeq
var schedulingMode: Option[SchedulingMode] = None
// To limit the total memory usage of JobProgressListener, we only track information for a fixed
// number of non-active jobs and stages (there is no limit for active jobs and stages):
val retainedStages = conf.getInt("spark.ui.retainedStages", DEFAULT_RETAINED_STAGES)
val retainedJobs = conf.getInt("spark.ui.retainedJobs", DEFAULT_RETAINED_JOBS)
// We can test for memory leaks by ensuring that collections that track non-active jobs and
// stages do not grow without bound and that collections for active jobs/stages eventually become
// empty once Spark is idle. Let's partition our collections into ones that should be empty
  // once Spark is idle and ones that should have hard- or soft-limited sizes.
// These methods are used by unit tests, but they're defined here so that people don't forget to
// update the tests when adding new collections. Some collections have multiple levels of
// nesting, etc, so this lets us customize our notion of "size" for each structure:
// These collections should all be empty once Spark is idle (no active stages / jobs):
private[spark] def getSizesOfActiveStateTrackingCollections: Map[String, Int] = {
Map(
"activeStages" -> activeStages.size,
"activeJobs" -> activeJobs.size,
"poolToActiveStages" -> poolToActiveStages.values.map(_.size).sum,
"stageIdToActiveJobIds" -> stageIdToActiveJobIds.values.map(_.size).sum
)
}
// These collections should stop growing once we have run at least `spark.ui.retainedStages`
// stages and `spark.ui.retainedJobs` jobs:
private[spark] def getSizesOfHardSizeLimitedCollections: Map[String, Int] = {
Map(
"completedJobs" -> completedJobs.size,
"failedJobs" -> failedJobs.size,
"completedStages" -> completedStages.size,
"skippedStages" -> skippedStages.size,
"failedStages" -> failedStages.size
)
}
// These collections may grow arbitrarily, but once Spark becomes idle they should shrink back to
// some bound based on the `spark.ui.retainedStages` and `spark.ui.retainedJobs` settings:
private[spark] def getSizesOfSoftSizeLimitedCollections: Map[String, Int] = {
Map(
"jobIdToData" -> jobIdToData.size,
"stageIdToData" -> stageIdToData.size,
"stageIdToStageInfo" -> stageIdToInfo.size
)
}
/** If stages is too large, remove and garbage collect old stages */
private def trimStagesIfNecessary(stages: ListBuffer[StageInfo]) = synchronized {
if (stages.size > retainedStages) {
val toRemove = math.max(retainedStages / 10, 1)
stages.take(toRemove).foreach { s =>
stageIdToData.remove((s.stageId, s.attemptId))
stageIdToInfo.remove(s.stageId)
}
stages.trimStart(toRemove)
}
}
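  // Worked example (illustrative): with spark.ui.retainedStages = 1000 and 1001 retained stages,
  // toRemove = math.max(1000 / 10, 1) = 100, so the oldest 100 stages and their per-stage data are
  // dropped in one batch, keeping the amortized cost of trimming low.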
/** If jobs is too large, remove and garbage collect old jobs */
private def trimJobsIfNecessary(jobs: ListBuffer[JobUIData]) = synchronized {
if (jobs.size > retainedJobs) {
val toRemove = math.max(retainedJobs / 10, 1)
jobs.take(toRemove).foreach { job =>
jobIdToData.remove(job.jobId)
}
jobs.trimStart(toRemove)
}
}
override def onJobStart(jobStart: SparkListenerJobStart) = synchronized {
val jobGroup = for (
props <- Option(jobStart.properties);
group <- Option(props.getProperty(SparkContext.SPARK_JOB_GROUP_ID))
) yield group
val jobData: JobUIData =
new JobUIData(
jobId = jobStart.jobId,
submissionTime = Option(jobStart.time).filter(_ >= 0),
stageIds = jobStart.stageIds,
jobGroup = jobGroup,
status = JobExecutionStatus.RUNNING)
jobStart.stageInfos.foreach(x => pendingStages(x.stageId) = x)
// Compute (a potential underestimate of) the number of tasks that will be run by this job.
// This may be an underestimate because the job start event references all of the result
// stages' transitive stage dependencies, but some of these stages might be skipped if their
// output is available from earlier runs.
// See https://github.com/apache/spark/pull/3009 for a more extensive discussion.
jobData.numTasks = {
val allStages = jobStart.stageInfos
val missingStages = allStages.filter(_.completionTime.isEmpty)
missingStages.map(_.numTasks).sum
}
jobIdToData(jobStart.jobId) = jobData
activeJobs(jobStart.jobId) = jobData
for (stageId <- jobStart.stageIds) {
stageIdToActiveJobIds.getOrElseUpdate(stageId, new HashSet[StageId]).add(jobStart.jobId)
}
// If there's no information for a stage, store the StageInfo received from the scheduler
// so that we can display stage descriptions for pending stages:
for (stageInfo <- jobStart.stageInfos) {
stageIdToInfo.getOrElseUpdate(stageInfo.stageId, stageInfo)
stageIdToData.getOrElseUpdate((stageInfo.stageId, stageInfo.attemptId), new StageUIData)
}
}
override def onJobEnd(jobEnd: SparkListenerJobEnd) = synchronized {
val jobData = activeJobs.remove(jobEnd.jobId).getOrElse {
logWarning(s"Job completed for unknown job ${jobEnd.jobId}")
new JobUIData(jobId = jobEnd.jobId)
}
jobData.completionTime = Option(jobEnd.time).filter(_ >= 0)
jobData.stageIds.foreach(pendingStages.remove)
jobEnd.jobResult match {
case JobSucceeded =>
completedJobs += jobData
trimJobsIfNecessary(completedJobs)
jobData.status = JobExecutionStatus.SUCCEEDED
case JobFailed(exception) =>
failedJobs += jobData
trimJobsIfNecessary(failedJobs)
jobData.status = JobExecutionStatus.FAILED
}
for (stageId <- jobData.stageIds) {
stageIdToActiveJobIds.get(stageId).foreach { jobsUsingStage =>
jobsUsingStage.remove(jobEnd.jobId)
if (jobsUsingStage.isEmpty) {
stageIdToActiveJobIds.remove(stageId)
}
stageIdToInfo.get(stageId).foreach { stageInfo =>
if (stageInfo.submissionTime.isEmpty) {
// if this stage is pending, it won't complete, so mark it as "skipped":
skippedStages += stageInfo
trimStagesIfNecessary(skippedStages)
jobData.numSkippedStages += 1
jobData.numSkippedTasks += stageInfo.numTasks
}
}
}
}
}
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted) = synchronized {
val stage = stageCompleted.stageInfo
stageIdToInfo(stage.stageId) = stage
val stageData = stageIdToData.getOrElseUpdate((stage.stageId, stage.attemptId), {
logWarning("Stage completed for unknown stage " + stage.stageId)
new StageUIData
})
for ((id, info) <- stageCompleted.stageInfo.accumulables) {
stageData.accumulables(id) = info
}
poolToActiveStages.get(stageData.schedulingPool).foreach { hashMap =>
hashMap.remove(stage.stageId)
}
activeStages.remove(stage.stageId)
if (stage.failureReason.isEmpty) {
completedStages += stage
numCompletedStages += 1
trimStagesIfNecessary(completedStages)
} else {
failedStages += stage
numFailedStages += 1
trimStagesIfNecessary(failedStages)
}
for (
activeJobsDependentOnStage <- stageIdToActiveJobIds.get(stage.stageId);
jobId <- activeJobsDependentOnStage;
jobData <- jobIdToData.get(jobId)
) {
jobData.numActiveStages -= 1
if (stage.failureReason.isEmpty) {
jobData.completedStageIndices.add(stage.stageId)
} else {
jobData.numFailedStages += 1
}
}
}
/** For FIFO scheduling, all stages are placed in the "default" pool, but the pool name is meaningless there. */
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) = synchronized {
val stage = stageSubmitted.stageInfo
activeStages(stage.stageId) = stage
pendingStages.remove(stage.stageId)
val poolName = Option(stageSubmitted.properties).map {
p => p.getProperty("spark.scheduler.pool", DEFAULT_POOL_NAME)
}.getOrElse(DEFAULT_POOL_NAME)
stageIdToInfo(stage.stageId) = stage
val stageData = stageIdToData.getOrElseUpdate((stage.stageId, stage.attemptId), new StageUIData)
stageData.schedulingPool = poolName
stageData.description = Option(stageSubmitted.properties).flatMap {
p => Option(p.getProperty(SparkContext.SPARK_JOB_DESCRIPTION))
}
val stages = poolToActiveStages.getOrElseUpdate(poolName, new HashMap[Int, StageInfo])
stages(stage.stageId) = stage
for (
activeJobsDependentOnStage <- stageIdToActiveJobIds.get(stage.stageId);
jobId <- activeJobsDependentOnStage;
jobData <- jobIdToData.get(jobId)
) {
jobData.numActiveStages += 1
}
}
override def onTaskStart(taskStart: SparkListenerTaskStart) = synchronized {
val taskInfo = taskStart.taskInfo
if (taskInfo != null) {
val stageData = stageIdToData.getOrElseUpdate((taskStart.stageId, taskStart.stageAttemptId), {
logWarning("Task start for unknown stage " + taskStart.stageId)
new StageUIData
})
stageData.numActiveTasks += 1
stageData.taskData.put(taskInfo.taskId, new TaskUIData(taskInfo))
}
for (
activeJobsDependentOnStage <- stageIdToActiveJobIds.get(taskStart.stageId);
jobId <- activeJobsDependentOnStage;
jobData <- jobIdToData.get(jobId)
) {
jobData.numActiveTasks += 1
}
}
override def onTaskGettingResult(taskGettingResult: SparkListenerTaskGettingResult) {
// Do nothing: because we don't do a deep copy of the TaskInfo, the TaskInfo in
// stageToTaskInfos already has the updated status.
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd) = synchronized {
val info = taskEnd.taskInfo
// If stage attempt id is -1, it means the DAGScheduler had no idea which attempt this task
// completion event is for. Let's just drop it here. This means we might have some speculative
// tasks on the web UI that are never marked as complete.
if (info != null && taskEnd.stageAttemptId != -1) {
val stageData = stageIdToData.getOrElseUpdate((taskEnd.stageId, taskEnd.stageAttemptId), {
logWarning("Task end for unknown stage " + taskEnd.stageId)
new StageUIData
})
for (accumulableInfo <- info.accumulables) {
stageData.accumulables(accumulableInfo.id) = accumulableInfo
}
val execSummaryMap = stageData.executorSummary
val execSummary = execSummaryMap.getOrElseUpdate(info.executorId, new ExecutorSummary)
taskEnd.reason match {
case Success =>
execSummary.succeededTasks += 1
case _ =>
execSummary.failedTasks += 1
}
execSummary.taskTime += info.duration
stageData.numActiveTasks -= 1
val (errorMessage, metrics): (Option[String], Option[TaskMetrics]) =
taskEnd.reason match {
case org.apache.spark.Success =>
stageData.completedIndices.add(info.index)
stageData.numCompleteTasks += 1
(None, Option(taskEnd.taskMetrics))
case e: ExceptionFailure => // Handle ExceptionFailure because we might have metrics
stageData.numFailedTasks += 1
(Some(e.toErrorString), e.metrics)
case e: TaskFailedReason => // All other failure cases
stageData.numFailedTasks += 1
(Some(e.toErrorString), None)
}
if (!metrics.isEmpty) {
val oldMetrics = stageData.taskData.get(info.taskId).flatMap(_.taskMetrics)
updateAggregateMetrics(stageData, info.executorId, metrics.get, oldMetrics)
}
val taskData = stageData.taskData.getOrElseUpdate(info.taskId, new TaskUIData(info))
taskData.taskInfo = info
taskData.taskMetrics = metrics
taskData.errorMessage = errorMessage
for (
activeJobsDependentOnStage <- stageIdToActiveJobIds.get(taskEnd.stageId);
jobId <- activeJobsDependentOnStage;
jobData <- jobIdToData.get(jobId)
) {
jobData.numActiveTasks -= 1
taskEnd.reason match {
case Success =>
jobData.numCompletedTasks += 1
case _ =>
jobData.numFailedTasks += 1
}
}
}
}
/**
* Upon receiving new metrics for a task, updates the per-stage and per-executor-per-stage
* aggregate metrics by calculating deltas between the currently recorded metrics and the new
* metrics.
*/
def updateAggregateMetrics(
stageData: StageUIData,
execId: String,
taskMetrics: TaskMetrics,
oldMetrics: Option[TaskMetrics]) {
val execSummary = stageData.executorSummary.getOrElseUpdate(execId, new ExecutorSummary)
val shuffleWriteDelta =
(taskMetrics.shuffleWriteMetrics.map(_.shuffleBytesWritten).getOrElse(0L)
- oldMetrics.flatMap(_.shuffleWriteMetrics).map(_.shuffleBytesWritten).getOrElse(0L))
stageData.shuffleWriteBytes += shuffleWriteDelta
execSummary.shuffleWrite += shuffleWriteDelta
val shuffleWriteRecordsDelta =
(taskMetrics.shuffleWriteMetrics.map(_.shuffleRecordsWritten).getOrElse(0L)
- oldMetrics.flatMap(_.shuffleWriteMetrics).map(_.shuffleRecordsWritten).getOrElse(0L))
stageData.shuffleWriteRecords += shuffleWriteRecordsDelta
execSummary.shuffleWriteRecords += shuffleWriteRecordsDelta
val shuffleReadDelta =
(taskMetrics.shuffleReadMetrics.map(_.totalBytesRead).getOrElse(0L)
- oldMetrics.flatMap(_.shuffleReadMetrics).map(_.totalBytesRead).getOrElse(0L))
stageData.shuffleReadTotalBytes += shuffleReadDelta
execSummary.shuffleRead += shuffleReadDelta
val shuffleReadRecordsDelta =
(taskMetrics.shuffleReadMetrics.map(_.recordsRead).getOrElse(0L)
- oldMetrics.flatMap(_.shuffleReadMetrics).map(_.recordsRead).getOrElse(0L))
stageData.shuffleReadRecords += shuffleReadRecordsDelta
execSummary.shuffleReadRecords += shuffleReadRecordsDelta
val inputBytesDelta =
(taskMetrics.inputMetrics.map(_.bytesRead).getOrElse(0L)
- oldMetrics.flatMap(_.inputMetrics).map(_.bytesRead).getOrElse(0L))
stageData.inputBytes += inputBytesDelta
execSummary.inputBytes += inputBytesDelta
val inputRecordsDelta =
(taskMetrics.inputMetrics.map(_.recordsRead).getOrElse(0L)
- oldMetrics.flatMap(_.inputMetrics).map(_.recordsRead).getOrElse(0L))
stageData.inputRecords += inputRecordsDelta
execSummary.inputRecords += inputRecordsDelta
val outputBytesDelta =
(taskMetrics.outputMetrics.map(_.bytesWritten).getOrElse(0L)
- oldMetrics.flatMap(_.outputMetrics).map(_.bytesWritten).getOrElse(0L))
stageData.outputBytes += outputBytesDelta
execSummary.outputBytes += outputBytesDelta
val outputRecordsDelta =
(taskMetrics.outputMetrics.map(_.recordsWritten).getOrElse(0L)
- oldMetrics.flatMap(_.outputMetrics).map(_.recordsWritten).getOrElse(0L))
stageData.outputRecords += outputRecordsDelta
execSummary.outputRecords += outputRecordsDelta
val diskSpillDelta =
taskMetrics.diskBytesSpilled - oldMetrics.map(_.diskBytesSpilled).getOrElse(0L)
stageData.diskBytesSpilled += diskSpillDelta
execSummary.diskBytesSpilled += diskSpillDelta
val memorySpillDelta =
taskMetrics.memoryBytesSpilled - oldMetrics.map(_.memoryBytesSpilled).getOrElse(0L)
stageData.memoryBytesSpilled += memorySpillDelta
execSummary.memoryBytesSpilled += memorySpillDelta
val timeDelta =
taskMetrics.executorRunTime - oldMetrics.map(_.executorRunTime).getOrElse(0L)
stageData.executorRunTime += timeDelta
}
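// Editorial worked example (not part of the original file): if a task's previous metrics
// reported shuffleBytesWritten = 100 and a new update reports 150, shuffleWriteDelta is 50,
// so the per-stage and per-executor aggregates each grow by 50 instead of re-adding the full 150.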
override def onExecutorMetricsUpdate(executorMetricsUpdate: SparkListenerExecutorMetricsUpdate) {
for ((taskId, sid, sAttempt, taskMetrics) <- executorMetricsUpdate.taskMetrics) {
val stageData = stageIdToData.getOrElseUpdate((sid, sAttempt), {
logWarning("Metrics update for task in unknown stage " + sid)
new StageUIData
})
val taskData = stageData.taskData.get(taskId)
taskData.map { t =>
if (!t.taskInfo.finished) {
updateAggregateMetrics(stageData, executorMetricsUpdate.execId, taskMetrics,
t.taskMetrics)
// Overwrite task metrics
t.taskMetrics = Some(taskMetrics)
}
}
}
}
override def onEnvironmentUpdate(environmentUpdate: SparkListenerEnvironmentUpdate) {
synchronized {
schedulingMode = environmentUpdate
.environmentDetails("Spark Properties").toMap
.get("spark.scheduler.mode")
.map(SchedulingMode.withName)
}
}
override def onBlockManagerAdded(blockManagerAdded: SparkListenerBlockManagerAdded) {
synchronized {
val blockManagerId = blockManagerAdded.blockManagerId
val executorId = blockManagerId.executorId
executorIdToBlockManagerId(executorId) = blockManagerId
}
}
override def onBlockManagerRemoved(blockManagerRemoved: SparkListenerBlockManagerRemoved) {
synchronized {
val executorId = blockManagerRemoved.blockManagerId.executorId
executorIdToBlockManagerId.remove(executorId)
}
}
}
private object JobProgressListener {
val DEFAULT_POOL_NAME = "default"
val DEFAULT_RETAINED_STAGES = 1000
val DEFAULT_RETAINED_JOBS = 1000
}
|
hengyicai/OnlineAggregationUCAS
|
core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala
|
Scala
|
apache-2.0
| 20,456 |
package play.api.libs.concurrent
import org.specs2.mutable._
import scala.concurrent.ExecutionContext.Implicits.global
object PromiseSpec extends Specification {
"A Promise" should {
"recover after an exception using recover" in {
val promise = Promise[Int]()
promise.redeem(6/0)
promise.future.recover{ case e: ArithmeticException => 0 }
.value1.get must equalTo (0)
}
}
}
|
michaelahlers/team-awesome-wedding
|
vendor/play-2.2.1/framework/src/play/src/test/scala/play/api/PromiseSpec.scala
|
Scala
|
mit
| 420 |
package mesosphere.marathon.core.history.impl
import akka.actor.{ ActorRef, Props }
import akka.testkit.{ ImplicitSender, TestActorRef }
import mesosphere.marathon.core.event.{ MesosStatusUpdateEvent, UnhealthyTaskKillEvent }
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ TaskFailure, Timestamp }
import mesosphere.marathon.storage.repository.TaskFailureRepository
import mesosphere.marathon.test.{ MarathonActorSupport, MarathonSpec }
import org.apache.mesos.Protos.{ NetworkInfo, TaskState }
import org.mockito.Matchers.any
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{ BeforeAndAfterAll, Matchers }
import scala.collection.immutable.Seq
class HistoryActorTest
extends MarathonActorSupport
with MarathonSpec
with MockitoSugar
with BeforeAndAfterAll
with Matchers
with ImplicitSender {
import org.apache.mesos.Protos.TaskState._
var historyActor: ActorRef = _
var failureRepo: TaskFailureRepository = _
before {
failureRepo = mock[TaskFailureRepository]
historyActor = TestActorRef(Props(
new HistoryActor(system.eventStream, failureRepo)
))
}
test("Store TASK_FAILED") {
val message = statusMessage(TASK_FAILED)
historyActor ! message
verify(failureRepo).store(TaskFailure.FromMesosStatusUpdateEvent(message).get)
}
test("Store TASK_ERROR") {
val message = statusMessage(TASK_ERROR)
historyActor ! message
verify(failureRepo).store(TaskFailure.FromMesosStatusUpdateEvent(message).get)
}
test("Store TASK_LOST") {
val message = statusMessage(TASK_LOST)
historyActor ! message
verify(failureRepo).store(TaskFailure.FromMesosStatusUpdateEvent(message).get)
}
test("Ignore TASK_RUNNING") {
val message = statusMessage(TASK_RUNNING)
historyActor ! message
verify(failureRepo, times(0)).store(any())
}
test("Ignore TASK_FINISHED") {
val message = statusMessage(TASK_FINISHED)
historyActor ! message
verify(failureRepo, times(0)).store(any())
}
test("Ignore TASK_KILLED") {
val message = statusMessage(TASK_KILLED)
historyActor ! message
verify(failureRepo, times(0)).store(any())
}
test("Ignore TASK_STAGING") {
val message = statusMessage(TASK_STAGING)
historyActor ! message
verify(failureRepo, times(0)).store(any())
}
test("Store UnhealthyTaskKilled") {
val message = unhealthyTaskKilled()
historyActor ! message
verify(failureRepo).store(TaskFailure.FromUnhealthyTaskKillEvent(message))
}
private def statusMessage(state: TaskState) = {
val ipAddress: NetworkInfo.IPAddress =
NetworkInfo.IPAddress
.newBuilder()
.setIpAddress("123.123.123.123")
.setProtocol(NetworkInfo.Protocol.IPv4)
.build()
MesosStatusUpdateEvent(
slaveId = "slaveId",
taskId = Task.Id("taskId"),
taskStatus = state.name(),
message = "message",
appId = "appId".toPath,
host = "host",
ipAddresses = Some(Seq(ipAddress)),
ports = Nil,
version = Timestamp.now().toString
)
}
private def unhealthyTaskKilled() = {
val taskId = Task.Id("taskId")
UnhealthyTaskKillEvent(
appId = StringPathId("app").toPath,
taskId = taskId,
version = Timestamp(1024),
reason = "unknown",
host = "localhost",
slaveId = None
)
}
}
|
timcharper/marathon
|
src/test/scala/mesosphere/marathon/core/history/impl/HistoryActorTest.scala
|
Scala
|
apache-2.0
| 3,462 |
package com.holdenkarau.spark.testing
import org.apache.spark.api.java.JavaRDD
object JavaRDDComparisons extends JavaTestSuite {
/**
* Asserts two RDDs are equal (with the same order).
* If they are equal the assertion succeeds; otherwise it fails.
*/
def assertRDDEqualsWithOrder[T](expected: JavaRDD[T], result: JavaRDD[T]): Unit = {
assertTrue(compareWithOrder(expected, result).isEmpty)
}
/**
* Compare two RDDs. If they are equal returns None, otherwise
* returns Some with the first mismatch. Assumes we have the same partitioner.
*/
def compareWithOrder[T](expected: JavaRDD[T], result: JavaRDD[T]): Option[(Option[T], Option[T])] = {
implicit val ctag = Utils.fakeClassTag[T]
RDDComparisons.compareWithOrder(expected.rdd, result.rdd)
}
/**
* Asserts two RDDs are equal (unordered).
* If they are equal the assertion succeeds; otherwise it fails.
*/
def assertRDDEquals[T](expected: JavaRDD[T], result: JavaRDD[T]): Unit = {
assertTrue(compare(expected, result).isEmpty)
}
/**
* Compare two RDDs where we do not require the order to be equal.
* If they are equal returns None, otherwise returns Some with the first mismatch.
*
* @return None if the two RDDs are equal, or Some containing the first mismatch information.
*         Mismatch information is a Tuple3 of: (key, number of times this key occurs in the expected RDD,
*         number of times this key occurs in the result RDD)
*/
def compare[T](expected: JavaRDD[T], result: JavaRDD[T]): Option[(T, Integer, Integer)] = {
implicit val ctag = Utils.fakeClassTag[T]
RDDComparisons.compare(expected.rdd, result.rdd)
.map(x => (x._1, Integer.valueOf(x._2), Integer.valueOf(x._3)))
}
}
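// Editorial usage sketch (not part of the original file). It assumes a JavaSparkContext
// named `jsc` is available; the sketch shows the order-insensitive comparison succeeding
// while the ordered variant reports a mismatch for the same data.
object JavaRDDComparisonsUsageSketch {
  import org.apache.spark.api.java.JavaSparkContext

  def example(jsc: JavaSparkContext): Unit = {
    val expected = jsc.parallelize(java.util.Arrays.asList(1, 2, 3))
    val result = jsc.parallelize(java.util.Arrays.asList(3, 2, 1))
    // Same elements with the same multiplicities, so the unordered comparison finds no mismatch.
    assert(JavaRDDComparisons.compare(expected, result).isEmpty)
    // The ordered comparison reports the first positional mismatch, here (Some(1), Some(3)).
    assert(JavaRDDComparisons.compareWithOrder(expected, result).isDefined)
  }
}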
|
mahmoudhanafy/spark-testing-base
|
src/main/1.3/scala/com/holdenkarau/spark/testing/JavaRDDComparisons.scala
|
Scala
|
apache-2.0
| 1,756 |
package models.relations
/**
* Created by ruraj on 7/17/14.
*/
case class DealerProduct(dealerId: Long = 0, products: Set[models.Product])
|
ruraj/stock-manager
|
app/models/relations/DealerProduct.scala
|
Scala
|
bsd-3-clause
| 143 |
package slick.jdbc
import java.sql.ResultSet
/** Represents a result set type. */
sealed abstract class ResultSetType(val intValue: Int) { self =>
/** Return this `ResultSetType`, unless it is `Auto` in which case
* the specified result set type is returned instead. */
def withDefault(r: ResultSetType) = this
}
object ResultSetType {
/** The current result set type of the JDBC driver */
case object Auto extends ResultSetType(ResultSet.TYPE_FORWARD_ONLY) {
override def withDefault(r: ResultSetType) = r
}
/** Represents a result set type that only allows result sets to be read sequentially
* (i.e. the cursor may only move forward). */
case object ForwardOnly extends ResultSetType(ResultSet.TYPE_FORWARD_ONLY)
/** Represents a result set type that allows result sets to be navigated in a
* non-linear way while keeping the original data in the result set intact. */
case object ScrollInsensitive extends ResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)
/** Represents a result set type that allows result sets to be navigated in a
* non-linear way, and changes in the underlying data to be observed. */
case object ScrollSensitive extends ResultSetType(ResultSet.TYPE_SCROLL_SENSITIVE)
}
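// Editorial usage sketch (not part of the original file): `withDefault` only substitutes the
// given type when the receiver is Auto; every concrete type returns itself.
object ResultSetTypeUsageSketch {
  // Auto defers to the supplied default.
  val resolvedFromAuto: ResultSetType = ResultSetType.Auto.withDefault(ResultSetType.ScrollInsensitive)
  // A concrete type ignores the supplied default and stays as-is.
  val keptAsIs: ResultSetType = ResultSetType.ForwardOnly.withDefault(ResultSetType.ScrollInsensitive)
}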
|
jkutner/slick
|
slick/src/main/scala/slick/jdbc/ResultSetType.scala
|
Scala
|
bsd-2-clause
| 1,242 |
package sbtavro
package filesorter
import filesorter.TypeComparator.strContainsType
import java.io.File
import scala.collection.mutable
import scala.io.Source
/**
* The order in which avsc files are compiled depends on the underlying file
* system (under OSX its is alphabetical, under some linux distros it's not).
* This is an issue when you have a record type that is used in different
* other types. This ensures that dependent types are compiled in the
* correct order. Code adapted from https://github.com/ch4mpy/sbt-avro/blob/master/src/main/scala/com/c4soft/sbtavro/SbtAvro.scala
* by Jerome Wascongne
*/
object AVSCFileSorter {
def sortSchemaFiles(files: Traversable[File]): Seq[File] = {
val sortedButReversed = mutable.MutableList.empty[File]
def normalizeInput(files: List[File]) = files.sortBy(file => file.getName)
var pending: Traversable[File] = normalizeInput(files.toList)
while(pending.nonEmpty) {
val (used, unused) = usedUnusedSchemas(pending)
sortedButReversed ++= unused
pending = used
}
sortedButReversed.reverse.toSeq
}
def usedUnusedSchemas(files: Traversable[File]): (Traversable[File], Traversable[File]) = {
val usedUnused = files.map { file =>
val fullName = extractFullName(file)
val numUsages = files.count { candidate =>
val candidateName = extractFullName(candidate)
strContainsType(candidateName, fileText(candidate), fullName)
}
(file, numUsages)
}.partition(usedUnused => usedUnused._2 > 0)
(usedUnused._1.map(_._1), usedUnused._2.map(_._1))
}
def extractFullName(f: File): String = {
val txt = fileText(f)
val namespace = namespaceRegex.findFirstMatchIn(txt)
val name = nameRegex.findFirstMatchIn(txt)
val nameGroup = name.get.group(1)
if(namespace.isEmpty) {
nameGroup
} else {
s"${namespace.get.group(1)}.$nameGroup"
}
}
def fileText(f: File): String = {
val src = Source.fromFile(f)
try {
src.getLines.mkString
} finally {
src.close()
}
}
val namespaceRegex = "\\\"namespace\\\"\\s*:\\s*\"([^\\\"]+)\\\"".r
val nameRegex = "\\\"name\\\"\\s*:\\s*\"([^\\\"]+)\\\"".r
}
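// Editorial usage sketch (not part of the original file). The file names and schema contents
// are hypothetical: Address.avsc is assumed to define com.example.Address and User.avsc is
// assumed to reference that type, so the sorter must emit Address.avsc first.
object AVSCFileSorterUsageSketch {
  def example(): Seq[File] = {
    val address = new File("src/main/avro/Address.avsc") // assumed: defines com.example.Address
    val user = new File("src/main/avro/User.avsc")       // assumed: has a field of type com.example.Address
    // Given files matching the assumptions above, the result is Seq(address, user).
    AVSCFileSorter.sortSchemaFiles(Seq(user, address))
  }
}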
|
julianpeeters/sbt-avro
|
src/main/scala/sbtavro/filesorter/AVSCFileSorter.scala
|
Scala
|
bsd-3-clause
| 2,213 |
package com.twitter.finatra.filters
import com.twitter.finagle.{Filter, Service}
import com.twitter.util.Future
class MergedFilter[Req, Resp](
filters: Filter[Req, Resp, Req, Resp]*)
extends Filter[Req, Resp, Req, Resp] {
private val CombinedFilter = filters reduceLeft {_ andThen _}
def apply(request: Req, service: Service[Req, Resp]): Future[Resp] = {
CombinedFilter(request, service)
}
}
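// Editorial usage sketch (not part of the original file). `first` and `second` are placeholder
// filters; the merged filter behaves exactly like `first andThen second` applied to the service.
object MergedFilterUsageSketch {
  def example(
    first: Filter[String, String, String, String],
    second: Filter[String, String, String, String],
    service: Service[String, String]
  ): Future[String] = {
    val merged = new MergedFilter(first, second)
    merged("request", service)
  }
}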
|
tom-chan/finatra
|
utils/src/main/scala/com/twitter/finatra/filters/MergedFilter.scala
|
Scala
|
apache-2.0
| 410 |
package repositories
import repositories.UserAccountRepo.UserAccount
import scala.concurrent.ExecutionContext
class UserAccountRepo {
def findByUserNameO(userName: String)(implicit executor: ExecutionContext): Option[UserAccount] = {
if (userName == "user")
Some(UserAccount(userName, "user", userName))
else
None
}
}
object UserAccountRepo {
case class UserAccount(userName: String, role: String, password: String = "")
}
|
tolomaus/languagedetector_ui
|
app/repositories/UserAccountRepo.scala
|
Scala
|
mit
| 453 |
package codesniffer.decard.test
import java.lang.instrument.Instrumentation
/**
* Created by Bowen Cai on 1/16/2016.
*/
//object Simple extends App {
// for (i <- 0 until 5)
// println(i)
//
// val arr = Array(1, 3, 5, 8, 10)
// for (i <- arr) {
// println(i)
// }
//}
object Simplee {
def main(args: Array[String]) {
for (i <- 0 until 5)
println(i)
var i = 0
while (i < 5) {
println(i)
i+=1
}
// val arr = Array(1, 3, 5, 8, 10)
//
// for (i <- arr) {
// println(i)
// }
}
}
//object MyAgent {
// def premain(args: String, inst: Instrumentation) {
// val array = new Array[Object](10)
// val obj = new Object()
// val size = inst.getObjectSize(obj)
// System.out.println("Bytes used by object: " + size)
// }
//}
|
xkommando/CodeSniffer
|
core/src/test/scala/codesniffer/decard/test/Simple.scala
|
Scala
|
lgpl-3.0
| 839 |
package test.scala.models
import models.AppScanSourceXmlParser
import main.ParsedAppScanSourceXmlData
import models.wrappers.{ SystemEvents, EventReader }
import org.scalatest.FunSpec
import org.mockito.Mockito._
import org.mockito.{Mockito, Matchers}
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.PrivateMethodTester._
import scala.xml.{MetaData, Unparsed, UnprefixedAttribute, Null}
import scala.xml.pull.{EvElemEnd, EvElemStart}
import scala.collection.mutable
@RunWith( classOf[ JUnitRunner ] )
class AppScanSourceXmlParserSpec extends FunSpec
{
describe( "Testing the AppScanSourceXmlParser class" )
{
describe( "Testing the protected methods" )
{
def fixture =
new {
val system = mock( classOf[ SystemEvents ] )
doNothing().when( system ).exit( Matchers.anyInt() )
val parser = new AppScanSourceXmlParser( system, "Linux" )
val data = new ParsedAppScanSourceXmlData
val meta = new UnprefixedAttribute( "Test", Option( Seq( new Unparsed( "Test" ) ) ), Null )
}
describe( "Testing the getAttribute method" )
{
val getAttribute = PrivateMethod[ String ]( 'getAttribute )
it( "should return a string so long as the attribute is present in the MetaData object" )
{
val fix = fixture
val test = fix.parser invokePrivate getAttribute( fix.meta, "Test" )
assert( test == "Test" )
}
it( "should return null if the attribute does not exist in the MetaData object" )
{
val fix = fixture
val test = fix.parser invokePrivate getAttribute( fix.meta, "none" )
assert( test == null )
}
}
describe( "Testing the parseListOfMaps method" )
{
val parseListOfMaps = PrivateMethod( 'parseListOfMaps )
it( "throws an error if the wrong list type is provided" )
{
val fix = fixture
intercept[ Throwable ]
{
fix.parser invokePrivate parseListOfMaps( fix.meta, "incorrect", fix.data )
}
}
it( "adds finding data to the data object when the list type findings is provided" )
{
val fix = fixture
fix.parser invokePrivate parseListOfMaps( fix.meta, "findings", fix.data )
assert( !fix.data.getFindings.isEmpty )
assert( fix.data.getSites.isEmpty )
assert( fix.data.getTaints.isEmpty )
assert( fix.data.getTaintFindings.isEmpty )
}
it( "adds site data to the data object when the list type sites is provided" )
{
val fix = fixture
fix.parser invokePrivate parseListOfMaps( fix.meta, "sites", fix.data )
assert( fix.data.getFindings.isEmpty )
assert( !fix.data.getSites.isEmpty )
assert( fix.data.getTaints.isEmpty )
assert( fix.data.getTaintFindings.isEmpty )
}
it( "adds taint data to the data object when the list type taints is provided" )
{
val fix = fixture
fix.parser invokePrivate parseListOfMaps( fix.meta, "taints", fix.data )
assert( fix.data.getFindings.isEmpty )
assert( fix.data.getSites.isEmpty )
assert( !fix.data.getTaints.isEmpty )
assert( fix.data.getTaintFindings.isEmpty )
}
it( "adds taint finding data to the data object when the list type taintFinding is provided" )
{
val fix = fixture
fix.parser invokePrivate parseListOfMaps( fix.meta, "taintFinding", fix.data )
assert( fix.data.getFindings.isEmpty )
assert( fix.data.getSites.isEmpty )
assert( fix.data.getTaints.isEmpty )
assert( !fix.data.getTaintFindings.isEmpty )
}
}
describe( "Testing the setFiles method" )
{
val setFiles = PrivateMethod( 'setFiles )
it( "adds a file to the list if the path is valid, converts \\\\ characters to / characters on windows" )
{
val fix = fixture
val file = new UnprefixedAttribute( "value", Option( Seq( new Unparsed( "C:\\test\\path\\file.java" ) ) ), Null )
val id = new UnprefixedAttribute( "id", Option( Seq( new Unparsed( "2" ) ) ), file )
val parser = new AppScanSourceXmlParser( fix.system, "Windows" )
val data = new ParsedAppScanSourceXmlData
data.setOs( "Windows" )
data.setCodeLocation( "C:\\test\\" )
parser invokePrivate setFiles( id, data )
assert( data.getFiles.keys.head == "2")
assert( data.getFiles.get( data.getFiles.keys.head ).getOrElse( null ) == "path/file.java" )
}
it( "adds a file to the list if the path is valid, does not convert / characters on linux" )
{
val fix = fixture
val file = new UnprefixedAttribute( "value", Option( Seq( new Unparsed( "/test/path/file.java" ) ) ), Null )
val id = new UnprefixedAttribute( "id", Option( Seq( new Unparsed( "2" ) ) ), file )
fix.data.setOs( "Linux" )
fix.data.setCodeLocation( "/test/" )
fix.parser invokePrivate setFiles( id, fix.data )
assert( fix.data.getFiles.keys.head == "2" )
assert( fix.data.getFiles.get( fix.data.getFiles.keys.head ).getOrElse( null ) == "path/file.java" )
}
it( "does not add a file to the list if the path is invalid, and exits the application" )
{
val fix = fixture
val file = new UnprefixedAttribute( "value", Option( Seq( new Unparsed( "/test/path/file.java" ) ) ), Null )
val id = new UnprefixedAttribute( "id", Option( Seq( new Unparsed( "2" ) ) ), file )
fix.data.setOs( "Linux" )
fix.data.setCodeLocation( "/wrong/" )
fix.parser invokePrivate setFiles( id, fix.data )
assert( fix.data.getFiles.keys.isEmpty )
verify( fix.system, times( 1 ) ).exit( 1 )
}
}
describe( "Testing the setStrings method" )
{
val setStrings = PrivateMethod( 'setStrings )
it( "adds a new string to the list, so long as the key exists" )
{
val fix = fixture
val string = new UnprefixedAttribute( "id", Option( Seq( new Unparsed( "2" ) ) ), Null )
val id = new UnprefixedAttribute( "value", Option( Seq( new Unparsed( "test" ) ) ), string )
fix.parser invokePrivate setStrings( id, fix.data )
assert( fix.data.getStrings.keys.head == "2" )
assert( fix.data.getStrings.get( fix.data.getStrings.keys.head ).getOrElse( null ) == "test" )
}
}
}
describe( "Testing the main parsing loop" )
{
val fixture =
new {
val event = mock( classOf[ EventReader ] )
val system = mock( classOf[ SystemEvents ] )
doNothing().when( system ).exit( Matchers.anyInt() )
val parser = new AppScanSourceXmlParser( system, "Linux" )
val data = new ParsedAppScanSourceXmlData
}
it( "will store the assessment name and if the label of the element is Assessment" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
val assessmentName = new UnprefixedAttribute( "assessee_name", Option( Seq( new Unparsed( "Test" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "Assessment" , assessmentName, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
assert( fix.data.getAssessmentName == "Test" )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will hit the strings and files loop if the element label is StringPool" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "StringPool", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will hit the strings and files loop if the element label is FilePool" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "FilePool", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will call the final recursive loop for the element labeled SitePool" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "SitePool", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will call the final recursive loop for the element labeled FindingDataPool" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "FindingDataPool", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will call the final recursive loop for the element labeled TaintPool" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "TaintPool", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 1 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
it( "will not call any recursive loop if the element has a different label" )
{
val fix = fixture
val spyOnParser = Mockito.spy( fix.parser )
when( fix.event.next ).thenReturn( new EvElemStart( null, "WrongLabel", Null, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
spyOnParser.parse( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "sites", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "findings", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).siteAndFindingLoop( "taints", fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).stringsAndFilesLoop( fix.event, fix.data )
verify( spyOnParser, times( 0 ) ).assessmentLoopOuter( "Application", null, fix.event, fix.data )
}
}
describe( "Testing the recursive looping functions" )
{
class ParserStub( system : SystemEvents, operatingSystem : String ) extends AppScanSourceXmlParser( system : SystemEvents, operatingSystem : String )
{
override protected def setStrings( attribs : MetaData, parsedData : ParsedAppScanSourceXmlData )
{
parsedData.addString( "test", "test" )
}
override protected def setFiles( attribs : MetaData, parsedData : ParsedAppScanSourceXmlData )
{
parsedData.addFile( "test", "test" )
}
override protected def parseListOfMaps( attribs : MetaData, inprogressList : String,
parsedData : ParsedAppScanSourceXmlData )
{
var toAdd = new mutable.ListMap[ String, String ]
toAdd += ( "test" -> "test" )
inprogressList match
{
case "sites" =>
parsedData.addSite( toAdd.toMap )
case "findings" =>
parsedData.addFinding( toAdd.toMap )
case "taints" =>
parsedData.addTaint( toAdd.toMap )
case "taintFindings" =>
parsedData.addTaintFinding( toAdd.toMap )
}
}
}
val fixture =
new {
val event = mock( classOf[ EventReader ] )
val parser = new ParserStub( new SystemEvents(), "Linux" )
}
describe( "Testing the strings and files loop" )
{
it( "will add a string to the list if the element found has the label String" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "String", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.stringsAndFilesLoop( fix.event, data )
assert( data.getStrings.get( "test" ).getOrElse( null ) == "test" )
assert( data.getFiles.get( "test" ).getOrElse( null ) == null )
}
it( "will add a file to the list if the element found has the label File" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "File", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.stringsAndFilesLoop( fix.event, data )
assert( data.getStrings.get( "test" ).getOrElse( null ) == null )
assert( data.getFiles.get( "test" ).getOrElse( null ) == "test" )
}
it( "will not add a new string or file to the list if the element has a different label" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "Nope", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.stringsAndFilesLoop( fix.event, data )
assert( data.getStrings.get( "test" ).getOrElse( null ) == null )
assert( data.getFiles.get( "test" ).getOrElse( null ) == null )
}
it( "Does not recurse if the ending StringPool element is found" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemEnd( null, "StringPool" ) ).thenReturn(
new EvElemStart( null, "String", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( true ).thenReturn( false )
fix.parser.stringsAndFilesLoop( fix.event, data )
assert( data.getStrings.get( "test" ).getOrElse( null ) == null )
}
it( "does not recurse if the ending FilePool element is found" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemEnd( null, "FilePool" ) ).thenReturn(
new EvElemStart( null, "String", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( true ).thenReturn( false )
fix.parser.stringsAndFilesLoop( fix.event, data )
assert( data.getStrings.get( "test" ).getOrElse( null ) == null )
}
}
describe( "Testing the site, finding, and taint loop" )
{
it( "throws an error if the list type is not supported" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "Site", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true )
intercept[ Throwable ]
{
fix.parser.siteAndFindingLoop( "Wrong", fix.event, data )
}
}
it( "records the site data if the element found is Site" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "Site", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "sites", fix.event, data )
assert( data.getSites.result().head.get( "test" ).getOrElse( null ) == "test" )
assert( data.getFindings.result().length == 0 )
assert( data.getTaints.result().length == 0 )
}
it( "records the finding data if the element found is Site" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "FindingData", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "findings", fix.event, data )
assert( data.getFindings.result().head.get( "test" ).getOrElse( null ) == "test" )
assert( data.getSites.result().length == 0 )
assert( data.getTaints.result().length == 0 )
}
it( "records the taint data if the element found is Taint" )
{
val fix = fixture
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( fix.event.next ).thenReturn( new EvElemStart( null, "Taint", attribs, null ) )
when( fix.event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "taints", fix.event, data )
assert( data.getTaints.result().head.get( "test" ).getOrElse( null ) == "test" )
assert( data.getFindings.result().length == 0 )
assert( data.getSites.result().length == 0 )
}
it( "does not recurse if the ending element is found for the SitePool" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( event.next ).thenReturn( new EvElemEnd( null, "SitePool" ) ).thenReturn(
new EvElemStart( null, "Site", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "sites", event, data )
verify( event, times( 1 ) ).hasNext
assert( data.getSites.result().length == 0 )
assert( data.getFindings.result().length == 0 )
assert( data.getTaints.result().length == 0 )
}
it( "does not recurse if the ending element is found for the FindingDataPool" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( event.next ).thenReturn( new EvElemEnd( null, "FindingDataPool" ) ).thenReturn(
new EvElemStart( null, "FindingData", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "findings", event, data )
verify( event, times( 1 ) ).hasNext
assert( data.getSites.result().length == 0 )
assert( data.getFindings.result().length == 0 )
assert( data.getTaints.result().length == 0 )
}
it( "does not recurse if the ending element is found for the TaintPool" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "bar", Option( Seq( new Unparsed( "foo" ) ) ), Null )
when( event.next ).thenReturn( new EvElemEnd( null, "TaintPool" ) ).thenReturn(
new EvElemStart( null, "Taint", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( true ).thenReturn( false )
fix.parser.siteAndFindingLoop( "taints", event, data )
verify( event, times( 1 ) ).hasNext
assert( data.getSites.result().length == 0 )
assert( data.getFindings.result().length == 0 )
assert( data.getTaints.result().length == 0 )
}
}
describe( "Testing the assessment loop" )
{
it( "will re-call the loop and add no data for an Assessment element of assessee_type of Application" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val attribs = new UnprefixedAttribute( "assessee_type", Option( Seq( new Unparsed( "Application" ) ) ), Null )
when( event.next ).thenReturn( new EvElemStart( null, "Assessment", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.assessmentLoopOuter( "Application", null, event, data )
verify( event, times( 2 ) ).hasNext
assert( data.getProjects.length == 0 )
assert( data.getFilesByProject.length == 0 )
assert( data.getFileErrors.size == 0 )
assert( data.getTaintFindings.size == 0 )
}
it( "will re-call the loop and add project data for an Assessment element of assessee_type of Project" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val name = new UnprefixedAttribute( "assessee_name", Option( Seq( new Unparsed( "test" ) ) ), Null )
val attribs = new UnprefixedAttribute( "assessee_type", Option( Seq( new Unparsed( "Project" ) ) ), name )
when( event.next ).thenReturn( new EvElemStart( null, "Assessment", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.assessmentLoopOuter( "Application", null, event, data )
verify( event, times( 2 ) ).hasNext
assert( data.getProjects.length == 1 )
assert( data.getFilesByProject.length == 0 )
assert( data.getFileErrors.size == 0 )
assert( data.getTaintFindings.size == 0 )
}
it( "will end the assessments loop if an element of type Messages is reached" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
when( event.next ).thenReturn( new EvElemStart( null, "Messages", Null, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.assessmentLoopOuter( "Application", null, event, data )
verify( event, times( 1 ) ).hasNext
assert( data.getProjects.length == 0 )
assert( data.getFilesByProject.length == 0 )
assert( data.getFileErrors.size == 0 )
assert( data.getTaintFindings.size == 0 )
}
it( "will map a file to a project if an element of type AsmntFile is reached" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
val map = new mutable.ListMap[ String, String ]
map += ( "owner" -> "test" )
map += ( "name" -> "test" )
data.addProject( map.toMap )
val attribs = new UnprefixedAttribute( "file_id", Option( Seq( new Unparsed( "test" ) ) ), Null )
when( event.next ).thenReturn( new EvElemStart( null, "AsmntFile", attribs, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.assessmentLoopOuter( "Project", null, event, data )
verify( event, times( 2 ) ).hasNext
assert( data.getProjects.length == 1 )
assert( data.getFilesByProject.length == 1 )
assert( data.getFileErrors.size == 0 )
assert( data.getTaintFindings.size == 0 )
}
it( "will re-call the loop and add no data for an AssessmentStats element" )
{
val fix = fixture
val event = mock( classOf[ EventReader ] )
val data = new ParsedAppScanSourceXmlData
when( event.next ).thenReturn( new EvElemStart( null, "AssessmentStats", Null, null ) )
when( event.hasNext ).thenReturn( true ).thenReturn( false )
fix.parser.assessmentLoopOuter( "Application", null, event, data )
verify( event, times( 2 ) ).hasNext
assert( data.getProjects.length == 0 )
assert( data.getFilesByProject.length == 0 )
assert( data.getFileErrors.size == 0 )
assert( data.getTaintFindings.size == 0 )
}
}
}
}
}
|
blackboard/appscan-source-parser
|
src/test/scala/models/AppScanSourceXmlParserSpec.scala
|
Scala
|
bsd-3-clause
| 28,177 |
package tests.admission
import sisdn.admission.{AdmissionUser, Student}
import akka.actor
import akka.actor.{Props, ActorSystem}
import akka.testkit._
import org.scalatest.{BeforeAndAfterAll, Matchers, FlatSpecLike}
import AdmissionUser.{AdmissionStatusUpdateEvt, Admit}
import sisdn.common.User
import scala.concurrent.duration.DurationInt
class UserActorSpecs (_system: ActorSystem) extends TestKit(_system) with ImplicitSender
with FlatSpecLike with Matchers with BeforeAndAfterAll {
def this() = this(ActorSystem("UserActorSpec"))
override def afterAll { TestKit.shutdownActorSystem(system) }
implicit val ec = system.dispatcher
val admitor = TestProbe()
val user = system.actorOf(AdmissionUser.props("1", admitor.ref))
val students = List.range(1,4).flatMap{ x => List(Student(x.toHexString,"",x,x,"org")) }
ignore should "extract correct number of admission to list" in {
user ! Admit(User("1","",None,None, None), students)
val admissions = admitor.receiveWhile(){ case a:AdmissionStatusUpdateEvt => a}
admissions.length shouldEqual 3
}
ignore should "set the status to Pending for new admissions" in {
fail("Not implemented")
}
ignore should "set the status to Valid for validated admissions" in {
fail("Not implemented")
}
ignore should "set the status to Rejected for rejected admissions" in {
fail("Not implemented")
}
ignore should "set the status to Accepted for accepted admissions" in {
fail("Not implemented")
}
ignore should "should not respond to duplicate admissions" in {
fail("Not implemented")
}
}
|
mhashimm/backend
|
src/test/scala/tests/admission/UserActorSpecs.scala
|
Scala
|
agpl-3.0
| 1,608 |
package mapmartadero
package model
/**
* Created by j2 on 12-08-14.
*/
import scala.xml._
import net.liftweb._
import common._
import record._
import record.field._
import net.liftweb.mongodb.record.field.DateField
import net.liftweb.mongodb.record.field.{MongoCaseClassField, ObjectIdPk}
import net.liftweb.mongodb.record.{MongoMetaRecord, MongoRecord}
import com.foursquare.rogue.LiftRogue._
import org.joda.time._
class GeneralEvent private () extends MongoRecord[GeneralEvent] with ObjectIdPk[GeneralEvent] {
def meta = GeneralEvent
object hour extends StringField(this, 250) {
override def displayName = "Hour"
override def helpAsHtml = Full(Text("Event's hour"))
}
object room extends MongoCaseClassField[GeneralEvent, LocalRoom](this)
object name extends StringField(this, 200)
object cost extends StringField(this, 300)
object area extends StringField(this, 300) //Activity Type
object date extends DateField(this)
object proposal extends LongField(this, 0)
object organize extends StringField(this, 300)
object status extends StringField(this, 300)
object text extends StringField(this, 12) {
override def displayName = "Text"
override def helpAsHtml = Full(Text("The book's text"))
override def validations =
valMinLen(2, "Text must be at least 2 characters") _ ::
valMaxLen(12, "Text must be 12 characters or less") _ ::
super.validations
}
}
object GeneralEvent extends GeneralEvent with MongoMetaRecord[GeneralEvent] {
def findByProposal(proposal: Long): Box[GeneralEvent] = {
Full(
GeneralEvent.where(_.proposal eqs proposal).fetch().headOption.
getOrElse(GeneralEvent.createRecord.proposal(proposal))
)
}
def fetchTodayEvents(page: Int, itemsPerPage: Int): List[GeneralEvent] = {
println(s"PAGE $page")
val now = DateTime.now()
val dayStart = now.withTimeAtStartOfDay().plusDays(4)
val dayEnd = dayStart.plusDays(1).withTimeAtStartOfDay()
GeneralEvent.where(_.date between(dayStart, dayEnd)).
and(_.status neqs "Por revisar").paginate(itemsPerPage).setPage(page).fetch()
}
def countTodayEvents(): Long = {
val now = DateTime.now()
val dayStart = now.withTimeAtStartOfDay().plusDays(4)
val dayEnd = dayStart.plusDays(1).withTimeAtStartOfDay()
GeneralEvent.where(_.date between(dayStart, dayEnd)).
and(_.status neqs "Por revisar").count()
}
}
case class LocalRoom(val name: String)
|
jgenso/mapamartadero
|
src/main/scala/mapmartadero/model/GeneralEvent.scala
|
Scala
|
apache-2.0
| 2,475 |
package com.example.recommender
import com.example.math.MyMath
class MyRecommender {
def recommend() = {
MyMath.nextInt()
}
}
|
fkautz/gradle-recipes
|
multiproject/recommender/src/main/scala/com/example/recommender/Recommender.scala
|
Scala
|
apache-2.0
| 136 |
package controllers
import com.google.inject.Inject
import models.JwtEnvironment
import play.api.i18n.MessagesApi
import play.api.mvc.Controller
import utils.JwtAuthentication
class LogoutController @Inject()(implicit val messagesApi: MessagesApi,
implicit val jwtEnvironment: JwtEnvironment)
extends Controller with JwtAuthentication {
def logout = JwtAuthenticatedAction { jwtToken =>
jwtEnvironment.blacklist.add(jwtToken)
Accepted
}
}
|
GMadorell/play-jwt
|
app/controllers/LogoutController.scala
|
Scala
|
mit
| 489 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.logical
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.util.{FlinkRelOptUtil, RelExplainUtil}
import org.apache.calcite.plan._
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core.Sort
import org.apache.calcite.rel.logical.LogicalSort
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.rel.{RelCollation, RelCollationTraitDef, RelNode}
import org.apache.calcite.rex.{RexLiteral, RexNode}
/**
* Sub-class of [[Sort]] that is a relational expression which imposes
* a particular sort order on its input without otherwise changing its content in Flink.
*/
class FlinkLogicalSort(
cluster: RelOptCluster,
traits: RelTraitSet,
child: RelNode,
collation: RelCollation,
offset: RexNode,
fetch: RexNode)
extends Sort(cluster, traits, child, collation, offset, fetch)
with FlinkLogicalRel {
private lazy val limitStart: Long = FlinkRelOptUtil.getLimitStart(offset)
override def copy(
traitSet: RelTraitSet,
newInput: RelNode,
newCollation: RelCollation,
offset: RexNode,
fetch: RexNode): Sort = {
new FlinkLogicalSort(cluster, traitSet, newInput, newCollation, offset, fetch)
}
override def estimateRowCount(mq: RelMetadataQuery): Double = {
val inputRowCnt = mq.getRowCount(this.getInput)
if (inputRowCnt == null) {
inputRowCnt
} else {
val rowCount = (inputRowCnt - limitStart).max(1.0)
if (fetch != null) {
val limit = RexLiteral.intValue(fetch)
rowCount.min(limit)
} else {
rowCount
}
}
}
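  // Editorial worked example (not part of the original file): with an estimated input row
  // count of 1000, offset = 10 and fetch = 50, the estimate is min(max(1000 - 10, 1), 50) = 50;
  // with no fetch it would be 990.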
override def computeSelfCost(planner: RelOptPlanner, mq: RelMetadataQuery): RelOptCost = {
// by default, assume cost is proportional to number of rows
val rowCount: Double = mq.getRowCount(this)
planner.getCostFactory.makeCost(rowCount, rowCount, 0)
}
}
class FlinkLogicalSortStreamConverter
extends ConverterRule(
classOf[LogicalSort],
Convention.NONE,
FlinkConventions.LOGICAL,
"FlinkLogicalSortStreamConverter") {
override def convert(rel: RelNode): RelNode = {
val sort = rel.asInstanceOf[LogicalSort]
val newInput = RelOptRule.convert(sort.getInput, FlinkConventions.LOGICAL)
FlinkLogicalSort.create(newInput, sort.getCollation, sort.offset, sort.fetch)
}
}
class FlinkLogicalSortBatchConverter extends ConverterRule(
classOf[LogicalSort],
Convention.NONE,
FlinkConventions.LOGICAL,
"FlinkLogicalSortBatchConverter") {
override def convert(rel: RelNode): RelNode = {
val sort = rel.asInstanceOf[LogicalSort]
val newInput = RelOptRule.convert(sort.getInput, FlinkConventions.LOGICAL)
// TODO supports range sort
FlinkLogicalSort.create(newInput, sort.getCollation, sort.offset, sort.fetch)
}
}
object FlinkLogicalSort {
val BATCH_CONVERTER: RelOptRule = new FlinkLogicalSortBatchConverter
val STREAM_CONVERTER: RelOptRule = new FlinkLogicalSortStreamConverter
def create(
input: RelNode,
collation: RelCollation,
sortOffset: RexNode,
sortFetch: RexNode): FlinkLogicalSort = {
val cluster = input.getCluster
val collationTrait = RelCollationTraitDef.INSTANCE.canonize(collation)
val traitSet = input.getTraitSet.replace(FlinkConventions.LOGICAL)
.replace(collationTrait).simplify()
new FlinkLogicalSort(cluster, traitSet, input, collation, sortOffset, sortFetch)
}
}
|
ueshin/apache-flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/logical/FlinkLogicalSort.scala
|
Scala
|
apache-2.0
| 4,309 |
// Minimized from scala.collection.generic.GenTraversableFactory plus dependencies
import scala.annotation.unchecked.uncheckedVariance
trait GT[A] extends GTT[A, GT]
trait HNB[B]
trait GTT[+C, DD[X] <: GT[X]] extends HNB[DD[C] @uncheckedVariance] // Can be any annotation and still crash
class GTF[EE[X] <: GT[X] with GTT[X, EE]]
{
def foo[F]: EE[F] = ???
def bar[G](f: G): EE[G] = ???
def tabulate: EE[EE[Int]] = bar(foo)
}
|
densh/dotty
|
tests/pos/hk-deep-subtype.scala
|
Scala
|
bsd-3-clause
| 435 |
package controllers.builder
import controllers.BaseAuthConfig
import play.api.mvc.RequestHeader
import play.api.mvc.Results._
import scala.concurrent.{Future, ExecutionContext}
trait AuthConfigImpl extends BaseAuthConfig {
def loginSucceeded(request: RequestHeader)(implicit ctx: ExecutionContext) = Future.successful(Redirect(routes.Messages.main))
def logoutSucceeded(request: RequestHeader)(implicit ctx: ExecutionContext) = Future.successful(Redirect(routes.Sessions.login))
def authenticationFailed(request: RequestHeader)(implicit ctx: ExecutionContext) = Future.successful(Redirect(routes.Sessions.login))
}
|
indykish/play2-auth
|
sample/app/controllers/builder/AuthConfigImpl.scala
|
Scala
|
apache-2.0
| 627 |
package by.pavelverk.hardwrite.core.feature
import by.pavelverk.hardwrite.core.{InMemoryStorage, Features}
import by.pavelverk.hardwrite.utils.db.DatabaseConnector
import scala.concurrent.{ExecutionContext, Future}
sealed trait FeaturesStorage {
def getFeatures(id: String): Future[Option[Features]]
def saveFeatures(sample: Features): Future[Features]
}
class JdbcFeaturesStorage(val databaseConnector: DatabaseConnector)(implicit executionContext: ExecutionContext)
extends FeaturesTable with FeaturesStorage {
import databaseConnector._
import databaseConnector.profile.api._
def getFeatures(id: String): Future[Option[Features]] = db.run(features.filter(_.id === id).result.headOption)
def saveFeatures(sample: Features): Future[Features] =
db.run(features.insertOrUpdate(sample)).map(_ => sample)
}
class InMemoryFeatureStorage extends InMemoryStorage[String, Features] with FeaturesStorage {
override def getFeatures(id: String): Future[Option[Features]] = get(id)
override def saveFeatures(sample: Features): Future[Features] = save(sample)
}
|
VerkhovtsovPavel/BSUIR_Labs
|
Master/back/akka-http-rest-master/src/main/scala/by/pavelverk/hardwrite/core/feature/FeatureStorage.scala
|
Scala
|
mit
| 1,086 |
package com.github.challenge
import akka.actor._
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.Await
import scala.concurrent.duration._
class ProblemSolver[A <: problem.Problem](workers: Int, autoDie: Boolean, name: String) {
/**
* Bring up the actor system
*/
val system = ActorSystem("ChallengeSystem" + name)
/**
* This is the actor object responsible for managing the solver objects
*/
val master = system.actorOf(Props(ProblemMaster[A](workers, autoDie)), name = "problem_master")
def addInfo(info: problem.ProblemInfo[A]) {
master ! info
}
def solve() {
master ! problem.ProcessProblem
}
implicit val timeout = Timeout(5.seconds)
def isDone: Boolean = {
val future = master ? problem.DoneProcessing
Await.result(future, timeout.duration).asInstanceOf[Boolean]
}
def shutdown() {
system.shutdown()
}
}
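// Editorial usage sketch (not part of the original file). `MyProblem` and `myInfos` are
// hypothetical (a Problem subtype and a Seq[problem.ProblemInfo[MyProblem]]), so the sketch is
// left as a commented outline of the intended lifecycle: feed work, start, poll, shut down.
//
//   val solver = new ProblemSolver[MyProblem](workers = 4, autoDie = true, name = "demo")
//   myInfos.foreach(solver.addInfo)
//   solver.solve()
//   while (!solver.isDone) Thread.sleep(100)
//   solver.shutdown()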
|
challenge-helper/challenge-helper
|
src/main/scala/com/github/challenge/ProblemSolver.scala
|
Scala
|
apache-2.0
| 899 |