code (string, 5-1M) | repo_name (string, 5-109) | path (string, 6-208) | language (stringclasses: 1) | license (stringclasses: 15) | size (int64, 5-1M)
---|---|---|---|---|---
package code.snippet
import Angular._
import code.model.PonyService
class NgPonyService {
def render = renderIfNotAlreadyDefined(
angular.module("lift.pony")
.factory("ponyService", jsObjFactory()
.jsonCall("getBestPony", PonyService.getRandomPony)
)
)
}
|
htmldoug/ng-lift-proxy
|
src/main/scala/code/snippet/NgPonyService.scala
|
Scala
|
apache-2.0
| 281 |
package skinny.controller
import skinny.micro.AsyncSkinnyMicroServlet
/**
* AsyncSkinnyController as a Servlet.
*/
class AsyncSkinnyServlet
extends AsyncSkinnyMicroServlet
with AsyncSkinnyControllerBase
with AsyncSkinnyWebPageControllerFeatures
|
seratch/skinny-framework
|
framework/src/main/scala/skinny/controller/AsyncSkinnyServlet.scala
|
Scala
|
mit
| 258 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio._
import kafka.network._
import kafka.utils._
import kafka.api._
object MultiFetchRequest {
def readFrom(buffer: ByteBuffer): MultiFetchRequest = {
val count = buffer.getShort
val fetches = new Array[FetchRequest](count)
for(i <- 0 until fetches.length)
fetches(i) = FetchRequest.readFrom(buffer)
new MultiFetchRequest(fetches)
}
}
class MultiFetchRequest(val fetches: Array[FetchRequest]) extends Request(RequestKeys.MultiFetch) {
def writeTo(buffer: ByteBuffer) {
buffer.putShort(fetches.length.toShort)
for(fetch <- fetches)
fetch.writeTo(buffer)
}
def sizeInBytes: Int = {
var size = 2
for(fetch <- fetches)
size += fetch.sizeInBytes
size
}
}
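// Hedged usage sketch (not from the original file): round-trips a request
// through a ByteBuffer. The FetchRequest(topic, partition, offset, maxSize)
// constructor is assumed from the same 0.7-era API.
object MultiFetchRequestExample {
  def roundTrip(): MultiFetchRequest = {
    val request = new MultiFetchRequest(Array(new FetchRequest("topic", 0, 0L, 1024)))
    val buffer = ByteBuffer.allocate(request.sizeInBytes) // 2 bytes for the count + fetch payloads
    request.writeTo(buffer)
    buffer.flip()
    MultiFetchRequest.readFrom(buffer)
  }
}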
|
jinfei21/kafka
|
src/kafka/api/MultiFetchRequest.scala
|
Scala
|
apache-2.0
| 1,347 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.impl
import java.sql.SQLException
import java.util.Properties
import com.pivotal.gemfirexd.internal.engine.fabricservice.FabricServerImpl
import io.snappydata.{ProtocolOverrides, Server}
/**
* This class ties together a few Snappy-specific concerns,
* e.g. the connection URL and ClusterCallback.
*/
class ServerImpl extends FabricServerImpl with Server with ProtocolOverrides {
@throws(classOf[SQLException])
override def start(bootProperties: Properties): Unit = start(bootProperties, false)
override def isServer: Boolean = true
}
|
vjr/snappydata
|
core/src/main/scala/io/snappydata/impl/ServerImpl.scala
|
Scala
|
apache-2.0
| 1,223 |
/*
* Copyright (C) 2015 Language Technology Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package preprocess
import java.nio.file.Path
import com.typesafe.scalalogging.slf4j.LazyLogging
import model.Document
import utils.Benchmark.toBenchmarkable
import utils.RichString.richString
import utils.io.IoUtils
/**
* Out of a given corpus, produces a map of true cases. This is a frequency-based approach
* in which the most common spelling of a term is assumed to be its true case.
*
* @param termFreq maps each term to its frequency.
*/
case class TrueCaser(termFreq: Map[String, Int]) extends LazyLogging {
// Maps lowercase words to their true cases
private val trueCaseMap: Map[String, String] = calculateTrueCases()
/**
* Out of the termFreq map, assigns each word the true case that is the spelling
* with the highest frequency.
*/
private def calculateTrueCases(): Map[String, String] = {
val group = termFreq.groupBy { case (k, _) => k.toLowerCase }
val trueCase = group.map {
case (k, forms) =>
val (trueCasingForm, _) = forms.maxBy { case (_, freq) => freq }
k.toLowerCase -> trueCasingForm
}
trueCase
}
/**
* Returns the true case for a given text.
*
* @param text The input text to get its true case.
* @return The true case of the input text.
*/
def applyTrueCasing(text: String): String = {
val words = text.words()
val trueCasedWords = words.map { w => trueCaseMap.getOrElse(w.toLowerCase, w) }
trueCasedWords.mkString(" ")
}
/**
* Returns the true case for a given [[model.Document]].
*
* @param document the original document.
* @return [[model.Document]] with applied true casing.
*/
def applyTrueCasing(document: Document): Document = {
val trueCasedContent = applyTrueCasing(document.content)
Document(document.id, trueCasedContent, document.created)
}
}
/**
* Companion object for [[TrueCaser]] that provides factory methods to
* create instances.
*/
object TrueCaser {
/**
* Factory method to create a [[TrueCaser]] from a given corpus file.
*
* @param file the corpus file used to determine the term frequency map.
* @return [[TrueCaser]] derived from a term frequency map consisting of the
* words from the given corpus file.
*/
def apply(file: Path)(ioUtils: IoUtils = new IoUtils): TrueCaser = {
val tf = createFrequencyMap(file, ioUtils).withBenchmark("Creating term frequency map for TrueCaser...")
TrueCaser(tf)
}
private def createFrequencyMap(file: Path, ioUtils: IoUtils): Map[String, Int] = {
val content = ioUtils.fromFile(file)(_.mkString(""))
val words = content.words()
words.groupBy(identity).map { case (term, counts) => (term, counts.length) }
}
}
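// Hedged usage sketch (not from the original file): derives true cases from a
// hand-built frequency map; relies on utils.RichString.words() as used above.
object TrueCaserExample {
  def demo(): String = {
    val caser = TrueCaser(Map("Darmstadt" -> 7, "darmstadt" -> 2, "the" -> 40))
    // "DARMSTADT" folds to "darmstadt", whose most frequent spelling is "Darmstadt"
    caser.applyTrueCasing("DARMSTADT is a city") // => "Darmstadt is a city"
  }
}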
|
tudarmstadt-lt/newsleak
|
core/src/main/scala/preprocess/TrueCaser.scala
|
Scala
|
agpl-3.0
| 3,405 |
package skinny.test
import javax.servlet.http.HttpUpgradeHandler
import org.scalatest._
class MockHttpServletRequestSpec extends FlatSpec with Matchers {
it should "be available" in {
val req = new MockHttpServletRequest
req.getDispatcherType should equal(null)
req.getAsyncContext should equal(null)
req.isAsyncSupported should equal(true)
req.isAsyncStarted should equal(false)
req.startAsync(null, null)
req.startAsync
req.getServletContext should not equal (null)
req.getRealPath("/") should equal(null)
req.getLocalPort should equal(80)
req.getLocalAddr should equal("127.0.0.1")
req.getLocalName should equal("localhost")
req.getServerPort should equal(80)
req.getServerName should equal("localhost")
req.getRemotePort should equal(80)
req.getRemoteAddr should equal("127.0.0.1")
req.getRemoteHost should equal("localhost")
req.getRequestDispatcher("/") should not equal (null)
req.isSecure should equal(false)
req.getLocales.hasMoreElements should equal(false)
req.getLocale should equal(null)
req.getAttributeNames.hasMoreElements should equal(false)
req.removeAttribute("foo")
req.getReader should not equal (null)
req.getInputStream should not equal (null)
req.getScheme should equal("http")
req.getProtocol should equal("http")
req.getParameterValues("foo").size should equal(0)
req.getParameterNames should not equal (null)
Option(req.getParameter("foo")) should equal(None)
req.getContentType should equal(null)
req.getContentLength should equal(-1)
req.setCharacterEncoding("test")
req.updateContentTypeHeader()
req.doAddHeaderValue("foo", "bar", true)
req.doAddHeaderValue("foo", "bar", false)
req.getCharacterEncoding should equal("test")
req.getPart("foo") should equal(null)
req.getParts.size() should equal(0)
req.logout()
intercept[UnsupportedOperationException] {
req.login("foo", "bar")
}
intercept[UnsupportedOperationException] {
req.authenticate(null)
}
req.isRequestedSessionIdFromUrl should equal(false)
req.isRequestedSessionIdFromURL should equal(false)
req.isRequestedSessionIdFromCookie should equal(true)
req.isRequestedSessionIdValid should equal(true)
req.getSession should not equal (null)
req.getSession(true) should not equal (null)
req.getServletPath should equal(null)
req.requestURI = "/foo"
req.getRequestURL.toString should equal("http://localhost/foo")
req.getRequestedSessionId should equal(null)
req.getUserPrincipal should equal(null)
req.isUserInRole(null) should equal(false)
req.remoteUser = "user"
req.getRemoteUser should equal("user")
req.getContextPath should equal("")
req.getPathInfo should equal(null)
req.getMethod should equal(null)
req.getHeaderNames should not equal (null)
req.getHeader("foo") should equal("bar")
req.getCookies.size should equal(0)
req.authType = "at"
req.getAuthType should equal("at")
intercept[UnsupportedOperationException] {
req.changeSessionId()
}
intercept[UnsupportedOperationException] {
req.upgrade(classOf[HttpUpgradeHandler])
}
req.getContentLengthLong should equal(-1)
}
}
|
holycattle/skinny-framework
|
test/src/test/scala/skinny/test/MockHttpServletRequestSpec.scala
|
Scala
|
mit
| 3,312 |
object B {
inline def inlinedAny(inline x: String): x.type = x
}
|
dotty-staging/dotty
|
sbt-test/source-dependencies/inline-rec-change-inline/changes/B1.scala
|
Scala
|
apache-2.0
| 69 |
package jp.scid.specs2
package object jsoup {
/** Instance object for referencing the matchers without mixing them in */
private[jsoup] object Specs2Matchers extends org.specs2.matcher.Matchers
}
|
rhiguchi/jsoup-specs2-matchers
|
src/main/scala/jp.scid.specs2/jsoup/package.scala
|
Scala
|
mit
| 233 |
/*
* Copyright 2006-2009 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package net.liftweb
package http
import common.{Box,Full,Empty,Failure}
import util.Props
import scala.xml.{NodeSeq, Text}
/**
* A collection of types and implicit transformations used to allow composition
* of page elements based upon the types of rendered objects.
*
* In Lift, a "snippet" is a function from NodeSeq => NodeSeq, where the argument
* to the function is a template, and the result is a fragment of a page to be
* rendered. Of course, this is a bit of an abbreviation; the snippet function
* also has an argument which is the application state made available from S.
* A DataBinding[T] is very similar in this respect; it is a function from some
* piece of information of type T to a function from NodeSeq => NodeSeq. Since
* DataBinding is strongly typed with respect to the type of information being
* rendered, DataBinding instances are ideal for rendering the objects that
* are used to build up snippets. For example:
*
* <pre>
* import net.liftweb.http.Bindings._
* case class MyClass(str: String, i: Int, other: MyOtherClass)
* case class MyOtherClass(foo: String)
*
* trait MyClassBinding extends DataBinding[MyClass] {
* implicit val otherBinding: DataBinding[MyOtherClass]
*
* override def apply(entity: MyClass) = (xhtml: NodeSeq) => {
* val otherTemplate = chooseTemplate("myclass", "other", xhtml)
* bind(
* "myclass", xhtml,
* "str" -> Text("#" + entity.str + "#"),
* "i" -> Text(entity.i.toString),
* "other" -> entity.other.bind(otherTemplate)
* )
* }
*
* }
*
* object myOtherClassBinding extends DataBinding[MyOtherClass] {
* override def apply(other: MyOtherClass) = (xhtml: NodeSeq) => {
* bind("other", xhtml, "foo" -> Text("%" + other.foo + "%"))
* }
* }
*
* object MyClassConcreteBinding extends MyClassBinding {
* override val otherBinding = myOtherClassBinding
* }
* </pre>
*
* In this example, two classes and their associated bindings are constructed;
* the first binding for MyClass is abstract, needing a specific instance of
* DataBinding[MyOtherClass] to enable the implicit conversion needed to render
* the contained MyOtherClass instance. A subtemplate is selected, and the
* call to other.bind both necessitates the implicit conversion to a Bindings.Binder
* instance and applies the appropriate formatting. You can see how this
* usage keeps the concerns of the view and the model nicely separated, while
* allowing composition over object graphs.
*
* Please see the tests, as well as <a href="http://logji.blogspot.com/2009/09/composable-bindings-in-lift.html">this blog post</a> for additional details.
*/
object Bindings {
type Binding = NodeSeq => NodeSeq
type DataBinding[T] = T => NodeSeq => NodeSeq
/**
* Implicitly convert the specified object to a binder for that object if a DataBinding for
* that object's type is available in implicit scope. This essentially adds a bind() method
* to an object if an appropriate implicit DataBinding is available.
*/
implicit def binder[T](t: T)(implicit binding: DataBinding[T]): Binder = Binder(binding(t))
/**
* Wrap the specified Binding (a function from NodeSeq => NodeSeq) in a Binder so that
* it can be applied using Binder's bind methods.
*/
implicit def binder(binding: Binding): Binder = Binder(binding)
/**
* A decorator for a binding function that allows it to be called as bind() rather than apply().
* This class also provides facilities for binding to a specific template
*/
case class Binder(val binding: Binding) {
/**
* Apply this binder's binding function to the specified NodeSeq.
*/
def bind(xhtml: NodeSeq): NodeSeq = binding.apply(xhtml)
/**
* Apply this binder's binding function to the specified template,
* looked up using Templates.apply.
*/
def bind(templatePath: List[String]): NodeSeq = {
Templates(templatePath) map binding match {
case Full(xhtml) => xhtml
case Failure(msg, ex, _) if Props.mode == Props.RunModes.Development => Text(ex.map(_.getMessage).openOr(msg))
case Empty if Props.mode == Props.RunModes.Development => Text("Unable to find template with path " + templatePath.mkString("/", "/", ""))
case _ => NodeSeq.Empty
}
}
}
/**
* Bind any input value to the empty NodeSeq.
*/
object EmptyBinding extends Binding {
override def apply(xhtml : NodeSeq) : NodeSeq = NodeSeq.Empty
}
}
|
sortable/framework
|
web/webkit/src/main/scala/net/liftweb/http/Bindings.scala
|
Scala
|
apache-2.0
| 5,272 |
package ch.squan.game.model.command
import org.newdawn.slick.command.BasicCommand
/**
* Created by chris on 22/01/16.
*/
case object CommandLeft extends BasicCommand("Left")
|
cbenning/space2d
|
src/main/scala/ch/squan/game/model/command/CommandLeft.scala
|
Scala
|
apache-2.0
| 180 |
package com.themillhousegroup.l7
import scala.StringBuilder
import org.apache.commons.lang3.StringUtils
import com.typesafe.scalalogging.LazyLogging
import java.io.File
import com.themillhousegroup.l7.commands.Command
object HierarchyVisualiser {
val indentSize = 4
def visualise(hierarchy: Seq[TopLevelNode]): String = {
val sb = new StringBuilder()
visualise(sb, hierarchy, 0)
sb.toString
}
private def visualise(sb: StringBuilder, nodes: Seq[HierarchyNode], indentLevel: Int): Unit = {
val sorted = nodes.sortWith {
case (l, r) => l.name.toLowerCase.compareTo(r.name.toLowerCase) < 0
}
sorted.foreach { n =>
sb.append(spaces(indentLevel))
sb.append(n)
sb.append("\n")
visualise(sb, n.children, indentLevel + indentSize)
}
}
private def spaces(i: Int): String = {
StringUtils.leftPad("", i)
}
}
|
themillhousegroup/l7-merge
|
src/main/scala/com/themillhousegroup/l7/HierarchyVisualiser.scala
|
Scala
|
mit
| 882 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.avro.dsl
import java.lang.{Integer => JInt}
import java.lang.{Iterable => JIterable}
import java.lang.{Long => JLong}
import java.util.{Map => JMap}
import scala.collection.JavaConverters._
import scala.collection.JavaConverters.asScalaIteratorConverter
import scala.collection.JavaConverters.mapAsJavaMapConverter
import scala.collection.JavaConverters.mapAsScalaMapConverter
import scala.collection.mutable.Buffer
import org.apache.avro.Schema
import org.apache.avro.Schema.Type
import org.apache.avro.generic.GenericData
import org.apache.avro.generic.GenericRecord
import org.apache.avro.generic.GenericRecordBuilder
import org.apache.commons.lang.StringEscapeUtils
import org.apache.avro.generic.GenericContainer
/**
* Helper function to serialize an Avro value into a string representation.
* TODO(SCHEMA-619): Handle recursive value
*/
object AvroValue {
/**
* Serializes an Avro value to string.
*
* @param value Avro value to serialize.
* @param schema Avro schema of the value to serialize.
* @return the string representation of the Avro value.
*/
def toString(value: Any, schema: Schema): String = {
schema.getType match {
case Type.NULL => {
require(value == null)
return "null"
}
case Type.BOOLEAN => {
return value.asInstanceOf[Boolean].toString
}
case Type.INT => {
return value.asInstanceOf[JInt].toString
}
case Type.LONG => {
return value.asInstanceOf[JLong].toString + "L"
}
case Type.FLOAT => {
return value.asInstanceOf[Float].toString + "f"
}
case Type.DOUBLE => {
return value.asInstanceOf[Double].toString + "d"
}
case Type.BYTES => {
val bytes = value.asInstanceOf[Array[Byte]]
return "bytes(%s)".format(bytes.map {byte => "%02x".format(byte)}.mkString(","))
}
case Type.STRING => {
return """"%s"""".format(
StringEscapeUtils.escapeJava(value.asInstanceOf[CharSequence].toString)
)
}
case Type.FIXED => {
val fixed = value.asInstanceOf[GenericData.Fixed]
return "%s(%s)".format(
schema.getFullName,
fixed.bytes.map {byte => "%02x".format(byte)}.mkString(",")
)
}
case Type.ENUM => {
return "%s(%s)".format(schema.getFullName, value.toString)
}
case Type.RECORD => {
val record = value.asInstanceOf[GenericRecord]
val fields = schema.getFields.asScala.map {
field: Schema.Field =>
"%s=%s".format(field.name, toString(record.get(field.name), field.schema))
}
return "%s{%s}".format(
schema.getFullName,
fields.mkString(",")
)
}
case Type.ARRAY => {
val elementSchema = schema.getElementType
val iterator = {
value match {
case array: Array[_] => array.iterator
case iterable: JIterable[_] => iterable.iterator.asScala
case _ => sys.error("Not an array: " + value)
}
}
return "[%s]".format(
iterator
.map { element => toString(element, elementSchema) }
.mkString(",")
)
}
case Type.MAP => {
val valueSchema = schema.getValueType
val iterator: Iterator[(String, _)] = {
value match {
case jmap: JMap[String, _] => jmap.asScala.iterator
case _ => sys.error("Not a recognized map: " + value)
}
}
return "{%s}".format(
iterator
.map { case (key: String, v) =>
""""%s":%s""".format(StringEscapeUtils.escapeJava(key), toString(v, valueSchema))
}
.mkString(",")
)
}
case Type.UNION => {
value match {
case container: GenericContainer => {
val actualSchema = container.getSchema
require(schema.getTypes.contains(actualSchema))
return toString(container, actualSchema)
}
case _ => {
val errors = Buffer[Exception]()
for (avroType <- schema.getTypes.asScala) {
try {
return toString(value, avroType)
} catch {
// This is terrible :(
case error: ClassCastException => {
errors += error
}
case error: IllegalArgumentException => {
errors += error
}
}
}
sys.error("Unable to serialize union value %s according to schema: %s; errors were: %s"
.format(value, schema, errors))
}
}
sys.error("dead code")
}
case _ => sys.error("Unknown or unexpected schema: " + schema)
}
}
}
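// Hedged usage sketch (not from the original file): string-renders a few
// primitive Avro values with the serializer above.
object AvroValueExample {
  def demo(): Seq[String] = Seq(
    AvroValue.toString(null, Schema.create(Type.NULL)),  // "null"
    AvroValue.toString(42L, Schema.create(Type.LONG)),   // "42L"
    AvroValue.toString("hi", Schema.create(Type.STRING)) // "\"hi\""
  )
}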
|
iafek/kiji-base
|
kiji-schema-framework/kiji-schema-extras/src/main/scala/org/kiji/avro/dsl/AvroValue.scala
|
Scala
|
apache-2.0
| 5,613 |
package scwebapp.data
import scutil.core.implicits.*
import scwebapp.format.*
import scparse.ng.text.*
object RangePattern {
lazy val parser:TextParser[RangePattern] =
parsers.value
def unparse(it:RangePattern):String =
it match {
case Begin(s) => s.toString + "-"
case End(c) => "-" + c.toString
case FromTo(s,e) => s.toString + "-" + e.toString
}
private object parsers {
import HttpParsers.*
val bytePos:TextParser[Long] = DIGIT.nes.stringify map { _.toLong } eatLeft LWSP
val byteRangeSpec:TextParser[(Long,Option[Long])] = bytePos left symbol('-') next bytePos.option
val suffixByteRangeSpec:TextParser[Long] = symbol('-') right bytePos
val value:TextParser[RangePattern] =
byteRangeSpec either suffixByteRangeSpec map {
case Left((a, None)) => Begin(a)
case Left((a, Some(b))) => FromTo(a, b)
case Right(b) => End(b)
}
}
//------------------------------------------------------------------------------
final case class Begin(start:Long) extends RangePattern
final case class FromTo(start:Long, end:Long) extends RangePattern
final case class End(size:Long) extends RangePattern
}
sealed trait RangePattern {
def toInclusiveRange(total:Long):Option[InclusiveRange] = {
val last = total - 1
this matchOption {
case RangePattern.FromTo(start, end) if start >= 0 && start <= last && end < last => InclusiveRange(start, end)
case RangePattern.Begin(start) if start >= 0 && start <= last => InclusiveRange(start, last)
case RangePattern.End(count) if count > 0 && count <= total => InclusiveRange(total - count, last)
}
}
}
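// Hedged usage sketch (not from the original file): resolves the three pattern
// shapes against a 1000-byte entity; InclusiveRange comes from the same package.
object RangePatternExample {
  def demo(total: Long = 1000L): Seq[Option[InclusiveRange]] = Seq(
    RangePattern.FromTo(0L, 499L).toInclusiveRange(total), // first 500 bytes
    RangePattern.Begin(500L).toInclusiveRange(total),      // byte 500 to the end
    RangePattern.End(100L).toInclusiveRange(total)         // final 100 bytes
  )
}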
|
ritschwumm/scwebapp
|
modules/core/src/main/scala/scwebapp/data/RangePattern.scala
|
Scala
|
bsd-2-clause
| 1,637 |
// Copyright 2016 zakski.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.szadowsz.maeve.core.instruction.actions
import com.szadowsz.maeve.core.browser.MaeveBrowser
import com.szadowsz.common.net.Uri
/**
* Executor for Javascript Actions; provides context points at the start of a scrape, after a page load, before extraction is called, after extraction is
* called, and as a final cleanup action after a scrape.
*
* Created on 13/10/2016.
*/
trait ActionExecutor {
/**
* Function to execute actions at the start of a scrape.
*
* @param browser the browser to interact with.
* @param firstTarget the expected first url that will be accessed after this.
*/
def doFirstExecutionAction(browser: MaeveBrowser, firstTarget: Uri): Unit
/**
* Function to execute actions after a page load.
*
* @param browser the browser to interact with.
*/
def doInitialPageAction(browser: MaeveBrowser): Unit
/**
* Function to execute actions before extraction is called.
*
* @param browser the browser to interact with.
*/
def doBeforeExtractAction(browser: MaeveBrowser): Unit
/**
* Function to execute actions after extraction is called.
*
* @param browser the browser to interact with.
*/
def doAfterExtractAction(browser: MaeveBrowser): Unit
/**
* Function to execute final cleanup actions after a scrape.
*
* @param browser the browser to interact with.
*/
def doFinalExecutionAction(browser: MaeveBrowser): Unit
}
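// Hedged illustration (not from the original file): a no-op executor is the
// smallest lawful implementation of the trait above.
object NoOpActionExecutor extends ActionExecutor {
  override def doFirstExecutionAction(browser: MaeveBrowser, firstTarget: Uri): Unit = ()
  override def doInitialPageAction(browser: MaeveBrowser): Unit = ()
  override def doBeforeExtractAction(browser: MaeveBrowser): Unit = ()
  override def doAfterExtractAction(browser: MaeveBrowser): Unit = ()
  override def doFinalExecutionAction(browser: MaeveBrowser): Unit = ()
}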
|
zakski/project-maeve
|
src/main/scala/com/szadowsz/maeve/core/instruction/actions/ActionExecutor.scala
|
Scala
|
apache-2.0
| 2,156 |
package text.vector
/**
* @author K.Sakamoto
* Created on 2016/05/22
*/
trait VectorMerger[Vector] {
def merge(vectors: Seq[Vector]): Vector
}
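// Hedged illustration (not from the original file): one possible merger that
// sums same-length Seq[Double] vectors element-wise.
object SumVectorMerger extends VectorMerger[Seq[Double]] {
  override def merge(vectors: Seq[Seq[Double]]): Seq[Double] =
    if (vectors.isEmpty) Seq.empty
    else vectors.transpose.map(_.sum) // transpose assumes all vectors share one length
}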
|
ktr-skmt/FelisCatusZero
|
src/main/scala/text/vector/VectorMerger.scala
|
Scala
|
apache-2.0
| 159 |
case class A(private var foo: Any) {
def m = { def foo = 42 /*will be lambda lifted to `A#foo$1`*/ }
}
object Test {
def main(args: Array[String]): Unit = {
val A("") = new A("")
new A("").m
}
}
|
lrytz/scala
|
test/files/run/t8944b.scala
|
Scala
|
apache-2.0
| 211 |
package me.yingrui.segment.word2vec
import java.lang.Math.abs
import me.yingrui.segment.neural.Sigmoid
import me.yingrui.segment.word2vec.SimplifiedActivationUtil._
import org.scalatest.{FunSuite, Matchers}
import scala.util.Random
class SimplifiedActivationUtilTest extends FunSuite with Matchers {
test("simplify sigmoid activation") {
val sigmoid = Sigmoid()
for(i <- -6 until 6) {
abs(simplifiedSigmoid(i.toDouble) - sigmoid.activate(i.toDouble)) shouldBe < (1E-2)
}
}
test("return 1D when input exceed MAX_EXP 6") {
val doubleGenerator = new Random();
for (i <- 1 to 100) {
simplifiedSigmoid(6D + doubleGenerator.nextDouble() * 100D) should be (1D)
}
}
test("return 0D when input less than -6") {
val doubleGenerator = new Random();
for (i <- 1 to 100) {
simplifiedSigmoid(-6D - doubleGenerator.nextDouble() * 100D) should be (0D)
}
}
}
|
yingrui/mahjong
|
lib-segment/src/test/scala/me/yingrui/segment/word2vec/SimplifiedActivationUtilTest.scala
|
Scala
|
gpl-3.0
| 916 |
/*
* (c) Copyright 2019 EntIT Software LLC, a Micro Focus company, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.content.google.actions.authentication
import java.nio.charset.StandardCharsets
import java.util
import com.hp.oo.sdk.content.annotations.{Action, Output, Param, Response}
import com.hp.oo.sdk.content.plugin.ActionMetadata.MatchType.COMPARE_EQUAL
import com.hp.oo.sdk.content.plugin.ActionMetadata.ResponseType
import io.cloudslang.content.constants.OutputNames.{EXCEPTION, RETURN_CODE, RETURN_RESULT}
import io.cloudslang.content.constants.{ResponseNames, ReturnCodes}
import io.cloudslang.content.google.utils.Constants.NEW_LINE
import io.cloudslang.content.google.utils.action.DefaultValues.{DEFAULT_PROXY_PORT, DEFAULT_SCOPES_DELIMITER, DEFAULT_TIMEOUT}
import io.cloudslang.content.google.utils.action.InputNames._
import io.cloudslang.content.google.utils.action.InputUtils.verifyEmpty
import io.cloudslang.content.google.utils.action.InputValidator.{validateNonNegativeLong, validateProxyPort}
import io.cloudslang.content.google.utils.service.{GoogleAuth, HttpTransportUtils, JsonFactoryUtils}
import io.cloudslang.content.utils.NumberUtilities.{toInteger, toLong}
import io.cloudslang.content.utils.OutputUtilities.{getFailureResultsMap, getSuccessResultsMap}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils.{EMPTY, defaultIfEmpty}
/**
* Created by victor on 28.02.2017.
*/
class GetAccessToken {
/**
* This operation can be used to retrieve an access token to be used in subsequent google compute operations.
*
* @param jsonToken Content of the Google Cloud service account JSON.
* @param scopes Scopes that you might need to request to access Google Compute APIs, depending on the level of access
* you need. One or more scopes may be specified delimited by the <scopesDelimiter>.
* Example: 'https://www.googleapis.com/auth/compute.readonly'
* Note: It is recommended to use the minimum necessary scope in order to perform the requests.
* For a full list of scopes see https://developers.google.com/identity/protocols/googlescopes#computev1
* @param scopesDelInp Optional - Delimiter that will be used for the <scopes> input.
* Default: ","
* @param timeoutInp Optional - Timeout of the resulting access token, in seconds.
* Default: "600"
* @param proxyHost Optional - Proxy server used to access the provider services.
* @param proxyPortInp Optional - Proxy server port used to access the provider services.
* Default: "8080"
* @param proxyUsername Optional - Proxy server user name.
* @param proxyPasswordInp Optional - Proxy server password associated with the <proxyUsername> input value.
* @return a map containing an access token as returnResult
*/
@Action(name = "Get the access token for Google Cloud",
outputs = Array(
new Output(RETURN_CODE),
new Output(RETURN_RESULT),
new Output(EXCEPTION)
),
responses = Array(
new Response(text = ResponseNames.SUCCESS, field = RETURN_CODE, value = ReturnCodes.SUCCESS, matchType = COMPARE_EQUAL, responseType = ResponseType.RESOLVED),
new Response(text = ResponseNames.FAILURE, field = RETURN_CODE, value = ReturnCodes.FAILURE, matchType = COMPARE_EQUAL, responseType = ResponseType.ERROR, isOnFail = true)
)
)
def execute(@Param(value = JSON_TOKEN, required = true, encrypted = true) jsonToken: String,
@Param(value = SCOPES, required = true) scopes: String,
@Param(value = SCOPES_DELIMITER) scopesDelInp: String,
@Param(value = TIMEOUT) timeoutInp: String,
@Param(value = PROXY_HOST) proxyHost: String,
@Param(value = PROXY_PORT) proxyPortInp: String,
@Param(value = PROXY_USERNAME) proxyUsername: String,
@Param(value = PROXY_PASSWORD, encrypted = true) proxyPasswordInp: String): util.Map[String, String] = {
val proxyHostOpt = verifyEmpty(proxyHost)
val proxyUsernameOpt = verifyEmpty(proxyUsername)
val proxyPortStr = defaultIfEmpty(proxyPortInp, DEFAULT_PROXY_PORT)
val proxyPassword = defaultIfEmpty(proxyPasswordInp, EMPTY)
val scopesDel = defaultIfEmpty(scopesDelInp, DEFAULT_SCOPES_DELIMITER)
val timeoutStr = defaultIfEmpty(timeoutInp, DEFAULT_TIMEOUT)
val validationStream = validateProxyPort(proxyPortStr) ++
validateNonNegativeLong(timeoutStr, TIMEOUT)
if (validationStream.nonEmpty) {
return getFailureResultsMap(validationStream.mkString(NEW_LINE))
}
val proxyPort = toInteger(proxyPortStr)
val timeout = toLong(timeoutStr)
try {
val httpTransport = HttpTransportUtils.getNetHttpTransport(proxyHostOpt, proxyPort, proxyUsernameOpt, proxyPassword)
val jsonFactory = JsonFactoryUtils.getDefaultJacksonFactory
val credential = GoogleAuth.fromJsonWithScopes(IOUtils.toInputStream(jsonToken, StandardCharsets.UTF_8),
httpTransport, jsonFactory, scopes.split(scopesDel), timeout)
val accessToken = GoogleAuth.getAccessTokenFromCredentials(credential)
getSuccessResultsMap(accessToken)
} catch {
case e: Throwable => getFailureResultsMap(e)
}
}
}
|
CloudSlang/cs-actions
|
cs-google/src/main/scala/io/cloudslang/content/google/actions/authentication/GetAccessToken.scala
|
Scala
|
apache-2.0
| 5,999 |
/**
* *****************************************************************************
* Copyright 2016 Katja Hahn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ****************************************************************************
*/
package com.github.katjahahn.parser.sections.idata
import com.github.katjahahn.parser.sections.SpecialSection
import com.github.katjahahn.parser.PhysicalLocation
import com.github.katjahahn.parser.Location
import scala.collection.JavaConverters._
import com.github.katjahahn.parser.sections.SectionLoader.LoadInfo
import com.github.katjahahn.parser.IOUtil.SpecificationFormat
import com.github.katjahahn.parser.IOUtil
class BoundImportSection private (
private val offset: Long) extends SpecialSection {
def getImports(): java.util.List[ImportDLL] = null //TODO implement
/**
* {@inheritDoc}
*/
override def getOffset(): Long = offset
/**
* {@inheritDoc}
*/
override def isEmpty(): Boolean = false
/**
*
* @return a list with all locations the import information has been written to.
*/
def getPhysicalLocations(): java.util.List[PhysicalLocation] = {
List.empty[PhysicalLocation].asJava
}
/**
* Returns a description of all entries in the import section.
*
* @return a description of all entries in the import section
*/
override def getInfo(): String =
s"""|--------------
|Bound Imports
|--------------
|
|-todo-""".stripMargin
}
object BoundImportSection {
def apply(loadInfo: LoadInfo): BoundImportSection = {
val format = new SpecificationFormat(0, 1, 2, 3)
null
}
/**
* The instance of this class is usually created by the section loader.
*
* @param loadInfo
* @return ImportSection instance
*/
def newInstance(loadInfo: LoadInfo): BoundImportSection = apply(loadInfo)
}
|
katjahahn/PortEx
|
src/main/java/com/github/katjahahn/parser/sections/idata/BoundImportSection.scala
|
Scala
|
apache-2.0
| 2,379 |
import sbt.{Def, _}
import sbtassembly.AssemblyPlugin.autoImport.{MergeStrategy, assembly, assemblyMergeStrategy}
import sbtassembly.{AssemblyPlugin, PathList}
import Keys._
object Build extends AutoPlugin {
override def trigger = allRequirements
override def requires: Plugins = AssemblyPlugin
override def projectSettings: Seq[Def.Setting[_]] =
Vector(
resolvers ++= Vector(
"Sonatype SNAPSHOTS" at "https://oss.sonatype.org/content/repositories/snapshots/"
),
scalaVersion := Version.Scala,
assemblyMergeStrategy in assembly := {
case PathList("META-INF", "MANIFEST.MF") => MergeStrategy.discard
// the specific META-INF entry must precede the "META-INF" wildcard,
// which would otherwise make it unreachable
case PathList("META-INF", "io.netty.versions.properties") => MergeStrategy.last
case PathList("META-INF", xs @ _*) => MergeStrategy.last
case PathList("codegen.json") => MergeStrategy.discard
case x =>
val oldStrategy = (assemblyMergeStrategy in assembly).value
oldStrategy(x)
},
scalacOptions ++= Vector(
"-unchecked",
"-deprecation",
"-language:_",
"-target:jvm-1.8",
"-encoding", "UTF-8"
),
mainClass := Some("io.vertx.core.Launcher"),
unmanagedSourceDirectories in Compile := Vector(scalaSource.in(Compile).value),
unmanagedSourceDirectories in Test := Vector(scalaSource.in(Test).value),
initialCommands in console := """|import io.vertx.lang.scala._
|import io.vertx.lang.scala.ScalaVerticle.nameForVerticle
|import io.vertx.scala.core._
|import scala.concurrent.Future
|import scala.concurrent.Promise
|import scala.util.Success
|import scala.util.Failure
|val vertx = Vertx.vertx
|implicit val executionContext = io.vertx.lang.scala.VertxExecutionContext(vertx.getOrCreateContext)
|""".stripMargin
)
}
|
gengstrand/clojure-news-feed
|
server/feed11/project/Build.scala
|
Scala
|
epl-1.0
| 2,154 |
/*
* Copyright 2015 Renaud Bruneliere
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.bruneli.scalaopt.core.constraint
/**
* Lower or equal inequality operator
*
* @author bruneli
*/
object LowerOrEqualOperator extends ConstraintOperator {
/**
* Check if left and right operands satisfy the constraint defined by the operator
*
* @param left left operand real-value
* @param right right operand real-value
* @return true if constraint is satisfied
*/
def apply(left: Double, right: Double): Boolean = left <= right
}
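// Hedged usage sketch (not from the original file):
//   LowerOrEqualOperator(1.0, 2.0) // true:  1.0 <= 2.0
//   LowerOrEqualOperator(3.0, 2.0) // false: 3.0 >  2.0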
|
bruneli/scalaopt
|
core/src/main/scala/com/github/bruneli/scalaopt/core/constraint/LowerOrEqualOperator.scala
|
Scala
|
apache-2.0
| 1,079 |
trait Num:
type Nat
object IsInt:
def unapply(using num: Num)(sc: num.Nat): Option[Int] = ???
def test(using num: Num)(x: num.Nat) =
x match
case IsInt(i) =>
|
dotty-staging/dotty
|
tests/pos/i8972.scala
|
Scala
|
apache-2.0
| 170 |
package mypipe.mysql
import java.net.InetAddress
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import akka.actor.Cancellable
import com.github.mauricio.async.db.mysql.MySQLConnection
import com.github.mauricio.async.db.{Configuration, Connection}
import com.github.shyiko.mysql.binlog.BinaryLogClient
import com.typesafe.config.Config
import mypipe.api.consumer.{BinaryLogConsumer, BinaryLogConsumerErrorHandler, BinaryLogConsumerListener, BinaryLogConsumerTableFinder}
import mypipe.api.data.Table
import mypipe.api.event._
import mypipe.api.{Conf, HostPortUserPass}
import mypipe.util
import mypipe.util.{Eval, Listener, Paperboy}
import org.slf4j.LoggerFactory
import scala.collection.JavaConversions._
import scala.collection.mutable.HashMap
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
trait Connections {
val configs: List[HostPortUserPass]
}
trait ConfigBasedConnections extends Connections {
protected val config: Config
override val configs = config.getStringList("source").map(HostPortUserPass(_)).toList
}
trait ClientPool extends Connections {
def getClient: BinaryLogClient
def getClientInfo: HostPortUserPass
}
trait SimpleClientPool extends ClientPool with ConfigBasedConnections with Paperboy {
private val log = LoggerFactory.getLogger(getClass)
protected val pool = new LinkedBlockingQueue[BinaryLogClient]()
protected val instances = HashMap.empty[BinaryLogClient, HostPortUserPass]
protected var current: BinaryLogClient = null
configs.foreach { i ⇒
Try(new BinaryLogClient(i.host, i.port, i.user, i.password)) match {
case Success(c) ⇒
log.info(s"Adding mysql client to pool: ${i.host}:${i.port}")
pool.offer(c)
instances += c → i
case Failure(e) ⇒ log.error("BinaryLogClient init error: ", e)
}
}
override def getClient: BinaryLogClient = Option(pool.peek()) match {
case Some(client) ⇒
if (current != client) current = client
client
case None ⇒
paperboy.error("No available client at the time! host:" + InetAddress.getLocalHost().getHostName(), "mypipe")
throw new RuntimeException("No available client at the time!")
}
def getClientInfo: HostPortUserPass = instances(getClient)
}
trait HeartBeatClientPool extends SimpleClientPool { self: MySQLBinaryLogConsumer ⇒
protected var heartBeat: HeartBeat = null
protected var heartbeatThread: Cancellable = null
protected var recoveryThread: Cancellable = null
override def getClient: BinaryLogClient = {
val client = super.getClient
if (heartBeat == null) {
heartBeat = newHeartBeat(instances(current))
heartbeatThread = heartBeat.beat()
}
client
}
def newHeartBeat(info: HostPortUserPass) = {
val hb = new HeartBeat(info.host, info.port, info.user, info.password) {
override def onFailure() = {
paperboy.error(s"Mysql instance is down! ${info.host}:${info.port}", "mypipe")
val prev = pool.poll(5, TimeUnit.SECONDS)
val next = pool.peek()
if (next != null && !next.isConnected) {
// only look up connection info once we know a standby client exists
val nextInfo = instances(next)
// start new connection
log.error(s"Switching to another mysql instance..., [${nextInfo.host}:${nextInfo.port}], " +
s"binlog file: ${prev.getBinlogFilename}, pos: ${prev.getBinlogPosition}")
next.setBinlogFilename(prev.getBinlogFilename)
next.setBinlogPosition(prev.getBinlogPosition)
onStart()
// close old connection
prev.disconnect()
}
heartBeat = null
if (Conf.MYSQL_DO_RECOVER_AFTER_DOWN) doRecover(info)
}
override def onSuccess(): Unit = if (log.isDebugEnabled) log.debug(s"Heartbeat detection success: ${info.host}-${info.port}")
}
hb.addListener(new Listener {
override def onEvent(evt: util.Event) = evt match {
case BeatFailure ⇒ heartbeatThread.cancel()
case _ ⇒
}
})
hb
}
protected def doRecover(info: HostPortUserPass): Unit = {
val heartBeat = new HeartBeat(info.host, info.port, info.user, info.password, Conf.MYSQL_SECONDS_BEFORE_RECOVER_AFTER_DOWN.seconds) {
@volatile var nSuccess = 0
override def onFailure(): Unit = {}
override def onSuccess(): Unit = {
nSuccess += 1
if (nSuccess == 3) {
log.info(s"Recover success! Putting mysql client back to pool: ${info.host}-${info.port}")
pool.offer(new BinaryLogClient(info.host, info.port, info.user, info.password))
}
}
}
heartBeat.addListener(new Listener {
@volatile var nFailure = 0
override def onEvent(evt: util.Event) = evt match {
case BeatSuccess ⇒ recoveryThread.cancel()
case _ ⇒
nFailure += 1
if (nFailure == Conf.MYSQL_HEARTBEAT_MAX_RETRY) {
log.info(s"Recover failed: ${info.host}-${info.port}")
recoveryThread.cancel()
}
}
})
recoveryThread = heartBeat.beat()
}
}
case class Db(hostname: String, port: Int, username: String, password: String, dbName: String) {
private val configuration = new Configuration(username, hostname, port, Some(password))
var connection: Connection = _
def connect(): Unit = connect(timeoutMillis = 5000)
def connect(timeoutMillis: Int) {
connection = new MySQLConnection(configuration)
val future = connection.connect
Await.result(future, timeoutMillis.millis)
}
def select(db: String): Unit = {
}
def disconnect(): Unit = disconnect(timeoutMillis = 5000)
def disconnect(timeoutMillis: Int) {
val future = connection.disconnect
Await.result(future, timeoutMillis.millis)
}
}
/** Used when no event skipping behaviour is desired.
*/
trait NoEventSkippingBehaviour {
this: BinaryLogConsumer[_] ⇒
protected def skipEvent(e: TableContainingEvent): Boolean = false
}
/** Used alongside the configuration in order to read in and
* compile the code responsible for keeping or skipping events.
*/
// TODO: write a test for this functionality
trait ConfigBasedEventSkippingBehaviour {
this: BinaryLogConsumer[_] ⇒
val includeEventCond = Conf.INCLUDE_EVENT_CONDITION
val skipFn: (String, String) ⇒ Boolean =
if (includeEventCond.isDefined)
Eval(s"""{ (db: String, table: String) => { ! ( ${includeEventCond.get} ) } }""")
else
(_, _) ⇒ false
protected def skipEvent(e: TableContainingEvent): Boolean = {
skipFn(e.table.db, e.table.name)
}
}
trait ConfigBasedErrorHandlingBehaviour[BinaryLogEvent] extends BinaryLogConsumerErrorHandler[BinaryLogEvent] {
val handler = Conf.loadClassesForKey[BinaryLogConsumerErrorHandler[BinaryLogEvent]]("mypipe.error.handler")
.headOption
.map(_._2.map(_.newInstance()))
.getOrElse(None)
def handleEventError(event: Option[Event], binaryLogEvent: BinaryLogEvent): Boolean =
handler.exists(_.handleEventError(event, binaryLogEvent))
def handleMutationError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(mutation: Mutation): Boolean =
handler.exists(_.handleMutationError(listeners, listener)(mutation))
def handleMutationsError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(mutations: Seq[Mutation]): Boolean =
handler.exists(_.handleMutationsError(listeners, listener)(mutations))
def handleTableMapError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(table: Table, event: TableMapEvent): Boolean =
handler.exists(_.handleTableMapError(listeners, listener)(table, event))
def handleAlterError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(table: Table, event: AlterEvent): Boolean =
handler.exists(_.handleAlterError(listeners, listener)(table, event))
def handleCommitError(mutationList: List[Mutation], faultyMutation: Mutation): Boolean =
handler.exists(_.handleCommitError(mutationList, faultyMutation))
def handleEventDecodeError(binaryLogEvent: BinaryLogEvent): Boolean =
handler.exists(_.handleEventDecodeError(binaryLogEvent))
def handleEmptyCommitError(queryList: List[QueryEvent]): Boolean =
handler.exists(_.handleEmptyCommitError(queryList))
}
class ConfigBasedErrorHandler[BinaryLogEvent] extends BinaryLogConsumerErrorHandler[BinaryLogEvent] {
private val log = LoggerFactory.getLogger(getClass)
private val quitOnEventHandlerFailure = Conf.QUIT_ON_EVENT_HANDLER_FAILURE
private val quitOnEventDecodeFailure = Conf.QUIT_ON_EVENT_DECODE_FAILURE
private val quitOnEmptyMutationCommitFailure = Conf.QUIT_ON_EMPTY_MUTATION_COMMIT_FAILURE
private val quitOnEventListenerFailure = Conf.QUIT_ON_LISTENER_FAILURE
override def handleEventError(event: Option[Event], binaryLogEvent: BinaryLogEvent): Boolean = {
log.error("Could not handle event {} from raw event {}", event, binaryLogEvent)
!quitOnEventHandlerFailure
}
override def handleMutationError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(mutation: Mutation): Boolean = {
log.error("Could not handle mutation {} from listener {}", mutation.asInstanceOf[Any], listener)
!quitOnEventListenerFailure
}
override def handleMutationsError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(mutations: Seq[Mutation]): Boolean = {
log.error("Could not handle {} mutation(s) from listener {}", mutations.length, listener)
!quitOnEventListenerFailure
}
override def handleTableMapError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(table: Table, event: TableMapEvent): Boolean = {
log.error("Could not handle table map event {} for table {} from listener {}", event, table, listener)
!quitOnEventListenerFailure
}
override def handleAlterError(listeners: List[BinaryLogConsumerListener[BinaryLogEvent]], listener: BinaryLogConsumerListener[BinaryLogEvent])(table: Table, event: AlterEvent): Boolean = {
log.error("Could not handle alter event {} for table {} from listener {}", event, table, listener)
!quitOnEventListenerFailure
}
override def handleCommitError(mutationList: List[Mutation], faultyMutation: Mutation): Boolean = {
log.error("Could not handle commit due to faulty mutation {} for mutations {}", faultyMutation.asInstanceOf[Any], mutationList)
!quitOnEventHandlerFailure
}
override def handleEmptyCommitError(queryList: List[QueryEvent]): Boolean = {
val l: (String, Any) ⇒ Unit = if (quitOnEmptyMutationCommitFailure) log.error else log.debug
l("Could not handle commit due to empty mutation list, missed queries: {}", queryList)
!quitOnEmptyMutationCommitFailure
}
override def handleEventDecodeError(binaryLogEvent: BinaryLogEvent): Boolean = {
log.trace("Event could not be decoded {}", binaryLogEvent)
!quitOnEventDecodeFailure
}
}
trait CacheableTableMapBehaviour extends BinaryLogConsumerTableFinder with SimpleClientPool {
private var _tableInfo: HostPortUserPass = null
private var _tableCache: TableCache = null
protected def tableCache = if (_tableInfo != getClientInfo) {
_tableInfo = getClientInfo
_tableCache = new TableCache(_tableInfo.host, _tableInfo.port, _tableInfo.user, _tableInfo.password)
_tableCache
} else _tableCache
override protected def findTable(tableMapEvent: TableMapEvent): Option[Table] = {
Await.result(tableCache.addTableByEvent(tableMapEvent), Duration.Inf)
}
override protected def findTable(tableId: java.lang.Long): Option[Table] =
tableCache.getTable(tableId)
override protected def findTable(database: String, table: String): Option[Table] = {
Await.result(tableCache.refreshTable(database, table), Duration.Inf)
}
}
|
tramchamploo/mypipe
|
mypipe-api/src/main/scala/mypipe/mysql/binaryLogConsumerTraits.scala
|
Scala
|
apache-2.0
| 12,136 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import java.io.PrintStream
import java.util.Properties
import kafka.utils.CommandLineUtils
import org.apache.kafka.common.utils.Utils
import org.apache.kafka.clients.CommonClientConfigs
import joptsimple._
import scala.util.{Failure, Success}
/**
* A command for retrieving broker version information.
*/
object BrokerApiVersionsCommand {
def main(args: Array[String]): Unit = {
execute(args, System.out)
}
def execute(args: Array[String], out: PrintStream): Unit = {
val opts = new BrokerVersionCommandOptions(args)
val adminClient = createAdminClient(opts)
adminClient.awaitBrokers()
val brokerMap = adminClient.listAllBrokerVersionInfo()
brokerMap.foreach { case (broker, versionInfoOrError) =>
versionInfoOrError match {
case Success(v) => out.print(s"${broker} -> ${v.toString(true)}\n")
case Failure(v) => out.print(s"${broker} -> ERROR: ${v}\n")
}
}
adminClient.close()
}
private def createAdminClient(opts: BrokerVersionCommandOptions): AdminClient = {
val props = if (opts.options.has(opts.commandConfigOpt))
Utils.loadProps(opts.options.valueOf(opts.commandConfigOpt))
else
new Properties()
props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, opts.options.valueOf(opts.bootstrapServerOpt))
AdminClient.create(props)
}
class BrokerVersionCommandOptions(args: Array[String]) {
val BootstrapServerDoc = "REQUIRED: The server to connect to."
val CommandConfigDoc = "A property file containing configs to be passed to Admin Client."
val parser = new OptionParser(false)
val commandConfigOpt = parser.accepts("command-config", CommandConfigDoc)
.withRequiredArg
.describedAs("command config property file")
.ofType(classOf[String])
val bootstrapServerOpt = parser.accepts("bootstrap-server", BootstrapServerDoc)
.withRequiredArg
.describedAs("server(s) to use for bootstrapping")
.ofType(classOf[String])
val options = parser.parse(args : _*)
checkArgs()
def checkArgs() {
// check required args
CommandLineUtils.checkRequiredArgs(parser, options, bootstrapServerOpt)
}
}
}
|
themarkypantz/kafka
|
core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala
|
Scala
|
apache-2.0
| 3,173 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.rest
import com.bwsw.sj.common.config.ConfigLiterals
import com.fasterxml.jackson.annotation.JsonProperty
import scala.annotation.meta.field
import scala.collection.mutable
class RestResponse(@(JsonProperty @field)("status-code") val statusCode: Int = 0, entity: ResponseEntity = new ResponseEntity {})
case class OkRestResponse(var entity: ResponseEntity) extends RestResponse(200, entity)
case class CreatedRestResponse(var entity: ResponseEntity) extends RestResponse(201, entity)
case class BadRequestRestResponse(var entity: ResponseEntity) extends RestResponse(400, entity)
case class NotFoundRestResponse(var entity: ResponseEntity) extends RestResponse(404, entity)
case class ConflictRestResponse(var entity: ResponseEntity) extends RestResponse(409, entity)
case class UnprocessableEntityRestResponse(var entity: ResponseEntity) extends RestResponse(422, entity)
case class InternalServerErrorRestResponse(var entity: ResponseEntity) extends RestResponse(500, entity)
trait ResponseEntity
case class FrameworkTask(id: String,
state: String,
stateChange: String,
reason: String,
node: String,
lastNode: String,
directories: scala.collection.mutable.ListBuffer[Directory])
case class Directory(name: String, path: String)
case class FrameworkRestEntity(tasks: Seq[FrameworkTask]) extends ResponseEntity
case class MessageResponseEntity(message: String) extends ResponseEntity
case class KeyedMessageResponseEntity(message: String, key: String) extends ResponseEntity
case class TypesResponseEntity(types: Seq[Type]) extends ResponseEntity
case class RelatedToStreamResponseEntity(instances: mutable.Buffer[String] = mutable.Buffer()) extends ResponseEntity
case class DomainsResponseEntity(domains: Seq[Type] = ConfigLiterals.domainTypes) extends ResponseEntity
case class Type(id: String, name: String)
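// Hedged usage sketch (not from the original file): wraps a message entity in
// the 200 and 404 response shapes defined above.
object RestResponseExample {
  def found(msg: String): RestResponse = OkRestResponse(MessageResponseEntity(msg))
  def missing(msg: String): RestResponse = NotFoundRestResponse(MessageResponseEntity(msg))
}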
|
bwsw/sj-platform
|
core/sj-common/src/main/scala/com/bwsw/sj/common/rest/RestResponse.scala
|
Scala
|
apache-2.0
| 2,819 |
import com.nathanstoddard._
import common._
import common.geom._
import common.random._
import common.log._
import renderer._
import org.lwjgl
import org.lwjgl._
import org.lwjgl.opengl._
import org.lwjgl.input._
import org.lwjgl.opengl.GL11._
import org.lwjgl.opengl.GL12._
import org.lwjgl.opengl.GL13._
import org.lwjgl.opengl.GL15._
import org.lwjgl.opengl.GL20._
import org.lwjgl.opengl.GL21._
import io._
import java.util.Date
import java.text.SimpleDateFormat
import java.io._
import java.awt.image._
import javax.imageio._
import scala.collection.mutable
class Body[M](var pos:Vec3, var rot:Quat, val mesh:M, val program:GLProgram) {
var vel = Vec3.zero
var rotVel = Vec3.zero
def simulate(dt:Real, gravity:Boolean) {
if (gravity) {
vel += Main.gravImpulse(this)*dt
}
pos += vel*dt
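// Integrate orientation with a quaternion exponential: use the 2nd-order
// small-angle series when theta is tiny, the exact axis-angle form otherwise.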
val theta = rotVel * (dt/2)
val dq = if (theta.lengthSquared*theta.lengthSquared/24 < 1.0e-10) {
Quat(1 - theta.lengthSquared/2, theta * (1.0 - theta.lengthSquared/6.0))
} else {
Quat(Math.cos(theta.length), theta.normalize * Math.sin(theta.length))
}
rot = dq * rot
}
/* def rotate(amount:Real) = {
val theta = rotVel * (amount/2)
val dq = if (theta.lengthSquared*theta.lengthSquared/24 < 1.0e-10) {
Quat(1 - theta.lengthSquared/2, theta * (1.0 - theta.lengthSquared/6.0))
} else {
Quat(Math.cos(theta.length), theta.normalize * Math.sin(theta.length))
}
rot = dq * rot
}*/
// drawMatrix is the model transform (translate, then rotate); getMatrix
// composes the opposite order with -pos, i.e. a view-style transform.
def drawMatrix = Matrix4.translate(pos) * rot.toMatrix
def getMatrix = rot.toMatrix.translate(-pos)
}
object ObjReader {
def read(path:String) = {
Renderer.checkGLError("read start")
val mesh = new LitProgram.Mesh(VertexDataUsage.Static)
Renderer.checkGLError("read after creating mesh")
val _verts = mutable.Queue[Vec3]();
val verts = mutable.ArrayBuffer[MeshIndex]()
val lines = FileUtils.read(path).split("\\n")
for (line <- lines if line.nonEmpty) {
line.head match {
case 'v' if line(1) != 'n' =>
Renderer.checkGLError("read vertex")
val xs = line.drop(2).split(" ").filter(!_.isEmpty).map(java.lang.Double.parseDouble(_))
_verts += Vec3(xs(0), xs(1), xs(2))
case 'v' if line(1) == 'n' =>
Renderer.checkGLError("read normal")
val xs = line.drop(2).split(" ").filter(!_.isEmpty).map(java.lang.Double.parseDouble(_))
val normal = Vec3(xs(0), xs(1), xs(2))
val vert = mesh.verts(Seq((_verts.dequeue(), normal, Color.white)))
verts += vert(0)
case 'f' =>
Renderer.checkGLError("read face")
val xs = line.drop(2).split(" ").filter(!_.isEmpty)
val xs2 = xs.map(x => x.take(x.indexOf('/')))
val verts2 = xs2.map(x => verts(Integer.parseInt(x) - 1))
mesh.polygon(verts2)
case _ =>
}
}
mesh
}
}
object Shader {
val fragment = new FragmentShader(new File("shaders/fragment.glsl"))
val fragmentTextured = new FragmentShader(new File("shaders/fragmenttextured.glsl"))
val fragmentTextured2 = new FragmentShader(new File("shaders/fragmenttextured2.glsl"))
val litVertex = new VertexShader(new File("shaders/litvertex.glsl"))
val litVertexTextured = new VertexShader(new File("shaders/litvertextextured.glsl"))
val litVertexTextured2 = new VertexShader(new File("shaders/litvertextextured2.glsl"))
val unlitVertex = new VertexShader(new File("shaders/unlitvertex.glsl"))
}
object UnlitProgram extends GLProgram(Shader.unlitVertex, Shader.fragment, Seq(("position",3), ("color",4))) {
val _modelViewMatrix = new MatrixUniform("modelViewMatrix", this)
val _projMatrix = new MatrixUniform("projMatrix", this)
val _fcoef = new RealUniform("Fcoef", this)
val _fcoefHalf = new RealUniform("Fcoef_half", this)
class Mesh(usage:VertexDataUsage) extends renderer.Mesh(this, usage) {
def verts(xs:Seq[(Vec3,Color)]) = _verts(xs.map{case (pos,color) => Seq(pos.x, pos.y, pos.z, color.r, color.g, color.b, color.a)})
def draw(modelViewMatrix:Matrix4, projMatrix:Matrix4, fcoef:Real, fcoefHalf:Real) = {
_modelViewMatrix := modelViewMatrix
_projMatrix := projMatrix
_fcoef := fcoef
_fcoefHalf := fcoefHalf
_draw()
}
}
}
object LitProgram extends GLProgram(Shader.litVertex, Shader.fragment, Seq(("position",3), ("normal",3), ("color",4))) {
val _modelViewMatrix = new MatrixUniform("modelViewMatrix", this)
val _projMatrix = new MatrixUniform("projMatrix", this)
val _fcoef = new RealUniform("Fcoef", this)
val _fcoefHalf = new RealUniform("Fcoef_half", this)
val _lightDir = new Vec4Uniform("lightDir", this)
val _lightColor = new ColorUniform("lightColor", this)
val _ambientColor = new ColorUniform("ambientColor", this)
class Mesh(usage:VertexDataUsage) extends renderer.Mesh(this, usage) {
def verts(xs:Seq[(Vec3,Vec3,Color)]) = _verts(xs.map{case (pos,normal,color) =>
Seq(pos.x, pos.y, pos.z, normal.x, normal.y, normal.z, color.r, color.g, color.b, color.a)})
def draw(modelViewMatrix:Matrix4, projMatrix:Matrix4, lightDir:Vec4, lightColor:Color, ambientColor:Color, fcoef:Real, fcoefHalf:Real) = {
_modelViewMatrix := modelViewMatrix
_projMatrix := projMatrix
_lightDir := lightDir
_lightColor := lightColor
_ambientColor := ambientColor
_fcoef := fcoef
_fcoefHalf := fcoefHalf
_draw()
}
}
}
object LitProgramTextured extends GLProgram(Shader.litVertexTextured, Shader.fragmentTextured, Seq(("position",3), ("normal",3), ("color",4), ("texcoord",2))) {
val _modelViewMatrix = new MatrixUniform("modelViewMatrix", this)
val _projMatrix = new MatrixUniform("projMatrix", this)
val _fcoef = new RealUniform("Fcoef", this)
val _fcoefHalf = new RealUniform("Fcoef_half", this)
val _lightDir = new Vec4Uniform("lightDir", this)
val _lightColor = new ColorUniform("lightColor", this)
class Mesh(usage:VertexDataUsage) extends renderer.Mesh(this, usage) {
def verts(xs:Seq[(Vec3,Vec3,Color,Vec2)]) = _verts(xs.map{case (pos,normal,color,texcoord) =>
Seq(pos.x, pos.y, pos.z, normal.x, normal.y, normal.z, color.r, color.g, color.b, color.a, texcoord.x, texcoord.y)})
def draw(modelViewMatrix:Matrix4, projMatrix:Matrix4, lightDir:Vec4, lightColor:Color, fcoef:Real, fcoefHalf:Real) = {
_modelViewMatrix := modelViewMatrix
_projMatrix := projMatrix
_lightDir := lightDir
_lightColor := lightColor
_fcoef := fcoef
_fcoefHalf := fcoefHalf
_draw()
}
}
}
object LitProgramTextured2 extends GLProgram(Shader.litVertexTextured2, Shader.fragmentTextured2, Seq(("position",3), ("normal",3), ("color",4), ("texcoord",2))) {
val _modelViewMatrix = new MatrixUniform("modelViewMatrix", this)
val _projMatrix = new MatrixUniform("projMatrix", this)
val _fcoef = new RealUniform("Fcoef", this)
val _fcoefHalf = new RealUniform("Fcoef_half", this)
val _lightDir = new Vec4Uniform("lightDir", this)
val _lightColor = new ColorUniform("lightColor", this)
val _ambientColor = new ColorUniform("ambientColor", this)
class Mesh(usage:VertexDataUsage) extends renderer.Mesh(this, usage) {
def verts(xs:Seq[(Vec3,Vec3,Color,Vec2)]) = _verts(xs.map{case (pos,normal,color,texcoord) =>
Seq(pos.x, pos.y, pos.z, normal.x, normal.y, normal.z, color.r, color.g, color.b, color.a, texcoord.x, texcoord.y)})
def draw(modelViewMatrix:Matrix4, projMatrix:Matrix4, lightDir:Vec4, lightColor:Color, ambientColor:Color, fcoef:Real, fcoefHalf:Real) = {
_modelViewMatrix := modelViewMatrix
_projMatrix := projMatrix
_lightDir := lightDir
_lightColor := lightColor
_ambientColor := ambientColor
_fcoef := fcoef
_fcoefHalf := fcoefHalf
_draw()
}
}
}
object Main extends InputHandler {
val fps = 120
val dt = 1.0/fps
val startFov = tau/6.0
var fov = startFov
val defaultFlySpeed = 100.0
var flySpeed = defaultFlySpeed
def rotSpeed = 1.44 * fov/startFov
val keyRotSpeed = 2.0
val earthRadius = 6.37e+6
val orbitHeight = 424.1e+3
val earthPos = Vec3(earthRadius+orbitHeight,0,0)
val earthMass = 5.97e+24
val g = 6.67e-11
val dayLength = 60 * 60 * 24
val moonPos = Vec3(0,0,-381435e+3)
val moonRadius = 1737.5e+3
val moonMass = 7.349e+22
val sunPos = Vec3(-1.521e+11,0,0)
val sunRadius = 695.5e+6
val sunColor = Color(toReal(0xff)/255, toReal(0xf3)/255, toReal(0xea)/255)
val starDist = 1e+16
val useSmallTextures = true
val fullscreen = true
val projRadius = 0.1
val weaponCooldown = 0.5
val defaultProjSpeed = 10.0
val defaultProjColor = Color.green
var projSpeed = defaultProjSpeed
var projColor = defaultProjColor
val you = new Body(Vec3.zero, Quat.one, null, null)
you.vel = Vec3(0,0,speedAtHeight(orbitHeight + earthRadius))
val projs = mutable.Set[Body[LitProgram.Mesh]]()
val planets = mutable.Set[Body[LitProgramTextured.Mesh]]()
val farPlanets = mutable.Set[Body[UnlitProgram.Mesh]]()
val moons = mutable.Set[Body[LitProgramTextured2.Mesh]]()
var earthDayTexture:Texture = _
var earthNightTexture:Texture = _
var moonTexture:Texture = _
def moveInDir(dist:Real, dir:Vec3) = (you.rot * Quat.fromAngleAxis(tau/4, dir)).rotate(Vec3(dist,0,0))
def makeSphere(r:Real, isStar:Boolean, isMoon:Boolean) = {
Renderer.checkGLError("makeSphere start")
val mesh = if (isStar) new UnlitProgram.Mesh(VertexDataUsage.Static)
else if (isMoon) new LitProgramTextured2.Mesh(VertexDataUsage.Static)
else new LitProgramTextured.Mesh(VertexDataUsage.Static)
var theta = 0.0
val diff = tau/360
def next(x:Real) = x + diff
def point(theta:Real, phi:Real) = Vec3(
r * Math.sin(theta) * Math.cos(phi),
r * Math.sin(theta) * Math.sin(phi),
r * Math.cos(theta)
)
def texAt(theta:Real, phi:Real) = Vec2(phi/tau, theta/(tau*0.5))
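    // texAt maps spherical coordinates to equirectangular texture coordinates:
    // phi (longitude) spans u in [0,1), theta (colatitude) spans v in [0,1].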
while (theta < tau*0.5) {
var phi = 0.0
while (phi < tau) {
val a = point(theta, phi)
val b = point(next(theta), phi)
val c = point(next(theta), next(phi))
val d = point(theta, next(phi))
val at = texAt(theta, phi)
val bt = texAt(next(theta), phi)
val ct = texAt(next(theta), next(phi))
val dt = texAt(theta, next(phi))
def planetColorAt(x:Vec3) = if (isStar) sunColor else Color.white
val verts = if (isStar) mesh.asInstanceOf[UnlitProgram.Mesh].verts(Seq(a, b, c, d).map(x => (x, planetColorAt(x))))
else if (isMoon) mesh.asInstanceOf[LitProgramTextured2.Mesh].verts(Seq((a,at), (b,bt), (c,ct), (d,dt)).map{case (x,y) => (x, x, planetColorAt(x), y)})
else mesh.asInstanceOf[LitProgramTextured.Mesh].verts(Seq((a,at), (b,bt), (c,ct), (d,dt)).map{case (x,y) => (x, x, planetColorAt(x), y)})
mesh.polygon(verts)
phi = next(phi)
}
theta = next(theta)
}
mesh
}
def gravImpulseFrom[M](x:Body[M], bodyPos:Vec3, bodyMass:Real) = (bodyPos-x.pos).normalize * (g * bodyMass / (x.pos distSquared bodyPos))
def gravImpulse[M](x:Body[M]) = gravImpulseFrom(x, earthPos, earthMass) + gravImpulseFrom(x, moonPos, moonMass)
def speedAtHeight(r:Real) = Math.sqrt(g * earthMass / r)
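  // Notes (added for clarity; not in the original): gravImpulseFrom is the
  // inverse-square acceleration G*M/r^2 directed toward the attracting body,
  // and speedAtHeight(r) = sqrt(G*M/r) is the circular-orbit speed at radius r,
  // used in main() to give `you` and the ship a stable starting orbit.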
def main(args:Array[String]) {
if (fullscreen) Renderer.initFullscreen("Space combat", this, fps)
else Renderer.init(800, 600, "Space combat", this, fps)
Renderer.checkGLError("before loading font")
val font = Font.dialog(15)
val largeFont = Font.dialog(30)
Renderer.printCentered(largeFont, Vec2(Renderer.windowWidth/2, Renderer.windowHeight/2),
"Loading textures. This may take a while...")
Renderer.updateScreen()
val is64Bit = System.getProperty("sun.arch.data.model") == "64"
val maxTexSize = glGetInteger(GL_MAX_TEXTURE_SIZE)
println("Maximum texture size: " + maxTexSize)
val texSize = if (maxTexSize >= 16200 && !useSmallTextures && is64Bit) 16200
else if (maxTexSize >= 8192 && !useSmallTextures && is64Bit) 8192
else 4096
val (dayTex,nightTex) = if (texSize >= 16200) ("1_earth_16k.jpg", "5_night_16k.jpg")
else if (texSize >= 8192) ("1_earth_8k.jpg", "5_night_8k.jpg")
else ("1_earth_4k.jpg", "5_night_4k.jpg")
val moonTex = if (texSize >= 8192) "moon_8k_color_brim16.jpg"
else "moon_4k_color_brim16.jpg"
var texSizeString = "Texture size: " + texSize
if (!is64Bit && !useSmallTextures && texSize < maxTexSize) texSizeString += " (use 64-bit JVM for larger textures)"
LitProgramTextured2.use()
moonTexture = new Texture(new File("textures/" + moonTex), true)
glUniform1i(glGetUniformLocation(LitProgramTextured2.glProgram, "tex"), 0);
Renderer.checkGLError("after loading moon tex")
LitProgramTextured.use()
earthDayTexture = new Texture(new File("textures/" + dayTex), true)
glUniform1i(glGetUniformLocation(LitProgramTextured.glProgram, "dayTex"), 0);
Renderer.checkGLError("after loading day tex")
earthNightTexture = new Texture(new File("textures/" + nightTex), true) // TODO: this doesn't include clouds
glUniform1i(glGetUniformLocation(LitProgramTextured.glProgram, "nightTex"), 1);
Renderer.checkGLError("after loading night tex")
// This allows you to see essentially forever (1e+25 units away). It could possibly cause z-fighting issues, but I haven't seen any.
val fcoef = 2.0 / (Math.log(1e+25 + 1.0) / Math.log(2.0))
val fcoefHalf = fcoef*0.5
Renderer.grabMouse()
val fpsLogger = new FPSLogger(1.0)
Renderer.checkGLError("before loading ship1")
val objBody = new Body(Vec3(0,0,-10), Quat.one, ObjReader.read("ship1.obj"), LitProgram)
objBody.vel = Vec3(0,0,speedAtHeight(orbitHeight + earthRadius))
Renderer.checkGLError("after loading ship1")
val earth = new Body(earthPos, Quat.one, makeSphere(earthRadius, false, false).asInstanceOf[LitProgramTextured.Mesh], LitProgramTextured)
earth.rotVel = Vec3(0,0,tau/dayLength)
planets += earth
val moon = new Body(moonPos, Quat.one, makeSphere(moonRadius, false, true).asInstanceOf[LitProgramTextured2.Mesh], LitProgramTextured2)
moons += moon
farPlanets += new Body(sunPos, Quat.one, makeSphere(sunRadius, true, false).asInstanceOf[UnlitProgram.Mesh], UnlitProgram)
val random = new java.util.Random()
val skyboxMesh = new UnlitProgram.Mesh(VertexDataUsage.Static)
for (i <- 0 until 5000) {
val point = Vec3(random.nextGaussian(), random.nextGaussian(), random.nextGaussian()).normalize * starDist
val vert = skyboxMesh.verts(Seq((point, getStarColor())))
skyboxMesh.point(vert(0))
}
var cooldown = 0.0
while (true) {
Renderer.checkGLError("Main loop")
fpsLogger.update()
val _lightColor = sunColor
val lightDir = Vec4(-1,0,0,0)
val lightColorAmb = _lightColor*0.9
val ambientColor = _lightColor*0.1
val lightColorNoAmb = _lightColor
val projMatrix = Matrix4.perspective(fov, toReal(Renderer.windowWidth)/Renderer.windowHeight)
skyboxMesh.draw(you.rot.toMatrix, projMatrix, fcoef, fcoefHalf)
var moveDir = Vec3.zero
if (Key.A.pressed) moveDir += moveInDir(-1, Vec3.xAxis)
if (Key.D.pressed) moveDir += moveInDir(1, Vec3.xAxis)
if (Key.W.pressed) moveDir += moveInDir(1, Vec3.yAxis)
if (Key.S.pressed) moveDir += moveInDir(-1, Vec3.yAxis)
if (Key.Shift.pressed) moveDir += moveInDir(1, Vec3.zAxis)
if (Key.Ctrl.pressed) moveDir += moveInDir(-1, Vec3.zAxis)
if (Key.Q.pressed) you.rotVel += you.rot * Vec3(0, 0, keyRotSpeed*rotSpeed*dt)
if (Key.E.pressed) you.rotVel += you.rot * Vec3(0, 0, -keyRotSpeed*rotSpeed*dt)
// TODO: why doesn't this work?
/*if (Key.T.pressed) earth.rotate(60*30*dt)
if (Key.G.pressed) earth.rotate(-60*30*dt)*/
cooldown -= dt
if (Key.Space.pressed && cooldown <= 0) {
cooldown = weaponCooldown
val mesh = new LitProgram.Mesh(VertexDataUsage.Static)
val centralSquare = Seq(Vec3(1,0,0), Vec3(0,1,0), Vec3(-1,0,0), Vec3(0,-1,0), Vec3(1,0,0)).map(_*projRadius)
val top = Vec3(0,0,1)*projRadius
val bottom = Vec3(0,0,-1)*projRadius
for (i <- 0 until 4) {
val vertsTop = mesh.verts(Seq(top, centralSquare(i), centralSquare(i+1)).map(vert => (vert,vert,projColor)))
val vertsBot = mesh.verts(Seq(bottom, centralSquare(i), centralSquare(i+1)).map(vert => (vert,vert,projColor)))
mesh.polygon(vertsTop.strip)
mesh.polygon(vertsBot.strip)
}
val body = new Body(you.pos, you.rot, mesh, LitProgram)
body.vel = you.vel + you.rot.rotate(Vec3(0,0,-projSpeed))
projs += body
}
if (Key.R.pressed) {
fov /= 1.005
println(round(fov))
}
if (Key.F.pressed) {
fov *= 1.005
println(round(fov))
}
      val moveDist = if (moveDir == Vec3.zero) Vec3.zero else moveDir.normalize * (flySpeed*dt)
you.vel += moveDist
def objects = Seq(you, objBody) ++ projs.toSeq
objects.foreach(_.simulate(dt, true))
planets.foreach(_.simulate(dt, false))
val matrix = you.getMatrix
for (star <- farPlanets) star.mesh.draw(matrix*star.drawMatrix, projMatrix, fcoef, fcoefHalf)
moonTexture.bind(0)
for (moon <- moons) moon.mesh.draw(matrix*moon.drawMatrix, projMatrix, matrix*lightDir, lightColorAmb*1.5, ambientColor*0.5, fcoef, fcoefHalf)
earthDayTexture.bind(0)
earthNightTexture.bind(1)
for (planet <- planets) planet.mesh.draw(matrix*planet.drawMatrix, projMatrix, matrix*lightDir, lightColorNoAmb, fcoef, fcoefHalf)
for (obj <- objects if obj.mesh != null) obj.mesh.draw(matrix*obj.drawMatrix, projMatrix, matrix*lightDir, lightColorAmb, ambientColor, fcoef, fcoefHalf)
def round2(x:Vec3) = "(" + round(x.x) + ", " + round(x.y) + ", " + round(x.z) + ")"
glDisable(GL_DEPTH_TEST)
Renderer.print(font, Vec2.zero,
"pos: " + round2(you.pos/1000) + " km",
"pos': " + round2(you.vel/1000) + " km/s",
"speed: " + round(you.vel.length/1000) + " km/s",
"rot: " + you.rot.round2,
"rot': " + round2(you.rotVel),
"dist to earth: " + round((earthPos.dist(you.pos) - earthRadius)/1000) + " km",
"dist to moon: " + round((moonPos.dist(you.pos) - moonRadius)/1000) + " km",
"dist to sun: " + round((sunPos.dist(you.pos) - sunRadius)/1000) + " km",
texSizeString
)
glEnable(GL_DEPTH_TEST)
Renderer.updateScreen()
Renderer.handleInput()
}
exit()
}
def getStarColor() = {
    Color(1 - Math.sqrt(Random.nextReal())) // This ensures that there are more dim stars than bright ones
}
override def handleKeyPress(key:Key, char:Option[Char]) = key match {
case Key.Escape => exit(0)
case Key.One =>
projSpeed = defaultProjSpeed/3
projColor = Color.blue
flySpeed = defaultFlySpeed/10
case Key.Two =>
projSpeed = defaultProjSpeed
projColor = defaultProjColor
flySpeed = defaultFlySpeed
case Key.Three =>
projSpeed = defaultProjSpeed*2
projColor = Color.red
flySpeed = defaultFlySpeed*10
case Key.Four =>
projSpeed = defaultProjSpeed*4
projColor = Color.yellow
flySpeed = defaultFlySpeed*100
case Key.Five =>
projSpeed = defaultProjSpeed*8
projColor = Color.cyan
flySpeed = defaultFlySpeed*1000
case Key.Six =>
projSpeed = defaultProjSpeed*16
projColor = Color.magenta
flySpeed = defaultFlySpeed*10000
case Key.Seven =>
projSpeed = defaultProjSpeed*32
projColor = Color.blue
flySpeed = defaultFlySpeed*100000
case Key.Eight =>
projSpeed = defaultProjSpeed*64
projColor = Color.green
flySpeed = defaultFlySpeed*10000000
case Key.Nine =>
projSpeed = defaultProjSpeed*128
projColor = Color.red
flySpeed = defaultFlySpeed*100000000
case Key.Period => you.rotVel = Vec3(0,0,0)
case Key.SysRq =>
glReadBuffer(GL_FRONT)
val width = Renderer.windowWidth
val height = Renderer.windowHeight
val bpp = 4
val buffer = BufferUtils.createByteBuffer(width * height * bpp)
glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, buffer)
val dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS").format(new Date())
val file = new File("screenshots/screenshot-" + dateFormat + ".png")
val format = "PNG"
val image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
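      // glReadPixels returns rows bottom-up (GL's origin is the bottom-left
      // corner), so the copy below flips vertically via `height - (y+1)`.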
for (x <- 0 until width; y <- 0 until height) {
val i = (x + width*y) * bpp
val r = buffer.get(i) & 0xff
val g = buffer.get(i+1) & 0xff
val b = buffer.get(i+2) & 0xff
image.setRGB(x, height - (y+1), (0xff << 24) | (r << 16) | (g << 8) | b)
}
ImageIO.write(image, format, file)
case _ =>
}
override def handleMouseMove(pos:IVec2, d:IVec2) {
you.rotVel += you.rot * Vec3(-d.y*rotSpeed*dt, 0, 0)
you.rotVel += you.rot * Vec3(0, -d.x*rotSpeed*dt, 0)
}
}
|
nstoddard/space-demo
|
src/Main.scala
|
Scala
|
mit
| 21,185 |
package com.rklaehn.abc
import cats._
import cats.implicits._
case class NoEquals(x: Int) {
override def hashCode(): Int = throw new UnsupportedOperationException
override def equals(x: Any): Boolean = throw new UnsupportedOperationException
override def toString: String = throw new UnsupportedOperationException
}
trait NoEquals0 {
implicit def eqv: Eq[NoEquals] = Eq.by(_.x)
}
trait NoEquals1 extends NoEquals0 {
implicit def order: Order[NoEquals] = Order.by(_.x)
}
object NoEquals extends NoEquals1 {
implicit def hash: Hash[NoEquals] = Hash.by(_.x)
implicit def show: Show[NoEquals] = Show.show(_.x.toString)
}
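// Illustrative usage sketch (not part of the original file): comparisons must
// go through the cats type classes, so any accidental call to the universal
// equals/hashCode/toString fails loudly in tests.
object NoEqualsUsage {
  def demo(): Unit = {
    val a = NoEquals(1)
    val b = NoEquals(1)
    assert(Eq[NoEquals].eqv(a, b))             // delegates to the field x
    assert(Order[NoEquals].compare(a, b) == 0) // ordering by x
    assert(Show[NoEquals].show(a) == "1")      // never calls toString
  }
}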
|
rklaehn/abc
|
tests/src/test/scala/com/rklaehn/abc/NoEquals.scala
|
Scala
|
apache-2.0
| 636 |
package observatory
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.prop.Checkers
import scala.collection.concurrent.TrieMap
trait InteractionTest extends FunSuite with Checkers {
}
|
masipauskas/coursera-scala
|
capstone/observatory/src/test/scala/observatory/InteractionTest.scala
|
Scala
|
unlicense
| 260 |
package com.versionone.httpclient.sampleApp
import com.versionone.httpclient._
import org.slf4j.LoggerFactory
object SampleApp {
def main(args: Array[String]): Unit = {
    OAuth2SettingsFuncs fromFiles("client_secrets.json", "stored_credentials.json") match {
      case None =>
        sys error "Unable to read OAuth2 settings"
case Some(settings) => {
object logAdapter extends SimpleLogger {
val log = LoggerFactory getLogger "SampleApp"
def debug(s:String) = log debug s
def info(s:String) = log info s
def error(s:String) = log error s
}
val v1 = new V1HttpClient(settings, logAdapter, "V1Http-ScalaSample/1.0")
val me = v1 Query """
from: Member
select:
- Name
where:
isSelf: true
"""
println(s"$me")
}
}
}
}
|
versionone/HttpClient.Scala
|
src/test/scala/com/versionone/httpclient/sampleApp/SampleApp.scala
|
Scala
|
bsd-3-clause
| 890 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.views.application.assets.insurancePolicy
import iht.controllers.application.assets.insurancePolicy.routes
import iht.testhelpers.CommonBuilder
import iht.testhelpers.TestHelper._
import iht.views.application.{ApplicationPageBehaviour, CancelComponent, Guidance}
import iht.views.html.application.asset.insurancePolicy.insurance_policy_details_final_guidance
import play.api.mvc.Call
class InsurancePolicyDetailsGuidanceViewTest extends ApplicationPageBehaviour {
val giftsLocation = CommonBuilder.DefaultCall1
val deceasedName:String = "Deceased"
override def pageTitle = messagesApi("iht.estateReport.assets.insurancePolicies.premiumsPaidByOther", deceasedName)
override def browserTitle = messagesApi("iht.estateReport.assets.insurancePolicies.premiumsPaidByOther", messagesApi("iht.the.deceased"))
lazy val insurancePolicyDetailsFinalGuidanceView: insurance_policy_details_final_guidance = app.injector.instanceOf[insurance_policy_details_final_guidance]
override def view:String = insurancePolicyDetailsFinalGuidanceView(giftsLocation, deceasedName)(
createFakeRequest(isAuthorised = false), messages).toString()
override def guidance: Guidance = guidance(Set(messagesApi("page.iht.application.insurance.policies.section7.guidance", deceasedName),
messagesApi("page.iht.application.insurance.policies.section7.guidance2", deceasedName)))
override def formTarget: Option[Call] = None
override def cancelComponent: Option[CancelComponent] = Some(CancelComponent(
routes.InsurancePolicyOverviewController.onPageLoad(),
messagesApi("site.link.return.insurance.policies"),
InsurancePlacedInTrustYesNoID
))
"InsurancePolicyDetailsGuidanceView" must {
behave like applicationPage
"show the return link with to the gifts" in {
val giftsButton = doc.getElementById("return-button-gifts")
giftsButton.attr("href") mustBe giftsLocation.url
giftsButton.text() mustBe messagesApi("site.link.go.to.gifts", deceasedName)
}
}
}
|
hmrc/iht-frontend
|
test/iht/views/application/assets/insurancePolicy/InsurancePolicyDetailsGuidanceViewTest.scala
|
Scala
|
apache-2.0
| 2,619 |
class Lst[+A] {
def map[B, That](f: A => B)(implicit bf: collection.BuildFrom[List[A], B, That]): That = ???
}
object Test {
def foo(l: Lst[Int]) = l.map[Int, List[String]](x => 1)
}
|
scala/scala
|
test/files/neg/t2462a.scala
|
Scala
|
apache-2.0
| 188 |
package org.bitcoins.marshallers.transaction
import org.bitcoins.marshallers.RawBitcoinSerializer
import org.bitcoins.protocol.transaction.{TransactionOutPointImpl, TransactionOutPoint}
import org.bitcoins.util.BitcoinSUtil
/**
* Source for serialization
* https://bitcoin.org/en/developer-reference#outpoint
*
*/
trait RawTransactionOutPointParser extends RawBitcoinSerializer[TransactionOutPoint] {
override def read(bytes : List[Byte]) : TransactionOutPoint = {
val txId : List[Byte] = bytes.slice(0,32).reverse
val index : BigInt = BigInt(bytes.slice(32, bytes.size).toArray.reverse)
TransactionOutPointImpl(BitcoinSUtil.encodeHex(txId), index.toInt)
}
def write(outPoint : TransactionOutPoint) : String = {
val indexHexWithoutPadding : String = addPrecedingZero(outPoint.vout.toHexString)
val indexHex = addPadding(8,indexHexWithoutPadding)
val littleEndianTxId = BitcoinSUtil.encodeHex(BitcoinSUtil.decodeHex(outPoint.txId).reverse)
littleEndianTxId + indexHex
}
}
object RawTransactionOutPointParser extends RawTransactionOutPointParser
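// Illustrative round trip (not part of the original file; assumes `hex` is a
// valid 36-byte serialized outpoint and that BitcoinSUtil.decodeHex yields the
// List[Byte] this parser expects): reading then writing should reproduce `hex`.
object RawTransactionOutPointParserExample {
  def roundTrip(hex: String): String =
    RawTransactionOutPointParser.write(RawTransactionOutPointParser.read(BitcoinSUtil.decodeHex(hex)))
}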
|
Christewart/scalacoin
|
src/main/scala/org/bitcoins/marshallers/transaction/RawTransactionOutPointParser.scala
|
Scala
|
mit
| 1,093 |
package spark.repl
import scala.collection.mutable.Set
object Main {
private var _interp: SparkILoop = null
def interp = _interp
private[repl] def interp_=(i: SparkILoop) { _interp = i }
def main(args: Array[String]) {
_interp = new SparkILoop
_interp.process(args)
}
}
|
javelinjs/spark
|
repl/src/main/scala/spark/repl/Main.scala
|
Scala
|
bsd-3-clause
| 299 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.concurrent.Future
import scala.concurrent.duration.Duration
import org.apache.spark.{ExecutorAllocationClient, SparkEnv, SparkException, TaskState}
import org.apache.spark.internal.Logging
import org.apache.spark.rpc._
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._
import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend.ENDPOINT_NAME
import org.apache.spark.util.{RpcUtils, SerializableBuffer, ThreadUtils, Utils}
/**
* A scheduler backend that waits for coarse-grained executors to connect.
* This backend holds onto each executor for the duration of the Spark job rather than relinquishing
* executors whenever a task is done and asking the scheduler to launch a new executor for
* each new task. Executors may be launched in a variety of ways, such as Mesos tasks for the
* coarse-grained Mesos mode or standalone processes for Spark's standalone deploy mode
* (spark.deploy.*).
*/
private[spark]
class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: RpcEnv)
extends ExecutorAllocationClient with SchedulerBackend with Logging
{
// Use an atomic variable to track total number of cores in the cluster for simplicity and speed
protected val totalCoreCount = new AtomicInteger(0)
// Total number of executors that are currently registered
protected val totalRegisteredExecutors = new AtomicInteger(0)
protected val conf = scheduler.sc.conf
private val maxRpcMessageSize = RpcUtils.maxMessageSizeBytes(conf)
private val defaultAskTimeout = RpcUtils.askRpcTimeout(conf)
// Submit tasks only after (registered resources / total expected resources)
  // is at least this value, which is a double between 0 and 1.
private val _minRegisteredRatio =
math.min(1, conf.getDouble("spark.scheduler.minRegisteredResourcesRatio", 0))
// Submit tasks after maxRegisteredWaitingTime milliseconds
// if minRegisteredRatio has not yet been reached
private val maxRegisteredWaitingTimeMs =
conf.getTimeAsMs("spark.scheduler.maxRegisteredResourcesWaitingTime", "30s")
private val createTime = System.currentTimeMillis()
// Accessing `executorDataMap` in `DriverEndpoint.receive/receiveAndReply` doesn't need any
// protection. But accessing `executorDataMap` out of `DriverEndpoint.receive/receiveAndReply`
// must be protected by `CoarseGrainedSchedulerBackend.this`. Besides, `executorDataMap` should
// only be modified in `DriverEndpoint.receive/receiveAndReply` with protection by
// `CoarseGrainedSchedulerBackend.this`.
private val executorDataMap = new HashMap[String, ExecutorData]
// Number of executors requested from the cluster manager that have not registered yet
@GuardedBy("CoarseGrainedSchedulerBackend.this")
private var numPendingExecutors = 0
private val listenerBus = scheduler.sc.listenerBus
// Executors we have requested the cluster manager to kill that have not died yet; maps
// the executor ID to whether it was explicitly killed by the driver (and thus shouldn't
// be considered an app-related failure).
@GuardedBy("CoarseGrainedSchedulerBackend.this")
private val executorsPendingToRemove = new HashMap[String, Boolean]
// A map to store hostname with its possible task number running on it
@GuardedBy("CoarseGrainedSchedulerBackend.this")
protected var hostToLocalTaskCount: Map[String, Int] = Map.empty
// The number of pending tasks which is locality required
@GuardedBy("CoarseGrainedSchedulerBackend.this")
protected var localityAwareTasks = 0
// The num of current max ExecutorId used to re-register appMaster
@volatile protected var currentExecutorIdCounter = 0
class DriverEndpoint(override val rpcEnv: RpcEnv, sparkProperties: Seq[(String, String)])
extends ThreadSafeRpcEndpoint with Logging {
// Executors that have been lost, but for which we don't yet know the real exit reason.
protected val executorsPendingLossReason = new HashSet[String]
protected val addressToExecutorId = new HashMap[RpcAddress, String]
private val reviveThread =
ThreadUtils.newDaemonSingleThreadScheduledExecutor("driver-revive-thread")
override def onStart() {
// Periodically revive offers to allow delay scheduling to work
val reviveIntervalMs = conf.getTimeAsMs("spark.scheduler.revive.interval", "1s")
reviveThread.scheduleAtFixedRate(new Runnable {
override def run(): Unit = Utils.tryLogNonFatalError {
Option(self).foreach(_.send(ReviveOffers))
}
}, 0, reviveIntervalMs, TimeUnit.MILLISECONDS)
}
override def receive: PartialFunction[Any, Unit] = {
case StatusUpdate(executorId, taskId, state, data) =>
scheduler.statusUpdate(taskId, state, data.value)
if (TaskState.isFinished(state)) {
executorDataMap.get(executorId) match {
case Some(executorInfo) =>
executorInfo.freeCores += scheduler.CPUS_PER_TASK
makeOffers(executorId)
case None =>
// Ignoring the update since we don't know about the executor.
logWarning(s"Ignored task status update ($taskId state $state) " +
s"from unknown executor with ID $executorId")
}
}
case ReviveOffers =>
makeOffers()
case KillTask(taskId, executorId, interruptThread) =>
executorDataMap.get(executorId) match {
case Some(executorInfo) =>
executorInfo.executorEndpoint.send(KillTask(taskId, executorId, interruptThread))
case None =>
// Ignoring the task kill since the executor is not registered.
logWarning(s"Attempted to kill task $taskId for unknown executor $executorId.")
}
case KillExecutorsOnHost(host) =>
scheduler.getExecutorsAliveOnHost(host).foreach { exec =>
killExecutors(exec.toSeq, replace = true, force = true)
}
}
override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
case RegisterExecutor(executorId, executorRef, hostname, cores, logUrls) =>
if (executorDataMap.contains(executorId)) {
executorRef.send(RegisterExecutorFailed("Duplicate executor ID: " + executorId))
context.reply(true)
} else if (scheduler.nodeBlacklist != null &&
scheduler.nodeBlacklist.contains(hostname)) {
// If the cluster manager gives us an executor on a blacklisted node (because it
// already started allocating those resources before we informed it of our blacklist,
// or if it ignored our blacklist), then we reject that executor immediately.
logInfo(s"Rejecting $executorId as it has been blacklisted.")
executorRef.send(RegisterExecutorFailed(s"Executor is blacklisted: $executorId"))
context.reply(true)
} else {
// If the executor's rpc env is not listening for incoming connections, `hostPort`
// will be null, and the client connection should be used to contact the executor.
val executorAddress = if (executorRef.address != null) {
executorRef.address
} else {
context.senderAddress
}
logInfo(s"Registered executor $executorRef ($executorAddress) with ID $executorId")
addressToExecutorId(executorAddress) = executorId
totalCoreCount.addAndGet(cores)
totalRegisteredExecutors.addAndGet(1)
val data = new ExecutorData(executorRef, executorRef.address, hostname,
cores, cores, logUrls)
// This must be synchronized because variables mutated
// in this block are read when requesting executors
CoarseGrainedSchedulerBackend.this.synchronized {
executorDataMap.put(executorId, data)
if (currentExecutorIdCounter < executorId.toInt) {
currentExecutorIdCounter = executorId.toInt
}
if (numPendingExecutors > 0) {
numPendingExecutors -= 1
logDebug(s"Decremented number of pending executors ($numPendingExecutors left)")
}
}
executorRef.send(RegisteredExecutor)
// Note: some tests expect the reply to come after we put the executor in the map
context.reply(true)
listenerBus.post(
SparkListenerExecutorAdded(System.currentTimeMillis(), executorId, data))
makeOffers()
}
case StopDriver =>
context.reply(true)
stop()
case StopExecutors =>
logInfo("Asking each executor to shut down")
for ((_, executorData) <- executorDataMap) {
executorData.executorEndpoint.send(StopExecutor)
}
context.reply(true)
case RemoveExecutor(executorId, reason) =>
// We will remove the executor's state and cannot restore it. However, the connection
// between the driver and the executor may be still alive so that the executor won't exit
// automatically, so try to tell the executor to stop itself. See SPARK-13519.
executorDataMap.get(executorId).foreach(_.executorEndpoint.send(StopExecutor))
removeExecutor(executorId, reason)
context.reply(true)
case RetrieveSparkAppConfig =>
val reply = SparkAppConfig(sparkProperties,
SparkEnv.get.securityManager.getIOEncryptionKey())
context.reply(reply)
}
// Make fake resource offers on all executors
private def makeOffers() {
// Make sure no executor is killed while some task is launching on it
val taskDescs = CoarseGrainedSchedulerBackend.this.synchronized {
// Filter out executors under killing
val activeExecutors = executorDataMap.filterKeys(executorIsAlive)
val workOffers = activeExecutors.map { case (id, executorData) =>
new WorkerOffer(id, executorData.executorHost, executorData.freeCores)
}.toIndexedSeq
scheduler.resourceOffers(workOffers)
}
if (!taskDescs.isEmpty) {
launchTasks(taskDescs)
}
}
override def onDisconnected(remoteAddress: RpcAddress): Unit = {
addressToExecutorId
.get(remoteAddress)
.foreach(removeExecutor(_, SlaveLost("Remote RPC client disassociated. Likely due to " +
"containers exceeding thresholds, or network issues. Check driver logs for WARN " +
"messages.")))
}
// Make fake resource offers on just one executor
private def makeOffers(executorId: String) {
// Make sure no executor is killed while some task is launching on it
val taskDescs = CoarseGrainedSchedulerBackend.this.synchronized {
// Filter out executors under killing
if (executorIsAlive(executorId)) {
val executorData = executorDataMap(executorId)
val workOffers = IndexedSeq(
new WorkerOffer(executorId, executorData.executorHost, executorData.freeCores))
scheduler.resourceOffers(workOffers)
} else {
Seq.empty
}
}
if (!taskDescs.isEmpty) {
launchTasks(taskDescs)
}
}
private def executorIsAlive(executorId: String): Boolean = synchronized {
!executorsPendingToRemove.contains(executorId) &&
!executorsPendingLossReason.contains(executorId)
}
// Launch tasks returned by a set of resource offers
private def launchTasks(tasks: Seq[Seq[TaskDescription]]) {
for (task <- tasks.flatten) {
val serializedTask = TaskDescription.encode(task)
if (serializedTask.limit >= maxRpcMessageSize) {
scheduler.taskIdToTaskSetManager.get(task.taskId).foreach { taskSetMgr =>
try {
var msg = "Serialized task %s:%d was %d bytes, which exceeds max allowed: " +
"spark.rpc.message.maxSize (%d bytes). Consider increasing " +
"spark.rpc.message.maxSize or using broadcast variables for large values."
msg = msg.format(task.taskId, task.index, serializedTask.limit, maxRpcMessageSize)
taskSetMgr.abort(msg)
} catch {
case e: Exception => logError("Exception in error callback", e)
}
}
}
else {
val executorData = executorDataMap(task.executorId)
executorData.freeCores -= scheduler.CPUS_PER_TASK
logDebug(s"Launching task ${task.taskId} on executor id: ${task.executorId} hostname: " +
s"${executorData.executorHost}.")
executorData.executorEndpoint.send(LaunchTask(new SerializableBuffer(serializedTask)))
}
}
}
// Remove a disconnected slave from the cluster
private def removeExecutor(executorId: String, reason: ExecutorLossReason): Unit = {
logDebug(s"Asked to remove executor $executorId with reason $reason")
executorDataMap.get(executorId) match {
case Some(executorInfo) =>
// This must be synchronized because variables mutated
// in this block are read when requesting executors
val killed = CoarseGrainedSchedulerBackend.this.synchronized {
addressToExecutorId -= executorInfo.executorAddress
executorDataMap -= executorId
executorsPendingLossReason -= executorId
executorsPendingToRemove.remove(executorId).getOrElse(false)
}
totalCoreCount.addAndGet(-executorInfo.totalCores)
totalRegisteredExecutors.addAndGet(-1)
scheduler.executorLost(executorId, if (killed) ExecutorKilled else reason)
listenerBus.post(
SparkListenerExecutorRemoved(System.currentTimeMillis(), executorId, reason.toString))
case None =>
// SPARK-15262: If an executor is still alive even after the scheduler has removed
// its metadata, we may receive a heartbeat from that executor and tell its block
// manager to reregister itself. If that happens, the block manager master will know
// about the executor, but the scheduler will not. Therefore, we should remove the
// executor from the block manager when we hit this case.
scheduler.sc.env.blockManager.master.removeExecutorAsync(executorId)
logInfo(s"Asked to remove non-existent executor $executorId")
}
}
/**
* Stop making resource offers for the given executor. The executor is marked as lost with
* the loss reason still pending.
*
* @return Whether executor should be disabled
*/
protected def disableExecutor(executorId: String): Boolean = {
val shouldDisable = CoarseGrainedSchedulerBackend.this.synchronized {
if (executorIsAlive(executorId)) {
executorsPendingLossReason += executorId
true
} else {
        // Returns true for explicitly killed executors, since we still need to
        // get their pending loss reasons; for all others, returns false.
executorsPendingToRemove.contains(executorId)
}
}
if (shouldDisable) {
logInfo(s"Disabling executor $executorId.")
scheduler.executorLost(executorId, LossReasonPending)
}
shouldDisable
}
override def onStop() {
reviveThread.shutdownNow()
}
}
var driverEndpoint: RpcEndpointRef = null
protected def minRegisteredRatio: Double = _minRegisteredRatio
override def start() {
val properties = new ArrayBuffer[(String, String)]
for ((key, value) <- scheduler.sc.conf.getAll) {
if (key.startsWith("spark.")) {
properties += ((key, value))
}
}
// TODO (prashant) send conf instead of properties
driverEndpoint = createDriverEndpointRef(properties)
}
protected def createDriverEndpointRef(
properties: ArrayBuffer[(String, String)]): RpcEndpointRef = {
rpcEnv.setupEndpoint(ENDPOINT_NAME, createDriverEndpoint(properties))
}
protected def createDriverEndpoint(properties: Seq[(String, String)]): DriverEndpoint = {
new DriverEndpoint(rpcEnv, properties)
}
def stopExecutors() {
try {
if (driverEndpoint != null) {
logInfo("Shutting down all executors")
driverEndpoint.askSync[Boolean](StopExecutors)
}
} catch {
case e: Exception =>
throw new SparkException("Error asking standalone scheduler to shut down executors", e)
}
}
override def stop() {
stopExecutors()
try {
if (driverEndpoint != null) {
driverEndpoint.askSync[Boolean](StopDriver)
}
} catch {
case e: Exception =>
throw new SparkException("Error stopping standalone scheduler's driver endpoint", e)
}
}
/**
* Reset the state of CoarseGrainedSchedulerBackend to the initial state. Currently it will only
* be called in the yarn-client mode when AM re-registers after a failure.
   */
protected def reset(): Unit = {
val executors = synchronized {
numPendingExecutors = 0
executorsPendingToRemove.clear()
Set() ++ executorDataMap.keys
}
// Remove all the lingering executors that should be removed but not yet. The reason might be
// because (1) disconnected event is not yet received; (2) executors die silently.
executors.foreach { eid =>
removeExecutor(eid, SlaveLost("Stale executor after cluster manager re-registered."))
}
}
override def reviveOffers() {
driverEndpoint.send(ReviveOffers)
}
override def killTask(taskId: Long, executorId: String, interruptThread: Boolean) {
driverEndpoint.send(KillTask(taskId, executorId, interruptThread))
}
override def defaultParallelism(): Int = {
conf.getInt("spark.default.parallelism", math.max(totalCoreCount.get(), 2))
}
/**
* Called by subclasses when notified of a lost worker. It just fires the message and returns
* at once.
*/
protected def removeExecutor(executorId: String, reason: ExecutorLossReason): Unit = {
// Only log the failure since we don't care about the result.
driverEndpoint.ask[Boolean](RemoveExecutor(executorId, reason)).onFailure { case t =>
logError(t.getMessage, t)
}(ThreadUtils.sameThread)
}
def sufficientResourcesRegistered(): Boolean = true
override def isReady(): Boolean = {
if (sufficientResourcesRegistered) {
logInfo("SchedulerBackend is ready for scheduling beginning after " +
s"reached minRegisteredResourcesRatio: $minRegisteredRatio")
return true
}
if ((System.currentTimeMillis() - createTime) >= maxRegisteredWaitingTimeMs) {
logInfo("SchedulerBackend is ready for scheduling beginning after waiting " +
s"maxRegisteredResourcesWaitingTime: $maxRegisteredWaitingTimeMs(ms)")
return true
}
false
}
/**
* Return the number of executors currently registered with this backend.
*/
private def numExistingExecutors: Int = executorDataMap.size
override def getExecutorIds(): Seq[String] = {
executorDataMap.keySet.toSeq
}
/**
* Request an additional number of executors from the cluster manager.
* @return whether the request is acknowledged.
*/
final override def requestExecutors(numAdditionalExecutors: Int): Boolean = {
if (numAdditionalExecutors < 0) {
throw new IllegalArgumentException(
"Attempted to request a negative number of additional executor(s) " +
s"$numAdditionalExecutors from the cluster manager. Please specify a positive number!")
}
logInfo(s"Requesting $numAdditionalExecutors additional executor(s) from the cluster manager")
val response = synchronized {
numPendingExecutors += numAdditionalExecutors
logDebug(s"Number of pending executors is now $numPendingExecutors")
// Account for executors pending to be added or removed
doRequestTotalExecutors(
numExistingExecutors + numPendingExecutors - executorsPendingToRemove.size)
}
defaultAskTimeout.awaitResult(response)
}
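  // Worked example (illustrative; not from the original source): with 10
  // registered executors, 2 already pending and 1 pending removal, a call to
  // requestExecutors(3) raises numPendingExecutors to 5 and asks the cluster
  // manager for a total of 10 + 5 - 1 = 14 executors.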
/**
* Update the cluster manager on our scheduling needs. Three bits of information are included
* to help it make decisions.
* @param numExecutors The total number of executors we'd like to have. The cluster manager
* shouldn't kill any running executor to reach this number, but,
* if all existing executors were to die, this is the number of executors
* we'd want to be allocated.
* @param localityAwareTasks The number of tasks in all active stages that have a locality
* preferences. This includes running, pending, and completed tasks.
* @param hostToLocalTaskCount A map of hosts to the number of tasks from all active stages
   *                             that would like to run on that host.
* This includes running, pending, and completed tasks.
* @return whether the request is acknowledged by the cluster manager.
*/
final override def requestTotalExecutors(
numExecutors: Int,
localityAwareTasks: Int,
hostToLocalTaskCount: Map[String, Int]
): Boolean = {
if (numExecutors < 0) {
throw new IllegalArgumentException(
"Attempted to request a negative number of executor(s) " +
s"$numExecutors from the cluster manager. Please specify a positive number!")
}
val response = synchronized {
this.localityAwareTasks = localityAwareTasks
this.hostToLocalTaskCount = hostToLocalTaskCount
numPendingExecutors =
math.max(numExecutors - numExistingExecutors + executorsPendingToRemove.size, 0)
doRequestTotalExecutors(numExecutors)
}
defaultAskTimeout.awaitResult(response)
}
/**
* Request executors from the cluster manager by specifying the total number desired,
* including existing pending and running executors.
*
* The semantics here guarantee that we do not over-allocate executors for this application,
* since a later request overrides the value of any prior request. The alternative interface
* of requesting a delta of executors risks double counting new executors when there are
* insufficient resources to satisfy the first request. We make the assumption here that the
* cluster manager will eventually fulfill all requests when resources free up.
*
* @return a future whose evaluation indicates whether the request is acknowledged.
*/
protected def doRequestTotalExecutors(requestedTotal: Int): Future[Boolean] =
Future.successful(false)
/**
* Request that the cluster manager kill the specified executors.
*
* When asking the executor to be replaced, the executor loss is considered a failure, and
* killed tasks that are running on the executor will count towards the failure limits. If no
* replacement is being requested, then the tasks will not count towards the limit.
*
* @param executorIds identifiers of executors to kill
* @param replace whether to replace the killed executors with new ones, default false
* @param force whether to force kill busy executors, default false
* @return the ids of the executors acknowledged by the cluster manager to be removed.
*/
final override def killExecutors(
executorIds: Seq[String],
replace: Boolean,
force: Boolean): Seq[String] = {
logInfo(s"Requesting to kill executor(s) ${executorIds.mkString(", ")}")
val response = synchronized {
val (knownExecutors, unknownExecutors) = executorIds.partition(executorDataMap.contains)
unknownExecutors.foreach { id =>
logWarning(s"Executor to kill $id does not exist!")
}
// If an executor is already pending to be removed, do not kill it again (SPARK-9795)
// If this executor is busy, do not kill it unless we are told to force kill it (SPARK-9552)
val executorsToKill = knownExecutors
.filter { id => !executorsPendingToRemove.contains(id) }
.filter { id => force || !scheduler.isExecutorBusy(id) }
executorsToKill.foreach { id => executorsPendingToRemove(id) = !replace }
logInfo(s"Actual list of executor(s) to be killed is ${executorsToKill.mkString(", ")}")
// If we do not wish to replace the executors we kill, sync the target number of executors
// with the cluster manager to avoid allocating new ones. When computing the new target,
// take into account executors that are pending to be added or removed.
val adjustTotalExecutors =
if (!replace) {
doRequestTotalExecutors(
numExistingExecutors + numPendingExecutors - executorsPendingToRemove.size)
} else {
numPendingExecutors += knownExecutors.size
Future.successful(true)
}
val killExecutors: Boolean => Future[Boolean] =
if (!executorsToKill.isEmpty) {
_ => doKillExecutors(executorsToKill)
} else {
_ => Future.successful(false)
}
val killResponse = adjustTotalExecutors.flatMap(killExecutors)(ThreadUtils.sameThread)
killResponse.flatMap(killSuccessful =>
Future.successful (if (killSuccessful) executorsToKill else Seq.empty[String])
)(ThreadUtils.sameThread)
}
defaultAskTimeout.awaitResult(response)
}
/**
* Kill the given list of executors through the cluster manager.
* @return whether the kill request is acknowledged.
*/
protected def doKillExecutors(executorIds: Seq[String]): Future[Boolean] =
Future.successful(false)
/**
* Request that the cluster manager kill all executors on a given host.
* @return whether the kill request is acknowledged.
*/
final override def killExecutorsOnHost(host: String): Boolean = {
logInfo(s"Requesting to kill any and all executors on host ${host}")
// A potential race exists if a new executor attempts to register on a host
    // that is on the blacklist and is no longer valid. To avoid this race,
// all executor registration and killing happens in the event loop. This way, either
// an executor will fail to register, or will be killed when all executors on a host
// are killed.
// Kill all the executors on this host in an event loop to ensure serialization.
driverEndpoint.send(KillExecutorsOnHost(host))
true
}
}
private[spark] object CoarseGrainedSchedulerBackend {
val ENDPOINT_NAME = "CoarseGrainedScheduler"
}
|
jianran/spark
|
core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
|
Scala
|
apache-2.0
| 27,531 |
package scala.slick.driver
import scala.slick.ql.Query
import scala.slick.session.{Session, PositionedParameters}
import scala.slick.util.RecordLinearizer
class BasicUpdateInvoker[T] (query: Query[_, T], profile: BasicProfile) {
protected lazy val built = profile.buildUpdateStatement(query)
def updateStatement = getStatement
protected def getStatement = built.sql
def update(value: T)(implicit session: Session): Int = session.withPreparedStatement(updateStatement) { st =>
st.clearParameters
val pp = new PositionedParameters(st)
built.linearizer.narrowedLinearizer.asInstanceOf[RecordLinearizer[T]].setParameter(profile, pp, Some(value))
built.setter(pp, null)
st.executeUpdate
}
def updateInvoker: this.type = this
}
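// Illustrative usage (not part of the original file; `usersByIdQuery`,
// `updatedUser` and `session` are hypothetical): the UPDATE statement is built
// once, lazily, and `value` is bound to its parameters on every call.
//   new BasicUpdateInvoker(usersByIdQuery, profile).update(updatedUser)(session)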
|
szeiger/scala-query
|
src/main/scala/scala/slick/driver/BasicUpdateInvoker.scala
|
Scala
|
bsd-2-clause
| 761 |
package com.sksamuel.avro4s.record.decoder
import com.sksamuel.avro4s._
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.GenericData
import org.apache.avro.util.Utf8
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.language.higherKinds
case class TestVectorBooleans(booleans: Vector[Boolean])
case class TestArrayBooleans(booleans: Array[Boolean])
case class TestListBooleans(booleans: List[Boolean])
case class TestSetBooleans(booleans: Set[Boolean])
case class TestSetString(strings: Set[String])
case class TestSetDoubles(doubles: Set[Double])
case class TestSeqBooleans(booleans: Seq[Boolean])
case class TestArrayRecords(records: Array[Record])
case class TestSeqRecords(records: Seq[Record])
case class TestListRecords(records: List[Record])
case class TestSetRecords(records: Set[Record])
case class TestVectorRecords(records: Vector[Record])
case class Record(str: String, double: Double)
case class TestSeqTuple2(tuples: Seq[Tuple2[String, Int]])
case class TestSeqTuple3(tuples: Seq[Tuple3[String, Int, Boolean]])
class ArrayDecoderTest extends AnyWordSpec with Matchers {
import scala.collection.JavaConverters._
"Decoder" should {
"support array for a vector of primitives" in {
val schema = AvroSchema[TestVectorBooleans]
val record = new GenericData.Record(schema)
record.put("booleans", List(true, false, true).asJava)
Decoder[TestVectorBooleans].decode(schema).apply(record) shouldBe TestVectorBooleans(Vector(true, false, true))
}
"support array for an vector of records" in {
val containerSchema = AvroSchema[TestVectorRecords]
val recordSchema = AvroSchema[Record]
val record1 = new GenericData.Record(recordSchema)
record1.put("str", "qwe")
record1.put("double", 123.4)
val record2 = new GenericData.Record(recordSchema)
record2.put("str", "wer")
record2.put("double", 8234.324)
val container = new GenericData.Record(containerSchema)
container.put("records", List(record1, record2).asJava)
Decoder[TestVectorRecords].decode(containerSchema).apply(container) shouldBe TestVectorRecords(Vector(Record("qwe", 123.4), Record("wer", 8234.324)))
}
"support array for a scala.collection.immutable.Seq of primitives" in {
case class Test(seq: Seq[String])
val schema = AvroSchema[Test]
val record = new GenericData.Record(schema)
record.put("seq", Seq("a", "34", "fgD").asJava)
Decoder[Test].decode(schema).apply(record) shouldBe Test(Seq("a", "34", "fgD"))
}
"support array for an Array of primitives" in {
val schema = AvroSchema[TestArrayBooleans]
val record = new GenericData.Record(schema)
record.put("booleans", List(true, false, true).asJava)
Decoder[TestArrayBooleans].decode(schema).apply(record).booleans.toVector shouldBe Vector(true, false, true)
}
"support array for a List of primitives" in {
val schema = AvroSchema[TestListBooleans]
val record = new GenericData.Record(schema)
record.put("booleans", List(true, false, true).asJava)
Decoder[TestListBooleans].decode(schema).apply(record) shouldBe TestListBooleans(List(true, false, true))
}
"support array for a List of records" in {
val containerSchema = AvroSchema[TestListRecords]
val recordSchema = AvroSchema[Record]
val record1 = new GenericData.Record(recordSchema)
record1.put("str", "qwe")
record1.put("double", 123.4)
val record2 = new GenericData.Record(recordSchema)
record2.put("str", "wer")
record2.put("double", 8234.324)
val container = new GenericData.Record(containerSchema)
container.put("records", List(record1, record2).asJava)
Decoder[TestListRecords].decode(containerSchema).apply(container) shouldBe TestListRecords(List(Record("qwe", 123.4), Record("wer", 8234.324)))
}
"support array for a scala.collection.immutable.Seq of records" in {
val containerSchema = AvroSchema[TestSeqRecords]
val recordSchema = AvroSchema[Record]
val record1 = new GenericData.Record(recordSchema)
record1.put("str", "qwe")
record1.put("double", 123.4)
val record2 = new GenericData.Record(recordSchema)
record2.put("str", "wer")
record2.put("double", 8234.324)
val container = new GenericData.Record(containerSchema)
container.put("records", List(record1, record2).asJava)
Decoder[TestSeqRecords].decode(containerSchema).apply(container) shouldBe TestSeqRecords(Seq(Record("qwe", 123.4), Record("wer", 8234.324)))
}
"support array for an Array of records" in {
val containerSchema = AvroSchema[TestArrayRecords]
val recordSchema = AvroSchema[Record]
val record1 = new GenericData.Record(recordSchema)
record1.put("str", "qwe")
record1.put("double", 123.4)
val record2 = new GenericData.Record(recordSchema)
record2.put("str", "wer")
record2.put("double", 8234.324)
val container = new GenericData.Record(containerSchema)
container.put("records", List(record1, record2).asJava)
Decoder[TestArrayRecords].decode(containerSchema).apply(container).records.toVector shouldBe Vector(Record("qwe", 123.4), Record("wer", 8234.324))
}
"support array for a Set of records" in {
val containerSchema = AvroSchema[TestSetRecords]
val recordSchema = AvroSchema[Record]
val record1 = new GenericData.Record(recordSchema)
record1.put("str", "qwe")
record1.put("double", 123.4)
val record2 = new GenericData.Record(recordSchema)
record2.put("str", "wer")
record2.put("double", 8234.324)
val container = new GenericData.Record(containerSchema)
container.put("records", List(record1, record2).asJava)
Decoder[TestSetRecords].decode(containerSchema).apply(container) shouldBe TestSetRecords(Set(Record("qwe", 123.4), Record("wer", 8234.324)))
}
"support array for a Set of strings" in {
val schema = AvroSchema[TestSetString]
val record = new GenericData.Record(schema)
record.put("strings", List("Qwe", "324", "q").asJava)
Decoder[TestSetString].decode(schema).apply(record) shouldBe TestSetString(Set("Qwe", "324", "q"))
}
"support array for a Set of doubles" in {
val schema = AvroSchema[TestSetDoubles]
val record = new GenericData.Record(schema)
record.put("doubles", List(132.4324, 5.4, 0.123).asJava)
Decoder[TestSetDoubles].decode(schema).apply(record) shouldBe TestSetDoubles(Set(132.4324, 5.4, 0.123))
}
"support Seq[Tuple2] issue #156" in {
val schema = AvroSchema[TestSeqTuple2]
val z = new GenericData.Record(AvroSchema[(String, Int)])
z.put("_1", new Utf8("hello"))
z.put("_2", java.lang.Integer.valueOf(214))
val record = new GenericData.Record(schema)
record.put("tuples", List(z).asJava)
Decoder[TestSeqTuple2].decode(schema).apply(record) shouldBe TestSeqTuple2(Seq(("hello", 214)))
}
"support Seq[Tuple3]" in {
val schema = AvroSchema[TestSeqTuple3]
val z = new GenericData.Record(AvroSchema[(String, Int, Boolean)])
z.put("_1", new Utf8("hello"))
z.put("_2", java.lang.Integer.valueOf(214))
z.put("_3", java.lang.Boolean.valueOf(true))
val record = new GenericData.Record(schema)
record.put("tuples", List(z).asJava)
Decoder[TestSeqTuple3].decode(schema).apply(record) shouldBe TestSeqTuple3(Seq(("hello", 214, true)))
}
"support top level Seq[Double]" in {
val schema = SchemaBuilder.array().items(SchemaBuilder.builder().doubleType())
Decoder[Seq[Double]].decode(schema).apply(Array(1.2, 34.5, 54.3)) shouldBe Seq(1.2, 34.5, 54.3)
}
"support top level List[Int]" in {
val schema = SchemaBuilder.array().items(SchemaBuilder.builder().intType())
Decoder[List[Int]].decode(schema).apply(Array(1, 4, 9)) shouldBe List(1, 4, 9)
}
"support top level Vector[String]" in {
val schema = SchemaBuilder.array().items(SchemaBuilder.builder().stringType())
Decoder[Vector[String]].decode(schema).apply(Array("a", "z")) shouldBe Vector("a", "z")
}
"support top level Set[Boolean]" in {
      val schema = SchemaBuilder.array().items(SchemaBuilder.builder().booleanType())
Decoder[Set[Boolean]].decode(schema).apply(Array(true, false, true)) shouldBe Set(true, false)
}
}
}
|
sksamuel/avro4s
|
avro4s-core/src/test/scala/com/sksamuel/avro4s/record/decoder/ArrayDecoderTest.scala
|
Scala
|
apache-2.0
| 8,516 |
package com.github.gdefacci.briscola.service.tournament
import com.github.gdefacci.briscola.player.PlayerId
import com.github.gdefacci.briscola.game._
import com.github.gdefacci.briscola.service.game._
import com.github.gdefacci.briscola.competition.MatchKind
import scalaz.{ -\/, \/, \/- }
import com.github.gdefacci.briscola._
import com.github.gdefacci.briscola.tournament._
import rx.lang.scala.Observable
import com.github.gdefacci.ddd.StateChange
import rx.lang.scala.Subscription
import com.github.gdefacci.briscola.player.GamePlayers
import com.github.gdefacci.ddd.rx.ObservableCommandRunner
trait TournamentService {
  def startTournament(players: GamePlayers, kind: MatchKind): TournamentError \/ TournamentState
def tournamentById(id: TournamentId): Option[TournamentState]
def allTournament: Iterable[TournamentState]
def changes: Observable[StateChange[TournamentState, TournamentEvent]]
}
trait TournamentRepository {
  def all: Iterable[TournamentState]
  def byId(id: TournamentId): Option[TournamentState]
  def store(state: TournamentState): Unit
}
class TournamentServiceImpl(
runner: ObservableCommandRunner[TournamentState, TournamentCommand, TournamentEvent, TournamentError],
repository: TournamentRepository,
gameService: GameService) extends TournamentService {
lazy val changes = runner.changes
  private def startGame(ts: ActiveTournamentState): TournamentError \/ TournamentState = {
    gameService.startGame(ts.players) match {
      case -\/(gmErr) =>
        -\/(TournamentGameError(gmErr))
      case \/-(game @ ActiveGameState(gid, _, _, _, _, _)) =>
val res = runner.run(ts, SetTournamentGame(game))
val tsId = ts.id
res.foreach {
case _:ActiveTournamentState =>
lazy val finishedGameSubscription: Subscription = gameService.finishedGames.subscribe { fgm =>
setGameOutcome(tsId, fgm.newState)
droppedGameSubscription.unsubscribe()
finishedGameSubscription.unsubscribe()
}
lazy val droppedGameSubscription: Subscription = gameService.droppedGames.subscribe { fgm =>
dropTournamentGame(tsId, fgm.newState)
droppedGameSubscription.unsubscribe()
finishedGameSubscription.unsubscribe()
}
case _ => ()
}
res
      case \/-(EmptyGameState) =>
        -\/(TournamentGameError(GameNotStarted))
      case \/-(DroppedGameState(_, _, _, _, _, _, _)) =>
        -\/(TournamentGameError(GameAlreadyDropped))
      case \/-(FinalGameState(_, _, _, _)) =>
        -\/(TournamentGameError(GameAlreadyFinished))
}
}
  def startTournament(players: GamePlayers, kind: MatchKind): TournamentError \/ TournamentState =
    runner.run(EmptyTournamentState, StartTournament(players, kind)).flatMap {
      case ts @ ActiveTournamentState(id, players, kind, finished, actives) =>
        startGame(ts)
      case x => \/-(x)
    }
  private def setTournamentGame(tournamentId: TournamentId, game: ActiveGameState): Option[TournamentError \/ TournamentState] =
repository.byId(tournamentId).map { ts =>
runner.run(ts, SetTournamentGame(game))
}
  private def setGameOutcome(tournamentId: TournamentId, game: FinalGameState): Option[TournamentError \/ TournamentState] =
    repository.byId(tournamentId).map { ts =>
      runner.run(ts, SetGameOutcome(game)) match {
        case \/-(ts1 @ ActiveTournamentState(_, _, _, _, _)) => startGame(ts1)
case x => x
}
}
  private def dropTournamentGame(tournamentId: TournamentId, game: DroppedGameState): Option[TournamentError \/ TournamentState] =
repository.byId(tournamentId).map { ts =>
runner.run(ts, DropTournamentGame(game))
}
def tournamentById(id: TournamentId): Option[TournamentState] = repository.byId(id)
def allTournament: Iterable[TournamentState] = repository.all
}
|
gdefacci/briscola
|
ddd-briscola/src/main/scala/com/github/gdefacci/briscola/service/tournament/tournament.scala
|
Scala
|
bsd-3-clause
| 3,937 |
package pokestats.client.screens
import autowire._
import org.scalajs.dom
import pokestats.Api
import pokestats.client.screens.Common._
import pokestats.client.{Ajaxer, Client, PokemonDetailsScreen}
import pokestats.model.PokemonSummary
import rx._
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scalatags.JsDom.all._
import scalatags.rx.all._
object PokemonList {
private val pokemons = Var(Seq[PokemonSummary]())
private val searchText = Var("")
def screen()(implicit ctx: Ctx.Owner): Rx[dom.Element] = Rx {
// List of pokemons to display
val pokemonsList = Rx {
filteredPokemonsList(pokemons(), searchText())
}
// searchBox
val inputBox = input.render
inputBox.value = searchText.now
val updateSearchText = (e: dom.Event) => searchText() = inputBox.value
inputBox.onchange = updateSearchText
inputBox.onkeyup = updateSearchText
loadAllPokemons()
page(
row(h1("All Pokémons")),
row(p(inputBox)),
row(p(pokemonsList))
).render
}
private def loadAllPokemons(): Unit = {
Ajaxer[Api].listPokemons().call().foreach { p =>
pokemons() = p
}
}
private def filteredPokemonsList(
pokemons: Seq[PokemonSummary],
search: String) = {
ul(
for {
pokemon <- filterPokemons(pokemons, search)
label = pokemon.name + " [" + pokemon.id + "]"
} yield
li(
alink(label, onclick := { () =>
Client.router.goto(PokemonDetailsScreen(pokemon))
})
)
).render
}
private def filterPokemons(pokemons: Seq[PokemonSummary], search: String) = {
def matcher(p: PokemonSummary) =
p.name.contains(search) || p.id.toString == search
if (search.isEmpty) {
pokemons
} else {
pokemons.filter(matcher)
}
}
}
|
guilgaly/pokemon-stats
|
client/src/main/scala/pokestats/client/screens/PokemonList.scala
|
Scala
|
apache-2.0
| 1,843 |
package lila.simul
final case class SimulApplicant(
player: SimulPlayer,
accepted: Boolean
) {
def is(userId: String): Boolean = player is userId
def is(other: SimulPlayer): Boolean = player is other
}
private[simul] object SimulApplicant {
def make(player: SimulPlayer): SimulApplicant =
new SimulApplicant(
player = player,
accepted = false
)
}
|
luanlv/lila
|
modules/simul/src/main/SimulApplicant.scala
|
Scala
|
mit
| 389 |
package maker.task
import java.util.concurrent.atomic.AtomicReference
import maker.utils.TestIdentifier
import maker.utils.TestFailure
import maker.utils.RichString._
import maker.utils.TableBuilder
object FailingTests{
private var mostRecentFailures : List[(TestIdentifier, TestFailure)] = Nil
def setFailures(failures : List[(TestIdentifier, TestFailure)]) = synchronized{
mostRecentFailures = failures
}
  def clear() {
    setFailures(Nil)
  }
  def report {
    if (mostRecentFailures.isEmpty) {
println("There were no failing tests".inBlue)
return
}
val tb = TableBuilder(
"No ",
"Suite ",
"Test ",
"Message"
)
mostRecentFailures.zipWithIndex.foreach{
case ((TestIdentifier(suite, _, test), TestFailure(message, _)), i) =>
tb.addRow(i, suite, test, message)
}
println("Failing Tests".inBlue)
println(tb)
println("\nEnter maker.task.FailingTests.failure(i) for more information on the i'th failing test\n\n".inRed)
}
def failure(i : Int){
if (mostRecentFailures.isEmpty)
println("There were no failing tests")
else if (mostRecentFailures.size < i + 1)
println("There were only " + mostRecentFailures.size + " failing tests")
else {
val (testID, testFailure) = mostRecentFailures(i)
println(testFailure.formatted(testID.suiteClass))
}
}
}
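// Editor-added, hedged usage sketch: after a test run has populated FailingTests
// via setFailures, the helpers below can be called from a REPL. Construction of
// TestIdentifier/TestFailure is omitted because their full signatures are not
// visible in this file.
object FailingTestsUsage {
  def afterTestRun(): Unit = {
    FailingTests.report      // prints the table of failing tests, or a "no failures" note
    FailingTests.failure(0)  // prints details of the first failing test, if any
    FailingTests.clear()     // resets the recorded failures
  }
}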
|
syl20bnr/maker
|
maker/src/maker/task/FailingTests.scala
|
Scala
|
bsd-2-clause
| 1,436 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.execution.internal.collection.ChunkedArrayStack
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
private[tail] object IterantInterleave {
/**
* Implementation for `Iterant.interleave`.
*/
  def apply[F[_], A](l: Iterant[F, A], r: Iterant[F, A])(implicit F: Sync[F]): Iterant[F, A] =
    Suspend(F.delay(new Loop[F, A]().apply(l, r)))
private final class Loop[F[_], A](implicit F: Sync[F])
extends ((Iterant[F, A], Iterant[F, A]) => Iterant[F, A]) {
loop =>
def apply(lh: Iterant[F, A], rh: Iterant[F, A]): Iterant[F, A] =
lhLoop.visit(lh, rh)
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
// Used by Concat:
private[this] var _lhStack: ChunkedArrayStack[F[Iterant[F, A]]] = _
private[this] var _rhStack: ChunkedArrayStack[F[Iterant[F, A]]] = _
private def lhStackPush(ref: F[Iterant[F, A]]): Unit = {
if (_lhStack == null) _lhStack = ChunkedArrayStack()
_lhStack.push(ref)
}
private def lhStackPop(): F[Iterant[F, A]] =
if (_lhStack == null) null.asInstanceOf[F[Iterant[F, A]]]
else _lhStack.pop()
private def rhStackPush(ref: F[Iterant[F, A]]): Unit = {
if (_rhStack == null) _rhStack = ChunkedArrayStack()
_rhStack.push(ref)
}
private def rhStackPop(): F[Iterant[F, A]] =
if (_rhStack == null) null.asInstanceOf[F[Iterant[F, A]]]
else _rhStack.pop()
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
private[this] val lhLoop = new LHLoop
private[this] val rhLoop = new RHLoop
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
private final class LHLoop extends Iterant.Visitor[F, A, Iterant[F, A]] {
protected var rhRef: F[Iterant[F, A]] = _
def continue(lh: F[Iterant[F, A]], rh: F[Iterant[F, A]]): F[Iterant[F, A]] = {
rhRef = rh
lh.map(this)
}
def continueRight(lhRest: F[Iterant[F, A]]) =
rhLoop.continue(lhRest, rhRef)
def visit(lh: Iterant[F, A], rh: Iterant[F, A]): Iterant[F, A] = {
rhRef = F.pure(rh)
apply(lh)
}
def visit(ref: Next[F, A]): Iterant[F, A] =
Next(ref.item, continueRight(ref.rest))
def visit(ref: NextBatch[F, A]): Iterant[F, A] =
visit(ref.toNextCursor())
def visit(ref: NextCursor[F, A]): Iterant[F, A] = {
val cursor = ref.cursor
if (cursor.hasNext()) {
val item = cursor.next()
val rest: F[Iterant[F, A]] = if (cursor.hasNext()) F.pure(ref) else ref.rest
Next(item, continueRight(rest))
} else {
Suspend(ref.rest.map(this))
}
}
def visit(ref: Suspend[F, A]): Iterant[F, A] =
Suspend(ref.rest.map(this))
def visit(ref: Concat[F, A]): Iterant[F, A] = {
lhStackPush(ref.rh)
Suspend(ref.lh.map(this))
}
def visit[S](ref: Scope[F, S, A]): Iterant[F, A] =
ref.runMap(this)
def visit(ref: Last[F, A]): Iterant[F, A] =
lhStackPop() match {
case null =>
Next(ref.item, continueRight(F.pure(Iterant.empty)))
case xs =>
Next(ref.item, continueRight(xs))
}
def visit(ref: Halt[F, A]): Iterant[F, A] =
ref.e match {
case None =>
lhStackPop() match {
case null => ref
case xs => Suspend(xs.map(this))
}
case _ =>
ref
}
def fail(e: Throwable): Iterant[F, A] =
Iterant.raiseError(e)
}
private final class RHLoop extends Iterant.Visitor[F, A, Iterant[F, A]] {
protected var lhRef: F[Iterant[F, A]] = _
def continue(lh: F[Iterant[F, A]], rh: F[Iterant[F, A]]): F[Iterant[F, A]] = {
lhRef = lh
rh.map(this)
}
def continueLeft(rhRest: F[Iterant[F, A]]) =
lhLoop.continue(lhRef, rhRest)
def visit(ref: Next[F, A]): Iterant[F, A] =
Next(ref.item, continueLeft(ref.rest))
def visit(ref: NextBatch[F, A]): Iterant[F, A] =
visit(ref.toNextCursor())
def visit(ref: NextCursor[F, A]): Iterant[F, A] = {
val cursor = ref.cursor
if (cursor.hasNext()) {
val item = cursor.next()
val rest: F[Iterant[F, A]] = if (cursor.hasNext()) F.pure(ref) else ref.rest
Next(item, continueLeft(rest))
} else {
Suspend(ref.rest.map(this))
}
}
def visit(ref: Suspend[F, A]): Iterant[F, A] =
Suspend(ref.rest.map(this))
def visit(ref: Concat[F, A]): Iterant[F, A] = {
rhStackPush(ref.rh)
Suspend(ref.lh.map(this))
}
def visit[S](ref: Scope[F, S, A]): Iterant[F, A] =
ref.runMap(this)
def visit(ref: Last[F, A]): Iterant[F, A] =
rhStackPop() match {
case null =>
Next(ref.item, continueLeft(F.pure(Iterant.empty)))
case xs =>
Next(ref.item, continueLeft(xs))
}
def visit(ref: Halt[F, A]): Iterant[F, A] =
ref.e match {
case None =>
rhStackPop() match {
case null => ref
case xs => Suspend(xs.map(this))
}
case _ =>
ref
}
def fail(e: Throwable): Iterant[F, A] =
Iterant.raiseError(e)
}
}
}
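// Editor-added, hedged sketch: the Loop above backs the public `Iterant.interleave`
// combinator, which alternates elements from two streams. `Task` and the
// `of`/`interleave` methods are assumed to exist as in monix 3.x.
private[tail] object IterantInterleaveExample {
  import monix.eval.Task
  def demo: Iterant[Task, Int] = {
    val lh = Iterant[Task].of(1, 3, 5)
    val rh = Iterant[Task].of(2, 4, 6)
    lh.interleave(rh) // expected to emit 1, 2, 3, 4, 5, 6
  }
}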
|
Wogan/monix
|
monix-tail/shared/src/main/scala/monix/tail/internal/IterantInterleave.scala
|
Scala
|
apache-2.0
| 6,177 |
/*
* Copyright (c) 2015, PagerDuty
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted
* provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of conditions
* and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided with
* the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
* WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.pagerduty.eris.schema
/**
 * Common trait for all supported replication strategies.
*/
sealed trait ReplicationStrategy {
def strategyClass: String
def options: Map[String, String]
}
/**
* Simple replication strategy.
*
* @param replicationFactor number of replicas
*/
case class SimpleStrategy(replicationFactor: Int) extends ReplicationStrategy {
val strategyClass = "org.apache.cassandra.locator.SimpleStrategy"
val options = Map("replication_factor" -> replicationFactor.toString)
}
/**
* Network topology aware replication strategy.
*
* @param topology a seq of tuples: datacenterName -> numberOfReplicas
*/
case class NetworkTopologyStrategy(topology: (String, Int)*) extends ReplicationStrategy {
val strategyClass = "org.apache.cassandra.locator.NetworkTopologyStrategy"
val options = topology.map {
case (datacenterName, numberOfReplicas) =>
datacenterName -> numberOfReplicas.toString
}.toMap
}
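// Editor-added usage sketch: both strategies reduce to a strategy class name plus
// string-valued options, which is what Cassandra's schema DDL expects. Everything
// here uses only the definitions above; the datacenter names are illustrative.
object ReplicationStrategyExamples {
  val simple: ReplicationStrategy = SimpleStrategy(replicationFactor = 3)
  // simple.options == Map("replication_factor" -> "3")
  val multiDc: ReplicationStrategy = NetworkTopologyStrategy("us-east" -> 3, "us-west" -> 2)
  // multiDc.options == Map("us-east" -> "3", "us-west" -> "2")
}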
|
PagerDuty/eris-core
|
main/src/main/scala/com/pagerduty/eris/schema/ReplicationStrategy.scala
|
Scala
|
bsd-3-clause
| 2,517 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Enterprise Data Management Council
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.edmcouncil.rdf_toolkit.command
import de.derivo.sparqldlapi.Query
import de.derivo.sparqldlapi.QueryEngine
import de.derivo.sparqldlapi.QueryResult
import de.derivo.sparqldlapi.exceptions.QueryEngineException
import de.derivo.sparqldlapi.exceptions.QueryParserException
import org.semanticweb.owlapi.model.OWLOntologyManager
import org.semanticweb.owlapi.reasoner.OWLReasoner
import org.semanticweb.owlapi.reasoner.structural.StructuralReasonerFactory
/**
* The SparqlDlQueryEngine is used to query the Configuration File (which is an ontology) with SPARQL-DL
*/
class SparqlDlQueryEngine(manager: OWLOntologyManager, reasoner: OWLReasoner) {
//
// Create an instance of the SPARQL-DL query engine
//
lazy val engine = QueryEngine.create(manager, reasoner)
  // Run a simple sanity-check query at construction time.
  // The empty ASK is true by default.
processQuery(
"ASK {}"
)
def processQuery(q: String) = {
try {
val startTime = System.currentTimeMillis()
// Create a SPARQL-DL query
val query = Query.create(q)
System.out.println("Excecute query:")
System.out.println(q)
System.out.println("-------------------------------------------------")
// Execute the query and generate the result set
val result = engine.execute(query)
if (query.isAsk()) {
System.out.print("Result: ")
if (result.ask()) {
System.out.println("yes")
} else {
System.out.println("no")
}
} else {
if (!result.ask()) {
System.out.println("Query has no solution.\n")
} else {
System.out.println("Results:")
System.out.print(result)
System.out.println("-------------------------------------------------")
System.out.println("Size of result set: " + result.size())
}
}
System.out.println("-------------------------------------------------")
System.out.println("Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + "s\n")
} catch {
case e: QueryParserException ⇒ System.out.println("Query parser error: " + e)
case e: QueryEngineException ⇒ System.out.println("Query engine error: " + e)
}
}
}
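// Editor-added, hedged construction sketch: the OWL API calls below
// (OWLManager.createOWLOntologyManager, StructuralReasonerFactory#createReasoner)
// are assumed from the imports above; the empty ontology is illustrative only.
object SparqlDlQueryEngineExample {
  import org.semanticweb.owlapi.apibinding.OWLManager
  def demo(): Unit = {
    val manager = OWLManager.createOWLOntologyManager()
    val ontology = manager.createOntology()
    val reasoner = new StructuralReasonerFactory().createReasoner(ontology)
    val engine = new SparqlDlQueryEngine(manager, reasoner)
    engine.processQuery("ASK {}") // the empty ASK is true by default
  }
}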
|
edmcouncil/rdf-serializer
|
src/main/scala/org/edmcouncil/rdf_toolkit/command/SparqlDlQueryEngine.scala
|
Scala
|
mit
| 3,454 |
package chapter15
object Exercise5 extends App {
def loadFile: String = {
    io.Source.fromFile("data/chapter15/exercise5.txt").getLines() mkString "\n"
}
// Try it in main/chapter15/JavaExercise5
println("Scala: " + loadFile)
}
|
vsuharnikov/books-exercises
|
scala/scala-for-the-impatient/src/main/scala/chapter15/Exercise5.scala
|
Scala
|
mit
| 240 |
/*
* The MIT License
*
* Copyright (c) 2019 Fulcrum Genomics LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.fulcrumgenomics.vcf.api
import com.fulcrumgenomics.FgBioDef._
import scala.collection.compat._
import scala.reflect.ClassTag
object ArrayAttr {
/** Constructs an instance with the values supplied. */
def apply[A : ClassTag](values: IterableOnce[A]): ArrayAttr[A] = {
new ArrayAttr[A](values.iterator.toArray)
}
/** Apply method that re-uses the supplied array. Should only be used from within the `api` package
* and only when it can be guaranteed no other references to the array exist and no other caller
* can modify the values.
*/
private[api] def apply[A](values: Array[A]): ArrayAttr[A] = new ArrayAttr[A](values)
}
/**
* Class that is used to store multi-valued attributes from a VCF (e.g. `AD=10,20`) and correctly
* handle missing values.
*
* It is possible for one or all values in the collection to be missing. If accessed directly, e.g. by index
* or by iteration, missing values will return one of the following based on the type of the attribute:
* - [[Variant.Missing]] for Strings
* - [[Variant.MissingInt]] for Ints
* - [[Variant.MissingFloat]] for Floating point numbers
*
 * If you need to deal with the possibility of missing values, it is strongly recommended that you use
 * the [[isMissing()]] and/or [[isDefined()]] methods, or use [[get()]], which returns an option type.
*
* @param values the values stored in the collection
* @tparam A the type of values stored in the collection
*/
class ArrayAttr[A] private(private val values: Array[A]) extends IndexedSeq[A] {
/** True if there are any missing values in the collection. */
def hasMissingValues: Boolean = values.exists(Variant.isMissingValue)
/** True if the element at the index is missing, false otherwise. */
def isMissing(idx: Int): Boolean = Variant.isMissingValue(values(idx))
/** True if the element at the index is not missing, false otherwise. */
def isDefined(idx: Int): Boolean = !isMissing(idx)
/** The number of elements (including missing) in the collection. */
override def length: Int = values.length
/** Accesses the value at the specified index. May return a Missing value if no value is defined at the index.
* To avoid dealing with Missing values use [[isMissing(idx)]] or [[isDefined(idx)]] prior to accessing the
* element, or use [[get()]] instead.
*/
override def apply(idx: Int): A = this.values(idx)
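  /** Returns `Some` of the value at the given index, or `None` if the value is missing. */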
def get(idx: Int): Option[A] = if (isDefined(idx)) Some(apply(idx)) else None
}
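// Editor-added, hedged usage sketch: `Variant.MissingInt` is the Int sentinel
// documented above; the depth values here are illustrative only.
object ArrayAttrExample {
  def demo(): Unit = {
    val depths = ArrayAttr(Seq(10, Variant.MissingInt, 20))
    depths.get(0)       // Some(10)
    depths.get(1)       // None: index 1 holds the missing sentinel
    depths.isMissing(1) // true
  }
}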
|
fulcrumgenomics/fgbio
|
src/main/scala/com/fulcrumgenomics/vcf/api/ArrayAttr.scala
|
Scala
|
mit
| 3,655 |
package controllers
import db.PasswordResetRequestsDao
import com.bryzek.apidoc.api.v0.models.{PasswordReset, PasswordResetSuccess, PasswordResetRequest, User}
import com.bryzek.apidoc.api.v0.errors.ErrorsResponse
import java.util.UUID
import play.api.test._
import play.api.test.Helpers._
class PasswordResetsSpec extends BaseSpec {
import scala.concurrent.ExecutionContext.Implicits.global
def resetPassword(token: String, pwd: String): PasswordResetSuccess = {
await(
client.passwordResets.post(
PasswordReset(token = token, password = pwd)
)
)
}
"POST /password_resets" in new WithServer {
val user = createUser()
createPasswordRequest(user.email)
val pr = passwordResetRequestsDao.findAll(userGuid = Some(user.guid)).head
val pwd = "some password"
userPasswordsDao.isValid(user.guid, pwd) must be(false)
val result = resetPassword(pr.token, pwd)
result.userGuid must be(user.guid)
userPasswordsDao.isValid(user.guid, pwd) must be(true)
// Make sure token cannot be reused
intercept[ErrorsResponse] {
resetPassword(pr.token, pwd)
}.errors.map(_.message) must be(Seq(s"Token not found"))
}
"POST /password_resets validates password" in new WithServer {
val user = createUser()
createPasswordRequest(user.email)
val pr = passwordResetRequestsDao.findAll(userGuid = Some(user.guid)).head
intercept[ErrorsResponse] {
resetPassword(pr.token, "foo")
}.errors.map(_.message) must be(Seq(s"Password must be at least 5 characters"))
}
"POST /password_reset_requests/:token validates token" in new WithServer {
intercept[ErrorsResponse] {
resetPassword(UUID.randomUUID.toString, "testing")
}.errors.map(_.message) must be(Seq(s"Token not found"))
}
}
|
Seanstoppable/apidoc
|
api/test/controllers/PasswordResetsSpec.scala
|
Scala
|
mit
| 1,798 |
// See LICENSE for license details.
package util
import chisel3._
abstract class GenericParameterizedBundle[T <: Object](val params: T) extends Bundle
{
override def cloneType = {
try {
this.getClass.getConstructors.head.newInstance(params).asInstanceOf[this.type]
} catch {
case e: java.lang.IllegalArgumentException =>
throw new Exception("Unable to use GenericParameterizedBundle.cloneType on " +
this.getClass + ", probably because " + this.getClass +
"() takes more than one argument. Consider overriding " +
"cloneType() on " + this.getClass, e)
}
}
}
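// Editor-added, hedged sketch: a single-parameter subclass, which is the shape
// cloneType supports. WidthParams and DataBundle are hypothetical names.
case class WidthParams(width: Int)
class DataBundle(p: WidthParams) extends GenericParameterizedBundle(p) {
  val data = UInt(p.width.W)
  val valid = Bool()
}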
|
stanford-ppl/spatial-lang
|
spatial/core/resources/chiselgen/template-level/fringeHW/util/GenericParameterizedBundle.scala
|
Scala
|
mit
| 668 |
/*
* Copyright (c) 2015-2022 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.weather
package providers
package openweather
import org.scalacheck._
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Gen._
import responses._
import Cache._
/**
* Trait with methods for random weather generation
*/
trait WeatherGenerator {
val someTenths = (0 until 300).by(10).map(Some(_))
def genWind: Gen[Wind] =
for {
speed <- arbitrary[BigDecimal]
deg <- arbitrary[BigDecimal]
varBeg <- Gen.oneOf(someTenths)
varEnd <- Gen.oneOf(someTenths)
gust <- arbitrary[Option[BigDecimal]]
} yield Wind(speed, deg, gust, varBeg, varBeg.flatMap(x => varEnd.map(_ + x)))
def genClouds: Gen[Clouds] =
for {
all <- arbitrary[BigInt]
} yield Clouds(all)
def genRain: Gen[Rain] =
for {
oneHour <- arbitrary[Option[BigDecimal]]
threeHour <- arbitrary[Option[BigDecimal]]
} yield Rain(oneHour, threeHour)
def genSnow: Gen[Snow] =
for {
oneHour <- arbitrary[Option[BigDecimal]]
threeHour <- arbitrary[Option[BigDecimal]]
} yield Snow(oneHour, threeHour)
def genMain: Gen[MainInfo] =
for {
grndLevel <- arbitrary[Option[BigDecimal]]
humidity <- arbitrary[BigDecimal]
pressure <- arbitrary[BigDecimal]
seaLevel <- arbitrary[Option[BigDecimal]]
temp <- arbitrary[BigDecimal]
tempMin <- arbitrary[BigDecimal]
tempMax <- arbitrary[BigDecimal]
} yield MainInfo(grndLevel, humidity, pressure, seaLevel, temp, tempMin, tempMax)
// 12 Feb 2015 - 01 Dec 2016
def genTimestamp: Gen[Long] = Gen.choose(1423729852, 1480582792)
def genWeatherDescription: Gen[List[WeatherCondition]] =
for {
main <- Gen.oneOf(TestData.owmWeatherConditions)
description <- Gen.oneOf(TestData.owmWeatherDescriptions.filter(_.contains(main.toLowerCase)))
icon <- Gen.oneOf(TestData.owmIcons)
id <- Gen.oneOf(TestData.owmDescriptionIds)
size <- Gen.oneOf(List(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3))
result <- Gen.listOfN(size, WeatherCondition(main, description, id, icon))
} yield result
def genWeatherStamp: Gen[Weather] =
for {
main <- genMain
wind <- genWind
clouds <- genClouds
rain <- arbitrary[Option[Rain]]
snow <- arbitrary[Option[Snow]]
description <- genWeatherDescription
dt <- genTimestamp
} yield Weather(main, wind, clouds, rain, snow, description, dt)
def genEmptyHistoryBatch: Gen[History] =
for {
cnt <- Gen.choose(1, 23)
} yield History(cnt, "200", Nil)
def genNonEmptyHistoryBatch: Gen[History] =
for {
cnt <- Gen.choose(2, 20)
seed <- Gen.choose(-1, 3)
stamps <- arbitrary[Weather]
} yield History(cnt, "200", List.fill(cnt + seed)(stamps))
implicit def arbitraryRain: Arbitrary[Rain] = Arbitrary(genRain)
implicit def arbitrarySnow: Arbitrary[Snow] = Arbitrary(genSnow)
implicit def arbitraryWeatherStamp: Arbitrary[Weather] =
Arbitrary(genWeatherStamp)
/**
   * Pick a random position predefined in `TestData` and distort it by <30km
*/
def genPredefinedPosition(positions: Vector[(Float, Float)]): Gen[Position] =
for {
      seedLat <- Gen.choose(-0.4f, 0.4f) // Distort lat and lon a little bit
seedLon <- Gen.choose(-0.8f, 0.8f)
pos <- Gen.choose(0, positions.length - 1)
} yield {
val (lat, long) = positions(pos)
val position = Position(lat, long)
position.copy(latitude = position.latitude + seedLat, longitude = position.longitude + seedLon)
}
/**
   * Generate a timestamp somewhere between two weeks ago and two days ago
*
* @return timestamp in seconds
*/
def genLastWeekTimeStamp: Gen[Long] = {
val now = System.currentTimeMillis() / 1000
for {
      seed <- Gen.choose(86400 * 2, 1209600) // Between two days and two weeks back
} yield now - seed
}
implicit def arbitraryPosition: Arbitrary[Position] =
Arbitrary(genPredefinedPosition(TestData.randomCities))
}
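// Editor-added, hedged usage sketch: mixing the trait into an object exposes the
// generators; Gen#sample just peeks at one random value and may return None.
object WeatherGeneratorExample extends WeatherGenerator {
  val oneStamp: Option[Weather] = genWeatherStamp.sample
  val onePosition: Option[Position] = genPredefinedPosition(TestData.randomCities).sample
}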
|
snowplow/scala-weather
|
src/test/scala/com.snowplowanalytics.weather/providers/openweather/WeatherGenerator.scala
|
Scala
|
apache-2.0
| 4,827 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observers.buffers
import monix.execution.Ack
import monix.execution.Ack.{Continue, Stop}
import monix.execution.internal.collection.JSArrayQueue
import monix.execution.internal.math.nextPowerOf2
import scala.util.control.NonFatal
import monix.reactive.observers.{BufferedSubscriber, Subscriber}
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success}
/** Shared internals between [[BackPressuredBufferedSubscriber]] and
* [[BatchedBufferedSubscriber]].
*/
private[observers] abstract class AbstractBackPressuredBufferedSubscriber[A,R]
(out: Subscriber[R], _size: Int)
extends BufferedSubscriber[A] {
require(_size > 0, "bufferSize must be a strictly positive number")
private[this] val bufferSize = nextPowerOf2(_size)
private[this] val em = out.scheduler.executionModel
implicit final val scheduler = out.scheduler
private[this] var upstreamIsComplete = false
private[this] var downstreamIsComplete = false
private[this] var errorThrown: Throwable = null
private[this] var isLoopActive = false
private[this] var backPressured: Promise[Ack] = null
private[this] var lastIterationAck: Future[Ack] = Continue
protected val queue = JSArrayQueue.unbounded[A]
final def onNext(elem: A): Future[Ack] = {
if (upstreamIsComplete || downstreamIsComplete)
Stop
else if (elem == null) {
onError(new NullPointerException("Null not supported in onNext"))
Stop
}
else backPressured match {
case null =>
if (queue.length < bufferSize) {
queue.offer(elem)
pushToConsumer()
Continue
} else {
backPressured = Promise[Ack]()
queue.offer(elem)
pushToConsumer()
backPressured.future
}
case promise =>
queue.offer(elem)
pushToConsumer()
promise.future
}
}
final def onError(ex: Throwable): Unit = {
if (!upstreamIsComplete && !downstreamIsComplete) {
errorThrown = ex
upstreamIsComplete = true
pushToConsumer()
}
}
final def onComplete(): Unit = {
if (!upstreamIsComplete && !downstreamIsComplete) {
upstreamIsComplete = true
pushToConsumer()
}
}
private def pushToConsumer(): Unit =
if (!isLoopActive) {
isLoopActive = true
scheduler.execute(consumerRunLoop)
}
protected def fetchNext(): R
private[this] val consumerRunLoop = new Runnable {
def run(): Unit = fastLoop(lastIterationAck, 0)
private final def signalNext(next: R): Future[Ack] =
try {
val ack = out.onNext(next)
// Tries flattening the Future[Ack] to a
// synchronous value
if (ack == Continue || ack == Stop)
ack
else ack.value match {
case Some(Success(success)) =>
success
case Some(Failure(ex)) =>
downstreamSignalComplete(ex)
Stop
case None =>
ack
}
} catch {
case ex if NonFatal(ex) =>
downstreamSignalComplete(ex)
Stop
}
private def downstreamSignalComplete(ex: Throwable = null): Unit = {
downstreamIsComplete = true
try {
if (ex != null) out.onError(ex)
else out.onComplete()
} catch {
case err if NonFatal(err) =>
scheduler.reportFailure(err)
}
}
private final def goAsync(next: R, ack: Future[Ack]): Unit = {
ack.onComplete {
case Success(Continue) =>
val nextAck = signalNext(next)
val isSync = ack == Continue || ack == Stop
val nextFrame = if (isSync) em.nextFrameIndex(0) else 0
fastLoop(nextAck, nextFrame)
case Success(Stop) =>
// ending loop
downstreamIsComplete = true
isLoopActive = false
case Failure(ex) =>
// ending loop
isLoopActive = false
downstreamSignalComplete(ex)
}
}
private def stopStreaming(): Unit = {
downstreamIsComplete = true
isLoopActive = false
if (backPressured != null) {
backPressured.success(Stop)
backPressured = null
}
}
private final def fastLoop(prevAck: Future[Ack], startIndex: Int): Unit = {
var ack = if (prevAck == null) Continue else prevAck
var isFirstIteration = ack == Continue
var nextIndex = startIndex
var streamErrors = true
try while (isLoopActive && !downstreamIsComplete) {
val next = fetchNext()
if (next != null) {
if (nextIndex > 0 || isFirstIteration) {
isFirstIteration = false
ack match {
case Continue =>
ack = signalNext(next)
if (ack == Stop) {
stopStreaming()
return
} else {
val isSync = ack == Continue
nextIndex = if (isSync) em.nextFrameIndex(nextIndex) else 0
}
case Stop =>
stopStreaming()
return
case async =>
goAsync(next, ack)
return
}
}
else {
goAsync(next, ack)
return
}
}
else {
// Ending loop
if (backPressured != null) {
backPressured.success(if (upstreamIsComplete) Stop else Continue)
backPressured = null
}
streamErrors = false
if (upstreamIsComplete)
downstreamSignalComplete(errorThrown)
lastIterationAck = ack
isLoopActive = false
return
}
}
catch {
case ex if NonFatal(ex) =>
if (streamErrors) downstreamSignalComplete(ex)
else scheduler.reportFailure(ex)
lastIterationAck = Stop
isLoopActive = false
}
}
}
}
|
Wogan/monix
|
monix-reactive/js/src/main/scala/monix/reactive/observers/buffers/AbstractBackPressuredBufferedSubscriber.scala
|
Scala
|
apache-2.0
| 6,626 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controllers.auth
import controllers.traits.auth.RegisterCtrl
import services.UserRegisterService
class RegisterController extends RegisterCtrl {
val userRegisterService = UserRegisterService
}
|
cjww-development/rest-api
|
app/controllers/auth/RegisterController.scala
|
Scala
|
apache-2.0
| 935 |
package scodec.protocols
package ip
package v6
import scala.util.Try
import scodec.bits._
import scodec.Codec
import scodec.codecs
case class Address(bytes: ByteVector) {
require(bytes.size == 16)
override def toString = {
def condense[A](xs: List[(A, Int)]): List[(A, Int, Int)] = xs match {
case Nil => Nil
case h :: t =>
val segment = t takeWhile { case (x, _) => x == h._1 }
(h._1, h._2, segment.size + 1) +: condense(t.drop(segment.size))
}
def show(octets: List[ByteVector]): String =
octets.map { _.toHex.replaceAll("^0+", "0") }.mkString(":")
val grp = bytes.grouped(2).toList
val condensedZeroes = condense(grp.zipWithIndex).filter { case (octet, _, size) => octet == hex"0000" && size > 1 }
if (condensedZeroes.isEmpty) {
show(grp)
} else {
val (_, idx, size) = condensedZeroes.maxBy { case (_, _, size) => size }
show(grp.take(idx)) ++ "::" ++ show(grp.drop(idx + size))
}
}
}
object Address {
implicit val codec: Codec[Address] = codecs.bytes(16).as[Address]
def fromString(str: String): Either[String, Address] = {
// FIXME: this implementation erroneously supports hostnames and can be slow as a result
val result = Try {
java.net.InetAddress.getByName(str) match {
case v6: java.net.Inet6Address => Address(ByteVector(v6.getAddress))
case v4: java.net.Inet4Address => ip.v4.Address(ByteVector(v4.getAddress).toInt()).toV6
}
}.toOption
result.map(Right.apply).getOrElse(Left(s"invalid IPv6 address: $str"))
}
def fromStringValid(str: String): Address =
fromString(str).fold(err => throw new IllegalArgumentException(err), identity)
}
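// Editor-added, hedged usage sketch: fromString returns an Either, and toString
// condenses the longest run of zero groups into "::" as implemented above. The
// addresses below are illustrative.
object AddressExample {
  def demo(): Unit = {
    Address.fromString("2001:db8::1") match {
      case Right(addr) => println(addr.toString) // re-rendered in condensed form
      case Left(err)   => println(err)
    }
    val loopback = Address.fromStringValid("::1")
    println(loopback.bytes.toHex) // 16 bytes, last one 0x01
  }
}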
|
scodec/scodec-protocols
|
src/main/scala/scodec/protocols/ip/v6/Address.scala
|
Scala
|
bsd-3-clause
| 1,704 |
import awscala._
import com.amazonaws.services.{s3 => aws}
object S3Object {
def apply(bucket: Bucket, obj: aws.model.S3Object): S3Object = new S3Object(
bucket = bucket,
key = obj.getKey,
content = obj.getObjectContent(),
redirectLocation = obj.getRedirectLocation,
metadata = obj.getObjectMetadata
)
}
case class S3Object(
bucket: Bucket, key: String, content: java.io.InputStream = null,
redirectLocation: String = null, metadata: aws.model.ObjectMetadata = null
)
extends aws.model.S3Object {
setBucketName(bucket.name)
setKey(key)
setObjectContent(content)
setRedirectLocation(redirectLocation)
setObjectMetadata(metadata)
import aws.model.{ CannedAccessControlList => CannedACL }
def setAsAuthenticatedRead()(implicit s3: S3) = s3.acl(this, CannedACL.AuthenticatedRead)
def setAsBucketOwnerFullControl()(implicit s3: S3) = s3.acl(this, CannedACL.BucketOwnerFullControl)
def setAsBucketOwnerRead()(implicit s3: S3) = s3.acl(this, CannedACL.BucketOwnerRead)
def setAsLogDeliveryWrite()(implicit s3: S3) = s3.acl(this, CannedACL.LogDeliveryWrite)
def setAsPrivate()(implicit s3: S3) = s3.acl(this, CannedACL.Private)
def setAsPublicRead()(implicit s3: S3) = s3.acl(this, CannedACL.PublicRead)
def setAsPublicReadWrite()(implicit s3: S3) = s3.acl(this, CannedACL.PublicReadWrite)
def publicUrl: java.net.URL = new java.net.URL(s"http://${bucket.name}.s3.amazonaws.com/${key}")
def generatePresignedUrl(expiration: DateTime)(implicit s3: S3): java.net.URL = {
s3.generatePresignedUrl(this, expiration)
}
lazy val versionId: String = Option(metadata).map(_.getVersionId).getOrElse(null)
def destroy()(implicit s3: S3): Unit = s3.deleteObject(this)
}
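// Editor-added, hedged usage sketch: S3 and DateTime come from the awscala
// imports above; the one-hour expiry is illustrative.
object S3ObjectExample {
  def demo(obj: S3Object)(implicit s3: S3): Unit = {
    obj.setAsPublicRead()
    println(obj.publicUrl)
    println(obj.generatePresignedUrl(DateTime.now.plusHours(1)))
  }
}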
|
hirokikonishi/awscala
|
aws/s3/src/main/scala/S3Object.scala
|
Scala
|
apache-2.0
| 1,732 |
/**
* Copyright 2014 Getty Imges, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tecsisa.akka.http.swagger
import akka.actor.{Actor, ActorSystem}
import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.wordnik.swagger.config.SwaggerConfig
import com.wordnik.swagger.core.SwaggerSpec
import com.wordnik.swagger.model._
import org.json4s.Formats
import scala.concurrent.ExecutionContextExecutor
import scala.reflect.runtime.universe.Type
trait SwaggerHttpService {
_: Actor =>
def apiTypes: Seq[Type]
def apiVersion: String
def swaggerVersion: String = SwaggerSpec.version
def baseUrl: String //url of api
def docsPath: String = "api-docs" //path to swagger's endpoint
def apiInfo: Option[ApiInfo] = None
def authorizations: List[AuthorizationType] = List()
implicit val system: ActorSystem
implicit def executor: ExecutionContextExecutor
implicit val formats: Formats
implicit def tem[A <: AnyRef](implicit formats: Formats): ToEntityMarshaller[A]
private val api =
new SwaggerApiBuilder(
new SwaggerConfig(
apiVersion,
swaggerVersion,
baseUrl,
"", //api path, baseUrl is used instead
authorizations, //authorizations
apiInfo
), apiTypes
)
final def swaggerRoutes: Route =
(path(docsPath) & get) {
complete(api.getResourceListing())
} ~ (for (
(subPath, apiListing) <- api.listings
) yield {
path(docsPath / subPath.drop(1).split('/').map(
segmentStringToPathMatcher _
).reduceLeft(_ / _)) {
get{
complete(apiListing)
}
}
}).reduceLeft(_ ~ _)
}
|
Tecsisa/akka-http-swagger
|
src/main/scala/com/tecsisa/akka/http/swagger/SwaggerHttpService.scala
|
Scala
|
apache-2.0
| 2,294 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package system.basic
import java.util.{Date, UUID}
import scala.language.postfixOps
import scala.collection.mutable.HashMap
import scala.concurrent.duration.DurationInt
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common._
import spray.json._
import spray.json.DefaultJsonProtocol.StringJsonFormat
import common.WskProps
import common.rest.WskRestOperations
import org.apache.openwhisk.core.entity.WhiskActivation
@RunWith(classOf[JUnitRunner])
class WskPackageTests extends TestHelpers with WskTestHelpers with WskActorSystem {
implicit val wskprops = WskProps()
val wsk: WskOperations = new WskRestOperations
val LOG_DELAY = 80 seconds
behavior of "Wsk Package"
it should "allow creation and deletion of a package" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils.retry(
{
val name = "simplepackage-" + UUID.randomUUID().toString()
assetHelper.withCleaner(wsk.pkg, name) { (pkg, _) =>
pkg.create(name, Map.empty)
}
},
10,
Some(1.second),
Some(
s"system.basic.WskPackageTests.Wsk Package.should allow creation and deletion of a package not successful, retrying.."))
}
val params1 = Map("p1" -> "v1".toJson, "p2" -> "".toJson)
val params2 = Map("p1" -> "v1".toJson, "p2" -> "v2".toJson, "p3" -> "v3".toJson)
it should "allow creation of a package with parameters" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils.retry(
{
val name = "simplepackagewithparams-" + UUID.randomUUID().toString()
assetHelper.withCleaner(wsk.pkg, name) { (pkg, _) =>
pkg.create(name, params1)
}
},
10,
Some(1.second),
Some(
s"system.basic.WskPackageTests.Wsk Package.should allow creation of a package with parameters not successful, retrying.."))
}
it should "allow updating a package" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils.retry(
{
val name = "simplepackagetoupdate-" + UUID.randomUUID().toString()
assetHelper.withCleaner(wsk.pkg, name) { (pkg, _) =>
pkg.create(name, params1)
pkg.create(name, params2, update = true)
}
},
10,
Some(1.second),
Some(s"system.basic.WskPackageTests.Wsk Package.should allow updating a package not successful, retrying.."))
}
it should "allow binding of a package" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils.retry(
{
val name = "simplepackagetobind-" + UUID.randomUUID().toString()
val bindName = "simplebind-" + UUID.randomUUID().toString()
assetHelper.withCleaner(wsk.pkg, name) { (pkg, _) =>
pkg.create(name, params1)
}
assetHelper.withCleaner(wsk.pkg, bindName) { (pkg, _) =>
pkg.bind(name, bindName, params2)
}
},
10,
Some(1.second),
Some(s"system.basic.WskPackageTests.Wsk Package.should allow binding of a package not successful, retrying.."))
}
it should "perform package binds so parameters are inherited" in withAssetCleaner(wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
val packageName = "package1-" + UUID.randomUUID().toString()
val bindName = "package2-" + UUID.randomUUID().toString()
val actionName = "print"
val packageActionName = packageName + "/" + actionName
val bindActionName = bindName + "/" + actionName
val packageParams = Map("key1a" -> "value1a".toJson, "key1b" -> "value1b".toJson)
val bindParams = Map("key2a" -> "value2a".toJson, "key1b" -> "value2b".toJson)
val actionParams = Map("key0" -> "value0".toJson)
val file = TestUtils.getTestActionFilename("printParams.js")
assetHelper.withCleaner(wsk.pkg, packageName) { (pkg, _) =>
pkg.create(packageName, packageParams)
}
assetHelper.withCleaner(wsk.action, packageActionName) { (action, _) =>
action.create(packageActionName, Some(file), parameters = actionParams)
}
assetHelper.withCleaner(wsk.pkg, bindName) { (pkg, _) =>
pkg.bind(packageName, bindName, bindParams)
}
// Check that the description of packages and actions includes all the inherited parameters.
val packageDescription = wsk.pkg.get(packageName).stdout
val bindDescription = wsk.pkg.get(bindName).stdout
val packageActionDescription = wsk.action.get(packageActionName).stdout
val bindActionDescription = wsk.action.get(bindActionName).stdout
checkForParameters(packageDescription, packageParams)
checkForParameters(bindDescription, packageParams, bindParams)
checkForParameters(packageActionDescription, packageParams, actionParams)
checkForParameters(bindActionDescription, packageParams, bindParams, actionParams)
// Check that inherited parameters are passed to the action.
val now = new Date().toString()
val run = wsk.action.invoke(bindActionName, Map("payload" -> now.toJson))
withActivation(wsk.activation, run, totalWait = LOG_DELAY) {
_.logs.get.mkString(" ") should include regex (String
.format(".*key0: value0.*key1a: value1a.*key1b: value2b.*key2a: value2a.*payload: %s", now))
}
},
10,
Some(1.second),
Some(
s"system.basic.WskPackageTests.Wsk Package.should perform package binds so parameters are inherited not successful, retrying.."))
}
it should "contain an binding annotation if invoked action is in the package binding" in withAssetCleaner(wskprops) {
org.apache.openwhisk.utils
.retry(
{ (wp, assetHelper) =>
val ns = wsk.namespace.whois()
val packageName = "package1-" + UUID.randomUUID().toString()
val bindName = "package2-" + UUID.randomUUID().toString()
val actionName = "print"
val packageActionName = packageName + "/" + actionName
val bindActionName = bindName + "/" + actionName
val file = TestUtils.getTestActionFilename("echo.js")
assetHelper.withCleaner(wsk.pkg, packageName) { (pkg, _) =>
pkg.create(packageName)
}
assetHelper.withCleaner(wsk.action, packageActionName) { (action, _) =>
action.create(packageActionName, Some(file))
}
assetHelper.withCleaner(wsk.pkg, bindName) { (pkg, _) =>
pkg.bind(packageName, bindName)
}
val run = wsk.action.invoke(bindActionName)
withActivation(wsk.activation, run, totalWait = LOG_DELAY) { activation =>
val binding = activation.getAnnotationValue(WhiskActivation.bindingAnnotation)
binding shouldBe defined
binding.get shouldBe JsString(ns + "/" + bindName)
}
},
10,
Some(1.second),
Some(
s"system.basic.WskPackageTests.Wsk Package.should contain an binding annotation if invoked action is in the package binding not successful, retrying.."))
}
it should "not contain an binding annotation if invoked action is not in the package binding" in withAssetCleaner(
wskprops) {
org.apache.openwhisk.utils
.retry(
{ (wp, assetHelper) =>
val packageName = "package1-" + UUID.randomUUID().toString()
val actionName = "print-" + UUID.randomUUID().toString()
val packageActionName = packageName + "/" + actionName
val file = TestUtils.getTestActionFilename("echo.js")
assetHelper.withCleaner(wsk.pkg, packageName) { (pkg, _) =>
pkg.create(packageName)
}
assetHelper.withCleaner(wsk.action, packageActionName) { (action, _) =>
action.create(packageActionName, Some(file))
}
assetHelper.withCleaner(wsk.action, actionName) { (action, _) =>
action.create(actionName, Some(file))
}
withActivation(wsk.activation, wsk.action.invoke(packageActionName), totalWait = LOG_DELAY) { activation =>
val binding = activation.getAnnotationValue(WhiskActivation.bindingAnnotation)
binding shouldBe empty
}
withActivation(wsk.activation, wsk.action.invoke(actionName), totalWait = LOG_DELAY) { activation =>
val binding = activation.getAnnotationValue(WhiskActivation.bindingAnnotation)
binding shouldBe empty
}
},
10,
Some(1.second),
Some(
s"system.basic.WskPackageTests.Wsk Package.should not contain an binding annotation if invoked action is not in the package binding not successful, retrying.."))
}
/**
* Check that a description of an item includes the specified parameters.
   * Parameter keys in later parameter maps override earlier ones.
*/
def checkForParameters(itemDescription: String, paramSets: Map[String, JsValue]*): Unit = {
    // Merge the parameter maps, with later maps overriding earlier ones.
val merged = HashMap.empty[String, JsValue]
paramSets.foreach { merged ++= _ }
val flatDescription = itemDescription.replace("\n", "").replace("\r", "")
merged.foreach {
case (key: String, value: JsValue) =>
val toFind = s""""key":.*"${key}",.*"value":.*${value.toString}"""
flatDescription should include regex toFind
}
}
}
|
jasonpet/openwhisk
|
tests/src/test/scala/system/basic/WskPackageTests.scala
|
Scala
|
apache-2.0
| 10,367 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.check.extractor.xpath
import java.io.StringReader
import javax.xml.namespace.NamespaceContext
import javax.xml.parsers.{ DocumentBuilder, DocumentBuilderFactory }
import javax.xml.xpath.XPathConstants._
import javax.xml.xpath.XPathFactory
import io.gatling.core.config.GatlingConfiguration
import org.w3c.dom.{ Node, NodeList, Document }
import org.xml.sax.{ InputSource, EntityResolver }
class JdkXmlParsers(configuration: GatlingConfiguration) {
val xpathFactoryTL = new ThreadLocal[XPathFactory] {
override def initialValue() = XPathFactory.newInstance
}
val documentBuilderFactoryInstance = {
System.setProperty("org.apache.xml.dtm.DTMManager", "org.apache.xml.dtm.ref.DTMManagerDefault")
System.setProperty("com.sun.org.apache.xml.internal.dtm.DTMManager", "com.sun.org.apache.xml.internal.dtm.ref.DTMManagerDefault")
System.setProperty("javax.xml.xpath.XPathFactory", "org.apache.xpath.jaxp.XPathFactoryImpl")
val instance = DocumentBuilderFactory.newInstance
instance.setExpandEntityReferences(false)
instance.setNamespaceAware(true)
instance
}
val noopEntityResolver = new EntityResolver {
// FIXME can't we create only one StringReader?
def resolveEntity(publicId: String, systemId: String) = new InputSource(new StringReader(""))
}
val documentBuilderTL = new ThreadLocal[DocumentBuilder] {
override def initialValue() = {
val builder = documentBuilderFactoryInstance.newDocumentBuilder
builder.setEntityResolver(noopEntityResolver)
builder
}
}
def parse(inputSource: InputSource): Document =
documentBuilderTL.get.parse(inputSource)
def nodeList(document: Document, expression: String, namespaces: List[(String, String)]): NodeList = {
val path = xpathFactoryTL.get.newXPath
if (namespaces.nonEmpty) {
val namespaceCtx = new NamespaceContext {
val map: Map[String, String] = namespaces.toMap
def getNamespaceURI(prefix: String) = map(prefix)
def getPrefix(uri: String) = throw new UnsupportedOperationException
def getPrefixes(uri: String) = throw new UnsupportedOperationException
}
path.setNamespaceContext(namespaceCtx)
}
val xpathExpression = path.compile(expression)
xpathExpression.evaluate(document, NODESET).asInstanceOf[NodeList]
}
def extractAll(document: Document, expression: String, namespaces: List[(String, String)]): Seq[String] = {
val nodes = nodeList(document, expression, namespaces)
(for {
i <- 0 until nodes.getLength
} yield {
val item = nodes.item(i)
item.getNodeType match {
case Node.ELEMENT_NODE if item.getChildNodes.getLength > 0 =>
val firstChild = item.getChildNodes.item(0)
if (firstChild.getNodeType == Node.TEXT_NODE)
Some(firstChild.getNodeValue)
else None
case _ =>
Option(item.getNodeValue)
}
}).flatten
}
}
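// Editor-added, hedged usage sketch: `configuration` must be supplied by the
// caller; the XML payload and XPath expression are illustrative only.
object JdkXmlParsersExample {
  def demo(configuration: GatlingConfiguration): Seq[String] = {
    val parsers = new JdkXmlParsers(configuration)
    val document = parsers.parse(new InputSource(new StringReader("<root><item>a</item><item>b</item></root>")))
    parsers.extractAll(document, "//item", Nil) // expected: Seq("a", "b")
  }
}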
|
timve/gatling
|
gatling-core/src/main/scala/io/gatling/core/check/extractor/xpath/JdkXmlParsers.scala
|
Scala
|
apache-2.0
| 3,596 |
import java.io.{FileWriter, BufferedWriter, File}
// I'm sure stuff like this exists somewhere...
// But oh well for now.
object StringUtils {
implicit def pimpString(s:String) = new {
def replaceBetween(startDelim:String, endDelim:String, replacement:String): Option[String] = {
val startIndex = s.indexOf(startDelim)
val endIndex = s.indexOf(endDelim)
if(startIndex > -1 && endIndex >= startIndex)
Some(s.take(startIndex + startDelim.length) + replacement + s.takeRight(s.length - endIndex))
else None
}
def replaceAfter(delim:String, replacement:String): Option[String] = {
val startIndex = s.indexOf(delim)
if(startIndex > -1) Some(s.take(startIndex + delim.length) + replacement) else None
}
}
}
object FileUtils {
import StringUtils._
implicit def pimpFile(file:File) = new {
    def read(): String = scala.io.Source.fromFile(file).getLines().mkString("\n")
def write(s: String): File = {
val w = new BufferedWriter(new FileWriter(file))
w.write(s); w.close()
file
}
def map(f: String => String): File = {
val oldContents = read()
val newContents = f(oldContents)
if(newContents != oldContents) write(newContents) else file
}
def replaceBetween(startDelim:String, endDelim:String, replacement:String): File =
map(s => s.replaceBetween(startDelim, endDelim, replacement).getOrElse(s))
def replaceAfter(delim:String, replacement:String): File =
map(s => s.replaceAfter(delim, replacement).getOrElse(s))
}
}
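// Editor-added, hedged usage sketch: the file name and delimiters are
// hypothetical; replaceBetween keeps both delimiters and swaps only the content
// between them.
object FileUtilsExample {
  import FileUtils._
  def demo(): Unit = {
    val f = new File("version.txt").write("<version>0.1.0</version>")
    f.replaceBetween("<version>", "</version>", "0.2.0")
    println(f.read()) // <version>0.2.0</version>
  }
}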
|
khalen/f0
|
project/Utils.scala
|
Scala
|
mit
| 1,554 |
package levar
import org.scalatest._
class ServerSpec extends FlatSpec {
}
|
peoplepattern/LeVar
|
levar-cli/src/test/scala/levar/ServerSpec.scala
|
Scala
|
apache-2.0
| 78 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.trees
import java.math.BigInteger
import java.util.UUID
import scala.collection.mutable.ArrayBuffer
import org.json4s.jackson.JsonMethods
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource, JarResource}
import org.apache.spark.sql.catalyst.dsl.expressions.DslString
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.plans.{LeftOuter, NaturalJoin}
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, Union}
import org.apache.spark.sql.catalyst.plans.physical.{IdentityBroadcastMode, RoundRobinPartitioning, SinglePartition}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.types.{BooleanType, DoubleType, FloatType, IntegerType, Metadata, NullType, StringType, StructField, StructType}
import org.apache.spark.storage.StorageLevel
case class Dummy(optKey: Option[Expression]) extends Expression with CodegenFallback {
override def children: Seq[Expression] = optKey.toSeq
override def nullable: Boolean = true
override def dataType: NullType = NullType
override lazy val resolved = true
override def eval(input: InternalRow): Any = null.asInstanceOf[Any]
}
case class ComplexPlan(exprs: Seq[Seq[Expression]])
extends org.apache.spark.sql.catalyst.plans.logical.LeafNode {
override def output: Seq[Attribute] = Nil
}
case class ExpressionInMap(map: Map[String, Expression]) extends Expression with Unevaluable {
override def children: Seq[Expression] = map.values.toSeq
override def nullable: Boolean = true
override def dataType: NullType = NullType
override lazy val resolved = true
}
case class JsonTestTreeNode(arg: Any) extends LeafNode {
override def output: Seq[Attribute] = Seq.empty[Attribute]
}
case class NameValue(name: String, value: Any)
case object DummyObject
case class SelfReferenceUDF(
var config: Map[String, Any] = Map.empty[String, Any]) extends Function1[String, Boolean] {
config += "self" -> this
def apply(key: String): Boolean = config.contains(key)
}
class TreeNodeSuite extends SparkFunSuite {
test("top node changed") {
val after = Literal(1) transform { case Literal(1, _) => Literal(2) }
assert(after === Literal(2))
}
test("one child changed") {
val before = Add(Literal(1), Literal(2))
val after = before transform { case Literal(2, _) => Literal(1) }
assert(after === Add(Literal(1), Literal(1)))
}
test("no change") {
val before = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
val after = before transform { case Literal(5, _) => Literal(1)}
assert(before === after)
    // Ensure that the objects after the transformation are the same objects as before it.
before.map(identity[Expression]).zip(after.map(identity[Expression])).foreach {
case (b, a) => assert(b eq a)
}
}
test("collect") {
val tree = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
val literals = tree collect {case l: Literal => l}
assert(literals.size === 4)
(1 to 4).foreach(i => assert(literals contains Literal(i)))
}
test("pre-order transform") {
val actual = new ArrayBuffer[String]()
val expected = Seq("+", "1", "*", "2", "-", "3", "4")
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
expression transformDown {
case b: BinaryOperator => actual += b.symbol; b
case l: Literal => actual += l.toString; l
}
assert(expected === actual)
}
test("post-order transform") {
val actual = new ArrayBuffer[String]()
val expected = Seq("1", "2", "3", "4", "-", "*", "+")
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
expression transformUp {
case b: BinaryOperator => actual += b.symbol; b
case l: Literal => actual += l.toString; l
}
assert(expected === actual)
}
test("transform works on nodes with Option children") {
val dummy1 = Dummy(Some(Literal.create("1", StringType)))
val dummy2 = Dummy(None)
val toZero: PartialFunction[Expression, Expression] = { case Literal(_, _) => Literal(0) }
var actual = dummy1 transformDown toZero
assert(actual === Dummy(Some(Literal(0))))
actual = dummy1 transformUp toZero
assert(actual === Dummy(Some(Literal(0))))
actual = dummy2 transform toZero
assert(actual === Dummy(None))
}
test("preserves origin") {
CurrentOrigin.setPosition(1, 1)
val add = Add(Literal(1), Literal(1))
CurrentOrigin.reset()
val transformed = add transform {
case Literal(1, _) => Literal(2)
}
assert(transformed.origin.line.isDefined)
assert(transformed.origin.startPosition.isDefined)
}
test("foreach up") {
val actual = new ArrayBuffer[String]()
val expected = Seq("1", "2", "3", "4", "-", "*", "+")
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
expression foreachUp {
case b: BinaryOperator => actual += b.symbol;
case l: Literal => actual += l.toString;
}
assert(expected === actual)
}
test("find") {
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
// Find the top node.
var actual: Option[Expression] = expression.find {
case add: Add => true
case other => false
}
var expected: Option[Expression] =
Some(Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4)))))
assert(expected === actual)
// Find the first children.
actual = expression.find {
case Literal(1, IntegerType) => true
case other => false
}
expected = Some(Literal(1))
assert(expected === actual)
// Find an internal node (Subtract).
actual = expression.find {
case sub: Subtract => true
case other => false
}
expected = Some(Subtract(Literal(3), Literal(4)))
assert(expected === actual)
// Find a leaf node.
actual = expression.find {
case Literal(3, IntegerType) => true
case other => false
}
expected = Some(Literal(3))
assert(expected === actual)
// Find nothing.
actual = expression.find {
case Literal(100, IntegerType) => true
case other => false
}
expected = None
assert(expected === actual)
}
test("collectFirst") {
val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
// Collect the top node.
{
val actual = expression.collectFirst {
case add: Add => add
}
val expected =
Some(Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4)))))
assert(expected === actual)
}
// Collect the first children.
{
val actual = expression.collectFirst {
case l @ Literal(1, IntegerType) => l
}
val expected = Some(Literal(1))
assert(expected === actual)
}
// Collect an internal node (Subtract).
{
val actual = expression.collectFirst {
case sub: Subtract => sub
}
val expected = Some(Subtract(Literal(3), Literal(4)))
assert(expected === actual)
}
// Collect a leaf node.
{
val actual = expression.collectFirst {
case l @ Literal(3, IntegerType) => l
}
val expected = Some(Literal(3))
assert(expected === actual)
}
// Collect nothing.
{
val actual = expression.collectFirst {
case l @ Literal(100, IntegerType) => l
}
val expected = None
assert(expected === actual)
}
}
test("transformExpressions on nested expression sequence") {
val plan = ComplexPlan(Seq(Seq(Literal(1)), Seq(Literal(2))))
val actual = plan.transformExpressions {
case Literal(value, _) => Literal(value.toString)
}
val expected = ComplexPlan(Seq(Seq(Literal("1")), Seq(Literal("2"))))
assert(expected === actual)
}
test("expressions inside a map") {
val expression = ExpressionInMap(Map("1" -> Literal(1), "2" -> Literal(2)))
{
val actual = expression.transform {
case Literal(i: Int, _) => Literal(i + 1)
}
val expected = ExpressionInMap(Map("1" -> Literal(2), "2" -> Literal(3)))
assert(actual === expected)
}
{
val actual = expression.withNewChildren(Seq(Literal(2), Literal(3)))
val expected = ExpressionInMap(Map("1" -> Literal(2), "2" -> Literal(3)))
assert(actual === expected)
}
}
test("toJSON") {
def assertJSON(input: Any, json: JValue): Unit = {
val expected =
s"""
|[{
| "class": "${classOf[JsonTestTreeNode].getName}",
| "num-children": 0,
| "arg": ${compact(render(json))}
|}]
""".stripMargin
compareJSON(JsonTestTreeNode(input).toJSON, expected)
}
// Converts simple types to JSON
assertJSON(true, true)
assertJSON(33.toByte, 33)
assertJSON(44, 44)
assertJSON(55L, 55L)
assertJSON(3.0, 3.0)
assertJSON(4.0D, 4.0D)
assertJSON(BigInt(BigInteger.valueOf(88L)), 88L)
assertJSON(null, JNull)
assertJSON("text", "text")
assertJSON(Some("text"), "text")
compareJSON(JsonTestTreeNode(None).toJSON,
s"""[
| {
| "class": "${classOf[JsonTestTreeNode].getName}",
| "num-children": 0
| }
|]
""".stripMargin)
val uuid = UUID.randomUUID()
assertJSON(uuid, uuid.toString)
// Converts Spark Sql DataType to JSON
assertJSON(IntegerType, "integer")
assertJSON(Metadata.empty, JObject(Nil))
assertJSON(
StorageLevel.NONE,
JObject(
"useDisk" -> false,
"useMemory" -> false,
"useOffHeap" -> false,
"deserialized" -> false,
"replication" -> 1)
)
// Converts TreeNode argument to JSON
assertJSON(
Literal(333),
List(
JObject(
"class" -> classOf[Literal].getName,
"num-children" -> 0,
"value" -> "333",
"dataType" -> "integer")))
// Converts Seq[String] to JSON
assertJSON(Seq("1", "2", "3"), "[1, 2, 3]")
// Converts Seq[DataType] to JSON
assertJSON(Seq(IntegerType, DoubleType, FloatType), List("integer", "double", "float"))
// Converts Seq[Partitioning] to JSON
assertJSON(
Seq(SinglePartition, RoundRobinPartitioning(numPartitions = 3)),
List(
JObject("object" -> JString(SinglePartition.getClass.getName)),
JObject(
"product-class" -> classOf[RoundRobinPartitioning].getName,
"numPartitions" -> 3)))
// Converts case object to JSON
assertJSON(DummyObject, JObject("object" -> JString(DummyObject.getClass.getName)))
// Converts ExprId to JSON
assertJSON(
ExprId(0, uuid),
JObject(
"product-class" -> classOf[ExprId].getName,
"id" -> 0,
"jvmId" -> uuid.toString))
// Converts StructField to JSON
assertJSON(
StructField("field", IntegerType),
JObject(
"product-class" -> classOf[StructField].getName,
"name" -> "field",
"dataType" -> "integer",
"nullable" -> true,
"metadata" -> JObject(Nil)))
// Converts TableIdentifier to JSON
assertJSON(
TableIdentifier("table"),
JObject(
"product-class" -> classOf[TableIdentifier].getName,
"table" -> "table"))
// Converts JoinType to JSON
assertJSON(
NaturalJoin(LeftOuter),
JObject(
"product-class" -> classOf[NaturalJoin].getName,
"tpe" -> JObject("object" -> JString(LeftOuter.getClass.getName))))
// Converts FunctionIdentifier to JSON
assertJSON(
FunctionIdentifier("function", None),
JObject(
"product-class" -> JString(classOf[FunctionIdentifier].getName),
"funcName" -> "function"))
// Converts BucketSpec to JSON
assertJSON(
BucketSpec(1, Seq("bucket"), Seq("sort")),
JObject(
"product-class" -> classOf[BucketSpec].getName,
"numBuckets" -> 1,
"bucketColumnNames" -> "[bucket]",
"sortColumnNames" -> "[sort]"))
// Converts FrameBoundary to JSON
assertJSON(
ValueFollowing(3),
JObject(
"product-class" -> classOf[ValueFollowing].getName,
"value" -> 3))
// Converts WindowFrame to JSON
assertJSON(
SpecifiedWindowFrame(RowFrame, UnboundedFollowing, CurrentRow),
JObject(
"product-class" -> classOf[SpecifiedWindowFrame].getName,
"frameType" -> JObject("object" -> JString(RowFrame.getClass.getName)),
"frameStart" -> JObject("object" -> JString(UnboundedFollowing.getClass.getName)),
"frameEnd" -> JObject("object" -> JString(CurrentRow.getClass.getName))))
// Converts Partitioning to JSON
assertJSON(
RoundRobinPartitioning(numPartitions = 3),
JObject(
"product-class" -> classOf[RoundRobinPartitioning].getName,
"numPartitions" -> 3))
// Converts FunctionResource to JSON
assertJSON(
FunctionResource(JarResource, "file:///"),
JObject(
"product-class" -> JString(classOf[FunctionResource].getName),
"resourceType" -> JObject("object" -> JString(JarResource.getClass.getName)),
"uri" -> "file:///"))
// Converts BroadcastMode to JSON
assertJSON(
IdentityBroadcastMode,
JObject("object" -> JString(IdentityBroadcastMode.getClass.getName)))
// Converts CatalogTable to JSON
assertJSON(
CatalogTable(
TableIdentifier("table"),
CatalogTableType.MANAGED,
CatalogStorageFormat.empty,
StructType(StructField("a", IntegerType, true) :: Nil),
createTime = 0L),
JObject(
"product-class" -> classOf[CatalogTable].getName,
"identifier" -> JObject(
"product-class" -> classOf[TableIdentifier].getName,
"table" -> "table"
),
"tableType" -> JObject(
"product-class" -> classOf[CatalogTableType].getName,
"name" -> "MANAGED"
),
"storage" -> JObject(
"product-class" -> classOf[CatalogStorageFormat].getName,
"compressed" -> false,
"properties" -> JNull
),
"schema" -> JObject(
"type" -> "struct",
"fields" -> List(
JObject(
"name" -> "a",
"type" -> "integer",
"nullable" -> true,
"metadata" -> JObject(Nil)))),
"partitionColumnNames" -> List.empty[String],
"owner" -> "",
"createTime" -> 0,
"lastAccessTime" -> -1,
"tracksPartitionsInCatalog" -> false,
"properties" -> JNull,
"unsupportedFeatures" -> List.empty[String],
"schemaPreservesCase" -> JBool(true),
"ignoredProperties" -> JNull))
// For unknown case class, returns JNull.
val bigValue = new Array[Int](10000)
assertJSON(NameValue("name", bigValue), JNull)
// Converts Seq[TreeNode] to JSON recursively
assertJSON(
Seq(Literal(1), Literal(2)),
List(
List(
JObject(
"class" -> JString(classOf[Literal].getName),
"num-children" -> 0,
"value" -> "1",
"dataType" -> "integer")),
List(
JObject(
"class" -> JString(classOf[Literal].getName),
"num-children" -> 0,
"value" -> "2",
"dataType" -> "integer"))))
// Other Seq is converted to JNull, to reduce the risk of out of memory
assertJSON(Seq(1, 2, 3), JNull)
    // All Map types are converted to JNull, to reduce the risk of out of memory
assertJSON(Map("key" -> "value"), JNull)
// Unknown type is converted to JNull, to reduce the risk of out of memory
assertJSON(new Object {}, JNull)
// Convert all TreeNode children to JSON
assertJSON(
Union(Seq(JsonTestTreeNode("0"), JsonTestTreeNode("1"))),
List(
JObject(
"class" -> classOf[Union].getName,
"num-children" -> 2,
"children" -> List(0, 1)),
JObject(
"class" -> classOf[JsonTestTreeNode].getName,
"num-children" -> 0,
"arg" -> "0"),
JObject(
"class" -> classOf[JsonTestTreeNode].getName,
"num-children" -> 0,
"arg" -> "1")))
}
test("toJSON should not throws java.lang.StackOverflowError") {
val udf = ScalaUDF(SelfReferenceUDF(), BooleanType, Seq("col1".attr))
// Should not throw java.lang.StackOverflowError
udf.toJSON
}
private def compareJSON(leftJson: String, rightJson: String): Unit = {
val left = JsonMethods.parse(leftJson)
val right = JsonMethods.parse(rightJson)
assert(left == right)
}
}
|
setjet/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala
|
Scala
|
apache-2.0
| 18,003 |
package org.scalaide.ui.internal.preferences
import org.eclipse.jface.preference.FieldEditorPreferencePage
import org.eclipse.ui.IWorkbenchPreferencePage
import org.eclipse.ui.IWorkbench
import org.scalaide.core.IScalaPlugin
import org.eclipse.jface.preference.BooleanFieldEditor
/**
* Abstract base class for simple preference pages to avoid code duplication.
*/
abstract class BasicFieldEditorPreferencePage(description: String) extends FieldEditorPreferencePage with IWorkbenchPreferencePage {
setPreferenceStore(IScalaPlugin().getPreferenceStore)
setDescription(description)
  override def init(workbench: IWorkbench): Unit = ()
protected def addBooleanFieldEditors(editors: (String, String)*): Unit = {
for ((name, label) <- editors) {
addField(new BooleanFieldEditor(name, label, getFieldEditorParent))
}
}
}
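// Hedged usage sketch: a concrete page wiring two boolean preferences
// (preference names and labels below are illustrative).
//   class MyPage extends BasicFieldEditorPreferencePage("My settings") {
//     override def createFieldEditors(): Unit =
//       addBooleanFieldEditors("pref.a" -> "Enable A", "pref.b" -> "Enable B")
//   }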
|
Kwestor/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/preferences/BasicFieldEditorPreferencePage.scala
|
Scala
|
bsd-3-clause
| 839 |
package me.invkrh.raft.core
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.Random
import akka.actor.{ActorRef, Cancellable, Scheduler}
import me.invkrh.raft.message.RaftMessage
trait Timer {
protected var cancellable: Cancellable = _
def start(): Unit
def stop(): Unit = {
if (cancellable != null && !cancellable.isCancelled) {
cancellable.cancel()
}
}
def restart(): Unit = {
stop()
start()
}
}
class RandomizedTimer(min: FiniteDuration, max: FiniteDuration, event: RaftMessage)(
implicit scheduler: Scheduler,
executor: ExecutionContext,
target: ActorRef)
extends Timer {
def start(): Unit = {
require(target != null, "Timer target can not be null")
val rd = min.toMillis + Random.nextInt((max.toMillis - min.toMillis + 1).toInt)
cancellable = scheduler.scheduleOnce(rd milliseconds, target, event)
}
}
class PeriodicTimer(duration: FiniteDuration, event: RaftMessage)(
implicit scheduler: Scheduler,
executor: ExecutionContext,
target: ActorRef)
extends Timer {
def start(): Unit = {
require(target != null, "Timer target can not be null")
cancellable = scheduler.schedule(Duration.Zero, duration, target, event)
}
}
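// Hedged usage sketch: wiring a randomized election timeout inside an actor;
// `context`, `self` and `Election` are illustrative stand-ins.
//   implicit val scheduler: Scheduler = context.system.scheduler
//   implicit val ec: ExecutionContext = context.dispatcher
//   implicit val target: ActorRef = self
//   val electionTimer = new RandomizedTimer(150.millis, 300.millis, Election)
//   electionTimer.restart() // cancel any pending tick, then schedule a fresh random one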
|
invkrh/akka-raft
|
src/main/scala/me/invkrh/raft/core/Timer.scala
|
Scala
|
mit
| 1,297 |
package api
/**
* Helper for pagination.
*/
case class Page[+A](items: Seq[A], page: Int, size: Int, total: Long) {
def offset = (page - 1) * size + 1
}
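// Worked example: Page(items, page = 3, size = 10, total = 57).offset == 21,
// i.e. the 1-based index of the first item on page 3.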
|
davidgraig/foosball
|
server/app/api/Page.scala
|
Scala
|
mit
| 157 |
package mesosphere.marathon.core.externalvolume.impl.providers
import com.wix.accord._
import com.wix.accord.dsl._
import mesosphere.marathon.state._
import scala.util.Try
protected[providers] object OptionSupport {
import OptionLabelPatterns._
/** a validator to enforce that values conform to expectations of "labels" */
lazy val validLabel: Validator[String] = validator[String] { v =>
v should matchRegex(LabelRegex)
}
/** a validator to enforce that values parse to natural (whole, positive) numbers */
lazy val validNaturalNumber: Validator[String] = new Validator[String] {
override def apply(v: String): Result = {
val parsed: Try[Long] = Try(v.toLong)
if (parsed.isSuccess && parsed.get > 0) Success
else Failure(Set(RuleViolation(v, s"Expected a valid, positive integer instead of $v", None)))
}
}
/** a validator to enforce that values parse to booleans */
import mesosphere.marathon.api.v2.Validation.isTrue
lazy val validBoolean: Validator[String] = isTrue[String](s"Expected a valid boolean")(s =>
Try(s.toBoolean).getOrElse(false)
)
}
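// Hedged usage sketch: the validators can be applied directly.
//   validNaturalNumber("3")  // Success
//   validNaturalNumber("0")  // Failure: zero is not a positive integer
//   validBoolean("maybe")    // Failure: does not parse to a boolean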
|
ss75710541/marathon
|
src/main/scala/mesosphere/marathon/core/externalvolume/impl/providers/Helpers.scala
|
Scala
|
apache-2.0
| 1,140 |
/*
* Copyright 2016 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala.bson.codecs.macrocodecs
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.bson.codecs.configuration.{ CodecConfigurationException, CodecRegistries, CodecRegistry }
import org.bson.{ BsonReader, BsonType, BsonValue, BsonWriter }
import org.bson.codecs.{ Codec, DecoderContext, Encoder, EncoderContext }
import org.mongodb.scala.bson.BsonNull
/**
*
* @tparam T the case class type for the codec
* @since 2.0
*/
trait MacroCodec[T] extends Codec[T] {
/**
   * Creates a `Map[String, Class[_]]` mapping case class names to their classes.
*/
val caseClassesMap: Map[String, Class[_]]
/**
   * Creates a `Map[Class[_], Boolean]` mapping field types to whether they are case classes.
*/
val classToCaseClassMap: Map[Class[_], Boolean]
/**
   * A nested map from case class name to a map from field name to that field's type arguments.
*/
val classFieldTypeArgsMap: Map[String, Map[String, List[Class[_]]]]
/**
* The case class type for the codec
*/
val encoderClass: Class[T]
/**
* The `CodecRegistry` for use with the codec
*/
val codecRegistry: CodecRegistry
/**
* Creates a new instance of the case class with the provided data
*
* @param className the name of the class to be instantiated
* @param fieldsData the Map of data for the class
* @return the new instance of the class
*/
def getInstance(className: String, fieldsData: Map[String, Any]): T
/**
* The method that writes the data for the case class
*
* @param className the name of the current case class being written
* @param writer the `BsonWriter`
* @param value the value to the case class
* @param encoderContext the `EncoderContext`
*/
def writeCaseClassData(className: String, writer: BsonWriter, value: T, encoderContext: EncoderContext): Unit
/**
* The field used to save the class name when saving sealed case classes.
*/
val classFieldName = "_t"
lazy val hasClassFieldName: Boolean = caseClassesMap.size > 1
lazy val caseClassesMapInv: Map[Class[_], String] = caseClassesMap.map(_.swap)
protected val registry: CodecRegistry = CodecRegistries.fromRegistries(List(codecRegistry, CodecRegistries.fromCodecs(this)).asJava)
protected val unknownTypeArgs: List[Class[BsonValue]] = List[Class[BsonValue]](classOf[BsonValue])
protected val bsonNull = BsonNull()
override def encode(writer: BsonWriter, value: T, encoderContext: EncoderContext): Unit = writeValue(writer, value, encoderContext)
override def decode(reader: BsonReader, decoderContext: DecoderContext): T = {
val className = getClassname(reader, decoderContext)
val fieldTypeArgsMap = classFieldTypeArgsMap(className)
val map = mutable.Map[String, Any]()
reader.readStartDocument()
while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) {
val name = reader.readName
val typeArgs = if (name == classFieldName) List(classOf[String]) else fieldTypeArgsMap.getOrElse(name, unknownTypeArgs)
map += (name -> readValue(reader, decoderContext, typeArgs.head, typeArgs.tail, fieldTypeArgsMap))
}
reader.readEndDocument()
getInstance(className, map.toMap)
}
override def getEncoderClass: Class[T] = encoderClass
protected def getClassname(reader: BsonReader, decoderContext: DecoderContext): String = {
if (hasClassFieldName) {
// Find the class name
reader.mark()
reader.readStartDocument()
var optionalClassName: Option[String] = None
while (optionalClassName.isEmpty && (reader.readBsonType ne BsonType.END_OF_DOCUMENT)) {
val name = reader.readName
if (name == classFieldName) {
optionalClassName = Some(codecRegistry.get(classOf[String]).decode(reader, decoderContext))
} else {
reader.skipValue()
}
}
reader.reset()
// Validate the class name
if (optionalClassName.isEmpty) {
throw new CodecConfigurationException(s"Could not decode sealed case class. Missing '$classFieldName' field.")
}
val className = optionalClassName.get
if (!caseClassesMap.contains(className)) {
throw new CodecConfigurationException(s"Could not decode sealed case class, unknown class $className.")
}
className
} else {
caseClassesMap.head._1
}
}
protected def writeClassFieldName(writer: BsonWriter, className: String, encoderContext: EncoderContext): Unit = {
if (hasClassFieldName) {
writer.writeName(classFieldName)
this.writeValue(writer, className, encoderContext)
}
}
protected def writeValue[V](writer: BsonWriter, value: V, encoderContext: EncoderContext): Unit = {
val clazz = value.getClass
caseClassesMapInv.get(clazz) match {
case Some(className) => writeCaseClassData(className: String, writer: BsonWriter, value.asInstanceOf[T], encoderContext: EncoderContext)
case None =>
val codec = registry.get(clazz).asInstanceOf[Encoder[V]]
encoderContext.encodeWithChildContext(codec, writer, value)
}
}
protected def readValue[V](reader: BsonReader, decoderContext: DecoderContext, clazz: Class[V], typeArgs: List[Class[_]],
fieldTypeArgsMap: Map[String, List[Class[_]]]): V = {
val currentType = reader.getCurrentBsonType
currentType match {
case BsonType.DOCUMENT => readDocument(reader, decoderContext, clazz, typeArgs, fieldTypeArgsMap)
case BsonType.ARRAY => readArray(reader, decoderContext, clazz, typeArgs, fieldTypeArgsMap)
case BsonType.NULL =>
reader.readNull()
null.asInstanceOf[V] // scalastyle:ignore
case _ => registry.get(clazz).decode(reader, decoderContext)
}
}
protected def readArray[V](reader: BsonReader, decoderContext: DecoderContext, clazz: Class[V], typeArgs: List[Class[_]],
fieldTypeArgsMap: Map[String, List[Class[_]]]): V = {
reader.readStartArray()
val list = mutable.ListBuffer[Any]()
while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) {
list.append(readValue(reader, decoderContext, typeArgs.head, typeArgs.tail, fieldTypeArgsMap))
}
reader.readEndArray()
list.toList.asInstanceOf[V]
}
protected def readDocument[V](reader: BsonReader, decoderContext: DecoderContext, clazz: Class[V], typeArgs: List[Class[_]],
fieldTypeArgsMap: Map[String, List[Class[_]]]): V = {
val isCaseClass = classToCaseClassMap.getOrElse(clazz, false)
if (isCaseClass) {
registry.get(clazz).decode(reader, decoderContext)
} else {
val map = mutable.Map[String, Any]()
reader.readStartDocument()
while (reader.readBsonType ne BsonType.END_OF_DOCUMENT) {
val name = reader.readName
val fieldClazzTypeArgs = fieldTypeArgsMap.getOrElse(name, typeArgs)
map += (name -> readValue(reader, decoderContext, fieldClazzTypeArgs.head, fieldClazzTypeArgs.tail, fieldTypeArgsMap))
}
reader.readEndDocument()
map.toMap.asInstanceOf[V]
}
}
}
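// Hedged usage sketch: concrete MacroCodec instances are normally generated at
// compile time rather than hand-written; a generated codec (`personCodec` is
// hypothetical) can then be combined with a base registry via the helpers
// imported above.
//   val registry: CodecRegistry =
//     CodecRegistries.fromRegistries(CodecRegistries.fromCodecs(personCodec), baseRegistry)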
|
jCalamari/mongo-scala-driver
|
bson/src/main/scala/org/mongodb/scala/bson/codecs/macrocodecs/MacroCodec.scala
|
Scala
|
apache-2.0
| 7,646 |
package com.sksamuel.elastic4s.searches
import com.sksamuel.exts.OptionImplicits._
import scala.concurrent.duration.FiniteDuration
trait ScrollApi {
def searchScroll(id: String, keepAlive: String): SearchScrollDefinition = SearchScrollDefinition(id).keepAlive(keepAlive)
def searchScroll(id: String): SearchScrollDefinition = SearchScrollDefinition(id)
def clearScroll(first: String, rest: String*): ClearScrollDefinition = clearScroll(first +: rest)
def clearScroll(ids: Iterable[String]): ClearScrollDefinition = ClearScrollDefinition(ids.toSeq)
}
case class SearchScrollDefinition(id: String,
keepAlive: Option[String] = None) {
def keepAlive(keepAlive: String): SearchScrollDefinition = copy(keepAlive = keepAlive.some)
def keepAlive(duration: FiniteDuration): SearchScrollDefinition = copy(keepAlive = Some(duration.toSeconds + "s"))
}
case class ClearScrollDefinition(ids: Seq[String])
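// Hedged usage sketch: continuing and then releasing a scroll (the id is illustrative).
//   searchScroll("DXF1ZXJ5QW5k...", "1m")            // keep the scroll context alive for one minute
//   searchScroll("DXF1ZXJ5QW5k...").keepAlive(30.seconds)
//   clearScroll("DXF1ZXJ5QW5k...")                   // free the server-side scroll state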
|
FabienPennequin/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/ScrollApi.scala
|
Scala
|
apache-2.0
| 946 |
/*
* Copyright 2015 IBM Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.couchdb.api
import com.ibm.couchdb.Res
import com.ibm.couchdb.core.Client
import org.http4s.Status
import scalaz.concurrent.Task
class Databases(client: Client) {
def get(name: String): Task[Res.DbInfo] = {
client.get[Res.DbInfo](s"/$name", Status.Ok)
}
def getAll: Task[Seq[String]] = {
client.get[Seq[String]]("/_all_dbs", Status.Ok)
}
def create(name: String): Task[Res.Ok] = {
client.put[Res.Ok](s"/$name", Status.Created)
}
def delete(name: String): Task[Res.Ok] = {
client.delete[Res.Ok](s"/$name", Status.Ok)
}
}
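// Hedged usage sketch: each operation returns a scalaz Task, so nothing runs until
// the Task is executed (client construction elided; names are illustrative).
//   val databases = new Databases(client)
//   val setup: Task[Res.Ok] = databases.create("sessions")
//   setup.unsafePerformSync // effects happen only here (`run` in older scalaz versions)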
|
mrmechko/couchdb-scala
|
src/main/scala/com/ibm/couchdb/api/Databases.scala
|
Scala
|
apache-2.0
| 1,172 |
class Z protected (x: String) {
def this() = this(")")
new /* line: 1 */Z("")
}
object Z {
new /* line: 1 */Z("")
}
class G extends /* line: 1 */Z("") {
new /* line: 2, applicable: false, name: this */Z("")
}
object G {
new /* line: 2, applicable: false, name: this */Z("")
}
|
ilinum/intellij-scala
|
testdata/resolve2/bug2/SCL3539.scala
|
Scala
|
apache-2.0
| 288 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, AttributeSet, CurrentDate, CurrentTimestamp, MonotonicallyIncreasingID}
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.planning.ExtractEquiJoinKeys
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes
import org.apache.spark.sql.streaming.OutputMode
/**
* Analyzes the presence of unsupported operations in a logical plan.
*/
object UnsupportedOperationChecker {
def checkForBatch(plan: LogicalPlan): Unit = {
plan.foreachUp {
case p if p.isStreaming =>
throwError("Queries with streaming sources must be executed with writeStream.start()")(p)
case _ =>
}
}
def checkForStreaming(plan: LogicalPlan, outputMode: OutputMode): Unit = {
if (!plan.isStreaming) {
throwError(
"Queries without streaming sources cannot be executed with writeStream.start()")(plan)
}
/** Collect all the streaming aggregates in a sub plan */
def collectStreamingAggregates(subplan: LogicalPlan): Seq[Aggregate] = {
subplan.collect { case a: Aggregate if a.isStreaming => a }
}
val mapGroupsWithStates = plan.collect {
case f: FlatMapGroupsWithState if f.isStreaming && f.isMapGroupsWithState => f
}
// Disallow multiple `mapGroupsWithState`s.
if (mapGroupsWithStates.size >= 2) {
throwError(
"Multiple mapGroupsWithStates are not supported on a streaming DataFrames/Datasets")(plan)
}
val flatMapGroupsWithStates = plan.collect {
case f: FlatMapGroupsWithState if f.isStreaming && !f.isMapGroupsWithState => f
}
// Disallow mixing `mapGroupsWithState`s and `flatMapGroupsWithState`s
if (mapGroupsWithStates.nonEmpty && flatMapGroupsWithStates.nonEmpty) {
throwError(
"Mixing mapGroupsWithStates and flatMapGroupsWithStates are not supported on a " +
"streaming DataFrames/Datasets")(plan)
}
// Only allow multiple `FlatMapGroupsWithState(Append)`s in append mode.
if (flatMapGroupsWithStates.size >= 2 && (
outputMode != InternalOutputModes.Append ||
flatMapGroupsWithStates.exists(_.outputMode != InternalOutputModes.Append)
)) {
throwError(
"Multiple flatMapGroupsWithStates are not supported when they are not all in append mode" +
" or the output mode is not append on a streaming DataFrames/Datasets")(plan)
}
// Disallow multiple streaming aggregations
val aggregates = collectStreamingAggregates(plan)
if (aggregates.size > 1) {
throwError(
"Multiple streaming aggregations are not supported with " +
"streaming DataFrames/Datasets")(plan)
}
    // Disallow some output modes
outputMode match {
case InternalOutputModes.Append if aggregates.nonEmpty =>
val aggregate = aggregates.head
// Find any attributes that are associated with an eventTime watermark.
val watermarkAttributes = aggregate.groupingExpressions.collect {
case a: Attribute if a.metadata.contains(EventTimeWatermark.delayKey) => a
}
// We can append rows to the sink once the group is under the watermark. Without this
// watermark a group is never "finished" so we would never output anything.
if (watermarkAttributes.isEmpty) {
throwError(
s"$outputMode output mode not supported when there are streaming aggregations on " +
s"streaming DataFrames/DataSets without watermark")(plan)
}
case InternalOutputModes.Complete if aggregates.isEmpty =>
throwError(
s"$outputMode output mode not supported when there are no streaming aggregations on " +
s"streaming DataFrames/Datasets")(plan)
case _ =>
}
/**
* Whether the subplan will contain complete data or incremental data in every incremental
* execution. Some operations may be allowed only when the child logical plan gives complete
* data.
*/
def containsCompleteData(subplan: LogicalPlan): Boolean = {
val aggs = subplan.collect { case a@Aggregate(_, _, _) if a.isStreaming => a }
// Either the subplan has no streaming source, or it has aggregation with Complete mode
!subplan.isStreaming || (aggs.nonEmpty && outputMode == InternalOutputModes.Complete)
}
def checkUnsupportedExpressions(implicit operator: LogicalPlan): Unit = {
val unsupportedExprs = operator.expressions.flatMap(_.collect {
case m: MonotonicallyIncreasingID => m
}).distinct
if (unsupportedExprs.nonEmpty) {
throwError("Expression(s): " + unsupportedExprs.map(_.sql).mkString(", ") +
" is not supported with streaming DataFrames/Datasets")
}
}
plan.foreachUp { implicit subPlan =>
// Operations that cannot exists anywhere in a streaming plan
subPlan match {
case Aggregate(_, aggregateExpressions, child) =>
val distinctAggExprs = aggregateExpressions.flatMap { expr =>
expr.collect { case ae: AggregateExpression if ae.isDistinct => ae }
}
throwErrorIf(
child.isStreaming && distinctAggExprs.nonEmpty,
"Distinct aggregations are not supported on streaming DataFrames/Datasets. Consider " +
"using approx_count_distinct() instead.")
case _: Command =>
throwError("Commands like CreateTable*, AlterTable*, Show* are not supported with " +
"streaming DataFrames/Datasets")
case _: InsertIntoDir =>
throwError("InsertIntoDir is not supported with streaming DataFrames/Datasets")
// mapGroupsWithState and flatMapGroupsWithState
case m: FlatMapGroupsWithState if m.isStreaming =>
// Check compatibility with output modes and aggregations in query
val aggsAfterFlatMapGroups = collectStreamingAggregates(plan)
if (m.isMapGroupsWithState) { // check mapGroupsWithState
// allowed only in update query output mode and without aggregation
if (aggsAfterFlatMapGroups.nonEmpty) {
throwError(
"mapGroupsWithState is not supported with aggregation " +
"on a streaming DataFrame/Dataset")
} else if (outputMode != InternalOutputModes.Update) {
throwError(
"mapGroupsWithState is not supported with " +
s"$outputMode output mode on a streaming DataFrame/Dataset")
}
          } else { // check flatMapGroupsWithState
if (aggsAfterFlatMapGroups.isEmpty) {
// flatMapGroupsWithState without aggregation: operation's output mode must
// match query output mode
m.outputMode match {
case InternalOutputModes.Update if outputMode != InternalOutputModes.Update =>
throwError(
"flatMapGroupsWithState in update mode is not supported with " +
s"$outputMode output mode on a streaming DataFrame/Dataset")
case InternalOutputModes.Append if outputMode != InternalOutputModes.Append =>
throwError(
"flatMapGroupsWithState in append mode is not supported with " +
s"$outputMode output mode on a streaming DataFrame/Dataset")
case _ =>
}
} else {
// flatMapGroupsWithState with aggregation: update operation mode not allowed, and
// *groupsWithState after aggregation not allowed
if (m.outputMode == InternalOutputModes.Update) {
throwError(
"flatMapGroupsWithState in update mode is not supported with " +
"aggregation on a streaming DataFrame/Dataset")
} else if (collectStreamingAggregates(m).nonEmpty) {
throwError(
"flatMapGroupsWithState in append mode is not supported after " +
s"aggregation on a streaming DataFrame/Dataset")
}
}
}
// Check compatibility with timeout configs
if (m.timeout == EventTimeTimeout) {
// With event time timeout, watermark must be defined.
val watermarkAttributes = m.child.output.collect {
case a: Attribute if a.metadata.contains(EventTimeWatermark.delayKey) => a
}
if (watermarkAttributes.isEmpty) {
throwError(
"Watermark must be specified in the query using " +
"'[Dataset/DataFrame].withWatermark()' for using event-time timeout in a " +
"[map|flatMap]GroupsWithState. Event-time timeout not supported without " +
"watermark.")(plan)
}
}
case d: Deduplicate if collectStreamingAggregates(d).nonEmpty =>
throwError("dropDuplicates is not supported after aggregation on a " +
"streaming DataFrame/Dataset")
case Join(left, right, joinType, condition) =>
joinType match {
case _: InnerLike =>
if (left.isStreaming && right.isStreaming &&
outputMode != InternalOutputModes.Append) {
throwError("Inner join between two streaming DataFrames/Datasets is not supported" +
s" in ${outputMode} output mode, only in Append output mode")
}
case FullOuter =>
if (left.isStreaming || right.isStreaming) {
throwError("Full outer joins with streaming DataFrames/Datasets are not supported")
}
case LeftSemi | LeftAnti =>
if (right.isStreaming) {
throwError("Left semi/anti joins with a streaming DataFrame/Dataset " +
"on the right are not supported")
}
// We support streaming left outer joins with static on the right always, and with
// stream on both sides under the appropriate conditions.
case LeftOuter =>
if (!left.isStreaming && right.isStreaming) {
throwError("Left outer join with a streaming DataFrame/Dataset " +
"on the right and a static DataFrame/Dataset on the left is not supported")
} else if (left.isStreaming && right.isStreaming) {
val watermarkInJoinKeys = StreamingJoinHelper.isWatermarkInJoinKeys(subPlan)
val hasValidWatermarkRange =
StreamingJoinHelper.getStateValueWatermark(
left.outputSet, right.outputSet, condition, Some(1000000)).isDefined
if (!watermarkInJoinKeys && !hasValidWatermarkRange) {
throwError("Stream-stream outer join between two streaming DataFrame/Datasets " +
"is not supported without a watermark in the join keys, or a watermark on " +
"the nullable side and an appropriate range condition")
}
}
// We support streaming right outer joins with static on the left always, and with
// stream on both sides under the appropriate conditions.
case RightOuter =>
if (left.isStreaming && !right.isStreaming) {
throwError("Right outer join with a streaming DataFrame/Dataset on the left and " +
"a static DataFrame/DataSet on the right not supported")
} else if (left.isStreaming && right.isStreaming) {
val isWatermarkInJoinKeys = StreamingJoinHelper.isWatermarkInJoinKeys(subPlan)
// Check if the nullable side has a watermark, and there's a range condition which
// implies a state value watermark on the first side.
val hasValidWatermarkRange =
StreamingJoinHelper.getStateValueWatermark(
right.outputSet, left.outputSet, condition, Some(1000000)).isDefined
if (!isWatermarkInJoinKeys && !hasValidWatermarkRange) {
throwError("Stream-stream outer join between two streaming DataFrame/Datasets " +
"is not supported without a watermark in the join keys, or a watermark on " +
"the nullable side and an appropriate range condition")
}
}
case NaturalJoin(_) | UsingJoin(_, _) =>
// They should not appear in an analyzed plan.
case _ =>
throwError(s"Join type $joinType is not supported with streaming DataFrame/Dataset")
}
case c: CoGroup if c.children.exists(_.isStreaming) =>
throwError("CoGrouping with a streaming DataFrame/Dataset is not supported")
case u: Union if u.children.map(_.isStreaming).distinct.size == 2 =>
throwError("Union between streaming and batch DataFrames/Datasets is not supported")
case Except(left, right) if right.isStreaming =>
throwError("Except on a streaming DataFrame/Dataset on the right is not supported")
case Intersect(left, right) if left.isStreaming && right.isStreaming =>
throwError("Intersect between two streaming DataFrames/Datasets is not supported")
case GroupingSets(_, _, child, _) if child.isStreaming =>
throwError("GroupingSets is not supported on streaming DataFrames/Datasets")
case GlobalLimit(_, _) | LocalLimit(_, _) if subPlan.children.forall(_.isStreaming) =>
throwError("Limits are not supported on streaming DataFrames/Datasets")
case Sort(_, _, _) if !containsCompleteData(subPlan) =>
throwError("Sorting is not supported on streaming DataFrames/Datasets, unless it is on " +
"aggregated DataFrame/Dataset in Complete output mode")
case Sample(_, _, _, _, child) if child.isStreaming =>
throwError("Sampling is not supported on streaming DataFrames/Datasets")
case Window(_, _, _, child) if child.isStreaming =>
throwError("Non-time-based windows are not supported on streaming DataFrames/Datasets")
case ReturnAnswer(child) if child.isStreaming =>
throwError("Cannot return immediate result on streaming DataFrames/Dataset. Queries " +
"with streaming DataFrames/Datasets must be executed with writeStream.start().")
case _ =>
}
// Check if there are unsupported expressions in streaming query plan.
checkUnsupportedExpressions(subPlan)
}
}
def checkForContinuous(plan: LogicalPlan, outputMode: OutputMode): Unit = {
checkForStreaming(plan, outputMode)
plan.foreachUp { implicit subPlan =>
subPlan match {
case (_: Project | _: Filter | _: MapElements | _: MapPartitions |
_: DeserializeToObject | _: SerializeFromObject) =>
case node if node.nodeName == "StreamingRelationV2" =>
case node =>
throwError(s"Continuous processing does not support ${node.nodeName} operations.")
}
subPlan.expressions.foreach { e =>
if (e.collectLeaves().exists {
case (_: CurrentTimestamp | _: CurrentDate) => true
case _ => false
}) {
throwError(s"Continuous processing does not support current time operations.")
}
}
}
}
private def throwErrorIf(
condition: Boolean,
msg: String)(implicit operator: LogicalPlan): Unit = {
if (condition) {
throwError(msg)
}
}
private def throwError(msg: String)(implicit operator: LogicalPlan): Nothing = {
throw new AnalysisException(
msg, operator.origin.line, operator.origin.startPosition, Some(operator))
}
}
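// Hedged usage sketch: validating an analyzed plan before starting a streaming query
// (plan construction elided; OutputMode.Append() is the public output-mode factory).
//   UnsupportedOperationChecker.checkForStreaming(analyzedPlan, OutputMode.Append())
//   // throws AnalysisException pointing at the offending operator on violation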
|
ioana-delaney/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/UnsupportedOperationChecker.scala
|
Scala
|
apache-2.0
| 16,945 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.coders.instances
import com.spotify.scio.coders.Coder
import com.twitter.algebird.{BF, Batched, CMS, Moments, TopK}
trait AlgebirdCoders {
implicit def cmsCoder[K]: Coder[CMS[K]] = Coder.kryo
implicit def bfCoder[K]: Coder[BF[K]] = Coder.kryo
implicit def topKCoder[K]: Coder[TopK[K]] = Coder.kryo
implicit def batchedCoder[U]: Coder[Batched[U]] = Coder.kryo
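  // Moments is round-tripped through a tuple of its five defining statistics
  // (m0D, m1, m2, m3, m4) via xmap rather than falling back to kryo.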
implicit def momentsCoder[U]: Coder[Moments] =
Coder.xmap(Coder[(Double, Double, Double, Double, Double)])(
{ case (m0D, m1, m2, m3, m4) => new Moments(m0D, m1, m2, m3, m4) },
m => (m.m0D, m.m1, m.m2, m.m3, m.m4)
)
}
|
spotify/scio
|
scio-core/src/main/scala/com/spotify/scio/coders/instances/AlgebirdCoders.scala
|
Scala
|
apache-2.0
| 1,233 |
package monocle.std
import monocle.TestUtil._
import monocle.function._
import monocle.law.{LensLaws, OptionalLaws, PrismLaws, TraversalLaws}
import org.specs2.scalaz.Spec
class MapSpec extends Spec {
checkAll("at Map", LensLaws(at[Map[Int, String], Int, String](2)))
checkAll("each Map", TraversalLaws(each[Map[Int, String], String]))
checkAll("empty Map", PrismLaws(empty[Map[Int, String]]))
checkAll("filterIndex Map", TraversalLaws(filterIndex[Map[Int, Char], Int, Char](_ % 2 == 0)))
checkAll("index Map", OptionalLaws(index[Map[Int, String], Int, String](3)))
}
|
CapeSepias/Monocle
|
test/src/test/scala/monocle/std/MapSpec.scala
|
Scala
|
mit
| 586 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.util
import scala.util.Try
object HtmlUtil {
def unescapeHtml(escaped: String): Option[String] = {
Try {
val result = escaped.foldLeft[(String, Option[String])](("", None)) {
case ((acc, escapedElemAcc), c) =>
(c, escapedElemAcc) match {
case ('&', None) =>
(acc, Some(""))
case (_, None) =>
(acc + c, None)
case ('&', Some(_)) =>
throw new IllegalArgumentException("nested escape sequences not supported")
case (';', Some(escapedElem)) =>
(acc + unescapeMap(escapedElem), None)
case (_, Some(incompleteEscapedElem)) =>
(acc, Some(incompleteEscapedElem + c))
}
}
result match {
case (escaped, None) =>
escaped
case _ =>
throw new IllegalArgumentException("unfinished escape sequence not supported")
}
}.toOption
}
  // Minimal unescape map based on scala.xml.Utility.unescape(); should be enough for most HTML
private val unescapeMap = Map(
"lt" -> '<',
"gt" -> '>',
"amp" -> '&',
"quot" -> '"',
"apos" -> '\\''
)
}
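// Examples of the unescape behaviour:
//   unescapeHtml("a &lt; b") == Some("a < b")
//   unescapeHtml("broken &lt") == None  // unfinished escape sequence
//   unescapeHtml("a &&amp; b") == None  // '&' inside an escape counts as nesting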
|
VlachJosef/ensime-server
|
core/src/main/scala/org/ensime/util/HtmlUtil.scala
|
Scala
|
gpl-3.0
| 1,319 |
val l1tmp = opts.string("srcLang","The source language")
val l2tmp = opts.string("trgLang","The target language")
val splitSize = opts.intValue("l","The number of lines for each split (<= 0 for no split)",-1)
val resume = opts.flag("resume","Resume based on previous state")
val mert = !opts.flag("nomert","Do not tune using MERT")
val clean = !opts.flag("noclean","Do not clean on completion")
opts.restAsSystemProperties
opts.verify
val l1 = List(l1tmp,l2tmp).min
val l2 = List(l1tmp,l2tmp).max
val WORKING = System.getProperty("working",System.getProperty("user.dir") + "/working/" + l1 + "-" + l2)
val heads = System.getProperty("heads","4").toInt
val MOSES_DIR = System.getProperty("mosesDir",System.getProperty("user.home")+"/moses")
val CDEC_DIR = System.getProperty("cdecDir",System.getProperty("user.home")+"/cdec")
val doFilter = System.getProperty("filter","true").toBoolean
if(!resume) {
  mkdir(WORKING).p
gunzip("corpus/corpus-%s-%s.%s.gz" % (l1,l2,l1))
Do(MOSES_DIR+"/mosesdecoder/scripts/tokenizer/tokenizer.perl","-l",l1) < ("corpus/corpus-%s-%s.%s" % (l1,l2,l1)) > (WORKING + "/corpus-%s-%s.tok.%s" % (l1,l2,l1))
rm("corpus/corpus-%s-%s.%s" % (l1,l2,l1))
checkExists(MOSES_DIR+"/truecaser/truecase."+l1)
Do(MOSES_DIR+"/mosesdecoder/scripts/recaser/truecase.perl","--model",MOSES_DIR+"/truecaser/truecase."+l1) < (WORKING +
"/corpus-%s-%s.tok.%s" % (l1,l2,l1)) > (WORKING + "/corpus-%s-%s.true.%s" % (l1,l2,l1))
gunzip("corpus/corpus-%s-%s.%s.gz" % (l1,l2,l2))
Do(MOSES_DIR+"/mosesdecoder/scripts/tokenizer/tokenizer.perl","-l",l2) < ("corpus/corpus-%s-%s.%s" % (l1,l2,l2)) > (WORKING + "/corpus-%s-%s.tok.%s" % (l1,l2,l2))
rm("corpus/corpus-%s-%s.%s" % (l1,l2,l2))
checkExists(MOSES_DIR+"/truecaser/truecase."+l2)
Do(MOSES_DIR+"/mosesdecoder/scripts/recaser/truecase.perl","--model",MOSES_DIR+"/truecaser/truecase."+l2) < (WORKING +
"/corpus-%s-%s.tok.%s" % (l1,l2,l2)) > (WORKING + "/corpus-%s-%s.true.%s" % (l1,l2,l2))
Do(MOSES_DIR+"/mosesdecoder/scripts/training/clean-corpus-n.perl",
WORKING + "/corpus-%s-%s.true" % (l1,l2),
l1,l2,WORKING + "/corpus-%s-%s.clean" % (l1,l2),"1","80")
}
val WORKING_CORPUS = WORKING + "/corpus-%s-%s" % (l1,l2)
def buildLM(lm : String) {
if(!resume || !new File(WORKING + "/lm/"+lm).exists) {
mkdir(WORKING + "/lm").p
Do(MOSES_DIR+"/irstlm/bin/add-start-end.sh") < (WORKING_CORPUS + ".clean." + lm) > (WORKING_CORPUS + ".sb." + lm)
rm(WORKING + "/lm/" + lm + ".tmp").ifexists
Do(MOSES_DIR+"/irstlm/bin/build-lm.sh",
"-i",WORKING_CORPUS + ".sb." + lm,
"-t","lm_tmp_directory",
"-p","-s","improved-kneser-ney",
"-o",WORKING + "/lm/" + lm+".tmp").env("IRSTLM",MOSES_DIR+"/irstlm")
Do (MOSES_DIR+"/irstlm/bin/compile-lm",
"--text","yes",
WORKING + "/lm/"+lm+".tmp.gz",
WORKING + "/lm/"+lm+".tmp")
subTask("scripts/remove-zeros.scala",
WORKING + "/lm/"+lm+".tmp",
WORKING + "/lm/"+lm)
Do(MOSES_DIR+"/mosesdecoder/bin/build_binary",
WORKING + "/lm/"+lm,
WORKING + "/lm/"+lm+".bin")
if(clean) {
rm(WORKING + "/lm/" + lm + ".tmp").ifexists
rm(WORKING + "/lm/" + lm + ".tmp.gz").ifexists
}
}
}
buildLM(l1)
buildLM(l2)
def buildTranslationModel(WORKING : String, WORKING_CORPUS : String, LM_DIR : String) = {
if(!resume || !new File(WORKING+"/imodel/phrase-table-filtered.gz").exists) {
mkdir(WORKING + "/model").p
mkdir(WORKING + "/imodel").p
//block("Alignment") {
Do(CDEC_DIR+"/corpus/paste-files.pl",
WORKING_CORPUS + "." + l1,
WORKING_CORPUS + "." + l2) > (WORKING_CORPUS + ".train")
Do(CDEC_DIR+"/word-aligner/fast_align",
"-i",WORKING_CORPUS + ".train",
"-d","-v","-o") > (WORKING + "/%s-%s.fwd_align" % (l1,l2))
Do(CDEC_DIR+"/word-aligner/fast_align",
"-i",WORKING_CORPUS + ".train",
"-d","-v","-o","-r") > (WORKING + "/%s-%s.rev_align" % (l1,l2))
Do(CDEC_DIR+"/utils/atools",
"-c","grow-diag-final-and",
"-i",WORKING + "/%s-%s.fwd_align" % (l1,l2),
"-j",WORKING + "/%s-%s.rev_align" % (l1,l2)) > (WORKING + "/model/aligned.grow-diag-final-and")
Do(CDEC_DIR+"/utils/atools",
"-c","invert",
"-i",WORKING + "/%s-%s.fwd_align" % (l1,l2)) > (WORKING + "/imodel/fwd_align")
Do(CDEC_DIR+"/utils/atools",
"-c","invert",
"-i",WORKING + "/%s-%s.rev_align" % (l1,l2)) > (WORKING + "/imodel/rev_align")
Do(CDEC_DIR+"/utils/atools",
"-c","grow-diag-final-and",
"-i",WORKING + "/imodel/fwd_align" % (l1,l2),
"-j",WORKING + "/imodel/rev_align" % (l1,l2)) > (WORKING + "/imodel/aligned.grow-diag-final-and")
//}
//block("Phrase table generation") {
val lmFile1 = new File(LM_DIR+l1).getCanonicalPath()
Do(MOSES_DIR+"/mosesdecoder/scripts/training/train-model.perl",
"-do-steps","4-9","-root-dir",WORKING,
"-corpus",WORKING_CORPUS,
"-f",l1,"-e",l2,
"-alignment","grow-diag-final-and",
"-reordering","msd-bidirectional-fe",
"-lm","0:3:"+lmFile1+":8",
"-external-bin-dir",MOSES_DIR + "/tools")
val lmFile2 = new File(LM_DIR+l2).getCanonicalPath()
Do(MOSES_DIR+"/mosesdecoder/scripts/training/train-model.perl",
"-do-steps","4-9","-root-dir",WORKING,
"-corpus",WORKING_CORPUS,
"-f",l2,"-e",l1,
"-alignment","grow-diag-final-and",
"-reordering","msd-bidirectional-fe",
"-lm","0:3:"+lmFile2+":8",
"-model-dir",WORKING + "/imodel",
"-external-bin-dir",MOSES_DIR + "/tools")
val N = if(splitSize <= 0) {
500000
} else {
splitSize
}
if(doFilter) {
subTask("scripts/mt/simple-entropy.scala","20",
WORKING+"/model/phrase-table.gz",WORKING+"/model/phrase-table-filtered.gz")
subTask("scripts/mt/simple-entropy.scala","20",
WORKING+"/imodel/phrase-table.gz",WORKING+"/imodel/phrase-table-filtered.gz")
}
}
}
if(splitSize <= 0) {
buildTranslationModel(WORKING,WORKING_CORPUS+".clean",WORKING + "/lm/")
} else {
val nSplits = (wc("corpus/corpus-%s-%s.%s.gz" % (l1,l2,l1)).toDouble / splitSize).ceil.toInt
split(splitSize,WORKING_CORPUS + ".clean." + l1) { i =>
WORKING + "/" + i + "/corpus."+l1
}
split(splitSize,WORKING_CORPUS + ".clean." + l2) { i =>
WORKING + "/" + i + "/corpus."+l2
}
namedTask("Preparing splits") {
val l = (WORKING + "/").ls filter (_.matches("\\\\d+"))
val groups = if(l.size >= heads) {
(l grouped l.size/heads).toList
} else {
Nil
}
val tail = l.size - l.size / heads * heads
println("===SHARDS===")
for(i <- 1 to heads) {
val shards = (groups(i-1) map (WORKING + "/" + _) mkString (" ")) + (
if(i % heads <= tail && i != heads) {
" " + WORKING + "/" + l(l.size - (i % heads))
} else {
""
}
)
set("HEAD"+i,shards)
println("HEAD " + i + ": " + shards)
}
}
threadPool(heads,"Build Translation Model")( i => {
for(splitWorking <- get("HEAD"+i).split(" ")) {
buildTranslationModel(splitWorking, splitWorking + "/corpus", WORKING + "/lm/")
}
})
mkdir(WORKING + "/model").p
mkdir(WORKING + "/imodel").p
cat(find(WORKING)(file => {
if(doFilter) {
file.getPath() endsWith "/model/phrase-table-filtered.gz"
} else {
file.getPath() endsWith "/model/phrase-table.gz"
}
}).apply) > (WORKING + "/model/phrase-table-all")
sort(WORKING + "/model/phrase-table-all") > (WORKING + "/model/phrase-table-sorted")
gzip(WORKING + "/model/phrase-table-sorted")
cat(find(WORKING)(file => {
if(doFilter) {
file.getPath() endsWith "/imodel/phrase-table-filtered.gz"
} else {
file.getPath() endsWith "/imodel/phrase-table.gz"
}
}).apply) > (WORKING + "/imodel/phrase-table-all")
sort(WORKING + "/imodel/phrase-table-all") > (WORKING + "/imodel/phrase-table-sorted")
gzip(WORKING + "/imodel/phrase-table-sorted")
cat(find(WORKING)(file => {
file.getPath() endsWith "/model/lex.e2f"
}).apply) > (WORKING + "/model/lex.e2f.tmp")
cat(find(WORKING)(file => {
file.getPath() endsWith "/model/lex.f2e"
}).apply) > (WORKING + "/model/lex.f2e.tmp")
cat(find(WORKING)(file => {
file.getPath() endsWith "/imodel/lex.e2f"
}).apply) > (WORKING + "/imodel/lex.e2f.tmp")
cat(find(WORKING)(file => {
file.getPath() endsWith "/imodel/lex.f2e"
}).apply) > (WORKING + "/imodel/lex.f2e.tmp")
cat(find(WORKING)(file => {
file.getPath() endsWith "/model/reordering-table.wbe-msd-bidirectional-fe.gz"
}).apply) > (WORKING + "/model/reordering-table")
cat(find(WORKING)(file => {
file.getPath() endsWith "/imodel/reordering-table.wbe-msd-bidirectional-fe.gz"
}).apply) > (WORKING + "/imodel/reordering-table")
sort(WORKING + "/model/reordering-table") > (WORKING + "/model/reordering-table.sorted")
sort(WORKING + "/imodel/reordering-table") > (WORKING + "/imodel/reordering-table.sorted")
subTask("scripts/merge-lex.scala",WORKING + "/model/lex.e2f.tmp", nSplits.toString, WORKING + "/model/lex.e2f")
subTask("scripts/merge-lex.scala",WORKING + "/model/lex.f2e.tmp", nSplits.toString, WORKING + "/model/lex.f2e")
subTask("scripts/merge-lex.scala",WORKING + "/imodel/lex.e2f.tmp", nSplits.toString, WORKING + "/imodel/lex.e2f")
subTask("scripts/merge-lex.scala",WORKING + "/imodel/lex.f2e.tmp", nSplits.toString, WORKING + "/imodel/lex.f2e")
subTask("scripts/merge-pts.scala",
WORKING + "/model/phrase-table-sorted",
WORKING + "/model/lex.e2f",
WORKING + "/model/lex.f2e",
WORKING + "/model/phrase-table-filtered.gz")
subTask("scripts/merge-pts.scala",
WORKING + "/imodel/phrase-table-sorted",
WORKING + "/imodel/lex.e2f",
WORKING + "/imodel/lex.f2e",
WORKING + "/imodel/phrase-table-filtered.gz")
subTask("scripts/merge-rot.scala",
WORKING + "/model/reordering-table.sorted",
WORKING + "/model/phrase-table-filtered.gz",
WORKING + "/model/reordering-table.wbe-msd-bidirectional-fe.gz")
subTask("scripts/merge-rot.scala",
WORKING + "/imodel/reordering-table.sorted",
WORKING + "/imodel/phrase-table-filtered.gz",
WORKING + "/imodel/reordering-table.wbe-msd-bidirectional-fe.gz")
if(clean) {
for(i <- 1 to nSplits) {
rm(WORKING + "/" + i).ifexists.r
}
}
}
if(clean) {
rm(WORKING + ("/corpus-%s-%s.clean.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/corpus-%s-%s.clean.%s" % (l1,l2,l2))).ifexists
rm(WORKING + ("/corpus-%s-%s.sb.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/corpus-%s-%s.sb.%s" % (l1,l2,l2))).ifexists
rm(WORKING + ("/corpus-%s-%s.tok.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/corpus-%s-%s.tok.%s" % (l1,l2,l2))).ifexists
rm(WORKING + ("/corpus-%s-%s.true.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/corpus-%s-%s.true.%s" % (l1,l2,l2))).ifexists
rm(WORKING + "/model/aligned-grow-diag-final-and").ifexists
rm(WORKING + "/model/extract.inv.sorted.gz").ifexists
rm(WORKING + "/model/extract.o.sorted.gz").ifexists
rm(WORKING + "/model/extract.sorted.gz").ifexists
rm(WORKING + "/model/lex.e2f").ifexists
rm(WORKING + "/model/lex.e2f.tmp").ifexists
rm(WORKING + "/model/lex.f2e").ifexists
rm(WORKING + "/model/lex.f2e.tmp").ifexists
rm(WORKING + "/model/phrase-table-all").ifexists
rm(WORKING + "/model/phrase-table.gz").ifexists
rm(WORKING + "/model/phrase-table-sorted.gz").ifexists
rm(WORKING + "/model/phrase-table-sorted").ifexists
rm(WORKING + "/model/reordering-table.sorted").ifexists
rm(WORKING + "/model/reordering-table").ifexists
rm(WORKING + "/imodel/aligned-grow-diag-final-and").ifexists
rm(WORKING + "/imodel/extract.inv.sorted.gz").ifexists
rm(WORKING + "/imodel/extract.o.sorted.gz").ifexists
rm(WORKING + "/imodel/extract.sorted.gz").ifexists
rm(WORKING + "/imodel/lex.e2f").ifexists
rm(WORKING + "/imodel/lex.e2f.tmp").ifexists
rm(WORKING + "/imodel/lex.f2e").ifexists
rm(WORKING + "/imodel/lex.f2e.tmp").ifexists
rm(WORKING + "/imodel/phrase-table-all").ifexists
rm(WORKING + "/imodel/phrase-table.gz").ifexists
rm(WORKING + "/imodel/phrase-table-sorted.gz").ifexists
rm(WORKING + "/imodel/phrase-table-sorted").ifexists
rm(WORKING + "/imodel/reordering-table.sorted").ifexists
rm(WORKING + "/imodel/reordering-table").ifexists
}
if(mert) {
Do(MOSES_DIR+"/mosesdecoder/scripts/tokenizer/tokenizer.perl","-l",l1) < ("corpus/dev-%s-%s.%s" % (l1,l2,l1)) > (WORKING + "/dev-%s-%s.tok.%s" % (l1,l2,l1))
Do(MOSES_DIR+"/mosesdecoder/scripts/recaser/truecase.perl","--model",MOSES_DIR+"/truecaser/truecase."+l1) < (WORKING + "/dev-%s-%s.tok.%s" %
(l1,l2,l1)) > (WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l1))
Do(MOSES_DIR+"/mosesdecoder/scripts/tokenizer/tokenizer.perl","-l",l2) < ("corpus/dev-%s-%s.%s" % (l1,l2,l2)) > (WORKING + "/dev-%s-%s.tok.%s" % (l1,l2,l2))
Do(MOSES_DIR+"/mosesdecoder/scripts/recaser/truecase.perl","--model",MOSES_DIR+"/truecaser/truecase."+l2) < (WORKING + "/dev-%s-%s.tok.%s" %
(l1,l2,l2)) > (WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l2))
subTask("scripts/write-mosesini.scala",
WORKING + "/model/moses.ini",
WORKING + "/model",
WORKING + "/lm/" + l1,"-forMert")
Do(MOSES_DIR+"/mosesdecoder/scripts/training/mert-moses.pl",
WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l1),
WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l2),
MOSES_DIR+"/mosesdecoder/bin/moses",
WORKING + "/model/moses.ini",
"--pairwise-ranked",
"--mertdir",MOSES_DIR+"/mosesdecoder/bin",
"--decoder-flags=-threads "+heads+" -s 10") > (WORKING + "/model/mert.out") err (WORKING + "/model/mert.err") dir (WORKING + "/model")
}
subTask("scripts/write-mosesini.scala",
WORKING + "/model/moses.ini",
WORKING + "/model",
WORKING + "/lm/" + l1 + ".bin")
if(mert) {
subTask("scripts/write-mosesini.scala",
WORKING + "/imodel/moses.ini",
WORKING + "/imodel",
WORKING + "/lm/" + l2,"-forMert")
Do(MOSES_DIR+"/mosesdecoder/scripts/training/mert-moses.pl",
WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l2),
WORKING + "/dev-%s-%s.true.%s" % (l1,l2,l1),
MOSES_DIR+"/mosesdecoder/bin/moses",
WORKING + "/imodel/moses.ini",
"--pairwise-ranked",
"--mertdir",MOSES_DIR+"/mosesdecoder/bin",
"--decoder-flags=-threads "+heads+" -s 10") > (WORKING + "/imodel/mert.out") err (WORKING + "/imodel/mert.err") dir (WORKING + "/imodel")
}
subTask("scripts/write-mosesini.scala",
WORKING + "/imodel/moses.ini",
WORKING + "/imodel",
WORKING + "/lm/" + l2 +".bin")
Do(MOSES_DIR+"/mosesdecoder/bin/processPhraseTable","-ttable","0","0",
WORKING + "/model/phrase-table-filtered.gz",
"-nscores","5","-out",
WORKING + "/model/phrase-table.bin")
Do(MOSES_DIR+"/mosesdecoder/bin/processPhraseTable","-ttable","0","0",
WORKING + "/imodel/phrase-table-filtered.gz",
"-nscores","5","-out",
WORKING + "/imodel/phrase-table.bin")
Do(MOSES_DIR+"/mosesdecoder/bin/processLexicalTable",
"-in",WORKING + "/model/reordering-table.wbe-msd-bidirectional-fe.gz",
"-out",WORKING + "/model/reordering-table")
Do(MOSES_DIR+"/mosesdecoder/bin/processLexicalTable",
"-in",WORKING + "/imodel/reordering-table.wbe-msd-bidirectional-fe.gz",
"-out",WORKING + "/imodel/reordering-table")
def updateMosesIni(fileName : String) {
val in = io.Source.fromFile(fileName)
val lines = in.getLines.toList
in.close
val out = new java.io.PrintWriter(fileName)
for(line <- lines) {
if(line endsWith "phrase-table") {
out.println(line + ".bin")
} else if(line endsWith "phrase-table-filtered.gz") {
System.err.println("This one!")
out.println(line.replace("phrase-table-filtered.gz","phrase-table.bin"))
} else {
out.println(line)
}
}
out.flush
out.close
}
namedTask("update model/moses.ini") {
updateMosesIni(WORKING + "/model/moses.ini")
}
namedTask("update imodel/moses.ini") {
updateMosesIni(WORKING + "/imodel/moses.ini")
}
if(clean && mert) {
rm(WORKING + ("/dev-%s-%s.tok.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/dev-%s-%s.tok.%s" % (l1,l2,l2))).ifexists
rm(WORKING + ("/dev-%s-%s.true.%s" % (l1,l2,l1))).ifexists
rm(WORKING + ("/dev-%s-%s.true.%s" % (l1,l2,l2))).ifexists
rm(WORKING + "/model/mert.err").ifexists
rm(WORKING + "/model/mert.out").ifexists
rm(WORKING + "/model/mert-work").ifexists.r
rm(WORKING + "/imodel/mert.err").ifexists
rm(WORKING + "/imodel/mert.out").ifexists
rm(WORKING + "/imodel/mert-work").ifexists.r
}
|
jmccrae/nimrod
|
scripts/moses-train.scala
|
Scala
|
apache-2.0
| 16,486 |
package objektwerks.function
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import scala.annotation.tailrec
import scala.util.Random
import scala.util.chaining._
import scala.language.postfixOps
class FunctionTest extends AnyFunSuite with Matchers {
test("literal") {
val add = (x: Int, y: Int) => x + y
add(3, 3) shouldEqual 6
val multiply = (x: Int, y: Int) => x * y: Int
multiply(3, 3) shouldEqual 9
val subtract: (Int, Int) => Int = (x, y) => x - y
subtract(9, 3) shouldEqual 6
}
test("def expression") {
def isEven(i: Int): Boolean = i % 2 == 0
isEven(2) shouldBe true
}
test("def block") {
def isOdd(i: Int): Boolean = {
i % 2 != 0
}
isOdd(3) shouldBe true
}
test("def match") {
def sum(xs: List[Int]): Int = xs match {
case Nil => 0
case head :: tail => head + sum(tail)
}
sum(List(1, 2, 3)) shouldEqual 6
}
test("currying") {
def greeting(greeting: String): String => String = (name: String) => {
greeting + ", " + name + "!"
}
val hello = greeting("Hello")
hello("John") shouldEqual "Hello, John!"
}
test("call by value") {
def callByValue(r: Long): (Long, Long) = (r, r)
val (r1, r2) = callByValue(Random.nextLong())
r1 shouldEqual r2
}
test("call by name") {
def callByName(r: => Long): (Long, Long) = (r, r)
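    // The by-name parameter r is re-evaluated at each use, so the two draws (almost surely) differ.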
val (r1, r2) = callByName(Random.nextLong())
r1 should not equal r2
}
test("default args") {
def multiply(x: Int, y: Int = 1): Int = x * y
multiply(1) shouldEqual 1
}
test("var args") {
def add(varargs: Int*): Int = varargs.sum
add(1, 2, 3) shouldEqual 6
add(List(1, 2, 3):_*) shouldEqual 6
}
test("closure") {
val legalDrinkingAge = 21
def isLegallyOldEnoughToDrink(age: Int): Boolean = age >= legalDrinkingAge
isLegallyOldEnoughToDrink(22) shouldBe true
}
test("higher order") {
def square(f: Int => Int, i: Int) = f(i)
square((x: Int) => x * x, 2) shouldEqual 4
}
test("partially applied") {
def multiplier(x: Int, y: Int): Int = x * y
val product = multiplier _
val multiplyByFive = multiplier(5, _: Int)
product(5, 20) shouldEqual 100
multiplyByFive(20) shouldEqual 100
}
test("partial function") {
val fraction = new PartialFunction[Int, Int] {
def apply(d: Int) = 2 / d
def isDefinedAt(d: Int): Boolean = d <= 0
}
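    // Note: apply never consults isDefinedAt, so fraction(2) evaluates even though
    // isDefinedAt(2) is false (and fraction(0) would throw an ArithmeticException).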
fraction(2) shouldEqual 1
fraction.isDefinedAt(-42) shouldBe true
}
test("curry") {
def multiply(x: Int): Int => Int = (y: Int) => x * y
multiply(3)(3) shouldEqual 9
def add(x: Int)(y: Int): Int = x + y
add(1)(2) shouldEqual 3
}
test("curried") {
val sum = (x: Int, y: Int) => x + y
val curriedSum = sum.curried
curriedSum(1)(2) shouldEqual 3
}
test ("lambda") {
val list = List(1, 2, 3, 4)
list.filter(_ % 2 == 0) shouldEqual List(2, 4)
}
test("non-tailrec") {
def factorial(n: Int): Int = n match {
case i if i < 1 => 1
case _ => n * factorial(n - 1)
}
factorial(3) shouldEqual 6
}
test("tailrec") {
@tailrec
def factorial(n: Int, acc: Int = 1): Int = n match {
case i if i < 1 => acc
case _ => factorial(n - 1, acc * n)
}
factorial(9) shouldEqual 362880
}
test("impure function") {
def add(x: Int, y: Int): Int = {
val sum = x + y
println(sum) // Simulating side-effecting IO
sum
}
add(1, 2) shouldEqual 3
}
test("pure function") {
def add(x: Int, y: Int): Int = {
x + y // No side-effecting IO.
}
add(1, 2) shouldEqual 3
}
test("compose > andThen") {
val incr = (n: Int) => n + 1
val decr = (n: Int) => n - 1
val incrComposeDecr = incr compose decr
val incrAndThenDecr = incr andThen decr
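    // Order matters: (incr compose decr)(n) == incr(decr(n)), while (incr andThen decr)(n) == decr(incr(n)).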
val incrDecrAsList = List(incr, decr)
val incrDecrAsListWithReduce = incrDecrAsList reduce ( _ andThen _ )
val xs = (1 to 10).toList
val ys = xs map incr map decr
val zs = xs map incrComposeDecr map incrAndThenDecr
val fs = xs map ( incrDecrAsList reduce ( _ compose _ ) )
val gs = xs map ( incrDecrAsList reduce ( _ andThen _ ) )
val us = xs map incrDecrAsListWithReduce
xs shouldEqual ys
ys shouldEqual zs
fs shouldEqual zs
gs shouldEqual fs
us shouldEqual gs
}
test("pipe") {
val square = (n: Int) => n * n
assert( 2.pipe(square) == 4 )
}
test("select by index") {
def selectByIndex(source: List[Int], index: Int): Option[Int] = {
@tailrec
def loop(source: List[Int], index: Int, acc: Int = 0): Option[Int] = source match {
case Nil => None
case head :: tail => if (acc == index) Some(head) else loop(tail, index, acc + 1)
}
loop(source, index)
}
val xs = 1 to 10 toList
val ys = List[Int]()
val zs = List(1, 2, 3, 4)
val x = selectByIndex(xs, 5)
val y = selectByIndex(ys, 5)
val z = selectByIndex(zs, 5)
x.get shouldEqual xs(5)
y.isEmpty shouldBe true
z.isEmpty shouldBe true
}
test("intersection") {
def intersection(source: List[Int], target: List[Int]): List[Int] = {
for (s <- source if target.contains(s)) yield s
}
val xs = List.range(1, 10)
val ys = List.range(1, 20)
val zs = List.range(30, 40)
intersection(xs, ys) shouldEqual xs.intersect(ys)
intersection(ys, xs) shouldEqual ys.intersect(xs)
intersection(ys, zs) shouldEqual ys.intersect(zs)
}
}
|
objektwerks/scala
|
src/test/scala/objektwerks/function/FunctionTest.scala
|
Scala
|
apache-2.0
| 5,504 |
package com.oct
import java.net.URI
import com.oct.sclaav.Mode.Mode
import com.sksamuel.scrimage.Image
package object sclaav {
object MapsModes {
def apply(mode: String) = mode match {
case "permute" => Mode.MOSAIC_PERMUTE_ALL_FILES
case "single" => Mode.MOSAIC_SINGLE_FILE
case "mosaic-of-mosaics" => Mode.MOSAIC_OF_MOSAICS
case "free-random-composite" => Mode.FREE_COMPOSITE_RANDOM
case "free-ga-composite" => Mode.FREE_COMPOSITE_GA
// case "similarity-permute" => Mode.SIMILARITY_PERMUTE
case "similarity" => Mode.SIMILARITY
}
}
object Mode extends Enumeration {
type Mode = Value
val MOSAIC_PERMUTE_ALL_FILES,
MOSAIC_SINGLE_FILE,
MOSAIC_OF_MOSAICS,
FREE_COMPOSITE_RANDOM,
FREE_COMPOSITE_GA,
// SIMILARITY_PERMUTE,
SIMILARITY = Value
}
case class Config(
maxSamplePhotos: Option[Int] = Some(10),
rows: Option[Int] = None,
cols: Option[Int] = None,
mode: Mode = Mode.MOSAIC_SINGLE_FILE,
in: Option[URI] = None,
out: Option[URI] = None,
singleTarget: Option[URI] = None,
manipulate: Boolean = false,
verbose: Boolean = false,
debug: Boolean = false) {
def validate: Either[String, Unit] = {
val validations = Seq(
validateMode,
validateMosaic,
validateOutputDir
)
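      // Return the first failed validation (Left), if any; otherwise succeed.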
validations.foldLeft[Either[String, Unit]](Right(Unit)) { (vs, v) =>
if (vs.isLeft)
vs
else if (v.isLeft)
v
else
Right(Unit)
}
}
def validateMode: Either[String, Unit] = (mode, singleTarget) match {
case (Mode.MOSAIC_SINGLE_FILE, None) =>
Left("Should provide a target file when using Mosaic mode with a single file")
case (Mode.MOSAIC_PERMUTE_ALL_FILES, Some(_)) =>
Left("Should not provide a target file when using Mosaic mode with permuting all input files")
case (Mode.SIMILARITY, None) =>
Left("Should provide a single target for similarity")
// case (Mode.SIMILARITY_PERMUTE, Some(_)) =>
// Left("Should not provide target image when permuting over images")
case (_, _) =>
Right(Unit)
}
def validateMosaic: Either[String, Unit] = (mode, rows, cols) match {
case (Mode.MOSAIC_SINGLE_FILE, Some(r), Some(c)) => Right(Unit)
case (Mode.MOSAIC_PERMUTE_ALL_FILES, Some(r), Some(c)) => Right(Unit)
case (Mode.MOSAIC_SINGLE_FILE, _, _) => Left("Should provide rows and cols for mosaic")
      case (Mode.MOSAIC_PERMUTE_ALL_FILES, _, _) => Left("Should provide rows and cols for mosaic")
case (_, _, _) => Right(Unit)
}
def validateOutputDir: Either[String, Unit] = (mode, out) match {
case (Mode.MOSAIC_SINGLE_FILE, None) => Right(Unit)
case (_, None) => Left("Should provide output dir")
case _ => Right(Unit)
}
}
case class Argb(a: Int, r: Int, g: Int, b: Int)
trait ArgbEstimator {
def apply(img: Image): Argb
}
trait ArgbDistance {
def apply(argb1: Argb, argb2: Argb): Double
}
object ImageManipulationMonoid {
def mappend(i1: Image, i2: Image) = ???
}
trait ImageManipulator {
def apply(img: Image): Image
}
trait Similarity {
def apply(img1: Image, img2: Image, scaleWidth: Int, scaleHeight: Int): Double
}
trait UniformGridCropper {
def apply(gridSize: (Int, Int), locationToCrop: (Int, Int), img: Image): Image
}
trait AbsoluteCropper {
def apply(startH: Int, startW: Int, endH: Int, endW: Int, img: Image): Image
}
trait SingleAbsoluteAssembler {
def apply(backgroundImage: Image, pixelLocation: (Int, Int), theImageToInsert: Image): Image
}
trait CompleteGridAssembler {
def apply(backgroundImage: Image, imagesWIndex: Array[(Image, (Int, Int))], gridSize: (Int, Int)): Image
}
trait CompleteAssembler {
def apply(theReferenceImage: Image, theBackgroundImage: Image, samples: Array[Image]): Image
}
trait CompleteAbsoluteAssembler {
def apply(backgroundImage: Image, imagesWPosition: Array[(Image, (Int, Int))]): Image
}
trait PixelLocationComputer {
def apply(gridSize: (Int, Int), theGridLocation: (Int, Int), canvasSizeInPixels: (Int, Int)): (Int, Int)
}
trait ManipulationsCrossHybridizer {
def apply(mans1: Array[ImageManipulator], mans2: Array[ImageManipulator]): Array[ImageManipulator]
}
trait ManipulationsHybridizer {
def apply(man: Array[ImageManipulator]): Array[ImageManipulator]
}
case class IterationStats(
chainSizeMeans: Array[Double] = Array(),
chainSizeStddevs: Array[Double] = Array(),
populationFitness: Array[Double] = Array(),
populationDistanceMeans: Array[Double] = Array(),
populationDistanceStddevs: Array[Double] = Array(),
bestDistances: Array[Double] = Array(),
worstDistances: Array[Double] = Array())
case class QuadrilateralCell(
startCol: Int,
startRow: Int,
endCol: Int,
endRow: Int)
case class QuadrilateralGrid(
rows: Int,
cols: Int,
listOfTheStuff: Array[QuadrilateralCell])
case class AbsoluteQuadrilateralPosition(startW: Int, startH: Int, endW: Int, endH: Int)
class QuadrilateralGridToAbsolutePositions(sizeW: Int, sizeH: Int) {
def apply(grid: QuadrilateralGrid): Array[AbsoluteQuadrilateralPosition] = {
val cols = grid.cols
val rows = grid.rows
val colPixels = sizeW / cols
val rowPixels = sizeH / rows
grid.listOfTheStuff.map { cell =>
val colW = math.max(cell.endCol - cell.startCol, 1)
val rowW = math.max(cell.endRow - cell.startRow, 1)
val startWP = cell.startCol * colPixels
val endWP = (cell.endCol + 1) * colPixels
val startHP = cell.startRow * rowPixels
val endHP = (cell.endRow + 1) * rowPixels
new AbsoluteQuadrilateralPosition(startWP, startHP, endWP, endHP)
}
}
}
trait ImageToQuadGridThing {
def apply(img: Image, rows: Int, cols: Int): QuadrilateralGrid
}
}
|
ogeagla/sclaav
|
src/main/scala/com/oct/sclaav/package.scala
|
Scala
|
apache-2.0
| 6,070 |
package com.jcalc.feed
import kafka.serializer.Decoder
import kafka.serializer.Encoder
import kafka.utils.VerifiableProperties
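// Kafka 0.8-style (de)serializer pair for SingleTransaction; Kafka instantiates these
// reflectively and passes in the (here unused) VerifiableProperties.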
class SingleTransactionDecoder(props: VerifiableProperties) extends Decoder[SingleTransaction] {
def fromBytes(bytes: Array[Byte]): SingleTransaction = {
new SingleTransaction(new String(bytes))
}
}
class SingleTransactionEncoder(props: VerifiableProperties) extends Encoder[SingleTransaction] {
def toBytes(singleTransaction: SingleTransaction): Array[Byte] = {
singleTransaction.toBytes()
}
}
|
belled10468/marseille
|
src/main/scala/com/jcalc/feed/Codec.scala
|
Scala
|
apache-2.0
| 541 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.orc
import java.nio.charset.StandardCharsets
import java.sql.Timestamp
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.io.orc.{OrcStruct, SparkOrcNewRecordReader}
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogRelation
import org.apache.spark.sql.execution.datasources.{LogicalRelation, RecordReaderIterator}
import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.hive.test.TestHive._
import org.apache.spark.sql.hive.test.TestHive.implicits._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{IntegerType, StructType}
import org.apache.spark.util.Utils
case class AllDataTypesWithNonPrimitiveType(
stringField: String,
intField: Int,
longField: Long,
floatField: Float,
doubleField: Double,
shortField: Short,
byteField: Byte,
booleanField: Boolean,
array: Seq[Int],
arrayContainsNull: Seq[Option[Int]],
map: Map[Int, Long],
mapValueContainsNull: Map[Int, Option[Long]],
data: (Seq[Int], (Int, String)))
case class BinaryData(binaryData: Array[Byte])
case class Contact(name: String, phone: String)
case class Person(name: String, age: Int, contacts: Seq[Contact])
class OrcQuerySuite extends QueryTest with BeforeAndAfterAll with OrcTest {
test("Read/write All Types") {
val data = (0 to 255).map { i =>
(s"$i", i, i.toLong, i.toFloat, i.toDouble, i.toShort, i.toByte, i % 2 == 0)
}
withOrcFile(data) { file =>
checkAnswer(
spark.read.orc(file),
data.toDF().collect())
}
}
test("Read/write binary data") {
withOrcFile(BinaryData("test".getBytes(StandardCharsets.UTF_8)) :: Nil) { file =>
val bytes = read.orc(file).head().getAs[Array[Byte]](0)
assert(new String(bytes, StandardCharsets.UTF_8) === "test")
}
}
test("Read/write all types with non-primitive type") {
val data: Seq[AllDataTypesWithNonPrimitiveType] = (0 to 255).map { i =>
AllDataTypesWithNonPrimitiveType(
s"$i", i, i.toLong, i.toFloat, i.toDouble, i.toShort, i.toByte, i % 2 == 0,
0 until i,
(0 until i).map(Option(_).filter(_ % 3 == 0)),
(0 until i).map(i => i -> i.toLong).toMap,
(0 until i).map(i => i -> Option(i.toLong)).toMap + (i -> None),
(0 until i, (i, s"$i")))
}
withOrcFile(data) { file =>
checkAnswer(
read.orc(file),
data.toDF().collect())
}
}
test("Read/write UserDefinedType") {
withTempPath { path =>
val data = Seq((1, new UDT.MyDenseVector(Array(0.25, 2.25, 4.25))))
val udtDF = data.toDF("id", "vectors")
udtDF.write.orc(path.getAbsolutePath)
val readBack = spark.read.schema(udtDF.schema).orc(path.getAbsolutePath)
checkAnswer(udtDF, readBack)
}
}
test("Creating case class RDD table") {
val data = (1 to 100).map(i => (i, s"val_$i"))
sparkContext.parallelize(data).toDF().createOrReplaceTempView("t")
withTempView("t") {
checkAnswer(sql("SELECT * FROM t"), data.toDF().collect())
}
}
test("Simple selection form ORC table") {
val data = (1 to 10).map { i =>
Person(s"name_$i", i, (0 to 1).map { m => Contact(s"contact_$m", s"phone_$m") })
}
withOrcTable(data, "t") {
// ppd:
// leaf-0 = (LESS_THAN_EQUALS age 5)
// expr = leaf-0
assert(sql("SELECT name FROM t WHERE age <= 5").count() === 5)
// ppd:
// leaf-0 = (LESS_THAN_EQUALS age 5)
// expr = (not leaf-0)
assertResult(10) {
sql("SELECT name, contacts FROM t where age > 5")
.rdd
.flatMap(_.getAs[Seq[_]]("contacts"))
.count()
}
// ppd:
// leaf-0 = (LESS_THAN_EQUALS age 5)
// leaf-1 = (LESS_THAN age 8)
// expr = (and (not leaf-0) leaf-1)
{
val df = sql("SELECT name, contacts FROM t WHERE age > 5 AND age < 8")
assert(df.count() === 2)
assertResult(4) {
df.rdd.flatMap(_.getAs[Seq[_]]("contacts")).count()
}
}
// ppd:
// leaf-0 = (LESS_THAN age 2)
// leaf-1 = (LESS_THAN_EQUALS age 8)
// expr = (or leaf-0 (not leaf-1))
{
val df = sql("SELECT name, contacts FROM t WHERE age < 2 OR age > 8")
assert(df.count() === 3)
assertResult(6) {
df.rdd.flatMap(_.getAs[Seq[_]]("contacts")).count()
}
}
}
}
test("save and load case class RDD with `None`s as orc") {
val data = (
Option.empty[Int],
Option.empty[Long],
Option.empty[Float],
Option.empty[Double],
Option.empty[Boolean]
) :: Nil
withOrcFile(data) { file =>
checkAnswer(
read.orc(file),
Row(Seq.fill(5)(null): _*))
}
}
test("SPARK-16610: Respect orc.compress option when compression is unset") {
// Respect `orc.compress`.
withTempPath { file =>
spark.range(0, 10).write
.option("orc.compress", "ZLIB")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("ZLIB" === expectedCompressionKind.name())
}
// `compression` overrides `orc.compress`.
withTempPath { file =>
spark.range(0, 10).write
.option("compression", "ZLIB")
.option("orc.compress", "SNAPPY")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("ZLIB" === expectedCompressionKind.name())
}
}
// Hive supports zlib, snappy and none for Hive 1.2.1.
test("Compression options for writing to an ORC file (SNAPPY, ZLIB and NONE)") {
withTempPath { file =>
spark.range(0, 10).write
.option("compression", "ZLIB")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("ZLIB" === expectedCompressionKind.name())
}
withTempPath { file =>
spark.range(0, 10).write
.option("compression", "SNAPPY")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("SNAPPY" === expectedCompressionKind.name())
}
withTempPath { file =>
spark.range(0, 10).write
.option("compression", "NONE")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("NONE" === expectedCompressionKind.name())
}
}
// Following codec is not supported in Hive 1.2.1, ignore it now
ignore("LZO compression options for writing to an ORC file not supported in Hive 1.2.1") {
withTempPath { file =>
spark.range(0, 10).write
.option("compression", "LZO")
.orc(file.getCanonicalPath)
val expectedCompressionKind =
OrcFileOperator.getFileReader(file.getCanonicalPath).get.getCompression
assert("LZO" === expectedCompressionKind.name())
}
}
test("simple select queries") {
withOrcTable((0 until 10).map(i => (i, i.toString)), "t") {
checkAnswer(
sql("SELECT `_1` FROM t where t.`_1` > 5"),
(6 until 10).map(Row.apply(_)))
checkAnswer(
sql("SELECT `_1` FROM t as tmp where tmp.`_1` < 5"),
(0 until 5).map(Row.apply(_)))
}
}
test("appending") {
val data = (0 until 10).map(i => (i, i.toString))
createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
withOrcTable(data, "t") {
sql("INSERT INTO TABLE t SELECT * FROM tmp")
checkAnswer(table("t"), (data ++ data).map(Row.fromTuple))
}
sessionState.catalog.dropTable(TableIdentifier("tmp"), ignoreIfNotExists = true, purge = false)
}
test("overwriting") {
val data = (0 until 10).map(i => (i, i.toString))
createDataFrame(data).toDF("c1", "c2").createOrReplaceTempView("tmp")
withOrcTable(data, "t") {
sql("INSERT OVERWRITE TABLE t SELECT * FROM tmp")
checkAnswer(table("t"), data.map(Row.fromTuple))
}
sessionState.catalog.dropTable(TableIdentifier("tmp"), ignoreIfNotExists = true, purge = false)
}
test("self-join") {
// 4 rows, cells of column 1 of row 2 and row 4 are null
val data = (1 to 4).map { i =>
val maybeInt = if (i % 2 == 0) None else Some(i)
(maybeInt, i.toString)
}
withOrcTable(data, "t") {
val selfJoin = sql("SELECT * FROM t x JOIN t y WHERE x.`_1` = y.`_1`")
val queryOutput = selfJoin.queryExecution.analyzed.output
assertResult(4, "Field count mismatches")(queryOutput.size)
assertResult(2, "Duplicated expression ID in query plan:\\n $selfJoin") {
queryOutput.filter(_.name == "_1").map(_.exprId).size
}
checkAnswer(selfJoin, List(Row(1, "1", 1, "1"), Row(3, "3", 3, "3")))
}
}
test("nested data - struct with array field") {
    val data = (1 to 10).map(i => Tuple1((i, Seq(s"val_$i"))))
withOrcTable(data, "t") {
checkAnswer(sql("SELECT `_1`.`_2`[0] FROM t"), data.map {
case Tuple1((_, Seq(string))) => Row(string)
})
}
}
test("nested data - array of struct") {
    val data = (1 to 10).map(i => Tuple1(Seq(i -> s"val_$i")))
withOrcTable(data, "t") {
checkAnswer(sql("SELECT `_1`[0].`_2` FROM t"), data.map {
case Tuple1(Seq((_, string))) => Row(string)
})
}
}
test("columns only referenced by pushed down filters should remain") {
withOrcTable((1 to 10).map(Tuple1.apply), "t") {
checkAnswer(sql("SELECT `_1` FROM t WHERE `_1` < 10"), (1 to 9).map(Row.apply(_)))
}
}
test("SPARK-5309 strings stored using dictionary compression in orc") {
withOrcTable((0 until 1000).map(i => ("same", "run_" + i / 100, 1)), "t") {
checkAnswer(
sql("SELECT `_1`, `_2`, SUM(`_3`) FROM t GROUP BY `_1`, `_2`"),
(0 until 10).map(i => Row("same", "run_" + i, 100)))
checkAnswer(
sql("SELECT `_1`, `_2`, SUM(`_3`) FROM t WHERE `_2` = 'run_5' GROUP BY `_1`, `_2`"),
List(Row("same", "run_5", 100)))
}
}
test("SPARK-9170: Don't implicitly lowercase of user-provided columns") {
withTempPath { dir =>
val path = dir.getCanonicalPath
spark.range(0, 10).select('id as "Acol").write.format("orc").save(path)
spark.read.format("orc").load(path).schema("Acol")
intercept[IllegalArgumentException] {
spark.read.format("orc").load(path).schema("acol")
}
checkAnswer(spark.read.format("orc").load(path).select("acol").sort("acol"),
(0 until 10).map(Row(_)))
}
}
test("SPARK-8501: Avoids discovery schema from empty ORC files") {
withTempPath { dir =>
val path = dir.getCanonicalPath
withTable("empty_orc") {
withTempView("empty", "single") {
spark.sql(
s"""CREATE TABLE empty_orc(key INT, value STRING)
|STORED AS ORC
|LOCATION '${dir.toURI}'
""".stripMargin)
val emptyDF = Seq.empty[(Int, String)].toDF("key", "value").coalesce(1)
emptyDF.createOrReplaceTempView("empty")
          // This creates 1 empty ORC file with Hive ORC SerDe. We are using this trick because
          // the Spark SQL ORC data source never writes empty ORC files.
spark.sql(
s"""INSERT INTO TABLE empty_orc
|SELECT key, value FROM empty
""".stripMargin)
val errorMessage = intercept[AnalysisException] {
spark.read.orc(path)
}.getMessage
assert(errorMessage.contains("Unable to infer schema for ORC"))
val singleRowDF = Seq((0, "foo")).toDF("key", "value").coalesce(1)
singleRowDF.createOrReplaceTempView("single")
spark.sql(
s"""INSERT INTO TABLE empty_orc
|SELECT key, value FROM single
""".stripMargin)
val df = spark.read.orc(path)
assert(df.schema === singleRowDF.schema.asNullable)
checkAnswer(df, singleRowDF)
}
}
}
}
test("SPARK-10623 Enable ORC PPD") {
withTempPath { dir =>
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> "true") {
import testImplicits._
val path = dir.getCanonicalPath
// For field "a", the first column has odds integers. This is to check the filtered count
// when `isNull` is performed. For Field "b", `isNotNull` of ORC file filters rows
// only when all the values are null (maybe this works differently when the data
// or query is complicated). So, simply here a column only having `null` is added.
val data = (0 until 10).map { i =>
val maybeInt = if (i % 2 == 0) None else Some(i)
val nullValue: Option[String] = None
(maybeInt, nullValue)
}
        // Repartition the data so that there are several ORC files,
        // in order to skip stripes in ORC.
createDataFrame(data).toDF("a", "b").repartition(10).write.orc(path)
val df = spark.read.orc(path)
def checkPredicate(pred: Column, answer: Seq[Row]): Unit = {
val sourceDf = stripSparkFilter(df.where(pred))
val data = sourceDf.collect().toSet
val expectedData = answer.toSet
          // When a filter is pushed to ORC, ORC can apply it to rows. So, we can check
          // the number of rows returned from ORC to make sure our filter pushdown works.
          // A tricky part is that ORC does not filter rows exactly but returns a superset of
          // possible results. So, this checks that the number of results is less than the
          // original count of the data, and that the results contain the expected data.
assert(
sourceDf.count < 10 && expectedData.subsetOf(data),
s"No data was filtered for predicate: $pred")
}
checkPredicate('a === 5, List(5).map(Row(_, null)))
checkPredicate('a <=> 5, List(5).map(Row(_, null)))
checkPredicate('a < 5, List(1, 3).map(Row(_, null)))
checkPredicate('a <= 5, List(1, 3, 5).map(Row(_, null)))
checkPredicate('a > 5, List(7, 9).map(Row(_, null)))
checkPredicate('a >= 5, List(5, 7, 9).map(Row(_, null)))
checkPredicate('a.isNull, List(null).map(Row(_, null)))
checkPredicate('b.isNotNull, List())
checkPredicate('a.isin(3, 5, 7), List(3, 5, 7).map(Row(_, null)))
checkPredicate('a > 0 && 'a < 3, List(1).map(Row(_, null)))
checkPredicate('a < 1 || 'a > 8, List(9).map(Row(_, null)))
checkPredicate(!('a > 3), List(1, 3).map(Row(_, null)))
checkPredicate(!('a > 0 && 'a < 3), List(3, 5, 7, 9).map(Row(_, null)))
}
}
}
test("Verify the ORC conversion parameter: CONVERT_METASTORE_ORC") {
withTempView("single") {
val singleRowDF = Seq((0, "foo")).toDF("key", "value")
singleRowDF.createOrReplaceTempView("single")
Seq("true", "false").foreach { orcConversion =>
withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> orcConversion) {
withTable("dummy_orc") {
withTempPath { dir =>
val path = dir.getCanonicalPath
spark.sql(
s"""
|CREATE TABLE dummy_orc(key INT, value STRING)
|STORED AS ORC
|LOCATION '${dir.toURI}'
""".stripMargin)
spark.sql(
s"""
|INSERT INTO TABLE dummy_orc
|SELECT key, value FROM single
""".stripMargin)
val df = spark.sql("SELECT * FROM dummy_orc WHERE key=0")
checkAnswer(df, singleRowDF)
val queryExecution = df.queryExecution
if (orcConversion == "true") {
queryExecution.analyzed.collectFirst {
case _: LogicalRelation => ()
}.getOrElse {
fail(s"Expecting the query plan to convert orc to data sources, " +
s"but got:\\n$queryExecution")
}
} else {
queryExecution.analyzed.collectFirst {
case _: CatalogRelation => ()
}.getOrElse {
fail(s"Expecting no conversion from orc to data sources, " +
s"but got:\\n$queryExecution")
}
}
}
}
}
}
}
}
test("converted ORC table supports resolving mixed case field") {
withSQLConf(HiveUtils.CONVERT_METASTORE_ORC.key -> "true") {
withTable("dummy_orc") {
withTempPath { dir =>
val df = spark.range(5).selectExpr("id", "id as valueField", "id as partitionValue")
df.write
.partitionBy("partitionValue")
.mode("overwrite")
.orc(dir.getAbsolutePath)
spark.sql(s"""
|create external table dummy_orc (id long, valueField long)
|partitioned by (partitionValue int)
|stored as orc
|location "${dir.toURI}"""".stripMargin)
spark.sql(s"msck repair table dummy_orc")
checkAnswer(spark.sql("select * from dummy_orc"), df)
}
}
}
}
test("SPARK-14962 Produce correct results on array type with isnotnull") {
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> "true") {
val data = (0 until 10).map(i => Tuple1(Array(i)))
withOrcFile(data) { file =>
val actual = spark
.read
.orc(file)
.where("_1 is not null")
val expected = data.toDF()
checkAnswer(actual, expected)
}
}
}
test("SPARK-15198 Support for pushing down filters for boolean types") {
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> "true") {
val data = (0 until 10).map(_ => (true, false))
withOrcFile(data) { file =>
val df = spark.read.orc(file).where("_2 == true")
val actual = stripSparkFilter(df).count()
// ORC filter should be applied and the total count should be 0.
assert(actual === 0)
}
}
}
test("Support for pushing down filters for decimal types") {
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> "true") {
val data = (0 until 10).map(i => Tuple1(BigDecimal.valueOf(i)))
withTempPath { file =>
        // Repartition the data so that there are several ORC files,
        // in order to skip stripes in ORC.
createDataFrame(data).toDF("a").repartition(10).write.orc(file.getCanonicalPath)
val df = spark.read.orc(file.getCanonicalPath).where("a == 2")
val actual = stripSparkFilter(df).count()
assert(actual < 10)
}
}
}
test("Support for pushing down filters for timestamp types") {
withSQLConf(SQLConf.ORC_FILTER_PUSHDOWN_ENABLED.key -> "true") {
val timeString = "2015-08-20 14:57:00"
val data = (0 until 10).map { i =>
val milliseconds = Timestamp.valueOf(timeString).getTime + i * 3600
Tuple1(new Timestamp(milliseconds))
}
withTempPath { file =>
        // Repartition the data so that there are several ORC files,
        // in order to skip stripes in ORC.
createDataFrame(data).toDF("a").repartition(10).write.orc(file.getCanonicalPath)
val df = spark.read.orc(file.getCanonicalPath).where(s"a == '$timeString'")
val actual = stripSparkFilter(df).count()
assert(actual < 10)
}
}
}
test("column nullability and comment - write and then read") {
val schema = (new StructType)
.add("cl1", IntegerType, nullable = false, comment = "test")
.add("cl2", IntegerType, nullable = true)
.add("cl3", IntegerType, nullable = true)
val row = Row(3, null, 4)
val df = spark.createDataFrame(sparkContext.parallelize(row :: Nil), schema)
val tableName = "tab"
withTable(tableName) {
df.write.format("orc").mode("overwrite").saveAsTable(tableName)
// Verify the DDL command result: DESCRIBE TABLE
checkAnswer(
sql(s"desc $tableName").select("col_name", "comment").where($"comment" === "test"),
Row("cl1", "test") :: Nil)
// Verify the schema
val expectedFields = schema.fields.map(f => f.copy(nullable = true))
assert(spark.table(tableName).schema == schema.copy(fields = expectedFields))
}
}
test("Empty schema does not read data from ORC file") {
val data = Seq((1, 1), (2, 2))
withOrcFile(data) { path =>
val requestedSchema = StructType(Nil)
val conf = new Configuration()
val physicalSchema = OrcFileOperator.readSchema(Seq(path), Some(conf)).get
OrcRelation.setRequiredColumns(conf, physicalSchema, requestedSchema)
val maybeOrcReader = OrcFileOperator.getFileReader(path, Some(conf))
assert(maybeOrcReader.isDefined)
val orcRecordReader = new SparkOrcNewRecordReader(
maybeOrcReader.get, conf, 0, maybeOrcReader.get.getContentLength)
val recordsIterator = new RecordReaderIterator[OrcStruct](orcRecordReader)
try {
assert(recordsIterator.next().toString == "{null, null}")
} finally {
recordsIterator.close()
}
}
}
test("read from multiple orc input paths") {
val path1 = Utils.createTempDir()
val path2 = Utils.createTempDir()
makeOrcFile((1 to 10).map(Tuple1.apply), path1)
makeOrcFile((1 to 10).map(Tuple1.apply), path2)
assertResult(20)(read.orc(path1.getCanonicalPath, path2.getCanonicalPath).count())
}
}
|
jianran/spark
|
sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala
|
Scala
|
apache-2.0
| 22,698 |
abstract class Obj { type S }
class ObjImpl extends Obj { type S = String }
abstract class A {
type MyObj <: Obj
type S = MyObj#S
val any: Any = 0
  val some: S = any // compiles => type S is inferred as scala.Any
}
class B extends A {
type MyObj = ObjImpl
val myString: S = "hello"
val realString: String = myString // error: type mismatch
}
|
scala/scala
|
test/files/neg/t836.scala
|
Scala
|
apache-2.0
| 360 |
/*
* Copyright 2013 TeamNexus
*
* TeamNexus Licenses this file to you under the MIT License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License
*/
package com.nexus.time
import java.util.{Calendar, Date}
import com.nexus.time.synchronisation.TimeSynchronisationHandler
/**
 * Provides the current date and time, delegating to [[TimeSynchronisationHandler]] for network-synchronised values.
*
* @author jk-5
*/
object NexusTime {
def getCurrentDate: Date = TimeSynchronisationHandler.getCurrentDate
def getCurrentTime: Long = TimeSynchronisationHandler.getCurrentTime
def getCalendar: Calendar = {
val ret = Calendar.getInstance()
ret.setTime(this.getCurrentDate)
ret
}
}
|
crvidya/nexus-scala
|
src/main/scala/com/nexus/time/NexusTime.scala
|
Scala
|
mit
| 1,064 |
package mr.merc.map.generator
import mr.merc.economics.{FourSeasonsTerrainHex, FourSeasonsTerrainHexField, WorldGenerator}
import mr.merc.map.ShortestGrid
import mr.merc.map.hex._
import mr.merc.map.pathfind.PathFinder
import mr.merc.map.terrain._
import mr.merc.util.MercUtils._
import scala.math.{abs, max, pow}
import scala.util.Random
import mr.merc.economics.WorldGenerationConstants._
import FourSeasonsTerrainTypes._
import FourSeasonsMapObjects._
import mr.merc.log.Logging
import scala.collection.parallel.CollectionConverters._
import scala.collection.mutable
object WorldMapGenerator extends Logging {
def generateWorldMap(width: Int, height: Int, provinces: Int): WorldMap = {
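    // Multi-octave noise with a radial falloff: subtracting the squared distance from the
    // map centre lowers values near the edges, so the edges end up as ocean.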
val terrainNoise = Noise(5).add(0.5, Noise(10)).add(0.25, Noise(20)).applyFunction { case ((x, y), n) =>
val distanceFromCenter = 2 * max(abs(x - 0.5), abs(y - 0.5))
n + 0.6 - 1.6 * pow(distanceFromCenter, 2)
}
val biomeNoise = Noise(10).add(0.5, Noise(40)).add(0.25, Noise(80))
def biome(x: Int, y: Int): FourSeasonsTerrainType = {
biomeNoise(x, width, y, height) match {
//case n if n < biomeNoise.percentageBelow(0.05) => FourSeasonsSand2
case n if n < biomeNoise.percentageBelow(0.1) => FourSeasonsSand
case n if n < biomeNoise.percentageBelow(0.15) => FourSeasonsGrass
case n if n < biomeNoise.percentageBelow(0.25) => FourSeasonSwamp
case n if n < biomeNoise.percentageBelow(0.6) => FourSeasonsGrass
case n if n < biomeNoise.percentageBelow(0.85) => FourSeasonsDecForest
case n if n < biomeNoise.percentageBelow(0.98) => FourSeasonsHill
case _ => FourSeasonsMountain
}
}
def f(x: Int, y: Int): FourSeasonsTerrainHex = {
val n = terrainNoise(x, width, y, height)
if (n > terrainNoise.percentageBelow(1 - LandPercentage)) new FourSeasonsTerrainHex(x, y, biome(x, y))
else new FourSeasonsTerrainHex(x, y, FourSeasonsOcean)
}
val terrainField = new FourSeasonsTerrainHexField(width, height, f)
setBorderToOcean(terrainField)
destroyIslands(terrainField)
val provincesMap = divideIntoProvinces(terrainField, provinces)
val provinceSizes = provincesMap.values.map(_.count(_.terrainMap != FourSeasonsOcean)).toList.sorted
info("Provinces sizes is: " + provinceSizes)
info("Average province size is: " + provinceSizes.avg)
info("Province sizes deviation is: " + provinceSizes.deviation)
provincesMap.keys.foreach { h =>
terrainField.hex(h.x, h.y).terrainMap = FourSeasonsHumanCastle
}
addRivers(terrainField)
moveOceanFromTheBeach(terrainField)
connectCitiesByRoads(terrainField, provincesMap)
provincesMap.keys.foreach { cap =>
makeRoadAroundCapitals(terrainField, cap)
}
addDecorations(terrainField)
WorldMap(terrainField, provincesMap.map {
case (capital, hexes) =>
terrainField.hex(capital.x, capital.y) -> hexes.map(h => terrainField.hex(h.x, h.y))
})
}
def destroyIslands(field: FourSeasonsTerrainHexField): Unit = {
val clusters = divideIntoLandClusters(field)
val maxCluster = clusters.maxBy(_.size)
(clusters - maxCluster).flatten.foreach(_.terrainMap = FourSeasonsOcean)
}
// for beautiful country borders
def setBorderToOcean(field: FourSeasonsTerrainHexField): Unit = {
val leftBorder = Set(0, 1)
val rightBorder = Set(field.width, field.width - 1, field.width - 2)
val upBorder = Set(0, 1)
val downBorder = Set(field.height, field.height - 1, field.height - 2, field.height - 3)
field.hexes.filter(h => leftBorder.contains(h.x) || rightBorder.contains(h.x)
|| upBorder.contains(h.y) || downBorder.contains(h.y)).foreach(_.terrainMap = FourSeasonsOcean)
}
def moveOceanFromTheBeach(field: FourSeasonsTerrainHexField): Unit = {
field.hexes.filter(_.terrainMap == FourSeasonsOcean).filter { h =>
field.neighbours(h).exists(_.terrainMap != FourSeasonsOcean)
}.foreach(_.terrainMap = FourSeasonsRiver)
}
private def divideIntoLandClusters(field: FourSeasonsTerrainHexField): Set[Set[FourSeasonsTerrainHex]] = {
val allHexes = field.hexes.filterNot(_.terrainMap == FourSeasonsOcean).toSet
var result: Set[Set[FourSeasonsTerrainHex]] = Set()
case class CoreAndNeigs(core: Set[FourSeasonsTerrainHex], neigs: Set[FourSeasonsTerrainHex]) {
def nextStep(): CoreAndNeigs = {
val newNeigs = neigs.flatMap(h => field.neighboursSet(h) & allHexes) -- neigs -- core
CoreAndNeigs(core | neigs, newNeigs)
}
}
while (result.flatten != allHexes) {
val randomHex = (allHexes -- result.flatten).head
val randomHexSet: Set[FourSeasonsTerrainHex] = {
var set = CoreAndNeigs(Set(), Set(randomHex))
while (set.neigs.nonEmpty) {
set = set.nextStep()
}
set.core
}
result += randomHexSet
}
result
}
def divideIntoProvinces(field: HexField[FourSeasonsTerrainHex], provinces: Int): Map[FourSeasonsTerrainHex, Set[FourSeasonsTerrainHex]] = {
val totalHexes = field.hexes.filterNot(_.terrainMap == FourSeasonsOcean)
val firstCapitals = Random.shuffle(totalHexes).take(provinces)
var divisionIsOk = false
var iterationNumber = 0
var currentDivision = MapDivision(firstCapitals.toSet, totalHexes.toSet, field)
while(!divisionIsOk) {
val newCapitals = currentDivision.lloydRelaxationCapitals
val division = MapDivision(newCapitals, currentDivision.allHexes, field)
val provinceSizes = division.voronoiDivision.values.map(_.size).toList
val minProvinceSize = provinceSizes.min
val maxProvinceSize = provinceSizes.max
if (minProvinceSize <= maxProvinceSize / 1.6 && iterationNumber < 100) {
currentDivision = division.removeSmallestProvinceAndDivideBiggest()
} else {
if (iterationNumber > 30) {
divisionIsOk = true
}
currentDivision = division
}
iterationNumber += 1
}
info(s"Used $iterationNumber iterations to divide map into provinces")
MapDivision(currentDivision.capitals, field.hexes.toSet, field).voronoiDivision
}
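  // Rivers: pick random land sources away from cities, path-find each to the nearest ocean
  // hex (in parallel), carve the paths as river, and seed small lakes at every other river source.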
def addRivers(field: FourSeasonsTerrainHexField): Unit = {
val riversCount = (field.width + field.height) * 3 / 20
info(s"Calculating $riversCount rivers")
val cityHexes = field.hexes.filter(_.terrainMap.isCastle).flatMap { h =>
h :: field.neighbours(h)
}.toSet
val randomStream = Iterator.continually((Random.nextInt(field.width), Random.nextInt(field.height - 1))).filterNot {
case (x, y) => cityHexes.contains(field.hex(x, y)) || field.hex(x, y).terrainMap == FourSeasonsOcean
}
val initialRivers = (0 until riversCount).zip(randomStream).par.flatMap { case (_, (x, y)) =>
val from = field.hex(x, y)
field.findClosest(from, x => x.terrainMap == FourSeasonsOcean).flatMap { target =>
field.findPathForRiver(from, target, cityHexes.contains)
}
}
info(s"Calculation $riversCount rivers is over")
val seq = initialRivers.seq
seq.flatten.foreach { h =>
if (h.terrainMap != FourSeasonsOcean) {
h.terrainMap = FourSeasonsRiver
}
}
seq.flatMap(_.headOption).grouped(2).map(_.head).foreach(addLake(_, field, 2 + Random.nextInt(3)))
}
def addLake(hex: FourSeasonsTerrainHex, field: FourSeasonsTerrainHexField, size: Int): Unit = {
field.closest(hex).filterNot(_.terrainMap.isCastle).take(size).foreach { h =>
h.terrainMap = FourSeasonsRiver
h.mapObj = Some(FourSeasonsWaterLilies)
}
}
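  // Roads: for each pair of neighbouring provinces, run a shortest-path search whose costs
  // favour existing roads and bridges and penalise water, then lay road along the path,
  // bridging any water hexes it crosses.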
def connectCitiesByRoads(field: FourSeasonsTerrainHexField, provincesMap: Map[FourSeasonsTerrainHex, Set[FourSeasonsTerrainHex]]): Unit = {
val connectivityMap = WorldGenerator.buildConnectivityMap(field, provincesMap)
val connections = connectivityMap.flatMap { case (capital, neigs) =>
neigs.map(h => Set(capital, h))
}.toSet
connections.map(_.toList).foreach {
case List(from, to) =>
val grid = new ShortestGrid[FourSeasonsTerrainHex] {
override def heuristic(from: FourSeasonsTerrainHex, to: FourSeasonsTerrainHex): Double = price(from, to)
override def isBlocked(t: FourSeasonsTerrainHex): Boolean = (!provincesMap(from).contains(t) &&
!provincesMap(to).contains(t)) || from.terrainMap == FourSeasonsOcean
override def price(from: FourSeasonsTerrainHex, to: FourSeasonsTerrainHex): Double =
if (to.terrainMap.isRoad) 0.5
else if (to.mapObj.exists(_.isBridge)) 0.7
else if (to.terrainMap == FourSeasonsOcean || to.terrainMap == FourSeasonsRiver) 3
else 1
override def neighbours(t: FourSeasonsTerrainHex): List[FourSeasonsTerrainHex] = field.neighbours(t)
}
PathFinder.findPath(grid, from, to).foreach { path =>
path.foreach { h =>
if (h.terrainMap == FourSeasonsOcean || h.terrainMap == FourSeasonsRiver) {
h.mapObj = Some(FourSeasonsWoodenBridge)
} else if (!h.terrainMap.isCastle) {
h.terrainMap = FourSeasonsGrassyRoad
}
}
}
case x => sys.error(s"Not expected input $x")
}
}
def makeRoadAroundCapitals(field: FourSeasonsTerrainHexField, capital: FourSeasonsTerrainHex): Unit = {
field.hexRing(capital, 1).foreach { h =>
if (h.terrainMap != FourSeasonsOcean && h.terrainMap != FourSeasonsRiver) {
h.terrainMap = FourSeasonsCleanRoad
}
}
}
def addDecorations(field: FourSeasonsTerrainHexField): Unit = {
field.hexes.filter(_.terrainMap == FourSeasonsGrass).filter(_.mapObj.isEmpty).grouped(10).map(_.head).foreach {
h => h.mapObj = Some(FourSeasonsFlowers)
}
field.hexes.filter(h => h.terrainMap == FourSeasonsGrass || h.terrainMap == FourSeasonsSand
|| h.terrainMap == FourSeasonsSand2).filter(_.mapObj.isEmpty).
grouped(20).map(_.head).foreach {
h => h.mapObj = Some(FourSeasonsSmallStones)
}
}
}
case class WorldMap(terrain: FourSeasonsTerrainHexField, provinces: Map[FourSeasonsTerrainHex, Set[FourSeasonsTerrainHex]])
case class MapDivision[T <: Hex](capitals: Set[T], allHexes: Set[T], terrain: HexField[T]) {
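  // Multi-source BFS: all capitals grow their frontiers in lock step, and each hex is claimed
  // by the capital whose frontier reaches it first, approximating a Voronoi partition under hex-grid distance.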
def voronoiDivision: Map[T, Set[T]] = {
var changed: Set[T] = capitals
val hexToCapitalSet: mutable.Set[T] = mutable.Set()
val hexToCapital: mutable.HashMap[T, T] = mutable.HashMap()
changed.foreach { c =>
hexToCapital += c -> c
hexToCapitalSet += c
}
while (changed.nonEmpty) {
var newChanged: Set[T] = Set()
for {
ch <- changed
neig <- terrain.neighbours(ch) if !hexToCapitalSet.contains(neig) && allHexes.contains(neig)
} {
hexToCapitalSet += neig
hexToCapital += neig -> hexToCapital(ch)
newChanged += neig
}
changed = newChanged
}
val result = hexToCapital.groupBy(_._2).map { case (k, v) =>
k -> v.keys.toSet
}
result
}
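  // Lloyd relaxation step: move each capital to the province hex nearest the province
  // centroid, which evens out province shapes across iterations.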
def lloydRelaxationCapitals: Set[T] = {
voronoiDivision.map { case (_, hexes) =>
val x = math.round(hexes.toList.map(_.x).sum.toDouble / hexes.size).toInt
val y = math.round(hexes.toList.map(_.y).sum.toDouble / hexes.size).toInt
val newCapitalCandidate = new Hex(x, y)
hexes.minBy(_.distance(newCapitalCandidate))
}.toSet
}
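  // Rebalancing step: drop the smallest province's capital and split the largest province
  // by seeding (and relaxing) two new capitals inside it.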
def removeSmallestProvinceAndDivideBiggest(): MapDivision[T] = {
val provinces = voronoiDivision.toVector.sortBy(_._2.size)
val (smallestCapital, _) = provinces.head
val (largestCapital, largestProvince) = provinces.last
val newLargestCapitals = Random.shuffle(largestProvince).take(2)
val relaxedCapitals = MapDivision(newLargestCapitals, largestProvince, terrain).lloydRelaxationCapitals
val fixedCapitals = capitals - smallestCapital - largestCapital ++ relaxedCapitals
MapDivision(fixedCapitals, allHexes, terrain)
}
}
|
RenualdMarch/merc
|
src/main/scala/mr/merc/map/generator/WorldMapGenerator.scala
|
Scala
|
gpl-3.0
| 11,956 |
package views.html.analyse
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.game.Pov
object replayBot {
def apply(
pov: Pov,
initialFen: Option[chess.format.FEN],
pgn: String,
simul: Option[lila.simul.Simul],
cross: Option[lila.game.Crosstable.WithMatchup]
)(implicit ctx: Context) = {
views.html.analyse.bits.layout(
title = replay titleOf pov,
moreCss = cssTag("analyse.round"),
openGraph = povOpenGraph(pov).some
) {
main(cls := "analyse")(
st.aside(cls := "analyse__side")(
views.html.game.side(pov, initialFen, none, simul = simul, bookmarked = false)
),
div(cls := "analyse__board main-board")(chessgroundBoard),
div(cls := "analyse__tools")(div(cls := "ceval")),
div(cls := "analyse__controls"),
div(cls := "analyse__underboard")(
div(cls := "analyse__underboard__panels")(
div(cls := "fen-pgn active")(
div(
strong("FEN"),
input(readonly, spellcheck := false, cls := "copyable autoselect analyse__underboard__fen")
),
div(cls := "pgn")(pgn)
),
cross.map { c =>
div(cls := "ctable active")(
views.html.game.crosstable(pov.player.userId.fold(c)(c.fromPov), pov.gameId.some)
)
}
)
)
)
}
}
}
|
luanlv/lila
|
app/views/analyse/replayBot.scala
|
Scala
|
mit
| 1,497 |
package dsmoq.services.json
/**
 * JSON type for returning license information.
 *
 * @param id license ID
 * @param name license name
*/
case class License(
id: String,
name: String
)
|
nkawa/dsmoq
|
server/apiServer/src/main/scala/dsmoq/services/json/License.scala
|
Scala
|
apache-2.0
| 212 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.external.incorporatedentityid
import models.external.{BvPass, IncorporatedEntity}
import play.api.libs.json.{JsObject, JsString, JsSuccess, Json}
import testHelpers.VatRegSpec
class IncorporatedEntitySpec extends VatRegSpec {
"LimitedCompany" should {
"parse successfully without optional data" in {
val incorpDetails = testLimitedCompany.copy(bpSafeId = None)
val json = Json.toJson(incorpDetails)
json.as[IncorporatedEntity] mustBe incorpDetails
}
"parse successfully with optional data" in {
val incorpDetails = testLimitedCompany
val json = Json.toJson(incorpDetails)
json.as[IncorporatedEntity] mustBe incorpDetails
}
"parse successfully with an empty dateOfIncorporation" in {
val testIncorpNoRegDate: JsObject =
Json.obj("companyProfile" ->
Json.obj(
"companyNumber" -> testCrn,
"companyName" -> testCompanyName
),
"dateOfIncorporation" -> JsString(""),
"countryOfIncorporation" -> testCountry,
"identifiersMatch" -> true,
"registration" ->
Json.obj(
"registrationStatus" -> "REGISTERED",
"registeredBusinessPartnerId" -> testBpSafeId
),
"businessVerification" ->
Json.obj(
"verificationStatus" -> "PASS"
)
)
val res = Json.fromJson[IncorporatedEntity](testIncorpNoRegDate)(IncorporatedEntity.apiFormat)
val expected: IncorporatedEntity = IncorporatedEntity(
companyNumber = testCrn,
companyName = Some(testCompanyName),
ctutr = None,
chrn = None,
dateOfIncorporation = None,
countryOfIncorporation = "GB",
identifiersMatch = true,
registration = testRegistration,
businessVerification = Some(BvPass),
bpSafeId = Some(testBpSafeId)
)
res mustBe JsSuccess(expected)
}
}
"LimitedCompany apiFormat" should {
"parse successfully without optional data" in {
val incorpDetails = testLimitedCompany.copy(bpSafeId = None)
val json = Json.toJson(incorpDetails)(IncorporatedEntity.apiFormat)
json.as[IncorporatedEntity](IncorporatedEntity.apiFormat) mustBe incorpDetails
}
"parse successfully with optional data" in {
val incorpDetails = testLimitedCompany
val json = Json.toJson(incorpDetails)(IncorporatedEntity.apiFormat)
json.as[IncorporatedEntity](IncorporatedEntity.apiFormat) mustBe incorpDetails
}
}
}
|
hmrc/vat-registration-frontend
|
test/models/external/incorporatedentityid/IncorporatedEntitySpec.scala
|
Scala
|
apache-2.0
| 3,169 |
object Test extends dotty.runtime.LegacyApp {
import Macros._
println("2".toOptionOfInt)
}
|
folone/dotty
|
tests/disabled/macro/run/macro-term-declared-in-implicit-class/Test_2.scala
|
Scala
|
bsd-3-clause
| 94 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import scala.reflect.ClassTag
/**
* Bottle allows varying dimensionality input to be forwarded through any module
* that accepts input of nInputDim dimensions, and generates output of nOutputDim dimensions.
*
 * @param module the wrapped module
 * @param nInputDim number of dimensions the wrapped module accepts
 * @param nOutputDim1 number of dimensions the wrapped module produces (defaults to nInputDim)
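 *
 * A minimal usage sketch (hypothetical sizes; `Linear` is BigDL's fully connected layer):
 * {{{
 * // leading batch dims are fused: a 4x5x10 input is viewed as 20x10,
 * // forwarded through Linear(10, 2), then reshaped back to 4x5x2
 * val bottle = Bottle(Linear[Float](10, 2), 2, 2)
 * }}}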
*/
@SerialVersionUID(8522437491532919144L)
class Bottle[T: ClassTag](
val module: Module[T],
val nInputDim: Int = 2,
val nOutputDim1: Int = Int.MaxValue)
(implicit ev: TensorNumeric[T]) extends Container[Tensor[T], Tensor[T], T] {
private val nOutputDim = if (nOutputDim1 == Int.MaxValue) nInputDim else nOutputDim1
private val dimDelta = nInputDim - nOutputDim
@transient
private var inShape: Tensor[Double] = null
@transient
private var outShape: Tensor[Double] = null
this.modules.insert(0, module)
override def updateOutput(input: Tensor[T]): Tensor[T] = {
// first batchDims dimensions will be fused
val batchDims = input.dim() - nInputDim + 1
if (null == inShape) inShape = Tensor[Double](nInputDim)
if (null == outShape) outShape = Tensor[Double](nOutputDim)
if (batchDims > 1) {
val inSize = Tensor[Double](Storage(input.size.map(_.toDouble)))
val squeezeSize = inSize.storage().array().slice(0, batchDims - 1).product
inShape.copy(inSize.narrow(1, batchDims, input.dim() - batchDims + 1))
inShape.narrow(1, 1, 1).mul(squeezeSize)
// Forward with the module's dimension
val newInput = input.view(inShape.storage().array().map(_.toInt))
val output1 = modules(0).updateOutput(newInput).toTensor[T]
require(output1.dim() == nOutputDim,
s"Bottle: output dims on module should be $nOutputDim, but get ${output1.dim()}")
outShape.copy(Tensor[Double](Storage(output1.size.map(_.toDouble))))
if (math.abs(dimDelta) > 0) inSize.resize(inSize.size(1) - dimDelta)
inSize.narrow(1, batchDims, inSize.size(1) - batchDims + 1).copy(outShape)
inSize.narrow(1, batchDims, 1).div(squeezeSize)
output.set(output1.view(inSize.storage().array().map(_.toInt)))
} else {
output.set(modules(0).updateOutput(input).toTensor[T])
}
output
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
if (input.dim() > nInputDim) {
val input_ = input.view(inShape.storage().array().map(_.toInt))
val gradOutput_ = gradOutput.view(outShape.storage().array().map(_.toInt))
modules(0).updateGradInput(input_, gradOutput_)
val t2 = modules(0).gradInput.toTensor[T].resizeAs(input)
gradInput.set(t2)
} else {
val t1 = modules(0).updateGradInput(input, gradOutput).toTensor[T]
gradInput.set(t1)
}
gradInput
}
override def accGradParameters(input: Tensor[T], gradOutput: Tensor[T]): Unit = {
if (input.dim() > nInputDim) {
val input_ = input.view(inShape.storage().array().map(_.toInt))
val gradOutput_ = gradOutput.view(outShape.storage().array().map(_.toInt))
modules(0).accGradParameters(input_, gradOutput_)
} else {
modules(0).accGradParameters(input, gradOutput)
}
}
override def toString(): String = {
s"${getPrintName}($module, $nInputDim, $nOutputDim1)"
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[Bottle[T]]
override def equals(other: Any): Boolean = other match {
case that: Bottle[T] =>
super.equals(that) &&
(that canEqual this) &&
module == that.module &&
nInputDim == that.nInputDim &&
nOutputDim1 == that.nOutputDim1
case _ => false
}
override def hashCode(): Int = {
def getHashCode(a: Any): Int = if (a == null) 0 else a.hashCode()
val state = Seq(super.hashCode(), module, nInputDim, nOutputDim1)
state.map(getHashCode).foldLeft(0)((a, b) => 37 * a + b)
}
}
object Bottle {
def apply[@specialized(Float, Double) T: ClassTag](
module: Module[T],
nInputDim: Int = 2,
nOutputDim1: Int = Int.MaxValue)(implicit ev: TensorNumeric[T]) : Bottle[T] = {
new Bottle[T](module, nInputDim, nOutputDim1)
}
}
|
jenniew/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Bottle.scala
|
Scala
|
apache-2.0
| 4,961 |
package org.bitcoins.core.bloom
import org.bitcoins.core.util.Factory
/**
* Created by chris on 8/3/16.
* Specifies how to update a bloom filter
* [[https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki#filter-matching-algorithm]]
*/
sealed trait BloomFlag {
def byte: Byte
}
/** The filtering node should not update the filter. */
case object BloomUpdateNone extends BloomFlag {
def byte = 0.toByte
}
/**
* If the filter matches any data element in a pubkey script,
* the corresponding outpoint is added to the filter.
*/
case object BloomUpdateAll extends BloomFlag {
def byte = 1.toByte
}
/**
* If the filter matches any data element in a pubkey script and that
* scriptPubKey is either a P2PKH or non-P2SH pay-to-multisig script,
* the outpoint for this transaction is added to the filter.
*/
case object BloomUpdateP2PKOnly extends BloomFlag {
def byte = 2.toByte
}
object BloomFlag extends Factory[BloomFlag] {
private def flags = Seq(BloomUpdateNone, BloomUpdateAll, BloomUpdateP2PKOnly)
def apply(byte: Byte): BloomFlag = {
val flagOpt = flags.find(_.byte == byte)
if (flagOpt.isDefined) flagOpt.get
else throw new IllegalArgumentException("The given byte was not defined for BloomFlag, got: " + byte)
}
def fromBytes(bytes: Seq[Byte]): BloomFlag = BloomFlag(bytes.head)
}
|
Christewart/bitcoin-s-core
|
src/main/scala/org/bitcoins/core/bloom/BloomFlag.scala
|
Scala
|
mit
| 1,337 |
package hash.tree
/**
 * @author Simon Dirmeier {@literal [email protected]}
*/
trait INode[T]
|
dirmeier/algorithms-and-datastructures
|
hash-tree/src/hash/tree/INode.scala
|
Scala
|
gpl-3.0
| 105 |
package org.openurp.edu.eams.teach.grade.course
import org.beangle.data.model.Entity
import org.beangle.commons.entity.TimeEntity
import org.openurp.base.Semester
import org.openurp.edu.base.Project
import org.openurp.edu.base.Student
import org.openurp.edu.base.Course
import org.openurp.edu.teach.code.ExamStatus
import org.openurp.edu.teach.code.GradeType
import org.openurp.edu.eams.teach.grade.course.model.GradeModifyApplyBean.GradeModifyStatus
trait GradeModifyApply extends Entity[Long] with TimeEntity {
def getGradeType(): GradeType
def setGradeType(gradeType: GradeType): Unit
def getExamStatus(): ExamStatus
def setExamStatus(examStatus: ExamStatus): Unit
def getScore(): java.lang.Float
def setScore(score: java.lang.Float): Unit
def getCourse(): Course
def setCourse(course: Course): Unit
def getStd(): Student
def setStd(std: Student): Unit
def getSemester(): Semester
def setSemester(semester: Semester): Unit
def getProject(): Project
def setProject(project: Project): Unit
def getStatus(): GradeModifyStatus
def setStatus(status: GradeModifyStatus): Unit
def getApplyer(): String
def setApplyer(applyer: String): Unit
def getAuditer(): String
def setAuditer(auditer: String): Unit
def getFinalAuditer(): String
def setFinalAuditer(finalAuditer: String): Unit
def getScoreText(): String
def setScoreText(scoreText: String): Unit
def getOrigScoreText(): String
def setOrigScoreText(origScoreText: String): Unit
def getOrigScore(): java.lang.Float
def setOrigScore(origScore: java.lang.Float): Unit
def getExamStatusBefore(): ExamStatus
def setExamStatusBefore(examStatusBefore: ExamStatus): Unit
def hasChange(): Boolean
def getApplyReason(): String
def setApplyReason(applyReason: String): Unit
def getAuditReason(): String
def setAuditReason(auditReason: String): Unit
}
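// A minimal consumer sketch (illustrative only; any concrete bean implementing
// this trait, e.g. the GradeModifyApplyBean referenced in the imports, fits):
//
//   def approve(apply: GradeModifyApply, auditor: String): Unit =
//     if (apply.hasChange()) apply.setAuditer(auditor)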
|
openurp/edu-eams-webapp
|
grade/src/main/scala/org/openurp/edu/eams/teach/grade/course/GradeModifyApply.scala
|
Scala
|
gpl-3.0
| 1,898 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
// elements
import SharedHelpers._
import org.scalatest.events._
import org.scalactic.Prettifier
import java.awt.AWTError
import java.lang.annotation.AnnotationFormatError
import java.nio.charset.CoderMalfunctionError
import javax.xml.parsers.FactoryConfigurationError
import javax.xml.transform.TransformerFactoryConfigurationError
import org.scalactic.exceptions.NullArgumentException
import org.scalatest.exceptions.DuplicateTestNameException
import org.scalatest.exceptions.NotAllowedException
import org.scalatest.exceptions.TestCanceledException
import org.scalatest.exceptions.TestFailedException
import org.scalatest.exceptions.TestRegistrationClosedException
class WordSpecSpec extends FunSpec with GivenWhenThen {
private val prettifier = Prettifier.default
describe("A WordSpec") {
it("should invoke withFixture from runTest") {
val a = new WordSpec {
var withFixtureWasInvoked = false
var testWasInvoked = false
override def withFixture(test: NoArgTest): Outcome = {
withFixtureWasInvoked = true
super.withFixture(test)
}
"do something" in {
testWasInvoked = true
/* ASSERTION_SUCCEED */
}
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.withFixtureWasInvoked)
assert(a.testWasInvoked)
}
it("should pass the correct test name in the NoArgTest passed to withFixture") {
val a = new WordSpec {
var correctTestNameWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
correctTestNameWasPassed = test.name == "do something"
super.withFixture(test)
}
"do something" in {/* ASSERTION_SUCCEED */}
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter))
assert(a.correctTestNameWasPassed)
}
it("should pass the correct config map in the NoArgTest passed to withFixture") {
val a = new WordSpec {
var correctConfigMapWasPassed = false
override def withFixture(test: NoArgTest): Outcome = {
correctConfigMapWasPassed = (test.configMap == ConfigMap("hi" -> 7))
super.withFixture(test)
}
"do something" in {/* ASSERTION_SUCCEED */}
}
import scala.language.reflectiveCalls
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> 7), None, new Tracker(), Set.empty))
assert(a.correctConfigMapWasPassed)
}
describe("(when a nesting rule has been violated)") {
it("should, if they call a should from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" should {
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"should\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a should with a nested in from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" should {
"should never run" in {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"should\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a when from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" when {
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"when\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a when with a nested in from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" when {
"should never run" in {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"when\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a that from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" that {
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"that\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a that with a nested in from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" that {
"should never run" in {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"that\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a which from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" which {
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"which\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a which with a nested in from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" which {
"should never run" in {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"which\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a can from within an in clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" can {
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"can\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a can with a nested in from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" can {
"should never run" in {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
        ensureTestFailedEventReceivedWithCorrectMessage(spec, "should blow up", "a \"can\" clause may not appear inside an \"in\" clause")
}
it("should, if they call a nested it from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"should never run" in {
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested it with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"should never run" taggedAs(mytags.SlowAsMolasses) in {
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested registerTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
registerTest("should blow up") {
registerTest("should never run", mytags.SlowAsMolasses) {
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a describe with a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"in the wrong place, at the wrong time" should {
"should never run" ignore {
assert(1 === 1)
}
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested ignore from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"should never run" ignore {
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested ignore with tags from within an it clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
"should blow up" in {
"should never run" taggedAs(mytags.SlowAsMolasses) ignore {
assert(1 === 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
it("should, if they call a nested registerIgnoredTest with tags from within a registerTest clause, result in a TestFailedException when running the test") {
class MySpec extends WordSpec {
registerTest("should blow up") {
registerIgnoredTest("should never run", mytags.SlowAsMolasses) {
assert(1 == 1)
}
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
ensureTestFailedEventReceived(spec, "should blow up")
}
}
it("should return the test names in registration order from testNames") {
val a = new WordSpec {
"it should test this" in {/* ASSERTION_SUCCEED */}
"it should test that" in {/* ASSERTION_SUCCEED */}
}
assertResult(List("it should test this", "it should test that")) {
a.testNames.iterator.toList
}
val b = new WordSpec {}
assertResult(List[String]()) {
b.testNames.iterator.toList
}
val c = new WordSpec {
"it should test that" in {/* ASSERTION_SUCCEED */}
"it should test this" in {/* ASSERTION_SUCCEED */}
}
assertResult(List("it should test that", "it should test this")) {
c.testNames.iterator.toList
}
val d = new WordSpec {
"A Tester" should {
"test that" in {/* ASSERTION_SUCCEED */}
"test this" in {/* ASSERTION_SUCCEED */}
}
}
assertResult(List("A Tester should test that", "A Tester should test this")) {
d.testNames.iterator.toList
}
val e = new WordSpec {
"A Tester" should {
"test this" in {/* ASSERTION_SUCCEED */}
"test that" in {/* ASSERTION_SUCCEED */}
}
}
assertResult(List("A Tester should test this", "A Tester should test that")) {
e.testNames.iterator.toList
}
}
it("should throw DuplicateTestNameException if a duplicate test name registration is attempted") {
intercept[DuplicateTestNameException] {
new WordSpec {
"should test this" in {/* ASSERTION_SUCCEED */}
"should test this" in {/* ASSERTION_SUCCEED */}
}
}
intercept[DuplicateTestNameException] {
new WordSpec {
"should test this" in {/* ASSERTION_SUCCEED */}
"should test this" ignore {/* ASSERTION_SUCCEED */}
}
}
intercept[DuplicateTestNameException] {
new WordSpec {
"should test this" ignore {/* ASSERTION_SUCCEED */}
"should test this" ignore {/* ASSERTION_SUCCEED */}
}
}
intercept[DuplicateTestNameException] {
new WordSpec {
"should test this" ignore {/* ASSERTION_SUCCEED */}
"should test this" in {/* ASSERTION_SUCCEED */}
}
}
}
describe("(with info calls)") {
class InfoInsideTestSpec extends WordSpec {
val msg = "hi there, dude"
val testName = "test name"
testName in {
info(msg)
/* ASSERTION_SUCCEED */
}
}
      // In a Spec, any InfoProvided's fired during the test should be cached and sent out after the test has
      // succeeded or failed. This makes the report look nicer, because the info is tucked under the "specifier"
      // text for that test.
it("should, when the info appears in the code of a successful test, report the info in the TestSucceeded") {
val spec = new InfoInsideTestSpec
val (testStartingIndex, testSucceededIndex) =
getIndexesForTestInformerEventOrderTests(spec, spec.testName, spec.msg)
assert(testStartingIndex < testSucceededIndex)
}
class InfoBeforeTestSpec extends WordSpec {
val msg = "hi there, dude"
val testName = "test name"
info(msg)
testName in {/* ASSERTION_SUCCEED */}
}
it("should, when the info appears in the body before a test, report the info before the test") {
val spec = new InfoBeforeTestSpec
val (infoProvidedIndex, testStartingIndex, testSucceededIndex) =
getIndexesForInformerEventOrderTests(spec, spec.testName, spec.msg)
assert(infoProvidedIndex < testStartingIndex)
assert(testStartingIndex < testSucceededIndex)
}
it("should, when the info appears in the body after a test, report the info after the test runs") {
val msg = "hi there, dude"
val testName = "test name"
class MySpec extends WordSpec {
testName in {/* ASSERTION_SUCCEED */}
info(msg)
}
val (infoProvidedIndex, testStartingIndex, testSucceededIndex) =
getIndexesForInformerEventOrderTests(new MySpec, testName, msg)
assert(testStartingIndex < testSucceededIndex)
assert(testSucceededIndex < infoProvidedIndex)
}
it("should print to stdout when info is called by a method invoked after the suite has been executed") {
class MySpec extends WordSpec {
callInfo() // This should work fine
def callInfo(): Unit = {
info("howdy")
}
"howdy also" in {
callInfo() // This should work fine
/* ASSERTION_SUCCEED */
}
}
val spec = new MySpec
val myRep = new EventRecordingReporter
spec.run(None, Args(myRep))
      spec.callInfo() // TODO: Actually test that this prints to stdout
}
it("should send an InfoProvided with an IndentedText formatter with level 1 when called outside a test") {
val spec = new InfoBeforeTestSpec
val indentedText = getIndentedTextFromInfoProvided(spec)
assert(indentedText === IndentedText("+ " + spec.msg, spec.msg, 0))
}
it("should send an InfoProvided with an IndentedText formatter with level 2 when called within a test") {
val spec = new InfoInsideTestSpec
val indentedText = getIndentedTextFromTestInfoProvided(spec)
assert(indentedText === IndentedText(" + " + spec.msg, spec.msg, 1))
}
}
it("should throw NullArgumentException if a null test tag is provided") {
// it
intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(null) in {/* ASSERTION_SUCCEED */}
}
}
val caught = intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(mytags.SlowAsMolasses, null) in {/* ASSERTION_SUCCEED */}
}
}
assert(caught.getMessage === "a test tag was null")
intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) in {/* ASSERTION_SUCCEED */}
}
}
// ignore
intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(null) ignore {/* ASSERTION_SUCCEED */}
}
}
val caught2 = intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(mytags.SlowAsMolasses, null) ignore {/* ASSERTION_SUCCEED */}
}
}
assert(caught2.getMessage === "a test tag was null")
intercept[NullArgumentException] {
new WordSpec {
"hi" taggedAs(mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) ignore {/* ASSERTION_SUCCEED */}
}
}
// registerTest
intercept[NullArgumentException] {
new WordSpec {
registerTest("hi", null) {/* ASSERTION_SUCCEED */}
}
}
val caught3 = intercept[NullArgumentException] {
new WordSpec {
registerTest("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */}
}
}
assert(caught3.getMessage == "a test tag was null")
intercept[NullArgumentException] {
new WordSpec {
registerTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */}
}
}
// registerIgnoredTest
intercept[NullArgumentException] {
new WordSpec {
registerIgnoredTest("hi", null) {/* ASSERTION_SUCCEED */}
}
}
val caught4 = intercept[NullArgumentException] {
new WordSpec {
registerIgnoredTest("hi", mytags.SlowAsMolasses, null) {/* ASSERTION_SUCCEED */}
}
}
assert(caught4.getMessage == "a test tag was null")
intercept[NullArgumentException] {
new WordSpec {
registerIgnoredTest("hi", mytags.SlowAsMolasses, null, mytags.WeakAsAKitten) {/* ASSERTION_SUCCEED */}
}
}
}
it("should return a correct tags map from the tags method using is (pending)") {
val a = new WordSpec {
"test this" ignore {/* ASSERTION_SUCCEED */}
"test that" is (pending)
}
assertResult(Map("test this" -> Set("org.scalatest.Ignore"))) {
a.tags
}
val b = new WordSpec {
"test this" is (pending)
"test that" ignore {/* ASSERTION_SUCCEED */}
}
assertResult(Map("test that" -> Set("org.scalatest.Ignore"))) {
b.tags
}
val c = new WordSpec {
"test this" ignore {/* ASSERTION_SUCCEED */}
"test that" ignore {/* ASSERTION_SUCCEED */}
}
assertResult(Map("test this" -> Set("org.scalatest.Ignore"), "test that" -> Set("org.scalatest.Ignore"))) {
c.tags
}
val d = new WordSpec {
"test this" taggedAs(mytags.SlowAsMolasses) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) ignore {/* ASSERTION_SUCCEED */}
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses"), "test that" -> Set("org.scalatest.Ignore", "org.scalatest.SlowAsMolasses"))) {
d.tags
}
val e = new WordSpec {
"test this" is (pending)
"test that" is (pending)
}
assertResult(Map()) {
e.tags
}
val f = new WordSpec {
"test this" taggedAs(mytags.SlowAsMolasses, mytags.WeakAsAKitten) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses", "org.scalatest.WeakAsAKitten"), "test that" -> Set("org.scalatest.SlowAsMolasses"))) {
f.tags
}
val g = new WordSpec {
"test this" taggedAs(mytags.SlowAsMolasses, mytags.WeakAsAKitten) is (pending)
"test that" taggedAs(mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
}
assertResult(Map("test this" -> Set("org.scalatest.SlowAsMolasses", "org.scalatest.WeakAsAKitten"), "test that" -> Set("org.scalatest.SlowAsMolasses"))) {
g.tags
}
}
class TestWasCalledSuite extends WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"run this" in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"run that, maybe" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
it("should execute all tests when run is called with testName None") {
val b = new TestWasCalledSuite
b.run(None, Args(SilentReporter))
assert(b.theTestThisCalled)
assert(b.theTestThatCalled)
}
it("should execute one test when run is called with a defined testName") {
val a = new TestWasCalledSuite
a.run(Some("run this"), Args(SilentReporter))
assert(a.theTestThisCalled)
assert(!a.theTestThatCalled)
}
it("should report as ignored, and not run, tests marked ignored") {
val a = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
val b = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB))
assert(repB.testIgnoredReceived)
assert(repB.lastEvent.isDefined)
assert(repB.lastEvent.get.testName endsWith "test this")
assert(!b.theTestThisCalled)
assert(b.theTestThatCalled)
val c = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" ignore { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repC = new TestIgnoredTrackingReporter
c.run(None, Args(repC))
assert(repC.testIgnoredReceived)
assert(repC.lastEvent.isDefined)
assert(repC.lastEvent.get.testName endsWith "test that", repC.lastEvent.get.testName)
assert(c.theTestThisCalled)
assert(!c.theTestThatCalled)
// The order I want is order of appearance in the file.
// Will try and implement that tomorrow. Subtypes will be able to change the order.
val d = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" ignore { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD))
assert(repD.testIgnoredReceived)
assert(repD.lastEvent.isDefined)
assert(repD.lastEvent.get.testName endsWith "test that") // last because should be in order of appearance
assert(!d.theTestThisCalled)
assert(!d.theTestThatCalled)
}
it("should ignore a test marked as ignored if run is invoked with that testName") {
      // If I provide a specific testName to run, and that test is marked as ignored,
      // the run should report it as ignored and not actually invoke it.
val e = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
import scala.language.reflectiveCalls
val repE = new TestIgnoredTrackingReporter
e.run(Some("test this"), Args(repE))
assert(repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(!e.theTestThatCalled)
}
it("should run only those tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
val d = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
"test this" taggedAs(mytags.SlowAsMolasses) ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" in { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" in { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" ignore { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" in { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, mytags.SlowAsMolasses excluded
val i = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) in { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) in { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" in { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, mytags.SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) ignore { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" in { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
"test this" taggedAs(mytags.SlowAsMolasses, mytags.FastAsLight) ignore { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
"test that" taggedAs(mytags.SlowAsMolasses) ignore { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
"test the other" ignore { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should run only those registered tests selected by the tags to include and exclude sets") {
// Nothing is excluded
val a = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
import scala.language.reflectiveCalls
val repA = new TestIgnoredTrackingReporter
a.run(None, Args(repA))
assert(!repA.testIgnoredReceived)
assert(a.theTestThisCalled)
assert(a.theTestThatCalled)
// SlowAsMolasses is included, one test should be excluded
val b = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that") { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repB = new TestIgnoredTrackingReporter
b.run(None, Args(repB, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repB.testIgnoredReceived)
assert(b.theTestThisCalled)
assert(!b.theTestThatCalled)
// SlowAsMolasses is included, and both tests should be included
val c = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
registerTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repC = new TestIgnoredTrackingReporter
      c.run(None, Args(repC, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set()), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repC.testIgnoredReceived)
assert(c.theTestThisCalled)
assert(c.theTestThatCalled)
// SlowAsMolasses is included. both tests should be included but one ignored
val d = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
}
val repD = new TestIgnoredTrackingReporter
d.run(None, Args(repD, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repD.testIgnoredReceived)
assert(!d.theTestThisCalled)
assert(d.theTestThatCalled)
// SlowAsMolasses included, FastAsLight excluded
val e = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repE = new TestIgnoredTrackingReporter
e.run(None, Args(repE, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repE.testIgnoredReceived)
assert(!e.theTestThisCalled)
assert(e.theTestThatCalled)
assert(!e.theTestTheOtherCalled)
// An Ignored test that was both included and excluded should not generate a TestIgnored event
val f = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repF = new TestIgnoredTrackingReporter
f.run(None, Args(repF, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repF.testIgnoredReceived)
assert(!f.theTestThisCalled)
assert(f.theTestThatCalled)
assert(!f.theTestTheOtherCalled)
// An Ignored test that was not included should not generate a TestIgnored event
val g = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repG = new TestIgnoredTrackingReporter
g.run(None, Args(repG, Stopper.default, Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight")),
ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repG.testIgnoredReceived)
assert(!g.theTestThisCalled)
assert(g.theTestThatCalled)
assert(!g.theTestTheOtherCalled)
// No tagsToInclude set, FastAsLight excluded
val h = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repH = new TestIgnoredTrackingReporter
h.run(None, Args(repH, Stopper.default, Filter(None, Set("org.scalatest.FastAsLight")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repH.testIgnoredReceived)
assert(!h.theTestThisCalled)
assert(h.theTestThatCalled)
assert(h.theTestTheOtherCalled)
// No tagsToInclude set, mytags.SlowAsMolasses excluded
val i = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repI = new TestIgnoredTrackingReporter
i.run(None, Args(repI, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(!repI.testIgnoredReceived)
assert(!i.theTestThisCalled)
assert(!i.theTestThatCalled)
assert(i.theTestTheOtherCalled)
// No tagsToInclude set, mytags.SlowAsMolasses excluded, TestIgnored should not be received on excluded ones
val j = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repJ = new TestIgnoredTrackingReporter
j.run(None, Args(repJ, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses")), ConfigMap.empty, None, new Tracker, Set.empty))
      assert(!repJ.testIgnoredReceived)
assert(!j.theTestThisCalled)
assert(!j.theTestThatCalled)
assert(j.theTestTheOtherCalled)
// Same as previous, except Ignore specifically mentioned in excludes set
val k = new WordSpec {
var theTestThisCalled = false
var theTestThatCalled = false
var theTestTheOtherCalled = false
registerIgnoredTest("test this", mytags.SlowAsMolasses, mytags.FastAsLight) { theTestThisCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test that", mytags.SlowAsMolasses) { theTestThatCalled = true; /* ASSERTION_SUCCEED */ }
registerIgnoredTest("test the other") { theTestTheOtherCalled = true; /* ASSERTION_SUCCEED */ }
}
val repK = new TestIgnoredTrackingReporter
k.run(None, Args(repK, Stopper.default, Filter(None, Set("org.scalatest.SlowAsMolasses", "org.scalatest.Ignore")), ConfigMap.empty, None, new Tracker, Set.empty))
assert(repK.testIgnoredReceived)
assert(!k.theTestThisCalled)
assert(!k.theTestThatCalled)
assert(!k.theTestTheOtherCalled)
}
it("should return the correct test count from its expectedTestCount method") {
val a = new WordSpec {
"test this" in {/* ASSERTION_SUCCEED */}
"test that" in {/* ASSERTION_SUCCEED */}
}
assert(a.expectedTestCount(Filter()) === 2)
val b = new WordSpec {
"test this" ignore {/* ASSERTION_SUCCEED */}
"test that" in {/* ASSERTION_SUCCEED */}
}
assert(b.expectedTestCount(Filter()) === 1)
val c = new WordSpec {
"test this" taggedAs(mytags.FastAsLight) in {/* ASSERTION_SUCCEED */}
"test that" in {/* ASSERTION_SUCCEED */}
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) === 1)
val d = new WordSpec {
"test this" taggedAs(mytags.FastAsLight, mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
"test that" taggedAs(mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
"test the other thing" in {/* ASSERTION_SUCCEED */}
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 1)
assert(d.expectedTestCount(Filter()) === 3)
val e = new WordSpec {
"test this" taggedAs(mytags.FastAsLight, mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
"test that" taggedAs(mytags.SlowAsMolasses) in {/* ASSERTION_SUCCEED */}
"test the other thing" ignore {/* ASSERTION_SUCCEED */}
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) === 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) === 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) === 0)
assert(e.expectedTestCount(Filter()) === 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) === 10)
}
it("should return the correct test count from its expectedTestCount method when uses registerTest and registerIgnoredTest to register tests") {
val a = new WordSpec {
registerTest("test this") {/* ASSERTION_SUCCEED */}
registerTest("test that") {/* ASSERTION_SUCCEED */}
}
assert(a.expectedTestCount(Filter()) == 2)
val b = new WordSpec {
registerIgnoredTest("test this") {/* ASSERTION_SUCCEED */}
registerTest("test that") {/* ASSERTION_SUCCEED */}
}
assert(b.expectedTestCount(Filter()) == 1)
val c = new WordSpec {
registerTest("test this", mytags.FastAsLight) {/* ASSERTION_SUCCEED */}
registerTest("test that") {/* ASSERTION_SUCCEED */}
}
assert(c.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(c.expectedTestCount(Filter(None, Set("org.scalatest.FastAsLight"))) == 1)
val d = new WordSpec {
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */}
registerTest("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */}
registerTest("test the other thing") {/* ASSERTION_SUCCEED */}
}
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(d.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(d.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 1)
assert(d.expectedTestCount(Filter()) == 3)
val e = new WordSpec {
registerTest("test this", mytags.FastAsLight, mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */}
registerTest("test that", mytags.SlowAsMolasses) {/* ASSERTION_SUCCEED */}
registerIgnoredTest("test the other thing") {/* ASSERTION_SUCCEED */}
}
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.FastAsLight")), Set())) == 1)
assert(e.expectedTestCount(Filter(Some(Set("org.scalatest.SlowAsMolasses")), Set("org.scalatest.FastAsLight"))) == 1)
assert(e.expectedTestCount(Filter(None, Set("org.scalatest.SlowAsMolasses"))) == 0)
assert(e.expectedTestCount(Filter()) == 2)
val f = new Suites(a, b, c, d, e)
assert(f.expectedTestCount(Filter()) == 10)
}
it("should generate a TestPending message when the test body is (pending)") {
val a = new WordSpec {
"should do this" is (pending)
"should do that" in {
assert(2 + 2 === 4)
}
"should do something else" in {
assert(2 + 2 === 4)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should allow is pendingUntilFixed to be used after is") {
val a = new WordSpec {
"should do this" is pendingUntilFixed {
fail("i meant to do that")
}
"should do that" in {
assert(2 + 2 === 4)
}
"should do something else" in {
assert(2 + 2 === 4)
pending
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tp = rep.testPendingEventsReceived
assert(tp.size === 2)
}
it("should generate a test failure if a Throwable, or an Error other than direct Error subtypes " +
"known in JDK 1.5, excluding AssertionError") {
val a = new WordSpec {
"This WordSpec" should {
"throw AssertionError" in { throw new AssertionError }
"throw plain old Error" in { throw new Error }
"throw Throwable" in { throw new Throwable }
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val tf = rep.testFailedEventsReceived
assert(tf.size === 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should propagate out Errors that are direct subtypes of Error in JDK 1.5, other than " +
"AssertionError, causing Suites and Runs to abort.") {
val a = new WordSpec {
"This WordSpec" should {
"throw AssertionError" in { throw new OutOfMemoryError }
}
}
intercept[OutOfMemoryError] {
a.run(None, Args(SilentReporter))
}
}
// SKIP-SCALATESTJS,NATIVE-END
/*
it("should send InfoProvided events with aboutAPendingTest set to true for info " +
"calls made from a test that is pending") {
val a = new WordSpec with GivenWhenThen {
"A WordSpec" should {
"do something" in {
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
pending
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testPending = rep.testPendingEventsReceived
assert(testPending.size === 1)
val recordedEvents = testPending(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && ip.aboutAPendingTest.get)
}
val so = rep.scopeOpenedEventsReceived
assert(so.size === 1)
for (event <- so) {
assert(event.message == "A WordSpec")
}
val sc = rep.scopeClosedEventsReceived
      assert(sc.size === 1)
for (event <- sc) {
assert(event.message == "A WordSpec")
}
}
it("should send InfoProvided events with aboutAPendingTest set to false for info " +
"calls made from a test that is not pending") {
val a = new WordSpec with GivenWhenThen {
"A WordSpec" should {
"do something" in {
given("two integers")
when("one is subracted from the other")
then("the result is the difference between the two numbers")
assert(1 + 1 === 2)
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val testSucceeded = rep.testSucceededEventsReceived
assert(testSucceeded.size === 1)
val recordedEvents = testSucceeded(0).recordedEvents
assert(recordedEvents.size === 3)
for (event <- recordedEvents) {
val ip = event.asInstanceOf[InfoProvided]
assert(ip.aboutAPendingTest.isDefined && !ip.aboutAPendingTest.get)
}
val so = rep.scopeOpenedEventsReceived
assert(so.size === 1)
for (event <- so) {
assert(event.message == "A WordSpec")
}
val sc = rep.scopeClosedEventsReceived
      assert(sc.size === 1)
for (event <- sc) {
assert(event.message == "A WordSpec")
}
}
*/
it("should not put parentheses around should clauses that follow when") {
val a = new WordSpec {
"A Stack" when {
"empty" should {
"chill out" in {
assert(1 + 1 === 2)
}
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val ts = rep.testSucceededEventsReceived
assert(ts.size === 1)
assert(ts.head.testName === "A Stack when empty should chill out")
}
it("should not put parentheses around should clauses that don't follow when") {
val a = new WordSpec {
"A Stack" should {
"chill out" in {
assert(1 + 1 === 2)
}
}
}
val rep = new EventRecordingReporter
a.run(None, Args(rep))
val ts = rep.testSucceededEventsReceived
assert(ts.size === 1)
assert(ts.head.testName === "A Stack should chill out")
}
it("should contains correct formatter for TestStarting, TestSucceeded, TestFailed, TestPending, TestCanceled and TestIgnored") {
class TestSpec extends WordSpec {
"a feature" should {
"succeeded here" in {/* ASSERTION_SUCCEED */}
"failed here" in { fail }
"pending here" in { pending }
"cancel here" in { cancel }
"ignore here" ignore {/* ASSERTION_SUCCEED */}
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
val testStartingList = rep.testStartingEventsReceived
assert(testStartingList.size === 4)
assert(testStartingList(0).formatter === Some(MotionToSuppress), "Expected testStartingList(0).formatter to be Some(MotionToSuppress), but got: " + testStartingList(0).formatter.getClass.getName)
assert(testStartingList(1).formatter === Some(MotionToSuppress), "Expected testStartingList(1).formatter to be Some(MotionToSuppress), but got: " + testStartingList(1).formatter.getClass.getName)
assert(testStartingList(2).formatter === Some(MotionToSuppress), "Expected testStartingList(2).formatter to be Some(MotionToSuppress), but got: " + testStartingList(2).formatter.getClass.getName)
assert(testStartingList(3).formatter === Some(MotionToSuppress), "Expected testStartingList(3).formatter to be Some(MotionToSuppress), but got: " + testStartingList(3).formatter.getClass.getName)
val testSucceededList = rep.testSucceededEventsReceived
assert(testSucceededList.size === 1)
assert(testSucceededList(0).formatter.isDefined, "Expected testSucceededList(0).formatter to be defined, but it is not.")
assert(testSucceededList(0).formatter.get.isInstanceOf[IndentedText], "Expected testSucceededList(0).formatter to be Some(IndentedText), but got: " + testSucceededList(0).formatter)
val testSucceededFormatter = testSucceededList(0).formatter.get.asInstanceOf[IndentedText]
assert(testSucceededFormatter.formattedText === "- should succeeded here")
assert(testSucceededFormatter.rawText === "should succeeded here")
val testFailedList = rep.testFailedEventsReceived
assert(testFailedList.size === 1)
assert(testFailedList(0).formatter.isDefined, "Expected testFailedList(0).formatter to be defined, but it is not.")
assert(testFailedList(0).formatter.get.isInstanceOf[IndentedText], "Expected testFailedList(0).formatter to be Some(IndentedText), but got: " + testSucceededList(0).formatter)
val testFailedFormatter = testFailedList(0).formatter.get.asInstanceOf[IndentedText]
assert(testFailedFormatter.formattedText === "- should failed here")
assert(testFailedFormatter.rawText === "should failed here")
val testPendingList = rep.testPendingEventsReceived
assert(testPendingList.size === 1)
assert(testPendingList(0).formatter.isDefined, "Expected testPendingList(0).formatter to be defined, but it is not.")
assert(testPendingList(0).formatter.get.isInstanceOf[IndentedText], "Expected testPendingList(0).formatter to be Some(IndentedText), but got: " + testSucceededList(0).formatter)
val testPendingFormatter = testPendingList(0).formatter.get.asInstanceOf[IndentedText]
assert(testPendingFormatter.formattedText === "- should pending here")
assert(testPendingFormatter.rawText === "should pending here")
val testCanceledList = rep.testCanceledEventsReceived
assert(testCanceledList.size === 1)
assert(testCanceledList(0).formatter.isDefined, "Expected testCanceledList(0).formatter to be defined, but it is not.")
assert(testCanceledList(0).formatter.get.isInstanceOf[IndentedText], "Expected testCanceledList(0).formatter to be Some(IndentedText), but got: " + testSucceededList(0).formatter)
val testCanceledFormatter = testCanceledList(0).formatter.get.asInstanceOf[IndentedText]
assert(testCanceledFormatter.formattedText === "- should cancel here")
assert(testCanceledFormatter.rawText === "should cancel here")
val testIgnoredList = rep.testIgnoredEventsReceived
assert(testIgnoredList.size === 1)
assert(testIgnoredList(0).formatter.isDefined, "Expected testIgnoredList(0).formatter to be defined, but it is not.")
assert(testIgnoredList(0).formatter.get.isInstanceOf[IndentedText], "Expected testIgnoredList(0).formatter to be Some(IndentedText), but got: " + testSucceededList(0).formatter)
val testIgnoredFormatter = testIgnoredList(0).formatter.get.asInstanceOf[IndentedText]
assert(testIgnoredFormatter.formattedText === "- should ignore here")
assert(testIgnoredFormatter.rawText === "should ignore here")
}
describe("registerTest and registerIgnoredTest method") {
it("should allow test registration and ignored test registration") {
class TestSpec extends WordSpec {
val a = 1
registerTest("test 1") {
val e = intercept[TestFailedException] {
assert(a == 2)
}
assert(e.message == Some("1 did not equal 2"))
assert(e.failedCodeFileName == Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber == Some(thisLineNumber - 4))
}
registerTest("test 2") {
assert(a == 2)
}
registerTest("test 3") {
pending
}
registerTest("test 4") {
cancel
}
registerIgnoredTest("test 5") {
assert(a == 2)
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerTest nested inside a registerTest") {
class TestSpec extends WordSpec {
var registrationClosedThrown = false
"a feature" should {
registerTest("a scenario") {
registerTest("nested scenario") {
/* ASSERTION_SUCCEED */
}; /* ASSERTION_SUCCEED */
}
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has a registerIgnoredTest nested inside a registerTest") {
class TestSpec extends WordSpec {
var registrationClosedThrown = false
"a feature" should {
registerTest("a scenario") {
registerIgnoredTest("nested scenario") {
/* ASSERTION_SUCCEED */
}; /* ASSERTION_SUCCEED */
}
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("Test cannot be nested inside another test."))
}
}
ignore("should support expectations") { // Unignore after we uncomment the expectation implicits in RegistrationPolicy
class TestSpec extends WordSpec with expectations.Expectations {
"it should do something" in {
expect(1 === 2); /* ASSERTION_SUCCEED */
}
"a widget" should {
"do something" in {
expect(1 === 2); /* ASSERTION_SUCCEED */
}
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 2)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "WordSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 13)
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "WordSpecSpec.scala")
assert(rep.testFailedEventsReceived(1).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 11)
}
}
describe("when failure happens") {
it("should fire TestFailed event with correct stack depth info when test failed") {
class TestSpec extends WordSpec {
"A Stack" should {
"chill out" in {
assert(1 === 2)
}
}
}
val rep = new EventRecordingReporter
val s1 = new TestSpec
s1.run(None, Args(rep))
assert(rep.testFailedEventsReceived.size === 1)
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeFileName.get === "WordSpecSpec.scala")
assert(rep.testFailedEventsReceived(0).throwable.get.asInstanceOf[TestFailedException].failedCodeLineNumber.get === thisLineNumber - 9)
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an in nested inside an in") {
class TestSpec extends WordSpec {
var registrationClosedThrown = false
"a feature" should {
"a scenario" in {
"nested scenario" in {
/* ASSERTION_SUCCEED */
}; /* ASSERTION_SUCCEED */
}
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("An in clause may not appear inside another in clause."))
}
it("should generate TestRegistrationClosedException with correct stack depth info when has an ignore nested inside an in") {
class TestSpec extends WordSpec {
var registrationClosedThrown = false
"a feature" should {
"a scenario" in {
"nested scenario" ignore {
/* ASSERTION_SUCCEED */
}; /* ASSERTION_SUCCEED */
}
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: TestRegistrationClosedException) =>
registrationClosedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.registrationClosedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[TestRegistrationClosedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[TestRegistrationClosedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 24)
assert(trce.message == Some("An ignore clause may not appear inside an in clause."))
}
}
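    // Covers the "it"/"they" shorthand, which re-opens the most recent
    // top-level subject. For example, after
    //   "A Stack" when { "empty" should { "be empty" in {...} } }
    // a subsequent
    //   it should { "do something interesting 1" in {...} }
    // registers the test as "A Stack should do something interesting 1".
    // Used anywhere other than directly after a top-level subject clause,
    // the shorthand is rejected with NotAllowedException.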
describe("shorthand syntax") {
describe("'it'") {
describe("under top level") {
it("should work with subject") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
it should {
"do something interesting 1" in {/* ASSERTION_SUCCEED */}
}
it can {
"do something interesting 2" in {/* ASSERTION_SUCCEED */}
}
it must {
"do something interesting 3" in {/* ASSERTION_SUCCEED */}
}
it when {
"do something interesting 4" in {/* ASSERTION_SUCCEED */}
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
val testStartingList = rep.testStartingEventsReceived
assert(testStartingList.size === 5)
assert(testStartingList(0).testName === "A Stack when empty should be empty")
assert(testStartingList(1).testName === "A Stack should do something interesting 1")
assert(testStartingList(2).testName === "A Stack can do something interesting 2")
assert(testStartingList(3).testName === "A Stack must do something interesting 3")
assert(testStartingList(4).testName === "A Stack when do something interesting 4")
val testSucceededList = rep.testSucceededEventsReceived
assert(testSucceededList.size === 5)
assert(testSucceededList(0).testName === "A Stack when empty should be empty")
assert(testSucceededList(1).testName === "A Stack should do something interesting 1")
assert(testSucceededList(2).testName === "A Stack can do something interesting 2")
assert(testSucceededList(3).testName === "A Stack must do something interesting 3")
assert(testSucceededList(4).testName === "A Stack when do something interesting 4")
}
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called without subject") {
class TestSpec extends WordSpec {
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called without subject") {
class TestSpec extends WordSpec {
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called without subject") {
class TestSpec extends WordSpec {
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called without subject") {
class TestSpec extends WordSpec {
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
}
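        // Inside a nested scope there is no top-level subject to refer back
        // to, so "it" is rejected with NotAllowedException as soon as the
        // suite is constructed, whether or not a sibling branch precedes it.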
describe("under inner level") {
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "An it clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
}
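        // When "it" appears inside a running test body, the misuse cannot be
        // caught at construction time; instead the NotAllowedException is
        // captured via withFixture and surfaces as a failed test.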
describe("under 'in' clause") {
it("should throw NotAllowedException with correct stack depth and message when 'it should' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
it should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "An it clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'it can' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
it can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "An it clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'it must' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
it must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "An it clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'it when' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
it when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "An it clause must only appear after a top level subject clause.")
}
}
}
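      // The "they" shorthand mirrors "it": it re-opens the most recent
      // top-level subject and follows the same placement rules, so the
      // tests below parallel the "it" cases one for one (only the error
      // message changes, to "A they clause ...").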
describe("'they'") {
describe("under top level") {
it("should work with subject") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
they should {
"do something interesting 1" in {/* ASSERTION_SUCCEED */}
}
they can {
"do something interesting 2" in {/* ASSERTION_SUCCEED */}
}
they must {
"do something interesting 3" in {/* ASSERTION_SUCCEED */}
}
they when {
"do something interesting 4" in {/* ASSERTION_SUCCEED */}
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
val testStartingList = rep.testStartingEventsReceived
assert(testStartingList.size === 5)
assert(testStartingList(0).testName === "A Stack when empty should be empty")
assert(testStartingList(1).testName === "A Stack should do something interesting 1")
assert(testStartingList(2).testName === "A Stack can do something interesting 2")
assert(testStartingList(3).testName === "A Stack must do something interesting 3")
assert(testStartingList(4).testName === "A Stack when do something interesting 4")
val testSucceededList = rep.testSucceededEventsReceived
assert(testSucceededList.size === 5)
assert(testSucceededList(0).testName === "A Stack when empty should be empty")
assert(testSucceededList(1).testName === "A Stack should do something interesting 1")
assert(testSucceededList(2).testName === "A Stack can do something interesting 2")
assert(testSucceededList(3).testName === "A Stack must do something interesting 3")
assert(testSucceededList(4).testName === "A Stack when do something interesting 4")
}
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called without subject") {
class TestSpec extends WordSpec {
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called without subject") {
class TestSpec extends WordSpec {
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called without subject") {
class TestSpec extends WordSpec {
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called without subject") {
class TestSpec extends WordSpec {
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
}
"Other do something special" in {/* ASSERTION_SUCCEED */}
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 9))
}
}
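        // As with "it", using "they" inside a nested scope fails at
        // construction time with NotAllowedException.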
describe("under inner level") {
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called with inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called without inner branch") {
class TestSpec extends WordSpec {
"A Stack" when {
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called with inner branch but after an 'in' clause") {
class TestSpec extends WordSpec {
"A Stack" when {
"empty" should {
"be empty" in {/* ASSERTION_SUCCEED */}
}
"do something" in {/* ASSERTION_SUCCEED */}
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}
}
}
val e = intercept[exceptions.NotAllowedException] {
new TestSpec
}
assert(e.getMessage === "A they clause must only appear after a top level subject clause.")
assert(e.failedCodeFileName === Some("WordSpecSpec.scala"))
assert(e.failedCodeLineNumber === Some(thisLineNumber - 10))
}
}
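        // As with "it", using "they" inside a test body is only detectable
        // at run time, so the exception is observed through withFixture.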
describe("under 'in' clause") {
it("should throw NotAllowedException with correct stack depth and message when 'they should' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
they should {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "A they clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'they can' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
they can {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "A they clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'they must' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
they must {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "A they clause must only appear after a top level subject clause.")
}
it("should throw NotAllowedException with correct stack depth and message when 'they when' is called") {
class TestSpec extends WordSpec {
var notAllowedThrown = false
"Something special" in {
they when {
"do something interesting" in {/* ASSERTION_SUCCEED */}
}; /* ASSERTION_SUCCEED */
}
override def withFixture(test: NoArgTest): Outcome = {
val outcome = test.apply()
outcome match {
case Exceptional(ex: exceptions.NotAllowedException) =>
notAllowedThrown = true
case _ =>
}
outcome
}
}
val rep = new EventRecordingReporter
val s = new TestSpec
s.run(None, Args(rep))
assert(s.notAllowedThrown == true)
val testFailedEvents = rep.testFailedEventsReceived
assert(testFailedEvents.size === 1)
assert(testFailedEvents(0).throwable.get.getClass() === classOf[exceptions.NotAllowedException])
val trce = testFailedEvents(0).throwable.get.asInstanceOf[exceptions.NotAllowedException]
assert("WordSpecSpec.scala" === trce.failedCodeFileName.get)
assert(trce.failedCodeLineNumber.get === thisLineNumber - 23)
assert(trce.getMessage === "A they clause must only appear after a top level subject clause.")
}
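          // The block below is currently commented out. It exercises
          // assertions, assumptions, and non-fatal exceptions placed directly
          // inside scope clauses (should/must/when/that/which/can): each case
          // is expected to surface as a NotAllowedException wrapping the
          // underlying TestFailedException, TestCanceledException, or
          // RuntimeException.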
/*it("should generate NotAllowedException wrapping a TestFailedException when assert fails in should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestFailedException when assert fails in must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestFailedException when assert fails in when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestFailedException when assert fails in that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestFailedException when assert fails in which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestFailedException when assert fails in can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
assert(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestFailedException])
val cause = causeThrowable.asInstanceOf[TestFailedException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a TestCanceledException when assume fails in can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
assume(a == 2)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.message == Some(FailureMessages.assertionShouldBePutInsideItOrTheyClauseNotShouldMustWhenThatWhichOrCanClause))
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(causeThrowable.isInstanceOf[TestCanceledException])
val cause = causeThrowable.asInstanceOf[TestCanceledException]
assert("WordSpecSpec.scala" == cause.failedCodeFileName.get)
assert(cause.failedCodeLineNumber.get == thisLineNumber - 15)
assert(cause.message == Some(FailureMessages.didNotEqual(prettifier, 1, 2)))
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInThatClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhichClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
it("should generate NotAllowedException wrapping a non-fatal RuntimeException is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new RuntimeException("on purpose")
assert(a == 1)
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 3)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature")))
assert(causeThrowable.isInstanceOf[RuntimeException])
val cause = causeThrowable.asInstanceOf[RuntimeException]
assert(cause.getMessage == "on purpose")
}
*/
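    // The commented-out tests below check that certain errors
    // (AnnotationFormatError, AWTError, CoderMalfunctionError,
    // FactoryConfigurationError, LinkageError, ...) are propagated as-is
    // from scope clauses rather than wrapped in NotAllowedException. They
    // appear to rely on JVM-only classes, hence the
    // SKIP-SCALATESTJS,NATIVE markers.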
// SKIP-SCALATESTJS,NATIVE-START
/*
it("should propagate AnnotationFormatError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AnnotationFormatError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AnnotationFormatError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AnnotationFormatError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AnnotationFormatError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AnnotationFormatError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new AnnotationFormatError("on purpose")
assert(a == 1)
}
}
val e = intercept[AnnotationFormatError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate AWTError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new AWTError("on purpose")
assert(a == 1)
}
}
val e = intercept[AWTError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate CoderMalfunctionError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new CoderMalfunctionError(new RuntimeException("on purpose"))
assert(a == 1)
}
}
val e = intercept[CoderMalfunctionError] {
new TestSpec
}
assert(e.getMessage == "java.lang.RuntimeException: on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate FactoryConfigurationError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new FactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[FactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate LinkageError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new LinkageError("on purpose")
assert(a == 1)
}
}
val e = intercept[LinkageError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate ThreadDeath when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate ThreadDeath when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate ThreadDeath when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate ThreadDeath when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate ThreadDeath when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate ThreadDeath when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new ThreadDeath
assert(a == 1)
}
}
val e = intercept[ThreadDeath] {
new TestSpec
}
assert(e.getMessage == null)
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate TransformerFactoryConfigurationError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new TransformerFactoryConfigurationError("on purpose")
assert(a == 1)
}
}
val e = intercept[TransformerFactoryConfigurationError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside should scope") {
class TestSpec extends WordSpec {
"a feature" should {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside must scope") {
class TestSpec extends WordSpec {
"a feature" must {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside when scope") {
class TestSpec extends WordSpec {
"a feature" when {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside that scope") {
class TestSpec extends WordSpec {
"a feature" that {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside which scope") {
class TestSpec extends WordSpec {
"a feature" which {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}
it("should propagate VirtualMachineError when it is thrown inside can scope") {
class TestSpec extends WordSpec {
"a feature" can {
val a = 1
throw new VirtualMachineError("on purpose") {}
assert(a == 1)
}
}
val e = intercept[VirtualMachineError] {
new TestSpec
}
assert(e.getMessage == "on purpose")
}*/
// SKIP-SCALATESTJS,NATIVE-END
}
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside when") {
class TestSpec extends WordSpec {
"a feature" when {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand when") {
class TestSpec extends WordSpec {
"a feature" when {}
it when {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside should") {
class TestSpec extends WordSpec {
"a feature" should {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand should") {
class TestSpec extends WordSpec {
"a feature" should {}
it should {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside must") {
class TestSpec extends WordSpec {
"a feature" must {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand must") {
class TestSpec extends WordSpec {
"a feature" must {}
it must {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside that") {
class TestSpec extends WordSpec {
"a feature" that {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInThatClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature that", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside which") {
class TestSpec extends WordSpec {
"a feature" which {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhichClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature which", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside can") {
class TestSpec extends WordSpec {
"a feature" can {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand can") {
class TestSpec extends WordSpec {
"a feature" can {}
it can {
"test 1" in {}
"test 1" in {}
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("WordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/WordSpecSpec.scala
|
Scala
|
apache-2.0
| 168,535 |
package breeze.stats.distributions;
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.linalg.RandomInstanceSupport
import org.scalacheck._
import org.scalatest._
import org.scalatest.funsuite._
import org.scalatestplus.scalacheck._
class GaussianTest
extends AnyFunSuite
with Checkers
with UnivariateContinuousDistrTestBase
with MomentsTestBase[Double]
with ExpFamTest[Gaussian, Double]
with HasCdfTestBase {
override type Distr = Gaussian
val expFam: Gaussian.type = Gaussian
import org.scalacheck.Arbitrary.arbitrary;
def arbParameter: Arbitrary[(Double, Double)] = Arbitrary {
for {
mean <- RandomInstanceSupport.genReasonableDouble.arbitrary
std <- RandomInstanceSupport.genReasonableDouble.arbitrary.map { x =>
math.abs(x) % 8.0 + .1
}
} yield (mean, std)
}
def paramsClose(p: (Double, Double), b: (Double, Double)) = {
val y1 = (p._1 - b._1).abs / (p._1.abs / 2 + b._1.abs / 2 + 1) < 1E-1
val y2 = (p._2 - b._2).abs / (p._2.abs / 2 + b._2.abs / 2 + 1) < 1E-1
y1 && y2
}
test("Probability of mean") {
check(Prop.forAll { (m: Double, s: Double) =>
(s == 0) || {
val b = new Gaussian(mu = m, sigma = s.abs);
b.unnormalizedLogPdf(m) == 0.0;
}
})
}
test("#295, cdf/inverseCdf broken") {
val gaussian = Gaussian(0, 1)
assert(
(gaussian.cdf(gaussian.inverseCdf(0.1)) - 0.1).abs <= 1E-3,
gaussian.cdf(gaussian.inverseCdf(0.1)) + " was not close to " + 0.1)
}
test("Probability of N(0,1)(1) propto exp(-.5))") {
assert(new Gaussian(0, 1).unnormalizedLogPdf(1.0) === -0.5)
}
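  // Why -0.5 (illustrative note, not in the original source): the unnormalized
  // log-density drops the normalizing constant and keeps only the exponent,
  // -((x - mu) / sigma)^2 / 2, so for N(0, 1) at x = 1 this is -(1 - 0)^2 / 2 = -0.5,
  // which is exactly what the assertion above checks.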
test("Gaussian.probability throws an exception when evaluating 1.0 < N(0, 1) < 0.0") {
val thrown = intercept[IllegalArgumentException] {
new Gaussian(0, 1).probability(1.0, 0.0)
}
}
override val VARIANCE_TOLERANCE: Double = 9E-2
implicit def arbDistr: Arbitrary[Distr] = Arbitrary {
for (mean <- arbitrary[Double].map { x =>
math.abs(x) % 10000.0
};
std <- arbitrary[Double].map { x =>
math.abs(x) % 8.0 + .1
}) yield new Gaussian(mean, std);
}
def asDouble(x: Double) = x
def fromDouble(x: Double) = x
}
|
scalanlp/breeze
|
math/src/test/scala/breeze/stats/distributions/GaussianTest.scala
|
Scala
|
apache-2.0
| 2,755 |
package freecli
package command
object all extends AllOps with AllImplicits {
type ParentWith[P, B] = api.ParentWith[P, B]
}
|
pavlosgi/freecli
|
core/src/main/scala/freecli/command/all.scala
|
Scala
|
apache-2.0
| 127 |
package uk.gov.gds.ier.transaction.ordinary.postalVote
import com.google.inject.{Inject, Singleton}
import uk.gov.gds.ier.serialiser.JsonSerialiser
import uk.gov.gds.ier.model._
import play.api.templates.Html
import uk.gov.gds.ier.config.Config
import uk.gov.gds.ier.security.EncryptionService
import uk.gov.gds.ier.step.OrdinaryStep
import play.api.mvc.Call
import uk.gov.gds.ier.step.Routes
import uk.gov.gds.ier.model.PostalVote
import uk.gov.gds.ier.validation.ErrorTransformForm
import scala.Some
import uk.gov.gds.ier.transaction.ordinary.{OrdinaryControllers, InprogressOrdinary}
import uk.gov.gds.ier.assets.RemoteAssets
@Singleton
class PostalVoteStep @Inject ()(
val serialiser: JsonSerialiser,
val config: Config,
val encryptionService : EncryptionService,
val remoteAssets: RemoteAssets,
val ordinary: OrdinaryControllers
) extends OrdinaryStep
with PostalVoteForms
with PostalVoteMustache {
val validation = postalVoteForm
val routing = Routes(
get = routes.PostalVoteStep.get,
post = routes.PostalVoteStep.post,
editGet = routes.PostalVoteStep.editGet,
editPost = routes.PostalVoteStep.editPost
)
  // If the applicant opted out of postal voting, clear any previously captured
  // delivery details so that stale answers are not carried forward.
  def resetPostalVote = TransformApplication { currentState =>
val postalVoteOption = currentState.postalVote.flatMap(_.postalVoteOption)
postalVoteOption match {
case Some(PostalVoteOption.NoAndVoteInPerson) | Some(PostalVoteOption.NoAndAlreadyHave) =>
currentState.copy(postalVote = Some(PostalVote(
postalVoteOption = postalVoteOption,
deliveryMethod = None))
)
case _ => currentState
}
}
override val onSuccess = resetPostalVote andThen GoToNextIncompleteStep()
def nextStep(currentState: InprogressOrdinary) = {
ordinary.ContactStep
}
}
|
michaeldfallen/ier-frontend
|
app/uk/gov/gds/ier/transaction/ordinary/postalVote/PostalVoteStep.scala
|
Scala
|
mit
| 1,788 |
/*
* BoundedProbFactor.scala
* Factors over variables.
*
* Created By: Avi Pfeffer ([email protected])
* Creation Date: Jan 15, 2014
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.lazyfactored
import com.cra.figaro.algorithm._
import com.cra.figaro.language._
import com.cra.figaro.util._
import annotation.tailrec
import scala.language.existentials
import com.cra.figaro.algorithm.factored._
/**
* Methods for creating lower and upper bound probability factors.
*/
object BoundedProbFactor {
private def makeConditionAndConstraintFactors[T](elem: Element[T], upper: Boolean): List[Factor[Double]] =
elem.allConditions.map(makeConditionFactor(elem, _, upper)) ::: elem.allConstraints.map(makeConstraintFactor(elem, _, upper))
private def makeConditionFactor[T](elem: Element[T], cc: (T => Boolean, Element.Contingency), upper: Boolean): Factor[Double] =
makeConstraintFactor(elem, (ProbConstraintType((t: T) => if (cc._1(t)) 1.0; else 0.0), cc._2), upper)
private def makeConstraintFactor[T](elem: Element[T], cc: (T => Double, Element.Contingency), upper: Boolean): Factor[Double] = {
val (constraint, contingency) = cc
contingency match {
case List() => makeUncontingentConstraintFactor(elem, constraint, upper)
case first :: rest => makeContingentConstraintFactor(elem, constraint, first, rest, upper)
}
}
private def makeUncontingentConstraintFactor[T](elem: Element[T], constraint: T => Double, upper: Boolean): Factor[Double] = {
val elemVar = Variable(elem)
val factor = new Factor[Double](List(elemVar))
for { (elemVal, index) <- elemVar.range.zipWithIndex } {
val entry = if (elemVal.isRegular) {
val c = math.exp(constraint(elemVal.value))
c
} else {
        // The (0,1) bounds assume the constraint is always bounded between 0 and 1. This is always correct for conditions.
        // For constraints that could be greater than 1, the resulting bounds on the answer could be wrong.
if (upper) 1.0; else 0.0
}
factor.set(List(index), entry)
}
factor
}
private def makeContingentConstraintFactor[T](elem: Element[T], constraint: T => Double, firstConting: Element.ElemVal[_], restContinges: Element.Contingency, upper: Boolean): Factor[Double] = {
val restFactor = makeConstraintFactor(elem, (constraint, restContinges), upper)
extendConstraintFactor(restFactor, firstConting)
}
private def extendConstraintFactor(restFactor: Factor[Double], firstConting: Element.ElemVal[_]): Factor[Double] = {
// The extended factor is obtained by getting the underlying factor and expanding each row so that the row only provides its entry if the contingent variable takes
// on the appropriate value, otherwise the entry is 1
val Element.ElemVal(firstElem, firstValue) = firstConting
val firstVar = Variable(firstElem)
val firstValues = firstVar.range
val numFirstValues = firstValues.size
val matchingIndex: Int = firstValues.indexOf(Regular(firstValue))
val resultFactor = new Factor[Double](firstVar :: restFactor.variables)
for { restIndices <- restFactor.allIndices } {
val restEntry = restFactor.get(restIndices)
for { firstIndex <- 0 until numFirstValues } {
// constraint doesn't apply if the index is not the required one, so we use a value of 1
val resultEntry = if (firstIndex == matchingIndex) restEntry; else 1.0
resultFactor.set(firstIndex :: restIndices, resultEntry)
}
}
resultFactor
}
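  // Worked example of the rule above (illustrative, not in the original source):
  // if the contingent variable ranges over three values and the required value sits
  // at index 1, a rest-factor row with entry e expands into three rows with entries
  // 1.0, e, 1.0; the constraint only applies when the contingency is satisfied.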
/**
* Create the probabilistic factors associated with an element.
*/
def make(elem: Element[_], upper: Boolean): List[Factor[Double]] = {
val constraintFactors = makeConditionAndConstraintFactors(elem, upper)
constraintFactors ::: ProbFactor.makeNonConstraintFactors(elem)
}
/**
* Create the probabilistic factor encoding the probability of evidence in the dependent universe as a function of the
   * values of variables in the parent universe. The third argument is the function to use for computing
* probability of evidence in the dependent universe. It is assumed that the definition of this function will already contain the
* right evidence.
*/
def makeDependentFactor(parentUniverse: Universe,
dependentUniverse: Universe,
probEvidenceComputer: () => Double): (Factor[Double], Factor[Double]) = {
val uses = dependentUniverse.parentElements filter (_.universe == parentUniverse)
def rule(upper: Boolean)(values: List[Extended[_]]) = {
if (values.exists(!_.isRegular)) { if (upper) 1.0; else 0.0 }
else {
for { (elem, value) <- uses zip values } { elem.value = value.asInstanceOf[Regular[elem.Value]].value }
val result = probEvidenceComputer()
result
}
}
val variables = uses map (Variable(_))
val lb = new Factor[Double](variables)
lb.fillByRule(rule(false))
val ub = new Factor[Double](variables)
ub.fillByRule(rule(true))
(lb, ub)
}
}
|
wkretschmer/figaro
|
Figaro/src/main/scala/com/cra/figaro/algorithm/lazyfactored/BoundedProbFactor.scala
|
Scala
|
bsd-3-clause
| 5,224 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.commons.source
import cascading.flow.FlowDef
import cascading.pipe.Pipe
import cascading.scheme.Scheme
import cascading.tap.Tap
import cascading.tuple.Fields
import com.backtype.cascading.scheme.KeyValueByteScheme
import com.backtype.cascading.tap.VersionedTap
import com.backtype.cascading.tap.VersionedTap.TapMode
import com.twitter.algebird.Monoid
import com.twitter.bijection.Injection
import com.twitter.chill.Externalizer
import com.twitter.scalding.TDsl._
import com.twitter.scalding._
import com.twitter.scalding.source.{ CheckedInversion, MaxFailuresCheck }
import com.twitter.scalding.typed.KeyedListLike
import com.twitter.scalding.typed.TypedSink
import org.apache.hadoop.mapred.JobConf
import scala.collection.JavaConverters._
/**
* Source used to write key-value pairs as byte arrays into a versioned store.
* Supports incremental updates via the monoid on V.
*/
object VersionedKeyValSource {
val defaultVersionsToKeep = 3
  // TODO: we have two apply methods here for binary compatibility purposes. Need to clean this up in the next release.
def apply[K, V](path: String, sourceVersion: Option[Long] = None, sinkVersion: Option[Long] = None, maxFailures: Int = 0)(implicit codec: Injection[(K, V), (Array[Byte], Array[Byte])]) = {
new VersionedKeyValSource[K, V](path, sourceVersion, sinkVersion, maxFailures, defaultVersionsToKeep)
}
def apply[K, V](path: String, sourceVersion: Option[Long], sinkVersion: Option[Long], maxFailures: Int, versionsToKeep: Int)(implicit codec: Injection[(K, V), (Array[Byte], Array[Byte])]) =
new VersionedKeyValSource[K, V](path, sourceVersion, sinkVersion, maxFailures, versionsToKeep)
}
class VersionedKeyValSource[K, V](val path: String, val sourceVersion: Option[Long], val sinkVersion: Option[Long],
val maxFailures: Int, val versionsToKeep: Int)(
implicit @transient codec: Injection[(K, V), (Array[Byte], Array[Byte])]) extends Source
with Mappable[(K, V)]
with TypedSink[(K, V)] {
import Dsl._
val keyField = "key"
val valField = "value"
val fields = new Fields(keyField, valField)
val codecBox = Externalizer(codec)
override def converter[U >: (K, V)] = TupleConverter.asSuperConverter[(K, V), U](TupleConverter.of[(K, V)])
override def setter[U <: (K, V)] = TupleSetter.asSubSetter[(K, V), U](TupleSetter.of[(K, V)])
def hdfsScheme =
HadoopSchemeInstance(new KeyValueByteScheme(fields).asInstanceOf[Scheme[_, _, _, _, _]])
@deprecated("This method is deprecated", "0.1.6")
def this(path: String, sourceVersion: Option[Long], sinkVersion: Option[Long], maxFailures: Int)(implicit @transient codec: Injection[(K, V), (Array[Byte], Array[Byte])]) =
this(path, sourceVersion, sinkVersion, maxFailures, VersionedKeyValSource.defaultVersionsToKeep)(codec)
def getTap(mode: TapMode) = {
val tap = new VersionedTap(path, hdfsScheme, mode).setVersionsToKeep(versionsToKeep)
if (mode == TapMode.SOURCE && sourceVersion.isDefined)
tap.setVersion(sourceVersion.get)
else if (mode == TapMode.SINK && sinkVersion.isDefined)
tap.setVersion(sinkVersion.get)
else
tap
}
val source = getTap(TapMode.SOURCE)
val sink = getTap(TapMode.SINK)
override def validateTaps(mode: Mode): Unit = {
// if a version is explicitly supplied, ensure that it exists
sourceVersion.foreach { version =>
mode match {
case hadoopMode: HadoopMode => {
val store = source.getStore(new JobConf(hadoopMode.jobConf))
if (!store.hasVersion(version)) {
throw new InvalidSourceException(
"Version %s does not exist. Currently available versions are: %s"
.format(version, store.getAllVersions))
}
}
case _ => throw new IllegalArgumentException(
"VersionedKeyValSource does not support mode %s. Only HadoopMode is supported"
.format(mode))
}
}
}
def resourceExists(mode: Mode) =
mode match {
case Test(buffers) => {
buffers(this) map { !_.isEmpty } getOrElse false
}
case HadoopTest(conf, buffers) => {
buffers(this) map { !_.isEmpty } getOrElse false
}
case _ => {
val conf = new JobConf(mode.asInstanceOf[HadoopMode].jobConf)
source.resourceExists(conf)
}
}
override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_, _, _] = {
import com.twitter.scalding.CastHfsTap
mode match {
case Hdfs(_strict, _config) =>
readOrWrite match {
case Read => CastHfsTap(source)
case Write => CastHfsTap(sink)
}
case _ =>
TestTapFactory(this, hdfsScheme).createTap(readOrWrite)
}
}
// Override this for more control on failure on decode
protected lazy val checkedInversion: CheckedInversion[(K, V), (Array[Byte], Array[Byte])] =
new MaxFailuresCheck(maxFailures)(codecBox.get)
override def sinkFields = fields
override def transformForRead(pipe: Pipe) = {
pipe.flatMap((keyField, valField) -> (keyField, valField)) { pair: (Array[Byte], Array[Byte]) =>
checkedInversion(pair)
}
}
override def transformForWrite(pipe: Pipe) = {
pipe.mapTo((0, 1) -> (keyField, valField)) { pair: (K, V) =>
codecBox.get.apply(pair)
}
}
override def toIterator(implicit config: Config, mode: Mode): Iterator[(K, V)] = {
val tap = createTap(Read)(mode)
mode.openForRead(config, tap)
.asScala
.flatMap { te =>
val item = te.selectTuple(fields)
mode match {
case _: TestMode =>
val key = item.getObject(0).asInstanceOf[K]
val value = item.getObject(1).asInstanceOf[V]
Some((key, value))
case _ =>
val key = item.getObject(0).asInstanceOf[Array[Byte]]
val value = item.getObject(1).asInstanceOf[Array[Byte]]
checkedInversion((key, value))
}
}
}
override def toString =
"%s path:%s,sourceVersion:%s,sinkVersion:%s".format(getClass(), path, sourceVersion, sinkVersion)
override def equals(other: Any) =
if (other.isInstanceOf[VersionedKeyValSource[_, _]]) {
val otherSrc = other.asInstanceOf[VersionedKeyValSource[K, V]]
otherSrc.path == path && otherSrc.sourceVersion == sourceVersion && otherSrc.sinkVersion == sinkVersion
} else {
false
}
override def hashCode = toString.hashCode
}
object RichPipeEx extends java.io.Serializable {
implicit def pipeToRichPipeEx(pipe: Pipe): RichPipeEx = new RichPipeEx(pipe)
implicit def typedPipeToRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K, V)]) =
new TypedRichPipeEx(pipe)
implicit def keyedListLikeToRichPipeEx[K: Ordering, V: Monoid, T[K, +V] <: KeyedListLike[K, V, T]](
kll: KeyedListLike[K, V, T]) = typedPipeToRichPipeEx(kll.toTypedPipe)
}
class TypedRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K, V)]) extends java.io.Serializable {
import Dsl._
import TDsl._
  // Reads existing data from the `sourceVersion` (or latest version) of
  // the store specified by `src`, merges in the K,V pairs from the pipe
  // using an implicit `Monoid[V]`, and sinks all results into the
  // `sinkVersion` of data (or a new version) specified by `src`.
def writeIncremental(src: VersionedKeyValSource[K, V], reducers: Int = 1)(implicit flowDef: FlowDef, mode: Mode): TypedPipe[(K, V)] = {
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = TypedPipe
.from[(K, V)](src.read, (0, 1))
.map { case (k, v) => (k, v, 0) }
val newPairs = pipe.sumByLocalKeys.map { case (k, v) => (k, v, 1) }
(oldPairs ++ newPairs)
.groupBy { _._1 }
.withReducers(reducers)
.sortBy { _._3 }
.mapValues { _._2 }
.sum
.toTypedPipe
}
outPipe.write(src)
}
}
class RichPipeEx(pipe: Pipe) extends java.io.Serializable {
import Dsl._
// VersionedKeyValSource always merges with the most recent complete
// version
def writeIncremental[K, V](src: VersionedKeyValSource[K, V], fields: Fields, reducers: Int = 1)(implicit monoid: Monoid[V],
flowDef: FlowDef,
mode: Mode) = {
def appendToken(pipe: Pipe, token: Int) =
pipe.mapTo((0, 1) -> ('key, 'value, 'isNew)) { pair: (K, V) => pair :+ token }
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = appendToken(src.read, 0)
val newPairs = appendToken(pipe, 1)
(oldPairs ++ newPairs)
.groupBy('key) { _.reducers(reducers).sortBy('isNew).sum[V]('value) }
.project(('key, 'value))
.rename(('key, 'value) -> fields)
}
outPipe.write(src)
}
}
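// Illustrative sketch, not part of the original source: one way to wire
// writeIncremental into a job. The store path, key/value types, and the input
// pipe are hypothetical, and the byte-array codec is left abstract because its
// concrete construction depends on the bijection instances the calling project
// provides.
object VersionedKeyValSourceUsageSketch {
  // Supply a real injection to byte arrays before running this; left abstract here.
  implicit def codec: Injection[(String, Long), (Array[Byte], Array[Byte])] = ???

  def countWords(words: TypedPipe[String])(implicit fd: FlowDef, mode: Mode): TypedPipe[(String, Long)] = {
    import RichPipeEx._
    val store = VersionedKeyValSource[String, Long]("/data/wordcount-store")
    words
      .map(w => (w, 1L))       // one count per occurrence
      .writeIncremental(store) // Monoid[Long] sums new counts into the latest version
  }
}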
|
sid-kap/scalding
|
scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
|
Scala
|
apache-2.0
| 9,350 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.list
class Hasher(
k: Int,
m: Int) {
  val bigM = BigInt(m)
  // Coefficients are drawn lazily on the first call to hash and then reused,
  // so a given Hasher instance defines one fixed hash function.
  var coefs: List[BigInt] = null
  def hash(x: Int) = {
    val p = BigInt(1073741789) // a prime comfortably larger than any Int input
    if (coefs == null) {
      coefs = List()
      val rand = new scala.util.Random()
      for (i <- 0 to k - 1) {
        coefs = BigInt(rand.nextInt()) :: coefs
      }
    }
    val bigX = BigInt(x)
    // Sum c_i * (x^e mod p) for exponents e = k down to 1, i.e. evaluate a
    // random polynomial with the powers of x reduced modulo the prime p.
    val (s, _) = coefs.foldLeft((BigInt(0), BigInt(k))) {
      (t, c) => (t._1 + c * bigX.modPow(t._2, p), t._2 - 1)
    }
    s.mod(bigM).toInt // fold the result down into the m buckets
  }
}
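// Illustrative sketch, not part of the original source: a Hasher with k random
// coefficients and m buckets behaves as one fixed hash function per instance.
// The parameter values below are hypothetical.
object HasherUsageSketch {
  def main(args: Array[String]): Unit = {
    val hasher = new Hasher(k = 4, m = 1024) // 4 coefficients, 1024 buckets
    val bucket = hasher.hash(42)
    assert(0 <= bucket && bucket < 1024) // mod(bigM) keeps the result non-negative
    // Repeated calls on the same instance are stable once the coefficients exist.
    assert(hasher.hash(42) == bucket)
    println(s"42 hashes to bucket $bucket")
  }
}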
|
twmarshall/tdb
|
core/src/main/scala/tdb/list/Hasher.scala
|
Scala
|
apache-2.0
| 1,142 |
package com.lightbend.coursegentools
import java.io.File
/**
* Copyright © 2016 Lightbend, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* NO COMMERCIAL SUPPORT OR ANY OTHER FORM OF SUPPORT IS OFFERED ON
* THIS SOFTWARE BY LIGHTBEND, Inc.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
object LinearizeCmdLineOptParse {
def parse(args: Array[String]): Option[LinearizeCmdOptions] = {
implicit val eofe: ExitOnFirstError = ExitOnFirstError(true)
val parser = new scopt.OptionParser[LinearizeCmdOptions]("linearize") {
head("Course Management Tools:", "linearize", com.github.eloots.cmt.BuildInfo.version)
arg[File]("mainRepo")
.text("base folder holding main course repository")
.action {
case (mainRepo, c) =>
if (!folderExists(mainRepo))
printError(s"Base main repo folder (${mainRepo.getPath}) doesn't exist")
c.copy(mainRepo = mainRepo)
}
arg[File]("linearRepo")
.text("base folder for linearized version repo")
.action {
case (linearRepo, config) =>
if (!folderExists(linearRepo))
printError(s"Base folder for linearized version repo (${linearRepo.getPath}) doesn't exist")
config.copy(linearRepo = linearRepo)
}
opt[Unit]("multi-jvm")
.text("generate multi-jvm build file")
.abbr("mjvm")
.action {
case (_, c) =>
c.copy(multiJVM = true)
}
opt[Unit]("force-delete")
.text("Force-delete a pre-existing destination folder")
.abbr("f")
.action {
case (_, c) =>
c.copy(forceDeleteExistingDestinationFolder = true)
}
opt[String]("config-file")
.text("configuration file")
.abbr("cfg")
.action {
case (cfgFile, c) =>
c.copy(configurationFile = Some(cfgFile))
}
opt[Unit]("dotty")
.text("studentified repository is a Dotty project")
.abbr("dot")
.action {
case (_, c) =>
c.copy(isADottyProject = true)
}
opt[Unit]("no-auto-reload-sbt")
.text("no automatic reload on build definition change")
.abbr("nar")
.action {
case (_, c) =>
c.copy(autoReloadOnBuildDefChange = false)
}
opt[Unit]("bare-lin-repo")
.text("create a linearized repo without any of the CMT plugin functionality")
.abbr("m")
.action {
case (_, c) =>
c.copy(bareLinRepo = true)
}
help("help").text("Prints the usage text")
version("version").abbr("v").text("Prints the version info")
}
parser.parse(args, LinearizeCmdOptions())
}
}
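// Illustrative sketch, not part of the original source: invoking the parser
// directly. The folder names are hypothetical and must exist on disk, since the
// argument actions validate them eagerly.
object LinearizeParseSketch {
  def main(args: Array[String]): Unit = {
    val parsed = LinearizeCmdLineOptParse.parse(
      Array("/courses/main-repo", "/courses/linearized", "--force-delete"))
    parsed.foreach { opts =>
      println(s"linearizing ${opts.mainRepo} into ${opts.linearRepo}")
    }
  }
}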
|
lightbend-training/course-management-tools
|
linearize/src/main/scala/com/lightbend/coursegentools/LinearizeCmdLineOptParse.scala
|
Scala
|
apache-2.0
| 3,261 |
package com.seanshubin.detangler.domain
import java.nio.file.Path
case class RawConfiguration(reportDir: Option[Path],
searchPaths: Option[Seq[Path]],
level: Option[Int],
startsWith: Option[RawStartsWithConfiguration],
ignoreFiles: Option[Seq[Path]],
canFailBuild: Option[Boolean],
ignoreJavadoc: Option[Boolean],
logTiming: Option[Boolean],
logEffectiveConfiguration: Option[Boolean],
allowedInCycle: Option[Path],
pathsRelativeToCurrentDirectory: Option[Boolean],
pathsRelativeToConfigurationDirectory: Option[Boolean]) {
def replaceEmptyWithDefaults(configurationPath: Path): Configuration = {
val newPathsRelativeToConfigDirectory = !pathsRelativeToCurrentDirectory.getOrElse(false) && pathsRelativeToConfigurationDirectory.getOrElse(false)
val newPathsRelativeToCurrentDirectory = !newPathsRelativeToConfigDirectory
def modifyRelativePathIfUsingConfigurationDirectory(path: Path): Path =
if (newPathsRelativeToConfigDirectory && !path.isAbsolute) configurationPath.getParent.resolve(path)
else path
val newReportDir = modifyRelativePathIfUsingConfigurationDirectory(reportDir.getOrElse(Configuration.Default.reportDir))
val newSearchPaths = searchPaths.getOrElse(Configuration.Default.searchPaths).map(modifyRelativePathIfUsingConfigurationDirectory)
val newLevel = level.getOrElse(Configuration.Default.level)
val newStartsWith = startsWith match {
case None => Configuration.Default.startsWith
case Some(rawStartsWithConfiguration) => rawStartsWithConfiguration.replaceEmptyWithDefaults()
}
val newAllowedInCycle = modifyRelativePathIfUsingConfigurationDirectory(allowedInCycle.getOrElse(Configuration.Default.allowedInCycle))
val newIgnoreFiles = ignoreFiles.getOrElse(Configuration.Default.ignoreFiles).map(modifyRelativePathIfUsingConfigurationDirectory)
val newCanFailBuild = canFailBuild.getOrElse(Configuration.Default.canFailBuild)
val newIgnoreJavadoc = ignoreJavadoc.getOrElse(Configuration.Default.ignoreJavadoc)
val newLogTiming = logTiming.getOrElse(Configuration.Default.logTiming)
val newLogEffectiveConfiguration = logEffectiveConfiguration.getOrElse(Configuration.Default.logEffectiveConfiguration)
Configuration(
newReportDir,
newSearchPaths,
newLevel,
newStartsWith,
newIgnoreFiles,
newCanFailBuild,
newIgnoreJavadoc,
newLogTiming,
newLogEffectiveConfiguration,
newAllowedInCycle,
newPathsRelativeToCurrentDirectory,
newPathsRelativeToConfigDirectory)
}
}
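// Illustrative sketch, not part of the original source: how relative paths
// resolve when pathsRelativeToConfigurationDirectory is set. The file locations
// are hypothetical; all other fields fall back to Configuration.Default.
object RawConfigurationResolutionSketch {
  import java.nio.file.Paths

  def main(args: Array[String]): Unit = {
    val raw = RawConfiguration(
      reportDir = Some(Paths.get("report")),
      searchPaths = None, level = None, startsWith = None, ignoreFiles = None,
      canFailBuild = None, ignoreJavadoc = None, logTiming = None,
      logEffectiveConfiguration = None, allowedInCycle = None,
      pathsRelativeToCurrentDirectory = None,
      pathsRelativeToConfigurationDirectory = Some(true))
    val effective = raw.replaceEmptyWithDefaults(Paths.get("/etc/detangler/detangler.txt"))
    println(effective.reportDir) // expected: /etc/detangler/report
  }
}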
|
SeanShubin/detangler
|
domain/src/main/scala/com/seanshubin/detangler/domain/RawConfiguration.scala
|
Scala
|
unlicense
| 2,853 |
package se.uprise.graphql.types
import org.scalatest.FunSpec
import se.uprise.graphql.GraphQL
class CoercionSpec extends FunSpec {
describe("Type System: Scalar coercion") {
it("coerces output int") {
//GraphQLInt.coerce()
}
}
}
|
hrosenhorn/graphql-scala
|
src/test/scala/se/uprise/graphql/types/CoercionSpec.scala
|
Scala
|
mit
| 250 |
package contoso.conference.registration
import scala.concurrent.duration._
import akka.actor.{ Actor, ActorLogging, ActorRef, Props }
import akka.event.LoggingReceive
import com.typesafe.config.ConfigFactory
import contoso.conference.{ ConferenceModule, ConferenceProtocol }
import contoso.registration.{ OrderLine, SeatOrderLine, SeatQuantity }
import demesne.DomainModel
import squants.market._
object PricingRetriever {
def props( model: DomainModel ): Props = Props( new PricingRetriever( model ) )
val shardName: String = "PricingRetrievers"
sealed trait PricingMessage
case class CalculateTotal( conferenceId: ConferenceModule.TID, seatItems: Seq[SeatQuantity] )
extends PricingMessage
// Conference.Registration/OrderTotal.cs
case class OrderTotal( lines: Seq[OrderLine], total: Money ) extends PricingMessage
case object ConferencePublishedSeatTypesTimeout extends PricingMessage
val fallback = "conference-timeout = 250ms"
val config = ConfigFactory.load
.getConfig( "contoso.conference.registration.pricing" )
.withFallback( ConfigFactory.parseString( fallback ) )
import java.util.concurrent.TimeUnit
val conferenceTimeout =
Duration( config.getDuration( "conference-timeout", TimeUnit.MILLISECONDS ), MILLISECONDS )
object CalculationHandler {
def props( seatItems: Seq[SeatQuantity], originalSender: ActorRef ): Props = {
Props( new CalculationHandler( seatItems, originalSender ) )
}
}
// Conference/Registration/PricingService.cs
class CalculationHandler( seatItems: Seq[SeatQuantity], originalSender: ActorRef )
extends Actor
with ActorLogging {
override def receive: Receive = LoggingReceive {
case ConferenceProtocol.SeatTypes( seatTypes ) => {
val lines = for {
i <- seatItems
t <- seatTypes find { _.id == i.seatTypeId }
} yield
SeatOrderLine( seatTypeId = i.seatTypeId, unitPrice = t.price, quantity = i.quantity )
val total = lines.foldLeft( USD( 0 ) )( _ + _.total )
sendResponseAndShutdown( OrderTotal( lines, total ) )
}
case ConferencePublishedSeatTypesTimeout =>
sendResponseAndShutdown( ConferencePublishedSeatTypesTimeout )
}
def sendResponseAndShutdown( response: Any ): Unit = {
originalSender ! response
log debug s"shutting down CalculationHandler on: ${response}"
context stop self
}
import context.dispatcher
val timeoutMessager = context.system.scheduler.scheduleOnce( conferenceTimeout ) {
self ! ConferencePublishedSeatTypesTimeout
}
}
}
class PricingRetriever( model: DomainModel ) extends Actor with ActorLogging {
import contoso.conference.registration.PricingRetriever._
override def receive: Receive = LoggingReceive {
case CalculateTotal( conferenceId, seatItems ) => {
val originalSender = sender()
val handler = context.actorOf( CalculationHandler.props( seatItems, originalSender ) )
val conference = model.aggregateOf( ConferenceModule.rootType, conferenceId )
conference.tell( ConferenceProtocol.GetPublishedSeatTypes, handler )
}
}
}
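// Illustrative sketch, not part of the original source: asking the retriever for
// an order total. The actor system, domain model, and identifiers are assumed to
// exist in the caller's context.
object PricingRetrieverUsageSketch {
  import akka.actor.ActorSystem
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.Future
  import scala.concurrent.duration._

  def priceOrder(
    system: ActorSystem,
    model: DomainModel,
    conferenceId: ConferenceModule.TID,
    seats: Seq[SeatQuantity]
  ): Future[Any] = {
    implicit val timeout: Timeout = Timeout( 1.second )
    val retriever = system.actorOf( PricingRetriever.props( model ) )
    // Completes with OrderTotal on success, or ConferencePublishedSeatTypesTimeout
    // if the conference aggregate fails to answer within conferenceTimeout.
    retriever ? PricingRetriever.CalculateTotal( conferenceId, seats )
  }
}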
|
dmrolfs/demesne
|
examples/src/main/scala/contoso/conference/registration/PricingRetriever.scala
|
Scala
|
apache-2.0
| 3,160 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.cluster.EndPoint
import kafka.utils._
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.zookeeper.Watcher.Event.KeeperState
import org.I0Itec.zkclient.{IZkStateListener, ZkClient}
import java.net.InetAddress
/**
* This class registers the broker in zookeeper to allow
* other brokers and consumers to detect failures. It uses an ephemeral znode with the path:
* /brokers/[0...N] --> advertisedHost:advertisedPort
*
* Right now our definition of health is fairly naive. If we register in zk we are healthy, otherwise
* we are dead.
*/
class KafkaHealthcheck(private val brokerId: Int,
private val advertisedEndpoints: Map[SecurityProtocol, EndPoint],
private val zkSessionTimeoutMs: Int,
private val zkClient: ZkClient) extends Logging {
val brokerIdPath = ZkUtils.BrokerIdsPath + "/" + brokerId
val sessionExpireListener = new SessionExpireListener
def startup() {
zkClient.subscribeStateChanges(sessionExpireListener)
register()
}
/**
* Register this broker as "alive" in zookeeper
*/
def register() {
val jmxPort = System.getProperty("com.sun.management.jmxremote.port", "-1").toInt
val updatedEndpoints = advertisedEndpoints.mapValues(endpoint =>
if (endpoint.host == null || endpoint.host.trim.isEmpty)
EndPoint(InetAddress.getLocalHost.getCanonicalHostName, endpoint.port, endpoint.protocolType)
else
endpoint
)
    // the default host and port are here for compatibility with older clients
    // only PLAINTEXT is supported as the default
    // if the broker doesn't listen on the PLAINTEXT protocol, an empty endpoint will be registered and older clients will be unable to connect
val plaintextEndpoint = updatedEndpoints.getOrElse(SecurityProtocol.PLAINTEXT, new EndPoint(null,-1,null))
ZkUtils.registerBrokerInZk(zkClient, brokerId, plaintextEndpoint.host, plaintextEndpoint.port, updatedEndpoints, zkSessionTimeoutMs, jmxPort)
}
/**
* When we get a SessionExpired event, we lost all ephemeral nodes and zkclient has reestablished a
* connection for us. We need to re-register this broker in the broker registry.
*/
class SessionExpireListener() extends IZkStateListener {
@throws(classOf[Exception])
def handleStateChanged(state: KeeperState) {
// do nothing, since zkclient will do reconnect for us.
}
/**
* Called after the zookeeper session has expired and a new session has been created. You would have to re-create
* any ephemeral nodes here.
*
* @throws Exception
* On any error.
*/
@throws(classOf[Exception])
def handleNewSession() {
info("re-registering broker info in ZK for broker " + brokerId)
register()
info("done re-registering broker")
info("Subscribing to %s path to watch for new topics".format(ZkUtils.BrokerTopicsPath))
}
override def handleSessionEstablishmentError(error: Throwable): Unit = {
fatal("Could not establish session with zookeeper", error)
}
}
}
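// Illustrative sketch, not part of the original source: how the broker wires the
// healthcheck up. The ZkClient value and endpoint details below are hypothetical;
// the real broker derives them from its configuration.
object KafkaHealthcheckUsageSketch {
  def register(zkClient: ZkClient): KafkaHealthcheck = {
    val endpoints = Map(
      SecurityProtocol.PLAINTEXT -> EndPoint("broker1.example.com", 9092, SecurityProtocol.PLAINTEXT))
    val healthcheck = new KafkaHealthcheck(
      brokerId = 0,
      advertisedEndpoints = endpoints,
      zkSessionTimeoutMs = 6000,
      zkClient = zkClient)
    // Creates the ephemeral /brokers/ids/0 znode and re-registers it whenever
    // the zookeeper session expires and is re-established.
    healthcheck.startup()
    healthcheck
  }
}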
|
usakey/kafka
|
core/src/main/scala/kafka/server/KafkaHealthcheck.scala
|
Scala
|
apache-2.0
| 3,945 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api
import org.apache.calcite.rel.RelNode
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.operators.join.JoinType
import org.apache.flink.table.calcite.FlinkRelBuilder
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.expressions.{Alias, Asc, Expression, ExpressionParser, Ordering, UnresolvedAlias, UnresolvedFieldReference}
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils
import org.apache.flink.table.plan.ProjectionTranslator._
import org.apache.flink.table.plan.logical.{Minus, _}
import org.apache.flink.table.sinks.TableSink
import _root_.scala.collection.JavaConverters._
import _root_.scala.annotation.varargs
/**
* A Table is the core component of the Table API.
* Similar to how the batch and streaming APIs have DataSet and DataStream,
* the Table API is built around [[Table]].
*
* Use the methods of [[Table]] to transform data. Use [[TableEnvironment]] to convert a [[Table]]
* back to a DataSet or DataStream.
*
* When using Scala a [[Table]] can also be converted using implicit conversions.
*
* Example:
*
* {{{
* val env = ExecutionEnvironment.getExecutionEnvironment
* val tEnv = TableEnvironment.getTableEnvironment(env)
*
* val set: DataSet[(String, Int)] = ...
* val table = set.toTable(tEnv, 'a, 'b)
* ...
* val table2 = ...
* val set2: DataSet[MyType] = table2.toDataSet[MyType]
* }}}
*
* Operations such as [[join]], [[select]], [[where]] and [[groupBy]] either take arguments
* in a Scala DSL or as an expression String. Please refer to the documentation for the expression
* syntax.
*
* @param tableEnv The [[TableEnvironment]] to which the table is bound.
* @param logicalPlan logical representation
*/
class Table(
private[flink] val tableEnv: TableEnvironment,
private[flink] val logicalPlan: LogicalNode) {
// Check if the plan has an unbounded TableFunctionCall as child node.
// A TableFunctionCall is tolerated as root node because the Table holds the initial call.
if (containsUnboudedUDTFCall(logicalPlan) &&
!logicalPlan.isInstanceOf[LogicalTableFunctionCall]) {
throw new ValidationException("TableFunction can only be used in join and leftOuterJoin.")
}
/**
* Creates a [[Table]] for a TableFunction call from a String expression.
*
* @param tableEnv The TableEnvironment in which the call is created.
* @param udtfCall A String expression of the TableFunction call.
*/
def this(tableEnv: TableEnvironment, udtfCall: String) {
this(tableEnv, UserDefinedFunctionUtils.createLogicalFunctionCall(tableEnv, udtfCall))
}
private lazy val tableSchema: TableSchema = new TableSchema(
logicalPlan.output.map(_.name).toArray,
logicalPlan.output.map(_.resultType).toArray)
def relBuilder: FlinkRelBuilder = tableEnv.getRelBuilder
def getRelNode: RelNode = if (containsUnboudedUDTFCall(logicalPlan)) {
throw new ValidationException("Cannot translate a query with an unbounded table function call.")
} else {
logicalPlan.toRelNode(relBuilder)
}
/**
* Returns the schema of this table.
*/
def getSchema: TableSchema = tableSchema
/**
* Prints the schema of this table to the console in a tree format.
*/
def printSchema(): Unit = print(tableSchema.toString)
/**
* Performs a selection operation. Similar to an SQL SELECT statement. The field expressions
* can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* tab.select('key, 'value.avg + " The average" as 'average)
* }}}
*/
def select(fields: Expression*): Table = {
val expandedFields = expandProjectList(fields, logicalPlan, tableEnv)
val (aggNames, propNames) = extractAggregationsAndProperties(expandedFields, tableEnv)
if (propNames.nonEmpty) {
throw ValidationException("Window properties can only be used on windowed tables.")
}
if (aggNames.nonEmpty) {
val projectsOnAgg = replaceAggregationsAndProperties(
expandedFields, tableEnv, aggNames, propNames)
val projectFields = extractFieldReferences(expandedFields)
new Table(tableEnv,
Project(projectsOnAgg,
Aggregate(Nil, aggNames.map(a => Alias(a._1, a._2)).toSeq,
Project(projectFields, logicalPlan).validate(tableEnv)
).validate(tableEnv)
).validate(tableEnv)
)
} else {
new Table(tableEnv,
Project(expandedFields.map(UnresolvedAlias), logicalPlan).validate(tableEnv))
}
}
/**
* Performs a selection operation. Similar to an SQL SELECT statement. The field expressions
* can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* tab.select("key, value.avg + ' The average' as average")
* }}}
*/
def select(fields: String): Table = {
val fieldExprs = ExpressionParser.parseExpressionList(fields)
//get the correct expression for AggFunctionCall
val withResolvedAggFunctionCall = fieldExprs.map(replaceAggFunctionCall(_, tableEnv))
select(withResolvedAggFunctionCall: _*)
}
/**
* Renames the fields of the expression result. Use this to disambiguate fields before
   * joining two operations.
*
* Example:
*
* {{{
* tab.as('a, 'b)
* }}}
*/
def as(fields: Expression*): Table = {
logicalPlan match {
case functionCall: LogicalTableFunctionCall if functionCall.child == null =>
// If the logical plan is a TableFunctionCall, we replace its field names to avoid special
// cases during the validation.
if (fields.length != functionCall.output.length) {
throw new ValidationException(
"List of column aliases must have same degree as TableFunction's output")
}
if (!fields.forall(_.isInstanceOf[UnresolvedFieldReference])) {
throw new ValidationException(
"Alias field must be an instance of UnresolvedFieldReference"
)
}
new Table(
tableEnv,
LogicalTableFunctionCall(
functionCall.functionName,
functionCall.tableFunction,
functionCall.parameters,
functionCall.resultType,
fields.map(_.asInstanceOf[UnresolvedFieldReference].name).toArray,
functionCall.child)
)
case _ =>
// prepend an AliasNode
new Table(tableEnv, AliasNode(fields, logicalPlan).validate(tableEnv))
}
}
/**
* Renames the fields of the expression result. Use this to disambiguate fields before
   * joining two operations.
*
* Example:
*
* {{{
* tab.as("a, b")
* }}}
*/
def as(fields: String): Table = {
val fieldExprs = ExpressionParser.parseExpressionList(fields)
as(fieldExprs: _*)
}
/**
* Filters out elements that don't pass the filter predicate. Similar to a SQL WHERE
* clause.
*
* Example:
*
* {{{
* tab.filter('name === "Fred")
* }}}
*/
def filter(predicate: Expression): Table = {
new Table(tableEnv, Filter(predicate, logicalPlan).validate(tableEnv))
}
/**
* Filters out elements that don't pass the filter predicate. Similar to a SQL WHERE
* clause.
*
* Example:
*
* {{{
* tab.filter("name = 'Fred'")
* }}}
*/
def filter(predicate: String): Table = {
val predicateExpr = ExpressionParser.parseExpression(predicate)
filter(predicateExpr)
}
/**
* Filters out elements that don't pass the filter predicate. Similar to a SQL WHERE
* clause.
*
* Example:
*
* {{{
* tab.where('name === "Fred")
* }}}
*/
def where(predicate: Expression): Table = {
filter(predicate)
}
/**
* Filters out elements that don't pass the filter predicate. Similar to a SQL WHERE
* clause.
*
* Example:
*
* {{{
* tab.where("name = 'Fred'")
* }}}
*/
def where(predicate: String): Table = {
filter(predicate)
}
/**
* Groups the elements on some grouping keys. Use this before a selection with aggregations
* to perform the aggregation on a per-group basis. Similar to a SQL GROUP BY statement.
*
* Example:
*
* {{{
* tab.groupBy('key).select('key, 'value.avg)
* }}}
*/
def groupBy(fields: Expression*): GroupedTable = {
new GroupedTable(this, fields)
}
/**
* Groups the elements on some grouping keys. Use this before a selection with aggregations
* to perform the aggregation on a per-group basis. Similar to a SQL GROUP BY statement.
*
* Example:
*
* {{{
* tab.groupBy("key").select("key, value.avg")
* }}}
*/
def groupBy(fields: String): GroupedTable = {
val fieldsExpr = ExpressionParser.parseExpressionList(fields)
groupBy(fieldsExpr: _*)
}
/**
* Removes duplicate values and returns only distinct (different) values.
*
* Example:
*
* {{{
* tab.select("key, value").distinct()
* }}}
*/
def distinct(): Table = {
new Table(tableEnv, Distinct(logicalPlan).validate(tableEnv))
}
/**
* Joins two [[Table]]s. Similar to an SQL join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary. You can use
* where and select clauses after a join to further specify the behaviour of the join.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.join(right).where('a === 'b && 'c > 3).select('a, 'b, 'd)
* }}}
*/
def join(right: Table): Table = {
join(right, None, JoinType.INNER)
}
/**
* Joins two [[Table]]s. Similar to an SQL join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.join(right, "a = b")
* }}}
*/
def join(right: Table, joinPredicate: String): Table = {
join(right, joinPredicate, JoinType.INNER)
}
/**
* Joins two [[Table]]s. Similar to an SQL join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.join(right, 'a === 'b).select('a, 'b, 'd)
* }}}
*/
def join(right: Table, joinPredicate: Expression): Table = {
join(right, Some(joinPredicate), JoinType.INNER)
}
/**
   * Joins this [[Table]] with a user-defined [[org.apache.calcite.schema.TableFunction]].
* This join is similar to a SQL left outer join with ON TRUE predicate, but it works with a
* table function. Each row of the outer table is joined with all rows produced by the table
* function. If the table function does not produce any row, the outer row is padded with nulls.
*
* Scala Example:
* {{{
* class MySplitUDTF extends TableFunction[String] {
* def eval(str: String): Unit = {
* str.split("#").foreach(collect)
* }
* }
*
* val split = new MySplitUDTF()
   *   table.leftOuterJoin(split('c) as ('s)).select('a, 'b, 'c, 's)
* }}}
*
* Java Example:
* {{{
* class MySplitUDTF extends TableFunction<String> {
* public void eval(String str) {
   *       for (String s : str.split("#")) { collect(s); }
* }
* }
*
* TableFunction<String> split = new MySplitUDTF();
* tableEnv.registerFunction("split", split);
   *   table.leftOuterJoin(new Table(tableEnv, "split(c)").as("s")).select("a, b, c, s");
* }}}
*/
def leftOuterJoin(right: Table): Table = {
join(right, None, JoinType.LEFT_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL left outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.leftOuterJoin(right, "a = b").select('a, 'b, 'd)
* }}}
*/
def leftOuterJoin(right: Table, joinPredicate: String): Table = {
join(right, joinPredicate, JoinType.LEFT_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL left outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.leftOuterJoin(right, 'a === 'b).select('a, 'b, 'd)
* }}}
*/
def leftOuterJoin(right: Table, joinPredicate: Expression): Table = {
join(right, Some(joinPredicate), JoinType.LEFT_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL right outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.rightOuterJoin(right, "a = b").select('a, 'b, 'd)
* }}}
*/
def rightOuterJoin(right: Table, joinPredicate: String): Table = {
join(right, joinPredicate, JoinType.RIGHT_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL right outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.rightOuterJoin(right, 'a === 'b).select('a, 'b, 'd)
* }}}
*/
def rightOuterJoin(right: Table, joinPredicate: Expression): Table = {
join(right, Some(joinPredicate), JoinType.RIGHT_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL full outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.fullOuterJoin(right, "a = b").select('a, 'b, 'd)
* }}}
*/
def fullOuterJoin(right: Table, joinPredicate: String): Table = {
join(right, joinPredicate, JoinType.FULL_OUTER)
}
/**
* Joins two [[Table]]s. Similar to an SQL full outer join. The fields of the two joined
   * operations must not overlap; use [[as]] to rename fields if necessary.
*
* Note: Both tables must be bound to the same [[TableEnvironment]] and its [[TableConfig]] must
* have nullCheck enabled.
*
* Example:
*
* {{{
* left.fullOuterJoin(right, 'a === 'b).select('a, 'b, 'd)
* }}}
*/
def fullOuterJoin(right: Table, joinPredicate: Expression): Table = {
join(right, Some(joinPredicate), JoinType.FULL_OUTER)
}
private def join(right: Table, joinPredicate: String, joinType: JoinType): Table = {
val joinPredicateExpr = ExpressionParser.parseExpression(joinPredicate)
join(right, Some(joinPredicateExpr), joinType)
}
private def join(right: Table, joinPredicate: Option[Expression], joinType: JoinType): Table = {
// check if we join with a table or a table function
    if (!containsUnboundedUDTFCall(right.logicalPlan)) {
// regular table-table join
// check that the TableEnvironment of right table is not null
// and right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException("Only tables from the same TableEnvironment can be joined.")
}
new Table(
tableEnv,
Join(this.logicalPlan, right.logicalPlan, joinType, joinPredicate, correlated = false)
.validate(tableEnv))
} else {
// join with a table function
// check join type
if (joinType != JoinType.INNER && joinType != JoinType.LEFT_OUTER) {
throw new ValidationException(
"TableFunctions are currently supported for join and leftOuterJoin.")
}
val udtf = right.logicalPlan.asInstanceOf[LogicalTableFunctionCall]
val udtfCall = LogicalTableFunctionCall(
udtf.functionName,
udtf.tableFunction,
udtf.parameters,
udtf.resultType,
udtf.fieldNames,
this.logicalPlan
).validate(tableEnv)
new Table(
tableEnv,
Join(this.logicalPlan, udtfCall, joinType, joinPredicate, correlated = true)
.validate(tableEnv))
}
}
/**
* Minus of two [[Table]]s with duplicate records removed.
* Similar to a SQL EXCEPT clause. Minus returns records from the left table that do not
* exist in the right table. Duplicate records in the left table are returned
* exactly once, i.e., duplicates are removed. Both tables must have identical field types.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.minus(right)
* }}}
*/
def minus(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException("Only tables from the same TableEnvironment can be " +
"subtracted.")
}
new Table(tableEnv, Minus(logicalPlan, right.logicalPlan, all = false)
.validate(tableEnv))
}
/**
   * Minus of two [[Table]]s. Similar to a SQL EXCEPT ALL clause.
   * MinusAll returns the records that do not exist in
* the right table. A record that is present n times in the left table and m times
* in the right table is returned (n - m) times, i.e., as many duplicates as are present
* in the right table are removed. Both tables must have identical field types.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.minusAll(right)
* }}}
*/
def minusAll(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException("Only tables from the same TableEnvironment can be " +
"subtracted.")
}
new Table(tableEnv, Minus(logicalPlan, right.logicalPlan, all = true)
.validate(tableEnv))
}
/**
* Unions two [[Table]]s with duplicate records removed.
* Similar to an SQL UNION. The fields of the two union operations must fully overlap.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.union(right)
* }}}
*/
def union(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException("Only tables from the same TableEnvironment can be unioned.")
}
new Table(tableEnv, Union(logicalPlan, right.logicalPlan, all = false).validate(tableEnv))
}
/**
* Unions two [[Table]]s. Similar to an SQL UNION ALL. The fields of the two union operations
* must fully overlap.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.unionAll(right)
* }}}
*/
def unionAll(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException("Only tables from the same TableEnvironment can be unioned.")
}
new Table(tableEnv, Union(logicalPlan, right.logicalPlan, all = true).validate(tableEnv))
}
/**
* Intersects two [[Table]]s with duplicate records removed. Intersect returns records that
* exist in both tables. If a record is present in one or both tables more than once, it is
* returned just once, i.e., the resulting table has no duplicate records. Similar to an
* SQL INTERSECT. The fields of the two intersect operations must fully overlap.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.intersect(right)
* }}}
*/
def intersect(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException(
"Only tables from the same TableEnvironment can be intersected.")
}
new Table(tableEnv, Intersect(logicalPlan, right.logicalPlan, all = false).validate(tableEnv))
}
/**
   * Intersects two [[Table]]s. IntersectAll returns records that exist in both tables.
   * If a record is present n times in the left table and m times in the right table, it is
   * returned min(n, m) times, i.e., the resulting table might have duplicate records. Similar to
   * a SQL INTERSECT ALL clause. The fields of the two intersect operations must fully overlap.
*
* Note: Both tables must be bound to the same [[TableEnvironment]].
*
* Example:
*
* {{{
* left.intersectAll(right)
* }}}
*/
def intersectAll(right: Table): Table = {
// check that right table belongs to the same TableEnvironment
if (right.tableEnv != this.tableEnv) {
throw new ValidationException(
"Only tables from the same TableEnvironment can be intersected.")
}
new Table(tableEnv, Intersect(logicalPlan, right.logicalPlan, all = true).validate(tableEnv))
}
/**
* Sorts the given [[Table]]. Similar to SQL ORDER BY.
* The resulting Table is globally sorted across all parallel partitions.
*
* Example:
*
* {{{
* tab.orderBy('name.desc)
* }}}
*/
def orderBy(fields: Expression*): Table = {
val order: Seq[Ordering] = fields.map {
case o: Ordering => o
case e => Asc(e)
}
new Table(tableEnv, Sort(order, logicalPlan).validate(tableEnv))
}
/**
* Sorts the given [[Table]]. Similar to SQL ORDER BY.
   * The resulting Table is globally sorted across all parallel partitions.
*
* Example:
*
* {{{
* tab.orderBy("name.desc")
* }}}
*/
def orderBy(fields: String): Table = {
val parsedFields = ExpressionParser.parseExpressionList(fields)
orderBy(parsedFields: _*)
}
/**
* Limits a sorted result from an offset position.
* Similar to a SQL LIMIT clause. Limit is technically part of the Order By operator and
* thus must be preceded by it.
*
* Example:
*
* {{{
   *   // returns an unlimited number of records beginning with the 4th record
* tab.orderBy('name.desc).limit(3)
* }}}
*
* @param offset number of records to skip
*/
def limit(offset: Int): Table = {
new Table(tableEnv, Limit(offset = offset, child = logicalPlan).validate(tableEnv))
}
/**
* Limits a sorted result to a specified number of records from an offset position.
* Similar to a SQL LIMIT clause. Limit is technically part of the Order By operator and
* thus must be preceded by it.
*
* Example:
*
* {{{
* // returns 5 records beginning with the 4th record
* tab.orderBy('name.desc).limit(3, 5)
* }}}
*
* @param offset number of records to skip
* @param fetch number of records to be returned
*/
def limit(offset: Int, fetch: Int): Table = {
new Table(tableEnv, Limit(offset, fetch, logicalPlan).validate(tableEnv))
}
/**
* Writes the [[Table]] to a [[TableSink]]. A [[TableSink]] defines an external storage location.
*
* A batch [[Table]] can only be written to a
* [[org.apache.flink.table.sinks.BatchTableSink]], a streaming [[Table]] requires a
* [[org.apache.flink.table.sinks.AppendStreamTableSink]], a
* [[org.apache.flink.table.sinks.RetractStreamTableSink]], or an
* [[org.apache.flink.table.sinks.UpsertStreamTableSink]].
*
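   * Example (a sketch; [[org.apache.flink.table.sinks.CsvTableSink]] and its constructor
   * arguments are illustrative):
   *
   * {{{
   *   val sink = new CsvTableSink("/tmp/results", fieldDelim = "|")
   *   table.writeToSink(sink)
   * }}}
   *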
* @param sink The [[TableSink]] to which the [[Table]] is written.
* @tparam T The data type that the [[TableSink]] expects.
*/
def writeToSink[T](sink: TableSink[T]): Unit = {
    val queryConfig = this.tableEnv match {
      case s: StreamTableEnvironment => s.queryConfig
      case _: BatchTableEnvironment => new BatchQueryConfig
      case _ => null
}
writeToSink(sink, queryConfig)
}
/**
* Writes the [[Table]] to a [[TableSink]]. A [[TableSink]] defines an external storage location.
*
* A batch [[Table]] can only be written to a
* [[org.apache.flink.table.sinks.BatchTableSink]], a streaming [[Table]] requires a
* [[org.apache.flink.table.sinks.AppendStreamTableSink]], a
* [[org.apache.flink.table.sinks.RetractStreamTableSink]], or an
* [[org.apache.flink.table.sinks.UpsertStreamTableSink]].
*
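   * Example (a sketch for a streaming table; the retention times shown are illustrative):
   *
   * {{{
   *   val qConf = streamTableEnv.queryConfig
   *     .withIdleStateRetentionTime(Time.hours(1), Time.hours(2))
   *   table.writeToSink(sink, qConf)
   * }}}
   *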
* @param sink The [[TableSink]] to which the [[Table]] is written.
* @param conf The configuration for the query that writes to the sink.
* @tparam T The data type that the [[TableSink]] expects.
*/
def writeToSink[T](sink: TableSink[T], conf: QueryConfig): Unit = {
// get schema information of table
val rowType = getRelNode.getRowType
val fieldNames: Array[String] = rowType.getFieldNames.asScala.toArray
val fieldTypes: Array[TypeInformation[_]] = rowType.getFieldList.asScala
.map(field => FlinkTypeFactory.toTypeInfo(field.getType)).toArray
// configure the table sink
val configuredSink = sink.configure(fieldNames, fieldTypes)
// emit the table to the configured table sink
tableEnv.writeToSink(this, configuredSink, conf)
}
/**
* Groups the records of a table by assigning them to windows defined by a time or row interval.
*
* For streaming tables of infinite size, grouping into windows is required to define finite
* groups on which group-based aggregates can be computed.
*
* For batch tables of finite size, windowing essentially provides shortcuts for time-based
* groupBy.
*
* __Note__: Computing windowed aggregates on a streaming table is only a parallel operation
* if additional grouping attributes are added to the `groupBy(...)` clause.
* If the `groupBy(...)` only references a window alias, the streamed table will be processed
* by a single task, i.e., with parallelism 1.
*
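   * Example (a sketch, assuming an event-time attribute 'rowtime):
   *
   * {{{
   *   tab.window(Tumble over 10.minutes on 'rowtime as 'w)
   *      .groupBy('w, 'key)
   *      .select('key, 'value.avg)
   * }}}
   *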
* @param window window that specifies how elements are grouped.
* @return A windowed table.
*/
def window(window: Window): WindowedTable = {
new WindowedTable(this, window)
}
/**
* Defines over-windows on the records of a table.
*
* An over-window defines for each record an interval of records over which aggregation
* functions can be computed.
*
* Example:
*
* {{{
* table
* .window(Over partitionBy 'c orderBy 'rowTime preceding 10.seconds as 'ow)
* .select('c, 'b.count over 'ow, 'e.sum over 'ow)
* }}}
*
* __Note__: Computing over window aggregates on a streaming table is only a parallel operation
* if the window is partitioned. Otherwise, the whole stream will be processed by a single
* task, i.e., with parallelism 1.
*
* __Note__: Over-windows for batch tables are currently not supported.
*
* @param overWindows windows that specify the record interval over which aggregations are
* computed.
* @return An OverWindowedTable to specify the aggregations.
*/
@varargs
def window(overWindows: OverWindow*): OverWindowedTable = {
if (tableEnv.isInstanceOf[BatchTableEnvironment]) {
      throw TableException("Over-windows for batch tables are currently not supported.")
}
if (overWindows.size != 1) {
      throw TableException("Over-windows currently support only a single window definition.")
}
new OverWindowedTable(this, overWindows.toArray)
}
var tableName: String = _
/**
   * Registers a unique table name in the table environment
   * and returns the registered table name.
*/
override def toString: String = {
if (tableName == null) {
tableName = "UnnamedTable$" + tableEnv.attrNameCntr.getAndIncrement()
tableEnv.registerTable(tableName, this)
}
tableName
}
/**
* Checks if the plan represented by a [[LogicalNode]] contains an unbounded UDTF call.
* @param n the node to check
* @return true if the plan contains an unbounded UDTF call, false otherwise.
*/
  private def containsUnboundedUDTFCall(n: LogicalNode): Boolean = {
    n match {
      case functionCall: LogicalTableFunctionCall if functionCall.child == null => true
      case u: UnaryNode => containsUnboundedUDTFCall(u.child)
      case b: BinaryNode => containsUnboundedUDTFCall(b.left) || containsUnboundedUDTFCall(b.right)
case _: LeafNode => false
}
}
}
/**
* A table that has been grouped on a set of grouping keys.
*/
class GroupedTable(
private[flink] val table: Table,
private[flink] val groupKey: Seq[Expression]) {
/**
* Performs a selection operation on a grouped table. Similar to an SQL SELECT statement.
* The field expressions can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* tab.groupBy('key).select('key, 'value.avg + " The average" as 'average)
* }}}
*/
def select(fields: Expression*): Table = {
val expandedFields = expandProjectList(fields, table.logicalPlan, table.tableEnv)
val (aggNames, propNames) = extractAggregationsAndProperties(expandedFields, table.tableEnv)
if (propNames.nonEmpty) {
throw ValidationException("Window properties can only be used on windowed tables.")
}
val projectsOnAgg = replaceAggregationsAndProperties(
expandedFields, table.tableEnv, aggNames, propNames)
val projectFields = extractFieldReferences(expandedFields ++ groupKey)
new Table(table.tableEnv,
Project(projectsOnAgg,
Aggregate(groupKey, aggNames.map(a => Alias(a._1, a._2)).toSeq,
Project(projectFields, table.logicalPlan).validate(table.tableEnv)
).validate(table.tableEnv)
).validate(table.tableEnv))
}
/**
* Performs a selection operation on a grouped table. Similar to an SQL SELECT statement.
* The field expressions can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* tab.groupBy("key").select("key, value.avg + ' The average' as average")
* }}}
*/
def select(fields: String): Table = {
val fieldExprs = ExpressionParser.parseExpressionList(fields)
    // get the correct expression for AggFunctionCall
val withResolvedAggFunctionCall = fieldExprs.map(replaceAggFunctionCall(_, table.tableEnv))
select(withResolvedAggFunctionCall: _*)
}
}
class WindowedTable(
private[flink] val table: Table,
private[flink] val window: Window) {
/**
* Groups the elements by a mandatory window and one or more optional grouping attributes.
* The window is specified by referring to its alias.
*
* If no additional grouping attribute is specified and if the input is a streaming table,
* the aggregation will be performed by a single task, i.e., with parallelism 1.
*
* Aggregations are performed per group and defined by a subsequent `select(...)` clause similar
* to SQL SELECT-GROUP-BY query.
*
* Example:
*
* {{{
   *   tab.window([window] as 'w).groupBy('w, 'key).select('key, 'value.avg)
* }}}
*/
def groupBy(fields: Expression*): WindowGroupedTable = {
val fieldsWithoutWindow = fields.filterNot(window.alias.equals(_))
if (fields.size != fieldsWithoutWindow.size + 1) {
throw new ValidationException("GroupBy must contain exactly one window alias.")
}
new WindowGroupedTable(table, fieldsWithoutWindow, window)
}
/**
* Groups the elements by a mandatory window and one or more optional grouping attributes.
* The window is specified by referring to its alias.
*
* If no additional grouping attribute is specified and if the input is a streaming table,
* the aggregation will be performed by a single task, i.e., with parallelism 1.
*
* Aggregations are performed per group and defined by a subsequent `select(...)` clause similar
* to SQL SELECT-GROUP-BY query.
*
* Example:
*
* {{{
* tab.window([window].as("w")).groupBy("w, key").select("key, value.avg")
* }}}
*/
def groupBy(fields: String): WindowGroupedTable = {
val fieldsExpr = ExpressionParser.parseExpressionList(fields)
groupBy(fieldsExpr: _*)
}
}
class OverWindowedTable(
private[flink] val table: Table,
private[flink] val overWindows: Array[OverWindow]) {
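  /**
   * Performs a selection with over-window aggregations. Similar to a SQL SELECT statement
   * with OVER clauses. A sketch, assuming an over-window aliased as 'w defined via `window(...)`:
   *
   * {{{
   *   overWindowedTable.select('c, 'b.count over 'w, 'e.sum over 'w)
   * }}}
   */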
def select(fields: Expression*): Table = {
val expandedFields = expandProjectList(
fields,
table.logicalPlan,
table.tableEnv)
val expandedOverFields = resolveOverWindows(expandedFields, overWindows, table.tableEnv)
new Table(
table.tableEnv,
Project(expandedOverFields.map(UnresolvedAlias), table.logicalPlan).validate(table.tableEnv))
}
def select(fields: String): Table = {
val fieldExprs = ExpressionParser.parseExpressionList(fields)
    // get the correct expression for AggFunctionCall
val withResolvedAggFunctionCall = fieldExprs.map(replaceAggFunctionCall(_, table.tableEnv))
select(withResolvedAggFunctionCall: _*)
}
}
class WindowGroupedTable(
private[flink] val table: Table,
private[flink] val groupKeys: Seq[Expression],
private[flink] val window: Window) {
/**
* Performs a selection operation on a window grouped table. Similar to an SQL SELECT statement.
* The field expressions can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* windowGroupedTable.select('key, 'window.start, 'value.avg as 'valavg)
* }}}
*/
def select(fields: Expression*): Table = {
val expandedFields = expandProjectList(fields, table.logicalPlan, table.tableEnv)
val (aggNames, propNames) = extractAggregationsAndProperties(expandedFields, table.tableEnv)
val projectsOnAgg = replaceAggregationsAndProperties(
expandedFields, table.tableEnv, aggNames, propNames)
val projectFields = extractFieldReferences(expandedFields ++ groupKeys :+ window.timeField)
new Table(table.tableEnv,
Project(
projectsOnAgg,
WindowAggregate(
groupKeys,
window.toLogicalWindow,
propNames.map(a => Alias(a._1, a._2)).toSeq,
aggNames.map(a => Alias(a._1, a._2)).toSeq,
Project(projectFields, table.logicalPlan).validate(table.tableEnv)
).validate(table.tableEnv)
).validate(table.tableEnv))
}
/**
* Performs a selection operation on a window grouped table. Similar to an SQL SELECT statement.
* The field expressions can contain complex expressions and aggregations.
*
* Example:
*
* {{{
* windowGroupedTable.select("key, window.start, value.avg as valavg")
* }}}
*/
def select(fields: String): Table = {
val fieldExprs = ExpressionParser.parseExpressionList(fields)
    // get the correct expression for AggFunctionCall
val withResolvedAggFunctionCall = fieldExprs.map(replaceAggFunctionCall(_, table.tableEnv))
select(withResolvedAggFunctionCall: _*)
}
}
|
WangTaoTheTonic/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/api/table.scala
|
Scala
|
apache-2.0
| 36,539 |
package almond.util
import java.lang.Thread.UncaughtExceptionHandler
import java.util.concurrent.{Executors, ThreadFactory}
import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.{ExecutionContext, ExecutionContextExecutorService}
import scala.util.control.NonFatal
object ThreadUtil {
// From https://github.com/functional-streams-for-scala/fs2/blob/d47f903bc6bbcdd5d8bc6d573bc7cfd956f0cbb6/core/jvm/src/main/scala/fs2/Strategy.scala#L19-L41
/** A `ThreadFactory` which creates daemon threads, using the given name. */
def daemonThreadFactory(threadName: String, exitJvmOnFatalError: Boolean = true): ThreadFactory = new ThreadFactory {
val defaultThreadFactory = Executors.defaultThreadFactory()
val idx = new AtomicInteger(0)
def newThread(r: Runnable) = {
val t = defaultThreadFactory.newThread(r)
t.setDaemon(true)
t.setName(s"$threadName-${idx.incrementAndGet()}")
t.setUncaughtExceptionHandler(new UncaughtExceptionHandler {
def uncaughtException(t: Thread, e: Throwable): Unit = {
System.err.println(s"------------ UNHANDLED EXCEPTION ---------- (${t.getName})")
e.printStackTrace(System.err)
if (exitJvmOnFatalError) {
e match {
case NonFatal(_) => ()
              case _ => System.exit(-1) // fatal error: terminate the JVM
}
}
}
})
t
}
}
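  // Usage sketch (names are illustrative): back an ExecutionContext with daemon
  // threads so a forgotten pool does not keep the JVM alive.
  //
  //   val ec = ExecutionContext.fromExecutorService(
  //     Executors.newFixedThreadPool(4, daemonThreadFactory("worker")))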
def sequentialExecutionContext(): ExecutionContext =
new SequentialExecutionContext
def singleThreadedExecutionContext(threadName: String): ExecutionContext =
ExecutionContext.fromExecutorService(
Executors.newSingleThreadExecutor(daemonThreadFactory(threadName))
)
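  /**
   * Best-effort shutdown: stops the underlying executor service if the context exposes one.
   * Returns `true` if the context was shut down (or needs no shutdown), `false` otherwise.
   */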
def attemptShutdownExecutionContext(ec: ExecutionContext): Boolean =
ec match {
case _: SequentialExecutionContext =>
true
case es: ExecutionContextExecutorService =>
es.shutdown()
true
case _ =>
false
}
}
|
alexarchambault/jupyter-scala
|
modules/shared/channels/src/main/scala/almond/util/ThreadUtil.scala
|
Scala
|
apache-2.0
| 1,954 |