| code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses: 1 value) | license (stringclasses: 15 values) | size (int64 5–1M) |
|---|---|---|---|---|---|
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.common
import slamdata.Predef._
import quasar.{RenderTree, Terminal}
import scalaz._, Scalaz._
sealed abstract class JoinType extends Product with Serializable
object JoinType {
final case object Inner extends JoinType
final case object FullOuter extends JoinType
final case object LeftOuter extends JoinType
final case object RightOuter extends JoinType
implicit val equal: Equal[JoinType] = Equal.equalRef
implicit val show: Show[JoinType] = Show.showFromToString
implicit val renderTree: RenderTree[JoinType] =
RenderTree.make(t => Terminal(List(t.shows, "JoinType"), None))
}
| jedesah/Quasar | common/src/main/scala/quasar/common/JoinType.scala | Scala | apache-2.0 | 1,223 |
package com.datastax.spark.connector.japi
import com.datastax.driver.core.{ProtocolVersion, Row}
import com.datastax.spark.connector.GettableData
final class CassandraRow(val columnNames: IndexedSeq[String], val columnValues: IndexedSeq[AnyRef])
extends JavaGettableData with Serializable {
private[spark] def this() = this(null: IndexedSeq[String], null) // required by Kryo for deserialization :(
def this(columnNames: Array[String], columnValues: Array[AnyRef]) =
this(columnNames.toIndexedSeq, columnValues.toIndexedSeq)
protected def fieldNames = columnNames
protected def fieldValues = columnValues
def iterator = columnValues.iterator
override def toString = "CassandraRow" + dataAsString
}
object CassandraRow {
/** Deserializes the first n columns from the given `Row` and returns them as
* a `CassandraRow` object. The number of columns retrieved is determined by the length
* of the `columnNames` argument. The `columnNames` argument is used as metadata for
* the newly created `CassandraRow`, but it is not used to fetch data from
* the input `Row`; fetching column values by name is much slower than fetching
* by index, so values are fetched by index to improve performance. */
def fromJavaDriverRow(row: Row, columnNames: Array[String])(implicit protocolVersion: ProtocolVersion): CassandraRow = {
val data = new Array[Object](columnNames.length)
for (i <- columnNames.indices)
data(i) = GettableData.get(row, i)
new CassandraRow(columnNames, data)
}
/** Creates a CassandraRow object from a map with keys denoting column names and
* values denoting column values. */
def fromMap(map: Map[String, Any]): CassandraRow = {
val (columnNames, values) = map.unzip
new CassandraRow(columnNames.toArray, values.map(_.asInstanceOf[AnyRef]).toArray)
}
}
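// Illustrative usage sketch (an addition, not part of the original file): building a
// row from a map, per the `fromMap` scaladoc above. The column names and values here
// are hypothetical.
object CassandraRowExample {
  def main(args: Array[String]): Unit = {
    val row = CassandraRow.fromMap(Map("id" -> 1, "name" -> "cassandra"))
    println(row.columnNames)      // the map keys become the column metadata
    println(row.iterator.toList)  // the map values, boxed as AnyRef
  }
}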
| Stratio/spark-cassandra-connector | spark-cassandra-connector-java/src/main/scala/com/datastax/spark/connector/japi/CassandraRow.scala | Scala | apache-2.0 | 1,820 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.github.errantlinguist.snom.gui
import scala.swing.Frame
import com.github.errantlinguist.snom.PhoneSettingsListener
import com.github.errantlinguist.snom.PhoneSettings
/**
* @author Todd Shore
* @version 03.09.2012
* @since 03.09.2012
*
*/
class SettingsFrame(private val settingsListener: PhoneSettingsListener, title0: String, private var currentSettings: PhoneSettings = null) extends Frame with PhoneSettingsListener {
import collection.JavaConversions._
import scala.swing.Action
import scala.swing.BoxPanel
import scala.swing.Button
import scala.swing.ComboBox
import scala.swing.Component
import scala.swing.Dialog
import scala.swing.FlowPanel
import scala.swing.Label
import scala.swing.Orientation
import scala.swing.TextField
import java.nio.charset.Charset
title = title0
private lazy val encodingBox = {
val encodingNameIter = for (charset ← Charset.availableCharsets().values()) yield charset.name()
val encodingNames = encodingNameIter.toSeq
new ComboBox(encodingNames) {
makeEditable()
}
}
private lazy val phoneHostnameField = new TextField(SettingsFrame.PhoneHostnameFieldColumns)
contents = new BoxPanel(Orientation.Vertical) {
contents += createPhoneHostnamePanel()
contents += createEncodingPanel()
contents += createCloseButtonPanel()
focusable = true
}
override def notifyNewPhoneSettings(newSettings: PhoneSettings) {
currentSettings = newSettings
updateDisplayedSettings()
}
private def createPhoneHostnamePanel(): FlowPanel = {
new FlowPanel {
contents += new Label("Phone hostname")
contents += phoneHostnameField
}
}
private def createEncodingPanel(): FlowPanel = {
new FlowPanel {
contents += new Label("Encoding")
contents += encodingBox
}
}
private def createCloseButton(dialogMessageParent: Component): Button = {
new Button(Action("OK") {
val newSettings = createNewPhoneSettings()
if (!isValidHostname()) {
Dialog.showMessage(dialogMessageParent, "Please enter a valid (i.e. non-empty) hostname.", "Invalid hostname", Dialog.Message.Error)
} else if (!isValidEncoding()) {
Dialog.showMessage(dialogMessageParent, "Please enter a valid (i.e. non-empty) encoding.", "Invalid encoding", Dialog.Message.Error)
} else {
settingsListener.notifyNewPhoneSettings(newSettings)
newSettings.store()
// listenTo(ps)
dispose()
}
})
}
private def createCloseButtonPanel(): FlowPanel = {
val result = new FlowPanel() {
contents += createCloseButton(this)
contents += new Button(Action("Cancel") {
dispose()
})
}
return result
}
private def createNewPhoneSettings(): PhoneSettings = {
new PhoneSettings(phoneHostnameField.text, encodingBox.selection.item)
}
private def updateDisplayedSettings() {
phoneHostnameField.text = currentSettings.phoneHostname
encodingBox.selection.item = currentSettings.encoding
}
private def isValidHostname(): Boolean = {
(phoneHostnameField.text != null
&& !phoneHostnameField.text.matches("\\s*"))
}
private def isValidEncoding(): Boolean = {
(encodingBox.selection.item != null
&& !encodingBox.selection.item.matches("\\s*"))
}
}
/**
* @author Todd Shore
* @version 03.09.2012
* @since 03.09.2012
*
*/
private object SettingsFrame {
private val PhoneHostnameFieldColumns = 15
}
| errantlinguist/phonecontroller | src/main/scala/com/github/errantlinguist/snom/gui/SettingsFrame.scala | Scala | apache-2.0 | 4,160 |
package com.bolour.boardgame.scala.server.domain.json
import org.slf4j.LoggerFactory
import spray.json._
import com.bolour.boardgame.scala.server.domain.{Play, SwapPlay, WordPlay}
import com.bolour.boardgame.scala.server.domain.json.CaseClassFormats._
object PlayJsonProtocol extends DefaultJsonProtocol {
val logger = LoggerFactory.getLogger(this.getClass)
implicit object PlayJsonFormat extends RootJsonFormat[Play] {
override def write(play: Play): JsValue = play match {
case wp: WordPlay => wp.toJson
case sp: SwapPlay => sp.toJson
}
// TODO. How is PlayType serialized?
override def read(json: JsValue): Play = {
val playType = json.asJsObject.fields("playType")
playType match {
case JsString(PlayTypeJsonProtocol.WordPlayTypeString) => json.convertTo[WordPlay]
case JsString(PlayTypeJsonProtocol.SwapPlayTypeString) => json.convertTo[SwapPlay]
case _ => deserializationError("Play expected")
}
}
}
}
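// Illustrative sketch (an addition, not part of the original file): the reader above
// dispatches on the "playType" discriminator field, so a round trip through JSON
// preserves the concrete Play subtype; the writer is assumed to embed the
// discriminator via the CaseClassFormats instances.
object PlayJsonProtocolExample {
  import PlayJsonProtocol._
  def roundTrip(play: Play): Play =
    play.toJson.convertTo[Play]  // works for both WordPlay and SwapPlay
}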
| azadbolour/boardgame | scala-server/app/com/bolour/boardgame/scala/server/domain/json/PlayJsonProtocol.scala | Scala | agpl-3.0 | 995 |
/**
* Mapping Selector
* Mapping Selector
* Copyright (C) 01/04/16 echinopsii
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.echinopsii.ariane.community.core.mapping.ds.sdsl
import com.tinkerpop.blueprints.{GraphQuery, Graph}
import net.echinopsii.ariane.community.core.mapping.ds.{SelectorExecutorException, SelectorParserException, MappingDSGraphPropertyNames}
import net.echinopsii.ariane.community.core.mapping.ds.sdsl.internal.{BlueprintsQueryOperations, Expression}
import org.slf4j.{LoggerFactory, Logger}
class SelectorExecutor(val graph: Object) {
private final val log: Logger = LoggerFactory.getLogger(classOf[SelectorExecutor])
def executeQuery(predicates: (Expression, Expression, String), initialQuery: Object): Object = {
val left = predicates._1
val right = predicates._2
val ops = predicates._3
var updatedQuery: Object = None
ops match {
case "and" =>
updatedQuery = executeQuery(left.query, initialQuery)
updatedQuery = executeQuery(right.query, updatedQuery)
case _ =>
initialQuery match {
case blueprint_query: GraphQuery =>
updatedQuery = blueprint_query.has(left.toString, BlueprintsQueryOperations.toBlueprintsPredicate(ops.toString), right.getValue)
case _ => throw new SelectorParserException("The query type is not supported !")
}
}
updatedQuery
}
def execute(query: String, mo_type: String): Object = {
log.debug("selector query : \\n\\n" + query)
graph match {
case blueprints_graph: Graph =>
val predicates = new SelectorParser().parse(query)
executeQuery(predicates, blueprints_graph.query().has(MappingDSGraphPropertyNames.DD_GRAPH_VERTEX_TYPE_KEY, mo_type))
case _ => throw new SelectorExecutorException("Unsupported Graph API !")
}
}
}
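// Illustrative sketch (an addition, not part of the original file): running a selector
// against a Blueprints graph. The query string syntax and the property names here are
// hypothetical; the actual grammar is defined by SelectorParser.
object SelectorExecutorExample {
  def findHosts(graph: com.tinkerpop.blueprints.Graph): Object = {
    val executor = new SelectorExecutor(graph)
    // "and" predicates are split and applied recursively by executeQuery above
    executor.execute("name = 'server-01' and vendor = 'acme'", "host")
  }
}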
| echinopsii/net.echinopsii.ariane.community.core.mapping | ds/dsl/src/main/scala/net/echinopsii/ariane/community/core/mapping/ds/sdsl/SelectorExecutor.scala | Scala | agpl-3.0 | 2,470 |
package tests.rescala.dynamic
import tests.rescala.testtools.RETests
class ObserverCreation extends RETests {
multiEngined { engine =>
import engine._
test("add Event After") {
var res = 0
val e0 = Evt[Int]()("source")
val e1 = e0.map(identity)("firstMap")
e1.map(_ => e0.map { _ + 1 }("innerMap").observe { res = _ }("observer"))("creatingMap")
e0.fire(10)
assert(res === 11)
}
test("event Handlers Can Be Removed") {
var test = 0
val e1 = Evt[Int]()("e1")
val f = (x: Int) => { test += 1 }
val o = e1.observe(f)("e1Observer")
e1.fire(10)
e1.fire(10)
assert(test == 2)
o.remove()
e1.fire(10)
assert(test == 2)
}
}
}
| guidosalva/REScala | Code/Main/shared/src/test/scala-2/tests/rescala/dynamic/ObserverCreation.scala | Scala | apache-2.0 | 754 |
package com.twitter.finagle.postgres
import java.nio.charset.Charset
import com.twitter.concurrent.AsyncStream
import com.twitter.finagle.Status
import com.twitter.finagle.postgres.messages.SelectResult
import com.twitter.finagle.postgres.values.Types
import com.twitter.util.Future
trait PostgresClient {
def charset: Charset
/*
* Execute some actions inside of a transaction using a single connection
*/
def inTransaction[T](fn: PostgresClient => Future[T]): Future[T]
/*
* Issue an arbitrary SQL query and get the response.
*/
def query(
sql: String
): Future[QueryResponse]
/*
* Issue a single SELECT query and get the response.
*/
def fetch(
sql: String
): Future[SelectResult]
/*
* Execute an update command (e.g., INSERT, DELETE) and get the response.
*/
def executeUpdate(
sql: String
): Future[OK]
def execute(sql: String): Future[OK]
/*
* Run a single SELECT query and wrap the results with the provided function.
*/
def select[T](sql: String)(f: Row => T): Future[Seq[T]] =
selectToStream(sql)(f).toSeq
def selectToStream[T](sql: String)(f: Row => T): AsyncStream[T]
/*
* Issue a single, prepared SELECT query and wrap the response rows with the provided function.
*/
def prepareAndQuery[T](sql: String, params: Param[_]*)(f: Row => T): Future[Seq[T]] =
prepareAndQueryToStream(sql, params: _*)(f).toSeq
def prepareAndQueryToStream[T](sql: String, params: Param[_]*)(f: Row => T): AsyncStream[T]
/*
* Issue a single, prepared arbitrary query without an expected result set, and provide the affected row count
*/
def prepareAndExecute(
sql: String, params: Param[_]*
): Future[Int]
/**
* Close the underlying connection pool and make this Client eternally down
*
* @return
*/
def close(): Future[Unit]
/**
* The current availability [[Status]] of this client.
*/
def status: Status
/**
* Determines whether this client is available (can accept requests
* with a reasonable likelihood of success).
*/
def isAvailable: Boolean
}
object PostgresClient {
case class TypeSpecifier(receiveFunction: String, typeName: String, elemOid: Long = 0)
val defaultTypes = Map(
Types.BOOL -> TypeSpecifier("boolrecv", "bool"),
Types.BYTE_A -> TypeSpecifier("bytearecv", "bytea"),
Types.CHAR -> TypeSpecifier("charrecv", "char"),
Types.NAME -> TypeSpecifier("namerecv", "name"),
Types.INT_8 -> TypeSpecifier("int8recv", "int8"),
Types.INT_2 -> TypeSpecifier("int2recv", "int2"),
Types.INT_4 -> TypeSpecifier("int4recv", "int4"),
Types.REG_PROC -> TypeSpecifier("regprocrecv", "regproc"),
Types.TEXT -> TypeSpecifier("textrecv", "text"),
Types.OID -> TypeSpecifier("oidrecv", "oid"),
Types.TID -> TypeSpecifier("tidrecv", "tid"),
Types.XID -> TypeSpecifier("xidrecv", "xid"),
Types.CID -> TypeSpecifier("cidrecv", "cid"),
Types.JSON -> TypeSpecifier("json_recv", "json"),
Types.XML -> TypeSpecifier("xml_recv", "xml"),
Types.POINT -> TypeSpecifier("point_recv", "point"),
Types.L_SEG -> TypeSpecifier("lseg_recv", "lseg"),
Types.PATH -> TypeSpecifier("path_recv", "path"),
Types.BOX -> TypeSpecifier("box_recv", "box"),
Types.POLYGON -> TypeSpecifier("poly_recv", "poly"),
Types.LINE -> TypeSpecifier("line_recv", "line"),
Types.CIDR -> TypeSpecifier("cidr_recv", "cidr"),
Types.FLOAT_4 -> TypeSpecifier("float4recv", "float4"),
Types.FLOAT_8 -> TypeSpecifier("float8recv", "float8"),
Types.ABS_TIME -> TypeSpecifier("abstimerecv", "abstime"),
Types.REL_TIME -> TypeSpecifier("reltimerecv", "reltime"),
Types.T_INTERVAL -> TypeSpecifier("tinternalrecv", "tinternal"),
Types.UNKNOWN -> TypeSpecifier("unknownrecv", "unknown"),
Types.CIRCLE -> TypeSpecifier("circle_recv", "circle"),
Types.MONEY -> TypeSpecifier("cash_recv", "cash"),
Types.MAC_ADDR -> TypeSpecifier("macaddr_recv", "macaddr"),
Types.INET -> TypeSpecifier("inet_recv", "inet"),
Types.BP_CHAR -> TypeSpecifier("bpcharrecv", "bpchar"),
Types.VAR_CHAR -> TypeSpecifier("varcharrecv", "varchar"),
Types.DATE -> TypeSpecifier("date_recv", "date"),
Types.TIME -> TypeSpecifier("time_recv", "time"),
Types.TIMESTAMP -> TypeSpecifier("timestamp_recv", "timestamp"),
Types.TIMESTAMP_TZ -> TypeSpecifier("timestamptz_recv", "timestamptz"),
Types.INTERVAL -> TypeSpecifier("interval_recv", "interval"),
Types.TIME_TZ -> TypeSpecifier("timetz_recv", "timetz"),
Types.BIT -> TypeSpecifier("bit_recv", "bit"),
Types.VAR_BIT -> TypeSpecifier("varbit_recv", "varbit"),
Types.NUMERIC -> TypeSpecifier("numeric_recv", "numeric"),
Types.RECORD -> TypeSpecifier("record_recv", "record"),
Types.VOID -> TypeSpecifier("void_recv", "void"),
Types.UUID -> TypeSpecifier("uuid_recv", "uuid")
)
}
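// Illustrative usage sketch (an addition, not part of the original file): the table,
// column, and Row accessor are assumptions for the example. `select` wraps each result
// row with the provided function; `inTransaction` runs its actions on one connection.
object PostgresClientExample {
  import com.twitter.util.Await
  def userNames(client: PostgresClient): Seq[String] =
    Await.result(client.select("SELECT name FROM users") { row =>
      row.get[String]("name")  // assumes Row exposes a get[T](column) accessor
    })
  def transfer(client: PostgresClient): Future[OK] =
    client.inTransaction { c =>
      c.executeUpdate("UPDATE accounts SET balance = balance - 10 WHERE id = 1")
        .flatMap(_ => c.executeUpdate("UPDATE accounts SET balance = balance + 10 WHERE id = 2"))
    }
}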
| finagle/finagle-postgres | src/main/scala/com/twitter/finagle/postgres/PostgresClient.scala | Scala | apache-2.0 | 4,890 |
/*
* Copyright 2014 Kevin Herron
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpetri.ethernetip.encapsulation.cpf.items
import com.digitalpetri.ethernetip.encapsulation.cpf.CpfItem
import com.digitalpetri.ethernetip.util.Buffers
import io.netty.buffer.ByteBuf
/**
* The Sockaddr Info items shall be used to communicate IP address or port information necessary to create Class 0 or
* Class 1 connections. There are separate items for originator-to-target and target-to-originator socket information.
*
* The items are present as additional data in Forward_Open / Large_Forward_Open request and reply services encapsulated
* in a SendRRData message.
*/
sealed trait SockaddrItem extends CpfItem {
val sockaddr: Sockaddr
}
case class SockaddrItemO2t(sockaddr: Sockaddr) extends CpfItem(SockaddrItem.TypeIdO2t) with SockaddrItem
case class SockaddrItemT2o(sockaddr: Sockaddr) extends CpfItem(SockaddrItem.TypeIdT2o) with SockaddrItem
object SockaddrItem {
val TypeIdO2t = 0x8000
val TypeIdT2o = 0x8001
val Length = 16
def encode(item: SockaddrItem, buffer: ByteBuf = Buffers.unpooled()): ByteBuf = {
buffer.writeShort(item.typeId)
buffer.writeShort(Length)
Sockaddr.encode(item.sockaddr, buffer)
}
def decode(buffer: ByteBuf): SockaddrItem = {
val typeId = buffer.readUnsignedShort()
val length = buffer.readUnsignedShort()
assert(typeId == TypeIdO2t || typeId == TypeIdT2o)
assert(length == Length)
val sockaddr = Sockaddr.decode(buffer)
typeId match {
case TypeIdO2t => SockaddrItemO2t(sockaddr)
case TypeIdT2o => SockaddrItemT2o(sockaddr)
case _ => throw new Exception(s"invalid SockAddrItem type: $typeId")
}
}
}
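// Illustrative round-trip sketch (an addition, not part of the original file): encode
// writes the type id, the fixed length, and the sockaddr body; decode reads them back
// and selects the O->T or T->O case class from the type id.
object SockaddrItemExample {
  def roundTrip(item: SockaddrItem): SockaddrItem =
    SockaddrItem.decode(SockaddrItem.encode(item))  // structurally equal to the input
}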
| digitalpetri/scala-ethernet-ip | enip-core/src/main/scala/com/digitalpetri/ethernetip/encapsulation/cpf/items/SockaddrItem.scala | Scala | apache-2.0 | 2,249 |
package com.monochromeroad.play.xwiki.rendering.plugin
import play.api._
import com.monochromeroad.play.xwiki.rendering._
import macros.XWikiMacroManager
import scala.Some
/**
* Play Plugin to configure XWiki default renderers
*
* @author Masatoshi Hayashi
*/
class DefaultXWikiRenderingPlugin(app: Application) extends Plugin {
val name = "XWiki Default Renderer"
override def onStart() {
Logger.info("Configuring XWiki default renderers ...")
val macroManager = new XWikiMacroManager(DefaultXWikiComponentManager)
val pluginConfiguration = DefaultXWikiRenderingPluginConfiguration
pluginConfiguration.macroList.map({macroName =>
loadMacroClass(macroName).map(macroManager.reloadMacro(_))
Logger.info("Registered a macro: " + macroName)
})
Logger.info("XWiki default renderers configuration done.\n" + pluginConfiguration.rendererConfiguration)
}
private def loadMacroClass(macroClassName: String): Option[Class[DefaultXWikiMacro[_]]] = {
findMacroClass(macroClassName) match {
case Some(mc) =>
if (mc.isInstanceOf[Class[DefaultXWikiMacro[_]]]) {
Some(mc.asInstanceOf[Class[DefaultXWikiMacro[_]]])
} else {
None
}
case None =>
Logger.warn("The XWiki macro (" + macroClassName + ") not found.")
None
}
}
private def findMacroClass(macroClassName: String): Option[Class[_]] = {
try {
Some(Class.forName(macroClassName))
} catch {
case _: ClassNotFoundException =>
Logger.warn("The XWiki macro (" + macroClassName + ") not found.")
None
}
}
}
| literalice/play-xwiki-rendering | src/main/scala/com/monochromeroad/play/xwiki/rendering/plugin/DefaultXWikiRenderingPlugin.scala | Scala | lgpl-2.1 | 1,627 |
package com.mycompany.scalcium.transformers
import scala.io.Source
import scala.util.control.Breaks.{break, breakable}
import com.mycompany.scalcium.tokenizers.Tokenizer
object NegationHandler {
val NegationPrefix = "negate0"
val phraseSrc = Source.fromInputStream(getClass.getResourceAsStream("/negator_phrases.txt"))
val phrasePatterns = phraseSrc.getLines.filter(line =>
((line.trim().length() > 0) && (! line.startsWith("#")))).
toList.
sortWith(_.length > _.length).
map(phrase => pad(phrase))
def maskNegative(sentence: String, tokenizer: Tokenizer): String = {
var splits: (String,String) = null
breakable {
for (phrasePattern <- phrasePatterns) {
val psentence = pad(sentence)
val firstMatch = psentence.toLowerCase().indexOf(phrasePattern)
if (firstMatch > -1) {
splits = psentence.splitAt(firstMatch)
break
}
}
}
if (splits == null) sentence
else {
val nright = tokenizer.wordTokenize(splits._2).map(word =>
NegationPrefix + word).mkString(" ")
splits._1 + " " + nright
}
}
def pad(s: String): String = " " + s + " "
}
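// Illustrative sketch (an addition, not part of the original file): the phrase file
// contents and the concrete Tokenizer implementation are project-specific, so both are
// assumed here.
object NegationHandlerExample {
  def demo(tokenizer: Tokenizer): String =
    // Assuming "not" appears in negator_phrases.txt, every token from the first
    // matching phrase onward gets the prefix, e.g.
    // "this is negate0not negate0a negate0good negate0idea"
    NegationHandler.maskNegative("this is not a good idea", tokenizer)
}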
| sujitpal/scalcium | src/main/scala/com/mycompany/scalcium/transformers/NegationHandler.scala | Scala | apache-2.0 | 1,181 |
/*
* Copyright 2018-2020 Jan Bessai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless.feat
object VersionCompatibility {
type Factory[-E, +C] = scala.collection.Factory[E, C]
}
| JanBessai/shapeless-feat | src/main/scala-2.13+/shapeless/feat/VersionCompatibility.scala | Scala | apache-2.0 | 712 |
package io.ino.solrs
/**
* Created by magro on 4/29/14.
*/
private[solrs] object HttpUtils {
private val ContentTypePattern = "([a-z]+/[a-z]+)(?:;\\s*charset=([^;]+))?".r
def getContentCharSet(contentType: String): Option[String] = {
if (contentType != null) {
// e.g. application/xml; charset=UTF-8
contentType match {
case ContentTypePattern(_, charset) => Some(charset)
case _ => None
}
} else {
None
}
}
def getMimeType(contentType: String): Option[String] = {
if (contentType != null) {
contentType match {
case ContentTypePattern(mimeType, _) => Some(mimeType)
case _ => None
}
} else {
None
}
}
}
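// Illustrative usage (an addition, not part of the original file), using the
// "application/xml; charset=UTF-8" example from the comment above:
object HttpUtilsExample {
  def main(args: Array[String]): Unit = {
    println(HttpUtils.getContentCharSet("application/xml; charset=UTF-8")) // Some(UTF-8)
    println(HttpUtils.getMimeType("application/xml; charset=UTF-8"))       // Some(application/xml)
  }
}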
| inoio/solrs | src/main/scala/io/ino/solrs/HttpUtils.scala | Scala | apache-2.0 | 717 |
//package com.sksamuel.avro4s.schema
//
//import java.time.Instant
//
//import com.sksamuel.avro4s.AvroSchema
//import org.scalatest.matchers.should.Matchers
//import org.scalatest.wordspec.AnyWordSpec
//
//case class Foo(gg: String = "wibble")
//
//class DefaultValueSchemaTest extends AnyWordSpec with Matchers {
//
// "SchemaEncoder" should {
// "support default values for strings in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultString]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_string.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for ints in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultInt]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_int.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for booleans in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultBoolean]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_boolean.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for doubles in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultDouble]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_double.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for longs in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultLong]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_long.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for floats in top level classes" in {
// val schema = AvroSchema[ClassWithDefaultFloat]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_float.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for instants in top level classes" in {
// implicit val schema = AvroSchema[ClassWithDefaultInstant]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_instant.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values for maps, sets and seqs" in {
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/defaultvalues.json"))
// val schema = AvroSchema[DefaultValues]
// schema.toString(true) shouldBe expected.toString(true)
// }
// "support default values set to None for optional sealed trait hierarchies" in {
// val schema = AvroSchema[DogProspect]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/default_values_optional_union.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
//
// "support default values of optional Seq, Set and Map" in {
// val schema = AvroSchema[OptionalDefaultValues]
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/optional_default_values.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
//
// "support default values that are case classes" in {
// val schema = AvroSchema[Cuppers]
//
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/case_class_default_values.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
//
// "support default values that are case objects" in {
// implicit val schema = AvroSchema[NoVarg]
//
// val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/case_object_default_values.json"))
// schema.toString(true) shouldBe expected.toString(true)
// }
// }
//}
//
//sealed trait Dog
//case class UnderDog(how_unfortunate: Double) extends Dog
//case class UpperDog(how_fortunate: Double) extends Dog
//case class DogProspect(dog: Option[Dog] = None)
//
//case class OptionalDefaultValues(name: Option[String] = Some("sammy"),
// age: Option[Int] = Some(21),
// isFemale: Option[Boolean] = Some(false),
// length: Option[Double] = Some(6.2),
// timestamp: Option[Long] = Some(1468920998000l),
// address: Option[Map[String, String]] = Some(Map(
// "home" -> "sammy's home address",
// "work" -> "sammy's work address"
// )),
// traits: Option[Seq[String]] = Some(Seq("Adventurous", "Helpful")),
// favoriteWine: Option[Wine] = Some(Wine.CabSav),
// luckyNumbers: Option[Set[Int]] = Some(Set(7, 9)),
// favoriteSongs: Option[Set[Song]] = Some(Set.empty[Song])
// )
//
//
//case class ClassWithDefaultString(s: String = "foo")
//case class ClassWithDefaultInt(i: Int = 123)
//case class ClassWithDefaultBoolean(b: Boolean = true)
//case class ClassWithDefaultLong(l: Long = 1468920998000l)
//case class ClassWithDefaultFloat(f: Float = 123.458F)
//case class ClassWithDefaultDouble(d: Double = 123.456)
//case class ClassWithDefaultInstant(min: Instant = Instant.MIN, max: Instant = Instant.MAX, epoch: Instant = Instant.EPOCH)
//
//case class DefaultValues(name: String = "sammy",
// age: Int = 21,
// isFemale: Boolean = false,
// length: Double = 6.2,
// timestamp: Long = 1468920998000l,
// address: Map[String, String] = Map(
// "home" -> "sammy's home address",
// "work" -> "sammy's work address"
// ),
// traits: Seq[String] = Seq("Adventurous", "Helpful"),
// favoriteWine: Wine = Wine.CabSav,
// luckyNumbers: Set[Int] = Set(7, 9),
// favoriteSongs: Set[Song] = Set.empty[Song]
// )
//
//sealed trait Cupcat
//case object Rendal extends Cupcat
//case class Snoutley(snoutley: String) extends Cupcat
//
//case class Cuppers(cupcat: Cupcat = Snoutley("hates varg"))
//case class NoVarg(cupcat: Cupcat = Rendal)
//
//case class Song(title: String)
//
| sksamuel/avro4s | avro4s-core/src/test/scala/com/sksamuel/avro4s/schema/DefaultValueSchemaTest.scala | Scala | apache-2.0 | 6,910 |
package unfiltered.filter
import unfiltered.response.HttpResponse
import unfiltered.request.HttpRequest
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import unfiltered.Cookie
import scala.collection.JavaConverters._
class RequestBinding(req: HttpServletRequest) extends HttpRequest(req) {
def inputStream = req.getInputStream
def reader = req.getReader
def protocol = req.getProtocol
def method = req.getMethod.toUpperCase
def uri = Option(req.getRequestURI) ++ Option(req.getQueryString).map("?%s".format(_)) mkString("")
def parameterNames: Iterator[String] =
req.getParameterNames.asScala
def parameterValues(param: String) = Option[Seq[String]](req.getParameterValues(param)).getOrElse(Nil)
def headerNames: Iterator[String] =
req.getHeaderNames.asScala
def headers(name: String): Iterator[String] =
req.getHeaders(name).asScala
lazy val cookies = req.getCookies match {
case null => Nil
case jcookies =>
jcookies.foldLeft(List[Cookie]())((l, c) =>
Cookie(c.getName, c.getValue, Option(c.getDomain), Option(c.getPath), Option(c.getMaxAge), Option(c.getSecure)) :: l)
}
def isSecure = req.isSecure
def remoteAddr = req.getRemoteAddr
}
class ResponseBinding(res: HttpServletResponse) extends HttpResponse(res) {
def status(statusCode: Int) = res.setStatus(statusCode)
def status: Int = res.getStatus
def outputStream() = res.getOutputStream
def redirect(url: String) = res.sendRedirect(url)
def header(name: String, value: String) = res.addHeader(name, value)
def cookies(resCookies: Seq[Cookie]) = {
import javax.servlet.http.{Cookie => JCookie}
resCookies.foreach { c =>
val jc = new JCookie(c.name, c.value)
if(c.domain.isDefined) jc.setDomain(c.domain.get)
if(c.path.isDefined) jc.setPath(c.path.get)
if(c.maxAge.isDefined) jc.setMaxAge(c.maxAge.get)
if(c.secure.isDefined) jc.setSecure(c.secure.get)
res.addCookie(jc)
}
}
}
| omarkilani/unfiltered | filter/src/main/scala/bindings.scala | Scala | mit | 1,979 |
/*
* Copyright (c) <2015-2016>, see CONTRIBUTORS
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ch.usi.inf.l3.sana.ooj.typechecker
import ch.usi.inf.l3.sana
import sana.ooj
import sana.brokenj
import sana.primj
import sana.tiny
import sana.calcj
import tiny.core.CheckerComponent
import tiny.dsl._
import primj.ast.Implicits._
import primj.ast.{MethodDefApi => PMethodDefApi}
import tiny.names.Name
import calcj.ast.operators.{Inc, Dec}
import tiny.errors.ErrorReporting.{error,warning}
import primj.symbols._
import primj.modifiers.Ops._
import primj.typechecker.ShapeCheckerComponent
import brokenj.typechecker.LabelNameCheckerComponent
import brokenj.errors.ErrorCodes._
import brokenj.ast.TreeUtils
import ooj.ast._
/**
CompilationUnit: DONE
Program: DONE
PackageDef: DONE
ClassDef: DONE
Template: DONE
New: DONE
Select: DONE
This: DONE
Super: DONE
MethodDef: DONE
*/
@component(tree, labelNames)
trait ProgramLabelNameCheckerComponent extends LabelNameCheckerComponent {
(prg: ProgramApi) => {
prg.members.foreach { member => check((member, labelNames)) }
}
}
@component(tree, labelNames)
trait CompilationUnitLabelNameCheckerComponent extends
LabelNameCheckerComponent {
(cunit: CompilationUnitApi) => {
check((cunit.module, labelNames))
}
}
@component(tree, labelNames)
trait PackageDefLabelNameCheckerComponent extends LabelNameCheckerComponent {
(pkg: PackageDefApi) => {
pkg.members.foreach { member => check((member, labelNames)) }
}
}
@component(tree, labelNames)
trait ClassDefLabelNameCheckerComponent extends LabelNameCheckerComponent {
(clazz: ClassDefApi) => {
clazz.parents.foreach { parent => check((parent, labelNames)) }
check((clazz.body, labelNames))
}
}
@component(tree, labelNames)
trait TemplateLabelNameCheckerComponent extends LabelNameCheckerComponent {
(template: TemplateApi) => {
template.members.foreach { member => check((member, labelNames)) }
}
}
@component(tree, labelNames)
trait NewLabelNameCheckerComponent extends LabelNameCheckerComponent {
(nw: NewApi) => {
check((nw.app, labelNames))
}
}
@component(tree, labelNames)
trait SelectLabelNameCheckerComponent extends LabelNameCheckerComponent {
(select: SelectApi) => {
check((select.qual, labelNames))
check((select.tree, labelNames))
}
}
// @component(tree, labelNames)
// trait ThisLabelNameCheckerComponent extends LabelNameCheckerComponent {
// (ths: ThisApi) => ths
// }
//
// @component(tree, labelNames)
// trait SuperLabelNameCheckerComponent extends LabelNameCheckerComponent {
// (spr: SuperApi) => spr
// }
@component(tree, labelNames)
trait MethodDefLabelNameCheckerComponent extends
brokenj.typechecker.MethodDefLabelNameCheckerComponent {
(mthd: PMethodDefApi) => super.apply((mthd, labelNames))
}
| amanjpro/languages-a-la-carte | ooj/src/main/scala/typechecker/labelcheckers.scala | Scala | bsd-3-clause | 4,291 |
package org.vertx.scala.router
/**
* @author <a href="http://www.campudus.com/">Joern Bernhardt</a>
*/
case class RouterException(message: String = "",
cause: Throwable = null,
id: String = "UNKNOWN_SERVER_ERROR",
statusCode: Int = 500)
extends Exception(message, cause)
| vert-x/mod-lang-scala | src/main/scala/org/vertx/scala/router/RouterException.scala | Scala | apache-2.0 | 357 |
package com.trifectalabs.polyline
import scala.math.BigDecimal.RoundingMode
object Polyline {
implicit def double2BigDecimal(d: Double): BigDecimal = BigDecimal(d)
implicit def bigDecimal2Double(bd: BigDecimal): Double = bd.toDouble
def encode(coordinates: List[LatLng]): String = {
coordinates.foldLeft[List[(BigDecimal,BigDecimal)]](Nil)({(acc, coordinate) =>
val lat = coordinate.lat.setScale(5, RoundingMode.HALF_EVEN)
val lng = coordinate.lng.setScale(5, RoundingMode.HALF_EVEN)
acc match {
case Nil => List((lat, lng))
case differences =>
val currentPos = differences.reduceLeft((pos, diff) => (pos._1 + diff._1, pos._2 + diff._2))
(lat - currentPos._1, lng - currentPos._2)::differences
}
}).reverse.map{ case (latDiff, lngDiff) =>
encodeDifference(latDiff) + encodeDifference(lngDiff)
}.mkString
}
private def encodeDifference(diff: BigDecimal): String = {
val value = if (diff < 0) {
~((diff * 100000).toInt << 1)
} else {
(diff * 100000).toInt << 1
}
if (diff == 0)
encodeFiveBitComponents(value, "") + "?"
else
encodeFiveBitComponents(value, "")
}
private def encodeFiveBitComponents(value: Int, str: String): String = {
if (value != 0) {
val fiveBitComponent = if (value >= 0x20) {
((value & 0x1f) | 0x20) + 63
} else {
(value & 0x1f) + 63
}
encodeFiveBitComponents(value >> 5, str + fiveBitComponent.toChar)
} else {
str
}
}
def decode(polyline: String): List[LatLng] = {
decodeDifferences(polyline, Nil).foldRight[List[LatLng]](Nil)({(diff, acc) =>
acc match {
case Nil => List(LatLng(diff._1, diff._2))
case coordinates => LatLng(
(coordinates.head.lat + diff._1).setScale(5, RoundingMode.HALF_DOWN),
(coordinates.head.lng + diff._2).setScale(5, RoundingMode.HALF_DOWN))::coordinates
}
}).reverse
}
private def decodeDifferences(polyline: String, differences: List[(BigDecimal, BigDecimal)]): List[(BigDecimal, BigDecimal)] = {
if (polyline.length > 0) {
val (latDiff, pl1) = decodeDifference(polyline)
val (lngDiff, pl2) = decodeDifference(pl1)
decodeDifferences(pl2, (BigDecimal(latDiff/100000.0), BigDecimal(lngDiff/100000.0))::differences)
} else {
differences
}
}
private def decodeDifference(polyline: String, shift: Int = 0, result: Int = 0): (Int, String) = {
val byte = polyline(0).toInt - 63
val newResult = result | ((byte & 0x1f) << shift)
if (byte >= 0x20) {
decodeDifference(polyline.drop(1), shift+5, newResult)
} else {
val endResult =
if ((newResult & 0x01) == 0x01)
~(newResult >> 1)
else
(newResult >> 1)
(endResult, polyline.drop(1))
}
}
}
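// Illustrative round-trip sketch (an addition, not part of the original file), using
// the coordinates from Google's polyline reference example. LatLng is assumed to be
// this library's (lat, lng) case class with Double fields.
object PolylineExample {
  def main(args: Array[String]): Unit = {
    val points  = List(LatLng(38.5, -120.2), LatLng(40.7, -120.95), LatLng(43.252, -126.453))
    val encoded = Polyline.encode(points)  // deltas, sign-encoded, 5 bits per chunk, +63
    val decoded = Polyline.decode(encoded) // recovers the points at 5 decimal places
    println(encoded)
    println(decoded)
  }
}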
| trifectalabs/polyline-scala | src/main/scala/Polyline.scala | Scala | mit | 2,858 |
package lila.common
import play.api.http.HeaderNames
import play.api.mvc.RequestHeader
object HTTPRequest {
def isXhr(req: RequestHeader): Boolean =
(req.headers get "X-Requested-With") contains "XMLHttpRequest"
def isSocket(req: RequestHeader): Boolean =
(req.headers get HeaderNames.UPGRADE) ?? (_.toLowerCase == "websocket")
def isSynchronousHttp(req: RequestHeader) = !isXhr(req) && !isSocket(req)
def isSafe(req: RequestHeader) = req.method == "GET"
def isRedirectable(req: RequestHeader) = isSynchronousHttp(req) && isSafe(req)
def fullUrl(req: RequestHeader): String = "http://" + req.host + req.uri
def userAgent(req: RequestHeader): Option[String] = req.headers get HeaderNames.USER_AGENT
val isAndroid = UaMatcher("""(?i).*android.+mobile.*""".r)
val isIOS = UaMatcher("""(?i).*(iphone|ipad|ipod).*""".r)
val isMobile = UaMatcher("""(?i).*(iphone|ipad|ipod|android.+mobile).*""".r)
def referer(req: RequestHeader): Option[String] = req.headers get HeaderNames.REFERER
def sid(req: RequestHeader): Option[String] = req.session get "sid"
val isBot = UaMatcher {
("""(?i).*(googlebot|googlebot-mobile|googlebot-image|mediapartners-google|bingbot|slurp|java|wget|curl|commons-httpclient|python-urllib|libwww|httpunit|nutch|phpcrawl|msnbot|adidxbot|blekkobot|teoma|ia_archiver|gingercrawler|webmon|httrack|webcrawler|fast-webcrawler|fastenterprisecrawler|convera|biglotron|grub\\.org|usinenouvellecrawler|antibot|netresearchserver|speedy|fluffy|jyxobot|bibnum\\.bnf|findlink|exabot|gigabot|msrbot|seekbot|ngbot|panscient|yacybot|aisearchbot|ioi|ips-agent|tagoobot|mj12bot|dotbot|woriobot|yanga|buzzbot|mlbot|purebot|lingueebot|yandex\\.com/bots|""" +
"""voyager|cyberpatrol|voilabot|baiduspider|citeseerxbot|spbot|twengabot|postrank|turnitinbot|scribdbot|page2rss|sitebot|linkdex|ezooms|dotbot|mail\\.ru|discobot|zombie\\.js|heritrix|findthatfile|europarchive\\.org|nerdbynature\\.bot|sistrixcrawler|ahrefsbot|aboundex|domaincrawler|wbsearchbot|summify|ccbot|edisterbot|seznambot|ec2linkfinder|gslfbot|aihitbot|intelium_bot|yeti|retrevopageanalyzer|lb-spider|sogou|lssbot|careerbot|wotbox|wocbot|ichiro|duckduckbot|lssrocketcrawler|drupact|webcompanycrawler|acoonbot|openindexspider|gnamgnamspider|web-archive-net\\.com\\.bot|backlinkcrawler|""" +
"""coccoc|integromedb|contentcrawlerspider|toplistbot|seokicks-robot|it2media-domain-crawler|ip-web-crawler\\.com|siteexplorer\\.info|elisabot|proximic|changedetection|blexbot|arabot|wesee:search|niki-bot|crystalsemanticsbot|rogerbot|360spider|psbot|interfaxscanbot|lipperheyseoservice|ccmetadatascaper|g00g1e\\.net|grapeshotcrawler|urlappendbot|brainobot|fr-crawler|binlar|simplecrawler|simplecrawler|livelapbot|twitterbot|cxensebot|smtbot|facebookexternalhit|daumoa|sputnikimagebot).*""").r
}
private def UaMatcher(regex: scala.util.matching.Regex): RequestHeader => Boolean = {
val pattern = regex.pattern
req => {
userAgent(req) ?? { ua => pattern.matcher(ua).matches }
}
}
def isHuman(req: RequestHeader) = !isBot(req)
def isFacebookBot(req: RequestHeader) = userAgent(req) ?? (_ contains "facebookexternalhit")
private val fileExtensionPattern = """.+\.[a-z0-9]{2,4}$""".r.pattern
def hasFileExtension(req: RequestHeader) =
fileExtensionPattern.matcher(req.path).matches
}
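// Illustrative usage (an addition, not part of the original file): the UaMatcher vals
// are RequestHeader => Boolean functions, so they apply directly to a request.
object HTTPRequestExample {
  def describe(req: RequestHeader): String =
    if (HTTPRequest.isBot(req)) "bot"
    else if (HTTPRequest.isMobile(req)) "human on mobile"
    else "human"
}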
| terokinnunen/lila | modules/common/src/main/HTTPRequest.scala | Scala | mit | 3,324 |
package com.ybrikman.ping.scalaapi.dedupe
import scala.concurrent.ExecutionContext
/**
* Any time you use the DedupingCache, you must add this CacheFilter to your filter chain. This filter initializes
* the cache for each incoming request and cleans up the cache after the request has been processed. To avoid memory
* leaks, make sure this filter runs on every single request; it's best to make it the very first
* one in the filter chain, so no other filter can bypass it.
*
* @param dedupingCache
* @tparam K
* @tparam V
*/
class CacheFilter[K, V](dedupingCache: DedupingCache[K, V])(implicit ec: ExecutionContext) extends BeforeAndAfterFilter(
before = rh => dedupingCache.initCacheForRequest(rh),
after = rh => dedupingCache.cleanupCacheForRequest(rh))
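// Illustrative wiring sketch (an addition, not part of the original file): registering
// the filter first in a Play filter chain, per the scaladoc above. The String/String
// type parameters and the DI setup are assumptions, as is BeforeAndAfterFilter being a
// standard Play filter.
import javax.inject.Inject
import play.api.http.HttpFilters

class CacheFilterExampleFilters @Inject() (cacheFilter: CacheFilter[String, String]) extends HttpFilters {
  def filters = Seq(cacheFilter)  // very first in the chain, so no other filter can bypass it
}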
| carlosFattor/ping-play | big-pipe/src/main/scala/com/ybrikman/ping/scalaapi/dedupe/CacheFilter.scala | Scala | mit | 800 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table.validation
import java.sql.Timestamp
import org.apache.flink.api.scala._
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.utils._
import org.junit.Test
class TemporalTableJoinValidationTest extends TableTestBase {
val util: TableTestUtil = streamTestUtil()
val orders = util.addTable[(Long, String, Timestamp)](
"Orders", 'o_amount, 'o_currency, 'o_rowtime.rowtime)
val ordersProctime = util.addTable[(Long, String)](
"OrdersProctime", 'o_amount, 'o_currency, 'o_rowtime.proctime)
val ordersWithoutTimeAttribute = util.addTable[(Long, String, Timestamp)](
"OrdersWithoutTimeAttribute", 'o_amount, 'o_currency, 'o_rowtime)
val ratesHistory = util.addTable[(String, Int, Timestamp)](
"RatesHistory", 'currency, 'rate, 'rowtime.rowtime)
val ratesHistoryWithoutTimeAttribute = util.addTable[(String, Int, Timestamp)](
"ratesHistoryWithoutTimeAttribute", 'currency, 'rate, 'rowtime)
@Test
def testInvalidFieldReference(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [foobar]")
ratesHistory.createTemporalTableFunction('rowtime, 'foobar)
}
@Test
def testInvalidStringFieldReference(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [foobar]")
ratesHistory.createTemporalTableFunction("rowtime", "foobar")
}
@Test
def testNonTimeIndicatorOnRightSide(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIMESTAMP(3)] used to create TemporalTableFunction")
val rates = ratesHistoryWithoutTimeAttribute.createTemporalTableFunction('rowtime, 'currency)
val result = orders
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select("o_amount * rate").as("rate")
util.explain(result)
}
@Test
def testNonTimeIndicatorOnLeftSide(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIMESTAMP(3)] passed as the argument to TemporalTableFunction")
val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
val result = ordersWithoutTimeAttribute
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select("o_amount * rate").as("rate")
util.explain(result)
}
@Test
def testMixedTimeIndicators(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Non rowtime timeAttribute [TIME ATTRIBUTE(PROCTIME)] passed as the argument " +
"to TemporalTableFunction")
val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
val result = ordersProctime
.joinLateral(rates('o_rowtime), 'currency === 'o_currency)
.select("o_amount * rate").as("rate")
util.explain(result)
}
}
| bowenli86/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/validation/TemporalTableJoinValidationTest.scala | Scala | apache-2.0 | 3,920 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.content
import org.apache.toree.kernel.protocol.v5.KernelMessageContent
import play.api.libs.functional.syntax._
import play.api.libs.json._
case class ClearOutput (
// We cannot use _wait as a field because it is a method defined on all objects
_wait: Boolean
) extends KernelMessageContent {
override def content : String =
Json.toJson(this)(ClearOutput.clearOutputWrites).toString
}
// Single-property fields are not well supported by Play; this is a little funky workaround found here:
// https://groups.google.com/forum/?fromgroups=#!starred/play-framework/hGrveOkbJ6U
object ClearOutput extends TypeString {
implicit val clearOutputReads: Reads[ClearOutput] = (
(JsPath \ "wait").read[Boolean].map(ClearOutput(_)))
implicit val clearOutputWrites: Writes[ClearOutput] = (
(JsPath \ "wait").write[Boolean].contramap((c : ClearOutput) => c._wait)
)
/**
* Returns the type string associated with this object.
*
* @return The type as a string
*/
override def toTypeString: String = "clear_output"
}
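// Illustrative round trip (an addition, not part of the original file): the JSON
// property is "wait" while the Scala field is _wait, per the workaround above.
object ClearOutputExample {
  def main(args: Array[String]): Unit = {
    val json = Json.toJson(ClearOutput(_wait = true))  // {"wait":true}
    println(json)
    println(json.as[ClearOutput])                      // ClearOutput(true)
  }
}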
| asorianostratio/incubator-toree | protocol/src/main/scala/org/apache/toree/kernel/protocol/v5/content/ClearOutput.scala | Scala | apache-2.0 | 1,907 |
package org.denigma.threejs.extensions.controls
import org.denigma.threejs._
import org.scalajs.dom
trait IntersectionControls
{
def camera:Camera
def scene:Object3D
val raycaster = new Raycaster()
var intersections = List.empty[Intersection]
var underMouse = Map.empty[Object3D, List[Intersection]]
var last = Map.empty[Object3D, List[Intersection]]
var exit = Map.empty[Object3D, List[Intersection]]
var enter = Map.empty[Object3D, List[Intersection]]
def findIntersections(x:Double,y:Double) =
{
val vector = new Vector3( x, y, 1 )
raycaster.setFromCamera(vector, camera)
raycaster.intersectObjects( scene.children ).sortWith( (a,b)=>a.point.distanceTo(vector)<b.point.distanceTo(vector)).toList
}
def onCursorMove(cordX:Double,cordY:Double,width:Double,height:Double) =
{
val mouseX = 2 * cordX / width - 1
val mouseY = - 2 * cordY / height + 1
intersections = findIntersections(mouseX,mouseY)
underMouse = intersections.groupBy(_.`object`).toMap
val l = last // if I do not do this assignment and use last instead of l I get into trouble
this.exit = l.filterKeys(!underMouse.contains(_)).toMap
this.enter = underMouse.filterKeys(!l.contains(_)).toMap
//if(exit.exists{case (key,value)=>enter.contains(key)}) dom.console.error("same enterexit")
val s = enter.size
last = underMouse
if(s!=enter.size) dom.console.error("ScalaJS error with immutable collections")
}
}
| waman/threejs-facade | facade/src/main/scala/org/denigma/threejs/extensions/controls/IntersectionControls.scala | Scala | mpl-2.0 | 1,466 |
package org.apache.spark.asyspark.core.serialization
/**
* A very fast primitive serializer using sun's Unsafe to directly read/write memory regions in the JVM
*
* @param size The size of the serialized output (in bytes)
*/
private class FastPrimitiveSerializer(size: Int) {
val bytes = new Array[Byte](size)
private val unsafe = SerializationConstants.unsafe
private val offset = unsafe.arrayBaseOffset(classOf[Array[Byte]])
private var position: Long = 0
@inline
def reset(): Unit = position = 0L
@inline
def writeFloat(value: Float): Unit = {
unsafe.putFloat(bytes, offset + position, value)
position += SerializationConstants.sizeOfFloat
}
@inline
def writeInt(value: Int): Unit = {
unsafe.putInt(bytes, offset + position, value)
position += SerializationConstants.sizeOfInt
}
@inline
def writeByte(value: Byte): Unit = {
unsafe.putByte(bytes, offset + position, value)
position += SerializationConstants.sizeOfByte
}
@inline
def writeLong(value: Long): Unit = {
unsafe.putLong(bytes, offset + position, value)
position += SerializationConstants.sizeOfLong
}
@inline
def writeDouble(value: Double): Unit = {
unsafe.putDouble(bytes, offset + position, value)
position += SerializationConstants.sizeOfDouble
}
@inline
def writeArrayInt(value: Array[Int]): Unit = {
unsafe.copyMemory(value, unsafe.arrayBaseOffset(classOf[Array[Int]]), bytes, offset + position, value.length * SerializationConstants.sizeOfInt)
position += value.length * SerializationConstants.sizeOfInt
}
@inline
def writeArrayLong(value: Array[Long]): Unit = {
unsafe.copyMemory(value, unsafe.arrayBaseOffset(classOf[Array[Long]]), bytes, offset + position, value.length * SerializationConstants.sizeOfLong)
position += value.length * SerializationConstants.sizeOfLong
}
@inline
def writeArrayFloat(value: Array[Float]): Unit = {
unsafe.copyMemory(value, unsafe.arrayBaseOffset(classOf[Array[Float]]), bytes, offset + position, value.length * SerializationConstants.sizeOfFloat)
position += value.length * SerializationConstants.sizeOfFloat
}
@inline
def writeArrayDouble(value: Array[Double]): Unit = {
unsafe.copyMemory(value, unsafe.arrayBaseOffset(classOf[Array[Double]]), bytes, offset + position, value.length * SerializationConstants.sizeOfDouble)
position += value.length * SerializationConstants.sizeOfDouble
}
}
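// Illustrative usage sketch (an addition, not part of the original file): a
// length-prefixed double array, sized up front using the same SerializationConstants
// fields the class relies on. The class is package-private, so callers live here.
private object FastPrimitiveSerializerExample {
  def pack(values: Array[Double]): Array[Byte] = {
    val ser = new FastPrimitiveSerializer(
      SerializationConstants.sizeOfInt + values.length * SerializationConstants.sizeOfDouble)
    ser.writeInt(values.length)   // length prefix
    ser.writeArrayDouble(values)  // bulk memory copy of the payload
    ser.bytes
  }
}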
| CASISCAS/asyspark | src/main/scala/org/apache/spark/asyspark/core/serialization/FastPrimitiveSerializer.scala | Scala | mit | 2,452 |
package net.thereturningvoid.bladebot.command
import net.dv8tion.jda.MessageBuilder
import net.dv8tion.jda.events.message.MessageReceivedEvent
import scala.collection.JavaConversions._
class AvatarCommand extends Command {
override def getName: String = "Get User Avatars"
override def getDescription: String = "Gets avatars of specified users."
override def getAliases: List[String] = List(s"${prefix}avatar")
override def getUsage: String = s"${prefix}avatar [users]\n" +
s"${prefix}avatar - Sends your avatar to the text channel.\n" +
s"${prefix}avatar *[users]* - Sends the avatars of mentioned users to the text channel.\n" +
" - Users passed to this command must be **mentioned** (ie. \"@username\", not \"username\").\n" +
"__Examples:__\n" +
s"*${prefix}avatar @Joe Bloggs @Foo Bar* - Posts Joe Bloggs' and Foo Bar's avatar."
override def onCommand(e: MessageReceivedEvent, args: Array[String]): Unit = {
if (!args.isEmpty) {
e.getTextChannel.sendMessage(new MessageBuilder()
.appendString("__Avatars of mentioned users__")
.build())
e.getMessage.getMentionedUsers foreach { u =>
e.getTextChannel.sendMessage(new MessageBuilder()
.appendMention(u)
.appendString("'s avatar:\\n")
.appendString(u.getAvatarUrl)
.build())
}
} else {
e.getTextChannel.sendMessage(new MessageBuilder()
.appendMention(e.getAuthor)
.appendString("'s avatar is:\\n")
.appendString(e.getAuthor.getAvatarUrl)
.build())
}
}
}
| TheReturningVoid/BladeBot | src/main/scala/net/thereturningvoid/bladebot/command/AvatarCommand.scala | Scala | mit | 1,577 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author Matthew Saltz, John Miller
* @version 1.2
* @date Fri Jul 10 12:39:33 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.graphalytics
import java.io.PrintWriter
import scala.collection.immutable.{Set => SET}
import scala.io.Source.fromFile
import LabelType.{TLabel, toTLabel}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `GraphIO` class is used to write graphs to a file.
* @param g the graph to write
*/
class GraphIO (g: Graph)
{
private val DEBUG = true // debug flag
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write graph 'g' to a file in the following format:
* <p>
* Graph (<name>, <inverse> <nVertices>
* <vertexId> <label> <chVertex0> <chVertex1> ...
* ...
* )
* <p>
* @param name the file-name containing the graph's vertex, edge and label information
* @param base the base sub-directory for storing graphs
* @param ext the standard file extension for graph
*/
def write (name: String = g.name, base: String = BASE_DIR, ext: String = EXT)
{
val gFile = base + name + ext // relative path-name for file
val pw = new PrintWriter (gFile)
if (DEBUG) println (s"write: gFile = $gFile")
pw.println (s"Graph (${g.name}, ${g.inverse}, ${g.size}")
for (i <- g.ch.indices) pw.println (g.toLine (i))
pw.println (")")
pw.close ()
} // write
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write the graph to TWO 'igraph' compatible files.
* @see igraph.sourceforge.net
*/
def write2IgraphFiles (prefix: String): (String, String) =
{
val lFile = prefix + "igl.txt"
val eFile = prefix + "ige.txt"
val lOut = new PrintWriter (lFile)
g.label.foreach (lOut.println (_))
lOut.close
val eOut = new PrintWriter (eFile)
for (i <- g.ch.indices) g.ch(i).foreach (x => eOut.println (i + " " + x))
eOut.close
(lFile, eFile)
} // write2IgraphFiles
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write the graph to TWO 'Neo4J' compatible files: 'lFile' and 'eFile' so that
* they may be fed into 'Neo4j' with one of its utilities.
* FIX: need to handle multiple edge types.
* @param lFile the file containing the graph labels (line: vertex-id TAB label)
* @param eFile the file the edges (line: start-id TAB end-id TAB type)
*/
def write2Neo4JFiles (lFile: String, eFile: String)
{
val vertexLine = new PrintWriter (lFile) // write the vertex ids and their labels
vertexLine.println ("id\\tlabel")
g.label.foldLeft (1) { (i, l) => vertexLine.println (i + "\\t" + l); i + 1 }
vertexLine.close
val edgeLine = new PrintWriter (eFile) // write the edges and their types.
edgeLine.println ("start\\tend\\ttype")
g.ch.foldLeft (1) { (i, v) =>
v.foreach { c => edgeLine.println (i + "\\t" + (c+1) + "\\tEDGE") }
i + 1
} // foldLeft
edgeLine.close
} // write2Neo4JFiles
} // GraphIO class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `GraphIO` object is the companion object to the `GraphIO` class and is used
* for reading graphs from files.
*/
object GraphIO
{
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given an array of integers as strings, make the corresponding set.
* @param eStrArr the array of integer strings
*/
def makeSet (eStrArr: Array [String]): SET [Int] =
{
if (eStrArr(0) == "") SET [Int] () else eStrArr.map (_.toInt).toSet.asInstanceOf [SET [Int]]
} // makeSet
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a graph from a file based on the format used by 'print' and 'write':
* <p>
* Graph (<name>, <inverse>, <nVertices>
* <vertexId>, <label>, <chVertex0>, <chVertex1> ...
* ...
* )
* <p>
* @param name the file-name containing the graph's vertex, edge and label information
* @param base the base sub-directory for storing graphs
* @param ext the standard file extension for graph
* @param sep the character separating the values (e.g., ',', ' ', '\t')
*/
def apply (name: String, base: String = BASE_DIR, ext: String = EXT, sep: Char = ','): Graph =
{
val gFile = base + name + ext // relative path-name for file
val l = fromFile (gFile).getLines.toArray // get the lines from gFile
val l0 = l(0).split ('(')(1).split (sep).map (_.trim) // array for line 0
val n = l0(2).toInt // number of vertices in the graph
val ch = Array.ofDim [SET [Int]] (n) // adjacency: array of children (ch)
val label = Array.ofDim [TLabel] (n) // array of vertex labels
println (s"apply: read $n vertices from $gFile")
for (i <- ch.indices) {
val li = l(i+1).split (sep).map (_.trim) // line i (>0) splits into i, label, ch
label(i) = toTLabel (li(1)) // make vertex label
ch(i) = makeSet (li.slice (2, li.length)) // make ch set
} // for
new Graph (ch, label, l0(1) == "true", l0(0))
} // apply
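// Usage sketch (illustrative): reading back a graph written by 'GraphIO.write'
// is symmetric, since 'apply' parses the same format:
//     val g2 = GraphIO ("my_graph") // reads BASE_DIR + "my_graph" + EXT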
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a graph from TWO files:
* 'lFile' is a file with one label per line, where each line represents
* the vertex with id <lineNumber>.
* 'eFile' is a file with each line representing the vertex with id
* <lineNumber>, and each line contains a space-separated list of vertices
* to which the current vertex is adjacent.
* @param lFile the file containing the graph labels
* @param eFile the file containing the edges (to create adjacency sets)
* @param inverse whether to store inverse adjacency sets (parents)
*/
def read2Files (lFile: String, eFile: String, inverse: Boolean = false): Graph =
{
val lLines = fromFile (lFile).getLines // get the lines from lFile
val label = lLines.map (x => toTLabel (x.trim)).toArray // make the label array
val eLines = fromFile (eFile).getLines // get the lines from eFile
val ch = eLines.map ( line => // make the adj array
if (line.trim != "") line.split (" ").map (x => x.trim.toInt).toSet
else SET [Int] ()
).toArray
new Graph (ch, label)
} // read2Files
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Read a graph from TWO specially formatted Pajek files.
* @param lFile the file containing the graph labels
* @param eFile the file containing the edges (to create adjacency sets)
* @param inverse whether to store inverse adjacency sets (parents)
*/
def read2PajekFile (lFile: String, eFile: String, inverse: Boolean = false): Graph =
{
val lLines = fromFile (lFile).getLines // get the lines from lFile
val label = lLines.map (x => toTLabel (x.trim)).toArray
val ch = Array.ofDim [SET [Int]] (label.size)
for (i <- ch.indices) ch(i) = SET [Int] ()
val eLines = fromFile (eFile).getLines // get the lines from eFile
for (line <- eLines) {
val splitL = line.split (" ").map (_.trim)
val adjs = splitL.slice (1, splitL.length).map(_.trim.toInt).toSet
ch(splitL(0).toInt-1) ++= adjs
} // for
new Graph (ch, label)
} // read2PajekFile
} // GraphIO object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `GraphIOTest` object is used to test the `GraphIO` class and object.
* > run-main scalation.graphalytics.GraphIOTest
*/
object GraphIOTest extends App
{
val name = "ran_graph" // the name of the graph
val size = 50 // size of the graph
val nLabels = 10 // number of distinct vertex labels
val avDegree = 5 // average vertex out degree for the graph
val inverse = false
// Create a random graph and print it out
val ran_graph = GraphGen.genRandomGraph (size, nLabels, avDegree, inverse, "ran_graph")
println (s"ran_graph = $ran_graph")
ran_graph.printG ()
// Write the graph to a file
println ("start writing graph to " + name)
(new GraphIO (ran_graph)).write ()
println ("end writing graph to " + name)
// Read the file to create a new identical graph
val g = GraphIO (name)
println (s"g = $g")
g.printG ()
} // GraphIOTest object
|
NBKlepp/fda
|
scalation_1.2/src/main/scala/scalation/graphalytics/GraphIO.scala
|
Scala
|
mit
| 9,236 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.dsl
import algolia.AlgoliaDsl._
import algolia.AlgoliaTest
import algolia.http.{HttpPayload, POST}
import algolia.inputs._
class SendTest extends AlgoliaTest {
describe("send event payloads") {
it("should produce ClickedFilters payload") {
(send event ClickedFilters(
"user-token",
"event-name",
"index-name",
Seq("filter")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"click","eventName":"event-name","index":"index-name","userToken":"user-token","filters":["filter"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ClickedObjectIDs payload") {
(send event ClickedObjectIDs(
"user-token",
"event-name",
"index-name",
Seq("objectID")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"click","eventName":"event-name","index":"index-name","userToken":"user-token","objectIDs":["objectID"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ClickedObjectIDsAfterSearch payload") {
(send event ClickedObjectIDsAfterSearch(
"user-token",
"event-name",
"index-name",
Seq("objectID"),
Seq(42),
"query-id"
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"click","eventName":"event-name","index":"index-name","userToken":"user-token","objectIDs":["objectID"],"positions":[42],"queryID":"query-id"}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ConvertedObjectIDs payload") {
(send event ConvertedObjectIDs(
"user-token",
"event-name",
"index-name",
Seq("objectID")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"conversion","eventName":"event-name","index":"index-name","userToken":"user-token","objectIDs":["objectID"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ConvertedObjectIDsAfterSearch payload") {
(send event ConvertedObjectIDsAfterSearch(
"user-token",
"event-name",
"index-name",
Seq("objectID"),
"query-id"
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"conversion","eventName":"event-name","index":"index-name","userToken":"user-token","objectIDs":["objectID"],"queryID":"query-id"}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ConvertedFilters payload") {
(send event ConvertedFilters(
"user-token",
"event-name",
"index-name",
Seq("filter")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"conversion","eventName":"event-name","index":"index-name","userToken":"user-token","filters":["filter"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ViewedFilters payload") {
(send event ViewedFilters(
"user-token",
"event-name",
"index-name",
Seq("filter")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"view","eventName":"event-name","index":"index-name","userToken":"user-token","filters":["filter"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
it("should produce ViewedObjectIDs payload") {
(send event ViewedObjectIDs(
"user-token",
"event-name",
"index-name",
Seq("objectID")
)).build() should be(
HttpPayload(
POST,
Seq("1", "events"),
body = Some(
"""{"events":[{"eventType":"view","eventName":"event-name","index":"index-name","userToken":"user-token","objectIDs":["objectID"]}]}"""
),
isSearch = false,
isInsights = true,
requestOptions = None
)
)
}
}
}
|
algolia/algoliasearch-client-scala
|
src/test/scala/algolia/dsl/SendTest.scala
|
Scala
|
mit
| 6,117 |
package com.twitter.finagle.netty4
import org.scalatest.FunSuite
import com.twitter.finagle.Stack
class ParamTest extends FunSuite {
test("WorkerPool is global") {
val params = Stack.Params.empty
// make sure that we have referential equality across
// param invocations.
val e0 = params[param.WorkerPool].eventLoopGroup
val e1 = params[param.WorkerPool].eventLoopGroup
assert(e0 eq e1)
}
}
|
luciferous/finagle
|
finagle-netty4/src/test/scala/com/twitter/finagle/netty4/ParamTest.scala
|
Scala
|
apache-2.0
| 421 |
package organizing_data.change_value_to_reference.before
class Order(private var customerName: String){
//Customer is still a value object here: even though multiple orders may belong to
//the same customer, each Order object holds its own Customer instance.
//The refactoring changes this so that each customer name maps to a single Customer object.
private var _customer: Customer = new Customer(customerName)
def setCustomerName(customerName: String) = {
_customer = new Customer(customerName)
}
def getCustomerName = {
_customer.name
}
}
object Order {
def numberOfOrdersFor(orders:List[Order],customerName: String): Int = {
orders.count(_.getCustomerName == customerName)
}
}
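// A minimal sketch (an assumption about the companion "after" package, which is
// not shown here) of the Change Value to Reference refactoring: a factory maps
// each customer name to a single shared Customer instance.
//   object Customer {
//     private val instances = scala.collection.mutable.Map.empty[String, Customer]
//     def create(name: String): Customer = instances.getOrElseUpdate(name, new Customer(name))
//   }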
|
zj-lingxin/refactoring
|
src/main/scala/organizing_data/change_value_to_reference/before/Order.scala
|
Scala
|
mit
| 655 |
package ch.becompany.social.github
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.FlatSpec
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
class GithubClientSpec extends FlatSpec with LazyLogging {
import GithubClient._
"A github client" should "receive events" in {
val f = events("becompany")
f.map(_.mkString("\n")).foreach(e => logger.info(e))
assert(Await.result(f, 10 seconds).size == 30)
}
}
|
becompany/akka-social-stream
|
src/test/scala/ch/becompany/social/github/GithubClientSpec.scala
|
Scala
|
apache-2.0
| 517 |
package spray.client
import akka.actor.ActorSystem
import helpers.WithoutEqualizer
import org.scalatest.FunSpec
import spray.httpx.SprayJsonSupport
import spray.json.DefaultJsonProtocol._
import spray.json.JsonFormat
class SendReceiveSpec extends FunSpec with SprayJsonSupport with WithoutEqualizer {
it("Example usage of spray.client.task") {
// just import spray.client.task._ instead of spray.client.pipelining._
import spray.client.task._
case class Location(latitude: Double, longitude: Double)
case class Elevation(location: Location, elevation: Double)
case class GoogleApiResult[T](status: String, results: List[T])
implicit val locationFormat = jsonFormat(Location.apply, "lat", "lng")
implicit val elevationFormat = jsonFormat2(Elevation)
implicit def googleApiResultFormat[T: JsonFormat] = jsonFormat2(GoogleApiResult.apply[T])
implicit val system = ActorSystem("test")
// We only care about the first elevation in the list
val takeFirstElevation: GoogleApiResult[Elevation] => Option[Elevation] = _.results.headOption
// make request, unmarshall response and take first elevation from the list
val pipeline = sendReceive ~> unmarshal[GoogleApiResult[Elevation]] ~> takeFirstElevation
val response = pipeline(Get("http://maps.googleapis.com/maps/api/elevation/json?locations=27.988056,86.925278&sensor=false"))
// Tasks are lazy, we need to run when we need their result
val taskResult = response.attemptRun
taskResult.fold(
error => println("Error: " + error.getMessage),
success => println("Result: " + success)
)
assert(taskResult.isRight)
assert(taskResult.exists(_.contains(Elevation(Location(27.988056, 86.925278), 8815.7158203125))))
system.shutdown()
}
}
|
msimav/spray-contrib-scalaz
|
src/test/scala/spray/client/SendReceiveSpec.scala
|
Scala
|
mit
| 1,783 |
package org.scalatra.test
import java.io.InputStream
import collection.DefaultMap
case class ResponseStatus(code: Int, message: String)
import collection.JavaConverters._
abstract class ClientResponse {
def bodyBytes: Array[Byte]
def inputStream: InputStream
def statusLine: ResponseStatus
def headers: Map[String, Seq[String]]
def body = new String(bodyBytes, charset.getOrElse("ISO-8859-1"))
def mediaType: Option[String] = {
header.get("Content-Type") match {
case Some(contentType) => contentType.split(";").map(_.trim).headOption
case _ => None
}
}
def status = statusLine.code
val header = new DefaultMap[String, String] {
def get(key: String) = {
headers.get(key) match {
case Some(values) => Some(values.head)
case _ => None
}
}
override def apply(key: String) = {
get(key) match {
case Some(value) => value
case _ => null
}
}
def iterator = {
headers.keys.map(name => (name -> this(name))).iterator
}
}
def charset = {
header.getOrElse("Content-Type", "").split(";").map(_.trim).find(_.startsWith("charset=")) match {
case Some(attr) => Some(attr.split("=")(1))
case _ => None
}
}
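// Illustrative example: a Content-Type header of "text/html; charset=UTF-8"
// yields mediaType == Some("text/html") and charset == Some("UTF-8"); when no
// charset attribute is present, 'body' falls back to ISO-8859-1.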
def getReason() = statusLine.message
def getHeader(name: String) = header.getOrElse(name, null)
def getLongHeader(name: String) = header.getOrElse(name, "-1").toLong
def getHeaderNames(): java.util.Enumeration[String] = headers.keysIterator.asJavaEnumeration
def getHeaderValues(name: String): java.util.Enumeration[String] = headers.getOrElse(name, Seq()).iterator.asJavaEnumeration
def getContentBytes() = bodyBytes
def getContent() = body
def getContentType() = header.getOrElse("Content-Type", null)
}
|
etorreborre/scalatra
|
test/src/main/scala/org/scalatra/test/ClientResponse.scala
|
Scala
|
bsd-2-clause
| 1,813 |
package integration
import org.joda.time.DateTime
import org.scalatestplus.play.OneAppPerSuite
import traits.TestBuilder
/**
* Created by unoedx on 06/05/16.
*/
class ListPerformanceTest extends TestBuilder {
var list:List[DateTime] = List()
for(i <- 1 to 100000) {
list = DateTime.now().minusSeconds(i) :: list
}
it should "evaluate speed of iterating over a list" in {
val yesterday = DateTime.now().minusDays(1)
list = DateTime.now() :: list.filter(_.isAfter(yesterday))
assert(list.length < 90000)
}
}
|
waveinch/ses-transactional
|
test/integration/ListPerformanceTest.scala
|
Scala
|
apache-2.0
| 545 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia
import algolia.http.HttpPayload
import algolia.objects.Query
import org.slf4j.{Logger, LoggerFactory}
import java.nio.charset.Charset
import java.util.Base64
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec
import scala.concurrent.duration.Duration
import scala.concurrent.{ExecutionContext, Future}
import scala.util.matching.Regex
import scala.util.{Failure, Success, Try}
/**
* The AlgoliaClient to query Algolia
*
* @param applicationId The APP_ID of your Algolia account
* @param apiKey The API KEY of your Algolia account
* @param customHeader Custom headers to add to every requests
*/
class AlgoliaClient(
applicationId: String,
apiKey: String,
customHeader: Map[String, String] = Map.empty,
configuration: AlgoliaClientConfiguration =
AlgoliaClientConfiguration.default,
private[algolia] val utils: AlgoliaUtils = AlgoliaUtils
) {
if (applicationId == null || applicationId.isEmpty) {
throw new AlgoliaClientException(
s"'applicationId' is probably too short: '$applicationId'"
)
}
if (apiKey == null || apiKey.isEmpty) {
throw new AlgoliaClientException(
s"'apiKey' is probably too short: '$apiKey'"
)
}
final private val ALGOLIANET_COM_HOST = "algolianet.com"
final private val ALGOLIANET_HOST = "algolia.net"
val httpClient: AlgoliaHttpClient = AlgoliaHttpClient(configuration)
val indexingHosts: Seq[String] =
s"https://$applicationId.$ALGOLIANET_HOST" +:
utils.shuffle(
Seq(
s"https://$applicationId-1.$ALGOLIANET_COM_HOST",
s"https://$applicationId-2.$ALGOLIANET_COM_HOST",
s"https://$applicationId-3.$ALGOLIANET_COM_HOST"
)
)
val queryHosts: Seq[String] =
s"https://$applicationId-dsn.$ALGOLIANET_HOST" +:
utils.shuffle(
Seq(
s"https://$applicationId-1.$ALGOLIANET_COM_HOST",
s"https://$applicationId-2.$ALGOLIANET_COM_HOST",
s"https://$applicationId-3.$ALGOLIANET_COM_HOST"
)
)
val analyticsHost: String = "https://analytics.algolia.com"
val insightsHost: String = "https://insights.algolia.io"
/* Personalization default host is set as 'var' because the region might be overridden. */
var personalizationHost: String = "https://personalization.us.algolia.com"
@deprecated("use personalization instead", "1.40.0")
var recommendationHost: String = "https://recommendation.us.algolia.com"
val userAgent =
s"Algolia for Scala (${BuildInfo.version}); JVM (${System.getProperty("java.version")}); Scala (${BuildInfo.scalaVersion})"
val headers: Map[String, String] = customHeader ++ Map(
"Accept-Encoding" -> "gzip",
"X-Algolia-Application-Id" -> applicationId,
"X-Algolia-API-Key" -> apiKey,
"User-Agent" -> userAgent,
"Content-Type" -> "application/json; charset=UTF-8",
"Accept" -> "application/json"
)
private val HMAC_SHA256 = "HmacSHA256"
private val UTF8_CHARSET = Charset.forName("UTF8")
private[algolia] lazy val hostsStatuses =
HostsStatuses(configuration, utils, queryHosts, indexingHosts)
def execute[QUERY, RESULT](query: QUERY)(
implicit executable: Executable[QUERY, RESULT],
executor: ExecutionContext
): Future[RESULT] =
executable(this, query)
def generateSecuredApiKey(
privateApiKey: String,
query: Query,
userToken: Option[String] = None
): String = {
val queryStr = query.copy(userToken = userToken).toParam
val key = hmac(privateApiKey, queryStr)
new String(
Base64.getEncoder.encode(s"$key$queryStr".getBytes(UTF8_CHARSET))
)
}
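// Usage sketch (values illustrative; assumes Query's no-arg defaults): restrict
// a derived key to a single user token.
//   val securedKey = client.generateSecuredApiKey("private-api-key", Query(), Some("user_42"))
// The result Base64-encodes the hex HMAC-SHA256 signature followed by the query
// string, so it can be handed to an untrusted client without being forgeable.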
def getSecuredApiKeyRemainingValidity(
securedApiKey: String
): Option[Duration] = {
val decoded =
new String(Base64.getDecoder.decode(securedApiKey), UTF8_CHARSET)
val keyWithValidUntil: Regex = """validUntil=(\d{1,10})""".r.unanchored
decoded match {
case keyWithValidUntil(validUntil) => {
Try(Duration(validUntil + " seconds")) match {
case Success(d) => Some(d)
case Failure(_) => None
}
}
case _ => None
}
}
private def hmac(key: String, msg: String): String = {
val algorithm = Mac.getInstance(HMAC_SHA256)
algorithm.init(new SecretKeySpec(key.getBytes(), HMAC_SHA256))
algorithm.doFinal(msg.getBytes()).map("%02x".format(_)).mkString
}
def close(): Unit = httpClient.close()
private val failedStart: Future[Nothing] = Future.failed(StartException())
val logger: Logger = LoggerFactory.getLogger("algoliasearch")
private[algolia] def request[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
if (payload.isAnalytics) {
requestAnalytics(payload)
} else if (payload.isInsights) {
requestInsights(payload)
} else if (payload.isPersonalization) {
requestPersonalization(payload)
} else if (payload.isRecommendation) {
requestRecommendation(payload)
} else {
requestSearch(payload)
}
}
private[algolia] def requestAnalytics[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
// Use recoverWith so the wrapping AlgoliaClientException actually propagates
// to the caller (a Future created inside andThen would be discarded).
httpClient.request[T](analyticsHost, headers, payload).recoverWith {
  case e =>
    logger.debug("Analytics API call failed", e)
    Future.failed(new AlgoliaClientException("Analytics API call failed", e))
}
}
private[algolia] def requestPersonalization[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
httpClient.request[T](personalizationHost, headers, payload).recoverWith {
  case e =>
    logger.debug("Personalization API call failed", e)
    Future.failed(
      new AlgoliaClientException("Personalization API call failed", e)
    )
}
}
@deprecated("use personalization instead", "1.40.0")
private[algolia] def requestRecommendation[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
httpClient.request[T](recommendationHost, headers, payload).recoverWith {
  case e =>
    logger.debug("Recommendation API call failed", e)
    Future.failed(
      new AlgoliaClientException("Recommendation API call failed", e)
    )
}
}
private[algolia] def requestInsights[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
httpClient.request[T](insightsHost, headers, payload).recoverWith {
  case e =>
    logger.debug("Insights API call failed", e)
    Future.failed(new AlgoliaClientException("Insights API call failed", e))
}
}
private[algolia] def requestSearch[T: Manifest](
payload: HttpPayload
)(implicit executor: ExecutionContext): Future[T] = {
val hosts = if (payload.isSearch) {
hostsStatuses.queryHostsThatAreUp()
} else {
hostsStatuses.indexingHostsThatAreUp()
}
def makeRequest(host: String): Future[T] =
httpClient.request[T](host, headers, payload).andThen {
case Success(_) =>
hostsStatuses.markHostAsUp(host)
case Failure(_: `4XXAPIException`) =>
hostsStatuses.markHostAsUp(host)
case Failure(_) =>
hostsStatuses.markHostAsDown(host)
}
val result = hosts.foldLeft[Future[T]](failedStart) { (future, host) =>
future.recoverWith {
case f: `4XXAPIException` =>
logger.debug("Got 4XX, no retry", f)
Future.failed(f) //No retry if 4XX
case _ =>
makeRequest(host)
}
}
result.recoverWith {
case e: `4XXAPIException` =>
logger.debug("Got 4XX, no retry", e)
Future.failed(new AlgoliaClientException(e.getMessage, e))
case e =>
logger.debug("All retries failed", e)
Future.failed(new AlgoliaClientException("All retries failed", e))
}
}
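  // Illustrative trace of the failover fold above (host names hypothetical):
  // with hosts = Seq(h1, h2, h3), the chain starts from the pre-failed
  // 'failedStart', so h1 is tried first; every non-4XX failure falls through
  // to the next host, while a 4XX response short-circuits the remaining hosts.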
}
private[algolia] case class StartException() extends Exception
class AlgoliaClientException(message: String, exception: Throwable)
extends Exception(message, exception) {
def this(message: String) = {
this(message, null)
}
def this(exception: Throwable) = {
this(null, exception)
}
}
|
algolia/algoliasearch-client-scala
|
src/main/scala/algolia/AlgoliaClient.scala
|
Scala
|
mit
| 9,430 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import java.time.{LocalDate, LocalDateTime, LocalTime, ZoneId}
import java.util.TimeZone
import java.util.concurrent.TimeUnit
import org.apache.spark.sql.catalyst.util.DateTimeUtils.TimeZoneUTC
/**
* Helper functions for testing date and time functionality.
*/
object DateTimeTestUtils {
val ALL_TIMEZONES: Seq[TimeZone] = TimeZone.getAvailableIDs.toSeq.map(TimeZone.getTimeZone)
val outstandingTimezonesIds: Seq[String] = Seq(
"UTC",
"PST",
"CET",
"Africa/Dakar",
"America/Los_Angeles",
"Antarctica/Vostok",
"Asia/Hong_Kong",
"Europe/Amsterdam")
val outstandingTimezones: Seq[TimeZone] = outstandingTimezonesIds.map(TimeZone.getTimeZone)
val outstandingZoneIds: Seq[ZoneId] = outstandingTimezonesIds.map(DateTimeUtils.getZoneId)
def withDefaultTimeZone[T](newDefaultTimeZone: TimeZone)(block: => T): T = {
val originalDefaultTimeZone = TimeZone.getDefault
try {
TimeZone.setDefault(newDefaultTimeZone)
block
} finally {
TimeZone.setDefault(originalDefaultTimeZone)
}
}
def localDateTimeToMicros(localDateTime: LocalDateTime, tz: TimeZone): Long = {
val instant = localDateTime.atZone(tz.toZoneId).toInstant
DateTimeUtils.instantToMicros(instant)
}
// Returns microseconds since epoch for the given date
def date(
year: Int,
month: Byte = 1,
day: Byte = 1,
hour: Byte = 0,
minute: Byte = 0,
sec: Byte = 0,
micros: Int = 0,
tz: TimeZone = TimeZoneUTC): Long = {
val nanos = TimeUnit.MICROSECONDS.toNanos(micros).toInt
val localDateTime = LocalDateTime.of(year, month, day, hour, minute, sec, nanos)
localDateTimeToMicros(localDateTime, tz)
}
// Returns number of days since epoch for the given date
def days(
year: Int,
month: Byte = 1,
day: Byte = 1,
hour: Byte = 0,
minute: Byte = 0,
sec: Byte = 0): Int = {
val micros = date(year, month, day, hour, minute, sec)
TimeUnit.MICROSECONDS.toDays(micros).toInt
}
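  // Worked example: days(1970, 1, 2) == 1, because 1970-01-02T00:00 UTC is
  // exactly 86400 * 1000000 microseconds after the epoch.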
// Returns microseconds since epoch for current date and give time
def time(
hour: Byte = 0,
minute: Byte = 0,
sec: Byte = 0,
micros: Int = 0,
tz: TimeZone = TimeZoneUTC): Long = {
val nanos = TimeUnit.MICROSECONDS.toNanos(micros).toInt
val localDate = LocalDate.now(tz.toZoneId)
val localTime = LocalTime.of(hour, minute, sec, nanos)
val localDateTime = LocalDateTime.of(localDate, localTime)
localDateTimeToMicros(localDateTime, tz)
}
}
|
pgandhi999/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeTestUtils.scala
|
Scala
|
apache-2.0
| 3,367 |
package castalia.matcher
import akka.actor._
import akka.http.scaladsl.model.StatusCodes._
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import castalia.matcher.MatchResultGatherer.{MatchFound, MatchNotFound}
import castalia.model.Model.StubResponse
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
object MatchResultGatherer {
case object MatchNotFound
case class MatchFound(stubHandler: ActorRef, requestMatch: RequestMatch)
def props(nOfMatchers: Int, origin: ActorRef): Props = Props(new MatchResultGatherer(nOfMatchers, origin))
}
class MatchResultGatherer(nrOfMatchers: Int, origin: ActorRef) extends Actor with ActorLogging {
implicit val timeout = Timeout(2.seconds)
def receive: Receive = awaitResponses(false, nrOfMatchers)
def awaitResponses(responseSent: Boolean, responsesToGet: Int): Receive = {
case MatchNotFound =>
handleMatchNotFound(responseSent, responsesToGet)
case MatchFound(handler, requestMatch) =>
handleMatchFound(responseSent, responsesToGet, handler, requestMatch)
// unexpected messages
case x =>
log.info("MatchResultGatherer received unexpected message: " + x.toString)
}
def handleMatchNotFound(responseSent: Boolean, responsesToGet: Int): Unit = {
if (responsesToGet > 1) {
// No match was found, wait for other matcher results
context.become(awaitResponses(responseSent, responsesToGet - 1))
} else {
// This is the last match result we are waiting for. Stop this actor after processing this message
context.stop(self)
if (!responseSent) {
// No response to origin was sent yet, do so now
origin ! StubResponse(NotFound.intValue, NotFound.reason)
}
}
}
def handleMatchFound(responseSent: Boolean, responsesToGet: Int, stubHandler: ActorRef, requestMatch: RequestMatch): Unit = {
if (!responseSent) {
// First match result found, let stub handler create response and return that to consumer
log.debug(s"match found for $stubHandler: $requestMatch")
(stubHandler ? requestMatch) pipeTo origin
}
if (responsesToGet > 1) {
// Other matchers will still send their result to this actor, stay alive until they have done so
context.become(awaitResponses(true, responsesToGet - 1))
} else {
// No more matchers will send their result
context.stop(self)
}
}
}
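// Usage sketch (actor and message names are illustrative, not from this file):
// a delegating actor would spawn one gatherer per request and fan the request
// out, so that every matcher replies with MatchFound or MatchNotFound:
//   val gatherer = context.actorOf(MatchResultGatherer.props(matchers.size, sender()))
//   matchers.foreach(_.tell(parsedRequest, gatherer))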
|
TimSoethout/stubserver
|
src/main/scala/castalia/matcher/MatchResultGatherer.scala
|
Scala
|
mit
| 2,436 |
package scalaz.stream
import scalaz.\/
import scalaz.stream.Process.Halt
import Cause._
/**
* Defines termination cause for the process.
* Cause is always wrapped in `Halt` and controls process flow.
*/
sealed trait Cause {
/**
* Produces a cause that combines this cause with the underlying `cause`.
* @param cause the underlying cause
* @return the combined cause
*/
def causedBy(cause: Cause): Cause = {
(this, cause) match {
case (End, End) => End
case (End, Kill) => Kill
case (Kill, End | Kill) => Kill
case (End | Kill, err@Error(_)) => err
case (err@Error(_), End | Kill) => err
case (Error(rsn1), Error(rsn2)) if rsn1 == rsn2 => this
case (Error(rsn1), Error(rsn2)) => Error(CausedBy(rsn1, rsn2))
}
}
/**
* Converts this cause to `Kill` or to an `Error`.
*/
def kill: EarlyCause = fold[EarlyCause](Kill)(identity)
def fold[A](onEnd: => A)(f:(EarlyCause => A)) = this match {
case End => onEnd
case early:EarlyCause => f(early)
}
/**
* Converts this termination cause to `Process.Halt`
*/
def asHalt: Halt = this match {
case End => Halt(End)
case Error(Terminated(cause)) => Halt(cause)
case cause => Halt(cause)
}
/**
* Converts this cause to `java.lang.Throwable`
*/
def asThrowable: Throwable = this match {
case End => Terminated(End)
case Kill => Terminated(Kill)
case Error(rsn) => rsn
}
}
object Cause {
/**
* A marker trait indicating that the cause terminated the stream early,
* either due to an error or to being killed.
*/
sealed trait EarlyCause extends Cause
object EarlyCause {
def fromTaskResult[A](r: Throwable \/ A): EarlyCause \/ A =
  r.bimap(Error.apply, identity)
}
/**
* Process terminated normally due to end of input.
* That means the items from Emit have been exhausted.
*/
case object End extends Cause
/**
* Signals forceful process termination.
* A process can be killed when merged (pipe, tee, wye, njoin) and another merged
* stream or the resulting downstream requested its termination.
* This causes the process to run all cleanup actions and then terminate normally.
*/
case object Kill extends EarlyCause
/**
* Signals that evaluation of the last await resulted in an error.
*
* If the error is not handled, the process terminates with the supplied error.
*
* @param rsn the error thrown by the last await
*
*/
case class Error(rsn: Throwable) extends EarlyCause {
override def toString: String = {
s"Error(${rsn.getClass.getName}: ${rsn.getMessage}})"
}
}
/**
* Wrapper for an exception that was caused by another exception during the
* execution of the process.
*/
case class CausedBy(e: Throwable, cause: Throwable) extends Exception(cause) {
override def toString = s"$e caused by: $cause"
override def getMessage: String = toString
override def fillInStackTrace(): Throwable = this
}
/**
* Wrapper to signal the cause of termination.
* This is useful when the cause needs to be propagated outside of the process domain (e.g. into a Task).
*/
case class Terminated(cause:Cause) extends Exception {
override def fillInStackTrace(): Throwable = cause match {
case End | Kill => this
case Error(rsn) => rsn
}
override def toString: String = s"Terminated($cause)"
override def getMessage: String = cause.toString
}
}
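// A minimal sketch (not in the original source) of how causes compose via
// 'causedBy', matching the cases defined above:
//   End.causedBy(Kill) == Kill
//   Kill.causedBy(Error(rsn)) == Error(rsn)
//   Error(rsn1).causedBy(Error(rsn2)) == Error(CausedBy(rsn1, rsn2)) // when rsn1 != rsn2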
|
drostron/scalaz-stream
|
src/main/scala/scalaz/stream/Cause.scala
|
Scala
|
mit
| 3,415 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.table
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.table.api.DataTypes
import org.apache.flink.table.api.config.{ExecutionConfigOptions, OptimizerConfigOptions}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.utils.{AggregatePhaseStrategy, StreamTableTestUtil, TableTestBase}
import org.junit.{Before, Test}
class TwoStageAggregateTest extends TableTestBase {
private var util: StreamTableTestUtil = _
@Before
def before(): Unit = {
util = streamTestUtil()
util.tableEnv.getConfig
.setIdleStateRetentionTime(Time.hours(1), Time.hours(2))
util.tableEnv.getConfig.getConfiguration
.setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
util.tableEnv.getConfig.getConfiguration
.setBoolean(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
util.tableEnv.getConfig.getConfiguration
.setLong(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 3)
util.tableEnv.getConfig.getConfiguration.setString(
OptimizerConfigOptions.TABLE_OPTIMIZER_AGG_PHASE_STRATEGY,
AggregatePhaseStrategy.TWO_PHASE.toString)
}
@Test
def testGroupAggregate(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.groupBy('b)
.select('a.count)
util.verifyPlan(resultTable)
}
@Test
def testGroupAggregateWithConstant1(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.select('a, 4 as 'four, 'b)
.groupBy('four, 'a)
.select('four, 'b.sum)
util.verifyPlan(resultTable)
}
@Test
def testGroupAggregateWithConstant2(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.select('b, 4 as 'four, 'a)
.groupBy('b, 'four)
.select('four, 'a.sum)
util.verifyPlan(resultTable)
}
@Test
def testGroupAggregateWithExpressionInSelect(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.select('a as 'a, 'b % 3 as 'd, 'c as 'c)
.groupBy('d)
.select('c.min, 'a.avg)
util.verifyPlan(resultTable)
}
@Test
def testGroupAggregateWithFilter(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.groupBy('b)
.select('b, 'a.sum)
.where('b === 2)
util.verifyPlan(resultTable)
}
@Test
def testGroupAggregateWithAverage(): Unit = {
val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
val resultTable = table
.groupBy('b)
.select('b, 'a.cast(DataTypes.DOUBLE()).avg)
util.verifyPlan(resultTable)
}
}
|
fhueske/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TwoStageAggregateTest.scala
|
Scala
|
apache-2.0
| 3,664 |
import controllers.ControllerSupport
import play.api.libs.concurrent.Akka
import play.api.mvc.RequestHeader
import play.api.{Application, GlobalSettings}
import scala.concurrent.Future
object Global extends GlobalSettings with ControllerSupport{
override def onStart(app: Application) {
super.onStart(app)
play.Logger.info("Starting the application.")
}
override def onStop(app: Application) {
super.onStop(app)
play.Logger.info("Stopping the application.")
}
override def onRouteRequest(request: RequestHeader) = {
play.Logger.info("Incoming request: %s" format request)
super.onRouteRequest(request)
}
override def onHandlerNotFound(request: RequestHeader) = {
import play.api.Play.current
implicit val dispatcher = Akka.system.dispatcher
implicit val request0 = request
Future(CustomNotFound)
}
}
|
semberal/homelibrary
|
app/Global.scala
|
Scala
|
apache-2.0
| 860 |
package org.tearne.crosser.cross
import org.junit.runner.RunWith
import org.tearne.crosser.plant.ConcretePlant
import org.tearne.crosser.plant.Species
import org.tearne.crosser.plant.Plant
import org.scalatest.FreeSpec
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
class CrossTest extends FreeSpec with MockitoSugar{
"Cross should" - {
"be crossable" in new Setup{
assert(
Cross(leftCrossable, leftCrossable, protocol, "bert").isInstanceOf[Crossable]
)
}
"know it's species from it's parents" in new Setup{
val species = mock[Species]
when(leftCrossable.species).thenReturn(species)
when(rightCrossable.species).thenReturn(species)
val instance = Cross(leftCrossable, rightCrossable, protocol, "bert")
assertResult(species)(instance.species)
}
"throw exception if parent species don't agree" in new Setup{
when(leftCrossable.species).thenReturn(mock[Species])
when(rightCrossable.species).thenReturn(mock[Species])
intercept[CrossableException]{
Cross(leftCrossable, rightCrossable, protocol, "bert")
}
}
"have value based hashcode and equals" in new Setup{
val instance1a = Cross(leftCrossable, rightCrossable, protocol, "bert")
val instance1b = Cross(leftCrossable, rightCrossable, protocol, "bert")
val instance2 = Cross(mock[Crossable], rightCrossable, protocol, "bert")
val instance3 = Cross(leftCrossable, mock[Crossable], protocol, "bert")
val instance4 = Cross(leftCrossable, rightCrossable, mock[Protocol], "bert")
val instance5 = Cross(leftCrossable, rightCrossable, protocol, "bert1")
assertResult(instance1b)(instance1a)
assert(instance1a != instance2)
assert(instance1a != instance3)
assert(instance1a != instance4)
assert(instance1a != instance5)
assertResult(instance1b.hashCode)(instance1a.hashCode)
assert(instance1a.hashCode != instance2.hashCode)
assert(instance1a.hashCode != instance3.hashCode)
assert(instance1a.hashCode != instance4.hashCode)
assert(instance1a.hashCode != instance5.hashCode)
}
}
trait Setup {
val leftCrossable = mock[Crossable]
val rightCrossable = mock[Crossable]
val protocol = mock[Protocol]
}
}
|
tearne/Crosser
|
src/test/scala/org/tearne/crosser/cross/CrossTest.scala
|
Scala
|
apache-2.0
| 2,224 |
package jp.co.bizreach.play2handlebars
import org.scalatest.FunSpec
class Product2MapSpec extends FunSpec {
describe("Product2Map") {
describe("when the case class has symbolic name fields") {
it("should extract values") {
case class TestClass(** : String, && : Int, !! : Boolean)
val value = TestClass("a", 42, false)
assert(Product2Map.convert(value) === Map("**" -> "a", "&&" -> 42, "!!" -> false))
}
}
}
}
|
bizreach/play2-handlebars
|
src/test/scala/jp/co/bizreach/play2handlebars/Product2MapSpec.scala
|
Scala
|
apache-2.0
| 462 |
package examples.extended
import java.lang.Integer.{MAX_VALUE, MIN_VALUE}
import com.twitter.finagle.Service
import com.twitter.finagle.http.Method.Get
import com.twitter.finagle.http.{Request, Status}
import io.fintrospect.ContentTypes.APPLICATION_JSON
import io.fintrospect.RouteSpec
import io.fintrospect.formats.Argo.JsonFormat.array
import io.fintrospect.formats.Argo.ResponseBuilder._
import io.fintrospect.parameters.Query
class BookTermSearch(books: Books) {
private val titleTerms = Query.required.*.string("term", "parts of the title to look for")
private val search = Service.mk { request: Request =>
Ok(array(books.search(MIN_VALUE, MAX_VALUE, titleTerms <-- request).map(_.toJson)))
}
val route = RouteSpec("search for book by title fragment")
.taking(titleTerms)
.returning(Status.Ok -> "we found some books", array(Book("a book", "authorName", 99).toJson))
.producing(APPLICATION_JSON)
.at(Get) / "titleSearch" bindTo search
}
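// Illustrative request (parameter values hypothetical): GET
// /titleSearch?term=war&term=peace returns a JSON array of matching books;
// 'term' is required and repeatable because it is declared Query.required.*.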
|
daviddenton/fintrospect
|
src/main/scala/examples/extended/BookTermSearch.scala
|
Scala
|
apache-2.0
| 975 |
import scala.meta._
object Test {
val v = Version()
}
|
som-snytt/dotty
|
tests/pos/i2551/test_2.scala
|
Scala
|
apache-2.0
| 57 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.frontend.filters
import play.api.mvc.{Filter, RequestHeader, Result}
import play.api.{Logger, Play}
import play.mvc.Http.Status
import uk.gov.hmrc.play.HeaderCarrierConverter
import scala.collection.JavaConverters._
import uk.gov.hmrc.play.http.logging.MdcLoggingExecutionContext._
import scala.concurrent.Future
abstract class CacheControlFilter extends Filter with MicroserviceFilterSupport {
val cachableContentTypes: Seq[String]
final def apply(next: (RequestHeader) => Future[Result])(rh: RequestHeader): Future[Result] = {
implicit val hc = HeaderCarrierConverter.fromHeadersAndSession(rh.headers, Some(rh.session))
next(rh).map(r =>
(r.header.status, r.body.contentType) match {
case (Status.NOT_MODIFIED, _) => r
case (_, Some(contentType)) if cachableContentTypes.exists(contentType.startsWith) => r
case _ => r.withHeaders(CommonHeaders.NoCacheHeader)
})
}
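// Illustrative behaviour: a 304 Not Modified response passes through untouched;
// a 200 response whose Content-Type starts with a configured cachable prefix is
// left as-is; every other response gets the no-cache header appended.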
}
object CacheControlFilter {
def fromConfig(configKey: String) =
new CacheControlFilter {
override lazy val cachableContentTypes = {
val c = Play.current.configuration.getStringList(configKey).toList.map(_.asScala).flatten
Logger.debug(s"Will allow caching of content types matching: ${c.mkString(", ")}")
c
}
}
}
|
hmrc/frontend-bootstrap
|
src/main/scala/uk/gov/hmrc/play/frontend/filters/CacheControlFilter.scala
|
Scala
|
apache-2.0
| 2,041 |
package dpla.ingestion3.mappers.providers
import dpla.ingestion3.mappers.utils.Document
import dpla.ingestion3.messages.{IngestMessage, MessageCollector}
import dpla.ingestion3.model._
import dpla.ingestion3.utils.FlatFileIO
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods.parse
import org.scalatest.{BeforeAndAfter, FlatSpec}
class NYPLMappingTest extends FlatSpec with BeforeAndAfter {
implicit val msgCollector: MessageCollector[IngestMessage] = new MessageCollector[IngestMessage]
val shortName = Some("nypl")
val jsonString: String = new FlatFileIO().readFileAsString("/nypl.json")
val json: Document[JValue] = Document(parse(jsonString))
val extractor = new NyplMapping(json)
it should "extract the correct original ID " in {
val expected = Some("93cd9a10-c552-012f-20e8-58d385a7bc34")
assert(extractor.originalId(json) === expected)
}
it should "mint the correct DPLA URI" in {
assert(extractor.dplaUri(json) === Some(URI("http://dp.la/api/items/9412682033a0b7a5926b584e7756019c")))
}
it should "use provider prefix" in {
assert(extractor.useProviderName === true)
}
it should "use the correct provider prefix" in {
assert(extractor.getProviderName === shortName)
}
it should "extract the correct title " in {
val expected = Seq("Jedediah Buxton [National Calculator, 1705-1780]")
assert(extractor.title(json) === expected)
}
it should "extract the correct alt titles" in {
val expected = Seq("Alternate Title")
assert(extractor.alternateTitle(json) === expected)
}
it should "extract the correct identifiers" in {
val expected = Seq("URN id")
assert(extractor.identifier(json) === expected)
}
it should "extract the correct description" in {
val expected = Seq("A valid note", "hello world.")
assert(extractor.description(json) === expected)
}
it should "extract the correct isShownAt value" in {
val expected = Seq(stringOnlyWebResource("https://digitalcollections.nypl.org/items/4d0e0bc0-c540-012f-1857-58d385a7bc34"))
assert(extractor.isShownAt(json) === expected)
}
it should "extract the correct subjects" in {
val expected = Seq("temporal subject", "Public figures", "Subject title", "Subject name").map(nameOnlyConcept)
assert(extractor.subject(json) forall(expected contains))
}
it should "extract the correct temporal values" in {
val expected = Seq("temporal subject").map(stringOnlyTimeSpan)
assert(extractor.temporal(json) === expected)
}
it should "extract the correct type" in {
val expected = Seq("still image")
assert(extractor.`type`(json) === expected)
}
it should "extract the correct creator" in {
val expected = Seq("Kay, John (1742-1826)").map(nameOnlyAgent)
assert(extractor.creator(json) === expected)
}
it should "extract the correct contributor" in {
val expected = Seq("Contributor").map(nameOnlyAgent)
assert(extractor.contributor(json) === expected)
}
it should "extract the correct dataProvider" in {
val expected = Seq("The Miriam and Ira D. Wallach Division of Art, Prints and Photographs: Print Collection. The New York Public Library").map(nameOnlyAgent)
assert(extractor.dataProvider(json) === expected)
}
it should "extrac the correct preview" in {
val expected = Seq("https://images.nypl.org/index.php?t=t&id=G91F088_006F").map(stringOnlyWebResource)
assert(extractor.preview(json) === expected)
}
it should "extract the correct edmRights " in {
val expected = Seq("http://rightsstatements.org/vocab/NoC-US/1.0/").map(URI)
assert(extractor.edmRights(json) === expected)
}
it should "extract the correct dc rights " in {
val expected = Seq("The New York Public Library believes that this item is in the public domain under the laws of the United States, but did not make a determination as to its copyright status under the copyright laws of other countries. This item may not be in the public domain under the laws of other countries. Though not required, if you want to credit us as the source, please use the following statement, \\"From The New York Public Library,\\" and provide a link back to the item on our Digital Collections site. Doing so helps us track how our collection is used and helps justify freely releasing even more content in the future.")
assert(extractor.rights(json) === expected)
}
it should "extract the correct collection name" in {
val expected = Seq("Robert N. Dennis collection of stereoscopic views").map(nameOnlyCollection)
assert(extractor.collection(json) === expected)
}
}
|
dpla/ingestion3
|
src/test/scala/dpla/ingestion3/mappers/providers/NYPLMappingTest.scala
|
Scala
|
mit
| 4,624 |
package mgoeminne.scalaggplot.coord
/**
* The Cartesian coordinate system is the most familiar, and common, type of coordinate system.
* Setting limits on the coordinate system will zoom the plot (like you're looking at it with a magnifying glass),
* and will not change the underlying data like setting limits on a scale will.
*
* == Examples ==
*
* TODO
*
* @param xlim limits for the x axis.
* @param ylim limits for the y axis.
*
*/
case class cartesian( xlim: Option[(Double, Double)] = None,
ylim: Option[(Double, Double)] = None
) extends Coordinate
|
mgoeminne/scala-ggplot
|
src/main/scala/mgoeminne/scalaggplot/coord/cartesian.scala
|
Scala
|
lgpl-3.0
| 619 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.util.TimeZone
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException}
import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* A reasonably complete test suite (i.e. a set of behaviors) for an [[ExternalCatalog]].
*
* Implementations of the [[ExternalCatalog]] interface can create test suites by extending this.
*/
abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEach {
protected val utils: CatalogTestUtils
import utils._
protected def resetState(): Unit = { }
// Clear all state after each test
override def afterEach(): Unit = {
try {
resetState()
} finally {
super.afterEach()
}
}
// --------------------------------------------------------------------------
// Databases
// --------------------------------------------------------------------------
test("basic create and list databases") {
val catalog = newEmptyCatalog()
catalog.createDatabase(newDb("default"), ignoreIfExists = true)
assert(catalog.databaseExists("default"))
assert(!catalog.databaseExists("testing"))
assert(!catalog.databaseExists("testing2"))
catalog.createDatabase(newDb("testing"), ignoreIfExists = false)
assert(catalog.databaseExists("testing"))
assert(catalog.listDatabases().toSet == Set("default", "testing"))
catalog.createDatabase(newDb("testing2"), ignoreIfExists = false)
assert(catalog.listDatabases().toSet == Set("default", "testing", "testing2"))
assert(catalog.databaseExists("testing2"))
assert(!catalog.databaseExists("does_not_exist"))
}
test("get database when a database exists") {
val db1 = newBasicCatalog().getDatabase("db1")
assert(db1.name == "db1")
assert(db1.description.contains("db1"))
}
test("get database should throw exception when the database does not exist") {
intercept[AnalysisException] { newBasicCatalog().getDatabase("db_that_does_not_exist") }
}
test("list databases without pattern") {
val catalog = newBasicCatalog()
assert(catalog.listDatabases().toSet == Set("default", "db1", "db2", "db3"))
}
test("list databases with pattern") {
val catalog = newBasicCatalog()
assert(catalog.listDatabases("db").toSet == Set.empty)
assert(catalog.listDatabases("db*").toSet == Set("db1", "db2", "db3"))
assert(catalog.listDatabases("*1").toSet == Set("db1"))
assert(catalog.listDatabases("db2").toSet == Set("db2"))
}
test("drop database") {
val catalog = newBasicCatalog()
catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = false)
assert(catalog.listDatabases().toSet == Set("default", "db2", "db3"))
}
test("drop database when the database is not empty") {
// Throw exception if there are functions left
val catalog1 = newBasicCatalog()
catalog1.dropTable("db2", "tbl1", ignoreIfNotExists = false, purge = false)
catalog1.dropTable("db2", "tbl2", ignoreIfNotExists = false, purge = false)
intercept[AnalysisException] {
catalog1.dropDatabase("db2", ignoreIfNotExists = false, cascade = false)
}
resetState()
// Throw exception if there are tables left
val catalog2 = newBasicCatalog()
catalog2.dropFunction("db2", "func1")
intercept[AnalysisException] {
catalog2.dropDatabase("db2", ignoreIfNotExists = false, cascade = false)
}
resetState()
// When cascade is true, it should drop them
val catalog3 = newBasicCatalog()
catalog3.dropDatabase("db2", ignoreIfNotExists = false, cascade = true)
assert(catalog3.listDatabases().toSet == Set("default", "db1", "db3"))
}
test("drop database when the database does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = false, cascade = false)
}
catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
}
test("alter database") {
val catalog = newBasicCatalog()
val db1 = catalog.getDatabase("db1")
// Note: alter properties here because Hive does not support altering other fields
catalog.alterDatabase(db1.copy(properties = Map("k" -> "v3", "good" -> "true")))
val newDb1 = catalog.getDatabase("db1")
assert(db1.properties.isEmpty)
assert(newDb1.properties.size == 2)
assert(newDb1.properties.get("k") == Some("v3"))
assert(newDb1.properties.get("good") == Some("true"))
}
test("alter database should throw exception when the database does not exist") {
intercept[AnalysisException] {
newBasicCatalog().alterDatabase(newDb("does_not_exist"))
}
}
// --------------------------------------------------------------------------
// Tables
// --------------------------------------------------------------------------
test("the table type of an external table should be EXTERNAL_TABLE") {
val catalog = newBasicCatalog()
val table = newTable("external_table1", "db2").copy(tableType = CatalogTableType.EXTERNAL)
catalog.createTable(table, ignoreIfExists = false)
val actual = catalog.getTable("db2", "external_table1")
assert(actual.tableType === CatalogTableType.EXTERNAL)
}
test("create table when the table already exists") {
val catalog = newBasicCatalog()
assert(catalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
val table = newTable("tbl1", "db2")
intercept[TableAlreadyExistsException] {
catalog.createTable(table, ignoreIfExists = false)
}
}
test("drop table") {
val catalog = newBasicCatalog()
assert(catalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
catalog.dropTable("db2", "tbl1", ignoreIfNotExists = false, purge = false)
assert(catalog.listTables("db2").toSet == Set("tbl2"))
}
test("drop table when database/table does not exist") {
val catalog = newBasicCatalog()
// Should always throw exception when the database does not exist
intercept[AnalysisException] {
catalog.dropTable("unknown_db", "unknown_table", ignoreIfNotExists = false, purge = false)
}
intercept[AnalysisException] {
catalog.dropTable("unknown_db", "unknown_table", ignoreIfNotExists = true, purge = false)
}
// Should throw exception when the table does not exist, if ignoreIfNotExists is false
intercept[AnalysisException] {
catalog.dropTable("db2", "unknown_table", ignoreIfNotExists = false, purge = false)
}
catalog.dropTable("db2", "unknown_table", ignoreIfNotExists = true, purge = false)
}
test("rename table") {
val catalog = newBasicCatalog()
assert(catalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
catalog.renameTable("db2", "tbl1", "tblone")
assert(catalog.listTables("db2").toSet == Set("tblone", "tbl2"))
}
test("rename table when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.renameTable("unknown_db", "unknown_table", "unknown_table")
}
intercept[AnalysisException] {
catalog.renameTable("db2", "unknown_table", "unknown_table")
}
}
test("rename table when destination table already exists") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.renameTable("db2", "tbl1", "tbl2")
}
}
test("alter table") {
val catalog = newBasicCatalog()
val tbl1 = catalog.getTable("db2", "tbl1")
catalog.alterTable(tbl1.copy(properties = Map("toh" -> "frem")))
val newTbl1 = catalog.getTable("db2", "tbl1")
assert(!tbl1.properties.contains("toh"))
assert(newTbl1.properties.size == tbl1.properties.size + 1)
assert(newTbl1.properties.get("toh") == Some("frem"))
}
test("alter table when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.alterTable(newTable("tbl1", "unknown_db"))
}
intercept[AnalysisException] {
catalog.alterTable(newTable("unknown_table", "db2"))
}
}
test("alter table schema") {
val catalog = newBasicCatalog()
val newDataSchema = StructType(Seq(
StructField("col1", IntegerType),
StructField("new_field_2", StringType)))
catalog.alterTableDataSchema("db2", "tbl1", newDataSchema)
val newTbl1 = catalog.getTable("db2", "tbl1")
assert(newTbl1.dataSchema == newDataSchema)
}
test("alter table stats") {
val catalog = newBasicCatalog()
val oldTableStats = catalog.getTable("db2", "tbl1").stats
assert(oldTableStats.isEmpty)
val newStats = CatalogStatistics(sizeInBytes = 1)
catalog.alterTableStats("db2", "tbl1", Some(newStats))
val newTableStats = catalog.getTable("db2", "tbl1").stats
assert(newTableStats.get == newStats)
}
test("get table") {
assert(newBasicCatalog().getTable("db2", "tbl1").identifier.table == "tbl1")
}
test("get table when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.getTable("unknown_db", "unknown_table")
}
intercept[AnalysisException] {
catalog.getTable("db2", "unknown_table")
}
}
test("list tables without pattern") {
val catalog = newBasicCatalog()
intercept[AnalysisException] { catalog.listTables("unknown_db") }
assert(catalog.listTables("db1").toSet == Set.empty)
assert(catalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
}
test("list tables with pattern") {
val catalog = newBasicCatalog()
intercept[AnalysisException] { catalog.listTables("unknown_db", "*") }
assert(catalog.listTables("db1", "*").toSet == Set.empty)
assert(catalog.listTables("db2", "*").toSet == Set("tbl1", "tbl2"))
assert(catalog.listTables("db2", "tbl*").toSet == Set("tbl1", "tbl2"))
assert(catalog.listTables("db2", "*1").toSet == Set("tbl1"))
}
test("column names should be case-preserving and column nullability should be retained") {
val catalog = newBasicCatalog()
val tbl = CatalogTable(
identifier = TableIdentifier("tbl", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = storageFormat,
schema = new StructType()
.add("HelLo", "int", nullable = false)
.add("WoRLd", "int", nullable = true),
provider = Some(defaultProvider),
partitionColumnNames = Seq("WoRLd"),
bucketSpec = Some(BucketSpec(4, Seq("HelLo"), Nil)))
catalog.createTable(tbl, ignoreIfExists = false)
val readBack = catalog.getTable("db1", "tbl")
assert(readBack.schema == tbl.schema)
assert(readBack.partitionColumnNames == tbl.partitionColumnNames)
assert(readBack.bucketSpec == tbl.bucketSpec)
}
// --------------------------------------------------------------------------
// Partitions
// --------------------------------------------------------------------------
test("basic create and list partitions") {
val catalog = newEmptyCatalog()
catalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
catalog.createTable(newTable("tbl", "mydb"), ignoreIfExists = false)
catalog.createPartitions("mydb", "tbl", Seq(part1, part2), ignoreIfExists = false)
assert(catalogPartitionsEqual(catalog, "mydb", "tbl", Seq(part1, part2)))
}
test("create partitions when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.createPartitions("does_not_exist", "tbl1", Seq(), ignoreIfExists = false)
}
intercept[AnalysisException] {
catalog.createPartitions("db2", "does_not_exist", Seq(), ignoreIfExists = false)
}
}
test("create partitions that already exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.createPartitions("db2", "tbl2", Seq(part1), ignoreIfExists = false)
}
catalog.createPartitions("db2", "tbl2", Seq(part1), ignoreIfExists = true)
}
test("create partitions without location") {
val catalog = newBasicCatalog()
val table = CatalogTable(
identifier = TableIdentifier("tbl", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)
val partition = CatalogTablePartition(Map("partCol1" -> "1", "partCol2" -> "2"), storageFormat)
catalog.createPartitions("db1", "tbl", Seq(partition), ignoreIfExists = false)
val partitionLocation = catalog.getPartition(
"db1",
"tbl",
Map("partCol1" -> "1", "partCol2" -> "2")).location
val tableLocation = new Path(catalog.getTable("db1", "tbl").location)
val defaultPartitionLocation = new Path(new Path(tableLocation, "partCol1=1"), "partCol2=2")
assert(new Path(partitionLocation) == defaultPartitionLocation)
}
test("create/drop partitions in managed tables with location") {
val catalog = newBasicCatalog()
val table = CatalogTable(
identifier = TableIdentifier("tbl", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)
val newLocationPart1 = newUriForDatabase()
val newLocationPart2 = newUriForDatabase()
val partition1 =
CatalogTablePartition(Map("partCol1" -> "1", "partCol2" -> "2"),
storageFormat.copy(locationUri = Some(newLocationPart1)))
val partition2 =
CatalogTablePartition(Map("partCol1" -> "3", "partCol2" -> "4"),
storageFormat.copy(locationUri = Some(newLocationPart2)))
catalog.createPartitions("db1", "tbl", Seq(partition1), ignoreIfExists = false)
catalog.createPartitions("db1", "tbl", Seq(partition2), ignoreIfExists = false)
assert(exists(newLocationPart1))
assert(exists(newLocationPart2))
// the corresponding directory is dropped.
catalog.dropPartitions("db1", "tbl", Seq(partition1.spec),
ignoreIfNotExists = false, purge = false, retainData = false)
assert(!exists(newLocationPart1))
// all the remaining directories are dropped.
catalog.dropTable("db1", "tbl", ignoreIfNotExists = false, purge = false)
assert(!exists(newLocationPart2))
}
test("list partition names") {
val catalog = newBasicCatalog()
val newPart = CatalogTablePartition(Map("a" -> "1", "b" -> "%="), storageFormat)
catalog.createPartitions("db2", "tbl2", Seq(newPart), ignoreIfExists = false)
val partitionNames = catalog.listPartitionNames("db2", "tbl2")
assert(partitionNames == Seq("a=1/b=%25%3D", "a=1/b=2", "a=3/b=4"))
}
test("list partition names with partial partition spec") {
val catalog = newBasicCatalog()
val newPart = CatalogTablePartition(Map("a" -> "1", "b" -> "%="), storageFormat)
catalog.createPartitions("db2", "tbl2", Seq(newPart), ignoreIfExists = false)
val partitionNames1 = catalog.listPartitionNames("db2", "tbl2", Some(Map("a" -> "1")))
assert(partitionNames1 == Seq("a=1/b=%25%3D", "a=1/b=2"))
// Partial partition specs including "weird" partition values should use the unescaped values
val partitionNames2 = catalog.listPartitionNames("db2", "tbl2", Some(Map("b" -> "%=")))
assert(partitionNames2 == Seq("a=1/b=%25%3D"))
val partitionNames3 = catalog.listPartitionNames("db2", "tbl2", Some(Map("b" -> "%25%3D")))
assert(partitionNames3.isEmpty)
}
test("list partitions with partial partition spec") {
val catalog = newBasicCatalog()
val parts = catalog.listPartitions("db2", "tbl2", Some(Map("a" -> "1")))
assert(parts.length == 1)
assert(parts.head.spec == part1.spec)
// if no partition is matched for the given partition spec, an empty list should be returned.
assert(catalog.listPartitions("db2", "tbl2", Some(Map("a" -> "unknown", "b" -> "1"))).isEmpty)
assert(catalog.listPartitions("db2", "tbl2", Some(Map("a" -> "unknown"))).isEmpty)
}
test("SPARK-21457: list partitions with special chars") {
val catalog = newBasicCatalog()
assert(catalog.listPartitions("db2", "tbl1").isEmpty)
val part1 = CatalogTablePartition(Map("a" -> "1", "b" -> "i+j"), storageFormat)
val part2 = CatalogTablePartition(Map("a" -> "1", "b" -> "i.j"), storageFormat)
catalog.createPartitions("db2", "tbl1", Seq(part1, part2), ignoreIfExists = false)
assert(catalog.listPartitions("db2", "tbl1", Some(part1.spec)).map(_.spec) == Seq(part1.spec))
assert(catalog.listPartitions("db2", "tbl1", Some(part2.spec)).map(_.spec) == Seq(part2.spec))
}
test("list partitions by filter") {
val tz = TimeZone.getDefault.getID
val catalog = newBasicCatalog()
def checkAnswer(
table: CatalogTable, filters: Seq[Expression], expected: Set[CatalogTablePartition])
: Unit = {
assertResult(expected.map(_.spec)) {
catalog.listPartitionsByFilter(table.database, table.identifier.identifier, filters, tz)
.map(_.spec).toSet
}
}
val tbl2 = catalog.getTable("db2", "tbl2")
checkAnswer(tbl2, Seq.empty, Set(part1, part2))
checkAnswer(tbl2, Seq('a.int <= 1), Set(part1))
checkAnswer(tbl2, Seq('a.int === 2), Set.empty)
checkAnswer(tbl2, Seq(In('a.int * 10, Seq(30))), Set(part2))
checkAnswer(tbl2, Seq(Not(In('a.int, Seq(4)))), Set(part1, part2))
checkAnswer(tbl2, Seq('a.int === 1, 'b.string === "2"), Set(part1))
checkAnswer(tbl2, Seq('a.int === 1 && 'b.string === "2"), Set(part1))
checkAnswer(tbl2, Seq('a.int === 1, 'b.string === "x"), Set.empty)
checkAnswer(tbl2, Seq('a.int === 1 || 'b.string === "x"), Set(part1))
intercept[AnalysisException] {
try {
checkAnswer(tbl2, Seq('a.int > 0 && 'col1.int > 0), Set.empty)
} catch {
// HiveExternalCatalog may be the first one to notice and throw an exception, which will
// then be caught and converted to a RuntimeException with a descriptive message.
case ex: RuntimeException if ex.getMessage.contains("MetaException") =>
throw new AnalysisException(ex.getMessage)
}
}
}
test("drop partitions") {
val catalog = newBasicCatalog()
assert(catalogPartitionsEqual(catalog, "db2", "tbl2", Seq(part1, part2)))
catalog.dropPartitions(
"db2", "tbl2", Seq(part1.spec), ignoreIfNotExists = false, purge = false, retainData = false)
assert(catalogPartitionsEqual(catalog, "db2", "tbl2", Seq(part2)))
resetState()
val catalog2 = newBasicCatalog()
assert(catalogPartitionsEqual(catalog2, "db2", "tbl2", Seq(part1, part2)))
catalog2.dropPartitions(
"db2", "tbl2", Seq(part1.spec, part2.spec), ignoreIfNotExists = false, purge = false,
retainData = false)
assert(catalog2.listPartitions("db2", "tbl2").isEmpty)
}
test("drop partitions when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.dropPartitions(
"does_not_exist", "tbl1", Seq(), ignoreIfNotExists = false, purge = false,
retainData = false)
}
intercept[AnalysisException] {
catalog.dropPartitions(
"db2", "does_not_exist", Seq(), ignoreIfNotExists = false, purge = false,
retainData = false)
}
}
test("drop partitions that do not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.dropPartitions(
"db2", "tbl2", Seq(part3.spec), ignoreIfNotExists = false, purge = false,
retainData = false)
}
catalog.dropPartitions(
"db2", "tbl2", Seq(part3.spec), ignoreIfNotExists = true, purge = false, retainData = false)
}
test("get partition") {
val catalog = newBasicCatalog()
assert(catalog.getPartition("db2", "tbl2", part1.spec).spec == part1.spec)
assert(catalog.getPartition("db2", "tbl2", part2.spec).spec == part2.spec)
intercept[AnalysisException] {
catalog.getPartition("db2", "tbl1", part3.spec)
}
}
test("get partition when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.getPartition("does_not_exist", "tbl1", part1.spec)
}
intercept[AnalysisException] {
catalog.getPartition("db2", "does_not_exist", part1.spec)
}
}
test("rename partitions") {
val catalog = newBasicCatalog()
val newPart1 = part1.copy(spec = Map("a" -> "100", "b" -> "101"))
val newPart2 = part2.copy(spec = Map("a" -> "200", "b" -> "201"))
val newSpecs = Seq(newPart1.spec, newPart2.spec)
catalog.renamePartitions("db2", "tbl2", Seq(part1.spec, part2.spec), newSpecs)
assert(catalog.getPartition("db2", "tbl2", newPart1.spec).spec === newPart1.spec)
assert(catalog.getPartition("db2", "tbl2", newPart2.spec).spec === newPart2.spec)
// The old partitions should no longer exist
intercept[AnalysisException] { catalog.getPartition("db2", "tbl2", part1.spec) }
intercept[AnalysisException] { catalog.getPartition("db2", "tbl2", part2.spec) }
}
test("rename partitions should update the location for managed table") {
val catalog = newBasicCatalog()
val table = CatalogTable(
identifier = TableIdentifier("tbl", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)
val tableLocation = new Path(catalog.getTable("db1", "tbl").location)
val mixedCasePart1 = CatalogTablePartition(
Map("partCol1" -> "1", "partCol2" -> "2"), storageFormat)
val mixedCasePart2 = CatalogTablePartition(
Map("partCol1" -> "3", "partCol2" -> "4"), storageFormat)
catalog.createPartitions("db1", "tbl", Seq(mixedCasePart1), ignoreIfExists = false)
assert(
new Path(catalog.getPartition("db1", "tbl", mixedCasePart1.spec).location) ==
new Path(new Path(tableLocation, "partCol1=1"), "partCol2=2"))
catalog.renamePartitions("db1", "tbl", Seq(mixedCasePart1.spec), Seq(mixedCasePart2.spec))
assert(
new Path(catalog.getPartition("db1", "tbl", mixedCasePart2.spec).location) ==
new Path(new Path(tableLocation, "partCol1=3"), "partCol2=4"))
// For external tables, RENAME PARTITION should not update the partition location.
val existingPartLoc = catalog.getPartition("db2", "tbl2", part1.spec).location
catalog.renamePartitions("db2", "tbl2", Seq(part1.spec), Seq(part3.spec))
assert(
new Path(catalog.getPartition("db2", "tbl2", part3.spec).location) ==
new Path(existingPartLoc))
}
test("rename partitions when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.renamePartitions("does_not_exist", "tbl1", Seq(part1.spec), Seq(part2.spec))
}
intercept[AnalysisException] {
catalog.renamePartitions("db2", "does_not_exist", Seq(part1.spec), Seq(part2.spec))
}
}
test("rename partitions when the new partition already exists") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.renamePartitions("db2", "tbl2", Seq(part1.spec), Seq(part2.spec))
}
}
test("alter partitions") {
val catalog = newBasicCatalog()
try {
val newLocation = newUriForDatabase()
val newSerde = "com.sparkbricks.text.EasySerde"
val newSerdeProps = Map("spark" -> "bricks", "compressed" -> "false")
// alter but keep spec the same
val oldPart1 = catalog.getPartition("db2", "tbl2", part1.spec)
val oldPart2 = catalog.getPartition("db2", "tbl2", part2.spec)
catalog.alterPartitions("db2", "tbl2", Seq(
oldPart1.copy(storage = storageFormat.copy(locationUri = Some(newLocation))),
oldPart2.copy(storage = storageFormat.copy(locationUri = Some(newLocation)))))
val newPart1 = catalog.getPartition("db2", "tbl2", part1.spec)
val newPart2 = catalog.getPartition("db2", "tbl2", part2.spec)
assert(newPart1.storage.locationUri == Some(newLocation))
assert(newPart2.storage.locationUri == Some(newLocation))
assert(oldPart1.storage.locationUri != Some(newLocation))
assert(oldPart2.storage.locationUri != Some(newLocation))
// alter other storage information
catalog.alterPartitions("db2", "tbl2", Seq(
oldPart1.copy(storage = storageFormat.copy(serde = Some(newSerde))),
oldPart2.copy(storage = storageFormat.copy(properties = newSerdeProps))))
val newPart1b = catalog.getPartition("db2", "tbl2", part1.spec)
val newPart2b = catalog.getPartition("db2", "tbl2", part2.spec)
assert(newPart1b.storage.serde == Some(newSerde))
assert(newPart2b.storage.properties == newSerdeProps)
// alter but change spec, should fail because new partition specs do not exist yet
val badPart1 = part1.copy(spec = Map("a" -> "v1", "b" -> "v2"))
val badPart2 = part2.copy(spec = Map("a" -> "v3", "b" -> "v4"))
intercept[AnalysisException] {
catalog.alterPartitions("db2", "tbl2", Seq(badPart1, badPart2))
}
} finally {
// Remember to restore the original current database, which we assume to be "default"
catalog.setCurrentDatabase("default")
}
}
test("alter partitions when database/table does not exist") {
val catalog = newBasicCatalog()
intercept[AnalysisException] {
catalog.alterPartitions("does_not_exist", "tbl1", Seq(part1))
}
intercept[AnalysisException] {
catalog.alterPartitions("db2", "does_not_exist", Seq(part1))
}
}
// --------------------------------------------------------------------------
// Functions
// --------------------------------------------------------------------------
test("basic create and list functions") {
val catalog = newEmptyCatalog()
catalog.createDatabase(newDb("mydb"), ignoreIfExists = false)
catalog.createFunction("mydb", newFunc("myfunc"))
assert(catalog.listFunctions("mydb", "*").toSet == Set("myfunc"))
}
test("create function when database does not exist") {
val catalog = newBasicCatalog()
intercept[NoSuchDatabaseException] {
catalog.createFunction("does_not_exist", newFunc())
}
}
test("create function that already exists") {
val catalog = newBasicCatalog()
intercept[FunctionAlreadyExistsException] {
catalog.createFunction("db2", newFunc("func1"))
}
}
test("drop function") {
val catalog = newBasicCatalog()
assert(catalog.listFunctions("db2", "*").toSet == Set("func1"))
catalog.dropFunction("db2", "func1")
assert(catalog.listFunctions("db2", "*").isEmpty)
}
test("drop function when database does not exist") {
val catalog = newBasicCatalog()
intercept[NoSuchDatabaseException] {
catalog.dropFunction("does_not_exist", "something")
}
}
test("drop function that does not exist") {
val catalog = newBasicCatalog()
intercept[NoSuchFunctionException] {
catalog.dropFunction("db2", "does_not_exist")
}
}
test("get function") {
val catalog = newBasicCatalog()
assert(catalog.getFunction("db2", "func1") ==
CatalogFunction(FunctionIdentifier("func1", Some("db2")), funcClass,
Seq.empty[FunctionResource]))
intercept[NoSuchFunctionException] {
catalog.getFunction("db2", "does_not_exist")
}
}
test("get function when database does not exist") {
val catalog = newBasicCatalog()
intercept[NoSuchDatabaseException] {
catalog.getFunction("does_not_exist", "func1")
}
}
test("rename function") {
val catalog = newBasicCatalog()
val newName = "funcky"
assert(catalog.getFunction("db2", "func1").className == funcClass)
catalog.renameFunction("db2", "func1", newName)
intercept[NoSuchFunctionException] { catalog.getFunction("db2", "func1") }
assert(catalog.getFunction("db2", newName).identifier.funcName == newName)
assert(catalog.getFunction("db2", newName).className == funcClass)
intercept[NoSuchFunctionException] { catalog.renameFunction("db2", "does_not_exist", "me") }
}
test("rename function when database does not exist") {
val catalog = newBasicCatalog()
intercept[NoSuchDatabaseException] {
catalog.renameFunction("does_not_exist", "func1", "func5")
}
}
test("rename function when new function already exists") {
val catalog = newBasicCatalog()
catalog.createFunction("db2", newFunc("func2", Some("db2")))
intercept[FunctionAlreadyExistsException] {
catalog.renameFunction("db2", "func1", "func2")
}
}
test("alter function") {
val catalog = newBasicCatalog()
assert(catalog.getFunction("db2", "func1").className == funcClass)
val myNewFunc = catalog.getFunction("db2", "func1").copy(className = newFuncClass)
catalog.alterFunction("db2", myNewFunc)
assert(catalog.getFunction("db2", "func1").className == newFuncClass)
}
test("list functions") {
val catalog = newBasicCatalog()
catalog.createFunction("db2", newFunc("func2"))
catalog.createFunction("db2", newFunc("not_me"))
assert(catalog.listFunctions("db2", "*").toSet == Set("func1", "func2", "not_me"))
assert(catalog.listFunctions("db2", "func*").toSet == Set("func1", "func2"))
}
// --------------------------------------------------------------------------
// File System operations
// --------------------------------------------------------------------------
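// Helper below builds <uri>/child1/child2/... and checks it on the uri's
// filesystem, e.g. exists(db.locationUri, "my_table") is true iff the
// table directory exists under the database location.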
private def exists(uri: URI, children: String*): Boolean = {
val base = new Path(uri)
val finalPath = children.foldLeft(base) {
case (parent, child) => new Path(parent, child)
}
base.getFileSystem(new Configuration()).exists(finalPath)
}
test("create/drop database should create/delete the directory") {
val catalog = newBasicCatalog()
val db = newDb("mydb")
catalog.createDatabase(db, ignoreIfExists = false)
assert(exists(db.locationUri))
catalog.dropDatabase("mydb", ignoreIfNotExists = false, cascade = false)
assert(!exists(db.locationUri))
}
test("create/drop/rename table should create/delete/rename the directory") {
val catalog = newBasicCatalog()
val db = catalog.getDatabase("db1")
val table = CatalogTable(
identifier = TableIdentifier("my_table", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType().add("a", "int").add("b", "string"),
provider = Some(defaultProvider)
)
catalog.createTable(table, ignoreIfExists = false)
assert(exists(db.locationUri, "my_table"))
catalog.renameTable("db1", "my_table", "your_table")
assert(!exists(db.locationUri, "my_table"))
assert(exists(db.locationUri, "your_table"))
catalog.dropTable("db1", "your_table", ignoreIfNotExists = false, purge = false)
assert(!exists(db.locationUri, "your_table"))
val externalTable = CatalogTable(
identifier = TableIdentifier("external_table", Some("db1")),
tableType = CatalogTableType.EXTERNAL,
storage = CatalogStorageFormat(
Some(Utils.createTempDir().toURI),
None, None, None, false, Map.empty),
schema = new StructType().add("a", "int").add("b", "string"),
provider = Some(defaultProvider)
)
catalog.createTable(externalTable, ignoreIfExists = false)
assert(!exists(db.locationUri, "external_table"))
}
test("create/drop/rename partitions should create/delete/rename the directory") {
val catalog = newBasicCatalog()
val table = CatalogTable(
identifier = TableIdentifier("tbl", Some("db1")),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("partCol1", "int")
.add("partCol2", "string"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("partCol1", "partCol2"))
catalog.createTable(table, ignoreIfExists = false)
val tableLocation = catalog.getTable("db1", "tbl").location
val part1 = CatalogTablePartition(Map("partCol1" -> "1", "partCol2" -> "2"), storageFormat)
val part2 = CatalogTablePartition(Map("partCol1" -> "3", "partCol2" -> "4"), storageFormat)
val part3 = CatalogTablePartition(Map("partCol1" -> "5", "partCol2" -> "6"), storageFormat)
catalog.createPartitions("db1", "tbl", Seq(part1, part2), ignoreIfExists = false)
assert(exists(tableLocation, "partCol1=1", "partCol2=2"))
assert(exists(tableLocation, "partCol1=3", "partCol2=4"))
catalog.renamePartitions("db1", "tbl", Seq(part1.spec), Seq(part3.spec))
assert(!exists(tableLocation, "partCol1=1", "partCol2=2"))
assert(exists(tableLocation, "partCol1=5", "partCol2=6"))
catalog.dropPartitions("db1", "tbl", Seq(part2.spec, part3.spec), ignoreIfNotExists = false,
purge = false, retainData = false)
assert(!exists(tableLocation, "partCol1=3", "partCol2=4"))
assert(!exists(tableLocation, "partCol1=5", "partCol2=6"))
val tempPath = Utils.createTempDir()
// create partition with existing directory is OK.
val partWithExistingDir = CatalogTablePartition(
Map("partCol1" -> "7", "partCol2" -> "8"),
CatalogStorageFormat(
Some(tempPath.toURI),
None, None, None, false, Map.empty))
catalog.createPartitions("db1", "tbl", Seq(partWithExistingDir), ignoreIfExists = false)
tempPath.delete()
// create partition with non-existing directory will create that directory.
val partWithNonExistingDir = CatalogTablePartition(
Map("partCol1" -> "9", "partCol2" -> "10"),
CatalogStorageFormat(
Some(tempPath.toURI),
None, None, None, false, Map.empty))
catalog.createPartitions("db1", "tbl", Seq(partWithNonExistingDir), ignoreIfExists = false)
assert(tempPath.exists())
}
test("drop partition from external table should not delete the directory") {
val catalog = newBasicCatalog()
catalog.createPartitions("db2", "tbl1", Seq(part1), ignoreIfExists = false)
val partPath = new Path(catalog.getPartition("db2", "tbl1", part1.spec).location)
val fs = partPath.getFileSystem(new Configuration)
assert(fs.exists(partPath))
catalog.dropPartitions(
"db2", "tbl1", Seq(part1.spec), ignoreIfNotExists = false, purge = false, retainData = false)
assert(fs.exists(partPath))
}
}
/**
* A collection of utility fields and methods for tests related to the [[ExternalCatalog]].
*/
abstract class CatalogTestUtils {
// Unimplemented methods
val tableInputFormat: String
val tableOutputFormat: String
val defaultProvider: String
def newEmptyCatalog(): ExternalCatalog
// These fields must be lazy because they rely on fields that are not implemented yet
lazy val storageFormat = CatalogStorageFormat(
locationUri = None,
inputFormat = Some(tableInputFormat),
outputFormat = Some(tableOutputFormat),
serde = None,
compressed = false,
properties = Map.empty)
lazy val part1 = CatalogTablePartition(Map("a" -> "1", "b" -> "2"), storageFormat)
lazy val part2 = CatalogTablePartition(Map("a" -> "3", "b" -> "4"), storageFormat)
lazy val part3 = CatalogTablePartition(Map("a" -> "5", "b" -> "6"), storageFormat)
lazy val partWithMixedOrder = CatalogTablePartition(Map("b" -> "6", "a" -> "6"), storageFormat)
lazy val partWithLessColumns = CatalogTablePartition(Map("a" -> "1"), storageFormat)
lazy val partWithMoreColumns =
CatalogTablePartition(Map("a" -> "5", "b" -> "6", "c" -> "7"), storageFormat)
lazy val partWithUnknownColumns =
CatalogTablePartition(Map("a" -> "5", "unknown" -> "6"), storageFormat)
lazy val partWithEmptyValue =
CatalogTablePartition(Map("a" -> "3", "b" -> ""), storageFormat)
lazy val funcClass = "org.apache.spark.myFunc"
lazy val newFuncClass = "org.apache.spark.myNewFunc"
/**
* Creates a basic catalog, with the following structure:
*
* default
* db1
* db2
* - tbl1
* - tbl2
* - part1
* - part2
* - func1
* db3
* - view1
*/
def newBasicCatalog(): ExternalCatalog = {
val catalog = newEmptyCatalog()
// When testing against a real catalog, the default database may already exist
catalog.createDatabase(newDb("default"), ignoreIfExists = true)
catalog.createDatabase(newDb("db1"), ignoreIfExists = false)
catalog.createDatabase(newDb("db2"), ignoreIfExists = false)
catalog.createDatabase(newDb("db3"), ignoreIfExists = false)
catalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false)
catalog.createTable(newTable("tbl2", "db2"), ignoreIfExists = false)
catalog.createTable(newView("view1", Some("db3")), ignoreIfExists = false)
catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists = false)
catalog.createFunction("db2", newFunc("func1", Some("db2")))
catalog
}
def newFunc(): CatalogFunction = newFunc("funcName")
def newUriForDatabase(): URI = new URI(Utils.createTempDir().toURI.toString.stripSuffix("/"))
def newDb(name: String): CatalogDatabase = {
CatalogDatabase(name, name + " description", newUriForDatabase(), Map.empty)
}
def newTable(name: String, db: String): CatalogTable = newTable(name, Some(db))
def newTable(name: String, database: Option[String] = None): CatalogTable = {
CatalogTable(
identifier = TableIdentifier(name, database),
tableType = CatalogTableType.EXTERNAL,
storage = storageFormat.copy(locationUri = Some(Utils.createTempDir().toURI)),
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("a", "int")
.add("b", "string"),
provider = Some(defaultProvider),
partitionColumnNames = Seq("a", "b"),
bucketSpec = Some(BucketSpec(4, Seq("col1"), Nil)))
}
def newView(
name: String,
database: Option[String] = None): CatalogTable = {
val viewDefaultDatabase = database.getOrElse("default")
CatalogTable(
identifier = TableIdentifier(name, database),
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = new StructType()
.add("col1", "int")
.add("col2", "string")
.add("a", "int")
.add("b", "string"),
viewText = Some("SELECT * FROM tbl1"),
properties = Map(CatalogTable.VIEW_DEFAULT_DATABASE -> viewDefaultDatabase))
}
def newFunc(name: String, database: Option[String] = None): CatalogFunction = {
CatalogFunction(FunctionIdentifier(name, database), funcClass, Seq.empty[FunctionResource])
}
/**
* Whether the catalog's table partitions equal the ones given.
* Note: Hive sets some random serde things, so we just compare the specs here.
*/
def catalogPartitionsEqual(
catalog: ExternalCatalog,
db: String,
table: String,
parts: Seq[CatalogTablePartition]): Boolean = {
catalog.listPartitions(db, table).map(_.spec).toSet == parts.map(_.spec).toSet
}
}
|
bravo-zhang/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
|
Scala
|
apache-2.0
| 41,103 |
package controllers
import java.sql.Connection
import javax.inject.{Inject, Singleton}
import constraints.FormConstraints
import controllers.NeedLogin.Authenticated
import helpers.Sanitize.{forUrl => sanitize}
import models._
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.validation.Constraints._
import play.api.db.Database
import play.api.i18n.{Lang, Messages, MessagesProvider}
import play.api.mvc._
import scala.language.postfixOps
@Singleton
class EntryUserEntry @Inject() (
cc: MessagesControllerComponents,
fc: FormConstraints,
authenticated: Authenticated,
implicit val db: Database,
implicit val storeUserRepo: StoreUserRepo,
implicit val entryUserRegistrationRepo: EntryUserRegistrationRepo,
implicit val loginSessionRepo: LoginSessionRepo,
implicit val shoppingCartItemRepo: ShoppingCartItemRepo
) extends MessagesAbstractController(cc) with I18n {
def jaForm(implicit mp: MessagesProvider) = Form(
mapping(
"userName" -> text.verifying(fc.normalUserNameConstraint(): _*),
"password" -> tuple(
"main" -> text.verifying(fc.passwordConstraint: _*),
"confirm" -> text
).verifying(
Messages("confirmPasswordDoesNotMatch"), passwords => passwords._1 == passwords._2
),
"zip1" -> text.verifying(z => fc.zip1Pattern.matcher(z).matches),
"zip2" -> text.verifying(z => fc.zip2Pattern.matcher(z).matches),
"prefecture" -> number,
"address1" -> text.verifying(nonEmpty, maxLength(256)),
"address2" -> text.verifying(nonEmpty, maxLength(256)),
"address3" -> text.verifying(maxLength(256)),
"tel" -> text.verifying(Messages("error.number"), z => fc.telPattern.matcher(z).matches),
"fax" -> text.verifying(Messages("error.number"), z => fc.telOptionPattern.matcher(z).matches),
"firstName" -> text.verifying(fc.firstNameConstraint: _*),
"lastName" -> text.verifying(fc.lastNameConstraint: _*),
"firstNameKana" -> text.verifying(fc.firstNameConstraint: _*),
"lastNameKana" -> text.verifying(fc.lastNameConstraint: _*),
"email" -> text.verifying(fc.emailConstraint: _*)
)(entryUserRegistrationRepo.apply4Japan)(entryUserRegistrationRepo.unapply4Japan)
)
def userForm(implicit mp: MessagesProvider) = Form(
mapping(
"userName" -> text.verifying(fc.normalUserNameConstraint(): _*),
"firstName" -> text.verifying(fc.firstNameConstraint: _*),
"middleName" -> optional(text.verifying(fc.middleNameConstraint: _*)),
"lastName" -> text.verifying(fc.lastNameConstraint: _*),
"email" -> text.verifying(fc.emailConstraint: _*),
"password" -> tuple(
"main" -> text.verifying(fc.passwordConstraint: _*),
"confirm" -> text
).verifying(
Messages("confirmPasswordDoesNotMatch"), passwords => passwords._1 == passwords._2
)
)(PromoteAnonymousUser.apply)(PromoteAnonymousUser.unapply)
)
def showForm(url: String)(
implicit request: MessagesRequest[AnyContent]
): Result = {
db.withConnection { implicit conn =>
supportedLangs.preferred(langs) match {
case `japanese` =>
Ok(views.html.entryUserEntryJa(jaForm, Address.JapanPrefectures, sanitize(url)))
case `japan` =>
Ok(views.html.entryUserEntryJa(jaForm, Address.JapanPrefectures, sanitize(url)))
case _ =>
Ok(views.html.entryUserEntryJa(jaForm, Address.JapanPrefectures, sanitize(url)))
}
}
}
def startRegistrationAsEntryUser(url: String) = Action { implicit request: MessagesRequest[AnyContent] =>
db.withConnection { implicit conn =>
if (loginSessionRepo.fromRequest(request).isDefined) {
Redirect(url)
}
else showForm(url)
}
}
def submitUserJa(url: String) = Action { implicit request: MessagesRequest[AnyContent] => db.withConnection { implicit conn => {
if (loginSessionRepo.fromRequest(request).isDefined) {
Redirect(url)
}
else {
jaForm.bindFromRequest.fold(
formWithErrors => {
BadRequest(views.html.entryUserEntryJa(formWithErrors, Address.JapanPrefectures, sanitize(url)))
},
newUser => db.withConnection { implicit conn: Connection =>
if (newUser.isNaivePassword) {
BadRequest(
views.html.entryUserEntryJa(
jaForm.fill(newUser).withError("password.main", "naivePassword"),
Address.JapanPrefectures, sanitize(url)
)
)
}
else {
try {
val user = newUser.save(CountryCode.JPN, storeUserRepo.PasswordHashStretchCount())
Redirect(url).withSession {
(loginSessionRepo.loginUserKey, loginSessionRepo.serialize(user.id.get, System.currentTimeMillis + loginSessionRepo.sessionTimeout))
}
}
catch {
case e: UniqueConstraintException =>
BadRequest(
views.html.entryUserEntryJa(
jaForm.fill(newUser).withError("userName", "userNameIsTaken"),
Address.JapanPrefectures, sanitize(url)
)
)
case t: Throwable => throw t
}
}
}
)
}
}}}
def startRegisterCurrentUserAsEntryUser = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login = request.login
if (login.isAnonymousBuyer) Ok(
views.html.promoteAnonymousUser(
userForm.fill(
PromoteAnonymousUser(
"",
login.storeUser.firstName,
login.storeUser.middleName,
login.storeUser.lastName,
login.storeUser.email,
("", "")
)
).discardingErrors
)
)
else Redirect(routes.Application.index.url)
}
def promoteAnonymousUser = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login = request.login
if (login.isAnonymousBuyer) {
userForm.bindFromRequest.fold(
formWithErrors =>
BadRequest(views.html.promoteAnonymousUser(formWithErrors)),
newUser => db.withConnection { implicit conn: Connection =>
if (newUser.isNaivePassword) {
BadRequest(
views.html.promoteAnonymousUser(
userForm.fill(newUser).withError("password.main", "naivePassword")
)
)
}
else {
try {
if (! newUser.update(login)) {
throw new Error("Cannot update user " + login)
}
Redirect(routes.Application.index).flashing(
"message" -> Messages("anonymousUserPromoted")
)
}
catch {
case e: UniqueConstraintException =>
BadRequest(
views.html.promoteAnonymousUser(
userForm.fill(newUser).withError("userName", "userNameIsTaken")
)
)
case t: Throwable => throw t
}
}
}
)
}
else Redirect(routes.Application.index.url)
}
}
|
ruimo/store2
|
app/controllers/EntryUserEntry.scala
|
Scala
|
apache-2.0
| 7,222 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.betterers.spark
/**
* GIS extensions for SparkSQL
*/
package object gis {
/** Type for coordinates pair */
type Coordinate = (Double, Double)
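// Illustrative only: coordinates are plain Double pairs, so e.g.
//   val c: Coordinate = (12.49, 41.89)
// type-checks; the axis order convention is not fixed by this alias.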
}
|
drubbo/SparkGIS
|
src/main/scala/org/betterers/spark/gis/package.scala
|
Scala
|
apache-2.0
| 967 |
package elea
import elea.term._
import scalaz._
import Scalaz._
sealed trait Statement {
def apply(program: Program): Program
}
case class OldTermDef(name: String, term: Term) extends Statement {
def apply(program: Program) = program + (name -> term.withName(name))
def modifyTerm(f: Term => Term): OldTermDef =
copy(term = f(term))
}
case class ConstructorDef(constr: Constructor) extends Statement {
def apply(program: Program) =
program + (constr.name.toString -> constr)
}
object OldParser {
private sealed trait BinOp
private object BinOp {
case object Leq extends BinOp
case object Geq extends BinOp
case object Eq extends BinOp
case object And extends BinOp
case object Or extends BinOp
}
private class Rules(program: Program) {
import fastparse.WhitespaceApi
import fastparse.noApi._
val whitespace = {
import fastparse.all._
lazy val commentBody: P[Unit] = P(CharsWhile(_ != '*') ~/ "*" ~/ ("/" | commentBody))
(CharIn(" \\n\\r") | "/*" ~ commentBody).rep
}
val White = WhitespaceApi.Wrapper(whitespace)
import White._
val keywords = Set("fix", "fn", "case", "of", "else", "data", "let", "end", "rec", "unfold", "assert", "false", "true", "in")
val lowercase = P(CharIn('a' to 'z') | CharIn(Seq('_', 'α')))
val uppercase = P(CharIn('A' to 'Z') | CharIn('0' to '9') | CharIn(Seq('\'')))
val int: P[Int] = P((P(CharIn('1' to '9')) ~~ P(CharIn('0' to '9')).repX).!).map(Integer.parseInt(_))
val freshener: P[Int] = P("[" ~ int ~ "]")
val varName: P[Name] = P(P(lowercase ~~ (uppercase | lowercase).repX).! ~ freshener.?)
.filter(n => !keywords.contains(n._1) || n._2.isDefined)
.map(n => Name(n._1, n._2))
val definitionName: P[String] = P((uppercase | lowercase).repX(1).!)
val definedTerm: P[Term] = P("." ~~ definitionName ~/).map(n => program.definitionOf(n).get)
val fixIndex: P[Name] = P("[" ~ varName ~ "]")
val caseIndex: P[Case.Index] = P(("[" ~ varName ~ "]").?).map(_.map(Case.Index.fromName).getOrElse(Case.Index.fresh))
val termVar: P[Term] = P(varName).map(Var)
val simpleTerm: P[Term] = P(truth | falsity | bot | unfold | termVar | definedTerm | "(" ~ term ~ ")")
val unfold: P[Term] = P("unfold" ~/ definedTerm).map(_.unfold)
val fix: P[Fix] = P("fix" ~/ fixIndex.? ~ varName.rep(1) ~ "->" ~/ term)
.map(m => Fix(Lam(IList(m._2 : _*).toNel.get, m._3), m._1.map(Fix.finite).getOrElse(Fix.freshOmegaIndex)))
val lam: P[Term] = P("fn" ~/ varName.rep(1) ~ "->" ~/ term).map(m => Lam(IList(m._1 : _*), m._2))
val app: P[Term] = P(simpleTerm ~ simpleTerm.rep).map(m => m._1(m._2 : _*))
val bot: P[Term] = P(("_|_" | "⊥") ~/).map(_ => Bot)
val truth: P[Term] = P("true" ~/).map(_ => Logic.Truth)
val falsity: P[Term] = P("false" ~/).map(_ => Logic.Falsity)
val negation: P[Term] = P("!" ~/ term).map(Logic.not)
val binOp: P[BinOp] =
P("=<").map(_ => BinOp.Leq) |
P(">=").map(_ => BinOp.Geq) |
P("==").map(_ => BinOp.Eq) |
P("&&").map(_ => BinOp.And) |
P("||").map(_ => BinOp.Or)
val prop: P[Term] = P(app ~ binOp ~/ term).map { m =>
val left = m._1
val right = m._3
m._2 match {
case BinOp.Leq => Leq(left, right)
case BinOp.Geq => Leq(right, left)
case BinOp.Eq => Logic.equality(left, right)
case BinOp.And => Logic.and(left, right)
case BinOp.Or => Logic.or(left, right)
}
}
val assertion: P[Case] = P("assert" ~/ pattern ~ "<-" ~/ term ~ "in" ~/ term).map {
case (pattern, matchedTerm, branchTerm) =>
val branches = NonEmptyList[Branch](
PatternBranch(pattern, branchTerm),
DefaultBranch(Logic.Truth))
Case(matchedTerm, branches, Case.Index.fresh)
}
val caseOf: P[Case] = P("case" ~/ caseIndex ~ term ~ branch.rep(1) ~ "end" ~/).map(m => Case(m._2, IList(m._3 : _*).toNel.get, m._1))
val term: P[Term] = P(NoCut(prop) | negation | assertion | fix | lam | app | caseOf)
val pattern: P[Pattern] = P(definedTerm ~ varName.rep).map(m => Pattern(m._1.asInstanceOf[Constructor], IList(m._2 : _*)))
val branch: P[Branch] = {
val defaultBranch: P[Branch] = P("else" ~/ "->" ~/ term).map(DefaultBranch)
val patternBranch: P[Branch] = P(pattern ~/ "->" ~/ term).map(m => PatternBranch(m._1, m._2))
P("|" ~/ (defaultBranch | patternBranch))
}
val constructorDef: P[Constructor] = {
val typeArg: P[Boolean] = P("*").map(_ => true) | P("_").map(_ => false)
P(definitionName ~ typeArg.rep).map { m =>
val recArgs = m._2.indices.filter(i => m._2(i))
Constructor(Name(m._1), m._2.length, ISet.fromList(List(recArgs : _*)))
}
}
val data: P[Statement] = P("data" ~/ constructorDef).map(ConstructorDef)
val letrec: P[Statement] = for {
(defName, vars, body) <- P("let" ~ "rec" ~/ definitionName ~ varName.rep ~ "=" ~/ term)
} yield OldTermDef(defName, Fix(Lam(NonEmptyList(Name(defName), vars: _*), body), Fix.freshOmegaIndex))
val let: P[Statement] = for {
(defName, vars, body) <- P("let" ~ definitionName ~ varName.rep ~ "=" ~/ term)
} yield OldTermDef(defName, Lam(IList(vars: _*), body))
val statement: P[Option[Statement]] =
P(whitespace ~ (P(data | letrec | let).map(Some(_)) | P(End).map(_ => None)))
}
/**
* Parses the first definition in the given string.
*/
def parseStatement(text: String)(implicit program: Program): Option[(Statement, String)] = {
val parsed = new Rules(program).statement.parse(text).get
parsed.value match {
case None => None
case Some(stmt) => Some((stmt, text.substring(parsed.index)))
}
}
/**
* Parses statements one by one from a string
* @return Successive [[Program]] objects after each statement has been loaded.
*/
def parseAll(text: String)(termHandler: OldTermDef => OldTermDef)(implicit program: Program): Program =
Scalaz.unfold((text, program)) { case (text, program) =>
parseStatement(text)(program).map { case (stmt, remaining) =>
val newProgram = stmt match {
case stmt: OldTermDef => termHandler(stmt)(program)
case _ => stmt(program)
}
(newProgram, (remaining, newProgram))
}
}.last
def parseTerm(text: String)(implicit program: Program): Term =
new Rules(program).term.parse(text).get.value
}
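// Usage sketch (hypothetical): parsing assumes an implicit Program in scope for
// resolving `.name` references; its construction is not shown in this file.
//   implicit val program: Program = ???
//   val identity: Term = OldParser.parseTerm("fn x -> x")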
|
wsonnex/elea
|
src/main/scala/elea/OldParser.scala
|
Scala
|
mit
| 6,630 |
package finatra.quickstart
import com.google.inject.testing.fieldbinder.Bind
import com.twitter.finagle.http.Status._
import com.twitter.finatra.http.test.{EmbeddedHttpServer, HttpTest}
import com.twitter.inject.Mockito
import com.twitter.inject.server.FeatureTest
import com.twitter.util.Future
import finatra.quickstart.domain.TweetId
import finatra.quickstart.domain.http.{TweetLocation, TweetResponse}
import finatra.quickstart.firebase.FirebaseClient
import finatra.quickstart.services.IdService
class TwitterCloneFeatureTest extends FeatureTest with Mockito with HttpTest {
override val server = new EmbeddedHttpServer(new TwitterCloneServer)
@Bind val firebaseClient = smartMock[FirebaseClient]
@Bind val idService = smartMock[IdService]
/* Mock GET Request performed in TwitterCloneWarmup */
firebaseClient.get("/tweets/123.json")(manifest[TweetResponse]) returns Future(None)
"tweet creation" in {
idService.getId returns Future(TweetId("123"))
val savedStatus = TweetResponse(
id = TweetId("123"),
message = "Hello #FinagleCon",
location = Some(TweetLocation(37.7821120598956, -122.400612831116)),
nsfw = false)
firebaseClient.put("/tweets/123.json", savedStatus) returns Future.Unit
firebaseClient.get("/tweets/123.json")(manifest[TweetResponse]) returns Future(Option(savedStatus))
firebaseClient.get("/tweets/124.json")(manifest[TweetResponse]) returns Future(None)
firebaseClient.get("/tweets/125.json")(manifest[TweetResponse]) returns Future(None)
val result = server.httpPost(
path = "/tweet",
postBody = """
{
"message": "Hello #FinagleCon",
"location": {
"lat": "37.7821120598956",
"long": "-122.400612831116"
},
"nsfw": false
}""",
andExpect = Created,
withJsonBody = """
{
"id": "123",
"message": "Hello #FinagleCon",
"location": {
"lat": "37.7821120598956",
"long": "-122.400612831116"
},
"nsfw": false
}""")
server.httpGetJson[TweetResponse](
path = result.location.get,
andExpect = Ok,
withJsonBody = result.contentString)
}
"Post bad tweet" in {
server.httpPost(
path = "/tweet",
postBody = """
{
"message": "",
"location": {
"lat": "9999"
},
"nsfw": "abc"
}""",
andExpect = BadRequest,
withJsonBody = """
{
"errors" : [
"location.lat: [9999.0] is not between -85 and 85",
"location.long: field is required",
"message: size [0] is not between 1 and 140",
"nsfw: 'abc' is not a valid boolean"
]
}
""")
}
}
|
joecwu/finatra
|
examples/twitter-clone/src/test/scala/finatra/quickstart/TwitterCloneFeatureTest.scala
|
Scala
|
apache-2.0
| 2,816 |
/*
* Copyright 2012 OneCalendar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import dao.EventDao
import models.Event
import org.joda.time.DateTime
import play.api.data.Forms._
import play.api.data._
import play.api.mvc._
import play.api.libs.json._
import play.api.libs.json.Writes._
object EventsController extends Controller with MongoDBProdContext {
implicit val now = () => DateTime.now.getMillis
def addEvents = Action( Ok( views.html.addEvents() ) )
def addSingleEvent = Action { implicit request =>
val event:Event = eventForm.bindFromRequest.get
EventDao.saveEvent(event)
Ok( "évènement " + event + " ajouté dans la base 'OneCalendar'" )
}
def allEvents = Action { implicit request =>
import Application.eventWriter
val events = EventDao.findAllFromNow()
.map( event => event.copy(tags = event.tags.distinct) ) // TODO fix this at the source <=> make tags a Set and drop the duplicates on write
.sortWith { (e1,e2) => e1.begin.compareTo(e2.begin) < 0 }
Ok(Json.toJson(events))
}
// TODO all fields are required except description
private val eventForm = Form(
mapping(
"title" -> text,
"begindate" -> date,
"beginhour" -> nonEmptyText (5, 5),
"enddate" -> date,
"endhour" -> nonEmptyText (5, 5),
"location" -> text,
"description" -> text,
"tags" -> text
)( ( title, begindate, beginhour, enddate, endhour, location, description, tags ) => (
Event(
title = title,
location = location,
description = description,
begin = new DateTime( begindate ).plusHours( beginhour.split( ":" )(0).toInt ).plusMinutes( beginhour.split( ":" )(1).toInt ),
end = new DateTime( enddate ).plusHours( endhour.split( ":" )(0).toInt ).plusMinutes( endhour.split( ":" )(1).toInt ),
tags = cleanTags( tags )
))
)
( ( event: Event ) => Some( ( event.title, event.begin.toDate, "12345", event.end.toDate, "12345", event.location, event.description, event.tags.mkString( "," ) ) ) )
)
private def cleanTags( tags: String ): List[ String ] = {
tags.trim().toUpperCase.split(",").toList
}
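// e.g. cleanTags(" java,scala ") == List("JAVA", "SCALA"); note that values
// between commas are not individually trimmed.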
}
|
OneCalendar/OneCalendar
|
app/controllers/EventsController.scala
|
Scala
|
apache-2.0
| 2,921 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.effect
import quasar.fp.free
import scalaz._
/** Encapsulates boilerplate useful in defining lifted operations on free
* monads over effect algebras.
*/
abstract class LiftedOps[G[_], S[_]](implicit S: G :<: S) {
type FreeS[A] = Free[S, A]
def lift[A](ga: G[A]): FreeS[A] =
free.lift(ga).into[S]
}
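// Hypothetical usage sketch (not part of this file): a concrete ops module for
// some effect algebra `KVStore` could be built on LiftedOps like this:
//
//   sealed trait KVStore[A]
//   final case class Get(k: String) extends KVStore[Option[String]]
//   final case class Put(k: String, v: String) extends KVStore[Unit]
//
//   final class KVOps[S[_]](implicit S: KVStore :<: S)
//       extends LiftedOps[KVStore, S] {
//     def get(k: String): FreeS[Option[String]] = lift(Get(k))
//     def put(k: String, v: String): FreeS[Unit] = lift(Put(k, v))
//   }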
|
drostron/quasar
|
effect/src/main/scala/quasar/effect/LiftedOps.scala
|
Scala
|
apache-2.0
| 934 |
package org.company.app.models
import com.plasmaconduit.json.codegen.traits._
case class User(id: Int, username: String, password: String, email: String, items: List[Item], lastPurchase: Option[Item]) extends GenWriter with GenReader {
override val writerRep = GenObjectRep(Ignore("password"))
}
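// Illustrative intent (hypothetical JSON; the actual writer is produced by the
// jcg code generator): with Ignore("password"), a serialized User omits that
// field, roughly {"id":1,"username":"u","email":"u@x.com","items":[],...}.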
|
Agrosis/jcg
|
examples/src/main/scala/org/company/app/models/User.scala
|
Scala
|
mit
| 300 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.optimize
import org.apache.calcite.rel.RelNode
/**
* The query [[Optimizer]] that transforms relational expressions into
* semantically equivalent relational expressions.
*/
trait Optimizer {
/**
* Generates the optimized [[RelNode]] DAG from the original relational nodes.
* <p>NOTES:
* <p>1. The reused node in result DAG will be converted to the same RelNode.
* <p>2. If a root node requires retract changes on Stream, the node should be
* a [[org.apache.flink.table.sinks.BaseRetractStreamTableSink]] or
* a regular node with [[org.apache.flink.table.plan.trait.UpdateAsRetractionTrait]]
* which `updateAsRetraction` is true.
*
* @param roots the original relational nodes.
* @return a list of RelNode represents an optimized RelNode DAG.
*/
def optimize(roots: Seq[RelNode]): Seq[RelNode]
}
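// Illustrative only (not part of Flink): the simplest conforming Optimizer is
// an identity pass that returns the input DAG unchanged, e.g.
//
//   object IdentityOptimizer extends Optimizer {
//     override def optimize(roots: Seq[RelNode]): Seq[RelNode] = roots
//   }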
|
ueshin/apache-flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/optimize/Optimizer.scala
|
Scala
|
apache-2.0
| 1,696 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.benchmark
import org.openjdk.jmh.results.format.ResultFormatType
import org.openjdk.jmh.runner.Runner
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue}
import wvlet.airframe.benchmark.json.JSONBenchmark
import wvlet.airframe.launcher.{Launcher, argument, command, option}
import wvlet.airframe.metrics.ElapsedTime
import wvlet.log.{LogSupport, Logger}
/**
*/
object BenchmarkMain {
wvlet.airframe.log.init
private def launcher = Launcher.of[BenchmarkMain]
def main(argLine: String): Unit = {
launcher.execute(argLine)
Logger.clearAllHandlers
}
def main(args: Array[String]): Unit = {
launcher.execute(args)
Logger.clearAllHandlers
}
}
class BenchmarkMain(
@option(prefix = "-h,--help", description = "display help message", isHelp = true)
displayHelp: Boolean,
@option(prefix = "-f", description = "Result format: text, csv, scsv, json, latex")
resultFormat: Option[String] = None,
@option(prefix = "-o", description = "Result output file name")
resultOutput: Option[String] = None,
@option(prefix = "-wt", description = "warmup time (default: 1s)")
warmupTime: ElapsedTime = ElapsedTime.parse("1s")
) extends LogSupport {
@command(isDefault = true)
def default: Unit = {
info("Type --help to see the list of sub commands")
}
@command(description = "Run a benchmark quickly")
def bench_quick(
@option(prefix = "-i,--iteration", description = "The number of iteration (default: 1)")
iteration: Int = 1,
@option(prefix = "-mt", description = "measurement time (default: 0.25s)")
measurementTime: ElapsedTime = ElapsedTime.parse("0.25s"),
@option(prefix = "-F,--fork-count", description = "Fork Count (default: 0)")
forkCount: Int = 0,
@argument(description = "Target benchmark suite to run: json, msgpack")
targetPackage: Option[String] = None
): Unit = {
bench(
iteration = iteration,
warmupIteration = 0,
measurementTime = measurementTime,
forkCount = forkCount,
targetPackage = targetPackage
)
}
@command(description = "Run a benchmark")
def bench(
@option(prefix = "-i,--iteration", description = "The number of iteration (default: 10)")
iteration: Int = 10,
@option(prefix = "-w,--warmup", description = "The number of warm-up iteration (default: 5)")
warmupIteration: Int = 5,
@option(prefix = "-mt", description = "measurement time (default: 0.5s)")
measurementTime: ElapsedTime = ElapsedTime.parse("0.5s"),
@option(prefix = "-F,--fork-count", description = "Fork Count (default: 1)")
forkCount: Int = 1,
@argument(description = "Target benchmark suite to run: json, msgpack")
targetPackage: Option[String] = None
): Unit = {
info("Starting the benchmark")
var opt = new OptionsBuilder()
.forks(forkCount)
.measurementIterations(iteration)
.warmupIterations(warmupIteration)
.warmupTime(TimeValue.milliseconds(warmupTime.toMillis.toLong))
.measurementTime(TimeValue.milliseconds(measurementTime.toMillis.toLong))
.include(targetPackage.map(x => s".*${x}.*").getOrElse(".*"))
resultFormat.map { rf => opt = opt.resultFormat(ResultFormatType.valueOf(rf.toUpperCase())) }
resultOutput.map { out => opt = opt.result(out) }
new Runner(opt.build()).run()
}
@command(description = "Run JSON performance benchmark")
def json_perf(
@option(prefix = "-n", description = "The number of iteration (default: 10)")
iteration: Int = 10,
@option(prefix = "-b", description = "The number of block iteration (default: 10)")
blockIteration: Int = 10
): Unit = {
JSONBenchmark.runAll(N = iteration, B = blockIteration)
}
}
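// Example invocations (illustrative; command and option names are taken from
// the @command/@option annotations above):
//   BenchmarkMain.main("bench -i 5 -w 2 json")
//   BenchmarkMain.main("bench_quick msgpack")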
|
wvlet/airframe
|
airframe-benchmark/src/main/scala/wvlet/airframe/benchmark/BenchmarkMain.scala
|
Scala
|
apache-2.0
| 4,341 |
package net.gadgil
object Stub {
def main(args: Array[String]) {
// do something
}
}
class Stub {
// do something
}
|
navaidya/optionpricing
|
scenariomodeling/src/main/scala/Stub.scala
|
Scala
|
mit
| 127 |
package de.tu_berlin.formic.gatling.action.linear
import de.tu_berlin.formic.client.FormicSystem
import de.tu_berlin.formic.common.DataStructureInstanceId
import de.tu_berlin.formic.datastructure.linear.client.FormicString
import de.tu_berlin.formic.gatling.action.TimeMeasureCallback.CreateResponseTimeMeasureListener
import de.tu_berlin.formic.gatling.action.{SessionVariables, TimeMeasureCallback}
import io.gatling.commons.util.TimeHelper
import io.gatling.core.action.{Action, ChainableAction}
import io.gatling.core.session.{Expression, Session}
import io.gatling.core.stats.StatsEngine
/**
* @author Ronny Bräunlich
*/
case class LinearCreation(dataTypeInstanceId: Expression[String], statsEngine: StatsEngine, next: Action) extends ChainableAction {
override def name: String = "CreateLinearDataType action"
override def execute(session: Session): Unit = {
val start = TimeHelper.nowMillis
val formicSystemOption = session(SessionVariables.FORMIC_SYSTEM).asOption[FormicSystem]
val validatedDataTypeInstanceId = dataTypeInstanceId.apply(session)
validatedDataTypeInstanceId.foreach { id =>
formicSystemOption match {
case Some(formicSystem) =>
val callback = session(SessionVariables.TIMEMEASURE_CALLBACK).as[TimeMeasureCallback]
val string = new FormicString(callback.callbackMethod, formicSystem, DataStructureInstanceId.valueOf(id))
val modifiedSession = session.set(id, string)
callback.addListener(CreateResponseTimeMeasureListener(string.dataStructureInstanceId, start, session, statsEngine, name))
next ! modifiedSession
case None => throw new IllegalArgumentException("Users have to connect first!")
}
}
}
}
|
rbraeunlich/formic
|
formic-gatling/src/main/scala/de/tu_berlin/formic/gatling/action/linear/LinearCreation.scala
|
Scala
|
apache-2.0
| 1,739 |
/*
* Copyright (C) 2014 - 2019 Dennis Vriend <https://github.com/dnvriend>
* Copyright (C) 2019 - 2021 Lightbend Inc. <https://www.lightbend.com>
*/
package akka.persistence.jdbc.journal
import scala.concurrent.duration._
import akka.actor.Props
import akka.persistence.CapabilityFlag
import akka.persistence.jdbc.config._
import akka.persistence.jdbc.db.SlickExtension
import akka.persistence.jdbc.testkit.internal.H2
import akka.persistence.jdbc.testkit.internal.SchemaType
import akka.persistence.jdbc.util.ClasspathResources
import akka.persistence.jdbc.util.DropCreate
import akka.persistence.journal.JournalPerfSpec
import akka.persistence.journal.JournalPerfSpec.BenchActor
import akka.persistence.journal.JournalPerfSpec.Cmd
import akka.persistence.journal.JournalPerfSpec.ResetCounter
import akka.testkit.TestProbe
import com.typesafe.config.Config
import com.typesafe.config.ConfigFactory
import com.typesafe.config.ConfigValueFactory
import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.ScalaFutures
abstract class JdbcJournalPerfSpec(config: Config, schemaType: SchemaType)
extends JournalPerfSpec(config)
with BeforeAndAfterAll
with BeforeAndAfterEach
with ScalaFutures
with ClasspathResources
with DropCreate {
override protected def supportsRejectingNonSerializableObjects: CapabilityFlag = true
implicit lazy val ec = system.dispatcher
implicit def pc: PatienceConfig = PatienceConfig(timeout = 10.minutes)
override def eventsCount: Int = 1000
override def awaitDurationMillis: Long = 10.minutes.toMillis
override def measurementIterations: Int = 1
lazy val cfg = system.settings.config.getConfig("jdbc-journal")
lazy val journalConfig = new JournalConfig(cfg)
lazy val db = SlickExtension(system).database(cfg).database
override def beforeAll(): Unit = {
dropAndCreate(schemaType)
super.beforeAll()
}
override def afterAll(): Unit = {
db.close()
super.afterAll()
}
def actorCount = 100
private val commands = Vector(1 to eventsCount: _*)
"A PersistentActor's performance" must {
s"measure: persist()-ing $eventsCount events for $actorCount actors" in {
val testProbe = TestProbe()
val replyAfter = eventsCount
def createBenchActor(actorNumber: Int) =
system.actorOf(Props(classOf[BenchActor], s"$pid--$actorNumber", testProbe.ref, replyAfter))
val actors = 1.to(actorCount).map(createBenchActor)
measure(d => s"Persist()-ing $eventsCount * $actorCount took ${d.toMillis} ms") {
for (cmd <- commands; actor <- actors) {
actor ! Cmd("p", cmd)
}
for (_ <- actors) {
testProbe.expectMsg(awaitDurationMillis.millis, commands.last)
}
for (actor <- actors) {
actor ! ResetCounter
}
}
}
}
"A PersistentActor's performance" must {
s"measure: persistAsync()-ing $eventsCount events for $actorCount actors" in {
val testProbe = TestProbe()
val replyAfter = eventsCount
def createBenchActor(actorNumber: Int) =
system.actorOf(Props(classOf[BenchActor], s"$pid--$actorNumber", testProbe.ref, replyAfter))
val actors = 1.to(actorCount).map(createBenchActor)
measure(d => s"persistAsync()-ing $eventsCount * $actorCount took ${d.toMillis} ms") {
for (cmd <- commands; actor <- actors) {
actor ! Cmd("pa", cmd)
}
for (_ <- actors) {
testProbe.expectMsg(awaitDurationMillis.millis, commands.last)
}
for (actor <- actors) {
actor ! ResetCounter
}
}
}
}
}
class H2JournalPerfSpec extends JdbcJournalPerfSpec(ConfigFactory.load("h2-application.conf"), H2)
class H2JournalPerfSpecSharedDb extends JdbcJournalPerfSpec(ConfigFactory.load("h2-shared-db-application.conf"), H2)
class H2JournalPerfSpecPhysicalDelete extends H2JournalPerfSpec {
  // Config is immutable, so the Config returned by withValue must be kept.
  override lazy val cfg = system.settings.config
    .getConfig("jdbc-journal")
    .withValue("logicalDelete", ConfigValueFactory.fromAnyRef(false))
}
|
dnvriend/akka-persistence-jdbc
|
core/src/test/scala/akka/persistence/jdbc/journal/JdbcJournalPerfSpec.scala
|
Scala
|
apache-2.0
| 4,049 |
package com.emotioncity.soriento
import com.emotioncity.soriento.ReflectionUtils._
import com.emotioncity.soriento.testmodels._
import com.orientechnologies.orient.core.id.ORecordId
import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
/**
* Created by stream on 25.12.14.
*/
class ReflectionTest extends FunSuite with Matchers with ODb with BeforeAndAfter {
// Test is too raw
// test("it should be create instance of case class by name simple and recursively") {
// val simpleMap = Map("sField" -> "Test field")
// val simpleCaseClass = createCaseClass[Simple](simpleMap)
// simpleCaseClass should equal(Simple("Test field"))
//
// val complexMap = Map("iField" -> 2, "sField" -> "tt", "simple" -> Map("sField" -> "Simple"), "listField" -> List(Simple("Simple")))
// val complexCaseClass = createCaseClass[Complex](complexMap)
// val simple = Simple("Simple")
// complexCaseClass should equal(Complex(2, simple, sField = "tt", List(simple)))
// }
test("detect ORID in case class instance") {
val complexWithRid = ComplexWithRid(id = ORecordId.EMPTY_RECORD_ID, 1, Simple("tt"), "tt", Nil)
rid(complexWithRid) should not be empty
val computedRid = rid(complexWithRid)
computedRid.get should equal(ORecordId.EMPTY_RECORD_ID)
}
test("detect Option[ORID], ORID representation of @rid in case class instance") {
//------- Option[ORID]
val classWithOptionalRid = ClassWithOptionalRid(rid = Option(ORecordId.EMPTY_RECORD_ID), "name")
rid(classWithOptionalRid) should not be empty
val computedRid1 = rid(classWithOptionalRid)
computedRid1.get should equal(ORecordId.EMPTY_RECORD_ID)
val classWithOptionalRidNone = ClassWithOptionalRid(name = "name2")
rid(classWithOptionalRidNone) shouldBe empty
val computedRid2 = rid(classWithOptionalRidNone)
computedRid2 should equal(None)
//------ ORID
val classWithRid = ClassWithRid(rid = ORecordId.EMPTY_RECORD_ID, "name")
rid(classWithRid) should not be empty
val computedRid3 = rid(classWithRid)
computedRid3.get should equal(ORecordId.EMPTY_RECORD_ID)
    val classWithRidNull = ClassWithRid(name = "name2")
    rid(classWithRidNull) shouldBe empty
    val computedRid4 = rid(classWithRidNull)
computedRid4 should equal(None)
}
after {
dropOClass[Simple]
dropOClass[Complex]
}
}
|
b0c1/Soriento
|
src/test/scala/com/emotioncity/soriento/ReflectionTest.scala
|
Scala
|
apache-2.0
| 2,372 |
package pep_043
object solution {
@deprecated("Check not needed", "ever")
def isPandigital09(n: Seq[Int]): Boolean = n.forall(_ < 10) && n.toSet.size == 10
def primaryProperty(n: Seq[Int]): Boolean = {
val divisors = Seq(2, 3, 5, 7, 11, 13, 17)
val triplets = n.sliding(3).drop(1).map(_.mkString).map(_.toInt).toSeq
triplets.zip(divisors).forall(v => (v._1 % v._2) == 0)
}
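  // The triplet d4..d6 is paired with divisor 5 above, so the digit at index 5
  // must be 0 or 5; filtering on it up front shrinks the search space fivefold.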
val pandigital09 = (0 to 9).permutations.filter(s => s(5) == 5 || s(5) == 0)
def solve() = pandigital09.filter(primaryProperty).map(_.mkString("")).map(_.toLong).sum
}
|
filippovitale/pe
|
pe-solution/src/main/scala/pep_043/solution.scala
|
Scala
|
mit
| 572 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtInteger}
import uk.gov.hmrc.ct.computations.calculations.SummaryCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
case class CP99(value: Int) extends CtBoxIdentifier("Trade net allowances") with CtInteger
object CP99 extends Calculated[CP99, ComputationsBoxRetriever] with SummaryCalculator {
override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CP99 =
calculateTradeNetAllowancesForSummary(fieldValueRetriever.cp186(),
fieldValueRetriever.cp668(),
fieldValueRetriever.cp674(),
fieldValueRetriever.cp91(),
fieldValueRetriever.cp670())
}
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP99.scala
|
Scala
|
apache-2.0
| 1,465 |
package org.jetbrains.plugins.scala
package util.macroDebug
import java.awt.event.MouseEvent
import java.util
import com.intellij.psi._
import lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.icons.Icons
import com.intellij.codeHighlighting.Pass
import com.intellij.codeInsight.daemon._
import com.intellij.codeInsight.daemon.impl.PsiElementListNavigator
import com.intellij.ide.util.gotoByName.GotoFileCellRenderer
import com.intellij.navigation.GotoRelatedItem
import com.intellij.openapi.editor.markup.GutterIconRenderer
import com.intellij.openapi.util.TextRange
import collection.GenIterable
import scala.Some
import org.jetbrains.plugins.scala.worksheet.ui.WorksheetEditorPrinter
import com.intellij.openapi.fileEditor.FileEditorManager
import com.intellij.openapi.editor.Editor
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScExpression, MethodInvocation}
import com.intellij.psi.codeStyle.CodeStyleManager
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import com.intellij.psi.{NavigatablePsiElement, PsiDocumentManager, PsiElement}
import com.intellij.util.Function
/**
* User: Dmitry Naydanov
* Date: 11/7/12
*/
class GoToExpandedMacroCallProviderExt extends LineMarkerProvider {
def getLineMarkerInfo(element: PsiElement): LineMarkerInfo[_ <: PsiElement] = null
def collectSlowLineMarkers(elements: util.List[PsiElement], result: util.Collection[LineMarkerInfo[_ <: PsiElement]]) {
ScalaMacroDebuggingUtil.allMacroCalls.clear()
if (!ScalaMacroDebuggingUtil.isEnabled || elements.isEmpty) return
val first = elements get 0
val file = first.getContainingFile
val synFile = file match {
case scalaFile: ScalaFile if ScalaMacroDebuggingUtil tryToLoad scalaFile => Some(scalaFile)
case _ => None
}
import scala.collection.JavaConversions._
val macrosFound = elements filter ScalaMacroDebuggingUtil.isMacroCall
if (macrosFound.length == 0) return
macrosFound foreach {
case macroCall =>
val markerInfo = new RelatedItemLineMarkerInfo[PsiElement](macroCall, macroCall.getTextRange, Icons.NO_SCALA_SDK,
Pass.UPDATE_OVERRIDEN_MARKERS, new Function[PsiElement, String] {
def fun(param: PsiElement): String = {
if (!ScalaMacroDebuggingUtil.macrosToExpand.contains(macroCall)) {
"Expand macro"
} else {
"Collapse macro"
}
}
},
new GutterIconNavigationHandler[PsiElement] {
def navigate(mouseEvent: MouseEvent, elt: PsiElement) {
if (ScalaMacroDebuggingUtil.macrosToExpand.contains(elt)) {
ScalaMacroDebuggingUtil.macrosToExpand.remove(elt)
} else {
ScalaMacroDebuggingUtil.macrosToExpand.add(elt)
}
ScalaMacroDebuggingUtil.expandMacros(elt.getProject)
}
}, GutterIconRenderer.Alignment.RIGHT, util.Arrays.asList[GotoRelatedItem]())
result add markerInfo
ScalaMacroDebuggingUtil.allMacroCalls.add(macroCall)
}
}
}
|
SergeevPavel/intellij-scala
|
src/org/jetbrains/plugins/scala/util/macroDebug/GoToExpandedMacroCallProviderExt.scala
|
Scala
|
apache-2.0
| 3,310 |
import scala.reflect.runtime.universe._
class ImaginaryCanBuildFrom[-From, -Elem, +To]
class CompletelyIndependentList[+A] {
type Repr <: CompletelyIndependentList[A]
def map[B, That](f: A => B)(implicit cbf: ImaginaryCanBuildFrom[Repr, B, That]): That = ???
def distinct(): CompletelyIndependentList[A] = ???
}
object Test {
var failed = false
def expectFailure[T](body: => T): Boolean = {
try { val res = body ; failed = true ; println(res + " failed to fail.") ; false }
catch { case _: AssertionError => true }
}
/** Attempt to use a method type as a type argument - expect failure. */
def tcon[T: TypeTag](args: Type*) = appliedType(typeOf[T].typeConstructor, args.toList)
def cil = typeOf[CompletelyIndependentList[Int]]
def map = cil.member(TermName("map")).asMethod
def distinct = cil.member(TermName("distinct")).asMethod
def main(args: Array[String]): Unit = {
// Need the assert in there to fail.
// expectFailure(println(tcon[CompletelyIndependentList[Int]](map)))
// expectFailure(tcon[CompletelyIndependentList[Int]](distinct))
// Why is the first map signature printing showing an
// uninitialized symbol?
//
// [B <: <?>, That <: <?>](f: <?>)(implicit cbf: <?>)That
//
println(map.info)
println(map.infoIn(cil))
println(distinct.info)
if (failed) sys.exit(1)
}
}
|
folone/dotty
|
tests/pending/run/fail-non-value-types.scala
|
Scala
|
bsd-3-clause
| 1,378 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.scaladsl.registry
import java.net.URI
import akka.NotUsed
import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.deser.MessageSerializer.{ NegotiatedDeserializer, NegotiatedSerializer }
import com.lightbend.lagom.scaladsl.api.deser.{ MessageSerializer, StrictMessageSerializer }
import com.lightbend.lagom.scaladsl.api.transport.{ MessageProtocol, Method }
import com.lightbend.lagom.scaladsl.api.{ Descriptor, Service, ServiceAcl, ServiceCall }
import play.api.libs.functional.syntax._
import play.api.libs.json._
import scala.collection.immutable
import scala.collection.immutable.Seq
/**
* This mirrors com.lightbend.lagom.internal.javadsl.registry.ServiceRegistry. The service locator implements the
* javadsl version, but this is used to talk to it from Scala apps, and so they must be kept in sync.
*/
trait ServiceRegistry extends Service {
def register(name: String): ServiceCall[ServiceRegistryService, NotUsed]
def unregister(name: String): ServiceCall[NotUsed, NotUsed]
def lookup(name: String): ServiceCall[NotUsed, URI]
def registeredServices: ServiceCall[NotUsed, immutable.Seq[RegisteredService]]
import Service._
import ServiceRegistry._
def descriptor: Descriptor = {
named(ServiceName).withCalls(
restCall(Method.PUT, "/services/:id", register _),
restCall(Method.DELETE, "/services/:id", this.unregister _),
restCall(Method.GET, "/services/:id", lookup _),
pathCall("/services", registeredServices)
).withLocatableService(false)
}
}
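// Illustrative client-side call (assumes an implemented `registryClient: ServiceRegistry`):
//   registryClient.lookup("my-service").invoke() // Future[URI], served by GET /services/my-service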
object ServiceRegistry {
val ServiceName = "lagom-service-registry"
implicit val uriMessageSerializer: MessageSerializer[URI, ByteString] = new StrictMessageSerializer[URI] {
private val serializer = new NegotiatedSerializer[URI, ByteString] {
override def serialize(message: URI): ByteString = ByteString.fromString(message.toString, "utf-8")
override val protocol: MessageProtocol = MessageProtocol.empty.withContentType("text/plain").withCharset("utf-8")
}
override def serializerForRequest = serializer
override def serializerForResponse(acceptedMessageProtocols: Seq[MessageProtocol]) = serializer
override def deserializer(protocol: MessageProtocol): NegotiatedDeserializer[URI, ByteString] =
new NegotiatedDeserializer[URI, ByteString] {
override def deserialize(wire: ByteString) =
URI.create(wire.decodeString(protocol.charset.getOrElse("utf-8")))
}
}
}
case class RegisteredService(name: String, url: URI)
object RegisteredService {
import UriFormat.uriFormat
implicit val format: Format[RegisteredService] = Json.format[RegisteredService]
}
case class ServiceRegistryService(uri: URI, acls: immutable.Seq[ServiceAcl])
object ServiceRegistryService {
import UriFormat.uriFormat
implicit val methodFormat: Format[Method] =
(__ \ "name").format[String].inmap(new Method(_), _.name)
implicit val serviceAclFormat: Format[ServiceAcl] = (
(__ \ "method").formatNullable[Method] and
(__ \ "pathRegex").formatNullable[String]
).apply(ServiceAcl.apply, acl => (acl.method, acl.pathRegex))
implicit val format: Format[ServiceRegistryService] = Json.format[ServiceRegistryService]
}
object UriFormat {
implicit val uriFormat: Format[URI] =
implicitly[Format[String]].inmap(URI.create, _.toString)
}
|
edouardKaiser/lagom
|
dev/service-registry/devmode-scaladsl/src/main/scala/com/lightbend/lagom/internal/scaladsl/registry/ServiceRegistry.scala
|
Scala
|
apache-2.0
| 3,449 |
package gie.ut
import org.scalatest._
import org.scalatest.matchers.ShouldMatchers._
import net.liftweb.common.{Full, Loggable}
import net.liftweb.util.Props
import java.io.FileInputStream
class dummy_test extends FlatSpec with PrivateMethodTester with Loggable {
}
|
igorge/gbb
|
src/test/scala/dummy_ut.scala
|
Scala
|
gpl-2.0
| 269 |
import sbt._
import Keys._
import play.Project._
object ApplicationBuild extends Build {
val appName = "networkblame"
val appVersion = "1.0-SNAPSHOT"
val appDependencies = Seq(
// Add your project dependencies here,
//jdbc,
//anorm
"org.reactivemongo" %% "play2-reactivemongo" % "0.9"
)
val main = play.Project(appName, appVersion, appDependencies).settings(
// Add your own project settings here
scalaVersion := "2.10.0"
)
}
|
lukiano/networkblame
|
project/Build.scala
|
Scala
|
mit
| 481 |
package edu.uw.at.iroberts.wirefugue.pcap
import akka.util.ByteString
import edu.uw.at.iroberts.wirefugue.protocol.overlay.EtherType
import org.apache.kafka.common.serialization.Serializer
import scala.collection.mutable.ArrayBuffer
/**
* Created by Ian Robertson <[email protected]> on 4/5/17.
*/
/** IP protocol field name/value pairs */
abstract class Protocol(val name: String, val value: Byte)
object Protocol {
case object ICMP extends Protocol("ICMP", 0x01)
case object TCP extends Protocol("TCP", 0x06)
case object UDP extends Protocol("UDP", 0x11)
case class Other(override val value: Byte) extends Protocol("Unknown", value)
def fromByte(b: Byte): Protocol = b match {
case 0x01 => ICMP
case 0x06 => TCP
case 0x11 => UDP
case _ => Other(b)
}
}
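// Illustrative round-trips (the numbers are the IANA-assigned protocol values):
//   Protocol.fromByte(0x06) == Protocol.TCP
//   Protocol.fromByte(0x2f) == Protocol.Other(0x2f)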
/** Operations for constructing Datagram objects */
object Datagram {
val headerLength = 20
def parse(bytes: IndexedSeq[Byte]): Datagram = {
require(bytes.length >= headerLength)
import ByteSeqOps._
// IHL (Internet Header Length) is the number of
// 32-bit words in the IP header.
val ihl = bytes(0) & 0x0f
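    // e.g. ihl = 5 (the minimum, a header with no options) means 5 * 4 = 20 bytes.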
Datagram(
version = ((bytes(0) & 0xf0) >>> 4).toByte,
ihl = ihl.toByte,
dscpAndECN = bytes(1),
      totalLength = bytes.slice(2, 4).getInt16BE,
identification = bytes.slice(4, 6).getInt16BE,
flags = ((bytes(6) & 0xff) >> 5).toByte,
fragmentOffset = (bytes.slice(6, 8).getUInt16BE & 0x1fff).toShort,
ttl = bytes(8),
protocol = Protocol.fromByte(bytes(9)),
headerChecksum = bytes.slice(10, 12).getInt16BE,
sourceIP = IPAddress(bytes.slice(12, 16)),
destinationIP = IPAddress(bytes.slice(16, 20)),
data = ByteString(bytes.drop(ihl * 4): _*)
)
}
val fromFrame: PartialFunction[EthernetFrame, Datagram] = {
case frame: EthernetFrame
if frame.etherType == EtherType.IPv4 =>
parse(frame.payload)
}
}
/** Representation of an IPv4 packet a.k.a. datagram
*
* @param version The IP version
* @param totalLength The length including payload data
* @param ttl Time-to-live; interpret as unsigned
* @param protocol The encapsulated protocol
* @param sourceIP Source IP Address
* @param destinationIP Destination IP Address
* @param data Payload bytes
*/
// TODO: add more fields
case class Datagram private (
version: Byte = 0x04.toByte,
ihl: Byte = 0x05.toByte,
dscpAndECN: Byte = 0x00.toByte,
totalLength: Short, // NB: Unsigned!
identification: Short = 0x0000.toShort,
flags: Byte = 0x00.toByte,
fragmentOffset: Short = 0x0000.toShort,
ttl: Byte = 128.toByte, // NB: Unsigned!
protocol: Protocol = Protocol.ICMP,
headerChecksum: Short = 0x0000.toShort,
sourceIP: IPAddress = IPAddress(IndexedSeq[Byte](0, 0, 0, 0)),
destinationIP: IPAddress = IPAddress(IndexedSeq[Byte](0, 0, 0, 0)),
data: ByteString = ByteString()
) {
override def toString() = {
val protoStr = f"proto ${protocol.name} (0x${protocol.value}%02x)"
    // ttl is stored as a signed Byte; mask to recover the unsigned wire value
    val ttlUnsigned = ttl.toInt & 0xff
    s"$sourceIP > $destinationIP, IPv$version length $totalLength, ttl $ttlUnsigned, $protoStr [${data.length} bytes]"
}
def toBytes: Array[Byte] = {
def serializeShort(x: Short): Array[Byte] = Array(
((x >> 8) & 0xff).toByte,
(x & 0xff).toByte
)
val buf: ArrayBuffer[Byte] = new ArrayBuffer[Byte](ihl * 4)
val versionAndIHL: Byte = (((version & 0x0f) << 4) | (ihl & 0x0f)).toByte
val flagsAndFragmentOffset: Short = (((flags & 0x07) << 13) | (fragmentOffset & 0x1fff)).toShort
buf += versionAndIHL
buf += dscpAndECN
buf ++= serializeShort(totalLength)
buf ++= serializeShort(identification)
buf ++= serializeShort(flagsAndFragmentOffset)
buf += ttl
buf += protocol.value
buf ++= serializeShort(headerChecksum)
buf ++= sourceIP.bytes
buf ++= destinationIP.bytes
buf ++= data
buf.toArray
}
  /** Convenience constructor: computes ihl and totalLength for an optionless header. */
  def mkDatagram(
    version: Int,
    dscpAndECN: Int,
    identification: Int,
    flags: Int,
    fragmentOffset: Int,
    ttl: Int,
    protocol: Int,
    headerChecksum: Int,
    sourceIP: Int,
    destinationIP: Int,
    data: TraversableOnce[Byte]
  ): Datagram = {
    require(version >= 0x0 && version <= 0xf)
    require(fragmentOffset >= 0 && fragmentOffset <= 0x1fff)
    val ihl = 5 // no options: the header is five 32-bit words (20 bytes)
    // Unpack a 32-bit address into network byte order.
    def ipFromInt(a: Int): IPAddress = IPAddress(IndexedSeq(
      ((a >>> 24) & 0xff).toByte,
      ((a >>> 16) & 0xff).toByte,
      ((a >>> 8) & 0xff).toByte,
      (a & 0xff).toByte
    ))
    val payload = data.toArray
    Datagram(
      version = version.toByte,
      ihl = ihl.toByte,
      dscpAndECN = dscpAndECN.toByte,
      totalLength = (ihl * 4 + payload.length).toShort,
      identification = identification.toShort,
      flags = flags.toByte,
      fragmentOffset = fragmentOffset.toShort,
      ttl = ttl.toByte,
      protocol = Protocol.fromByte(protocol.toByte),
      headerChecksum = headerChecksum.toShort,
      sourceIP = ipFromInt(sourceIP),
      destinationIP = ipFromInt(destinationIP),
      data = ByteString(payload)
    )
  }
import edu.uw.at.iroberts.wirefugue.pcap.InternetChecksum._
  /** Returns a copy with the header checksum recomputed over a header whose
    * checksum field is zeroed, as the IPv4 checksum algorithm requires. */
  def fixChecksum: Datagram = {
    val dg = copy(headerChecksum = 0.toShort)
    copy(headerChecksum = internetChecksum(dg.toBytes.slice(0, ihl * 4)))
  }
  /** A correct header sums to zero when the stored checksum is included. */
  def checksumValid: Boolean = {
    internetChecksum(toBytes.slice(0, ihl * 4)) == 0
  }
}
case object DatagramSerializer extends Serializer[Datagram] {
override def configure(configs: java.util.Map[String, _], isKey: Boolean) = {
/* Nothing to do */
}
override def close() = { /* Nothing to do */ }
override def serialize(topic: String, data: Datagram): Array[Byte] =
data.toBytes
}
object DatagramSerializerDemo extends App {
val ser = DatagramSerializer
val dg =
Datagram(
version = 4.toByte,
ihl = 5.toByte,
dscpAndECN = 0.toByte,
totalLength = 28.toShort,
identification = 1234.toShort,
flags = 0x00.toByte,
fragmentOffset = 0,
ttl = 128.toByte,
protocol = Protocol.ICMP,
headerChecksum = 0x0000,
sourceIP = IPAddress("192.168.0.1"),
destinationIP = IPAddress("172.16.2.16"),
data = ByteString(0x01, 0x00, 0x00, 0x00, 0x12, 0x34, 0x56, 0x78)
).fixChecksum
val data: Array[Byte] = ser.serialize("ip-packets", dg)
val output = data.grouped(2).map { case Array(x, y) => f"$x%02x$y%02x" }.mkString(" ")
println(output)
println(dg.checksumValid)
}
|
robertson-tech/wirefugue
|
sensor/src/main/scala/edu/uw/at/iroberts/wirefugue/pcap/Datagram.scala
|
Scala
|
gpl-3.0
| 6,326 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xml
import org.xml.sax.Attributes
class ElementFilterXMLReceiver(
xmlReceiver : XMLReceiver,
keep : (Int, String, String, Attributes) => Boolean
) extends SimpleForwardingXMLReceiver(xmlReceiver) {
  // nesting depth of the element currently being processed
  private var level: Int = 0
  // depth at which the currently filtered element started, or -1 when nothing is filtered
  private var filterLevel: Int = -1
override def startElement(uri: String, localname: String, qName: String, attributes: Attributes): Unit = {
if (filterLevel == -1) {
if (keep(level, uri, localname, attributes))
super.startElement(uri, localname, qName, attributes)
else
filterLevel = level
}
level += 1
}
override def endElement(uri: String, localname: String, qName: String): Unit = {
level -= 1
if (filterLevel == level)
filterLevel = -1
else if (filterLevel == -1)
super.endElement(uri, localname, qName)
}
override def startPrefixMapping(s: String, s1: String) : Unit = if (filterLevel == -1) super.startPrefixMapping(s, s1)
override def endPrefixMapping(s: String) : Unit = if (filterLevel == -1) super.endPrefixMapping(s)
override def ignorableWhitespace(chars: Array[Char], start: Int, length: Int) : Unit = if (filterLevel == -1) super.ignorableWhitespace(chars, start, length)
override def characters(chars: Array[Char], start: Int, length: Int) : Unit = if (filterLevel == -1) super.characters(chars, start, length)
override def skippedEntity(s: String) : Unit = if (filterLevel == -1) super.skippedEntity(s)
override def processingInstruction(s: String, s1: String) : Unit = if (filterLevel == -1) super.processingInstruction(s, s1)
}
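// A usage sketch (the downstream receiver and namespace are hypothetical, not
// from the original source): drop every element in a "private" namespace,
// together with its whole subtree.
//
//   val filtered = new ElementFilterXMLReceiver(
//     downstreamReceiver,
//     keep = (_, uri, _, _) => uri != "http://example.org/private"
//   )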
|
orbeon/orbeon-forms
|
src/main/scala/org/orbeon/oxf/xml/ElementFilterXMLReceiver.scala
|
Scala
|
lgpl-2.1
| 2,387 |
// scalac: -opt:l:none
//
// See comment in BCodeBodyBuilder
// -target:jvm-1.6 -opt:l:none
// target enables stack map frame generation
class C {
// can't just emit a call to ???, that returns value of type Nothing$ (not Int).
def f1: Int = ???
def f2: Int = throw new Error("")
def f3(x: Boolean) = {
var y = 0
// cannot assign an object of type Nothing$ to Int
if (x) y = ???
else y = 1
y
}
def f4(x: Boolean) = {
var y = 0
// tests that whatever is emitted after the throw is valid (what? depends on opts, presence of stack map frames)
if (x) y = throw new Error("")
else y = 1
y
}
def f5(x: Boolean) = {
// stack heights need to be the same. ??? looks to the jvm like returning a value of
// type Nothing$, need to drop or throw it.
println(
if (x) { ???; 10 }
else 20
)
}
def f6(x: Boolean) = {
println(
if (x) { throw new Error(""); 10 }
else 20
)
}
def f7(x: Boolean) = {
println(
if (x) throw new Error("")
else 20
)
}
def f8(x: Boolean) = {
println(
if (x) throw new Error("")
else 20
)
}
}
object Test extends App {
new C()
}
|
lrytz/scala
|
test/files/run/nothingTypeNoOpt.scala
|
Scala
|
apache-2.0
| 1,211 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.prac.innovation.model
import org.beangle.data.model.IntId
import org.beangle.data.model.pojo.{Coded, Named}
import org.beangle.data.model.pojo.TemporalOn
import org.beangle.data.model.pojo.Hierarchical
object StageType {
val Initial = 1
val Closure = 3
  val ApplyExemptionReply = 4 // this is a sub-stage of the Closure stage
}
class StageType extends IntId with Named with Coded with TemporalOn with Hierarchical[StageType] {
def this(id: Int) = {
this()
this.id = id
}
}
|
openurp/api
|
prac/src/main/scala/org/openurp/prac/innovation/model/StageType.scala
|
Scala
|
lgpl-3.0
| 1,224 |
package com.sksamuel.avro4s
import org.apache.avro.Schema
import org.apache.avro.generic.IndexedRecord
/**
* Brings together [[ToRecord]] and [[FromRecord]] in a single interface.
*/
trait RecordFormat[T] extends ToRecord[T] with FromRecord[T] with Serializable
/**
* Returns a [[RecordFormat]] that will convert to/from
 * instances of T and Avro records.
*/
object RecordFormat {
def apply[T](schema: Schema)(using toRecord: ToRecord[T], fromRecord: FromRecord[T]): RecordFormat[T] = new RecordFormat[T] {
override def from(record: IndexedRecord): T = fromRecord.from(record)
override def to(t: T): Record = toRecord.to(t)
}
}
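// Illustrative usage (assumes the derived AvroSchema/ToRecord/FromRecord
// instances avro4s normally provides for case classes):
//
//   case class Pizza(name: String, vegetarian: Boolean)
//   val format = RecordFormat[Pizza](AvroSchema[Pizza])
//   val record = format.to(Pizza("margherita", vegetarian = true))
//   val pizza  = format.from(record)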
|
sksamuel/avro4s
|
avro4s-core/src/main/scala/com/sksamuel/avro4s/RecordFormat.scala
|
Scala
|
apache-2.0
| 654 |
/*
Copyright 2013 Ilya Lakhin (Илья Александрович Лахин)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.lexis
sealed abstract class Matcher {
def apply(code: String, position: Int): Option[Int]
}
final case class StringMatcher(pattern: String) extends Matcher {
def apply(code: String, position: Int) =
Some(position + pattern.length)
.filter(
next =>
next <= code.length
&& code.substring(position, next) == pattern)
}
final case class CharSetMatcher(set: Set[Char], positive: Boolean = true)
extends Matcher {
def apply(code: String, position: Int) =
Some(position + 1)
.filter(next => next <= code.length)
.filter(_ => set.contains(code.charAt(position)) == positive)
def sup = copy(positive = !positive)
}
final case class CharRangeMatcher(from: Char,
to: Char,
positive: Boolean = true)
extends Matcher {
def apply(code: String, position: Int) =
Some(position + 1)
.filter(next => next <= code.length)
.filter(_ =>
(from <= code.charAt(position)
&& code.charAt(position) <= to) == positive)
def sup = copy(positive = !positive)
}
final case class RepetitionMatcher(sub: Matcher,
min: Int = 0,
max: Int = Int.MaxValue)
extends Matcher {
def apply(code: String, position: Int) = {
var count = 0
var finished = false
var current = position
while (count < max && !finished) {
sub(code, current) match {
case Some(next) =>
current = next
count += 1
case None => finished = true
}
}
Some(current).filter(_ => count >= min)
}
}
final case class ChoiceMatcher(first: Matcher, second: Matcher)
extends Matcher {
def apply(code: String, position: Int) =
first(code, position).orElse(second(code, position))
}
final case class SequentialMatcher(first: Matcher, second: Matcher)
extends Matcher {
def apply(code: String, position: Int) =
first(code, position).flatMap(next => second(code, next))
}
final case class PredicativeMatcher(sub: Matcher, positive: Boolean = true)
extends Matcher {
def apply(code: String, position: Int) =
Some(position)
.filter(_ => sub.apply(code, position).isDefined == positive)
}
object Matcher {
def zeroOrMore(sub: Matcher) = RepetitionMatcher(sub, 0)
def oneOrMore(sub: Matcher) = RepetitionMatcher(sub, 1)
def optional(sub: Matcher) = RepetitionMatcher(sub, 0, 1)
def repeat(sub: Matcher, times: Int) =
RepetitionMatcher(sub, times, times)
def anyOf(pattern: String) = CharSetMatcher(pattern.toSet)
def anyExceptOf(pattern: String) = CharSetMatcher(pattern.toSet).sup
def nothing() = CharSetMatcher(Set.empty)
def any() = CharSetMatcher(Set.empty).sup
def rangeOf(from: Char, to: Char) = CharRangeMatcher(from, to)
def chunk(pattern: String) = StringMatcher(pattern)
def test(sub: Matcher) = PredicativeMatcher(sub, positive = true)
def testNot(sub: Matcher) = PredicativeMatcher(sub, positive = false)
def sequence(steps: Matcher*): Matcher = steps.toList match {
case first :: Nil => first
case first :: second :: tail =>
(sequence _).apply(SequentialMatcher(first, second) :: tail)
case _ => nothing()
}
def choice(cases: Matcher*): Matcher = cases.toList match {
case first :: Nil => first
case first :: second :: tail =>
(choice _).apply(ChoiceMatcher(first, second) :: tail)
case _ => nothing()
}
}
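// A minimal usage sketch (not part of the original source): composing the
// factory methods above into a matcher for decimal numbers such as "3.14".
object MatcherExample {
  import Matcher._

  private val digit = rangeOf('0', '9')

  // one or more digits, optionally followed by "." and one or more digits
  val number: Matcher = sequence(
    oneOrMore(digit),
    optional(sequence(chunk("."), oneOrMore(digit)))
  )
  // number("3.14", 0) == Some(4); number("abc", 0) == None
}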
|
Eliah-Lakhin/papa-carlo
|
src/main/scala/name.lakhin.eliah.projects/papacarlo/lexis/Matcher.scala
|
Scala
|
apache-2.0
| 4,190 |
package com.eevolution.context.dictionary.infrastructure.repository
import com.eevolution.context.dictionary.domain.model.MigrationStep
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 25/10/17.
*/
/**
* Migration Step Mapping
*/
trait MigrationStepMapping {
val queryMigrationStep = quote {
querySchema[MigrationStep]("AD_MigrationStep",
_.migrationStepId-> "AD_MigrationStep_ID",
_.tenantId-> "AD_Client_ID",
_.organizationId-> "AD_Org_ID",
_.migrationId-> "AD_Migration_ID",
_.entityId-> "AD_Table_ID",
_.action-> "Action",
_.applyScript -> "Apply",
_.comments-> "Comments",
_.created-> "Created",
_.createdBy-> "CreatedBy",
_.dbType-> "DBType",
_.errorMsg-> "ErrorMsg",
_.isActive-> "IsActive",
_.recordId-> "Record_ID",
_.seqNo-> "SeqNo",
_.statusCode-> "StatusCode",
_.stepType-> "StepType",
_.updated-> "Updated",
_.updatedBy-> "UpdatedBy",
_.parse-> "Parse",
_.sqlStatement-> "SQLStatement",
_.rollBackStatement-> "RollBackStatement",
_.uuid-> "UUID")
}
}
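// Illustrative use of the mapping (hypothetical identifiers; assumes the quill
// context provided by DbContext is in scope, as the import above suggests):
//
//   val stepsForMigration = quote {
//     queryMigrationStep.filter(_.migrationId == lift(someMigrationId))
//   }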
|
adempiere/ADReactiveSystem
|
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/MigrationStepMapping.scala
|
Scala
|
gpl-3.0
| 2,060 |
/* Copyright (C) 2011 Mikołaj Sochacki mikolajsochacki AT gmail.com
* This file is part of VRegister (Virtual Register - Wirtualny Dziennik)
 * LICENCE: GNU AFFERO GENERAL PUBLIC LICENSE Version 3 (AGPLv3)
* See: <http://www.gnu.org/licenses/>.
*/
package eu.brosbit.opos.model.page
import _root_.net.liftweb.mongodb._
import java.util.Date
import org.bson.types.ObjectId
case class Comment(id: ObjectId, authorName: String, authorId: String, date: String, content: String) {
def mapString = Map[String, String](("id" -> id.toString), ("authorName" -> authorName),
("authorId" -> authorId), ("date" -> date), ("content" -> content))
}
object ForumThreadContent extends MongoDocumentMeta[ForumThreadContent] {
override def collectionName = "forumthreadcontent"
override def formats = super.formats + new ObjectIdSerializer + new DateSerializer
def create = new ForumThreadContent(ObjectId.get, "", Nil)
}
case class ForumThreadContent(var _id: ObjectId,
var content: String, var comments: List[Comment])
extends MongoDocument[ForumThreadContent] {
def meta = ForumThreadContent
}
|
mikolajs/osp
|
src/main/scala/eu/brosbit/opos/model/page/ForumThreadContent.scala
|
Scala
|
agpl-3.0
| 1,145 |
package builder
import builder.api_json.{ApiJsonServiceValidator, ServiceJsonServiceValidator}
import io.apibuilder.api.v0.models.{Original, OriginalType}
import io.apibuilder.spec.v0.models.Service
import core.{ServiceFetcher, VersionMigration}
import lib.{ServiceConfiguration, ServiceValidator}
import io.apibuilder.avro.AvroIdlServiceValidator
import io.apibuilder.swagger.SwaggerServiceValidator
object OriginalValidator {
// TODO: if valid, need to use ServiceSpecValidator.scala
def apply(
config: ServiceConfiguration,
original: Original,
fetcher: ServiceFetcher,
migration: VersionMigration = VersionMigration(internal = false)
): ServiceValidator[Service] = {
val validator = original.`type` match {
case OriginalType.ApiJson => {
ApiJsonServiceValidator(config, original.data, fetcher, migration)
}
case OriginalType.AvroIdl => {
AvroIdlServiceValidator(config, original.data)
}
case OriginalType.ServiceJson => {
ServiceJsonServiceValidator(original.data)
}
case OriginalType.Swagger => {
SwaggerServiceValidator(config, original.data)
}
case OriginalType.UNDEFINED(other) => {
sys.error(s"Invalid original type[$other]")
}
}
WithServiceSpecValidator(validator)
}
case class WithServiceSpecValidator(underlying: ServiceValidator[Service]) extends ServiceValidator[Service] {
override def validate(): Either[Seq[String], Service] = {
underlying.validate() match {
case Left(errors) => Left(errors)
case Right(service) => {
ServiceSpecValidator(service).errors match {
case Nil => Right(service)
case errors => Left(errors)
}
}
}
}
}
}
|
apicollective/apibuilder
|
core/src/main/scala/core/builder/OriginalValidator.scala
|
Scala
|
mit
| 1,783 |
package org.orbeon.oxf.xforms
import org.orbeon.oxf.util.StaticXPath
import org.orbeon.oxf.xml.dom.Converter._
import org.scalatest.funspec.AnyFunSpec
class XFormsStaticElementValueTest extends AnyFunSpec {
describe("Extract expression or constant from LHHA element") {
val Expected = List(
(
"error summary example",
<xf:label xmlns:xf="http://www.w3.org/2002/xforms" xmlns:xh="http://www.w3.org/1999/xhtml">
<xf:output
mediatype="text/html"
value="$label-or-placeholder"/>
<span class="fr-error-alert fr-error-alert-{@level}">
<xf:output
mediatype="text/html"
value="@alert"/>
</span>
</xf:label>.toDocument,
false,
Left("""concat(' ', string(($label-or-placeholder)[1]), ' <span class="', xxf:evaluate-avt('fr-error-alert fr-error-alert-{@level}'), '"> ', string((@alert)[1]), ' </span> ')"""),
true
),
(
"mixed example",
<xf:label xmlns:xf="http://www.w3.org/2002/xforms" xmlns:xh="http://www.w3.org/1999/xhtml">
<xh:span>
<xf:output ref="my/path"/>
<xh:i class="{'my-class'[$var = 42]}"></xh:i>
</xh:span>
<xh:span class="static-class" id="my-id"><xh:b>Bold text for <xf:output bind="my-bind"/>!</xh:b></xh:span>
</xf:label>.toDocument,
false,
Left("""concat(' <span> ', string((my/path)[1]), ' <i class="', xxf:evaluate-avt('{''my-class''[$var = 42]}'), '"></i> </span> <span class="static-class" id="prefix$my-id"><b>Bold text for ', string((bind('my-bind'))[1]), '!</b></span> ')"""),
true
),
(
"static with HTML",
<xf:label xmlns:xf="http://www.w3.org/2002/xforms"><i>Italics</i></xf:label>.toDocument,
false,
Right("<i>Italics</i>"),
true
),
(
"static without HTML",
<xf:label xmlns:xf="http://www.w3.org/2002/xforms">First name:</xf:label>.toDocument,
false,
Right("First name:"),
false
),
(
"mixed example within repeat with ids",
<xf:label xmlns:xf="http://www.w3.org/2002/xforms" xmlns:xh="http://www.w3.org/1999/xhtml">
<xh:span>
<xf:output id="my-output" ref="my/path"/>
<xh:i id="my-i" class="{'my-class'[$var = 42]}"></xh:i>
</xh:span>
<xh:span class="static-class" id="my-id"><xh:b>Bold text for <xf:output bind="my-bind"/>!</xh:b></xh:span>
</xf:label>.toDocument,
true,
Left("""concat(' <span> ', string((my/path)[1]), ' <i id="prefix$my-i⊙', string-join(for $p in xxf:repeat-positions() return string($p), '-'), '" class="', xxf:evaluate-avt('{''my-class''[$var = 42]}'), '"></i> </span> <span class="static-class" id="prefix$my-id⊙', string-join(for $p in xxf:repeat-positions() return string($p), '-'), '"><b>Bold text for ', string((bind('my-bind'))[1]), '!</b></span> ')"""),
true
),
)
for ((desc, doc, isWithinRepeat, expectedExprOrConst, expectedContainsHTML) <- Expected)
it(s"must pass for $desc") {
val (expressionOrConstant, containsHTML) =
XFormsStaticElementValue.findElementExpressionOrConstantDirectOrNested(
outerElem = doc.getRootElement,
containerPrefix = "prefix$",
isWithinRepeat = isWithinRepeat,
acceptHTML = true,
makeString = StaticXPath.makeStringExpression
)
assert(expectedExprOrConst == expressionOrConstant)
assert(expectedContainsHTML == containsHTML)
}
}
}
|
orbeon/orbeon-forms
|
xforms-compiler/jvm/src/test/scala/org/orbeon/oxf/xforms/XFormsStaticElementValueTest.scala
|
Scala
|
lgpl-2.1
| 3,715 |
package com.krux.hyperion.activity
import com.typesafe.config.ConfigFactory
import org.scalatest.FlatSpec
import org.scalatest.Matchers._
import com.krux.hyperion.adt.HString
import com.krux.hyperion.HyperionContext
import com.krux.hyperion.resource.SparkCluster
class SparkTaskActivitySpec extends FlatSpec {
implicit val hc = new HyperionContext(ConfigFactory.load("example"))
class MainClass
it should "handle EMR release 4.x.x" in {
val cluster = SparkCluster().withReleaseLabel("emr-4.6.0")
val activity = SparkTaskActivity("something.jar", MainClass)(cluster)
    activity.scriptRunner shouldBe ("s3://elasticmapreduce/libs/script-runner/script-runner.jar": HString)
    activity.jobRunner shouldBe ("s3://your-bucket/datapipeline/scripts/run-spark-step-release-label.sh": HString)
}
it should "be backwards compatible" in {
val cluster = SparkCluster()
val activity = SparkTaskActivity("something.jar", MainClass)(cluster)
activity.scriptRunner shouldBe ("s3://elasticmapreduce/libs/script-runner/script-runner.jar": HString)
assert(activity.jobRunner.toString.endsWith("run-spark-step.sh"))
}
}
|
sethyates/hyperion
|
core/src/test/scala/com/krux/hyperion/activity/SparkTaskActivitySpec.scala
|
Scala
|
apache-2.0
| 1,143 |
package com.sksamuel.elastic4s.indexes
import com.sksamuel.elastic4s.http.ElasticDsl
import com.sksamuel.elastic4s.testkit.ResponseConverterImplicits._
import com.sksamuel.elastic4s.testkit.{DualClient, DualElasticSugar}
import org.scalatest.{Matchers, WordSpec}
class DeleteIndexTest extends WordSpec with Matchers with ElasticDsl with DualElasticSugar with DualClient {
"delete index request" should {
"delete index" in {
execute {
createIndex("languages").mappings(
mapping("dialects").fields(
textField("type")
)
).shards(1).waitForActiveShards(1)
}.await
execute {
indexExists("languages")
}.await.exists shouldBe true
execute {
ElasticDsl.deleteIndex("languages")
}.await.acknowledged shouldBe true
execute {
indexExists("languages")
}.await.exists shouldBe false
}
"support multiple indexes" in {
execute {
createIndex("languages1").mappings(
mapping("dialects").fields(
textField("type")
)
)
}.await
execute {
createIndex("languages2").mappings(
mapping("dialects").fields(
textField("type")
)
)
}.await
execute {
indexExists("languages1")
}.await.exists shouldBe true
execute {
indexExists("languages2")
}.await.exists shouldBe true
execute {
ElasticDsl.deleteIndex("languages1", "languages2")
}.await.acknowledged shouldBe true
execute {
indexExists("languages1")
}.await.exists shouldBe false
execute {
indexExists("languages2")
}.await.exists shouldBe false
}
}
}
|
aroundus-inc/elastic4s
|
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/indexes/DeleteIndexTest.scala
|
Scala
|
apache-2.0
| 1,750 |
package pl.arapso.scaffoldings.scala.funproginscala
abstract class MyBoolean {
def ifThenElse[T](t: T, e: T): T
  def && (other: MyBoolean) = ifThenElse(other, MyFalse) // true && x is x; false && x is MyFalse
}
object MyTrue extends MyBoolean {
override def ifThenElse[T](t: T, e: T): T = t
}
object MyFalse extends MyBoolean {
override def ifThenElse[T](t: T, e: T): T = e
}
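// A quick sanity check (illustrative, not part of the original exercise):
object MyBooleanDemo {
  def main(args: Array[String]) {
    println((MyTrue && MyFalse) == MyFalse) // true: true && x reduces to x
    println((MyFalse && MyTrue) == MyFalse) // true: false && x short-circuits
  }
}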
object Exercise {
def main(args: Array[String]) {
}
}
class Nat {
}
object Zero {
}
class Succ {
}
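// A sketch of the Peano-number exercise the empty stubs above gesture at
// (assumed shape, not from the original source); the names differ so they do
// not clash with the Nat/Zero/Succ declarations.
object PeanoSketch {
  abstract class Natural {
    def isZero: Boolean
    def predecessor: Natural
    def successor: Natural = new Successor(this)
    def +(that: Natural): Natural
  }
  object Naught extends Natural {
    def isZero = true
    def predecessor = throw new NoSuchElementException("Naught.predecessor")
    def +(that: Natural) = that
  }
  class Successor(n: Natural) extends Natural {
    def isZero = false
    def predecessor = n
    def +(that: Natural) = new Successor(n + that)
  }
}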
|
arapso-scaffoldings/scala
|
scala-tutor/custom/src/main/scala/pl/arapso/scaffoldings/scala/funproginscala/Boolean.scala
|
Scala
|
apache-2.0
| 461 |
trait Elems {
sealed class Elem[A] extends Dummy
val UnitElement: Elem[Unit]
trait Dummy
}
class BadMatch[A <: Elems](a: A) {
private def toLuaValue(eX: a.Elem[_]): String = eX match {
case a.UnitElement => "" // type mismatch
}
}
|
som-snytt/dotty
|
tests/patmat/t9779.scala
|
Scala
|
apache-2.0
| 247 |
/*
* Copyright 2010 Michael Fortin <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*/
package org.brzy.calista.results
import java.nio.ByteBuffer
import org.brzy.calista.serializer.Serializers
import java.util.Date
import RowType._
import org.apache.commons.lang.builder.ToStringBuilder
/**
* A single row in the data store.
*
* @author Michael Fortin
*/
class Row(
_rowType: RowType,
_family: String,
_key: ByteBuffer,
_superColumn: ByteBuffer,
_column: ByteBuffer,
_value: ByteBuffer,
_timestamp: Date) {
def rowType = _rowType
def family = _family
def key = _key
def superColumn = _rowType match {
case Super => _superColumn
case SuperCounter => _superColumn
case _ => throw new InvalidRowTypeAccessException("superColumn does not apply to: " + _rowType)
}
def column = _column
def value = _value
def timestamp = _rowType match {
case Standard => _timestamp
case Super => _timestamp
case _ => throw new InvalidRowTypeAccessException("timestamp does not apply to: " + _rowType)
}
def keyAs[T: Manifest]: T = as[T](key)
def superColumnAs[T: Manifest]: T = as[T](superColumn)
def columnAs[T: Manifest]: T = as[T](column)
def valueAs[T: Manifest]: T = as[T](value)
protected[this] def as[T: Manifest](b: ByteBuffer): T =
Serializers.fromClassBytes(manifest[T].runtimeClass, b).asInstanceOf[T]
override def toString = new ToStringBuilder(this).append("rowType", rowType.toString).toString
}
|
m410/calista
|
src/main/scala/org/brzy/calista/results/Row.scala
|
Scala
|
apache-2.0
| 2,043 |
package circumflex
package web
import core._
import collection.Iterator
import collection.mutable.Map
/*!# Parameters
The `param` object is a convenient helper which is used to
retrieve the parameters of the current match or the current request:
  * the parameters are first resolved from `MatchResult` objects found in context;
  * if no match result contains a parameter with the specified name,
    then the value is looked up in the request parameters.
In other words, match results always override request parameters.
*/
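/*! For example (illustrative): with a route matching `/users/:id`, a request to
`/users/42?id=7` yields `param("id") == "42"` (the match result shadows the
request parameter), while `param.list("id")` still exposes both values. */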
object param extends Map[String, String] with KeyValueCoercion {
def +=(kv: (String, String)): this.type = this
def -=(key: String): this.type = this
def iterator: Iterator[(String, String)] = ctx.iterator.flatMap(p => p._2 match {
case m: MatchResult => m.params.iterator
case s: String => Seq(p._1 -> s).iterator
case _ => Iterator.empty
}) ++ requestOption.toSeq.flatMap(_.params).iterator
def get(key: String): Option[String] = iterator.find(_._1 == key).map(_._2)
override def default(key: String): String = ""
/*! Use the `list` method to retrieve multi-value parameters from request. */
def list(key: String): Seq[String] = iterator.filter(_._1 == key).map(_._2).toList
}
object paramList extends Map[String, Seq[String]] {
def +=(kv: (String, Seq[String])) = this
def -=(key: String) = this
def get(key: String) = requestOption
.map(r => r.params.list(key))
.orElse(Some(Nil))
def iterator = requestOption
.map(r => r.params.keys.map(k => k -> r.params.list(k)))
.getOrElse(Nil)
.iterator
}
|
inca/circumflex
|
web/src/main/scala/param.scala
|
Scala
|
bsd-2-clause
| 1,606 |
/*
* The Simplified BSD Licence follows:
* Copyright (c) 2014, Jeshan G. BABOOA
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*/
package co.jeshan.code.log4janalyser.utils.extended
import _root_.org.apache.log4j.chainsaw.vfs.VFSLogFilePatternReceiver
import _root_.org.tepi.filtertable.FilterTable
import org.apache.log4j.spi.LoggingEvent
import org.apache.log4j.Logger
import org.apache.log4j.chainsaw.LogFilePatternLayoutBuilder
import co.jeshan.code.log4janalyser.ui.WindowWrapper
/**
* User: jeshan
* Date: 26/10/13
* Time: 10:56
*/
class LogFilePatternReceiverExtended(url: String, log4jPattern: String, isTailing: Boolean, table: FilterTable) extends VFSLogFilePatternReceiver {
val defaultTimestampFormat = "HH:mm:ss,SSS"
val defaultWaitMillis = 500
val log = Logger.getLogger(getClass)
setFileURL(url)
setLogFormat(LogFilePatternLayoutBuilder.getLogFormatFromPatternLayout(log4jPattern))
setTailing(isTailing)
setAppendNonMatches(true)
setUseCurrentThread(false)
setWaitMillis(defaultWaitMillis)
setTimestampFormat(
if (log4jPattern.contains("%d{")) {
      val regex = ".*%d\\{(.+)}.*".r
val regex(timestampFormat) = log4jPattern
timestampFormat
}
else if (log4jPattern.contains("%d")) defaultTimestampFormat
else null
)
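  // e.g. a layout of "%d{yyyy-MM-dd HH:mm:ss} %m%n" yields "yyyy-MM-dd HH:mm:ss";
  // a bare "%d" falls back to defaultTimestampFormat, and no "%d" leaves it null.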
override def doPost(event: LoggingEvent) = {
val eventExtended = LoggingEventExtended.create(event)
WindowWrapper.printToTable(table, eventExtended)
}
override def shutdown() {
super.shutdown()
if(isTailing) {
log.info(s"Stopping listening events for $url")
}
}
def readFileAsync() {
activateOptions()
}
override def activateOptions(): Unit = {
super.activateOptions()
}
}
|
jeshan/log4janalyser
|
src/main/scala/co/jeshan/code/log4janalyser/utils/extended/LogFilePatternReceiverExtended.scala
|
Scala
|
bsd-2-clause
| 3,101 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest2_1_7
import org.jetbrains.plugins.scala.testingSupport.scalatest.finders.FindersApiTest
/**
* @author Roman.Shein
* @since 10.02.2015.
*/
class Scalatest2_11_2_1_7_FindersApiTest extends Scalatest2_11_2_1_7_Base with FindersApiTest {
}
|
ilinum/intellij-scala
|
test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest2_1_7/Scalatest2_11_2_1_7_FindersApiTest.scala
|
Scala
|
apache-2.0
| 322 |
package com.campudus.scycle.vdom
import com.campudus.scycle.dom._
import com.campudus.scycle.vdom.VirtualDom.{Insertion, Path, Replacement}
import org.scalajs.dom
import org.scalatest.FunSpec
class VirtualDomTest extends FunSpec {
describe("scalajs test setup") {
it("can tell if two divs are the same") {
val container = dom.document.createElement("div")
val div = dom.document.createElement("div")
container.appendChild(div)
assert(container == container)
assert(div == container.firstChild)
assert(div == container.firstChild.asInstanceOf[dom.Element])
}
}
describe("The virtual dom diffing algorithm") {
it("can apply null values") {
assert(VirtualDom(null) === null)
}
it("can apply all kinds of elements") {
def check[T <: Hyperscript](tagName: String, kind: Class[T]): Unit = {
val element = dom.document.createElement(tagName)
assert(kind.isInstance(VirtualDom(element)))
}
check("div", classOf[Div])
check("h1", classOf[H1])
check("span", classOf[Span])
check("hr", classOf[Hr])
check("input", classOf[Input])
check("label", classOf[Label])
check("button", classOf[Button])
check("p", classOf[P])
check("a", classOf[A])
}
    it("only yields replacements for the parts that actually changed") {
val first = Div(children = Seq(
Button(className = "get-first", children = Seq(Text("something a 1"))),
Button(className = "get-second", children = Seq(Text("something b 1"))),
Div(className = "user-details", children = Seq(
H1(className = "user-name", children = Seq(Text("(name)"))),
Div(className = "user-email", children = Seq(Text("(email)"))),
A(className = "user-website", href = "https://example.com", children = Seq(Text("(website)")))
))
))
val second = Div(children = Seq(
Button(className = "get-first", children = Seq(Text("something a 2"))),
Button(className = "get-second", children = Seq(Text("something b 2"))),
Div(className = "user-details", children = Seq(
H1(className = "user-name", children = Seq(Text("(name)"))),
Div(className = "user-email", children = Seq(Text("(email)"))),
A(className = "user-website", href = "https://example.com", children = Seq(Text("(website)")))
))
))
val diffs = VirtualDom.diff(first, second)
assert(diffs.length === 2)
assert(diffs.forall(diff => diff.isInstanceOf[Replacement]))
}
describe("doing replacements") {
it("detects the same element") {
val myH1 = H1("hello")
assert(VirtualDom.diff(myH1, myH1) === Nil)
}
it("replaces two different elements") {
val myH1 = H1("hello")
val mySpan = Span("bye")
assert(VirtualDom.diff(myH1, mySpan) === List(Replacement(Path(), mySpan)))
}
it("replaces same element with different attributes") {
val myH1a = H1("hello")
val myH1b = H1("bye")
assert(VirtualDom.diff(myH1a, myH1b) === List(Replacement(Path(), myH1b)))
}
it("only replaces a changed text if the tags above are the same") {
val firstDiv = Div(children = Seq(Text("hello")))
val secondDiv = Div(children = Seq(Text("bye")))
assert(VirtualDom.diff(firstDiv, secondDiv) === List(Replacement(Path(0), Text("bye"))))
}
it("can replace multiple child nodes") {
val before = Div(children = Seq(
Text("firstA"),
Text("firstB")
))
val after = Div(children = Seq(
Text("secondA"),
Text("secondB")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(0), Text("secondA")),
Replacement(Path(1), Text("secondB"))
))
}
it("replaces only changed child nodes") {
val before = Div(children = Seq(
Text("firstA"),
Text("firstB"),
Text("firstC")
))
val after = Div(children = Seq(
Text("secondA"),
Text("firstB"),
Text("secondC")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(0), Text("secondA")),
Replacement(Path(2), Text("secondC"))
))
}
it("can switch two elements by replacing them") {
val before = Div(children = Seq(
Text("a"),
Text("b"),
Text("c")
))
val after = Div(children = Seq(
Text("a"),
Text("c"),
Text("b")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(1), Text("c")),
Replacement(Path(2), Text("b"))
))
}
it("replaces child nodes recursively if necessary") {
val before = Div(children = Seq(
Text("firstA"),
Div(children = Seq(
Text("firstChildB1"),
Text("firstChildB2"),
Text("firstChildB3")
)),
Text("firstC")
))
val after = Div(children = Seq(
Text("firstA"),
Div(children = Seq(
Text("secondChildB1"),
Text("firstChildB2"),
Text("secondChildB3")
)),
Text("secondC")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(1, 0), Text("secondChildB1")),
Replacement(Path(1, 2), Text("secondChildB3")),
Replacement(Path(2), Text("secondC"))
))
}
it("replaces changed nodes with whole trees") {
val before = Div(children = Seq(
Text("firstA"),
Text("firstB"),
Text("firstC")
))
val after = Div(children = Seq(
Text("firstA"),
Div(children = Seq(
Text("secondChildB1"),
Text("secondChildB2"),
Div(children = Seq(Text("secondChildB3a")))
)),
Text("firstC")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(1), Div(children = Seq(
Text("secondChildB1"),
Text("secondChildB2"),
Div(children = Seq(Text("secondChildB3a")))
)))
))
}
}
describe("doing insertions") {
it("can handle null elements") {
val diffs = VirtualDom.diff(null, Div())
assert(diffs.length === 1)
assert(diffs.head.isInstanceOf[Insertion])
}
it("adds new elements") {
val before = Div(children = Seq(
Text("a")
))
val after = Div(children = Seq(
Text("a"),
Text("b")
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(1), Text("b"))
))
}
it("adds multiple new elements") {
val before = Div(children = Seq(
Text("a")
))
val after = Div(children = Seq(
Text("a"),
Text("b"),
Div(children = Seq(Text("c")))
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(1), Text("b")),
Insertion(Path(2), Div(children = Seq(Text("c"))))
))
}
it("can add elements before others") {
val before = Div(children = Seq(
Text("a")
))
val after = Div(children = Seq(
Text("b"),
Text("a")
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(0), Text("b"))
))
}
it("can add elements before others, not only at beginning") {
val before = Div(children = Seq(
Text("a"),
Text("b")
))
val after = Div(children = Seq(
Text("a"),
Text("c"),
Text("b")
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(1), Text("c"))
))
}
it("can add multiple elements with one before others") {
val before = Div(children = Seq(
Text("a"),
Text("b")
))
val after = Div(children = Seq(
Text("c"),
Text("a"),
Text("b"),
Text("d")
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(0), Text("c")),
Insertion(Path(3), Text("d"))
))
}
it("can add subtrees") {
val before = Div(children = Seq(
Text("a"),
Text("b")
))
val after = Div(children = Seq(
Text("a"),
Text("b"),
Div(children = Seq(Text("c")))
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(2), Div(children = Seq(Text("c"))))
))
}
it("can add subtrees before others") {
val before = Div(children = Seq(
Text("a"),
Text("b")
))
val after = Div(children = Seq(
Text("a"),
Div(children = Seq(Text("c"))),
Text("b")
))
assert(VirtualDom.diff(before, after) === List(
Insertion(Path(1), Div(children = Seq(Text("c"))))
))
}
it("does not incorrectly optimize") {
val before = Div(children = Seq(
Text("a"),
Text("b")
))
val after = Div(children = Seq(
Text("b"),
Text("a"),
Text("c")
))
assert(VirtualDom.diff(before, after) === List(
Replacement(Path(0), Text("b")),
Replacement(Path(1), Text("a")),
Insertion(Path(2), Text("c"))
))
}
}
}
describe("virtual dom") {
it("can map real div elements to virtual dom div elements") {
val realDiv = dom.document.createElement("div")
realDiv.setAttribute("class", "hello")
val virtualDiv = Div(className = "hello")
val result = VirtualDom(realDiv)
assert(result === virtualDiv)
}
it("can map real div elements with child elements to virtual dom div elements") {
val containerDiv = dom.document.createElement("div")
val childDiv = dom.document.createElement("div")
childDiv.setAttribute("class", "child")
containerDiv.setAttribute("class", "container")
containerDiv.appendChild(childDiv)
val virtualDiv = Div(className = "container", children = Seq(Div(className = "child")))
val result = VirtualDom(containerDiv)
assert(result === virtualDiv)
}
}
describe("removals") {
it("can remove child elements") {
val containerDiv = dom.document.createElement("div")
val childDiv = dom.document.createElement("div")
childDiv.setAttribute("class", "child")
containerDiv.setAttribute("class", "container")
containerDiv.appendChild(childDiv)
val virtualDiv = Div(className = "container", children = Seq())
val diff = VirtualDom.diff(VirtualDom(containerDiv), virtualDiv)
VirtualDom.update(containerDiv, diff)
assert(containerDiv.childNodes.length === 0)
}
}
describe("replacements") {
it("insertions in root are possible") {
val container = dom.document.createElement("div")
val test = Div(className = "test")
val diffs = VirtualDom.diff(null, test)
assert(container.children.length === 0)
VirtualDom.update(container, diffs)
assert(container.children.length > 0)
}
it("remove in root is possible") {
val container = dom.document.createElement("div")
val test = dom.document.createElement("div")
test.setAttribute("class", "test")
container.appendChild(test)
val diffs = VirtualDom.diff(VirtualDom(container), null)
assert(container.children.length > 0)
VirtualDom.update(container, diffs)
assert(container.children.length === 0)
}
it("empty replacement won't change elements") {
val div = dom.document.createElement("div")
val sub = dom.document.createElement("div")
div.appendChild(sub)
assert(sub == div.firstChild)
VirtualDom.update(div, List())
assert(sub == div.firstChild)
}
it("will reuse the element and just replace its attributes") {
val container = dom.document.createElement("div")
val realDiv = dom.document.createElement("div")
realDiv.setAttribute("class", "first")
container.appendChild(realDiv)
val firstDiv = VirtualDom(realDiv)
val secondDiv = Div(className = "second")
val diff = VirtualDom.diff(firstDiv, secondDiv)
VirtualDom.update(container, diff)
val resultDiv = container.firstChild
assert(resultDiv == realDiv)
}
it("can insert new elements and not change existing ones") {
val container = dom.document.createElement("div")
val realDivA = dom.document.createElement("div")
realDivA.setAttribute("class", "first")
container.appendChild(realDivA)
val containerBefore = VirtualDom(container)
val containerAfter = Div(children = Seq(
Div(className = "first"),
Div(className = "second")
))
val diff = VirtualDom.diff(containerBefore, containerAfter)
VirtualDom.update(container, diff)
val resultFirstDiv = container.firstChild
val resultSecondDiv = container.childNodes(1).asInstanceOf[dom.Element]
assert(resultFirstDiv == realDivA)
assert(resultSecondDiv.getAttribute("class") === "second")
}
it("can replace input values") {
val container = dom.document.createElement("div")
val firstInput = dom.document.createElement("input")
firstInput.setAttribute("value", "hello")
container.appendChild(firstInput)
val containerBefore = VirtualDom(container)
val containerAfter = Div(children = Seq(
Input(options = List("value" -> "bye"))
))
val diff = VirtualDom.diff(containerBefore, containerAfter)
VirtualDom.update(container, diff)
val resultA = container.firstElementChild
assert(resultA == firstInput)
assert(resultA.getAttribute("value") === "bye")
}
it("can replace any hyperscript element") {
val container = dom.document.createElement("div")
val h1 = dom.document.createElement("h1")
val span = dom.document.createElement("span")
h1.setAttribute("class", "testh1")
span.setAttribute("class", "testspan")
container.appendChild(h1)
container.appendChild(span)
val containerBefore = VirtualDom(container)
val containerAfter = Div(children = Seq(
H1(className = "testafterh1"),
Span(className = "testafterspan")
))
val diff = VirtualDom.diff(containerBefore, containerAfter)
VirtualDom.update(container, diff)
val resultA = container.firstElementChild
val resultB = container.childNodes(1).asInstanceOf[dom.Element]
assert(resultA == h1)
assert(resultA.getAttribute("class") === "testafterh1")
assert(resultB == span)
assert(resultB.getAttribute("class") === "testafterspan")
}
it("can replace Text nodes") {
val div = dom.document.createElement("div")
div.appendChild(dom.document.createTextNode("before"))
assert(div.textContent === "before")
val before = VirtualDom(div)
assert(before == Div(children = Seq(Text("before"))))
val after = Div(children = Seq(Text("after")))
val diffs = VirtualDom.diff(before, after)
VirtualDom.update(div, diffs)
assert(div.textContent === "after")
}
it("can replace nested elements with not so nested elements") {
val div = dom.document.createElement("div")
div.setAttribute("id", "div")
val span = dom.document.createElement("span")
span.setAttribute("id", "span")
val deep = dom.document.createElement("span")
deep.setAttribute("id", "deep")
span.appendChild(deep)
div.appendChild(span)
val before = VirtualDom(div)
assert(before == Div(id = "div", children = Seq(Span(id = "span", children = Seq(Span(id = "deep"))))))
val after = Div(id = "div", children = Seq(Span(id = "shallow")))
val diffs = VirtualDom.diff(before, after)
VirtualDom.update(div, diffs)
assert(div.firstElementChild.getAttribute("id") === "shallow")
assert(div.childNodes.length === 1)
assert(div.firstElementChild.childNodes.length === 0)
}
it("can replace elements with text nodes") {
val div = dom.document.createElement("div")
val span = dom.document.createElement("span")
span.appendChild(dom.document.createTextNode("before"))
div.appendChild(span)
assert(div.textContent === "before")
val before = VirtualDom(div)
assert(before == Div(children = Seq(Span(children = Seq(Text("before"))))))
val after = Div(children = Seq(Text("after")))
val diffs = VirtualDom.diff(before, after)
assert(diffs == List(Replacement(Path(0), Text("after"))))
VirtualDom.update(div, diffs)
assert(div.textContent === "after")
}
}
}
|
Narigo/scalajs-fun
|
scycle/src/test/scala/com/campudus/scycle/vdom/VirtualDomTest.scala
|
Scala
|
mit
| 17,132 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.features.avro.serde
import java.nio.ByteBuffer
import com.vividsolutions.jts.io.InStream
import org.apache.avro.io.Decoder
import org.locationtech.geomesa.features.avro.AvroSimpleFeature
import org.locationtech.geomesa.utils.text.WKBUtils
/**
* AvroSimpleFeature version 2 changes serialization of Geometry types from
* WKT (Well Known Text) to WKB (Well Known Binary)
*/
object Version2Deserializer extends ASFDeserializer {
override def setGeometry(sf: AvroSimpleFeature, field: String, in: Decoder): Unit = {
val bb = in.readBytes(null)
val bytes = new Array[Byte](bb.remaining)
bb.get(bytes)
val geom = WKBUtils.read(bytes)
sf.setAttributeNoConvert(field, geom)
}
class BBInStream(bb: ByteBuffer) extends InStream {
override def read(buf: Array[Byte]): Unit = bb.get(buf)
}
override def consumeGeometry(in: Decoder) = in.skipBytes()
}
|
drackaer/geomesa
|
geomesa-features/geomesa-feature-avro/src/main/scala/org/locationtech/geomesa/features/avro/serde/Version2Deserializer.scala
|
Scala
|
apache-2.0
| 1,378 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger, Linked}
case class CP278(value: Int) extends CtBoxIdentifier("Expenditure on designated environmentally friendly machinery and plant") with CtInteger
object CP278 extends Linked[CP252, CP278] {
override def apply(source: CP252): CP278 = CP278(source.orZero)
}
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP278.scala
|
Scala
|
apache-2.0
| 962 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka08
import kafka.admin.AdminUtils
import kafka.client.ClientUtils
import kafka.cluster.Broker
import kafka.consumer.AssignmentContext
import kafka.network.BlockingChannel
import org.I0Itec.zkclient.ZkClient
import org.apache.zookeeper.data.Stat
import org.locationtech.geomesa.kafka.common.ZkUtils
case class ZkUtils08(zkClient: ZkClient) extends ZkUtils {
override def channelToOffsetManager(groupId: String, socketTimeoutMs: Int, retryBackOffMs: Int): BlockingChannel =
ClientUtils.channelToOffsetManager(groupId, zkClient, socketTimeoutMs, retryBackOffMs)
override def deleteTopic(topic: String): Unit = AdminUtils.deleteTopic(zkClient, topic)
override def topicExists(topic: String): Boolean = AdminUtils.topicExists(zkClient, topic)
override def getAllTopics: Seq[String] = kafka.utils.ZkUtils.getAllTopics(zkClient)
override def createTopic(topic: String, partitions: Int, replication: Int) = AdminUtils.createTopic(zkClient, topic, partitions, replication)
override def getLeaderForPartition(topic: String, partition: Int): Option[Int] = kafka.utils.ZkUtils.getLeaderForPartition(zkClient, topic, partition)
override def createEphemeralPathExpectConflict(path: String, data: String): Unit = kafka.utils.ZkUtils.createEphemeralPathExpectConflict(zkClient, path, data)
override def createEphemeralPathExpectConflictHandleZKBug(path: String,
data: String,
expectedCallerData: Any,
checker: (String, Any) => Boolean,
backoffTime: Int): Unit =
kafka.utils.ZkUtils.createEphemeralPathExpectConflictHandleZKBug(zkClient, path, data, expectedCallerData, checker, backoffTime)
override def deletePath(path: String) = kafka.utils.ZkUtils.deletePath(zkClient, path)
override def getConsumerPartitionOwnerPath(groupId: String, topic: String, partition: Int): String =
kafka.utils.ZkUtils.getConsumerPartitionOwnerPath(groupId, topic, partition)
override def getChildrenParentMayNotExist(path: String): Seq[String] = kafka.utils.ZkUtils.getChildrenParentMayNotExist(zkClient, path)
override def getAllBrokersInCluster: Seq[kafka.cluster.Broker] = kafka.utils.ZkUtils.getAllBrokersInCluster(zkClient)
override def createAssignmentContext(group: String, consumerId: String, excludeInternalTopics: Boolean): AssignmentContext =
new AssignmentContext(group, consumerId, excludeInternalTopics, zkClient)
override def readData(path: String): (String, Stat) = kafka.utils.ZkUtils.readData(zkClient, path)
override def fetchTopicMetadataFromZk(topic: String) = AdminUtils.fetchTopicMetadataFromZk(topic, zkClient)
override def close(): Unit = zkClient.close()
}
|
mdzimmerman/geomesa
|
geomesa-kafka/geomesa-kafka-08-utils/src/main/scala/org/locationtech/geomesa/kafka08/ZkUtils08.scala
|
Scala
|
apache-2.0
| 3,336 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive
import java.io.{BufferedReader, InputStream, PrintStream, Reader}
import cats.{Alternative, Applicative, Apply, CoflatMap, Eq, FlatMap, Functor, FunctorFilter, Monoid, NonEmptyParallel, Order, ~>}
import cats.effect.{Bracket, Effect, ExitCase, Resource}
import monix.eval.{Coeval, Task, TaskLift, TaskLike}
import monix.eval.Task.defaultOptions
import monix.execution.Ack.{Continue, Stop}
import monix.execution.ChannelType.MultiProducer
import monix.execution._
import monix.execution.annotations.{UnsafeBecauseImpure, UnsafeProtocol}
import monix.execution.cancelables.{BooleanCancelable, SingleAssignCancelable}
import monix.execution.exceptions.{DownstreamTimeoutException, UpstreamTimeoutException}
import monix.reactive.Observable.{Operator, Transformer}
import monix.reactive.OverflowStrategy.Synchronous
import monix.reactive.internal.builders
import monix.reactive.internal.builders._
import monix.reactive.internal.deprecated.{ObservableDeprecatedBuilders, ObservableDeprecatedMethods}
import monix.reactive.internal.operators._
import monix.reactive.internal.subscribers.ForeachSubscriber
import monix.reactive.observables._
import monix.reactive.observers._
import monix.reactive.subjects._
import org.reactivestreams.{Publisher => RPublisher, Subscriber => RSubscriber}
import scala.collection.mutable
import scala.collection.immutable
import scala.concurrent.duration.{Duration, FiniteDuration}
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success, Try}
/** The `Observable` type that implements the Reactive Pattern.
*
* Provides methods of subscribing to the Observable and operators
* for combining observable sources, filtering, modifying,
* throttling, buffering, error handling and others.
*
* See the available documentation at: [[https://monix.io]]
*
* @define concatMergeDifference ==Concat vs Merge==
*
* The difference between the [[Observable!.concat concat]]
* operation and [[Observable!.merge merge]] is that `concat`
* cares about the ordering of sequences (e.g. all items
* emitted by the first observable in the sequence will come
* before the elements emitted by the second observable),
* whereas `merge` doesn't care about that (elements get
* emitted as they come). Because of back-pressure applied to
* observables, `concat` is safe to use in all contexts,
* whereas `merge` requires buffering. Or in other words
* `concat` has deterministic, lawful behavior (being the
* "monadic bind"), whereas `merge` has non-deterministic
* behavior.
*
* @define concatDescription Concatenates the sequence of observables
* emitted by the source into one observable, without any
* transformation.
*
* You can combine the items emitted by multiple observables
* so that they act like a single sequence by using this
* operator.
*
* This operation is the "monadic bind", implementing the
* `flatMap` operation of [[cats.Monad]].
*
* $concatMergeDifference
*
* @define delayErrorsDescription ==Delaying Errors==
*
* This version is reserving `onError` notifications until
* all of the observables complete and only then passing the
 * issued error(s) downstream. Note that the streamed error is a
* [[monix.execution.exceptions.CompositeException CompositeException]],
* since multiple errors from multiple streams can happen.
*
* @define concatReturn an observable that emits the merged events of all
* streams created by the source
*
* @define switchMapDescription Returns a new observable that emits the items
* emitted by the observable most recently generated by the
* mapping function.
*
* @define overflowStrategyParam the [[OverflowStrategy overflow strategy]]
* used for buffering, which specifies what to do in case
* we're dealing with a slow consumer - should an unbounded
* buffer be used, should back-pressure be applied, should
* the pipeline drop newer or older events, should it drop
* the whole buffer? See [[OverflowStrategy]] for more
* details.
*
* @define defaultOverflowStrategy this operation needs to do buffering
* and by not specifying an [[OverflowStrategy]], the
* [[OverflowStrategy.Default default strategy]] is being
* used.
*
* @define mergeMapDescription Creates a new observable by applying a
* function that you supply to each item emitted by the
* source observable, where that function returns an
* observable, and then merging those resulting observable
* and emitting the results of this merger.
*
* $concatMergeDifference
*
* @define mergeMapReturn an observable that emits the result of applying the
* transformation function to each item emitted by the source
* observable and merging the results of the observables
* obtained from this transformation.
*
* @define mergeDescription
*
* @define mergeReturn an observable containing the merged events of all
* streams created by the source
*
* @define onOverflowParam a function that is used for signaling a special
* event used to inform the consumers that an overflow event
* happened, function that receives the number of dropped
* events as a parameter (see [[OverflowStrategy.Evicted]])
*
* @define bufferWithSelectorDesc Periodically gather items emitted by
* an observable into bundles and emit these bundles rather than
* emitting the items one at a time, whenever the `selector`
* observable signals an event.
*
* The resulting observable collects the elements of the source
* in a buffer and emits that buffer whenever the given `selector`
* observable emits an `onNext` event, when the buffer is emitted
* as a sequence downstream and then reset. Thus the resulting
* observable emits connected, non-overlapping bundles triggered
* by the given `selector`.
*
* If `selector` terminates with an `onComplete`, then the resulting
* observable also terminates normally. If `selector` terminates with
* an `onError`, then the resulting observable also terminates with an
* error.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* @define unsafeBecauseImpure '''UNSAFE WARNING''':
* this operation can trigger the execution of side effects, which
* breaks referential transparency and is thus not a pure function.
*
* For FP code these functions shouldn't be called until
* "the end of the world", which is to say at the end of
* the program (for a console app), or at the end of a web
* request.
*
* Otherwise for modifying or operating on streams, prefer
* its pure functions like [[publishSelector]] for sharing
* the data source, or [[map]] or [[flatMap]] for operating
* on its events. Or in case of specialized logic, prefer
* to suspend these side effects via
* [[monix.reactive.Observable.suspend Observable.suspend]].
* Monix also provides [[monix.eval.Task Task]] which can
* also be used for suspending side effects and the `Task`
* was built to interop well with `Observable`.
*
* @define unsafeSubscribe '''UNSAFE PROTOCOL:''' This function is
* "unsafe" to call because it does not protect the calls to
* the given [[Observer]] implementation and thus knowledge
* of the protocol is needed.
*
* Prefer normal
* [[monix.reactive.Observable!.subscribe(subscriber* subscribe]]
* when consuming a stream, these unsafe subscription methods
* being useful when building operators and for testing
* purposes.
*
* Normal `subscribe` protects users in these ways:
*
* - it does a best effort attempt to catch and report
* exceptions that violate the protocol
* - the final `onComplete` or `onError` message is
* guaranteed to be signaled after the completion
* of the [[monix.execution.Ack acknowledgement]]
* received from the last `onNext`; the internal
* protocol doesn't require back-pressuring of
* this last message for performance reasons
*
* @define catsOrderInterop ==Cats Order and Scala Interop==
*
* Monix prefers to work with [[cats.Order]] for assessing the order
* of elements that have an ordering defined, instead of
* [[scala.math.Ordering]].
*
* We do this for consistency, as Monix is now building on top of Cats.
* This may change in the future, depending on what happens with
* [[https://github.com/typelevel/cats/issues/2455 typelevel/cats#2455]].
*
* Building a `cats.Order` is easy to do if you already have a
* Scala `Ordering` instance:
* {{{
* import cats.Order
*
* case class Person(name: String, age: Int)
*
* // Starting from a Scala Ordering
* implicit val scalaOrderingForPerson: Ordering[Person] =
* new Ordering[Person] {
* def compare(x: Person, y: Person): Int =
* x.age.compareTo(y.age) match {
* case 0 => x.name.compareTo(y.name)
* case o => o
* }
* }
*
* // Building a cats.Order from it
* implicit val catsOrderForPerson: Order[Person] =
* Order.fromOrdering
* }}}
*
* You can also do that in reverse, so you can prefer `cats.Order`
* (due to Cats also exposing laws and tests for free) and build a
* Scala `Ordering` when needed:
* {{{
* val scalaOrdering = catsOrderForPerson.toOrdering
* }}}
*
* @define catsEqInterop ==Cats Eq and Scala Interop==
*
* Monix prefers to work with [[cats.Eq]] for assessing the equality
* of elements that have an ordering defined, instead of
* [[scala.math.Equiv]].
*
* We do this because Scala's `Equiv` has a default instance defined
* that's based on universal equality and that's a big problem, because
* when using the `Eq` type class, it is universal equality that we
 * want to avoid, and there have been countless bugs in the ecosystem
* related to both universal equality and `Equiv`. Thankfully people
* are working to fix it.
*
* We also do this for consistency, as Monix is now building on top of
* Cats. This may change in the future, depending on what happens with
* [[https://github.com/typelevel/cats/issues/2455 typelevel/cats#2455]].
*
* Defining `Eq` instance is easy and we can use universal equality
* in our definitions as well:
* {{{
* import cats.Eq
*
* case class Address(host: String, port: Int)
*
* implicit val eqForAddress: Eq[Address] =
* Eq.fromUniversalEquals
* }}}
*/
abstract class Observable[+A] extends Serializable { self =>
// -----------------------------------------------------------------------
// Impure operations (that break referential transparency) ...
/** Characteristic function for an `Observable` instance, that creates
* the subscription and that eventually starts the streaming of
* events to the given [[Observer]], to be provided by observable
* implementations.
*
* $unsafeSubscribe
*
* $unsafeBecauseImpure
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable
/** Given an [[monix.reactive.Observer observer]] and a
* [[monix.execution.Scheduler scheduler]] for managing async
* boundaries, subscribes to this observable for events.
*
* Helper for calling the
* [[Observable.unsafeSubscribeFn(subscriber* abstract method]].
*
* $unsafeSubscribe
*
* $unsafeBecauseImpure
*/
@UnsafeProtocol
@UnsafeBecauseImpure
final def unsafeSubscribeFn(observer: Observer[A])(implicit s: Scheduler): Cancelable =
unsafeSubscribeFn(Subscriber(observer, s))
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe(observer: Observer[A])(implicit s: Scheduler): Cancelable =
subscribe(Subscriber(observer, s))
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe(subscriber: Subscriber[A]): Cancelable =
unsafeSubscribeFn(SafeSubscriber[A](subscriber))
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe(nextFn: A => Future[Ack], errorFn: Throwable => Unit)(implicit s: Scheduler): Cancelable =
subscribe(nextFn, errorFn, () => ())
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe()(implicit s: Scheduler): Cancelable =
subscribe(_ => Continue)
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe(nextFn: A => Future[Ack])(implicit s: Scheduler): Cancelable =
subscribe(nextFn, error => s.reportFailure(error), () => ())
/** Subscribes to the stream.
*
* $unsafeBecauseImpure
*
* @return a subscription that can be used to cancel the streaming.
* @see [[consumeWith]] for another way of consuming observables
*/
@UnsafeBecauseImpure
final def subscribe(nextFn: A => Future[Ack], errorFn: Throwable => Unit, completedFn: () => Unit)(implicit
s: Scheduler): Cancelable = {
subscribe(new Subscriber[A] {
implicit val scheduler = s
def onNext(elem: A) = nextFn(elem)
def onComplete() = completedFn()
def onError(ex: Throwable) = errorFn(ex)
})
}
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers).
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def multicast[B >: A, R](pipe: Pipe[B, R])(implicit s: Scheduler): ConnectableObservable[R] =
ConnectableObservable.multicast(this, pipe)
/** Returns a new Observable that multi-casts (shares) the original Observable
* between multiple consumers.
*
* $unsafeBecauseImpure
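    *
    * A minimal sketch (assuming the global scheduler is in scope):
    * {{{
    *   import monix.execution.Scheduler.Implicits.global
    *   import scala.concurrent.duration._
    *
    *   // All subscribers attach to one shared upstream subscription
    *   val shared = Observable.intervalAtFixedRate(1.second, 1.second).share
    * }}}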
*/
@UnsafeBecauseImpure
final def share(implicit s: Scheduler): Observable[A] =
publish.refCount
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers). The underlying subject used is a
* [[monix.reactive.subjects.PublishSubject PublishSubject]].
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def publish(implicit s: Scheduler): ConnectableObservable[A] =
unsafeMulticast(PublishSubject[A]())
/** Caches the emissions from the source Observable and replays them
* in order to any subsequent Subscribers. This operator has
* similar behavior to [[Observable!.replay(implicit* replay]]
* except that this auto-subscribes to the source Observable rather
* than returning a
* [[monix.reactive.observables.ConnectableObservable ConnectableObservable]]
* for which you must call
* [[monix.reactive.observables.ConnectableObservable.connect connect]]
* to activate the subscription.
*
* When you call cache, it does not yet subscribe to the source
* Observable and so does not yet begin caching items. This only
* happens when the first Subscriber calls the resulting
* Observable's `subscribe` method.
*
* Note: You sacrifice the ability to cancel the origin when you
 * use the cache operator, so be careful not to use this on
* Observables that emit an infinite or very large number of items
* that will use up memory.
*
* $unsafeBecauseImpure
*
* @return an Observable that, when first subscribed to, caches all of its
* items and notifications for the benefit of subsequent subscribers
*/
@UnsafeBecauseImpure
final def cache: Observable[A] =
CachedObservable.create(self)
/** Caches the emissions from the source Observable and replays them
* in order to any subsequent Subscribers. This operator has
* similar behavior to [[Observable!.replay(implicit* replay]]
* except that this auto-subscribes to the source Observable rather
* than returning a
* [[monix.reactive.observables.ConnectableObservable ConnectableObservable]]
* for which you must call
* [[monix.reactive.observables.ConnectableObservable.connect connect]]
* to activate the subscription.
*
* When you call cache, it does not yet subscribe to the source
* Observable and so does not yet begin caching items. This only
* happens when the first Subscriber calls the resulting
* Observable's `subscribe` method.
*
* $unsafeBecauseImpure
*
* @param maxCapacity is the maximum buffer size after which old events
* start being dropped (according to what happens when using
* [[monix.reactive.subjects.ReplaySubject.createLimited[A](capacity:Int,initial* ReplaySubject.createLimited]])
*
* @return an Observable that, when first subscribed to, caches all of its
* items and notifications for the benefit of subsequent subscribers
*/
@UnsafeBecauseImpure
final def cache(maxCapacity: Int): Observable[A] =
CachedObservable.create(self, maxCapacity)
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers). The underlying subject used is a
* [[monix.reactive.subjects.BehaviorSubject BehaviorSubject]].
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def behavior[B >: A](initialValue: B)(implicit s: Scheduler): ConnectableObservable[B] =
unsafeMulticast(BehaviorSubject[B](initialValue))
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers). The underlying subject used is a
* [[monix.reactive.subjects.ReplaySubject ReplaySubject]].
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def replay(implicit s: Scheduler): ConnectableObservable[A] =
unsafeMulticast(ReplaySubject[A]())
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers). The underlying subject used is a
* [[monix.reactive.subjects.ReplaySubject ReplaySubject]].
*
* $unsafeBecauseImpure
*
* @param bufferSize is the size of the buffer limiting the number
* of items that can be replayed (on overflow the head
* starts being dropped)
*/
@UnsafeBecauseImpure
final def replay(bufferSize: Int)(implicit s: Scheduler): ConnectableObservable[A] =
unsafeMulticast(ReplaySubject.createLimited[A](bufferSize))
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers).
*
* '''UNSAFE PROTOCOL''': This operator is unsafe because `Subject`
* objects are stateful and have to obey the `Observer` contract,
* meaning that they shouldn't be subscribed multiple times, so
* they are error prone. Only use if you know what you're doing,
* otherwise prefer the safe [[Observable!.multicast multicast]]
* operator.
*
* $unsafeBecauseImpure
*/
@UnsafeProtocol
@UnsafeBecauseImpure
final def unsafeMulticast[B >: A, R](processor: Subject[B, R])(implicit s: Scheduler): ConnectableObservable[R] =
ConnectableObservable.unsafeMulticast(this, processor)
/** Converts this observable into a multicast observable, useful for
* turning a cold observable into a hot one (i.e. whose source is
* shared by all observers). The underlying subject used is a
* [[monix.reactive.subjects.AsyncSubject AsyncSubject]].
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def publishLast(implicit s: Scheduler): ConnectableObservable[A] =
unsafeMulticast(AsyncSubject[A]())
/** Creates a new [[monix.execution.CancelableFuture CancelableFuture]]
* that upon execution will signal the first generated element of the
* source observable. Returns an `Option` because the source can be empty.
*
* $unsafeBecauseImpure
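    *
    * A small sketch (assuming the global scheduler is in scope):
    * {{{
    *   import monix.execution.Scheduler.Implicits.global
    *
    *   // CancelableFuture that completes with Some(1)
    *   val first = Observable(1, 2, 3).runAsyncGetFirst
    * }}}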
*/
@UnsafeBecauseImpure
final def runAsyncGetFirst(implicit s: Scheduler, opts: Task.Options = defaultOptions): CancelableFuture[Option[A]] =
firstOptionL.runToFutureOpt(s, opts)
/** Creates a new [[monix.execution.CancelableFuture CancelableFuture]]
* that upon execution will signal the last generated element of the
* source observable. Returns an `Option` because the source can be empty.
*
* $unsafeBecauseImpure
*/
@UnsafeBecauseImpure
final def runAsyncGetLast(implicit s: Scheduler, opts: Task.Options = defaultOptions): CancelableFuture[Option[A]] =
lastOptionL.runToFutureOpt(s, opts)
  /** Subscribes to the source `Observable` and, for each element
    * emitted by the source, executes the given callback.
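    *
    * A small sketch (assuming the global scheduler is in scope):
    * {{{
    *   import monix.execution.Scheduler.Implicits.global
    *
    *   // Prints 1, 2, 3 as the stream is being consumed
    *   val done = Observable(1, 2, 3).foreach(println)
    * }}}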
*/
@UnsafeBecauseImpure
final def foreach(cb: A => Unit)(implicit s: Scheduler): CancelableFuture[Unit] = {
val p = Promise[Unit]()
val onFinish = Callback.fromPromise(p)
val c = unsafeSubscribeFn(new ForeachSubscriber[A](cb, onFinish, s))
CancelableFuture(p.future, c)
}
// -----------------------------------------------------------------------
// Pure operations ...
/** Transforms the source using the given operator. */
final def liftByOperator[B](operator: Operator[A, B]): Observable[B] =
new LiftByOperatorObservable(self, operator)
/** On execution, consumes the source observable
* with the given [[Consumer]], effectively transforming the
* source observable into a [[monix.eval.Task Task]].
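    *
    * A minimal sketch (folding the stream into a sum):
    * {{{
    *   // Task that computes 6 when executed
    *   val sum = Observable(1, 2, 3)
    *     .consumeWith(Consumer.foldLeft(0)(_ + _))
    * }}}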
*/
final def consumeWith[R](f: Consumer[A, R]): Task[R] =
f(self)
  /** Polymorphic version of [[consumeWith]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLift]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def consumeWithF[F[_], R](f: Consumer[A, R])(implicit F: TaskLift[F]): F[R] =
f(self).to[F]
/** Alias for [[prepend]]. */
final def +:[B >: A](elem: B): Observable[B] =
prepend(elem)
/** Creates a new Observable that emits the given element and then it
* also emits the events of the source (prepend operation).
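    *
    * A small sketch:
    * {{{
    *   // Yields 1, 2, 3 (same as 1 +: Observable(2, 3))
    *   val stream = Observable(2, 3).prepend(1)
    * }}}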
*/
final def prepend[B >: A](elem: B): Observable[B] =
Observable.cons(elem, self)
/** Alias for [[append]]. */
final def :+[B >: A](elem: B): Observable[B] =
append(elem)
/** Creates a new Observable that emits the events of the source and
* then it also emits the given element (appended to the stream).
*/
final def append[B >: A](elem: B): Observable[B] =
self.appendAll(Observable.now(elem))
/** Given the source observable and another `Observable`, emits all of
* the items from the first of these Observables to emit an item
* and cancel the other.
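    *
    * A small sketch (hypothetical delays):
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Mirrors the second stream, since it starts emitting first
    *   val winner = Observable.now(1).delayExecution(2.seconds)
    *     .ambWith(Observable.now(2).delayExecution(1.second))
    * }}}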
*/
final def ambWith[B >: A](other: Observable[B]): Observable[B] =
Observable.firstStartedOf(self, other)
/** Periodically gather items emitted by an observable into bundles
* and emit these bundles rather than emitting the items one at a
* time. This version of `buffer` is emitting items once the
* internal buffer has reached the given count.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* Usage:
*
* {{{
* // Emits [2, 3], [4, 5], [6]
* Observable.range(2, 7)
* .bufferTumbling(count = 2)
* }}}
*
* @param count the maximum size of each buffer before it should
* be emitted
*/
final def bufferTumbling(count: Int): Observable[Seq[A]] =
bufferSliding(count, count)
/** Returns an observable that emits buffers of items it collects from
* the source observable. The resulting observable emits buffers
* every `skip` items, each containing `count` items.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* For `count` and `skip` there are 3 possibilities:
*
* 1. in case `skip == count`, then there are no items dropped and
* no overlap, the call being equivalent to `bufferTumbling(count)`
* 1. in case `skip < count`, then overlap between buffers
* happens, with the number of elements being repeated being
* `count - skip`
* 1. in case `skip > count`, then `skip - count` elements start
* getting dropped between windows
*
* Usage:
*
* {{{
* // Emits [2, 3], [5, 6]
* Observable.range(2, 7)
* .bufferSliding(count = 2, skip = 3)
* }}}
*
* {{{
* // Emits [2, 3, 4], [4, 5, 6]
* Observable.range(2, 7)
* .bufferSliding(count = 3, skip = 2)
* }}}
*
* @param count the maximum size of each buffer before it should
* be emitted
* @param skip how many items emitted by the source observable should
* be skipped before starting a new buffer. Note that when
* skip and count are equal, this is the same operation as
* `bufferTumbling(count)`
*/
final def bufferSliding(count: Int, skip: Int): Observable[Seq[A]] =
liftByOperator(new BufferSlidingOperator(count, skip))
/** Periodically gather items emitted by an observable into bundles
* and emit these bundles rather than emitting the items one at a
* time.
*
* This version of `buffer` emits a new bundle of items
* periodically, every timespan amount of time, containing all
* items emitted by the source Observable since the previous bundle
* emission.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* @param timespan the interval of time at which it should emit
* the buffered bundle
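    *
    * A small sketch:
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Each emitted bundle holds roughly 10 ticks
    *   val bundles = Observable.intervalAtFixedRate(100.millis, 100.millis)
    *     .bufferTimed(1.second)
    * }}}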
*/
final def bufferTimed(timespan: FiniteDuration): Observable[Seq[A]] =
bufferTimedAndCounted(timespan, 0)
/** Periodically gather items emitted by an observable into bundles
* and emit these bundles rather than emitting the items one at a
* time.
*
* The resulting observable emits connected, non-overlapping
* buffers, each of a fixed duration specified by the `timespan`
* argument or a maximum size specified by the `maxCount` argument
* (whichever is reached first).
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* @param timespan the interval of time at which it should emit
* the buffered bundle
* @param maxCount is the maximum bundle size, after which the
* buffered bundle gets forcefully emitted
*/
final def bufferTimedAndCounted(timespan: FiniteDuration, maxCount: Int): Observable[Seq[A]] =
new BufferTimedObservable[A](self, timespan, maxCount)
/** Periodically gather items emitted by an observable into bundles
* and emit these bundles rather than emitting the items one at a
* time. Back-pressure the source when the buffer is full.
*
* The resulting observable emits connected, non-overlapping
* buffers, each of a fixed duration specified by the `period`
* argument.
*
* The bundles are emitted at a fixed rate. If the source is
* silent, then the resulting observable will start emitting empty
* sequences.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
* current buffer is being dropped and the error gets propagated
* immediately.
*
* A `maxSize` argument is specified as the capacity of the
* bundle. In case the source is too fast and `maxSize` is reached,
* then the source will be back-pressured.
*
* A `sizeOf` argument is specified as the weight each element
* represents in the bundle. Defaults to count each element as
* weighting 1.
*
* The difference with [[bufferTimedAndCounted]] is that
* [[bufferTimedWithPressure]] applies back-pressure from the time
* when the buffer is full until the buffer is emitted, whereas
* [[bufferTimedAndCounted]] will forcefully emit the buffer when
* it's full.
*
* @param period the interval of time at which it should emit
* the buffered bundle
* @param maxSize is the maximum buffer size, after which the
* source starts being back-pressured
* @param sizeOf is the function to compute the weight of each
* element in the buffer
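    *
    * A minimal sketch (using the default weighting of 1 per element):
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // At most 100 elements are buffered per second; a faster
    *   // producer gets back-pressured instead of overflowing
    *   val bundles = Observable.repeat(1)
    *     .bufferTimedWithPressure(1.second, maxSize = 100)
    * }}}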
*/
final def bufferTimedWithPressure[AA >: A](
period: FiniteDuration,
maxSize: Int,
sizeOf: AA => Int = (_: AA) => 1): Observable[Seq[AA]] = {
val sampler = Observable.intervalAtFixedRate(period, period)
new BufferWithSelectorObservable(self, sampler, maxSize, sizeOf)
}
/** Buffers elements while predicate returns true,
* after which it emits the buffered events as a single bundle
* and creates a new buffer.
*
* Usage:
*
* {{{
* import monix.eval.Task
*
* Observable(1, 1, 1, 2, 2, 1, 3)
* .bufferWhile(_ == 1)
* .doOnNext(l => Task(println(s"Emitted batch $$l")))
*
* // Emitted batch List(1, 1, 1)
* // Emitted batch List(2)
* // Emitted batch List(2, 1)
* // Emitted batch List(3)
* }}}
*
* @see [[bufferWhileInclusive]] for a similar operator that includes
* the value that caused `predicate` to return `false`
*/
final def bufferWhile(p: A => Boolean): Observable[Seq[A]] =
self.liftByOperator(new BufferWhileOperator(p, inclusive = false))
/** Buffers elements while predicate returns true,
* after which it emits the buffered events as a single bundle,
* including the value that caused `predicate` to return `false`
* and creates a new buffer.
*
* Usage:
*
* {{{
* import monix.eval.Task
*
* Observable(1, 1, 1, 2, 2, 1, 3)
* .bufferWhileInclusive(_ == 1)
* .doOnNext(l => Task(println(s"Emitted batch $$l")))
*
* // Emitted batch List(1, 1, 1, 2)
* // Emitted batch List(2)
* // Emitted batch List(1, 3)
* }}}
*
* @see [[bufferWhile]] for a similar operator that does not include
* the value that caused `predicate` to return `false`
*/
final def bufferWhileInclusive(p: A => Boolean): Observable[Seq[A]] =
self.liftByOperator(new BufferWhileOperator(p, inclusive = true))
/** $bufferWithSelectorDesc
*
* @param selector is the observable that triggers the
* signaling of the current buffer
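    *
    * A minimal sketch (using a periodic tick as the selector):
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Emits whatever accumulated during each 1-second window
    *   val bundles = Observable.intervalAtFixedRate(100.millis, 100.millis)
    *     .bufferWithSelector(Observable.intervalAtFixedRate(1.second, 1.second))
    * }}}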
*/
final def bufferWithSelector[S](selector: Observable[S]): Observable[Seq[A]] =
new BufferWithSelectorObservable[A, S](self, selector, 0, (_: A) => 1)
/** $bufferWithSelectorDesc
*
* A `maxSize` argument is specified as the capacity of the
* bundle. In case the source is too fast and `maxSize` is reached,
* then the source will be back-pressured.
*
* @param selector is the observable that triggers the signaling of the
* current buffer
* @param maxSize is the maximum bundle size, after which the
* source starts being back-pressured
*/
final def bufferWithSelector[S](selector: Observable[S], maxSize: Int): Observable[Seq[A]] =
new BufferWithSelectorObservable(self, selector, maxSize, (_: A) => 1)
/** Buffers signals while busy, after which it emits the
* buffered events as a single bundle.
*
* This operator starts applying back-pressure when the
* underlying buffer's size is exceeded.
*
* Usage:
*
* {{{
* import monix.eval.Task
* import scala.concurrent.duration._
*
* Observable.range(1, 6)
* .doOnNext(l => Task(println(s"Started $$l")))
* .bufferIntrospective(maxSize = 2)
* .doOnNext(l => Task(println(s"Emitted batch $$l")))
* .mapEval(l => Task(println(s"Processed batch $$l")).delayExecution(500.millis))
*
* // Started 1
* // Emitted batch List(1)
* // Started 2
* // Started 3
* // Processed batch List(1)
* // Emitted batch List(2, 3)
* // Started 4
* // Started 5
* // Processed batch List(2, 3)
* // Emitted batch List(4, 5)
* // Processed batch List(4, 5)
* }}}
*/
final def bufferIntrospective(maxSize: Int): Observable[List[A]] =
new BufferIntrospectiveObservable[A](self, maxSize)
/** Implementation of `bracket` from `cats.effect.Bracket`.
*
* See [[https://typelevel.org/cats-effect/typeclasses/bracket.html documentation]].
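    *
    * A minimal sketch (a hypothetical resource, released when the
    * `use` stream completes, errors or is cancelled):
    * {{{
    *   import monix.eval.Task
    *
    *   val stream = Observable.now("resource")
    *     .bracket(r => Observable(r + ":1", r + ":2"))(r =>
    *       Task(println("released " + r)))
    * }}}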
*/
final def bracket[B](use: A => Observable[B])(release: A => Task[Unit]): Observable[B] =
bracketCase(use)((a, _) => release(a))
/** Version of [[bracket]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So in `release` you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def bracketF[F[_], B](use: A => Observable[B])(release: A => F[Unit])(implicit F: TaskLike[F]): Observable[B] =
bracket(use)(release.andThen(F.apply))
/** Implementation of `bracketCase` from `cats.effect.Bracket`.
*
* See [[https://typelevel.org/cats-effect/typeclasses/bracket.html documentation]].
*/
final def bracketCase[B](use: A => Observable[B])(release: (A, ExitCase[Throwable]) => Task[Unit]): Observable[B] =
new ConcatMapObservable(uncancelable, use, release, delayErrors = false)
/** Version of [[bracketCase]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So in `release` you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def bracketCaseF[F[_], B](use: A => Observable[B])(release: (A, ExitCase[Throwable]) => F[Unit])(implicit
F: TaskLike[F]): Observable[B] =
bracketCase(use)((a, e) => F(release(a, e)))
  /** Applies the given partial function to each element emitted by
    * the source, for those elements on which the function is defined.
*
* @param pf the function that filters and maps the source
* @return an observable that emits the transformed items by the
* given partial function
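    *
    * A small sketch:
    * {{{
    *   // Yields "2", "4"; odd numbers fall outside the domain
    *   val evens = Observable(1, 2, 3, 4)
    *     .collect { case x if x % 2 == 0 => x.toString }
    * }}}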
*/
final def collect[B](pf: PartialFunction[A, B]): Observable[B] =
self.liftByOperator(new CollectOperator(pf))
  /** Takes the longest prefix of elements on which the given partial
    * function is defined and returns a new Observable that emits those
    * elements, transformed.
    *
    * @param pf the function that filters and maps the source
    * @return an observable that emits the items transformed by the
    *         given partial function, for as long as the elements remain
    *         within the function's domain
*/
final def collectWhile[B](pf: PartialFunction[A, B]): Observable[B] =
self.liftByOperator(new CollectWhileOperator(pf))
/** Creates a new observable from the source and another given
* observable, by emitting elements combined in pairs.
*
* It emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (2, 3), (3, 3), (4, 3), (4, 4)
* </pre>
*
* See [[zip]] for an alternative that pairs the items in strict sequence.
*
* @param other is an observable that gets paired with the source
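    *
    * A small sketch:
    * {{{
    *   // Pairs each emitted number with the latest letter seen so far
    *   val pairs = Observable(1, 2, 3)
    *     .combineLatest(Observable("a", "b"))
    * }}}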
*/
final def combineLatest[B](other: Observable[B]): Observable[(A, B)] =
new CombineLatest2Observable[A, B, (A, B)](self, other)((a, b) => (a, b))
/** Creates a new observable from the source and another given
* observable, by emitting elements combined in pairs.
*
* It emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (2, 3), (3, 3), (4, 3), (4, 4)
* </pre>
*
* See [[zipMap]] for an alternative that pairs the items
* in strict sequence.
*
* @param other is an observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def combineLatestMap[B, R](other: Observable[B])(f: (A, B) => R): Observable[R] =
new CombineLatest2Observable[A, B, R](self, other)(f)
/** Ignores all items emitted by the source Observable and only calls
* onCompleted or onError.
*
* @return an empty Observable that only calls onCompleted or onError,
* based on which one is called by the source Observable
*/
final def completed: Observable[Nothing] =
self.liftByOperator(CompletedOperator)
/** Doesn't emit anything until a `timeout` period passes without the
* source emitting anything. When that timeout happens, we
* subscribe to the observable generated by the given function, an
 * observable that will keep emitting until the source breaks
* the silence by emitting another event.
*
* Note: If the source observable keeps emitting items more
* frequently than the length of the time window, then no items
* will be emitted by the resulting Observable.
*
* @param f is a function that receives the last element generated
* by the source, generating an observable to be subscribed
* when the source is timing out
* @param timeout the length of the window of time that must pass after
* the emission of an item from the source Observable in
* which that Observable emits no items in order for the
* item to be emitted by the resulting Observable
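    *
    * A minimal sketch (the `search` function below is hypothetical):
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Hypothetical lookup returning a one-element stream
    *   def search(q: String): Observable[String] =
    *     Observable.now(s"results for $$q")
    *
    *   // Only queries left unchanged for 300 millis trigger a search
    *   val results = Observable("a", "ab", "abc")
    *     .debounceTo(300.millis, search)
    * }}}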
*/
final def debounceTo[B](timeout: FiniteDuration, f: A => Observable[B]): Observable[B] =
self.switchMap(a => f(a).delayExecution(timeout))
/** Hold an Observer's subscription request for a specified amount of
* time before passing it on to the source Observable.
*
* @param timespan is the time to wait before the subscription
 * is initiated.
*/
final def delayExecution(timespan: FiniteDuration): Observable[A] =
new DelayExecutionByTimespanObservable(self, timespan)
/** Convert an observable that emits observables into a single
* observable that emits the items emitted by the
* most-recently-emitted of those observables.
*
* Similar with [[concatMap]], however the source isn't
* back-pressured when emitting new events. Instead new events
* being emitted are cancelling the active child observables.
*
* ==Example==
*
* The `switchMap` can express a lot of cool, time-based operations.
* For example we can express [[debounce]] in terms of `switchMap`:
* {{{
* import scala.concurrent.duration._
*
* def debounce[A](stream: Observable[A], d: FiniteDuration): Observable[A] =
* stream.switchMap { x =>
* Observable.now(x).delayExecution(d)
* }
* }}}
*
* @param f is a generator for the streams that are being merged
*/
final def switchMap[B](f: A => Observable[B]): Observable[B] =
new SwitchMapObservable[A, B](self, f)
/** Emits the last item from the source Observable if a particular
* timespan has passed without it emitting another item, and keeps
* emitting that item at regular intervals until the source breaks
* the silence.
*
* So compared to regular [[debounceTo]] this version
* keeps emitting the last item of the source.
*
* Note: If the source Observable keeps emitting items more
* frequently than the length of the time window then no items will
* be emitted by the resulting Observable.
*
* @param period the length of the window of time that must pass after
* the emission of an item from the source Observable in
* which that Observable emits no items in order for the
* item to be emitted by the resulting Observable at regular
* intervals, also determined by period
* @see [[echoRepeated]] for a similar operator that also mirrors
* the source observable
*/
final def debounceRepeated(period: FiniteDuration): Observable[A] =
new DebounceObservable(self, period, repeat = true)
/** Emit items from the source, or emit a default item if
* the source completes after emitting no items.
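    *
    * A small sketch:
    * {{{
    *   // Yields 0, since the source is empty
    *   val stream = Observable.empty[Int].defaultIfEmpty(0)
    * }}}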
*/
final def defaultIfEmpty[B >: A](default: => B): Observable[B] =
self.liftByOperator(new DefaultIfEmptyOperator[B](() => default))
/** Delays emitting the final `onComplete` event by the specified amount. */
final def delayOnComplete(delay: FiniteDuration): Observable[A] =
new DelayOnCompleteObservable(self, delay)
/** Returns an Observable that emits the items emitted by the source
* Observable shifted forward in time by a specified delay.
*
* Each time the source Observable emits an item, delay starts a
* timer, and when that timer reaches the given duration, the
* Observable returned from delay emits the same item.
*
* NOTE: this delay refers strictly to the time between the
* `onNext` event coming from our source and the time it takes the
* downstream observer to get this event. On the other hand the
* operator is also applying back-pressure, so on slow observers
* the actual time passing between two successive events may be
* higher than the specified `duration`.
*
* @param duration - the delay to shift the source by
* @return the source Observable shifted in time by the specified delay
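    *
    * A minimal sketch:
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Each element reaches the subscriber one second late
    *   val delayed = Observable(1, 2, 3).delayOnNext(1.second)
    * }}}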
*/
final def delayOnNext(duration: FiniteDuration): Observable[A] =
new DelayByTimespanObservable[A](self, duration)
/** Returns an Observable that emits the items emitted by the source
* Observable shifted forward in time.
*
* This variant of `delay` sets its delay duration on a per-item
* basis by passing each item from the source Observable into a
* function that returns an Observable and then monitoring those
* Observables. When any such Observable emits an item or
* completes, the Observable returned by delay emits the associated
* item.
*
* @param selector is a function that returns an Observable for
* each item emitted by the source Observable, which is then
* used to delay the emission of that item by the resulting
* Observable until the Observable returned from `selector`
* emits an item
* @return the source Observable shifted in time by
* the specified delay
*/
final def delayOnNextBySelector[B](selector: A => Observable[B]): Observable[A] =
new DelayBySelectorObservable[A, B](self, selector)
/** Hold an Observer's subscription request until the given `trigger`
* observable either emits an item or completes, before passing it
* on to the source Observable.
*
* If the given `trigger` completes in error, then the subscription is
* terminated with `onError`.
*
* @param trigger the observable that must either emit an item or
* complete in order for the source to be subscribed.
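    *
    * A minimal sketch:
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Subscription to the source starts only after the trigger ticks
    *   val delayed = Observable(1, 2, 3)
    *     .delayExecutionWith(Observable.now(()).delayExecution(2.seconds))
    * }}}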
*/
final def delayExecutionWith[B](trigger: Observable[B]): Observable[A] =
new DelayExecutionWithTriggerObservable(self, trigger)
/** Version of [[delayExecutionWith]] that can work with generic `F[_]`
* tasks, anything that's supported via [[ObservableLike]] conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def delayExecutionWithF[F[_], B](trigger: F[B])(implicit F: ObservableLike[F]): Observable[A] =
delayExecutionWith(F.apply(trigger))
/** Converts the source Observable that emits `Notification[A]` (the
* result of [[materialize]]) back to an Observable that emits `A`.
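    *
    * A small sketch:
    * {{{
    *   // Round-trips the stream through Notification values
    *   val stream = Observable(1, 2, 3).materialize.dematerialize
    * }}}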
*/
final def dematerialize[B](implicit ev: A <:< Notification[B]): Observable[B] =
self.asInstanceOf[Observable[Notification[B]]].liftByOperator(new DematerializeOperator[B])
/** Suppress duplicate consecutive items emitted by the source.
*
* Example:
* {{{
* // Needed to bring standard Eq instances in scope:
* import cats.implicits._
*
* // Yields 1, 2, 1, 3, 2, 4
* val stream = Observable(1, 1, 1, 2, 2, 1, 1, 3, 3, 3, 2, 2, 4, 4, 4)
* .distinctUntilChanged
* }}}
*
* Duplication is detected by using the equality relationship
* provided by the [[cats.Eq]] type class. This allows one to
* override the equality operation being used (e.g. maybe the
* default `.equals` is badly defined, or maybe you want reference
* equality, so depending on use case).
*
* $catsEqInterop
*
* @param A is the [[cats.Eq]] instance that defines equality
* for the elements emitted by the source
*/
final def distinctUntilChanged[AA >: A](implicit A: Eq[AA]): Observable[AA] =
self.liftByOperator(new DistinctUntilChangedOperator()(A))
/** Given a function that returns a key for each element emitted by
* the source, suppress consecutive duplicate items.
*
* Example:
* {{{
* // Needed to bring standard instances in scope:
* import cats.implicits._
*
* // Yields 1, 2, 3, 4
* val stream = Observable(1, 3, 2, 4, 2, 3, 5, 7, 4)
* .distinctUntilChangedByKey(_ % 2)
* }}}
*
* Duplication is detected by using the equality relationship
* provided by the [[cats.Eq]] type class. This allows one to
* override the equality operation being used (e.g. maybe the
* default `.equals` is badly defined, or maybe you want reference
* equality, so depending on use case).
*
* $catsEqInterop
*
* @param key is a function that returns a `K` key for each element,
* a value that's then used to do the deduplication
*
* @param K is the [[cats.Eq]] instance that defines equality for
* the key type `K`
*/
final def distinctUntilChangedByKey[K](key: A => K)(implicit K: Eq[K]): Observable[A] =
self.liftByOperator(new DistinctUntilChangedByKeyOperator(key)(K))
/** Executes the given task when the streaming is stopped
* due to a downstream [[monix.execution.Ack.Stop Stop]] signal
* returned by [[monix.reactive.Observer.onNext onNext]].
*
* The given `task` gets evaluated *before* the upstream
* receives the `Stop` event (is back-pressured).
*
* Example:
* {{{
* import monix.eval.Task
*
* val stream = Observable.range(0, Int.MaxValue)
* .doOnEarlyStop(Task(println("Stopped early!")))
* .take(100)
* }}}
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*/
final def doOnEarlyStop(task: Task[Unit]): Observable[A] =
self.liftByOperator(new DoOnEarlyStopOperator[A](task))
/** Version of [[doOnEarlyStop]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* Example:
* {{{
* import cats.effect.IO
*
* val stream = Observable.range(0, Int.MaxValue)
* .doOnEarlyStopF(IO(println("Stopped early!")))
* .take(100)
* }}}
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*/
final def doOnEarlyStopF[F[_]](task: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnEarlyStop(F(task))
/** Executes the given callback when the connection is being cancelled,
* via the [[monix.execution.Cancelable Cancelable]] reference returned
* on subscribing to the created observable.
*
* Example:
* {{{
* import monix.eval.Task
* import monix.execution.Scheduler
*
* implicit val s = Scheduler.global
*
* val cancelable =
* Observable
* .range(0, Int.MaxValue)
* .doOnSubscriptionCancel(Task(println("Cancelled!")))
* .subscribe()
*
* cancelable.cancel()
* }}}
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*/
final def doOnSubscriptionCancel(task: Task[Unit]): Observable[A] =
new DoOnSubscriptionCancelObservable[A](self, task)
/** Version of [[doOnSubscriptionCancel]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* Example:
* {{{
* import cats.effect.IO
* import monix.execution.Scheduler
*
* implicit val s = Scheduler.global
*
* val cancelable =
* Observable
* .range(0, Int.MaxValue)
* .doOnSubscriptionCancelF(IO(println("Cancelled!")))
* .subscribe()
*
* cancelable.cancel()
* }}}
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*/
final def doOnSubscriptionCancelF[F[_]](task: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnSubscriptionCancel(F(task))
/** Evaluates the given task when the stream has ended with an
* `onComplete` event, but before the complete event is emitted.
*
* The task gets evaluated and is finished *before* the `onComplete`
* signal gets sent downstream.
*
* {{{
* import monix.eval.Task
*
* Observable.range(0, 10)
* .doOnComplete(Task(println("Completed!")))
* }}}
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*
* @param task the task to execute when the `onComplete`
* event gets emitted
*/
final def doOnComplete(task: Task[Unit]): Observable[A] =
self.liftByOperator(new DoOnCompleteOperator[A](task))
/** Version of [[doOnComplete]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* {{{
* import cats.effect.IO
*
* Observable.range(0, 10)
* .doOnCompleteF(IO(println("Completed!")))
* }}}
*/
final def doOnCompleteF[F[_]](task: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnComplete(F(task))
/** Executes the given task when the stream is interrupted with an
* error, before the `onError` event is emitted downstream.
*
* Example:
* {{{
* import monix.eval.Task
*
* val dummy = new RuntimeException("dummy")
*
* (Observable.range(0, 10) ++ Observable.raiseError(dummy))
* .doOnError { e =>
* Task(println(s"Triggered error: $$e"))
* }
* }}}
*
 * NOTE: you should protect the code in this callback, because if it
 * throws an exception, the `onError` event will prefer signaling
 * the original exception; otherwise the behavior is undefined.
*
* NOTE: in most cases what you want is [[guaranteeCase]]
* or [[bracketCase]]. This operator is available for
* fine-grained control.
*/
final def doOnError(cb: Throwable => Task[Unit]): Observable[A] =
self.liftByOperator(new DoOnErrorOperator[A](cb))
/** Version of [[doOnError]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* {{{
* import cats.effect.IO
*
* val dummy = new RuntimeException("dummy")
*
* (Observable.range(0, 10) ++ Observable.raiseError(dummy))
* .doOnErrorF { e =>
* IO(println(s"Triggered error: $$e"))
* }
* }}}
*/
final def doOnErrorF[F[_]](cb: Throwable => F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnError(e => F(cb(e)))
/** Evaluates the given callback for each element generated by the
* source Observable, useful for triggering async side-effects.
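 *
 * Example:
 * {{{
 *   import monix.eval.Task
 *
 *   Observable(1, 2, 3)
 *     .doOnNext(a => Task(println(s"Got: $$a")))
 * }}}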
*
* @return a new Observable that executes the specified
* callback for each element
*
* @see [[doOnNext]] for a simpler version that doesn't allow
* asynchronous execution.
*/
final def doOnNext(cb: A => Task[Unit]): Observable[A] =
self.mapEval(a => cb(a).map(_ => a))
/** Version of [[doOnNext]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
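 *
 * Example:
 * {{{
 *   import cats.effect.IO
 *
 *   Observable(1, 2, 3)
 *     .doOnNextF(a => IO(println(s"Got: $$a")))
 * }}}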
*
* @return a new Observable that executes the specified
* callback for each element
*/
final def doOnNextF[F[_]](cb: A => F[Unit])(implicit F: TaskLike[F]): Observable[A] =
self.doOnNext(a => F(cb(a)))
/** Executes the given callback on each acknowledgement received from
* the downstream subscriber, executing a generated
* [[monix.eval.Task Task]] and back-pressuring until the task
* is done.
*
* This method helps in executing logic after messages get
* processed, for example when messages are polled from
* some distributed message queue and an acknowledgement
* needs to be sent after each message in order to mark it
* as processed.
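 *
 * Example (a sketch that just logs each acknowledgement):
 * {{{
 *   import monix.eval.Task
 *
 *   Observable(1, 2, 3)
 *     .doOnNextAck((a, ack) => Task(println(s"$$a -> $$ack")))
 * }}}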
*
* @see [[doOnNextAckF]] for a version that can do evaluation with
* any data type via [[monix.eval.TaskLike]]
*/
final def doOnNextAck(cb: (A, Ack) => Task[Unit]): Observable[A] =
self.liftByOperator(new DoOnNextAckOperator[A](cb))
/** Version of [[doOnNextAck]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def doOnNextAckF[F[_]](cb: (A, Ack) => F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnNextAck((a, ack) => Task.from(cb(a, ack))(F))
/** Executes the given callback only for the first element generated
* by the source Observable, useful for doing a piece of
* computation only when the stream starts.
*
* For example this observable will have a "delayed execution"
* of 1 second, plus a delayed first element of another 1 second,
* therefore it will take a total of 2 seconds for the first
* element to be emitted:
*
* {{{
* import monix.eval._
* import scala.concurrent.duration._
*
* Observable.range(0, 100)
* .delayExecution(1.second)
* .doOnStart { a =>
* for {
* _ <- Task.sleep(1.second)
* _ <- Task(println(s"Started with: $$a"))
* } yield ()
* }
* }}}
*
* @return a new Observable that executes the specified task
* only for the first element
*/
final def doOnStart(cb: A => Task[Unit]): Observable[A] =
self.liftByOperator(new DoOnStartOperator[A](cb))
/** Version of [[doOnStart]] that can work with generic
 * `F[_]` tasks, anything that's supported via the
 * `cats.effect.Effect` type class (note the stronger requirement,
 * compared with [[monix.eval.TaskLike]], as reflected by the
 * implicit parameter).
 *
 * So you can work among others with:
 *
 *   - `cats.effect.IO`
 *   - ...
*
* {{{
* import cats.implicits._
* import cats.effect._
* import cats.effect.Timer
* import scala.concurrent.duration._
* import monix.execution.Scheduler.Implicits.global
* import monix.catnap.SchedulerEffect
* // Needed for IO.sleep
* implicit val timer: Timer[IO] = SchedulerEffect.timerLiftIO[IO](global)
*
* Observable.range(0, 100)
* .delayExecution(1.second)
* .doOnStartF { a =>
* for {
* _ <- IO.sleep(1.second)
* _ <- IO(println(s"Started with: $$a"))
* } yield ()
* }
* }}}
*/
final def doOnStartF[F[_]](cb: A => F[Unit])(implicit F: Effect[F]): Observable[A] =
doOnStart(a => Task.fromEffect(cb(a))(F))
/** Executes the given callback just _before_ the subscription
* to the source happens.
*
* For example this is equivalent with [[delayExecution]]:
*
* {{{
* import monix.eval.Task
* import scala.concurrent.duration._
*
* Observable.range(0, 10)
* .doOnSubscribe(Task.sleep(1.second))
* }}}
*
* @see [[doAfterSubscribe]] for executing a callback just after
* a subscription happens.
*/
final def doOnSubscribe(task: Task[Unit]): Observable[A] =
new DoOnSubscribeObservable.Before[A](self, task)
/** Version of [[doOnSubscribe]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* For example this is equivalent with [[delayExecution]]:
*
* {{{
* import cats.effect._
* import cats.effect.Timer
* import scala.concurrent.duration._
* import monix.execution.Scheduler.Implicits.global
* import monix.catnap.SchedulerEffect
* // Needed for IO.sleep
* implicit val timer: Timer[IO] = SchedulerEffect.timerLiftIO[IO](global)
*
* Observable.range(0, 10)
* .doOnSubscribeF(IO.sleep(1.second))
* }}}
*/
final def doOnSubscribeF[F[_]](task: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doOnSubscribe(F(task))
/** Executes the given callback just _after_ the subscription
* happens.
*
 * The given `Task` executes after the subscription happens
 * and it will delay the first event being emitted. For example
* this would delay the emitting of the first event by 1 second:
*
* {{{
* import monix.eval.Task
* import scala.concurrent.duration._
*
* Observable.range(0, 100)
* .doAfterSubscribe(Task.sleep(1.second))
* }}}
*
* @see [[doOnSubscribe]] for executing a callback just before
* a subscription happens.
*/
final def doAfterSubscribe(task: Task[Unit]): Observable[A] =
new DoOnSubscribeObservable.After[A](self, task)
/** Version of [[doAfterSubscribe]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* {{{
* import cats.effect._
* import cats.effect.Timer
* import scala.concurrent.duration._
* import monix.execution.Scheduler.Implicits.global
* import monix.catnap.SchedulerEffect
* // Needed for IO.sleep
* implicit val timer: Timer[IO] = SchedulerEffect.timerLiftIO[IO](global)
*
* Observable.range(0, 100)
* .doAfterSubscribeF(IO.sleep(1.second))
* }}}
*/
final def doAfterSubscribeF[F[_]](task: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
doAfterSubscribe(F(task))
/** Creates a new observable that drops the events of the source, only
 * for the specified `timespan` window.
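 *
 * Example (the timings are illustrative):
 * {{{
 *   import scala.concurrent.duration._
 *
 *   // Drops the ticks emitted during the first second
 *   Observable.intervalAtFixedRate(300.millis)
 *     .dropByTimespan(1.second)
 * }}}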
*
* @param timespan the window of time during which the new observable
* must drop events emitted by the source
*/
final def dropByTimespan(timespan: FiniteDuration): Observable[A] =
new DropByTimespanObservable(self, timespan)
/** Drops the last `n` elements (from the end).
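 *
 * {{{
 *   // Yields 1, 2, 3
 *   val stream = Observable(1, 2, 3, 4, 5).dropLast(2)
 * }}}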
*
* @param n the number of elements to drop
 * @return a new Observable that drops the last ''n'' elements
 * emitted by the source
*/
final def dropLast(n: Int): Observable[A] =
self.liftByOperator(new DropLastOperator[A](n))
/** Discards items emitted by the source until a second
* observable emits an item or completes.
*
* If the `trigger` observable completes in error, then the
* resulting observable will also end in error when it notices
* it (next time an element is emitted by the source).
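 *
 * Example (the timings are illustrative):
 * {{{
 *   import scala.concurrent.duration._
 *
 *   // Ignores the ticks emitted in the first 3 seconds
 *   Observable.intervalAtFixedRate(1.second)
 *     .dropUntil(Observable.now(()).delayExecution(3.seconds))
 * }}}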
*
* @param trigger the observable that has to emit an item before the
 * source begins to be mirrored by the resulting observable
*/
final def dropUntil(trigger: Observable[Any]): Observable[A] =
new DropUntilObservable(self, trigger)
/** Drops the longest prefix of elements that satisfy the given
* predicate and returns a new observable that emits the rest.
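 *
 * {{{
 *   // Yields 3, 4, 5, 1
 *   val stream = Observable(1, 2, 3, 4, 5, 1).dropWhile(_ < 3)
 * }}}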
*/
final def dropWhile(p: A => Boolean): Observable[A] =
self.liftByOperator(new DropByPredicateOperator(p, inclusive = false))
/** Drops the longest prefix of elements that satisfy the given
* predicate, inclusive of the value that caused `predicate` to return `false` and
* returns a new observable that emits the rest.
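 *
 * {{{
 *   // Yields 4, 5, 1
 *   val stream = Observable(1, 2, 3, 4, 5, 1).dropWhileInclusive(_ < 3)
 * }}}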
*/
final def dropWhileInclusive(p: A => Boolean): Observable[A] =
self.liftByOperator(new DropByPredicateOperator(p, inclusive = true))
/** Drops the longest prefix of elements that satisfy the given
* function and returns a new observable that emits the rest. In
* comparison with [[dropWhile]], this version accepts a function
* that takes an additional parameter: the zero-based index of the
* element.
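 *
 * {{{
 *   // Yields 30, 40 (drops while the zero-based index is < 2)
 *   val stream = Observable(10, 20, 30, 40)
 *     .dropWhileWithIndex((_, idx) => idx < 2)
 * }}}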
*/
final def dropWhileWithIndex(p: (A, Int) => Boolean): Observable[A] =
self.liftByOperator(new DropByPredicateWithIndexOperator(p))
/** Utility that logs incoming events (tagged with the given
 * `prefix`) to the given `PrintStream`, useful for debugging
 * purposes.
*/
final def dump(prefix: String, out: PrintStream = System.out): Observable[A] =
new DumpObservable[A](self, prefix, out)
/** Mirror the source observable as long as the source keeps emitting
* items, otherwise if `timeout` passes without the source emitting
* anything new then the observable will emit the last item.
*
* Note: If the source Observable keeps emitting items more
* frequently than the length of the time window then the resulting
* observable will mirror the source exactly.
*
* @param timeout the window of silence that must pass in order for the
* observable to echo the last item
*/
final def echoOnce(timeout: FiniteDuration): Observable[A] =
new EchoObservable(self, timeout, onlyOnce = true)
/** Mirror the source observable as long as the source keeps emitting
* items, otherwise if `timeout` passes without the source emitting
* anything new then the observable will start emitting the last
* item repeatedly.
*
* Note: If the source Observable keeps emitting items more
* frequently than the length of the time window then the resulting
* observable will mirror the source exactly.
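 *
 * Example (a sketch; the timings are illustrative):
 * {{{
 *   import scala.concurrent.duration._
 *
 *   // Emits 1, then repeats it every 2 seconds, since the
 *   // source stays silent afterwards
 *   (Observable.now(1) ++ Observable.never)
 *     .echoRepeated(2.seconds)
 * }}}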
*
* @param timeout the window of silence that must pass in order for the
* observable to start echoing the last item
*/
final def echoRepeated(timeout: FiniteDuration): Observable[A] =
new EchoObservable(self, timeout, onlyOnce = false)
/** Creates a new Observable that emits the events of the source and
* then it also emits the given elements (appended to the stream).
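 *
 * {{{
 *   // Yields 1, 2, 3, 4
 *   val stream = Observable(1, 2).endWith(Seq(3, 4))
 * }}}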
*/
final def endWith[B >: A](elems: Seq[B]): Observable[B] =
self.appendAll(Observable.fromIterable(elems))
/** Concatenates the source with another observable.
*
* Ordering of subscription is preserved, so the second observable
* starts only after the source observable is completed
* successfully with an `onComplete`. On the other hand, the second
* observable is never subscribed if the source completes with an
* error.
*
* == Visual Example ==
*
* <pre>
* streamA: a1 -- -- a2 -- -- a3 -- a4 -- --
* streamB: b1 -- -- b2 -- b3 -- -- -- -- b4
*
* result: a1, a2, a3, a4, b1, b2, b3, b4
* </pre>
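 *
 * {{{
 *   // Yields 1, 2, 3, 4
 *   val stream = Observable(1, 2) ++ Observable(3, 4)
 * }}}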
*/
final def ++[B >: A](other: => Observable[B]): Observable[B] =
appendAll(Observable.defer(other))
/** A strict variant of [[++]].
*/
final def appendAll[B >: A](other: Observable[B]): Observable[B] =
new ConcatObservable[B](self, other)
/** Emits the given exception instead of `onComplete`.
*
 * @param error the exception to emit instead of `onComplete`
 * @return a new Observable that emits the given exception instead of `onComplete`
*/
final def endWithError(error: Throwable): Observable[A] =
self.liftByOperator(new EndWithErrorOperator[A](error))
/** Returns an observable that emits a single Throwable, in case an
* error was thrown by the source, otherwise it isn't going to emit
* anything.
*/
final def failed: Observable[Throwable] =
self.liftByOperator(FailedOperator)
/** Alias for [[headOrElse]]. */
final def firstOrElse[B >: A](default: => B): Observable[B] =
headOrElse(default)
/** Emits the first element emitted by the source, or otherwise if the
* source is completed without emitting anything, then the
* `default` is emitted.
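 *
 * {{{
 *   // Yields 10
 *   val stream = Observable.empty[Int].headOrElse(10)
 * }}}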
*/
final def headOrElse[B >: A](default: => B): Observable[B] =
head.foldLeft(Option.empty[B])((_, elem) => Some(elem)).map {
case Some(elem) => elem
case None => default
}
/** Returns a new observable that applies the given function
* to each item emitted by the source and emits the result.
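 *
 * {{{
 *   // Yields 2, 4, 6
 *   val stream = Observable(1, 2, 3).map(_ * 2)
 * }}}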
*/
final def map[B](f: A => B): Observable[B] =
self.liftByOperator(new MapOperator(f))
/** Applies a function that you supply to each item emitted by the
 * source observable, where that function returns a sequence of
 * elements, then concatenates the resulting sequences and emits
 * the results of this concatenation.
*
* ==Example==
* {{{
* Observable(1, 2, 3).concatMapIterable( x => List(x, x * 10, x * 100))
* }}}
*
* == Visual Example ==
*
* <pre>
* stream: 1 -- -- 2 -- -- 3 -- --
* result: 1, 10, 100, 2, 20, 200, 3, 30, 300
* </pre>
*
* @param f is a generator for the sequences being concatenated
*/
final def concatMapIterable[B](f: A => immutable.Iterable[B]): Observable[B] =
self.liftByOperator(new ConcatMapIterableOperator(f))
/** Alias for [[concatMapIterable]]
*
* NOTE: one primary difference between Monix and other Rx /
* ReactiveX implementations is that in Monix `flatMap` is an alias
* for `concatMap` and NOT `mergeMap`.
*/
final def flatMapIterable[B](f: A => immutable.Iterable[B]): Observable[B] =
self.concatMapIterable(f)
/** Alias for [[concatMap]].
*
* NOTE: one primary difference between Monix and other Rx /
* ReactiveX implementations is that in Monix `flatMap` is an alias
* for `concatMap` and NOT `mergeMap`.
*/
final def flatMap[B](f: A => Observable[B]): Observable[B] =
self.concatMap(f)
/** Applies a function that you supply to each item emitted by the
 * source observable, where that function returns observables, then
 * concatenates the resulting streams and emits the results of this
 * concatenation.
*
* This implements the lawful "monadic bind", the `flatMap`
* operation of [[cats.Monad]].
*
* ==Example==
* {{{
* Observable(1, 2, 3).concatMap { x =>
* for {
* _ <- Observable.eval(println(s"Processing $$x"))
* x <- Observable(x, x)
* } yield x
* }
* }}}
*
* $concatMergeDifference
*
* @param f is a generator for the streams being concatenated
* @return $concatReturn
*/
final def concatMap[B](f: A => Observable[B]): Observable[B] =
new ConcatMapObservable[A, B](self, f, null, delayErrors = false)
/** Alias of [[concatMapDelayErrors]]. */
final def flatMapDelayErrors[B](f: A => Observable[B]): Observable[B] =
concatMapDelayErrors(f)
/** Alias of [[switchMap]]. */
final def flatMapLatest[B](f: A => Observable[B]): Observable[B] =
self.switchMap(f)
/** Applies a binary operator to a start value and to elements
* produced by the source observable, going from left to right,
* producing and concatenating observables along the way.
*
* It's the combination between [[scan]] and [[flatMap]].
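 *
 * Example, emitting running sums from inner streams:
 * {{{
 *   // Yields 1, 3, 6
 *   val stream = Observable(1, 2, 3)
 *     .flatScan(0)((acc, a) => Observable.now(acc + a))
 * }}}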
*
* @see [[flatScan0]] for the version that emits seed element at the beginning
*/
final def flatScan[R](seed: => R)(op: (R, A) => Observable[R]): Observable[R] =
new FlatScanObservable[A, R](self, () => seed, op, delayErrors = false)
/** Applies a binary operator to a start value and to elements
* produced by the source observable, going from left to right,
* producing and concatenating observables along the way.
*
* It's the combination between [[scan0]] and [[flatMap]].
*/
final def flatScan0[R](seed: => R)(op: (R, A) => Observable[R]): Observable[R] =
Observable.eval(seed).flatMap(s => s +: flatScan(s)(op))
/** Version of [[flatScan]] that delays the errors from the emitted
* streams until the source completes.
*
* $delayErrorsDescription
*
* @see [[flatScan]]
*/
final def flatScanDelayErrors[R](seed: => R)(op: (R, A) => Observable[R]): Observable[R] =
new FlatScanObservable[A, R](self, () => seed, op, delayErrors = true)
/** Version of [[flatScan0]] that delays the errors from the emitted
* streams until the source completes.
*
* $delayErrorsDescription
*
* @see [[flatScan0]]
*/
final def flatScan0DelayErrors[R](seed: => R)(op: (R, A) => Observable[R]): Observable[R] =
Observable.eval(seed).flatMap(s => s +: flatScanDelayErrors(s)(op))
/** $concatDescription
*
* Alias for [[Observable!.concat concat]].
*
* @return $concatReturn
*/
final def flatten[B](implicit ev: A <:< Observable[B]): Observable[B] =
concat
/** $concatDescription
*
* ==Equivalence with concatMap==
*
* The `concat` operation is basically `concatMap` with the
* identity function, as you can count on this equivalence:
*
* `stream.concat <-> stream.concatMap(x => x)`
*
* == Visual Example ==
*
* <pre>
* streamA: a1 -- -- a2 -- -- a3 -- a4 -- --
* streamB: b1 -- -- b2 -- b3 -- -- -- -- b4
*
* result: a1, a2, a3, a4, b1, b2, b3, b4
* </pre>
* @return $concatReturn
*/
final def concat[B](implicit ev: A <:< Observable[B]): Observable[B] =
concatMap[B](x => x)
/** Alias for [[concatDelayErrors]]. */
final def flattenDelayErrors[B](implicit ev: A <:< Observable[B]): Observable[B] =
concatDelayErrors
/** Version of [[Observable!.concat concat]] that delays errors emitted by child
* observables until the stream completes.
*
* $delayErrorsDescription
*
* ==Example==
*
* {{{
* val dummy1 = new RuntimeException("dummy1")
* val dummy2 = new RuntimeException("dummy2")
*
* val stream = Observable(
* Observable(1).endWithError(dummy1),
* Observable.raiseError(dummy2),
* Observable(2, 3)
* )
*
* val concatenated =
* stream.concatDelayErrors
* }}}
*
* The resulting stream in this example emits `1, 2, 3` in order
* and then completes with a `CompositeException` of both `dummy1`
* and `dummy2`.
*
* @return $concatReturn
*/
final def concatDelayErrors[B](implicit ev: A <:< Observable[B]): Observable[B] =
concatMapDelayErrors(x => x)
/** Applies a function that you supply to each item emitted by the
 * source observable, where that function returns observables, then
 * concatenates the resulting streams and emits the results of this
 * concatenation.
*
* $delayErrorsDescription
*
* ==Example==
*
* {{{
* val dummy1 = new RuntimeException("dummy1")
* val dummy2 = new RuntimeException("dummy2")
*
* Observable(1, 2, 3).concatMapDelayErrors {
* case 1 => Observable(1).endWithError(dummy1)
* case 2 => Observable.raiseError(dummy2)
* case x => Observable(x, x)
* }
* }}}
*
* The resulting stream in this example emits `1, 3, 3` in order
* and then completes with a `CompositeException` of both `dummy1`
* and `dummy2`.
*
* @param f is a generator for the streams being concatenated
* @return $concatReturn
*/
final def concatMapDelayErrors[B](f: A => Observable[B]): Observable[B] =
new ConcatMapObservable[A, B](self, f, null, delayErrors = true)
/** Alias for [[switch]]. */
final def flattenLatest[B](implicit ev: A <:< Observable[B]): Observable[B] =
self.switch
/** Convert an observable that emits observables into a single
* observable that emits the items emitted by the
* most-recently-emitted of those observables.
*
* Similar with [[flatten]], however the source isn't
* back-pressured when emitting new events. Instead new events
* being emitted are cancelling the active child observables.
*
* ==Equivalence with switchMap==
*
* The `switch` operation can be expressed in terms of [[switchMap]],
* as we have this equivalence:
*
* `stream.switch <-> stream.switchMap(x => x)`
*
* @see the description of [[switchMap]] for an example.
*/
final def switch[B](implicit ev: A <:< Observable[B]): Observable[B] =
self.switchMap(x => x)
/** Returns an Observable that emits a single boolean, either true, in
* case the given predicate holds for all the items emitted by the
 * source, or false in case at least one item does not satisfy the
 * given predicate.
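 *
 * {{{
 *   // Yields true
 *   val stream = Observable(2, 4, 6).forall(_ % 2 == 0)
 * }}}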
*
* @param p is a function that evaluates the items emitted by the source
* Observable, returning `true` if they pass the filter
* @return an Observable that emits only true or false in case the given
* predicate holds or not for all the items
*/
final def forall(p: A => Boolean): Observable[Boolean] =
exists(e => !p(e)).map(r => !r)
/** Returns an Observable which emits a single value, either true, in
* case the given predicate holds for at least one item, or false
* otherwise.
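 *
 * {{{
 *   // Yields true
 *   val stream = Observable(1, 2, 3).exists(_ == 2)
 * }}}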
*
* @param p is a function that evaluates the items emitted by the
* source Observable, returning `true` if they pass the
* filter
* @return an Observable that emits only true or false in case
* the given predicate holds or not for at least one item
*/
final def exists(p: A => Boolean): Observable[Boolean] =
find(p).foldLeft(false)((_, _) => true)
/** Groups the items emitted by an Observable according to a specified
* criterion, and emits these grouped items as GroupedObservables,
* one GroupedObservable per group.
*
* Note: A [[monix.reactive.observables.GroupedObservable GroupedObservable]]
* will cache the items it is to emit until such time as it is
* subscribed to. For this reason, in order to avoid memory leaks,
* you should not simply ignore those GroupedObservables that do
* not concern you. Instead, you can signal to them that they may
* discard their buffers by doing something like `source.take(0)`.
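 *
 * Example (a sketch, grouping numbers by parity):
 * {{{
 *   // Emits (key, value) pairs such as (1, 1), (0, 2), (1, 3), ...
 *   Observable(1, 2, 3, 4, 5)
 *     .groupBy(_ % 2)
 *     .mergeMap(group => group.map(i => (group.key, i)))
 * }}}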
*
* @param keySelector a function that extracts the key for each item
*/
final def groupBy[K](keySelector: A => K)(implicit
os: Synchronous[Nothing] = OverflowStrategy.Unbounded): Observable[GroupedObservable[K, A]] =
self.liftByOperator(new GroupByOperator[A, K](os, keySelector))
/** Given a routine, makes sure to execute it whenever the current
 * stream reaches the end, whether successfully, in error, or when
 * cancelled.
*
* Implements `cats.effect.Bracket.guarantee`.
*
* Example: {{{
* import monix.eval.Task
*
* Observable.suspend(???).guarantee(Task.eval {
* println("Releasing resources!")
* })
* }}}
*
 * @param f is the task to execute when the streaming terminates, for any reason
*/
final def guarantee(f: Task[Unit]): Observable[A] =
guaranteeCase(_ => f)
/** Version of [[guarantee]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
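 *
 * {{{
 *   import cats.effect.IO
 *
 *   Observable(1, 2, 3)
 *     .guaranteeF(IO(println("Releasing resources!")))
 * }}}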
*/
final def guaranteeF[F[_]](f: F[Unit])(implicit F: TaskLike[F]): Observable[A] =
guarantee(F(f))
/** Returns a new `Observable` in which `f` is scheduled to be executed
* when the source is completed, in success, error or when cancelled.
*
* Implements `cats.effect.Bracket.guaranteeCase`.
*
* This would typically be used to ensure that a finalizer
* will run at the end of the stream.
*
* Example: {{{
* import cats.effect.ExitCase
* import monix.eval.Task
*
* val stream = Observable.suspend(???).guaranteeCase(err => Task {
* err match {
* case ExitCase.Completed =>
* println("Completed successfully!")
* case ExitCase.Error(e) =>
* e.printStackTrace()
* case ExitCase.Canceled =>
* println("Was stopped early!")
* }
* })
* }}}
*
* NOTE this is using `cats.effect.ExitCase` to signal the termination
* condition, like this:
*
* - if completed via `onComplete` or via `Stop` signalled by the
* consumer, then the function receives `ExitCase.Completed`
* - if completed via `onError` or in certain cases in which errors
* are detected (e.g. the consumer returns an error), then the function
* receives `ExitCase.Error(e)`
* - if the subscription was cancelled, then the function receives
* `ExitCase.Canceled`
*
* In other words `Completed` is for normal termination conditions,
* `Error` is for exceptions being detected and `Canceled` is for
* when the subscription gets canceled.
*
* @param f is the finalizer to execute when streaming is terminated, by
* successful completion, error or cancellation; for specifying the
* side effects to use
*/
final def guaranteeCase(f: ExitCase[Throwable] => Task[Unit]): Observable[A] =
new GuaranteeCaseObservable[A](this, f)
/** Version of [[guaranteeCase]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def guaranteeCaseF[F[_]](f: ExitCase[Throwable] => F[Unit])(implicit F: TaskLike[F]): Observable[A] =
guaranteeCase(e => F(f(e)))
/** Alias for [[completed]]. Ignores all items emitted by
 * the source and only calls `onComplete` or `onError`.
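 *
 * {{{
 *   // Emits no elements, it just completes
 *   val stream = Observable(1, 2, 3).ignoreElements
 * }}}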
*
 * @return an empty sequence that only calls `onComplete` or `onError`,
 * based on which one is called by the source Observable
*/
final def ignoreElements: Observable[Nothing] =
self.liftByOperator(CompletedOperator)
/** Creates a new observable from this observable and another given
* observable by interleaving their items into a strictly
* alternating sequence.
*
* So the first item emitted by the new observable will be the item
* emitted by `self`, the second item will be emitted by the other
* observable, and so forth; when either `self` or `other` calls
* `onCompletes`, the items will then be directly coming from the
* observable that has not completed; when `onError` is called by
* either `self` or `other`, the new observable will call `onError`
* and halt.
*
* See [[Observable!.merge merge]] for a more relaxed alternative that doesn't emit
* items in strict alternating sequence.
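 *
 * {{{
 *   // Yields 1, 10, 2, 20, 3, 30
 *   val stream = Observable(1, 2, 3)
 *     .interleave(Observable(10, 20, 30))
 * }}}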
*
* @param other is an observable that interleaves with the source
* @return a new observable sequence that alternates emission of
* the items from both child streams
*/
final def interleave[B >: A](other: Observable[B]): Observable[B] =
new Interleave2Observable(self, other)
/** Only emits the last element emitted by the source observable,
* after which it's completed immediately.
*/
final def last: Observable[A] = takeLast(1)
/** Creates a new observable that only emits the last `n` elements
* emitted by the source.
*
* In case the source triggers an error, then the underlying
* buffer gets dropped and the error gets emitted immediately.
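 *
 * {{{
 *   // Yields 3, 4, 5
 *   val stream = Observable(1, 2, 3, 4, 5).takeLast(3)
 * }}}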
*/
final def takeLast(n: Int): Observable[A] =
if (n <= 0) Observable.empty else new TakeLastObservable[A](self, n)
/** Maps elements from the source using a function that can do
* asynchronous processing by means of [[monix.eval.Task Task]].
*
* Example:
* {{{
* import monix.eval.Task
* import scala.concurrent.duration._
*
* Observable.range(0, 100)
* .mapEval(x => Task(x).delayExecution(1.second))
* }}}
*
* @see [[mapEvalF]] for a version that works with a generic
* `F[_]` (e.g. `cats.effect.IO`, Scala's `Future`),
* powered by [[monix.eval.TaskLike]]
*/
final def mapEval[B](f: A => Task[B]): Observable[B] =
new MapTaskObservable[A, B](self, f)
/** Version of [[mapEval]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* Example:
* {{{
* import cats.implicits._
* import cats.effect.IO
* import cats.effect.Timer
* import scala.concurrent.duration._
* import monix.execution.Scheduler.Implicits.global
* import monix.catnap.SchedulerEffect
* // Needed for IO.sleep
* implicit val timer: Timer[IO] = SchedulerEffect.timerLiftIO[IO](global)
*
* Observable.range(0, 100).mapEvalF { x =>
* IO.sleep(1.second) *> IO(x)
* }
* }}}
*
* @see [[mapEval]] for a version specialized for
* [[monix.eval.Task Task]]
*/
final def mapEvalF[F[_], B](f: A => F[B])(implicit F: TaskLike[F]): Observable[B] =
mapEval(a => Task.from(f(a))(F))
/** Given a mapping function that maps events to [[monix.eval.Task tasks]],
* applies it in parallel on the source, but with a specified
* `parallelism`, which indicates the maximum number of tasks that
 * can be executed in parallel, while preserving the original order
 * of the events.
*
* Similar in spirit with
* [[monix.reactive.Consumer.loadBalance[A,R](parallelism* Consumer.loadBalance]],
* but expressed as an operator that executes [[monix.eval.Task Task]]
* instances in parallel.
*
* Note that when the specified `parallelism` is 1, it has the same
* behavior as [[mapEval]].
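 *
 * Example (a sketch; the parallelism level is illustrative):
 * {{{
 *   import monix.eval.Task
 *
 *   // At most 4 tasks at a time, results keep the source order
 *   Observable.range(0, 100)
 *     .mapParallelOrdered(parallelism = 4)(x => Task(x * 2))
 * }}}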
*
* @param parallelism is the maximum number of tasks that can be executed
* in parallel, over which the source starts being
* back-pressured
*
* @param f is the mapping function that produces tasks to execute
* in parallel, which will eventually produce events for the
* resulting observable stream
*
* @see [[mapParallelUnordered]] for a variant that does not preserve order
* which may lead to faster execution times
* @see [[mapEval]] for serial execution
*/
final def mapParallelOrdered[B](parallelism: Int)(f: A => Task[B])(implicit
os: OverflowStrategy[B] = OverflowStrategy.Default): Observable[B] =
new MapParallelOrderedObservable[A, B](self, parallelism, f, os)
/** Version of [[mapParallelOrdered]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* @param parallelism is the maximum number of tasks that can be executed
* in parallel, over which the source starts being
* back-pressured
*
* @param f is the mapping function that produces tasks to execute
* in parallel, which will eventually produce events for the
* resulting observable stream
*
* @see [[mapParallelUnorderedF]] for a variant that does not preserve order
* which may lead to faster execution times
* @see [[mapEvalF]] for serial execution
*/
final def mapParallelOrderedF[F[_], B](parallelism: Int)(
f: A => F[B])(implicit os: OverflowStrategy[B] = OverflowStrategy.Default, F: TaskLike[F]): Observable[B] =
new MapParallelOrderedObservable[A, B](self, parallelism, f.andThen(F.apply), os)
/** Given a mapping function that maps events to [[monix.eval.Task tasks]],
* applies it in parallel on the source, but with a specified
* `parallelism`, which indicates the maximum number of tasks that
* can be executed in parallel.
*
* Similar in spirit with
* [[monix.reactive.Consumer.loadBalance[A,R](parallelism* Consumer.loadBalance]],
* but expressed as an operator that executes [[monix.eval.Task Task]]
* instances in parallel.
*
* Note that when the specified `parallelism` is 1, it has the same
* behavior as [[mapEval]].
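 *
 * Example (a sketch; results may come out of order):
 * {{{
 *   import monix.eval.Task
 *
 *   Observable.range(0, 100)
 *     .mapParallelUnordered(parallelism = 4)(x => Task(x * 2))
 * }}}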
*
* @param parallelism is the maximum number of tasks that can be executed
* in parallel, over which the source starts being
* back-pressured
*
* @param f is the mapping function that produces tasks to execute
* in parallel, which will eventually produce events for the
* resulting observable stream
*
* @see [[mapParallelOrdered]] for a variant that does preserve order
* @see [[mapEval]] for serial execution
*/
final def mapParallelUnordered[B](parallelism: Int)(f: A => Task[B])(implicit
os: OverflowStrategy[B] = OverflowStrategy.Default): Observable[B] =
new MapParallelUnorderedObservable[A, B](self, parallelism, f, os)
/** Version of [[mapParallelUnordered]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
 *   - ...
 *
 * Note that when the specified `parallelism` is 1, it has the same
* behavior as [[mapEval]].
*
* @param parallelism is the maximum number of tasks that can be executed
* in parallel, over which the source starts being
* back-pressured
*
* @param f is the mapping function that produces tasks to execute
* in parallel, which will eventually produce events for the
* resulting observable stream
*
* @see [[mapParallelOrdered]] for a variant that does preserve order
* @see [[mapEval]] for serial execution
*/
final def mapParallelUnorderedF[F[_], B](parallelism: Int)(
f: A => F[B])(implicit os: OverflowStrategy[B] = OverflowStrategy.Default, F: TaskLike[F]): Observable[B] =
new MapParallelUnorderedObservable[A, B](self, parallelism, f.andThen(F.apply), os)
/** Converts the source Observable that emits `A` into an Observable
* that emits `Notification[A]`.
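 *
 * {{{
 *   // Yields OnNext(1), OnNext(2), OnComplete
 *   val stream = Observable(1, 2).materialize
 * }}}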
*/
final def materialize: Observable[Notification[A]] =
self.liftByOperator(new MaterializeOperator[A])
/** Concurrently merges the observables emitted by the source into
* a single observable.
*
* ==Equivalence with mergeMap==
*
* The `merge` operation is [[mergeMap]] with the identity
* function:
*
* `stream.merge <-> stream.mergeMap(x => x)`
*
* $concatMergeDifference
*
* == Visual Example ==
*
* <pre>
* streamA: a1 -- -- a2 -- -- a3 -- a4 -- --
* streamB: b1 -- -- b2 -- b3 -- -- -- -- b4
*
* result: a1, b1, a2, b2, b3, a3, a4, b4
* </pre>
*
* @note $defaultOverflowStrategy
* @return $mergeReturn
*/
final def merge[B](implicit
ev: A <:< Observable[B],
os: OverflowStrategy[B] = OverflowStrategy.Default[B]): Observable[B] =
self.mergeMap(x => x)(os)
/** Concurrently merges the observables emitted by the source with
* the given generator function into a single observable.
*
* $concatMergeDifference
*
* ==Example==
* {{{
* Observable(1, 2, 3).mergeMap { x =>
* Observable.eval(println(s"Processing $$x"))
* .executeAsync
* .flatMap(_ => Observable(x, x))
* }
* }}}
*
* In this example the source will yield 3 streams and those 3
* streams are being subscribed immediately, therefore the order of
* the events will be non-deterministic, as the streams will be
* evaluated concurrently.
*
* == Visual Example ==
*
* <pre>
* streamA: a1 -- -- a2 -- -- a3 -- a4 -- --
* streamB: b1 -- -- b2 -- b3 -- -- -- -- b4
*
* result: a1, b1, a2, b2, b3, a3, a4, b4
* </pre>
* @param f is a generator for the streams that will get merged
* @return $mergeMapReturn
*/
final def mergeMap[B](f: A => Observable[B])(implicit
os: OverflowStrategy[B] = OverflowStrategy.Default): Observable[B] =
new MergeMapObservable[A, B](self, f, os, delayErrors = false)
/** $mergeDescription
*
* $delayErrorsDescription
*
* @note $defaultOverflowStrategy
* @return $mergeReturn
*/
final def mergeDelayErrors[B](implicit
ev: A <:< Observable[B],
os: OverflowStrategy[B] = OverflowStrategy.Default): Observable[B] =
self.mergeMapDelayErrors(x => x)(os)
/** $mergeMapDescription
*
* $delayErrorsDescription
*
* @param f is a generator for the streams that will get merged
* @return $mergeMapReturn
*/
final def mergeMapDelayErrors[B](f: A => Observable[B])(implicit
os: OverflowStrategy[B] = OverflowStrategy.Default): Observable[B] =
new MergeMapObservable[A, B](self, f, os, delayErrors = true)
/** Overrides the default [[monix.execution.Scheduler Scheduler]],
* possibly forcing an asynchronous boundary on subscription
* (if `forceAsync` is set to `true`, the default).
*
* When an `Observable` is subscribed with
* [[Observable.subscribe(subscriber* subscribe]],
* it needs a `Scheduler`, which is going to be injected in the
* processing pipeline, to be used for managing asynchronous
* boundaries, scheduling execution with delay, etc.
*
* Normally the [[monix.execution.Scheduler Scheduler]] gets injected
* implicitly when doing `subscribe`, but this operator overrides
 * the injected scheduler for the given source. And if the source is
* normally using that injected scheduler (given by `subscribe`),
* then the effect will be that all processing will now happen
* on the override.
*
* To put it in other words, in Monix it's usually the consumer and
* not the producer that specifies the scheduler and this operator
* allows for a different behavior.
*
* This operator also subsumes the effects of [[subscribeOn]],
* meaning that the subscription logic itself will start on
* the provided scheduler if `forceAsync = true` (the default).
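 *
 * Example:
 * {{{
 *   import monix.execution.Scheduler
 *   val io = Scheduler.io("my-io")
 *
 *   // Processing happens on the `io` scheduler, regardless of
 *   // the scheduler injected at `subscribe`
 *   Observable(1, 2, 3).executeOn(io)
 * }}}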
*
* @see [[observeOn(s:monix\\.execution\\.Scheduler)* observeOn]]
* and [[subscribeOn]].
*
* @param s is the [[monix.execution.Scheduler Scheduler]] to use
* for overriding the default scheduler and for forcing
* an asynchronous boundary if `forceAsync` is `true`
*
* @param forceAsync indicates whether an asynchronous boundary
* should be forced right before the subscription of the
* source `Observable`, managed by the provided `Scheduler`
*
* @return a new `Observable` that mirrors the source on subscription,
* but that uses the provided scheduler for overriding
* the default and possibly force an extra asynchronous
* boundary on execution
*/
final def executeOn(s: Scheduler, forceAsync: Boolean = true): Observable[A] =
new ExecuteOnObservable[A](self, s, forceAsync)
/** Mirrors the source observable, but upon subscription ensure
* that the evaluation forks into a separate (logical) thread.
*
* The execution is managed by the injected
* [[monix.execution.Scheduler scheduler]] in `subscribe()`.
*/
final def executeAsync: Observable[A] =
new ExecuteAsyncObservable(self)
/** Returns a new observable that will execute the source with a different
* [[monix.execution.ExecutionModel ExecutionModel]].
*
* This allows fine-tuning the options injected by the scheduler
* locally. Example:
*
* {{{
* import monix.execution.ExecutionModel.AlwaysAsyncExecution
*
* val stream = Observable(1, 2, 3)
* .executeWithModel(AlwaysAsyncExecution)
* }}}
*
* @param em is the
* [[monix.execution.ExecutionModel ExecutionModel]]
* that will be used when evaluating the source.
*/
final def executeWithModel(em: ExecutionModel): Observable[A] =
new ExecuteWithModelObservable[A](self, em)
/** Operator that specifies a different
* [[monix.execution.Scheduler Scheduler]], on which subscribers
* will observe events, instead of the default one.
*
* An `Observable` with an applied `observeOn` call will forward
* events into a buffer that uses the specified `Scheduler`
* reference to cycle through events and to make `onNext` calls to
* downstream listeners.
*
* Example:
* {{{
* import monix.execution.Scheduler
* import monix.execution.Scheduler.Implicits.global
* val io = Scheduler.io("my-io")
*
* Observable(1, 2, 3).map(_ + 1)
* .observeOn(io)
* .foreach(x => println(x))
* }}}
*
* In the above example the first `map` (whatever comes before the
* `observeOn` call) gets executed using the default `Scheduler`
* (might execute on the current thread even), however the
* `foreach` that's specified after `observeOn` will get executed
* on the indicated `Scheduler`.
*
* NOTE: this operator does not guarantee that downstream listeners
* will actually use the specified `Scheduler` to process events,
 * because this depends on the rest of the pipeline. E.g. this
 * will not work as expected:
*
* {{{
* import monix.reactive.OverflowStrategy.Unbounded
*
* Observable.suspend(???)
* .observeOn(io).asyncBoundary(Unbounded)
* }}}
*
* This sample might not do what a user of `observeOn` would
* want. Indeed the implementation will use the provided `io`
* reference for calling `onNext` / `onComplete` / `onError`
 * events, however because of the asynchronous boundary created
 * right after, the actual listeners will probably end up being
 * executed on a different `Scheduler`.
*
* The underlying implementation uses
* [[monix.reactive.observers.BufferedSubscriber a buffer]]
* to forward events. The
* [[monix.reactive.OverflowStrategy OverflowStrategy]]
* being applied is the
* [[monix.reactive.OverflowStrategy.Default default one]].
*
* @see [[observeOn[B>:A](s:monix\\.execution\\.Scheduler,os:monix\\.reactive\\.OverflowStrategy[B]* observeOn(Scheduler, OverflowStrategy)]]
* for the version that allows customizing the
* [[monix.reactive.OverflowStrategy OverflowStrategy]]
* being used by the underlying buffer.
*
* @param s is the alternative `Scheduler` reference to use
* for observing events
*/
final def observeOn(s: Scheduler): Observable[A] =
observeOn(s, OverflowStrategy.Default)
/** Operator that specifies a different
* [[monix.execution.Scheduler Scheduler]], on which subscribers
* will observe events, instead of the default one.
*
* This overloaded version of `observeOn` takes an extra
* [[monix.reactive.OverflowStrategy OverflowStrategy]]
* parameter specifying the behavior of the underlying buffer.
*
* @see [[observeOn(s:monix\\.execution\\.Scheduler)* observeOn(Scheduler)]] for
* the version that does not take an `OverflowStrategy` parameter.
*
* @param s is the alternative `Scheduler` reference to use
* for observing events
* @param os is the [[monix.reactive.OverflowStrategy OverflowStrategy]]
* to apply to the underlying buffer
*/
final def observeOn[B >: A](s: Scheduler, os: OverflowStrategy[B]): Observable[B] =
new ObserveOnObservable[B](self, s, os)
/** If the connection is [[monix.execution.Cancelable.cancel cancelled]]
* then trigger a `CancellationException`.
*
* A connection can be cancelled with the help of the
* [[monix.execution.Cancelable Cancelable]]
* returned on [[Observable.subscribe(subscriber* subscribe]].
*
* Because the cancellation is effectively concurrent with the
* signals the [[monix.reactive.Observer Observer]] receives and because
* we need to uphold the contract, this operator will effectively
* synchronize access to [[monix.reactive.Observer.onNext onNext]],
* [[monix.reactive.Observer.onComplete onComplete]] and
* [[monix.reactive.Observer.onError onError]]. It will also watch
* out for asynchronous [[monix.execution.Ack.Stop Stop]] events.
*
* In other words, this operator does heavy synchronization, can
* prove to be inefficient and you should avoid using it because
* the signaled error can interfere with functionality from other
* operators that use cancellation internally and cancellation in
* general is a side-effecting operation that should be avoided,
* unless it's necessary.
*/
final def onCancelTriggerError: Observable[A] =
new OnCancelTriggerErrorObservable[A](self)
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* the streaming of events continues with the specified backup
* sequence.
*
* The created Observable mirrors the behavior of the source in
* case the source does not end with an error.
*
* NOTE that compared with `onErrorResumeNext` from Rx.NET, the
* streaming is not resumed in case the source is terminated
* normally with an `onComplete`.
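 *
 * {{{
 *   val dummy = new RuntimeException("dummy")
 *
 *   // Yields 1, 2, 10, 20
 *   val stream = (Observable(1, 2) ++ Observable.raiseError[Int](dummy))
 *     .onErrorFallbackTo(Observable(10, 20))
 * }}}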
*
* @param that is a backup sequence that's being subscribed
* in case the source terminates with an error.
*/
final def onErrorFallbackTo[B >: A](that: Observable[B]): Observable[B] =
self.onErrorHandleWith(_ => that)
/** Returns an observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which
 * case the streaming of events falls back to an observable
* emitting a single element generated by the backup function.
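 *
 * {{{
 *   val dummy = new RuntimeException("dummy")
 *
 *   // Yields 1, 2, -1
 *   val stream = (Observable(1, 2) ++ Observable.raiseError[Int](dummy))
 *     .onErrorHandle(_ => -1)
 * }}}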
*
* See [[onErrorRecover]] for the version that takes a
* partial function as a parameter.
*
 * @param f is a function that matches errors with a
* backup element that is emitted when the source
* throws an error.
*/
final def onErrorHandle[B >: A](f: Throwable => B): Observable[B] =
onErrorHandleWith { elem =>
Observable.now(f(elem))
}
/** Returns an observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which
 * case the streaming of events falls back to an observable
* emitting a single element generated by the backup function.
*
* The created Observable mirrors the behavior of the source
* in case the source does not end with an error or if the
* thrown `Throwable` is not matched.
*
* See [[onErrorHandle]] for the version that takes a
* total function as a parameter.
*
* @param pf is a function that matches errors with a
* backup element that is emitted when the source
* throws an error.
*/
final def onErrorRecover[B >: A](pf: PartialFunction[Throwable, B]): Observable[B] =
onErrorHandleWith(ex => (pf.andThen(b => Observable.now(b)).applyOrElse(ex, Observable.raiseError _)))
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* the streaming of events continues with the specified backup
* sequence generated by the given function.
*
* The created Observable mirrors the behavior of the source in
* case the source does not end with an error or if the thrown
* `Throwable` is not matched.
*
* See [[onErrorHandleWith]] for the version that takes a
* total function as a parameter.
*
* @param pf is a function that matches errors with a
* backup throwable that is subscribed when the source
* throws an error.
*/
final def onErrorRecoverWith[B >: A](pf: PartialFunction[Throwable, Observable[B]]): Observable[B] =
onErrorHandleWith(ex => pf.applyOrElse(ex, Observable.raiseError _))
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* the streaming of events continues with the specified backup
* sequence generated by the given function.
*
* See [[onErrorRecoverWith]] for the version that takes a
* partial function as a parameter.
*
* @param f is a function that matches errors with a
* backup throwable that is subscribed when the source
* throws an error.
*/
final def onErrorHandleWith[B >: A](f: Throwable => Observable[B]): Observable[B] =
new OnErrorRecoverWithObservable(self, f)
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* it tries subscribing to the source again in the hope that it
* will complete without an error.
*
* The number of retries is limited by the specified `maxRetries`
* parameter, so for an Observable that always ends in error the
* total number of subscriptions that will eventually happen is
* `maxRetries + 1`.
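 *
 * {{{
 *   val dummy = new RuntimeException("dummy")
 *
 *   // Subscribes to the source at most 4 times (1 + 3 retries)
 *   Observable.raiseError[Int](dummy).onErrorRestart(3)
 * }}}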
*/
final def onErrorRestart(maxRetries: Long): Observable[A] = {
require(maxRetries >= 0, "maxRetries should be >= 0")
new OnErrorRetryCountedObservable(self, maxRetries)
}
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* it tries subscribing to the source again in the hope that it
* will complete without an error.
*
* The given predicate establishes if the subscription should be
* retried or not.
*/
final def onErrorRestartIf(p: Throwable => Boolean): Observable[A] =
new OnErrorRetryIfObservable[A](self, p)
/** Returns an Observable that mirrors the behavior of the source,
* unless the source is terminated with an `onError`, in which case
* it tries subscribing to the source again in the hope that it
* will complete without an error.
*
* NOTE: The number of retries is unlimited, so something like
 * `Observable.raiseError(new RuntimeException).onErrorRestartUnlimited`
* will loop forever.
*/
final def onErrorRestartUnlimited: Observable[A] =
new OnErrorRetryCountedObservable(self, -1)
/** Given a [[monix.reactive.Pipe Pipe]], transform
* the source observable with it.
*/
final def pipeThrough[I >: A, B](pipe: Pipe[I, B]): Observable[B] =
new PipeThroughObservable(self, pipe)
/** Returns an observable that emits the results of invoking a
* specified selector on items emitted by a
* [[monix.reactive.observables.ConnectableObservable ConnectableObservable]]
* backed by [[monix.reactive.subjects.PublishSubject PublishSubject]]
* which shares a single subscription to the underlying sequence.
*
 * This operator takes a possibly pure Observable, transforms it into
 * a hot Observable in the scope of the supplied function and then
 * returns a pure Observable again.
*
* ==Example==
*
* {{{
* import monix.reactive._
* import monix.eval.Task
* import scala.concurrent.duration._
* implicit val os: OverflowStrategy[Nothing] = OverflowStrategy.Default
*
* val obs = Observable(1, 2, 3)
* .doOnNext(i => Task(println(s"Produced $$i")).delayExecution(1.second))
*
* def consume(name: String, obs: Observable[Int]): Observable[Unit] =
* obs.mapEval(i => Task(println(s"$$name: got $$i")))
*
* obs.publishSelector { hot =>
* Observable(
* consume("Consumer 1", hot),
* consume("Consumer 2", hot).delayExecution(2.second)
* ).merge
* }
*
* }}}
*
* ==Output==
*
* Produced 1
* Consumer 1: got 1
* Produced 2
* Consumer 1: got 2
* Consumer 2: got 2
* Produced 3
* Consumer 1: got 3
* Consumer 2: got 3
*
 * Note how Consumer 2 received fewer elements because it subscribed later.
*
* @param f is a selector function that can use the multicasted source sequence
* as many times as needed, without causing multiple subscriptions
* to the source sequence. Observers to the given source will
* receive all notifications of the source from the time of the
* subscription forward.
* @see [[pipeThroughSelector]] for a version that allows specifying a type of underlying Subject.
*/
final def publishSelector[R](f: Observable[A] => Observable[R]): Observable[R] =
pipeThroughSelector(Pipe.publish[A], f)
/** Returns an observable that emits the results of invoking a
* specified selector on items emitted by a
* [[monix.reactive.observables.ConnectableObservable ConnectableObservable]],
* which shares a single subscription to the underlying sequence.
*
 * This operator takes a possibly pure Observable, transforms it into
 * a hot Observable in the scope of the supplied function and then
 * returns a pure Observable again. The function allows specifying
 * the underlying [[monix.reactive.subjects.Subject]] by means of
 * [[monix.reactive.Pipe]].
*
* ==Example==
*
* {{{
* import monix.reactive._
* import monix.eval.Task
* import scala.concurrent.duration._
* implicit val os: OverflowStrategy[Nothing] = OverflowStrategy.Default
*
* val obs = Observable(1, 2, 3)
* .doOnNext(i => Task(println(s"Produced $$i")).delayExecution(1.second))
*
* def consume(name: String, obs: Observable[Int]): Observable[Unit] =
* obs.mapEval(i => Task(println(s"$$name: got $$i")))
*
* obs.pipeThroughSelector(Pipe.replay[Int], { (hot: Observable[Int]) =>
* Observable(
* consume("Consumer 1", hot),
* consume("Consumer 2", hot).delayExecution(2.second)
* ).merge
* })
*
* }}}
*
* ==Output==
*
* Produced 1
* Consumer 1: got 1
* Consumer 2: got 1
* Produced 2
* Consumer 1: got 2
* Consumer 2: got 2
* Produced 3
* Consumer 1: got 3
* Consumer 2: got 3
*
 * Note how Consumer 2 received the same number of elements as
 * Consumer 1, despite subscribing later, because of the underlying
 * `ReplaySubject`.
*
* @param pipe is the [[Pipe]] used to transform the source into a multicast
* (hot) observable that can be shared in the selector function
* @param f is a selector function that can use the multicasted source sequence
* as many times as needed, without causing multiple subscriptions
* to the source sequence. Observers to the given source will
* receive all notifications of the source from the time of the
* subscription forward.
*/
final def pipeThroughSelector[S >: A, B, R](pipe: Pipe[S, B], f: Observable[B] => Observable[R]): Observable[R] =
new PipeThroughSelectorObservable[S, B, R](self, pipe, f)
/** Applies a binary operator to the elements of this Observable,
 * going left to right, and returns a new Observable that emits
 * only one item before `onComplete`.
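 *
 * {{{
 *   // Yields 10
 *   val stream = Observable(1, 2, 3, 4).reduce(_ + _)
 * }}}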
*/
final def reduce[B >: A](op: (B, B) => B): Observable[B] =
self.liftByOperator(new ReduceOperator[B](op))
/** Repeats the items emitted by the source continuously. It
* caches the generated items until `onComplete` and repeats them
* forever.
*
* It terminates either on error or if the source is empty.
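 *
 * {{{
 *   // Yields 1, 2, 3, 1, 2, 3, 1, ...
 *   val stream = Observable(1, 2, 3).repeat
 * }}}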
*/
final def repeat: Observable[A] =
new RepeatSourceObservable[A](self)
/** Keeps restarting / resubscribing the source until the predicate
 * returns `true` for the first emitted element, after which
* it starts mirroring the source.
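 *
 * Example (a sketch; the source is re-evaluated on each subscription):
 * {{{
 *   import scala.util.Random
 *
 *   // Keeps resubscribing until the first value is even
 *   val stream = Observable.eval(Random.nextInt(10))
 *     .restartUntil(_ % 2 == 0)
 * }}}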
*/
final def restartUntil(p: A => Boolean): Observable[A] =
new RestartUntilObservable[A](self, p)
/** Emits the most recent items emitted by the source within
* periodic time intervals. If no new value has been emitted since
* the last time it was sampled, it signals the last emitted value
* anyway.
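 *
 * Example (the timings are illustrative):
 * {{{
 *   import scala.concurrent.duration._
 *
 *   // Emits the most recent tick once per second, repeating
 *   // the last one when the source goes silent
 *   Observable.intervalAtFixedRate(300.millis)
 *     .sampleRepeated(1.second)
 * }}}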
*
* @see [[sample]] for a variant that doesn't repeat the last value on silence
* @see [[sampleRepeatedBy]] for fine control
* @param period the timespan at which sampling occurs
*/
final def sampleRepeated(period: FiniteDuration): Observable[A] =
self.sampleRepeatedBy(Observable.intervalAtFixedRate(period, period))
/** Returns an observable that, when the specified sampler observable
* emits an item or completes, emits the most recently emitted item
* (if any) emitted by the source Observable since the previous
* emission from the sampler observable. If no new value has been
* emitted since the last time it was sampled, it signals the last
* emitted value anyway.
*
* @see [[sampleBy]] for a variant that doesn't repeat the last value on silence
* @see [[sampleRepeated]] for a periodic sampling
 * @param sampler is the Observable to use for sampling the source Observable
*/
final def sampleRepeatedBy[B](sampler: Observable[B]): Observable[A] =
new ThrottleLastObservable[A, B](self, sampler, shouldRepeatOnSilence = true)
/** Applies a binary operator to a start value and all elements of
* this Observable, going left to right and returns a new
* Observable that emits on each step the result of the applied
* function.
*
* Similar to [[foldLeft]], but emits the state on each
* step. Useful for modeling finite state machines.
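    *
    * Example:
    *
    * {{{
    *   // Yields 1, 3, 6, 10
    *   Observable(1, 2, 3, 4).scan(0)(_ + _)
    * }}}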
*
    * @see [[scan0]] for the version that emits the seed element at the beginning
*/
final def scan[S](seed: => S)(op: (S, A) => S): Observable[S] =
new ScanObservable[A, S](self, () => seed, op)
/** Applies a binary operator to a start value and all elements of
* this Observable, going left to right and returns a new
* Observable that emits on each step the result element of
* the applied function.
*
* Similar to [[scan]], but the supplied function returns a tuple
    * of the next accumulator state and the result value emitted by
* the returned observable.
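    *
    * Example (a minimal sketch, pairing elements with their index):
    *
    * {{{
    *   // Yields (0, "a"), (1, "b"), (2, "c")
    *   Observable("a", "b", "c")
    *     .mapAccumulate(0)((idx, elem) => (idx + 1, (idx, elem)))
    * }}}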
*/
final def mapAccumulate[S, R](seed: => S)(op: (S, A) => (S, R)): Observable[R] =
new MapAccumulateObservable[A, S, R](self, () => seed, op)
/** Applies a binary operator to a start value and all elements of
* this Observable, going left to right and returns a new
* Observable that emits on each step the result of the applied
* function.
*
    * This is a version of [[scan]] that emits the seed element at the
    * beginning, similar to `scanLeft` on Scala collections.
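    *
    * Example:
    *
    * {{{
    *   // Yields 0, 1, 3, 6
    *   Observable(1, 2, 3).scan0(0)(_ + _)
    * }}}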
*/
final def scan0[S](seed: => S)(op: (S, A) => S): Observable[S] =
Observable.eval(seed).flatMap(s => s +: scan(s)(op))
/** Applies a binary operator to a start value and all elements of
* this stream, going left to right and returns a new stream that
* emits on each step the result of the applied function.
*
    * Similar to [[scan]], but this can suspend and evaluate
* side effects with an `F[_]` data type that implements the
* `cats.effect.Effect` type class, thus allowing for lazy or
* asynchronous data processing.
*
* Similar to [[foldLeft]] and [[foldWhileLeft]], but emits the
* state on each step. Useful for modeling finite state machines.
*
* Example showing how state can be evolved and acted upon:
*
* {{{
* // Using cats.effect.IO for evaluating our side effects
* import cats.effect.IO
*
* sealed trait State[+A] { def count: Int }
* case object Init extends State[Nothing] { def count = 0 }
* case class Current[A](current: Option[A], count: Int)
* extends State[A]
*
* case class Person(id: Int, name: String)
*
* // TODO: to implement!
* def requestPersonDetails(id: Int): IO[Option[Person]] =
* IO.raiseError(new NotImplementedError)
*
* // TODO: to implement
* val source: Observable[Int] =
* Observable.raiseError(new NotImplementedError)
*
* // Initial state
* val seed = IO.pure(Init : State[Person])
*
* val scanned = source.scanEvalF(seed) { (state, id) =>
* requestPersonDetails(id).map { person =>
* state match {
* case Init =>
* Current(person, 1)
* case Current(_, count) =>
* Current(person, count + 1)
* }
* }
* }
*
* val filtered = scanned
* .takeWhile(_.count < 10)
* .collect { case Current(a, _) => a }
* }}}
*
    * @see [[scanEval0F]] for the version that emits the seed element at the beginning
*
* @see [[scan]] for the synchronous, non-lazy version, or
* [[scanEval]] for the [[monix.eval.Task Task]]-specialized
* version.
*
* @param seed is the initial state
* @param op is the function that evolves the current state
*
* @param F is the `cats.effect.Effect` type class implementation
* for type `F`, which controls the evaluation. `F` can be
* a data type such as [[monix.eval.Task]] or `cats.effect.IO`,
* which implement `Effect`.
*
* @return a new observable that emits all intermediate states being
* resulted from applying the given function
*/
final def scanEvalF[F[_], S](seed: F[S])(op: (S, A) => F[S])(implicit F: TaskLike[F]): Observable[S] =
scanEval(Task.from(seed)(F))((s, a) => Task.from(op(s, a))(F))
/** Applies a binary operator to a start value and all elements of
* this stream, going left to right and returns a new stream that
* emits on each step the result of the applied function.
*
    * This is a version of [[scanEvalF]] that emits the seed element at the
    * beginning, similar to `scanLeft` on Scala collections.
*/
final def scanEval0F[F[_], S](seed: F[S])(
op: (S, A) => F[S])(implicit F: TaskLike[F], A: Applicative[F]): Observable[S] =
Observable.fromTaskLike(seed).flatMap(s => s +: scanEvalF(A.pure(s))(op))
/** Applies a binary operator to a start value and all elements of
* this stream, going left to right and returns a new stream that
* emits on each step the result of the applied function.
*
    * Similar to [[scan]], but this can suspend and evaluate
* side effects with [[monix.eval.Task Task]], thus allowing for
* asynchronous data processing.
*
* Similar to [[foldLeft]] and [[foldWhileLeft]], but emits the
* state on each step. Useful for modeling finite state machines.
*
* Example showing how state can be evolved and acted upon:
*
* {{{
* import monix.eval.Task
*
* sealed trait State[+A] { def count: Int }
* case object Init extends State[Nothing] { def count = 0 }
* case class Current[A](current: Option[A], count: Int)
* extends State[A]
*
* case class Person(id: Int, name: String)
*
* // TODO: to implement!
* def requestPersonDetails(id: Int): Task[Option[Person]] =
* Task.raiseError(new NotImplementedError)
*
* // TODO: to implement
* val source: Observable[Int] =
* Observable.raiseError(new NotImplementedError)
*
* // Initial state
* val seed = Task.pure(Init : State[Person])
*
* val scanned = source.scanEval(seed) { (state, id) =>
* requestPersonDetails(id).map { person =>
* state match {
* case Init =>
* Current(person, 1)
* case Current(_, count) =>
* Current(person, count + 1)
* }
* }
* }
*
* val filtered = scanned
* .takeWhile(_.count < 10)
* .collect { case Current(a, _) => a }
* }}}
*
    * @see [[scanEval0]] for the version that emits the seed element at the beginning
* @see [[scan]] for the version that does not require using `Task`
* in the provided operator
*
* @param seed is the initial state
* @param op is the function that evolves the current state
*
* @return a new observable that emits all intermediate states being
* resulted from applying the given function
*/
final def scanEval[S](seed: Task[S])(op: (S, A) => Task[S]): Observable[S] =
new ScanTaskObservable(self, seed, op)
/** Applies a binary operator to a start value and all elements of
* this stream, going left to right and returns a new stream that
* emits on each step the result of the applied function.
*
    * This is a version of [[scanEval]] that emits the seed element at the beginning.
*/
final def scanEval0[S](seed: Task[S])(op: (S, A) => Task[S]): Observable[S] =
Observable.fromTask(seed).flatMap(s => s +: scanEval(Task.pure(s))(op))
/** Given a mapping function that returns a `B` type for which we have
* a [[cats.Monoid]] instance, returns a new stream that folds the incoming
* elements of the sources using the provided `Monoid[B].combine`, with the
* initial seed being the `Monoid[B].empty` value, emitting the generated values
* at each step.
*
    * Equivalent to [[scan]] applied with the given [[cats.Monoid]], so given
* our `f` mapping function returns a `B`, this law holds:
*
* <pre>
* val B = implicitly[Monoid[B]]
*
* stream.scanMap(f) <-> stream.scan(B.empty)(B.combine)
* </pre>
*
* Example:
* {{{
* import cats.implicits._
*
* // Yields 2, 6, 12, 20, 30, 42
* val stream = Observable(1, 2, 3, 4, 5, 6).scanMap(x => x * 2)
* }}}
*
* @param f is the mapping function applied to every incoming element of this `Observable`
* before folding using `Monoid[B].combine`
*
* @return a new `Observable` that emits all intermediate states being
* resulted from applying `Monoid[B].combine` function
*/
final def scanMap[B](f: A => B)(implicit B: Monoid[B]): Observable[B] =
self.scan(B.empty)((acc, a) => B.combine(acc, f(a)))
/** Given a mapping function that returns a `B` type for which we have
* a [[cats.Monoid]] instance, returns a new stream that folds the incoming
* elements of the sources using the provided `Monoid[B].combine`, with the
* initial seed being the `Monoid[B].empty` value, emitting the generated values
* at each step.
*
    * This is a version of [[scanMap]] that emits the seed element at the beginning.
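    *
    * Example:
    *
    * {{{
    *   import cats.implicits._
    *
    *   // Yields 0, 1, 3, 6
    *   Observable(1, 2, 3).scanMap0(x => x)
    * }}}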
*/
final def scanMap0[B](f: A => B)(implicit B: Monoid[B]): Observable[B] =
B.empty +: scanMap(f)
/** Creates a new Observable that emits the given elements and then
* it also emits the events of the source (prepend operation).
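    *
    * Example:
    *
    * {{{
    *   // Yields 1, 2, 3, 4
    *   Observable(3, 4).startWith(Seq(1, 2))
    * }}}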
*/
final def startWith[B >: A](elems: Seq[B]): Observable[B] =
Observable.fromIterable(elems).appendAll(self)
/** Returns a new Observable that uses the specified `Scheduler` for
* initiating the subscription.
*
* This is different from [[executeOn]] because the given `scheduler`
* is only used to start the subscription, but does not override the
* default [[monix.execution.Scheduler Scheduler]].
*/
final def subscribeOn(scheduler: Scheduler): Observable[A] =
new SubscribeOnObservable[A](self, scheduler)
/** In case the source is empty, switch to the given backup. */
final def switchIfEmpty[B >: A](backup: Observable[B]): Observable[B] =
new SwitchIfEmptyObservable[B](self, backup)
/** Drops the first element of the source observable,
* emitting the rest.
*/
final def tail: Observable[A] = drop(1L)
/** Overload of [[drop(n:Long* drop(Long)]]. */
final def drop(n: Int): Observable[A] =
self.liftByOperator(new DropFirstOperator(n.toLong))
/** Drops the first `n` elements (from the start).
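    *
    * Example:
    *
    * {{{
    *   // Yields 3, 4, 5
    *   Observable(1, 2, 3, 4, 5).drop(2L)
    * }}}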
*
* @param n the number (Long) of elements to drop
* @return a new Observable that drops the first ''n'' elements
* emitted by the source
*/
final def drop(n: Long): Observable[A] =
self.liftByOperator(new DropFirstOperator(n))
/** Creates a new Observable that emits the events of the source, only
    * for the specified `timespan`, after which it completes.
*
* @param timespan the window of time during which the new Observable
* is allowed to emit the events of the source
*/
final def takeByTimespan(timespan: FiniteDuration): Observable[A] =
new TakeLeftByTimespanObservable(self, timespan)
/** Creates a new Observable that emits every n-th event from the source,
* dropping intermediary events.
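    *
    * Example:
    *
    * {{{
    *   // Yields 3, 6, 9
    *   Observable.range(1, 11).takeEveryNth(3)
    * }}}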
*/
final def takeEveryNth(n: Int): Observable[A] =
self.liftByOperator(new TakeEveryNthOperator(n))
/** Creates a new observable that mirrors the source until
* the given `trigger` emits either an element or `onComplete`,
* after which it is completed.
*
* The resulting observable is completed as soon as `trigger`
* emits either an `onNext` or `onComplete`. If `trigger`
* emits an `onError`, then the resulting observable is also
* completed with error.
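    *
    * Example (a minimal sketch):
    *
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Mirrors the ticks for roughly 3 seconds, then completes
    *   Observable.intervalAtFixedRate(100.millis, 100.millis)
    *     .takeUntil(Observable.now(()).delayExecution(3.seconds))
    * }}}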
*
* @param trigger is an observable that will cancel the
* streaming as soon as it emits an event
*/
final def takeUntil(trigger: Observable[Any]): Observable[A] =
new TakeUntilObservable[A](self, trigger)
/** Version of [[takeUntil]] that can work with a trigger expressed by a [[monix.eval.Task]]
*
* @see [[takeUntil]] for version that works with Observable.
* @see [[takeUntilEvalF]] for version that works with generic `F[_]` powered by [[monix.eval.TaskLike]].
*
* @param trigger task that will cancel the stream as soon as it completes.
*/
final def takeUntilEval(trigger: Task[_]): Observable[A] =
self.takeUntil(Observable.fromTask(trigger))
/** Version of [[takeUntil]] that can work with a trigger expressed by a generic `F[_]`
* provided an implicit [[monix.eval.TaskLike]] exists.
*
* @see [[takeUntil]] for version that works with Observable.
* @see [[takeUntilEval]] for version that works with [[monix.eval.Task]].
*
* @param trigger operation that will cancel the stream as soon as it completes.
*/
final def takeUntilEvalF[F[_], B](trigger: F[B])(implicit taskLike: TaskLike[F]): Observable[A] =
self.takeUntil(Observable.fromTaskLike(trigger))
/** Takes longest prefix of elements that satisfy the given predicate
* and returns a new Observable that emits those elements.
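    *
    * Example:
    *
    * {{{
    *   // Yields 1, 2, 3
    *   Observable(1, 2, 3, 4, 1).takeWhile(_ < 4)
    * }}}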
*/
final def takeWhile(p: A => Boolean): Observable[A] =
self.liftByOperator(new TakeByPredicateOperator(p, inclusive = false))
/** Takes longest prefix of elements that satisfy the given predicate, inclusive of
* the value that caused `predicate` to return `false` and returns a new Observable that emits those elements.
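    *
    * Example:
    *
    * {{{
    *   // Yields 1, 2, 3, 4
    *   Observable(1, 2, 3, 4, 1).takeWhileInclusive(_ < 4)
    * }}}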
*/
final def takeWhileInclusive(p: A => Boolean): Observable[A] =
self.liftByOperator(new TakeByPredicateOperator(p, inclusive = true))
/** Takes longest prefix of elements while given [[monix.execution.cancelables.BooleanCancelable BooleanCancelable]]
* is not canceled and returns a new Observable that emits those elements.
*/
final def takeWhileNotCanceled(c: BooleanCancelable): Observable[A] =
self.liftByOperator(new TakeWhileNotCanceledOperator(c))
/** Returns an Observable that emits maximum `n` items per given `period`.
*
* Unlike [[Observable!.throttleLast]] and [[Observable!.throttleFirst]]
* it does not discard any elements.
*
* If the source observable completes, then the current buffer gets
* signaled downstream. If the source triggers an error then the
    * current buffer is dropped and the error gets propagated
* immediately.
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
*
* // emits two items per second
* Observable.fromIterable(0 to 10)
* .throttle(1.second, 2)
* }}}
*
    * @param period time that has to pass before emitting new items
* @param n maximum number of items emitted per given `period`
*/
final def throttle(period: FiniteDuration, n: Int): Observable[A] =
bufferTimedWithPressure[A](period, n).flatMap(Observable.fromIterable)
/** Returns an Observable that emits only the first item emitted by
* the source Observable during sequential time windows of a
* specified duration.
*
* This differs from [[Observable!.throttleLast]] in that this only
* tracks passage of time whereas `throttleLast` ticks at scheduled
* intervals.
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
*
* // emits 0, 5, 10 in 1 second intervals
* Observable.fromIterable(0 to 10)
* // without delay, it would return only 0
* .delayOnNext(200.millis)
* .throttleFirst(1.second)
* }}}
*
    * @see [[throttle]] for a version that allows specifying the number
    *      of elements processed per period and does not drop any elements
* @param interval time to wait before emitting another item after
* emitting the last item
*/
final def throttleFirst(interval: FiniteDuration): Observable[A] =
self.liftByOperator(new ThrottleFirstOperator[A](interval))
/** Emit the most recent items emitted by the source within
* periodic time intervals.
*
* Alias for [[sample]].
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
*
* // emits 3, 8, 10 in 1 second intervals
* Observable.fromIterable(0 to 10)
* // without delay, it would return only 10
* .delayOnNext(200.millis)
* .throttleLast(1.second)
* }}}
*
    * @see [[throttle]] for a version that allows specifying the number
    *      of elements processed per period and does not drop any elements
* @param period duration of windows within which the last item
* emitted by the source Observable will be emitted
*/
final def throttleLast(period: FiniteDuration): Observable[A] =
sample(period)
  /** Emits the first element emitted by the source and then emits
    * the most recent items emitted by the source within
    * periodic time intervals.
    *
    * Usage:
*
* {{{
* import scala.concurrent.duration._
*
* // emits 0 after 200 ms and then 4,9 in 1 sec intervals and 10 after the observable completes
* Observable.fromIterable(0 to 10)
* // without delay, it would return only 0, 10
* .delayOnNext(200.millis)
* .throttleLatest(1.second, true)
* }}}
*
* @param period duration of windows within which the last item
* emitted by the source Observable will be emitted
    * @param emitLast if `true`, the last element will be emitted when the source
    *        completes, whether or not the interval has passed
*/
final def throttleLatest(period: FiniteDuration, emitLast: Boolean): Observable[A] =
new ThrottleLatestObservable[A](self, period, emitLast)
/** Emit the most recent items emitted by the source within
* periodic time intervals.
*
* Use the `sample` operator to periodically look at an observable
* to see what item it has most recently emitted since the previous
* sampling. Note that if the source observable has emitted no
* items since the last time it was sampled, the observable that
* results from the `sample` operator will emit no item for that
* sampling period.
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
*
* // emits 3, 8, 10 in 1 second intervals
* Observable.fromIterable(0 to 10)
* // without delay, it would return only 10
* .delayOnNext(200.millis)
* .sample(1.second)
* }}}
*
* @see [[sampleBy]] for fine control
* @see [[sampleRepeated]] for repeating the last value on silence
    * @see [[throttle]] for a version that allows specifying the number
    *      of elements processed per period and does not drop any elements
*
* @param period the timespan at which sampling occurs
*/
final def sample(period: FiniteDuration): Observable[A] =
self.sampleBy(Observable.intervalAtFixedRate(period, period))
/** Returns an observable that, when the specified sampler
* emits an item or completes, emits the most recently emitted item
* (if any) emitted by the source since the previous
* emission from the sampler.
*
* Use the `sampleBy` operator to periodically look at an observable
* to see what item it has most recently emitted since the previous
* sampling. Note that if the source observable has emitted no
* items since the last time it was sampled, the observable that
* results from the `sampleBy` operator will emit no item.
*
* @see [[sample]] for periodic sampling
* @see [[sampleRepeatedBy]] for repeating the last value on silence
* @param sampler - the observable to use for sampling the source
*/
final def sampleBy[B](sampler: Observable[B]): Observable[A] =
new ThrottleLastObservable[A, B](self, sampler, shouldRepeatOnSilence = false)
/** Only emit an item from an observable if a particular timespan has
* passed without it emitting another item.
*
* Note: If the source observable keeps emitting items more
* frequently than the length of the time window, then no items will
* be emitted by the resulting observable.
*
* Alias for [[debounce]].
*
* @param timeout the length of the window of time that must pass after
* the emission of an item from the source observable in
* which that observable emits no items in order for the
* item to be emitted by the resulting observable
* @see [[echoOnce]] for a similar operator that also mirrors
* the source observable
*/
final def throttleWithTimeout(timeout: FiniteDuration): Observable[A] =
debounce(timeout)
/** Only emit an item from an observable if a particular timespan has
* passed without it emitting another item.
*
* Note: If the source observable keeps emitting items more
* frequently than the length of the time window, then no items will
* be emitted by the resulting observable.
*
* Usage:
* {{{
* import scala.concurrent.duration._
*
* (Observable("M", "O", "N", "I", "X") ++ Observable.never)
* .delayOnNext(100.millis)
* .scan("")(_ ++ _)
* .debounce(200.millis)
* .dump("O")
*
* // Output:
* // 0: O --> MONIX
* }}}
*
* @param timeout the length of the window of time that must pass after
* the emission of an item from the source observable in
* which that observable emits no items in order for the
* item to be emitted by the resulting observable
* @see [[echoOnce]] for a similar operator that also mirrors
* the source observable
*/
final def debounce(timeout: FiniteDuration): Observable[A] =
new DebounceObservable(self, timeout, repeat = false)
/** Returns an observable that mirrors the source but applies a timeout
    * for each `onNext` message. If the downstream subscriber takes more time than the given
* timespan to process an `onNext` message, the source is terminated and downstream gets
* subscribed to the given backup.
*
* Note that this ignores the time it takes for the upstream to send
* `onNext` messages. For detecting slow producers see [[timeoutOnSlowUpstream]].
*
* @param timeout maximum duration for `onNext`.
* @param backup alternative data source to subscribe to on timeout.
*/
final def timeoutOnSlowDownstreamTo[B >: A](timeout: FiniteDuration, backup: Observable[B]): Observable[B] =
self.timeoutOnSlowDownstream(timeout).onErrorHandleWith {
case DownstreamTimeoutException(`timeout`) => backup
case other => Observable.raiseError(other)
}
/** Returns an observable that mirrors the source but that will trigger a
* [[monix.execution.exceptions.DownstreamTimeoutException DownstreamTimeoutException]]
* in case the downstream subscriber takes more than the given timespan
* to process an `onNext` message.
*
* Note that this ignores the time it takes for the upstream to send
* `onNext` messages. For detecting slow producers see [[timeoutOnSlowUpstream]].
*
* @param timeout maximum duration for `onNext`.
*/
final def timeoutOnSlowDownstream(timeout: FiniteDuration): Observable[A] =
new DownstreamTimeoutObservable[A](self, timeout)
/** Returns an observable that mirrors the source but applies a timeout
* for each emitted item by the upstream. If the next item isn't
* emitted within the specified timeout duration starting from its
* predecessor, the source is terminated and the downstream gets
* subscribed to the given backup.
*
* Note that this ignores the time it takes to process `onNext`.
* If dealing with a slow consumer, see [[timeoutOnSlowDownstream]].
*
* @param timeout maximum duration between emitted items before
* a timeout occurs (ignoring the time it takes to process `onNext`)
* @param backup is the alternative data source to subscribe to on timeout
*/
final def timeoutOnSlowUpstreamTo[B >: A](timeout: FiniteDuration, backup: Observable[B]): Observable[B] =
self.timeoutOnSlowUpstream(timeout).onErrorHandleWith {
case UpstreamTimeoutException(`timeout`) => backup
case other => Observable.raiseError(other)
}
/** Returns an observable that mirrors the source but applies a timeout
* for each emitted item by the upstream. If the next item isn't
* emitted within the specified timeout duration starting from its
* predecessor, the resulting Observable terminates and notifies
* observers of a TimeoutException.
*
* Note that this ignores the time it takes to process `onNext`.
* If dealing with a slow consumer, see [[timeoutOnSlowDownstream]].
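    *
    * Example (a minimal sketch):
    *
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Triggers an UpstreamTimeoutException after 1 second of silence
    *   (Observable(1) ++ Observable.never[Int])
    *     .timeoutOnSlowUpstream(1.second)
    * }}}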
*
* @param timeout maximum duration between emitted items before
* a timeout occurs (ignoring the time it takes to process `onNext`)
*/
final def timeoutOnSlowUpstream(timeout: FiniteDuration): Observable[A] =
new UpstreamTimeoutObservable[A](self, timeout)
/** While the destination observer is busy, buffers events, applying
* the given overflowStrategy.
*
* @param overflowStrategy - $overflowStrategyParam
*/
final def whileBusyBuffer[B >: A](overflowStrategy: OverflowStrategy.Synchronous[B]): Observable[B] =
asyncBoundary(overflowStrategy)
/** Forces a buffered asynchronous boundary.
    * An asynchronous boundary means that the upstream and the downstream
    * are processed independently: the producer does not have to wait
    * for the consumer to acknowledge each event.
*
* Internally it wraps the observer implementation given to
* `onSubscribe` into a
* [[monix.reactive.observers.BufferedSubscriber BufferedSubscriber]].
*
* Normally Monix's implementation guarantees that events are
* not emitted concurrently, and that the publisher MUST NOT
* emit the next event without acknowledgement from the
    * consumer that it may proceed. However, for badly behaved
* publishers, this wrapper provides the guarantee that the
* downstream [[monix.reactive.Observer Observer]] given in
* `subscribe` will not receive concurrent events.
*
* WARNING: if the buffer created by this operator is
* unbounded, it can blow up the process if the data source
* is pushing events faster than what the observer can
* consume, as it introduces an asynchronous boundary that
* eliminates the back-pressure requirements of the data
* source. Unbounded is the default
* [[monix.reactive.OverflowStrategy overflowStrategy]], see
* [[monix.reactive.OverflowStrategy OverflowStrategy]] for
* options.
*
* Usage:
*
* {{{
* import monix.eval.Task
* import scala.concurrent.duration._
*
* Observable("A", "B", "C", "D")
* .mapEval(i => Task { println(s"1: Processing $$i"); i ++ i })
* .asyncBoundary(OverflowStrategy.Unbounded)
* .mapEval(i => Task { println(s"2: Processing $$i") }.delayExecution(100.millis))
*
* // Without asyncBoundary it would process A, AA, B, BB, ...
* // 1: Processing A
* // 1: Processing B
* // 1: Processing C
* // 1: Processing D
* // 2: Processing AA
* // 2: Processing BB
* // 2: Processing CC
* // 2: Processing DD
* }}}
*
* @param overflowStrategy - $overflowStrategyParam
*/
final def asyncBoundary[B >: A](overflowStrategy: OverflowStrategy[B]): Observable[B] =
liftByOperator(new AsyncBoundaryOperator[B](overflowStrategy))
  /** While the destination observer is busy, drops the incoming events.
*/
final def whileBusyDropEvents: Observable[A] =
self.liftByOperator(new WhileBusyDropEventsOperator[A])
  /** While the destination observer is busy, drops the incoming events.
* When the downstream recovers, we can signal a special event
    * meant to inform the downstream observer how many events were
* dropped.
*
* @param onOverflow - $onOverflowParam
*/
final def whileBusyDropEventsAndSignal[B >: A](onOverflow: Long => B): Observable[B] =
self.liftByOperator(new WhileBusyDropEventsAndSignalOperator[B](onOverflow))
  /** Conflates events when the downstream is slower than the upstream.
    *
    * Emits immediately when an element is received if the downstream is
    * waiting for elements; otherwise emits when the downstream stops
    * back-pressuring and a conflated element is available.
    *
    * Back-pressures: never (conflates instead).
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
* import cats.data.Chain
*
* // Emits [0], [1, 2], [3, 4]
* Observable.range(0, 5)
* .throttle(1.second, 1)
* .whileBusyAggregateEvents(Chain.apply(_)){ case (chain, ele) => chain.append(ele) }
* .throttle(2.seconds, 1)
* }}}
*
*/
def whileBusyAggregateEvents[S](seed: A => S)(aggregate: (S, A) => S): Observable[S] = {
self.liftByOperator(new WhileBusyAggregateEventsOperator[A, S](seed, aggregate))
}
  /** Reduces elements when the downstream is slower than the upstream.
    *
    * Emits immediately when an element is received if the downstream is
    * waiting for elements; otherwise emits when the downstream stops
    * back-pressuring and a reduced element is available.
    *
    * Back-pressures: never (reduces instead).
*
* Usage:
*
* {{{
* import scala.concurrent.duration._
* import cats.data.Chain
*
* // Emits 0, 3 (1+2), 7 (3+4)
* Observable.range(0, 5)
* .throttle(1.second, 1)
* .whileBusyReduceEvents(_ + _)
* .throttle(2.seconds, 1)
* }}}
*
*/
final def whileBusyReduceEvents[B >: A](op: (B, B) => B): Observable[B] = {
self.whileBusyAggregateEvents[B](identity)(op)
}
/** Combines the elements emitted by the source with the latest element
* emitted by another observable.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (3, 3), (4, 3)
* </pre>
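    *
    * Example (a sketch; the exact pairing depends on timing):
    *
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Pairs each tick of the slower stream with the latest
    *   // tick seen from the faster one
    *   Observable.intervalAtFixedRate(1.second, 1.second)
    *     .withLatestFrom(Observable.intervalAtFixedRate(100.millis, 100.millis))((s, f) => (s, f))
    * }}}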
*
* @param other is an observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom[B, R](other: Observable[B])(f: (A, B) => R): Observable[R] =
new WithLatestFromObservable[A, B, R](self, other, f)
/** Combines the elements emitted by the source with the latest elements
* emitted by two observables.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* @param o1 is the first observable that gets paired with the source
* @param o2 is the second observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom2[B1, B2, R](o1: Observable[B1], o2: Observable[B2])(f: (A, B1, B2) => R): Observable[R] =
self.withLatestFrom(Observable.combineLatest2(o1, o2)) { (a, tuple) =>
f(a, tuple._1, tuple._2)
}
/** Combines the elements emitted by the source with the latest elements
* emitted by three observables.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* @param o1 is the first observable that gets paired with the source
* @param o2 is the second observable that gets paired with the source
* @param o3 is the third observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom3[B1, B2, B3, R](o1: Observable[B1], o2: Observable[B2], o3: Observable[B3])(
f: (A, B1, B2, B3) => R): Observable[R] = {
self.withLatestFrom(Observable.combineLatest3(o1, o2, o3)) { (a, o) =>
f(a, o._1, o._2, o._3)
}
}
/** Combines the elements emitted by the source with the latest elements
* emitted by four observables.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* @param o1 is the first observable that gets paired with the source
* @param o2 is the second observable that gets paired with the source
* @param o3 is the third observable that gets paired with the source
* @param o4 is the fourth observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom4[B1, B2, B3, B4, R](
o1: Observable[B1],
o2: Observable[B2],
o3: Observable[B3],
o4: Observable[B4])(f: (A, B1, B2, B3, B4) => R): Observable[R] = {
self.withLatestFrom(Observable.combineLatest4(o1, o2, o3, o4)) { (a, o) =>
f(a, o._1, o._2, o._3, o._4)
}
}
/** Combines the elements emitted by the source with the latest elements
* emitted by five observables.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* @param o1 is the first observable that gets paired with the source
* @param o2 is the second observable that gets paired with the source
* @param o3 is the third observable that gets paired with the source
* @param o4 is the fourth observable that gets paired with the source
* @param o5 is the fifth observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom5[B1, B2, B3, B4, B5, R](
o1: Observable[B1],
o2: Observable[B2],
o3: Observable[B3],
o4: Observable[B4],
o5: Observable[B5])(f: (A, B1, B2, B3, B4, B5) => R): Observable[R] = {
self.withLatestFrom(Observable.combineLatest5(o1, o2, o3, o4, o5)) { (a, o) =>
f(a, o._1, o._2, o._3, o._4, o._5)
}
}
/** Combines the elements emitted by the source with the latest elements
* emitted by six observables.
*
    * Similar to `combineLatest`, but only emits items when the single source
* emits an item (not when any of the Observables that are passed to the operator
* do, as combineLatest does).
*
* @param o1 is the first observable that gets paired with the source
* @param o2 is the second observable that gets paired with the source
* @param o3 is the third observable that gets paired with the source
* @param o4 is the fourth observable that gets paired with the source
* @param o5 is the fifth observable that gets paired with the source
* @param o6 is the sixth observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def withLatestFrom6[B1, B2, B3, B4, B5, B6, R](
o1: Observable[B1],
o2: Observable[B2],
o3: Observable[B3],
o4: Observable[B4],
o5: Observable[B5],
o6: Observable[B6])(f: (A, B1, B2, B3, B4, B5, B6) => R): Observable[R] = {
self.withLatestFrom(Observable.combineLatest6(o1, o2, o3, o4, o5, o6)) { (a, o) =>
f(a, o._1, o._2, o._3, o._4, o._5, o._6)
}
}
/** Creates a new observable from this observable and another given
* observable by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the tuple of the
* first items emitted by each of the source observables; the second item
* emitted by the new observable will be a tuple with the second items
* emitted by each of those observables; and so forth.
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (3, 3), (4, 4)
* </pre>
*
* See [[combineLatest]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
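    *
    * Example:
    *
    * {{{
    *   // Yields (1, "a"), (2, "b")
    *   Observable(1, 2, 3).zip(Observable("a", "b"))
    * }}}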
*
* @param other is an observable that gets paired with the source
* @return a new observable sequence that emits the paired items
* of the source observables
*/
final def zip[B](other: Observable[B]): Observable[(A, B)] =
new Zip2Observable[A, B, (A, B)](self, other)((a, b) => (a, b))
/** Creates a new observable from this observable and another given
* observable by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first item emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second item
* emitted by each of those observables; and so forth.
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (3, 3), (4, 4)
* </pre>
*
* See [[combineLatestMap]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param other is an observable that gets paired with the source
* @param f is a mapping function over the generated pairs
*/
final def zipMap[B, R](other: Observable[B])(f: (A, B) => R): Observable[R] =
new Zip2Observable[A, B, R](self, other)(f)
/** Zips the emitted elements of the source with their indices. */
final def zipWithIndex: Observable[(A, Long)] =
self.liftByOperator(new ZipWithIndexOperator[A])
/** Creates a new observable from this observable that will emit a specific `separator`
* between every pair of elements.
*
* Usage sample:
*
* {{{
* // Yields "a : b : c : d"
* Observable("a", "b", "c", "d")
* .intersperse(" : ")
* .foldLeftL("")(_ ++ _)
* }}}
*
* @param separator is the separator
*/
final def intersperse[B >: A](separator: B): Observable[B] =
new IntersperseObservable(self, None, separator, None)
/** Creates a new observable from this observable that will emit the `start` element
* followed by the upstream elements paired with the `separator`, and lastly the `end` element.
*
* Usage sample:
*
* {{{
* // Yields "begin a : b : c : d end"
* Observable("a", "b", "c", "d")
* .intersperse("begin ", " : ", " end")
* .foldLeftL("")(_ ++ _)
* }}}
*
* @param start is the first element emitted
* @param separator is the separator
* @param end the last element emitted
*/
final def intersperse[B >: A](start: B, separator: B, end: B): Observable[B] =
new IntersperseObservable(self, Some(start), separator, Some(end))
/** Converts this `Observable` into an `org.reactivestreams.Publisher`.
*
* Meant for interoperability with other Reactive Streams
* implementations.
*
* Usage sample:
*
* {{{
* import monix.eval.Task
* import monix.execution.rstreams.SingleAssignSubscription
* import org.reactivestreams.{Publisher, Subscriber, Subscription}
*
* def sum(source: Publisher[Int], requestSize: Int): Task[Long] =
* Task.create { (_, cb) =>
* val sub = SingleAssignSubscription()
*
* source.subscribe(new Subscriber[Int] {
* private[this] var requested = 0L
* private[this] var sum = 0L
*
* def onSubscribe(s: Subscription): Unit = {
* sub := s
* requested = requestSize
* s.request(requestSize)
* }
*
* def onNext(t: Int): Unit = {
* sum += t
* if (requestSize != Long.MaxValue) requested -= 1
*
* if (requested <= 0) {
* requested = requestSize
* sub.request(requestSize)
* }
* }
*
* def onError(t: Throwable): Unit =
* cb.onError(t)
* def onComplete(): Unit =
* cb.onSuccess(sum)
* })
*
* // Cancelable that can be used by Task
* sub
* }
*
* import monix.execution.Scheduler.Implicits.global
* val pub = Observable(1, 2, 3, 4).toReactivePublisher
*
* // Yields 10
* sum(pub, requestSize = 128)
* }}}
*
* See the [[http://www.reactive-streams.org/ Reactive Streams]]
* protocol for details.
*/
final def toReactivePublisher[B >: A](implicit s: Scheduler): RPublisher[B] =
new RPublisher[B] {
def subscribe(subscriber: RSubscriber[_ >: B]): Unit = {
val subscription = SingleAssignCancelable()
subscription := unsafeSubscribeFn(
SafeSubscriber(
Subscriber.fromReactiveSubscriber(subscriber, subscription)
))
()
}
}
/** Returns a [[monix.eval.Task Task]] that upon execution
* will signal the last generated element of the source observable.
*
* Returns an `Option` because the source can be empty.
*/
final def lastOptionL: Task[Option[A]] =
map(Some.apply).lastOrElseL(None)
/** Creates a new [[monix.eval.Task Task]] that upon execution
* will signal the last generated element of the source observable.
*
* In case the stream was empty, then the given default gets
* evaluated and emitted.
*/
final def lastOrElseL[B >: A](default: => B): Task[B] =
Task.create { (s, cb) =>
unsafeSubscribeFn(new Subscriber.Sync[A] {
implicit val scheduler: Scheduler = s
private[this] var value: A = _
private[this] var isEmpty = true
def onNext(elem: A): Ack = {
if (isEmpty) isEmpty = false
value = elem
Continue
}
def onError(ex: Throwable): Unit = {
cb.onError(ex)
}
def onComplete(): Unit = {
if (isEmpty)
cb(Try(default))
else
cb.onSuccess(value)
}
})
}
/** Creates a new Observable that emits the total number of `onNext`
* events that were emitted by the source.
*
* Note that this Observable emits only one item after the source
    * is complete. In case the source emits an error, only
* that error will be emitted.
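    *
    * Example:
    *
    * {{{
    *   // Yields 4
    *   Observable("a", "b", "c", "d").count
    * }}}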
*/
final def count: Observable[Long] =
self.liftByOperator(CountOperator)
/** Creates a task that emits the total number of `onNext`
* events that were emitted by the source.
*/
final def countL: Task[Long] =
count.headL
/** Returns an Observable which only emits the first item for which
* the predicate holds.
*
* @param p is a function that evaluates the items emitted by the
* source Observable, returning `true` if they pass the filter
* @return an Observable that emits only the first item in the original
* Observable for which the filter evaluates as `true`
*/
final def find(p: A => Boolean): Observable[A] =
filter(p).head
/** Returns a task which emits the first item for which
* the predicate holds.
*
* @param p is a function that evaluates the items emitted by the
* source observable, returning `true` if they pass the filter
*
* @return a task that emits the first item in the source
* observable for which the filter evaluates as `true`
*/
final def findL(p: A => Boolean): Task[Option[A]] =
find(p).headOptionL
/** Given evidence that type `A` has a `cats.Monoid` implementation,
* folds the stream with the provided monoid definition.
*
* For streams emitting numbers, this effectively sums them up.
* For strings, this concatenates them.
*
* Example:
*
* {{{
* import cats.implicits._
*
* // Yields 10
* val stream1 = Observable(1, 2, 3, 4).fold
*
* // Yields "1234"
* val stream2 = Observable("1", "2", "3", "4").fold
* }}}
*
* Note, in case you don't have a `Monoid` instance in scope,
* but you feel like you should, try this import:
*
* {{{
* import cats.instances.all._
* }}}
*
* @see [[Observable.foldL foldL]] for the version that returns a
* task instead of an observable.
*
* @param A is the `cats.Monoid` type class instance that's needed
* in scope for folding the source
*
* @return the result of combining all elements of the source,
* or the defined `Monoid.empty` element in case the
* stream is empty
*/
final def fold[AA >: A](implicit A: Monoid[AA]): Observable[AA] =
foldLeft(A.empty)(A.combine)
/** Given evidence that type `A` has a `cats.Monoid` implementation,
* folds the stream with the provided monoid definition.
*
* For streams emitting numbers, this effectively sums them up.
* For strings, this concatenates them.
*
* Example:
*
* {{{
* import cats.implicits._
*
* // Yields 10
* val stream1 = Observable(1, 2, 3, 4).foldL
*
* // Yields "1234"
* val stream2 = Observable("1", "2", "3", "4").foldL
* }}}
*
* @see [[fold]] for the version that returns an observable
* instead of a task.
*
* @param A is the `cats.Monoid` type class instance that's needed
* in scope for folding the source
*
* @return the result of combining all elements of the source,
* or the defined `Monoid.empty` element in case the
* stream is empty
*/
final def foldL[AA >: A](implicit A: Monoid[AA]): Task[AA] =
fold(A).headL
/** Folds the source observable, from start to finish, until the
* source completes, or until the operator short-circuits the
    * process by returning a `Right`.
*
* Note that a call to [[foldLeft]] is equivalent to this function
    * being called with an operator that always returns `Left` results.
*
* Example: {{{
* // Sums first 10 items
* val stream1 = Observable.range(0, 1000).foldWhileLeft((0L, 0)) {
* case ((sum, count), e) =>
* val next = (sum + e, count + 1)
* if (count + 1 < 10) Left(next) else Right(next)
* }
*
* // Implements exists(predicate)
* val stream2 = Observable(1, 2, 3, 4, 5).foldWhileLeft(false) {
* (default, e) =>
* if (e == 3) Right(true) else Left(default)
* }
*
* // Implements forall(predicate)
* val stream3 = Observable(1, 2, 3, 4, 5).foldWhileLeft(true) {
* (default, e) =>
* if (e != 3) Right(false) else Left(default)
* }
* }}}
*
* @see [[Observable.foldWhileLeftL foldWhileLeftL]] for a version
* that returns a task instead of an observable.
*
* @param seed is the initial state, specified as a possibly lazy value;
* it gets evaluated when the subscription happens and if it
* triggers an error then the subscriber will get immediately
* terminated with an error
*
* @param op is the binary operator returning either `Left`,
* signaling that the state should be evolved or a `Right`,
* signaling that the process can be short-circuited and
* the result returned immediately
*
* @return the result of inserting `op` between consecutive
* elements of this observable, going from left to right with
* the `seed` as the start value, or `seed` if the observable
* is empty
*/
final def foldWhileLeft[S](seed: => S)(op: (S, A) => Either[S, S]): Observable[S] =
new FoldWhileLeftObservable[A, S](self, () => seed, op)
/** Folds the source observable, from start to finish, until the
* source completes, or until the operator short-circuits the
    * process by returning a `Right`.
*
* Note that a call to [[foldLeftL]] is equivalent to this function
* being called with an operator always returning `Left` results.
*
* Example: {{{
* // Sums first 10 items
* val stream1 = Observable.range(0, 1000).foldWhileLeftL((0L, 0)) {
* case ((sum, count), e) =>
* val next = (sum + e, count + 1)
* if (count + 1 < 10) Left(next) else Right(next)
* }
*
* // Implements exists(predicate)
* val stream2 = Observable(1, 2, 3, 4, 5).foldWhileLeftL(false) {
* (default, e) =>
* if (e == 3) Right(true) else Left(default)
* }
*
* // Implements forall(predicate)
* val stream3 = Observable(1, 2, 3, 4, 5).foldWhileLeftL(true) {
* (default, e) =>
* if (e != 3) Right(false) else Left(default)
* }
* }}}
*
* @see [[foldWhileLeft]] for a version that returns an observable
* instead of a task.
*
* @param seed is the initial state, specified as a possibly lazy value;
* it gets evaluated when the subscription happens and if it
* triggers an error then the subscriber will get immediately
* terminated with an error
*
* @param op is the binary operator returning either `Left`,
* signaling that the state should be evolved or a `Right`,
* signaling that the process can be short-circuited and
* the result returned immediately
*
* @return the result of inserting `op` between consecutive
* elements of this observable, going from left to right with
* the `seed` as the start value, or `seed` if the observable
* is empty
*/
final def foldWhileLeftL[S](seed: => S)(op: (S, A) => Either[S, S]): Task[S] =
foldWhileLeft(seed)(op).headL
/** Alias for [[firstL]]. */
final def headL: Task[A] = firstL
/** Creates a new [[monix.eval.Task Task]] that upon execution
* will signal the first generated element of the source observable.
*
* In case the stream was empty, then the `Task` gets completed
* in error with a `NoSuchElementException`.
*/
final def firstL: Task[A] =
firstOrElseL(throw new NoSuchElementException("firstL on empty observable"))
/** Creates a new [[monix.eval.Task Task]] that upon execution
* will signal the first generated element of the source observable.
*
* In case the stream was empty, then the given default
* gets evaluated and emitted.
*/
final def firstOrElseL[B >: A](default: => B): Task[B] =
Task.create { (s, cb) =>
unsafeSubscribeFn(new Subscriber.Sync[A] {
implicit val scheduler: Scheduler = s
private[this] var isDone = false
def onNext(elem: A): Ack = {
cb.onSuccess(elem)
isDone = true
Stop
}
def onError(ex: Throwable): Unit =
if (!isDone) {
isDone = true
cb.onError(ex)
}
def onComplete(): Unit =
if (!isDone) {
isDone = true
cb(Try(default))
}
})
}
/** Returns a `Task` that emits a single boolean, either true, in
* case the given predicate holds for all the items emitted by the
    * source, or false in case at least one item does not satisfy the
    * given predicate.
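    *
    * Example:
    *
    * {{{
    *   // Yields true
    *   Observable(1, 2, 3).forallL(_ > 0)
    * }}}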
*
* @param p is a function that evaluates the items emitted by the source
* observable, returning `true` if they pass the filter
* @return a task that emits only true or false in case the given
* predicate holds or not for all the items
*/
final def forallL(p: A => Boolean): Task[Boolean] =
existsL(e => !p(e)).map(r => !r)
/** Returns a `Task` which emits either `true`, in case the given predicate
* holds for at least one item, or `false` otherwise.
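    *
    * Example:
    *
    * {{{
    *   // Yields true
    *   Observable(1, 2, 3).existsL(_ == 2)
    * }}}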
*
* @param p is a function that evaluates the items emitted by the
* source, returning `true` if they pass the filter
* @return a task that emits `true` or `false` in case
* the given predicate holds or not for at least one item
*/
final def existsL(p: A => Boolean): Task[Boolean] =
find(p).foldLeftL(false)((_, _) => true)
/** Only emits those items for which the given predicate holds.
*
* @see [[filterEval]] for a version that works with a [[monix.eval.Task]].
* @see [[filterEvalF]] for a version that works with a generic
* `F[_]` (e.g. `cats.effect.IO`, Scala's `Future`),
* powered by [[monix.eval.TaskLike]]
*
* @param p a function that evaluates the items emitted by the source
* returning `true` if they pass the filter
* @return a new observable that emits only those items in the source
* for which the filter evaluates as `true`
*/
final def filter(p: A => Boolean): Observable[A] =
self.liftByOperator(new FilterOperator(p))
  /** Alias for [[filter]], supporting the guard syntax in for comprehensions, i.e.
*
* Example: {{{
* case class Person(age: Long)
*
* val peopleObservable: Observable[Person] =
* Observable.range(1, 100).map(Person.apply)
*
* for {
* adult <- peopleObservable if adult.age >= 18
* } yield adult
* }}}
*/
final def withFilter(p: A => Boolean): Observable[A] =
filter(p)
/** Only emits those items for which the given predicate doesn't hold.
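    *
    * Example:
    *
    * {{{
    *   // Yields 1, 3, 5
    *   Observable(1, 2, 3, 4, 5).filterNot(_ % 2 == 0)
    * }}}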
*
* @param p a function that evaluates the items emitted by the source
* returning `true` if they should be filtered out
* @return a new observable that emits only those items in the source
* for which the filter evaluates as `false`
*/
final def filterNot(p: A => Boolean): Observable[A] =
filter(p.andThen(!_))
/** Version of [[filter]] that can work with a predicate expressed by
* a [[monix.eval.Task]].
*
* @see [[filterEvalF]] for a version that works with a generic
* `F[_]` (e.g. `cats.effect.IO`, Scala's `Future`),
* powered by [[monix.eval.TaskLike]]
*/
final def filterEval(p: A => Task[Boolean]): Observable[A] =
self
.mapEval(a => p(a).map((a, _)))
.collect { case x if x._2 => x._1 }
/** Version of [[filterEval]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def filterEvalF[F[_]](p: A => F[Boolean])(implicit F: TaskLike[F]): Observable[A] =
filterEval(a => Task.from(p(a))(F))
/** Only emits the first element emitted by the source observable,
* after which it's completed immediately.
*/
final def head: Observable[A] = take(1L)
/** Selects the first `n` elements (from the start).
*
* @param n the number of elements to take
* @return a new Observable that emits only the first
* `n` elements from the source
*/
final def take(n: Long): Observable[A] =
if (n <= 0) Observable.empty else self.liftByOperator(new TakeLeftOperator(n))
/** Applies a binary operator to a start value and all elements of
* this Observable, going left to right and returns a new
* Observable that emits only one item before `onComplete`.
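    *
    * Example:
    *
    * {{{
    *   // Yields 10 upon completion of the source
    *   Observable(1, 2, 3, 4).foldLeft(0)(_ + _)
    * }}}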
*
* @param seed is the initial state, specified as a possibly lazy value;
* it gets evaluated when the subscription happens and if it triggers
* an error then the subscriber will get immediately terminated
* with an error
*
* @param op is an operator that will fold the signals of the source
* observable, returning the next state
*/
final def foldLeft[R](seed: => R)(op: (R, A) => R): Observable[R] =
new FoldLeftObservable[A, R](self, () => seed, op)
/** Applies a binary operator to a start value and all elements of
* the source, going left to right and returns a new `Task` that
* upon evaluation will eventually emit the final result.
*/
final def foldLeftL[R](seed: => R)(op: (R, A) => R): Task[R] =
foldLeft(seed)(op).headL
/** Alias for [[firstOrElseL]]. */
final def headOrElseL[B >: A](default: => B): Task[B] = firstOrElseL(default)
/** Returns a [[monix.eval.Task Task]] that upon execution
* will signal the last generated element of the source observable.
*
* In case the stream was empty, then the `Task` gets completed
* in error with a `NoSuchElementException`.
*/
final def lastL: Task[A] =
    lastOrElseL(throw new NoSuchElementException("lastL on empty observable"))
/** Returns a task that emits `true` if the source observable is
* empty, otherwise `false`.
*/
final def isEmptyL: Task[Boolean] =
isEmpty.headL
/** Returns an Observable that emits true if the source Observable is
* empty, otherwise false.
*/
final def isEmpty: Observable[Boolean] =
self.liftByOperator(IsEmptyOperator)
/** Creates a new [[monix.eval.Task Task]] that will consume the
* source observable and upon completion of the source it will
* complete with `Unit`.
*/
final def completedL: Task[Unit] =
Task.create { (s, cb) =>
unsafeSubscribeFn(new Subscriber.Sync[A] {
implicit val scheduler: Scheduler = s
private[this] var isDone = false
def onNext(elem: A): Ack = Continue
def onError(ex: Throwable): Unit =
if (!isDone) {
isDone = true; cb.onError(ex)
}
def onComplete(): Unit =
if (!isDone) {
isDone = true; cb.onSuccess(())
}
})
}
/** Polymorphic version of [[completedL]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLift]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
final def completedF[F[_]](implicit F: TaskLift[F]): F[Unit] = completedL.to[F]
/** Given a [[cats.Order]] over the stream's elements, returns the
* maximum element in the stream.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* // Yields Some(20)
* val stream1 = Observable(10, 7, 6, 8, 20, 3, 5).maxL
*
    *   // Yields None
* val stream2 = Observable.empty[Int].maxL
* }}}
*
* $catsOrderInterop
*
    * @see [[Observable.max max]] for the version that returns an
* observable instead of a `Task`.
*
* @param A is the [[cats.Order]] type class instance that's
* going to be used for comparing elements
*
* @return the maximum element of the source stream, relative
* to the defined `Order`
*/
final def maxL[AA >: A](implicit A: Order[AA]): Task[Option[AA]] =
max(A).headOptionL
/** Given a [[cats.Order]] over the stream's elements, returns the
* maximum element in the stream.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* // Yields Observable(20)
* val stream1 = Observable(10, 7, 6, 8, 20, 3, 5).max
*
* // Yields Observable.empty
* val stream2 = Observable.empty[Int].max
* }}}
*
* $catsOrderInterop
*
* @see [[Observable.maxL maxL]] for the version that returns a
* [[monix.eval.Task Task]] instead of an observable.
*
* @param A is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the maximum element of the source stream, relative
* to the defined `Order`
*/
final def max[AA >: A](implicit A: Order[AA]): Observable[AA] =
self.liftByOperator(new MaxOperator[AA]()(A))
/** Alias for [[firstOptionL]]. */
final def headOptionL: Task[Option[A]] = firstOptionL
/** Creates a new [[monix.eval.Task Task]] that upon execution
* will signal the first generated element of the source observable.
*
* Returns an `Option` because the source can be empty.
*/
final def firstOptionL: Task[Option[A]] =
map(Some.apply).firstOrElseL(None)
/** Takes the elements of the source observable and emits the
* element that has the maximum key value, where the key is
* generated by the given function.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* case class Person(name: String, age: Int)
*
* // Yields Some(Person("Alex", 34))
* Observable(Person("Alex", 34), Person("Alice", 27))
* .maxByL(_.age)
* }}}
*
* $catsOrderInterop
*
* @see [[Observable.maxBy maxBy]] for the version that returns an
* observable instead of a `Task`.
*
* @param key is the function that returns the key for which the
* given ordering is defined
*
* @param K is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the maximum element of the source stream, relative
* to its key generated by the given function and the
* given ordering
*/
final def maxByL[K](key: A => K)(implicit K: Order[K]): Task[Option[A]] =
maxBy(key)(K).headOptionL
/** Takes the elements of the source observable and emits the
* element that has the maximum key value, where the key is
* generated by the given function.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* case class Person(name: String, age: Int)
*
* // Yields Observable(Person("Alex", 34))
* Observable(Person("Alex", 34), Person("Alice", 27))
* .maxBy(_.age)
* }}}
*
* $catsOrderInterop
*
* @see [[Observable.maxByL maxByL]] for the version that returns a
* [[monix.eval.Task Task]] instead of an observable.
*
* @param key is the function that returns the key for which the
* given ordering is defined
*
* @param K is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the maximum element of the source stream, relative
* to its key generated by the given function and the
* given ordering
*/
final def maxBy[K](key: A => K)(implicit K: Order[K]): Observable[A] =
self.liftByOperator(new MaxByOperator[A, K](key)(K))
/** Given a [[cats.Order]] over the stream's elements, returns the
* minimum element in the stream.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* // Yields Some(3)
* val stream1 =
* Observable(10, 7, 6, 8, 20, 3, 5).minL
*
* // Yields None
* val stream2 =
* Observable.empty[Int].minL
* }}}
*
* $catsOrderInterop
*
    * @see [[Observable.min min]] for the version that returns an
* observable instead of a `Task`.
*
* @param A is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the minimum element of the source stream, relative
* to the defined `Order`
*/
final def minL[AA >: A](implicit A: Order[AA]): Task[Option[AA]] =
min(A).headOptionL
/** Given a [[cats.Order]] over the stream's elements, returns the
* minimum element in the stream.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* // Yields Observable(3)
* val stream1 =
* Observable(10, 7, 6, 8, 20, 3, 5).min
*
* // Yields Observable.empty
* val stream2 =
* Observable.empty[Int].min
* }}}
*
* $catsOrderInterop
*
* @see [[Observable.minL minL]] for the version that returns a
* [[monix.eval.Task Task]] instead of an observable.
*
* @param A is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the minimum element of the source stream, relative
* to the defined `Order`
*/
final def min[AA >: A](implicit A: Order[AA]): Observable[AA] =
self.liftByOperator(new MinOperator()(A))
/** Takes the elements of the source observable and emits the
* element that has the minimum key value, where the key is
* generated by the given function.
*
* ==Example==
*
* {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* case class Person(name: String, age: Int)
*
* // Yields Some(Person("Alice", 27))
* Observable(Person("Alex", 34), Person("Alice", 27))
* .minByL(_.age)
* }}}
*
* $catsOrderInterop
*
* @param key is the function that returns the key for which the
* given ordering is defined
*
* @param K is the [[cats.Order]] type class instance that's going
* to be used for comparing elements
*
* @return the minimum element of the source stream, relative
* to its key generated by the given function and the
* given ordering
*/
final def minByL[K](key: A => K)(implicit K: Order[K]): Task[Option[A]] =
minBy(key)(K).headOptionL
/** Takes the elements of the source observable and emits the
* element that has the minimum key value, where the key is
* generated by the given function.
*
    * ==Example==
    *
    * {{{
* // Needed to bring the standard Order instances in scope:
* import cats.implicits._
*
* case class Person(name: String, age: Int)
*
* // Yields Observable(Person("Alice", 27))
* val stream = Observable(Person("Alex", 34), Person("Alice", 27))
* .minBy(_.age)
* }}}
*
* $catsOrderInterop
*
* @param key is the function that returns the key for which the
* given ordering is defined
*
* @param K is the [[cats.Order]] type class instance that's
* going to be used for comparing elements
*
* @return the minimum element of the source stream, relative
* to its key generated by the given function and the
* given ordering
*/
final def minBy[K](key: A => K)(implicit K: Order[K]): Observable[A] =
self.liftByOperator(new MinByOperator[A, K](key))
/** Returns a task that emits `false` if the source observable is
* empty, otherwise `true`.
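    *
    * ==Example==
    *
    * {{{
    *   // Yields true
    *   Observable(1, 2, 3).nonEmptyL
    *
    *   // Yields false
    *   Observable.empty[Int].nonEmptyL
    * }}}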
*/
final def nonEmptyL: Task[Boolean] =
nonEmpty.headL
/** Returns an Observable that emits false if the source Observable is
* empty, otherwise true.
*/
final def nonEmpty: Observable[Boolean] =
self.liftByOperator(IsEmptyOperator).map(b => !b)
/** Given a source that emits numeric values, the `sum` operator sums
* up all values and returns the result.
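    *
    * ==Example==
    *
    * {{{
    *   // Yields 15
    *   Observable(1, 2, 3, 4, 5).sumL
    * }}}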
*/
final def sumL[B >: A](implicit B: Numeric[B]): Task[B] =
sum(B).headL
/** Given a source that emits numeric values, the `sum` operator sums
    * up all values, emitting the total at `onComplete`.
*/
final def sum[AA >: A](implicit A: Numeric[AA]): Observable[AA] =
foldLeft(A.zero)(A.plus)
/** Returns a `Task` that upon evaluation will collect all items from
* the source in a Scala `List` and return this list instead.
*
* WARNING: for infinite streams the process will eventually blow up
* with an out of memory error.
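    *
    * ==Example==
    *
    * {{{
    *   // Yields List(1, 2, 3)
    *   Observable(1, 2, 3).toListL
    * }}}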
*/
final def toListL: Task[List[A]] =
foldLeftL(mutable.ListBuffer.empty[A])(_ += _).map(_.toList)
/** Makes the source `Observable` uninterruptible such that a `cancel`
* signal has no effect.
*
* ==Example==
*
* {{{
* import scala.concurrent.duration._
*
* Observable.eval(println("Hello!"))
* .delayExecution(10.seconds)
* .uncancelable
* }}}
*
* The created observable, after `subscribe`, will print "Hello!"
* even if cancellation is attempted.
*/
final def uncancelable: Observable[A] =
new UncancelableObservable[A](self)
/** Creates a new [[monix.eval.Task Task]] that will consume the
* source observable, executing the given callback for each element.
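    *
    * ==Example==
    *
    * {{{
    *   // Prints 1, 2 and 3 when the resulting task is run
    *   Observable(1, 2, 3).foreachL(println)
    * }}}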
*/
final def foreachL(cb: A => Unit): Task[Unit] =
Task.create { (s, onFinish) =>
unsafeSubscribeFn(new ForeachSubscriber[A](cb, onFinish, s))
}
/** Transforms the source using the given transformer function. */
def transform[B](transformer: Transformer[A, B]): Observable[B] =
transformer(this)
}
/** Observable builders.
*
* @define multicastDesc Creates an input channel and an output observable
* pair for building a [[MulticastStrategy multicast]] data-source.
*
* Useful for building [[MulticastStrategy multicast]] observables
* from data-sources that cannot be back-pressured.
*
* Prefer [[Observable.create]] when possible.
*
* @define fromIteratorDesc Converts any `Iterator` into an observable.
*
* WARNING: reading from an `Iterator` is a destructive process.
* Therefore only a single subscriber is supported, the result being
* a single-subscriber observable. If multiple subscribers are attempted,
* all subscribers, except for the first one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
  *         Therefore, if you need a factory of data sources, a cold source
  *         from which you can open as many iterators as you want,
* you can use [[Observable.defer]] to build such a factory. Or you can share
* the resulting observable by converting it into a
* [[monix.reactive.observables.ConnectableObservable ConnectableObservable]]
* by means of [[Observable!.multicast multicast]].
*
* @define blocksDefaultSchedulerDesc This operation will start processing on the current
* thread (on `subscribe()`), so in order to not block, it might be better to also do an
* [[Observable.executeAsync executeAsync]], or you may want to use the
* [[monix.execution.ExecutionModel.AlwaysAsyncExecution AlwaysAsyncExecution]]
* model, which can be configured per `Scheduler`, see
* [[monix.execution.Scheduler.withExecutionModel Scheduler.withExecutionModel]],
* or per `Observable`, see [[Observable.executeWithModel]].
*/
object Observable extends ObservableDeprecatedBuilders {
/** An `Operator` is a function for transforming observers,
* that can be used for lifting observables.
*
* See [[Observable.liftByOperator]].
*/
type Operator[-I, +O] = Subscriber[O] => Subscriber[I]
/** A `Transformer` is a function used for transforming observables.
*
* See [[Observable.transform]]
*/
type Transformer[-A, +B] = Observable[A] => Observable[B]
/** Given a sequence of elements, builds an observable from it. */
def apply[A](elems: A*): Observable[A] =
Observable.fromIterable(elems)
/** Lifts an element into the `Observable` context.
*
* Alias for [[now]].
*/
def pure[A](elem: A): Observable[A] =
new builders.NowObservable(elem)
/** Alias for [[eval]]. */
def delay[A](a: => A): Observable[A] = eval(a)
/** Given a non-strict value, converts it into an Observable
* that emits a single element and that memoizes the value
* for subsequent invocations.
*/
def evalOnce[A](f: => A): Observable[A] =
new builders.EvalOnceObservable(f)
/** Returns an `Observable` that on execution emits the given strict value.
*/
def now[A](elem: A): Observable[A] =
new builders.NowObservable(elem)
/** Creates an Observable that emits an error.
*/
def raiseError[A](ex: Throwable): Observable[A] =
new builders.ErrorObservable(ex)
/** Given a non-strict value, converts it into an Observable
* that upon subscription, evaluates the expression and
* emits a single element.
*/
def eval[A](a: => A): Observable[A] =
new builders.EvalAlwaysObservable(() => a)
/** Lifts a non-strict value into an observable that emits a single element,
    * but upon subscription delays its evaluation by the specified timespan.
*/
def evalDelayed[A](delay: FiniteDuration, a: => A): Observable[A] =
eval(a).delayExecution(delay)
/** Creates an Observable that doesn't emit anything and that never
* completes.
*/
def never[A]: Observable[A] =
builders.NeverObservable
/** Reusable value for an `Observable[Unit]` that emits a single
* event, the implementation for `cats.effect.Applicative.unit`.
*/
val unit: Observable[Unit] =
Observable.now(())
  /** Keeps calling `f` for each `scala.util.Left` event emitted by the
    * source, concatenating the resulting observables and pushing every
    * `scala.util.Right[B]` event downstream.
*
* Based on Phil Freeman's
* [[http://functorial.com/stack-safety-for-free/index.pdf Stack Safety for Free]].
*
* It helps to wrap your head around it if you think of it as being
* equivalent to this inefficient and unsafe implementation (for `Observable`):
*
* {{{
* // Don't do this kind of recursion, because `flatMap` can throw
* // stack overflow errors:
* def tailRecM[A, B](a: A)(f: (A) => Observable[Either[A, B]]): Observable[B] =
* f(a).flatMap {
* case Right(b) => Observable.pure(b)
* case Left(nextA) => tailRecM(nextA)(f)
* }
* }}}
*/
def tailRecM[A, B](a: A)(f: A => Observable[Either[A, B]]): Observable[B] =
new builders.TailRecMObservable[A, B](a, f)
/** Given a subscribe function, lifts it into an [[Observable]].
*
* This function is unsafe to use because users have to know and apply
* the Monix communication contract, related to thread-safety, communicating
* demand (back-pressure) and error handling.
*
* Only use if you know what you're doing. Otherwise prefer [[create]].
*/
def unsafeCreate[A](f: Subscriber[A] => Cancelable): Observable[A] =
new builders.UnsafeCreateObservable(f)
/** Creates an observable from a function that receives a
* concurrent and safe
* [[monix.reactive.observers.Subscriber.Sync Subscriber.Sync]].
*
* This builder represents the safe way of building observables
* from data-sources that cannot be back-pressured.
*
* @param overflowStrategy is the [[OverflowStrategy overflow strategy]]
* that specifies the type of the underlying buffer (unbounded,
* that overflows the head, etc). This parameter can only specify
* a "synchronous" strategy, so no back-pressuring allowed.
*
* @param producerType (UNSAFE) is the
* [[monix.execution.ChannelType.ProducerSide producer type]]
* and can be `MultiProducer` or `SingleProducer`, specified as an
* optimization option; if you don't know what you're doing, stick to
* `MultiProducer`, which says that multiple producers can push
* events at the same time, which is the default
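    *
    * ==Example==
    *
    * A minimal sketch, adapting a hypothetical callback-based API
    * (`registerCallback` is an assumption for illustration, not a real
    * Monix function):
    *
    * {{{
    *   import monix.execution.Cancelable
    *
    *   // Hypothetical push-based source of events
    *   def registerCallback(onEvent: Int => Unit): Cancelable = ???
    *
    *   Observable.create[Int](OverflowStrategy.Unbounded) { sub =>
    *     registerCallback { i => sub.onNext(i); () }
    *   }
    * }}}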
*/
def create[A](
overflowStrategy: OverflowStrategy.Synchronous[A],
producerType: ChannelType.ProducerSide = MultiProducer)(f: Subscriber.Sync[A] => Cancelable): Observable[A] =
new builders.CreateObservable(overflowStrategy, producerType, f)
/** $multicastDesc
*
* @param multicast is the multicast strategy to use (e.g. publish, behavior,
    *        replay, async)
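    *
    * ==Example==
    *
    * A minimal sketch, assuming an implicit `Scheduler` is in scope:
    *
    * {{{
    *   val (sink, stream) =
    *     Observable.multicast(MulticastStrategy.publish[Int])
    *
    *   // Events pushed through `sink` become visible to all
    *   // current subscribers of `stream`
    * }}}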
*/
def multicast[A](multicast: MulticastStrategy[A])(implicit s: Scheduler): (Observer.Sync[A], Observable[A]) = {
val ref = ConcurrentSubject(multicast)
(ref, ref)
}
/** $multicastDesc
*
* @param multicast is the multicast strategy to use (e.g. publish, behavior,
    *        replay, async)
* @param overflow is the overflow strategy for the buffer that gets placed
* in front (since this will be a hot data-source that cannot be
* back-pressured)
*/
def multicast[A](multicast: MulticastStrategy[A], overflow: OverflowStrategy.Synchronous[A])(implicit
s: Scheduler): (Observer.Sync[A], Observable[A]) = {
val ref = ConcurrentSubject(multicast, overflow)
(ref, ref)
}
/** Converts to [[Observable]] from any `F[_]` that has an [[ObservableLike]]
* instance.
*
* Supported types includes, but is not necessarily limited to:
*
* - [[cats.Eval]]
* - [[https://typelevel.org/cats-effect/datatypes/io.html cats.effect.IO]]
* - [[https://typelevel.org/cats-effect/datatypes/syncio.html cats.effect.SyncIO]]
* - [[https://typelevel.org/cats-effect/typeclasses/effect.html cats.effect.Effect (Async)]]
* - [[https://typelevel.org/cats-effect/typeclasses/concurrent-effect.html cats.effect.ConcurrentEffect]]
* - [[https://www.reactive-streams.org/ org.reactivestreams.Publisher]]
* - [[monix.eval.Coeval]]
* - [[monix.eval.Task]]
* - [[scala.Either]]
* - [[scala.util.Try]]
* - [[scala.concurrent.Future]]
*/
def from[F[_], A](fa: F[A])(implicit F: ObservableLike[F]): Observable[A] =
F.apply(fa)
/** Converts any `Iterable` into an [[Observable]].
*/
def fromIterable[A](iterable: Iterable[A]): Observable[A] =
new builders.IterableAsObservable[A](iterable)
/** Wraps a [[scala.Iterator]] into an `Observable`.
*
* This function uses [[monix.eval.Task Task]] in order to suspend
* the creation of the `Iterator`, because reading from an `Iterator`
* is a destructive process. The `Task` is being used as a "factory",
    * in place of [[scala.Iterable]].
*
* Example:
* {{{
* import monix.eval.Task
*
* Observable.fromIterator(Task(Iterator.from(1)))
* }}}
*
* @see [[fromIterable]]
*
* @see [[fromIterator[A](resource* fromIterator(Resource)]] for a version
* that uses `cats.effect.Resource`
*
* @see [[fromIteratorUnsafe]] for the unsafe version that can wrap an
* iterator directly
*/
def fromIterator[A](task: Task[Iterator[A]]): Observable[A] =
Observable.fromTask(task.map(fromIteratorUnsafe)).flatten
/** Wraps a [[scala.Iterator]] into an `Observable` in the context of a
* [[https://typelevel.org/cats-effect/datatypes/resource.html cats.effect.Resource]],
* which allows for specifying a finalizer.
*
* @see [[fromIterable]]
*
* @see [[fromIterator[A](task* fromIterator(task)]] for a version
* that uses [[monix.eval.Task Task]] for suspending side effects
*
* @see [[fromIteratorUnsafe]] for the unsafe version that can wrap an
* iterator directly
*/
def fromIterator[A](resource: Resource[Task, Iterator[A]]): Observable[A] =
Observable.fromResource(resource).flatMap(fromIteratorUnsafe)
/** Version of fromIterator that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def fromIteratorF[F[_], A](iteratorF: F[Iterator[A]])(implicit F: TaskLike[F]): Observable[A] =
fromIterator(F(iteratorF))
/** Converts any `Iterator` into an observable.
*
* '''UNSAFE WARNING''': reading from an `Iterator` is a destructive process.
* Therefore only a single subscriber is supported, the result being
* a single-subscriber observable. If multiple subscribers are attempted,
* all subscribers, except for the first one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
* @see [[fromIterator[A](task* fromIterator(task)]] or
* [[fromIterator[A](resource* fromIterator(resource)]]
* for safe alternatives
*
* @param iterator to transform into an observable
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def fromIteratorUnsafe[A](iterator: Iterator[A]): Observable[A] =
new builders.IteratorAsObservable[A](iterator)
/** Wraps a [[scala.Iterator]] into an `Observable` that emits events in `chunkSize` batches.
*
* This function uses [[monix.eval.Task Task]] in order to suspend
* the creation of the `Iterator`, because reading from an `Iterator`
* is a destructive process. The `Task` is being used as a "factory",
    * in place of [[scala.Iterable]].
*
* Example:
* {{{
* import monix.eval.Task
*
* Observable.fromIteratorBuffered(Task(Iterator.from(1)), 2)
* }}}
*
* @see [[fromIterable]]
*
* @see [[fromIteratorBuffered[A](resource* fromIteratorBuffered(Resource)]] for a version
* that uses `cats.effect.Resource`
*
* @see [[fromIteratorBufferedUnsafe]] for the unsafe version that can wrap an
* iterator directly
*/
def fromIteratorBuffered[A](task: Task[Iterator[A]], bufferSize: Int): Observable[Seq[A]] =
Observable.fromTask(task.map(fromIteratorBufferedUnsafe(_, bufferSize))).flatten
/** Wraps a [[scala.Iterator]] into an `Observable` in the context of a
* [[https://typelevel.org/cats-effect/datatypes/resource.html cats.effect.Resource]],
* which allows for specifying a finalizer.
*
* @see [[fromIterable]]
*
* @see [[fromIteratorBuffered[A](task* fromIteratorBuffered(task)]] for a version
* that uses [[monix.eval.Task Task]] for suspending side effects
*
* @see [[fromIteratorBufferedUnsafe]] for the unsafe version that can wrap an
* iterator directly
*/
def fromIteratorBuffered[A](resource: Resource[Task, Iterator[A]], bufferSize: Int): Observable[Seq[A]] =
Observable.fromResource(resource).flatMap(fromIteratorBufferedUnsafe(_, bufferSize))
/** Converts any `Iterator` into an observable that emits events in `bufferSize` batches.
*
* '''UNSAFE WARNING''': reading from an `Iterator` is a destructive process.
* Therefore only a single subscriber is supported, the result being
* a single-subscriber observable. If multiple subscribers are attempted,
* all subscribers, except for the first one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
* @see [[fromIteratorBuffered[A](task* fromIteratorBuffered(task)]] or
* [[fromIteratorBuffered[A](resource* fromIteratorBuffered(resource)]]
* for safe alternatives
*
* @param iterator to transform into an observable
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def fromIteratorBufferedUnsafe[A](iterator: Iterator[A], bufferSize: Int): Observable[Seq[A]] =
new builders.BufferedIteratorAsObservable[A](iterator, bufferSize)
/** Transforms any `cats.effect.Resource` into an [[Observable]].
*
* See the
* [[https://typelevel.org/cats-effect/datatypes/resource.html documentation for Resource]].
*
* {{{
* import cats.effect.Resource
* import monix.eval.Task
* import java.io._
*
* def openFileAsResource(file: File): Resource[Task, FileInputStream] =
* Resource.make(Task(new FileInputStream(file)))(h => Task(h.close()))
*
* def openFileAsStream(file: File): Observable[FileInputStream] =
* Observable.fromResource(openFileAsResource(file))
* }}}
*
* This example would be equivalent with usage of [[Observable.resource]]:
*
* {{{
* def openFileAsResource2(file: File): Observable[FileInputStream] = {
* Observable.resource(Task(new FileInputStream(file)))(h => Task(h.close()))
* }
* }}}
*
* This means that `flatMap` is safe to use:
*
* {{{
* def readBytes(file: File): Observable[Array[Byte]] =
* openFileAsStream(file).flatMap { in =>
* Observable.fromInputStreamUnsafe(in)
* }
* }}}
*/
def fromResource[F[_], A](resource: Resource[F, A])(implicit F: TaskLike[F]): Observable[A] =
resource match {
case ra: Resource.Allocate[F, A] @unchecked =>
Observable
.resourceCase(F(ra.resource)) { case ((_, release), exitCase) => F(release(exitCase)) }
.map(_._1)
case ra: Resource.Suspend[F, A] @unchecked =>
Observable.from(ra.resource).flatMap { res =>
fromResource(res)
}
case ra: Resource.Bind[F, Any, A] @unchecked =>
fromResource(ra.source).flatMap { s =>
fromResource(ra.fs(s))
}
}
/** Safely converts a `java.io.InputStream` into an observable that will
* emit `Array[Byte]` elements.
*
* Compared with [[fromInputStreamUnsafe]], this version:
*
* - is referentially transparent, the input being a "generator"
* powered by [[monix.eval.Task]]
* - automatically forks execution on subscription to ensure that
* the current thread isn't blocked by the ensuing blocking I/O
* - ensures that the input stream is closed on completion,
* failure or cancellation
*
* @param in the `Task[InputStream]` generator to convert into an observable
* @param chunkSize the maximum length of the emitted arrays of bytes, must be positive
*/
def fromInputStream(in: Task[InputStream], chunkSize: Int = 4096): Observable[Array[Byte]] = {
Observable
.resource(in)(h => Task(h.close()))
.flatMap(fromInputStreamUnsafe(_, chunkSize))
.executeAsync
}
/** Version of [[fromInputStream]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def fromInputStreamF[F[_]](in: F[InputStream], chunkSize: Int = 4096)(implicit
F: TaskLike[F]): Observable[Array[Byte]] =
fromInputStream(F(in), chunkSize)
/** Converts a `java.io.InputStream` into an observable that will
* emit `Array[Byte]` elements.
*
* '''UNSAFE WARNING''': this is an unsafe function, because reading from
* an input stream is a destructive process, also violating
* referential transparency. Therefore only a single subscriber is
* supported, the result being a single-subscriber observable. If
* multiple subscribers are attempted, all subscribers, except for
* the first one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
* '''UNSAFE PROTOCOL''': the created Observable does not close the given
* `InputStream`. Usually it's the producer of a resource that needs
* to deallocate the resource.
*
* $blocksDefaultSchedulerDesc
*
* @see [[fromInputStream]] for the safe version
* @param in the `InputStream` to convert into an observable
* @param chunkSize the maximum length of the emitted arrays of bytes, must be positive
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def fromInputStreamUnsafe(in: InputStream, chunkSize: Int = 4096): Observable[Array[Byte]] =
new builders.InputStreamObservable(in, chunkSize)
/** Safely converts a `java.io.Reader` into an observable that will
* emit `Array[Char]` elements.
*
* Compared with [[fromCharsReaderUnsafe]], this version:
*
* - is referentially transparent, the input being a "generator"
* powered by [[monix.eval.Task]]
* - automatically forks execution on subscription to ensure that
* the current thread isn't blocked by the ensuing blocking I/O
* - ensures that the input stream is closed on completion,
* failure or cancellation
*
* @param in the `Task[Reader]` generator to convert into an observable
* @param chunkSize the maximum length of the emitted arrays of chars, must be positive
*/
def fromCharsReader(in: Task[Reader], chunkSize: Int = 4096): Observable[Array[Char]] = {
Observable
.resource(in)(h => Task(h.close()))
.flatMap(fromCharsReaderUnsafe(_, chunkSize))
.executeAsync
}
/** Version of [[fromCharsReader]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def fromCharsReaderF[F[_]](in: F[Reader], chunkSize: Int = 4096)(implicit F: TaskLike[F]): Observable[Array[Char]] =
fromCharsReader(F(in), chunkSize)
/** Converts a `java.io.Reader` into an observable that will emit
* `Array[Char]` elements.
*
* '''UNSAFE WARNING''': this is an unsafe function, because reading from
* a reader is a destructive process, also violating referential
* transparency. Therefore only a single subscriber is supported,
* the result being a single-subscriber observable. If multiple
* subscribers are attempted, all subscribers, except for the first
* one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
* '''UNSAFE PROTOCOL''': the created Observable does not close the given
* `Reader`. Usually it's the producer of a resource that needs
* to deallocate the resource.
*
* $blocksDefaultSchedulerDesc
*
* @see [[fromCharsReader]] for the safe version
*
* @param in the `Reader` to convert into an observable
* @param chunkSize the maximum length of the emitted arrays of chars, must be positive
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def fromCharsReaderUnsafe(in: Reader, chunkSize: Int = 4096): Observable[Array[Char]] =
new builders.CharsReaderObservable(in, chunkSize)
/** Safely converts a `java.io.BufferedReader` into an observable that will
* emit `String` elements corresponding to text lines from the input.
*
* According to the specification of `BufferedReader`, a line is considered
* to be terminated by any one of a line feed (`\\n`), a carriage return (`\\r`),
* or a carriage return followed immediately by a linefeed.
*
* Compared with [[fromLinesReaderUnsafe]], this version:
*
* - is referentially transparent, the input being a "generator"
* powered by [[monix.eval.Task]]
* - automatically forks execution on subscription to ensure that
* the current thread isn't blocked by the ensuing blocking I/O
* - ensures that the input stream is closed on completion,
* failure or cancellation
*
* @param in is the `Task[BufferedReader]` generator to convert into an observable
*/
def fromLinesReader(in: Task[BufferedReader]): Observable[String] = {
Observable
.resource(in)(h => Task(h.close()))
.flatMap(fromLinesReaderUnsafe)
.executeAsync
}
/** Version of [[fromLinesReader]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def fromLinesReaderF[F[_]](in: F[BufferedReader])(implicit F: TaskLike[F]): Observable[String] =
fromLinesReader(F(in))
/** Converts a `java.io.BufferedReader` into an observable that will emit
* `String` text lines from the input.
*
* According to the specification of `BufferedReader`, a line is considered
* to be terminated by any one of a line feed (`\\n`), a carriage return (`\\r`),
* or a carriage return followed immediately by a linefeed.
*
* '''UNSAFE WARNING''': this is an unsafe function, because reading from
* a reader is a destructive process, also violating referential
* transparency. Therefore only a single subscriber is supported,
* the result being a single-subscriber observable. If multiple
* subscribers are attempted, all subscribers, except for the first
* one, will be terminated with a
* [[monix.execution.exceptions.APIContractViolationException APIContractViolationException]].
*
* '''UNSAFE PROTOCOL''': the created Observable does not close the given
* `Reader`. Usually it's the producer of a resource that needs
* to deallocate the resource.
*
* @see [[fromLinesReader]] for the safe version
*
* @param in is the `Reader` to convert into an observable
*/
@UnsafeProtocol
@UnsafeBecauseImpure
def fromLinesReaderUnsafe(in: BufferedReader): Observable[String] =
new builders.LinesReaderObservable(in)
/** Given a `org.reactivestreams.Publisher`, converts it into a
* Monix / Rx Observable.
*
* See the [[http://www.reactive-streams.org/ Reactive Streams]]
* protocol that Monix implements.
*
* @see [[Observable.toReactive]] for converting an `Observable` to
* a reactive publisher.
*
* @param publisher is the `org.reactivestreams.Publisher` reference to
* wrap into an [[Observable]]
*/
def fromReactivePublisher[A](publisher: RPublisher[A]): Observable[A] =
new builders.ReactiveObservable[A](publisher, 0)
/** Given a `org.reactivestreams.Publisher`, converts it into a
* Monix / Rx Observable.
*
* See the [[http://www.reactive-streams.org/ Reactive Streams]]
* protocol that Monix implements.
*
* @see [[Observable.toReactive]] for converting an `Observable` to
* a reactive publisher.
*
* @param publisher is the `org.reactivestreams.Publisher` reference to
* wrap into an [[Observable]]
*
* @param requestCount a strictly positive number, representing the size
* of the buffer used and the number of elements requested on each
* cycle when communicating demand, compliant with the
* reactive streams specification. If `Int.MaxValue` is given,
* then no back-pressuring logic will be applied (e.g. an unbounded
* buffer is used and the source has a license to stream as many
* events as it wants).
*/
def fromReactivePublisher[A](publisher: RPublisher[A], requestCount: Int): Observable[A] =
new builders.ReactiveObservable[A](publisher, requestCount)
/** Transforms a non-strict [[monix.eval.Coeval Coeval]] value
* into an `Observable` that emits a single element.
*/
def coeval[A](value: Coeval[A]): Observable[A] =
value match {
case Coeval.Now(a) => Observable.now(a)
case Coeval.Error(e) => Observable.raiseError(e)
case other => Observable.eval(other.value())
}
/** Converts a Scala `Try` into an `Observable`.
*
* {{{
* import scala.util.Try
*
* val value = Try(1)
* Observable.fromTry(value)
* }}}
*/
def fromTry[A](a: Try[A]): Observable[A] =
a match {
case Success(v) => Observable.now(v)
case Failure(e) => Observable.raiseError(e)
}
/** Builds an `Observable` instance out of a Scala `Either`.
*/
def fromEither[E <: Throwable, A](a: Either[E, A]): Observable[A] =
a match {
case Right(v) => Observable.now(v)
case Left(ex) => Observable.raiseError(ex)
}
/** Builds a [[Observable]] instance out of a Scala `Either`.
*/
def fromEither[E, A](f: E => Throwable)(a: Either[E, A]): Observable[A] =
a match {
case Right(v) => Observable.now(v)
case Left(ex) => Observable.raiseError(f(ex))
}
/** Converts a Scala `Future` provided into an [[Observable]].
*
* If the created instance is a
* [[monix.execution.CancelableFuture CancelableFuture]],
* then it will be used for the returned
* [[monix.execution.Cancelable Cancelable]] on `subscribe`.
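    *
    * ==Example==
    *
    * {{{
    *   import scala.concurrent.Future
    *   import scala.concurrent.ExecutionContext.Implicits.global
    *
    *   // Emits 10, then completes
    *   Observable.fromFuture(Future(10))
    * }}}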
*/
def fromFuture[A](factory: => Future[A]): Observable[A] =
new builders.FutureAsObservable(factory)
/** Converts generic `F[_]` effects to `Observable`.
*
* Currently supported data types:
*
* - [[monix.eval.Task]]
* - [[monix.eval.Coeval]]
* - [[scala.concurrent.Future]]
* - [[https://typelevel.org/cats-effect/datatypes/io.html cats.effect.IO]]
* - any [[https://typelevel.org/cats-effect/typeclasses/effect.html cats.effect.Effect]]
* - any [[https://typelevel.org/cats-effect/typeclasses/concurrent-effect.html cats.effect.ConcurrentEffect]]
*
* Sample:
*
* {{{
* import cats.implicits._
* import cats.effect.IO
* import cats.effect.Timer
* import scala.concurrent.duration._
* import monix.execution.Scheduler.global
* import monix.catnap.SchedulerEffect
*
* // Needed for IO.sleep
* implicit val timer: Timer[IO] = SchedulerEffect.timerLiftIO[IO](global)
* val task = IO.sleep(5.seconds) *> IO(println("Hello!"))
*
* Observable.fromTaskLike(task)
* }}}
*/
def fromTaskLike[F[_], A](fa: F[A])(implicit F: TaskLike[F]): Observable[A] =
fromTask(F(fa))
/** Converts any [[monix.eval.Task Task]] into an [[Observable]].
*
* {{{
* import monix.eval.Task
*
* val task = Task.eval("Hello!")
*
* Observable.fromTask(task)
* }}}
*/
def fromTask[A](task: Task[A]): Observable[A] =
new builders.TaskAsObservable(task)
  /** Returns an `F ~> Observable` (`FunctionK`) for transforming any
* supported data-type into [[Observable]].
*/
def liftFrom[F[_]](implicit F: ObservableLike[F]): F ~> Observable = F
/** Alias for [[defer]]. */
def suspend[A](fa: => Observable[A]): Observable[A] = defer(fa)
/** Returns a new observable that creates a sequence from the
* given factory on each subscription.
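    *
    * ==Example==
    *
    * {{{
    *   // The current time is sampled at subscription time,
    *   // not when the observable is defined:
    *   Observable.defer(Observable.now(System.currentTimeMillis()))
    * }}}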
*/
def defer[A](fa: => Observable[A]): Observable[A] =
new builders.DeferObservable(() => fa)
/** Builds a new observable from a strict `head` and a lazily
* evaluated tail.
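    *
    * ==Example==
    *
    * {{{
    *   // Emits 1, 2, 3
    *   Observable.cons(1, Observable(2, 3))
    * }}}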
*/
def cons[A](head: A, tail: Observable[A]): Observable[A] =
new builders.ConsObservable[A](head, tail)
/** Creates a new observable from this observable and another given
* observable by interleaving their items into a strictly alternating sequence.
*
    * So the first item emitted by the new observable will be the item
    * emitted by `oa1`, the second item will be emitted by `oa2`, and so
    * forth; when either `oa1` or `oa2` calls `onComplete`, the items will
    * then be directly coming from the observable that has not completed;
    * when `onError` is called by either source, the new observable will
    * call `onError` and halt.
*
* See [[Observable!.merge merge]] for a more relaxed alternative that doesn't
* emit items in strict alternating sequence.
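    *
    * ==Example==
    *
    * {{{
    *   // Emits 1, 2, 1, 2, 2
    *   Observable.interleave2(Observable(1, 1), Observable(2, 2, 2))
    * }}}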
*/
def interleave2[A](oa1: Observable[A], oa2: Observable[A]): Observable[A] =
new builders.Interleave2Observable(oa1, oa2)
/** Creates an Observable that emits auto-incremented natural numbers
* (longs) spaced by a given time interval. Starts from 0 with `initialDelay`,
* after which it emits incremented numbers spaced by the
* `delay` of time. The given `delay` of time acts as a fixed
* delay between successive events.
*
* This version of the `intervalWithFixedDelay` allows specifying an
* `initialDelay` before events start being emitted.
*
* @param initialDelay is the delay to wait before emitting the first event
* @param delay the time to wait between 2 successive events
*/
def intervalWithFixedDelay(initialDelay: FiniteDuration, delay: FiniteDuration): Observable[Long] =
new builders.IntervalFixedDelayObservable(initialDelay, delay)
/** Creates an Observable that emits auto-incremented natural numbers
* (longs) spaced by a given time interval. Starts from 0 with no
* delay, after which it emits incremented numbers spaced by the
* `delay` of time. The given `delay` of time acts as a fixed
* delay between successive events.
*
* @param delay the delay between 2 successive events
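    *
    * ==Example==
    *
    * {{{
    *   import scala.concurrent.duration._
    *
    *   // Emits 0, 1, 2, ... spaced by one second
    *   Observable.interval(1.second)
    * }}}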
*/
def interval(delay: FiniteDuration): Observable[Long] =
intervalWithFixedDelay(delay)
/** Creates an Observable that emits auto-incremented natural numbers
* (longs) spaced by a given time interval. Starts from 0 with no
* delay, after which it emits incremented numbers spaced by the
* `delay` of time. The given `delay` of time acts as a fixed
* delay between successive events.
*
* @param delay the delay between 2 successive events
*/
def intervalWithFixedDelay(delay: FiniteDuration): Observable[Long] =
new builders.IntervalFixedDelayObservable(Duration.Zero, delay)
/** Creates an Observable that emits auto-incremented natural numbers
* (longs) at a fixed rate, as given by the specified `period`. The
* time it takes to process an `onNext` event gets subtracted from
* the specified `period` and thus the created observable tries to
* emit events spaced by the given time interval, regardless of how
* long the processing of `onNext` takes.
*
* @param period the period between 2 successive `onNext` events
*/
def intervalAtFixedRate(period: FiniteDuration): Observable[Long] =
new builders.IntervalFixedRateObservable(Duration.Zero, period)
/** Creates an Observable that emits auto-incremented natural numbers
* (longs) at a fixed rate, as given by the specified `period`. The
* time it takes to process an `onNext` event gets subtracted from
* the specified `period` and thus the created observable tries to
* emit events spaced by the given time interval, regardless of how
* long the processing of `onNext` takes.
*
* This version of the `intervalAtFixedRate` allows specifying an
* `initialDelay` before events start being emitted.
*
* @param initialDelay is the initial delay before emitting the first event
* @param period the period between 2 successive `onNext` events
*/
def intervalAtFixedRate(initialDelay: FiniteDuration, period: FiniteDuration): Observable[Long] =
new builders.IntervalFixedRateObservable(initialDelay, period)
  /** Creates an Observable that repeatedly emits the given elements, indefinitely.
*/
def repeat[A](elems: A*): Observable[A] =
new builders.RepeatObservable(elems: _*)
/** Repeats the execution of the given `task`, emitting
* the results indefinitely.
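    *
    * ==Example==
    *
    * {{{
    *   // Samples the clock again for every emitted element
    *   Observable.repeatEval(System.currentTimeMillis())
    * }}}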
*/
def repeatEval[A](task: => A): Observable[A] =
new builders.RepeatEvalObservable(task)
/** Repeats the evaluation of given effectful value, emitting
* the results indefinitely.
*/
def repeatEvalF[F[_], A](fa: F[A])(implicit F: TaskLike[F]): Observable[A] =
repeat(()).mapEvalF(_ => fa)(F)
/** Creates an Observable that emits items in the given range.
*
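    * ==Example==
    *
    * {{{
    *   // Emits 0, 2, 4, 6, 8
    *   Observable.range(0, 10, 2)
    * }}}
    *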
* @param from the range start
* @param until the range end
* @param step increment step, either positive or negative
*/
def range(from: Long, until: Long, step: Long = 1L): Observable[Long] =
new builders.RangeObservable(from, until, step)
/** Given an initial state and a generator function that produces the
* next state and the next element in the sequence, creates an
* observable that keeps generating elements produced by our
* generator function.
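    *
    * ==Example==
    *
    * {{{
    *   // Emits the natural numbers 0, 1, 2, ... indefinitely
    *   Observable.fromStateAction[Int, Int](s => (s, s + 1))(0)
    * }}}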
*/
def fromStateAction[S, A](f: S => (A, S))(seed: => S): Observable[A] =
new builders.StateActionObservable(seed, f)
/** Given an initial state and a generator function that produces the
* next state and the next element in the sequence, creates an
* observable that keeps generating elements produced by our
* generator function until `None` is returned.
* @example {{{
* Observable.unfold(0)(i => if (i < 10) Some((i, i + 1)) else None).toListL
*
* result: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
* }}}
*
* @see [[paginate]] for a way to return one more value when generator returns `None`
*/
def unfold[S, A](seed: => S)(f: S => Option[(A, S)]): Observable[A] =
new UnfoldObservable(seed, f)
/** Given an initial state and a generator function that produces the
* next state and the next element in the sequence, creates an
* observable that keeps generating elements produced by our
* generator function until `None` is returned.
* @example {{{
* Observable.unfoldEval(0)(i => if (i < 10) Task.now(Some((i, i + 1))) else Task.now(None)).toListL
*
* result: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
* }}}
*
* @see [[paginateEval]] for a way to return one more value when generator returns `None`
*/
def unfoldEval[S, A](seed: => S)(f: S => Task[Option[(A, S)]]): Observable[A] =
new UnfoldEvalObservable(seed, f)
  /** Similar to [[unfold]], but allows emitting one final element when the generator returns `None`.
* @example {{{
* Observable.paginate(0)(i => if (i < 10) (i, Some(i + 1)) else (i, None)).toListL
*
* result: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
* }}}
*/
def paginate[S, A](seed: => S)(f: S => (A, Option[S])): Observable[A] =
new PaginateObservable(seed, f)
  /** Similar to [[unfoldEval]], but allows emitting one final element when the generator returns `None`.
* @example {{{
* Observable.paginateEval(0)(i => if (i < 10) Task.now((i, Some(i + 1))) else Task.now((i,None))).toListL
*
* result: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
* }}}
*/
def paginateEval[S, A](seed: => S)(f: S => Task[(A, Option[S])]): Observable[A] =
new PaginateEvalObservable(seed, f)
/** Version of [[unfoldEval]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* @see [[unfoldEval]] for a version specialized for
* [[monix.eval.Task Task]]
*/
def unfoldEvalF[F[_], S, A](seed: => S)(f: S => F[Option[(A, S)]])(implicit F: TaskLike[F]): Observable[A] =
unfoldEval(seed)(a => Task.from(f(a)))
/** Given an initial state and a generator function that produces the
* next state and the next element in the sequence, creates an
* observable that keeps generating elements produced by our
* generator function.
*/
def fromAsyncStateAction[S, A](f: S => Task[(A, S)])(seed: => S): Observable[A] =
new builders.AsyncStateActionObservable(seed, f)
/** Version of [[fromAsyncStateAction]] that can work with generic
* `F[_]` tasks, anything that's supported via [[monix.eval.TaskLike]]
* conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*
* @see [[fromAsyncStateAction]] for a version specialized for
* [[monix.eval.Task Task]]
*/
def fromAsyncStateActionF[F[_], S, A](f: S => F[(A, S)])(seed: => S)(implicit F: TaskLike[F]): Observable[A] =
fromAsyncStateAction[S, A](a => Task.from(f(a)))(seed)
/** Wraps this Observable into a `org.reactivestreams.Publisher`.
* See the [[http://www.reactive-streams.org/ Reactive Streams]]
* protocol that Monix implements.
*/
def toReactive[A](source: Observable[A])(implicit s: Scheduler): RPublisher[A] =
source.toReactivePublisher[A](s)
  /** Creates an Observable that repeatedly emits the given `item`, until
* the underlying Observer cancels.
*/
def timerRepeated[A](initialDelay: FiniteDuration, period: FiniteDuration, unit: A): Observable[A] =
new builders.RepeatedValueObservable[A](initialDelay, period, unit)
/** Creates a new observable from two observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (3, 3), (4, 4)
* </pre>
*
* See [[combineLatestMap2]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*/
def zip2[A1, A2](oa1: Observable[A1], oa2: Observable[A2]): Observable[(A1, A2)] =
new builders.Zip2Observable[A1, A2, (A1, A2)](oa1, oa2)((a1, a2) => (a1, a2))
/** Creates a new observable from two observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (3, 3), (4, 4)
* </pre>
*
* See [[combineLatestMap2]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param f is the mapping function applied over the generated pairs
*/
def zipMap2[A1, A2, R](oa1: Observable[A1], oa2: Observable[A2])(f: (A1, A2) => R): Observable[R] =
new builders.Zip2Observable[A1, A2, R](oa1, oa2)(f)
/** Creates a new observable from three observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap3]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*/
def zip3[A1, A2, A3](oa1: Observable[A1], oa2: Observable[A2], oa3: Observable[A3]): Observable[(A1, A2, A3)] =
new builders.Zip3Observable(oa1, oa2, oa3)((a1, a2, a3) => (a1, a2, a3))
/** Creates a new observable from three observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap3]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param f is the mapping function applied over the generated pairs
*/
def zipMap3[A1, A2, A3, R](oa1: Observable[A1], oa2: Observable[A2], oa3: Observable[A3])(
f: (A1, A2, A3) => R): Observable[R] =
new builders.Zip3Observable(oa1, oa2, oa3)(f)
/** Creates a new observable from four observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap4]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*/
def zip4[A1, A2, A3, A4](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4]): Observable[(A1, A2, A3, A4)] =
new builders.Zip4Observable(oa1, oa2, oa3, oa4)((a1, a2, a3, a4) => (a1, a2, a3, a4))
/** Creates a new observable from four observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap4]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param f is the mapping function applied over the generated pairs
*/
def zipMap4[A1, A2, A3, A4, R](oa1: Observable[A1], oa2: Observable[A2], oa3: Observable[A3], oa4: Observable[A4])(
f: (A1, A2, A3, A4) => R): Observable[R] =
new builders.Zip4Observable(oa1, oa2, oa3, oa4)(f)
/** Creates a new observable from five observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap5]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*/
def zip5[A1, A2, A3, A4, A5](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5]): Observable[(A1, A2, A3, A4, A5)] =
new builders.Zip5Observable(oa1, oa2, oa3, oa4, oa5)((a1, a2, a3, a4, a5) => (a1, a2, a3, a4, a5))
/** Creates a new observable from five observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
* See [[combineLatestMap5]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param f is the mapping function applied over the generated pairs
*/
def zipMap5[A1, A2, A3, A4, A5, R](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5])(f: (A1, A2, A3, A4, A5) => R): Observable[R] =
new builders.Zip5Observable(oa1, oa2, oa3, oa4, oa5)(f)
  /** Creates a new observable from six observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
    * See [[combineLatestMap6]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*/
def zip6[A1, A2, A3, A4, A5, A6](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5],
oa6: Observable[A6]): Observable[(A1, A2, A3, A4, A5, A6)] =
new builders.Zip6Observable(oa1, oa2, oa3, oa4, oa5, oa6)((a1, a2, a3, a4, a5, a6) => (a1, a2, a3, a4, a5, a6))
  /** Creates a new observable from six observable sequences
* by combining their items in pairs in a strict sequence.
*
* So the first item emitted by the new observable will be the result
* of the function applied to the first items emitted by each of
* the source observables; the second item emitted by the new observable
* will be the result of the function applied to the second items
* emitted by each of those observables; and so forth.
*
    * See [[combineLatestMap6]] for a more relaxed alternative that doesn't
* combine items in strict sequence.
*
* @param f is the mapping function applied over the generated pairs
*/
def zipMap6[A1, A2, A3, A4, A5, A6, R](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5],
oa6: Observable[A6])(f: (A1, A2, A3, A4, A5, A6) => R): Observable[R] =
new builders.Zip6Observable(oa1, oa2, oa3, oa4, oa5, oa6)(f)
  /** Given a sequence of observables, [[Observable!.zip zips]] them
    * together, returning a new observable that emits sequences.
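    *
    * ==Example==
    *
    * {{{
    *   // Emits Seq(1, 4), Seq(2, 5), then completes
    *   Observable.zipList(Observable(1, 2), Observable(4, 5, 6))
    * }}}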
*/
def zipList[A](sources: Observable[A]*): Observable[Seq[A]] = {
if (sources.isEmpty) Observable.empty
else {
val seed = sources.head.map(t => Vector(t))
sources.tail.foldLeft(seed) { (acc, obs) =>
acc.zipMap(obs)((seq, elem) => seq :+ elem)
}
}
}
/** Creates an observable that doesn't emit anything, but immediately
* calls `onComplete` instead.
*/
def empty[A]: Observable[A] =
builders.EmptyObservable
  /** Creates an `Observable` that depends on resource allocated by a
* monadic value, ensuring the resource is released.
*
    * Typical use-cases are working with files or network sockets.
*
* ==Example==
*
* {{{
* import monix.eval.Task
* import java.io.PrintWriter
*
* val printer =
* Observable.resource {
* Task(new PrintWriter("./lines.txt"))
* } { writer =>
* Task(writer.close())
* }
*
* // Safely use the resource, because the release is
* // scheduled to happen afterwards
* val writeLines = printer.flatMap { writer =>
* Observable
* .fromIterator(Task(Iterator.from(1)))
* .mapEval(i => Task { writer.println(s"Line #\\\\$i") })
* }
*
* // Write 100 numbered lines to the file, closing the writer
* // when finished (after `runAsync`):
* writeLines.take(100).completedL
* }}}
*
* @param acquire resource to acquire at the start of the stream
* @param release function that releases the acquired resource
*/
def resource[A](acquire: Task[A])(release: A => Task[Unit]): Observable[A] =
resourceCase(acquire)((a, _) => release(a))
/** Version of [[resource]] that can work with generic `F[_]` tasks,
* anything that's supported via [[monix.eval.TaskLike]] conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def resourceF[F[_], A](acquire: F[A])(release: A => F[Unit])(implicit F: TaskLike[F]): Observable[A] =
resource(F(acquire))(a => F(release(a)))
/** Creates a stream that depends on resource allocated by a
* monadic value, ensuring the resource is released.
*
    * Typical use-cases are working with files or network sockets.
*
* ==Example==
*
* {{{
* import cats.effect.ExitCase
* import monix.eval.Task
* import java.io.PrintWriter
*
* val printer =
* Observable.resourceCase {
* Task(new PrintWriter("./lines.txt"))
* } {
* case (writer, ExitCase.Canceled | ExitCase.Completed) =>
* Task(writer.close())
* case (writer, ExitCase.Error(e)) =>
* Task { println(e.getMessage); writer.close() }
* }
*
* // Safely use the resource, because the release is
* // scheduled to happen afterwards
* val writeLines = printer.flatMap { writer =>
* Observable
* .fromIterator(Task(Iterator.from(1)))
* .mapEval(i => Task { writer.println(s"Line #\\\\$i") })
* }
*
* // Write 100 numbered lines to the file, closing the writer
* // when finished (after `runAsync`):
* writeLines.take(100).completedL
* }}}
*
* @param acquire an effect that acquires an expensive resource
* @param release function that releases the acquired resource
*/
def resourceCase[A](acquire: Task[A])(release: (A, ExitCase[Throwable]) => Task[Unit]): Observable[A] =
new ResourceCaseObservable(acquire, release)
/** Version of [[resourceCase]] that can work with generic `F[_]` tasks,
* anything that's supported via [[monix.eval.TaskLike]] conversions.
*
* So you can work among others with:
*
* - `cats.effect.IO`
* - `monix.eval.Coeval`
* - `scala.concurrent.Future`
* - ...
*/
def resourceCaseF[F[_], A](acquire: F[A])(release: (A, ExitCase[Throwable]) => F[Unit])(implicit
F: TaskLike[F]): Observable[A] =
resourceCase(F(acquire))((a, e) => F(release(a, e)))
/** Creates a combined observable from 2 source observables.
*
* This operator behaves in a similar way to [[zip2]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (2, 3), (3, 3), (4, 3), (4, 4)
* </pre>
*/
def combineLatest2[A1, A2](oa1: Observable[A1], oa2: Observable[A2]): Observable[(A1, A2)] =
new builders.CombineLatest2Observable[A1, A2, (A1, A2)](oa1, oa2)((a1, a2) => (a1, a2))
/** Creates a combined observable from 2 source observables.
*
* This operator behaves in a similar way to [[zipMap2]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*
* == Visual Example ==
*
* <pre>
* stream1: 1 - - 2 - - 3 - 4 - -
* stream2: 1 - - 2 - 3 - - - - 4
*
* result: (1, 1), (2, 2), (2, 3), (3, 3), (4, 3), (4, 4)
* </pre>
*/
def combineLatestMap2[A1, A2, R](oa1: Observable[A1], oa2: Observable[A2])(f: (A1, A2) => R): Observable[R] =
new builders.CombineLatest2Observable[A1, A2, R](oa1, oa2)(f)
/** Creates a combined observable from 3 source observables.
*
* This operator behaves in a similar way to [[zip3]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatest3[A1, A2, A3](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3]): Observable[(A1, A2, A3)] =
new builders.CombineLatest3Observable(oa1, oa2, oa3)((a1, a2, a3) => (a1, a2, a3))
/** Creates a combined observable from 3 source observables.
*
* This operator behaves in a similar way to [[zipMap3]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatestMap3[A1, A2, A3, R](a1: Observable[A1], a2: Observable[A2], a3: Observable[A3])(
f: (A1, A2, A3) => R): Observable[R] =
new builders.CombineLatest3Observable[A1, A2, A3, R](a1, a2, a3)(f)
/** Creates a combined observable from 4 source observables.
*
* This operator behaves in a similar way to [[zip4]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatest4[A1, A2, A3, A4](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4]): Observable[(A1, A2, A3, A4)] =
new builders.CombineLatest4Observable(oa1, oa2, oa3, oa4)((a1, a2, a3, a4) => (a1, a2, a3, a4))
/** Creates a combined observable from 4 source observables.
*
* This operator behaves in a similar way to [[zipMap4]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatestMap4[A1, A2, A3, A4, R](
a1: Observable[A1],
a2: Observable[A2],
a3: Observable[A3],
a4: Observable[A4])(f: (A1, A2, A3, A4) => R): Observable[R] =
new builders.CombineLatest4Observable[A1, A2, A3, A4, R](a1, a2, a3, a4)(f)
/** Creates a combined observable from 5 source observables.
*
* This operator behaves in a similar way to [[zip5]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatest5[A1, A2, A3, A4, A5](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5]): Observable[(A1, A2, A3, A4, A5)] =
new builders.CombineLatest5Observable(oa1, oa2, oa3, oa4, oa5)((a1, a2, a3, a4, a5) => (a1, a2, a3, a4, a5))
/** Creates a combined observable from 5 source observables.
*
* This operator behaves in a similar way to [[zipMap5]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatestMap5[A1, A2, A3, A4, A5, R](
a1: Observable[A1],
a2: Observable[A2],
a3: Observable[A3],
a4: Observable[A4],
a5: Observable[A5])(f: (A1, A2, A3, A4, A5) => R): Observable[R] =
new builders.CombineLatest5Observable[A1, A2, A3, A4, A5, R](a1, a2, a3, a4, a5)(f)
/** Creates a combined observable from 6 source observables.
*
* This operator behaves in a similar way to [[zip6]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatest6[A1, A2, A3, A4, A5, A6](
oa1: Observable[A1],
oa2: Observable[A2],
oa3: Observable[A3],
oa4: Observable[A4],
oa5: Observable[A5],
oa6: Observable[A6]): Observable[(A1, A2, A3, A4, A5, A6)] =
new builders.CombineLatest6Observable(oa1, oa2, oa3, oa4, oa5, oa6)((a1, a2, a3, a4, a5, a6) =>
(a1, a2, a3, a4, a5, a6))
/** Creates a combined observable from 6 source observables.
*
* This operator behaves in a similar way to [[zipMap6]],
* but while `zip` emits items only when all of the zipped source
* observables have emitted a previously unzipped item, `combine`
* emits an item whenever any of the source Observables emits an
* item (so long as each of the source Observables has emitted at
* least one item).
*/
def combineLatestMap6[A1, A2, A3, A4, A5, A6, R](
a1: Observable[A1],
a2: Observable[A2],
a3: Observable[A3],
a4: Observable[A4],
a5: Observable[A5],
a6: Observable[A6])(f: (A1, A2, A3, A4, A5, A6) => R): Observable[R] =
new builders.CombineLatest6Observable[A1, A2, A3, A4, A5, A6, R](a1, a2, a3, a4, a5, a6)(f)
  /** Given a sequence of source observables, combines them together,
    * returning a new observable that emits the latest items of all
    * sources as sequences.
    */
def combineLatestList[A](sources: Observable[A]*): Observable[Seq[A]] = {
if (sources.isEmpty) {
Observable.empty
} else {
new CombineLatestListObservable[A](sources)
}
}
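  // Usage sketch (illustrative): `combineLatestList` emits a Seq holding the
  // latest item from each source, once every source has emitted at least once.
  // The literal sources below are assumptions for demonstration.
  //
  //   val latest: Observable[Seq[Int]] =
  //     Observable.combineLatestList(
  //       Observable.now(1),
  //       Observable.now(2),
  //       Observable.now(3)
  //     ) // emits Seq(1, 2, 3)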
/** Given a sequence of priority/observable pairs, combines them into a new
* observable that eagerly emits source items downstream as soon as demand is
* signaled, choosing the item from the highest priority (greater numbers
* mean higher priority) source when items from multiple sources are
* available. If items are available from multiple sources with the same
* highest priority, one of them is chosen arbitrarily.
*
* Source items are buffered only to the extent necessary to accommodate
* backpressure from downstream, and thus if only a single item is available
* when demand is signaled, it will be emitted regardless of priority.
*
* Backpressure is propagated from downstream to the source observables, so
* that items from a given source will always be emitted downstream in the
* same order as received from the source, and at most a single item from a
* given source will be in flight at a time.
*/
def mergePrioritizedList[A](sources: (Int, Observable[A])*): Observable[A] = {
if (sources.isEmpty) {
Observable.empty
} else {
new MergePrioritizedListObservable[A](sources)
}
}
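  // Usage sketch (illustrative): merging two sources where, under downstream
  // backpressure, items from the higher-priority source are chosen first. The
  // priority values below are arbitrary example numbers.
  //
  //   val urgent  = Observable.now("urgent")
  //   val routine = Observable.now("routine")
  //
  //   val merged: Observable[String] =
  //     Observable.mergePrioritizedList(10 -> urgent, 1 -> routine)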
/** Given a list of source Observables, emits all of the items from
* the first of these Observables to emit an item or to complete,
   * and cancels the rest.
*
* == Visual Example ==
*
* <pre>
* stream1: - - 1 1 1 - 1 - 1 - -
* stream2: - - - - - 2 2 2 2 2 2
*
* result: - - 1 1 1 - 1 - 1 - -
* </pre>
*/
def firstStartedOf[A](source: Observable[A]*): Observable[A] =
new builders.FirstStartedObservable(source: _*)
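  // Usage sketch (illustrative): racing two sources; whichever emits or
  // completes first wins and the other is cancelled. The delays are assumed
  // values for demonstration.
  //
  //   import scala.concurrent.duration._
  //
  //   val fast = Observable.now("fast").delayExecution(10.millis)
  //   val slow = Observable.now("slow").delayExecution(1.second)
  //
  //   Observable.firstStartedOf(fast, slow) // emits "fast"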
/** Implicit type class instances for [[Observable]]. */
implicit val catsInstances: CatsInstances =
new CatsInstances
/** Cats instances for [[Observable]]. */
class CatsInstances
extends Bracket[Observable, Throwable] with Alternative[Observable] with CoflatMap[Observable]
with FunctorFilter[Observable] with TaskLift[Observable] {
override def unit: Observable[Unit] =
Observable.unit
override def pure[A](a: A): Observable[A] =
Observable.now(a)
override def combineK[A](x: Observable[A], y: Observable[A]): Observable[A] =
x.appendAll(y)
override def flatMap[A, B](fa: Observable[A])(f: (A) => Observable[B]): Observable[B] =
fa.flatMap(f)
override def flatten[A](ffa: Observable[Observable[A]]): Observable[A] =
ffa.flatten
override def tailRecM[A, B](a: A)(f: (A) => Observable[Either[A, B]]): Observable[B] =
Observable.tailRecM(a)(f)
override def coflatMap[A, B](fa: Observable[A])(f: (Observable[A]) => B): Observable[B] =
Observable.eval(f(fa))
override def ap[A, B](ff: Observable[(A) => B])(fa: Observable[A]): Observable[B] =
for (f <- ff; a <- fa) yield f(a)
override def map2[A, B, Z](fa: Observable[A], fb: Observable[B])(f: (A, B) => Z): Observable[Z] =
for (a <- fa; b <- fb) yield f(a, b)
override def map[A, B](fa: Observable[A])(f: (A) => B): Observable[B] =
fa.map(f)
override def raiseError[A](e: Throwable): Observable[A] =
Observable.raiseError(e)
override def handleError[A](fa: Observable[A])(f: (Throwable) => A): Observable[A] =
fa.onErrorHandle(f)
override def handleErrorWith[A](fa: Observable[A])(f: (Throwable) => Observable[A]): Observable[A] =
fa.onErrorHandleWith(f)
override def recover[A](fa: Observable[A])(pf: PartialFunction[Throwable, A]): Observable[A] =
fa.onErrorRecover(pf)
override def recoverWith[A](fa: Observable[A])(pf: PartialFunction[Throwable, Observable[A]]): Observable[A] =
fa.onErrorRecoverWith(pf)
override def empty[A]: Observable[A] =
Observable.empty[A]
override def apply[A](task: Task[A]): Observable[A] =
Observable.fromTask(task)
override def bracketCase[A, B](acquire: Observable[A])(use: A => Observable[B])(
release: (A, ExitCase[Throwable]) => Observable[Unit]): Observable[B] =
acquire.bracketCase(use)((a, e) => release(a, e).completedL)
override def bracket[A, B](acquire: Observable[A])(use: A => Observable[B])(
release: A => Observable[Unit]): Observable[B] =
acquire.bracket(use)(release.andThen(_.completedL))
override def guarantee[A](fa: Observable[A])(finalizer: Observable[Unit]): Observable[A] =
fa.guarantee(finalizer.completedL)
override def guaranteeCase[A](fa: Observable[A])(
finalizer: ExitCase[Throwable] => Observable[Unit]): Observable[A] =
fa.guaranteeCase(e => finalizer(e).completedL)
override def uncancelable[A](fa: Observable[A]): Observable[A] =
fa.uncancelable
override def functor: Functor[Observable] = this
override def mapFilter[A, B](fa: Observable[A])(f: A => Option[B]): Observable[B] =
fa.map(f).collect { case Some(b) => b }
override def collect[A, B](fa: Observable[A])(f: PartialFunction[A, B]): Observable[B] =
fa.collect(f)
override def filter[A](fa: Observable[A])(f: A => Boolean): Observable[A] =
fa.filter(f)
}
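  // Usage sketch (illustrative): with `catsInstances` in scope, cats syntax
  // works directly on observables, e.g. `FunctorFilter` and `SemigroupK`.
  //
  //   import cats.syntax.functorFilter._
  //   import cats.syntax.semigroupk._
  //
  //   val evens = Observable(1, 2, 3, 4).mapFilter(n => if (n % 2 == 0) Some(n) else None)
  //   val both  = Observable(1) <+> Observable(2) // appendAll via combineK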
/** [[cats.NonEmptyParallel]] instance for [[Observable]]. */
implicit val observableNonEmptyParallel: NonEmptyParallel.Aux[Observable, CombineObservable.Type] =
new NonEmptyParallel[Observable] {
import CombineObservable.unwrap
import CombineObservable.{apply => wrap}
override type F[A] = CombineObservable.Type[A]
override def flatMap: FlatMap[Observable] = implicitly[FlatMap[Observable]]
override def apply: Apply[CombineObservable.Type] = CombineObservable.combineObservableApplicative
override val sequential = new (CombineObservable.Type ~> Observable) {
def apply[A](fa: CombineObservable.Type[A]): Observable[A] = unwrap(fa)
}
override val parallel = new (Observable ~> CombineObservable.Type) {
def apply[A](fa: Observable[A]): CombineObservable.Type[A] = wrap(fa)
}
}
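  // Usage sketch (illustrative): the `NonEmptyParallel` instance makes
  // `parMapN` combine observables with combine-latest semantics instead of
  // the sequential semantics of `flatMap`.
  //
  //   import cats.syntax.parallel._
  //
  //   val sum: Observable[Int] =
  //     (Observable.now(1), Observable.now(2)).parMapN(_ + _)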
/** Exposes extension methods for deprecated [[Observable]] methods.
*/
implicit final class DeprecatedExtensions[+A](val self: Observable[A])
extends AnyVal with ObservableDeprecatedMethods[A]
}
|
monifu/monifu
|
monix-reactive/shared/src/main/scala/monix/reactive/Observable.scala
|
Scala
|
apache-2.0
| 250,934 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package dotty.tools.dotc.classpath
import java.io.File
import java.net.URL
import dotty.tools.io.{ AbstractFile, FileZipArchive }
import FileUtils.AbstractFileOps
import dotty.tools.io.{ClassPath, ClassRepresentation}
/**
 * A trait for looking up classpath entries of a given type in zip and jar files.
 * It provides common logic for classes handling class and source files.
 * It is aware of special cases, e.g. the META-INF directory, which is correctly skipped.
*/
trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath {
val zipFile: File
assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null")
override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL)
override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath)
private val archive = new FileZipArchive(zipFile.toPath)
override private[dotty] def packages(inPackage: String): Seq[PackageEntry] = {
val prefix = PackageNameUtils.packagePrefix(inPackage)
for {
dirEntry <- findDirEntry(inPackage).toSeq
entry <- dirEntry.iterator if entry.isPackage
}
yield PackageEntryImpl(prefix + entry.name)
}
protected def files(inPackage: String): Seq[FileEntryType] =
for {
dirEntry <- findDirEntry(inPackage).toSeq
entry <- dirEntry.iterator if isRequiredFileType(entry)
}
yield createFileEntry(entry)
protected def file(inPackage: String, name: String): Option[FileEntryType] =
for {
dirEntry <- findDirEntry(inPackage)
entry <- Option(dirEntry.lookupName(name, directory = false))
if isRequiredFileType(entry)
}
yield createFileEntry(entry)
override private[dotty] def hasPackage(pkg: String): Boolean = findDirEntry(pkg).isDefined
override private[dotty] def list(inPackage: String): ClassPathEntries = {
val foundDirEntry = findDirEntry(inPackage)
foundDirEntry map { dirEntry =>
val pkgBuf = collection.mutable.ArrayBuffer.empty[PackageEntry]
val fileBuf = collection.mutable.ArrayBuffer.empty[FileEntryType]
val prefix = PackageNameUtils.packagePrefix(inPackage)
for (entry <- dirEntry.iterator)
if (entry.isPackage)
pkgBuf += PackageEntryImpl(prefix + entry.name)
else if (isRequiredFileType(entry))
fileBuf += createFileEntry(entry)
ClassPathEntries(pkgBuf, fileBuf)
} getOrElse ClassPathEntries(Seq.empty, Seq.empty)
}
private def findDirEntry(pkg: String): Option[archive.DirEntry] = {
val dirName = pkg.replace('.', '/') + "/"
archive.allDirs.get(dirName)
}
protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType
protected def isRequiredFileType(file: AbstractFile): Boolean
}
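// Implementation sketch (hypothetical, not part of the compiler sources): a
// lookup for ".class" entries built on ZipArchiveFileLookup. The entry type
// `ClassFileEntryImpl` and the `isClass` extension method (from
// FileUtils.AbstractFileOps) are assumptions for illustration.
//
//   case class JarClassLookup(zipFile: File)
//       extends ZipArchiveFileLookup[ClassFileEntryImpl] {
//     override protected def createFileEntry(file: FileZipArchive#Entry) =
//       ClassFileEntryImpl(file)
//     override protected def isRequiredFileType(file: AbstractFile) =
//       file.isClass
//   }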
|
som-snytt/dotty
|
compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala
|
Scala
|
apache-2.0
| 2,779 |
package org.littlewings.lucene.kuromoji
import scala.language.postfixOps
import scala.sys.process._
import java.io.{File, ByteArrayInputStream, StringReader}
import java.nio.charset.StandardCharsets
import org.apache.lucene.analysis.ja.dict.ConnectionCosts
import org.apache.lucene.analysis.ja.{GraphvizFormatter, JapaneseTokenizer}
object KuromojiViterbi {
def main(args: Array[String]): Unit = {
val word = args.toList.headOption.getOrElse("すもももももももものうち")
val graphvizFormatter = new GraphvizFormatter(ConnectionCosts.getInstance)
val tokenizer = new JapaneseTokenizer(null, true, JapaneseTokenizer.Mode.NORMAL)
tokenizer.setReader(new StringReader(word))
tokenizer.setGraphvizFormatter(graphvizFormatter)
tokenizer.reset()
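    // Drain the token stream; the Graphviz formatter records the Viterbi
    // lattice as tokens are consumed.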
Iterator.continually(tokenizer.incrementToken()).takeWhile(identity).foreach(_ => ())
tokenizer.end()
tokenizer.close()
val dotOutput = graphvizFormatter.finish()
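    // Pipe the DOT text through Graphviz's `dot` (assumed to be on the PATH)
    // and write the rendered GIF to out.gif: `#<` feeds stdin, `#>` redirects
    // stdout to the file, and `!` runs the pipeline.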
"dot -Tgif" #< new ByteArrayInputStream(dotOutput.getBytes(StandardCharsets.UTF_8)) #> new File("out.gif") !
}
}
|
kazuhira-r/lucene-examples
|
lucene-kuromoji-viterbi/src/main/scala/org/littlewings/lucene/kuromoji/KuromojiViterbi.scala
|
Scala
|
mit
| 1,085 |
package org.flowpaint.model2
/**
*
*/
case class Vec2i(var x: Int, var y: Int) {
def swap(other: Vec2i) {
val tx = other.x
val ty = other.y
other.x = x
other.y = y
x = tx
y = ty
}
}
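// Usage sketch (illustrative): `swap` exchanges coordinates in place, which
// works because Vec2i is a mutable case class.
//
//   val a = Vec2i(1, 2)
//   val b = Vec2i(3, 4)
//   a.swap(b) // a is now Vec2i(3, 4), b is now Vec2i(1, 2)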
|
zzorn/flowpaint
|
src/main/scala/org/flowpaint/model2/Vec2i.scala
|
Scala
|
gpl-2.0
| 215 |
package demo.components.semanticui
import chandu0101.macros.tojs.GhPagesMacros
import chandu0101.scalajs.react.components.ReactMouseEventB
import chandu0101.scalajs.react.components.materialui._
import chandu0101.scalajs.react.components.semanticui.SuiButton
import demo.components.CodeExample
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
object SuiButtonDemo {
val code = GhPagesMacros.exampleSource
// EXAMPLE:START
case class Backend($ : BackendScope[Unit, Unit]) {
val handleOnClick = (e: ReactMouseEventB) => Callback.info("You clicked me!")
def render() =
<.div(
CodeExample(code, "SuiButton")(
SuiButton(onClick = handleOnClick)("Click Here"),
SuiButton(primary = true)("Primary"),
SuiButton(secondary = true)("Secondary")
)
)
}
val component = ScalaComponent
.builder[Unit]("SuiButtonDemo")
.renderBackend[Backend]
.build
// EXAMPLE:END
def apply() = component()
}
|
rleibman/scalajs-react-components
|
demo/src/main/scala/demo/components/semanticui/SuiButtonDemo.scala
|
Scala
|
apache-2.0
| 1,007 |
import sbt._
import sbt.Keys._
object BuildSettings {
val buildVersion = "0.12.0-SNAPSHOT"
val buildSettings = Defaults.defaultSettings ++ Seq(
organization := "org.reactivemongo",
version := buildVersion,
scalaVersion := "2.11.6",
scalacOptions ++= Seq("-unchecked", "-deprecation", "-target:jvm-1.8"),
crossScalaVersions := Seq("2.11.6"),
crossVersion := CrossVersion.binary,
shellPrompt := ShellPrompt.buildShellPrompt,
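    // Test cleanup hook: reflectively invokes `Common.closeDriver()` from the
    // test classloader; the structural type avoids a compile-time dependency
    // on the test sources.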
testOptions in Test += Tests.Cleanup(cl => {
import scala.language.reflectiveCalls
val c = cl.loadClass("Common$")
type M = { def closeDriver(): Unit }
val m: M = c.getField("MODULE$").get(null).asInstanceOf[M]
m.closeDriver()
})
) ++ Publish.settings ++ Format.settings ++ Travis.settings
}
object Publish {
@inline def env(n: String): String = sys.env.get(n).getOrElse(n)
private val repoName = env("PUBLISH_REPO_NAME")
private val repoUrl = env("PUBLISH_REPO_URL")
lazy val settings = Seq(
publishMavenStyle := true,
publishArtifact in Test := false,
publishTo := Some(repoUrl).map(repoName at _),
credentials += Credentials(repoName, env("PUBLISH_REPO_ID"),
env("PUBLISH_USER"), env("PUBLISH_PASS")),
pomIncludeRepository := { _ => false },
licenses := Seq("Apache 2.0" ->
url("http://www.apache.org/licenses/LICENSE-2.0")),
homepage := Some(url("http://reactivemongo.org")),
pomExtra := (
<scm>
<url>git://github.com/ReactiveMongo/Play-ReactiveMongo.git</url>
<connection>scm:git://github.com/ReactiveMongo/Play-ReactiveMongo.git</connection>
</scm>
<developers>
<developer>
<id>sgodbillon</id>
<name>Stephane Godbillon</name>
<url>http://stephane.godbillon.com</url>
</developer>
<developer>
<id>mandubian</id>
<name>Pascal Voitot</name>
<url>http://mandubian.com</url>
</developer>
</developers>))
}
object Format {
import com.typesafe.sbt.SbtScalariform._
lazy val settings = scalariformSettings ++ Seq(
ScalariformKeys.preferences := formattingPreferences)
lazy val formattingPreferences = {
import scalariform.formatter.preferences._
FormattingPreferences().
setPreference(AlignParameters, true).
setPreference(AlignSingleLineCaseStatements, true).
setPreference(CompactControlReadability, false).
setPreference(CompactStringConcatenation, false).
setPreference(DoubleIndentClassDeclaration, true).
setPreference(FormatXml, true).
setPreference(IndentLocalDefs, false).
setPreference(IndentPackageBlocks, true).
setPreference(IndentSpaces, 2).
setPreference(MultilineScaladocCommentsStartOnFirstLine, false).
setPreference(PreserveSpaceBeforeArguments, false).
setPreference(PreserveDanglingCloseParenthesis, false).
setPreference(RewriteArrowSymbols, false).
setPreference(SpaceBeforeColon, false).
setPreference(SpaceInsideBrackets, false).
setPreference(SpacesWithinPatternBinders, true)
}
}
// Shell prompt which show the current project,
// git branch and build version
object ShellPrompt {
object devnull extends ProcessLogger {
def info(s: => String) {}
def error(s: => String) {}
def buffer[T](f: => T): T = f
}
def currBranch = (
("git status -sb" lines_! devnull headOption)
getOrElse "-" stripPrefix "## "
)
val buildShellPrompt = {
(state: State) => {
val currProject = Project.extract(state).currentProject.id
"%s:%s:%s> ".format(
currProject, currBranch, BuildSettings.buildVersion
)
}
}
}
object Play2ReactiveMongoBuild extends Build {
import BuildSettings._
lazy val reactivemongo = Project(
"Play2-ReactiveMongo",
file("."),
settings = buildSettings ++ Seq(
resolvers := Seq(
"Sonatype snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/",
"Sonatype" at "http://oss.sonatype.org/content/groups/public/",
"Typesafe repository releases" at "http://repo.typesafe.com/typesafe/releases/",
"Typesafe repository snapshots" at "http://repo.typesafe.com/typesafe/snapshots/"
),
libraryDependencies ++= Seq(
"org.reactivemongo" %% "reactivemongo" % "0.11.6" cross CrossVersion.binary,
"com.typesafe.play" %% "play" % "2.4.0" % "provided" cross CrossVersion.binary,
"com.typesafe.play" %% "play-test" % "2.4.0" % "test" cross CrossVersion.binary,
"org.specs2" % "specs2" % "2.3.12" % "test" cross CrossVersion.binary,
"junit" % "junit" % "4.8" % "test" cross CrossVersion.Disabled,
"org.apache.logging.log4j" % "log4j-to-slf4j" % "2.0.2"
)
)
)
}
object Travis {
val travisSnapshotBranches =
SettingKey[Seq[String]]("branches that can be published on sonatype")
val travisCommand = Command.command("publishSnapshotsFromTravis") { state =>
val extracted = Project extract state
import extracted._
import scala.util.Properties.isJavaAtLeast
val thisRef = extracted.get(thisProjectRef)
val isSnapshot = getOpt(version).exists(_.endsWith("SNAPSHOT"))
val isTravisEnabled = sys.env.get("TRAVIS").exists(_ == "true")
val isNotPR = sys.env.get("TRAVIS_PULL_REQUEST").exists(_ == "false")
val isBranchAcceptable = sys.env.get("TRAVIS_BRANCH").exists(branch => getOpt(travisSnapshotBranches).exists(_.contains(branch)))
val isJavaVersion = !isJavaAtLeast("1.7")
if (isSnapshot && isTravisEnabled && isNotPR && isBranchAcceptable) {
println(s"publishing $thisRef from travis...")
val newState = append(
Seq(
publishTo := Some("Sonatype Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots/"),
credentials := Seq(Credentials(
"Sonatype Nexus Repository Manager",
"oss.sonatype.org",
sys.env.get("SONATYPE_USER").getOrElse(throw new RuntimeException("no SONATYPE_USER defined")),
sys.env.get("SONATYPE_PASSWORD").getOrElse(throw new RuntimeException("no SONATYPE_PASSWORD defined"))
))),
state
)
runTask(publish in thisRef, newState)
println(s"published $thisRef from travis")
} else {
println(s"not publishing $thisRef to Sonatype: isSnapshot=$isSnapshot, isTravisEnabled=$isTravisEnabled, isNotPR=$isNotPR, isBranchAcceptable=$isBranchAcceptable, javaVersionLessThen_1_7=$isJavaVersion")
}
state
}
val settings = Seq(
Travis.travisSnapshotBranches := Seq("master"),
commands += Travis.travisCommand)
}
|
fr3akX/Play-ReactiveMongo
|
project/Play2-ReactiveMongo.scala
|
Scala
|
apache-2.0
| 6,668 |
package sttp.client3
import sttp.client3.ws.{GotAWebSocketException, NotAWebSocketException}
import sttp.monad.MonadError
import scala.annotation.tailrec
/** Known exceptions that might occur when using a backend. Currently this covers:
* - connect exceptions: when a connection (tcp socket) can't be established to the target host
* - read exceptions: when a connection has been established, but there's any kind of problem receiving or handling
* the response (e.g. a broken socket or a deserialization error)
*
  * In general, it's safe to assume that the request hasn't been sent in case of connect exceptions. With read
  * exceptions, the target host might or might not have received and processed the request.
*
* The [[SttpBackend.send]] methods might also throw other exceptions, due to programming errors, bugs in the
* underlying implementations, bugs in sttp or an uncovered exception.
*
* @param request
* The request, which was being sent when the exception was thrown
* @param cause
* The original exception.
*/
abstract class SttpClientException(request: Request[_, _], cause: Exception)
extends Exception(s"Exception when sending request: ${request.method} ${request.uri}", cause)
object SttpClientException {
class ConnectException(request: Request[_, _], cause: Exception) extends SttpClientException(request, cause)
class ReadException(request: Request[_, _], cause: Exception) extends SttpClientException(request, cause)
@tailrec
def defaultExceptionToSttpClientException(request: Request[_, _], e: Exception): Option[Exception] =
e match {
case e: java.net.ConnectException => Some(new ConnectException(request, e))
case e: java.net.UnknownHostException => Some(new ConnectException(request, e))
case e: java.net.MalformedURLException => Some(new ConnectException(request, e))
case e: java.net.NoRouteToHostException => Some(new ConnectException(request, e))
case e: java.net.PortUnreachableException => Some(new ConnectException(request, e))
case e: java.net.ProtocolException => Some(new ConnectException(request, e))
case e: java.net.URISyntaxException => Some(new ConnectException(request, e))
case e: java.net.SocketTimeoutException => Some(new ReadException(request, e))
case e: java.net.UnknownServiceException => Some(new ReadException(request, e))
case e: java.net.SocketException => Some(new ReadException(request, e))
case e: java.util.concurrent.TimeoutException => Some(new ReadException(request, e))
case e: java.io.IOException => Some(new ReadException(request, e))
case e: NotAWebSocketException => Some(new ReadException(request, e))
case e: GotAWebSocketException => Some(new ReadException(request, e))
case e: ResponseException[_, _] => Some(new ReadException(request, e))
case e if e.getCause != null && e.getCause.isInstanceOf[Exception] =>
defaultExceptionToSttpClientException(request, e.getCause.asInstanceOf[Exception])
case _ => None
}
def adjustExceptions[F[_], T](
monadError: MonadError[F]
)(t: => F[T])(usingFn: Exception => Option[Exception]): F[T] = {
monadError.handleError(t) { case e: Exception =>
monadError.error(usingFn(e).getOrElse(e))
}
}
}
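// Usage sketch (illustrative): backends wrap known transport failures via
// `adjustExceptions`, so callers can distinguish connect from read errors.
// The `backend` value below is an assumption for demonstration.
//
//   import sttp.client3._
//
//   try basicRequest.get(uri"https://example.org").send(backend)
//   catch {
//     case _: SttpClientException.ConnectException => ??? // request never sent
//     case _: SttpClientException.ReadException    => ??? // host may have processed it
//   }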
|
softwaremill/sttp
|
core/src/main/scala/sttp/client3/SttpClientException.scala
|
Scala
|
apache-2.0
| 3,448 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import SharedHelpers._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class ShouldFullyMatchSpec extends AnyFunSpec with ReturnsNormallyThrowsAssertion {
/*
s should include substring t
s should include regex t
s should startWith substring t
s should startWith regex t
s should endWith substring t
s should endWith regex t
s should fullyMatch regex t
*/
describe("The fullyMatch regex syntax") {
val decimal = """(-)?(\\d+)(\\.\\d*)?"""
val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
describe("(when the regex is specified by a string)") {
it("should do nothing if the string fully matches the regular expression specified as a string") {
"1.7" should fullyMatch regex ("1.7")
"1.7" should fullyMatch regex (decimal)
"-1.8" should fullyMatch regex (decimal)
"8" should fullyMatch regex (decimal)
"1." should fullyMatch regex (decimal)
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used with not") {
"eight" should not { fullyMatch regex (decimal) }
"1.eight" should not { fullyMatch regex (decimal) }
"one.8" should not { fullyMatch regex (decimal) }
"eight" should not fullyMatch regex (decimal)
"1.eight" should not fullyMatch regex (decimal)
"one.8" should not fullyMatch regex (decimal)
"1.8-" should not fullyMatch regex (decimal)
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression") {
"1.7" should (fullyMatch regex (decimal) and (fullyMatch regex (decimal)))
"1.7" should ((fullyMatch regex (decimal)) and (fullyMatch regex (decimal)))
"1.7" should (fullyMatch regex (decimal) and fullyMatch regex (decimal))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression") {
"1.7" should (fullyMatch regex ("hello") or (fullyMatch regex (decimal)))
"1.7" should ((fullyMatch regex ("hello")) or (fullyMatch regex (decimal)))
"1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimal))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression with not") {
"fred" should (not (fullyMatch regex ("bob")) and not (fullyMatch regex (decimal)))
"fred" should ((not fullyMatch regex ("bob")) and (not fullyMatch regex (decimal)))
"fred" should (not fullyMatch regex ("bob") and not fullyMatch regex (decimal))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression with not") {
"fred" should (not (fullyMatch regex ("fred")) or not (fullyMatch regex (decimal)))
"fred" should ((not fullyMatch regex ("fred")) or (not fullyMatch regex (decimal)))
"fred" should (not fullyMatch regex ("fred") or not fullyMatch regex (decimal))
}
it("should throw TestFailedException if the string does not match the regular expression specified as a string") {
val caught1 = intercept[TestFailedException] {
"1.7" should fullyMatch regex ("1.78")
}
assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.78")
val caught2 = intercept[TestFailedException] {
"1.7" should fullyMatch regex ("21.7")
}
assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 21.7")
val caught3 = intercept[TestFailedException] {
"-1.eight" should fullyMatch regex (decimal)
}
assert(caught3.getMessage === "\\"-1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught6 = intercept[TestFailedException] {
"eight" should fullyMatch regex (decimal)
}
assert(caught6.getMessage === "\\"eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught7 = intercept[TestFailedException] {
"1.eight" should fullyMatch regex (decimal)
}
assert(caught7.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught8 = intercept[TestFailedException] {
"one.8" should fullyMatch regex (decimal)
}
assert(caught8.getMessage === "\\"one.8\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught9 = intercept[TestFailedException] {
"1.8-" should fullyMatch regex (decimal)
}
assert(caught9.getMessage === "\\"1.8-\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string does matches the regular expression specified as a string when used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should not { fullyMatch regex ("1.7") }
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should not { fullyMatch regex (decimal) }
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { fullyMatch regex (decimal) }
}
assert(caught3.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught4 = intercept[TestFailedException] {
"8" should not { fullyMatch regex (decimal) }
}
assert(caught4.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught5 = intercept[TestFailedException] {
"1." should not { fullyMatch regex (decimal) }
}
assert(caught5.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught11 = intercept[TestFailedException] {
"1.7" should not fullyMatch regex ("1.7")
}
assert(caught11.getMessage === "\\"1.7\\" fully matched the regular expression 1.7")
val caught12 = intercept[TestFailedException] {
"1.7" should not fullyMatch regex (decimal)
}
assert(caught12.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught13 = intercept[TestFailedException] {
"-1.8" should not fullyMatch regex (decimal)
}
assert(caught13.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught14 = intercept[TestFailedException] {
"8" should not fullyMatch regex (decimal)
}
assert(caught14.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught15 = intercept[TestFailedException] {
"1." should not fullyMatch regex (decimal)
}
assert(caught15.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
"1.7" should (fullyMatch regex (decimal) and (fullyMatch regex ("1.8")))
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
val caught2 = intercept[TestFailedException] {
"1.7" should ((fullyMatch regex (decimal)) and (fullyMatch regex ("1.8")))
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
val caught3 = intercept[TestFailedException] {
"1.7" should (fullyMatch regex (decimal) and fullyMatch regex ("1.8"))
}
assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught4 = intercept[TestFailedException] {
"1.eight" should (fullyMatch regex (decimal) and (fullyMatch regex ("1.8")))
}
assert(caught4.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught5 = intercept[TestFailedException] {
"1.eight" should ((fullyMatch regex (decimal)) and (fullyMatch regex ("1.8")))
}
assert(caught5.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught6 = intercept[TestFailedException] {
"1.eight" should (fullyMatch regex (decimal) and fullyMatch regex ("1.8"))
}
assert(caught6.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
"1.seven" should (fullyMatch regex (decimal) or (fullyMatch regex ("1.8")))
}
assert(caught1.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
val caught2 = intercept[TestFailedException] {
"1.seven" should ((fullyMatch regex (decimal)) or (fullyMatch regex ("1.8")))
}
assert(caught2.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
val caught3 = intercept[TestFailedException] {
"1.seven" should (fullyMatch regex (decimal) or fullyMatch regex ("1.8"))
}
assert(caught3.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex ("1.8") and (not fullyMatch regex (decimal)))
}
assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not fullyMatch regex ("1.8")) and (not fullyMatch regex (decimal)))
}
assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught3 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex ("1.8") and not fullyMatch regex (decimal))
}
assert(caught3.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex (decimal) or (not fullyMatch regex ("1.7")))
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not fullyMatch regex (decimal)) or (not fullyMatch regex ("1.7")))
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught3 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex (decimal) or not fullyMatch regex ("1.7"))
}
assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught4 = intercept[TestFailedException] {
"1.7" should (not (fullyMatch regex (decimal)) or not (fullyMatch regex ("1.7")))
}
assert(caught4.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
}
}
describe("(when the regex is specifed by a string and with group)") {
describe("(when used with should)") {
it("should do nothing if the string fully matches the regular expression and with group as specified") {
"abbc" should fullyMatch regex ("a(b*)c" withGroup "bb")
"abbcc" should fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bb") and (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bb")) and (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bb") and fullyMatch regex ("a(b*)c" withGroup "bb"))
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
"abbc" should (equal ("abbc") and (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should ((equal ("abbc")) and (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should (equal ("abbc") and fullyMatch regex ("a(b*)c" withGroup "bb"))
"abbcc" should (equal ("abbcc") and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should ((equal ("abbcc")) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should (equal ("abbcc") and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") or (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bbb")) or (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") or fullyMatch regex ("a(b*)c" withGroup "bb"))
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bbb", "cc")) or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bbb", "cc"))) or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bbb", "cc")) or fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
"abbc" should (equal ("abbc") or (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should ((equal ("abbc")) or (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbc" should (equal ("abbc") or fullyMatch regex ("a(b*)c" withGroup "bb"))
"abbcc" should (equal ("abbc") or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should ((equal ("abbc")) or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
"abbcc" should (equal ("abbc") or fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
}
it("should throw TestFailedException if the string fully matches the regular expression but does not match specified group") {
val caught1 = intercept[TestFailedException] {
"abbbc" should fullyMatch regex ("a(b*)c" withGroup "bb")
}
assert(caught1.message === Some("\\"abbbc\\" fully matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"))
assert(caught1.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
"abbccc" should fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))
}
assert(caught2.message === Some("\\"abbccc\\" fully matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"))
assert(caught2.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bb") and (fullyMatch regex ("a(b*)c" withGroup "bbb")))
}
assert(caught3.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught3.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bb")) and (fullyMatch regex ("a(b*)c" withGroup "bbb")))
}
assert(caught4.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught4.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bb") and fullyMatch regex ("a(b*)c" withGroup "bbb"))
}
assert(caught5.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught5.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught6 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") and fullyMatch regex ("a(b*)c" withGroup "bbbb"))
}
assert(caught6.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught6.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bbb")) and (fullyMatch regex ("a(b*)c" withGroup "bbb")))
}
assert(caught7.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught7.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") and fullyMatch regex ("a(b*)c" withGroup "bbbb"))
}
assert(caught8.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught8.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))))
}
assert(caught9.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught9.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))))
}
assert(caught10.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught10.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")))
}
assert(caught11.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught11.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught12 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc")))
}
assert(caught12.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught12.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
}
assert(caught13.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught13.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc")))
}
assert(caught14.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught14.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") or (fullyMatch regex ("a(b*)c" withGroup "bbbb")))
}
assert(caught15.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught15.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught16 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bbb")) or (fullyMatch regex ("a(b*)c" withGroup "bbbb")))
}
assert(caught16.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught16.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught16.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught17 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") or fullyMatch regex ("a(b*)c" withGroup "bbbb"))
}
assert(caught17.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught17.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught17.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught18 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
}
assert(caught18.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught18.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught18.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught19 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))) or (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
}
assert(caught19.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught19.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught19.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught20 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) or fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc")))
}
assert(caught20.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught20.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught20.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught21 = intercept[TestFailedException] {
"abbc" should (equal ("abbc") and (fullyMatch regex ("a(b*)c" withGroup "bbb")))
}
assert(caught21.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught21.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught21.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught22 = intercept[TestFailedException] {
"abbc" should ((equal ("abbc")) and (fullyMatch regex ("a(b*)c" withGroup "bbb")))
}
assert(caught22.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught22.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught22.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught23 = intercept[TestFailedException] {
"abbc" should (equal ("abbc") and fullyMatch regex ("a(b*)c" withGroup "bbb"))
}
assert(caught23.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught23.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught23.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught24 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") and (equal ("abbc")))
}
assert(caught24.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught24.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught24.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught25 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c" withGroup "bbb")) and (equal ("abbc")))
}
assert(caught25.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught25.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught25.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught26 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c" withGroup "bbb") and equal ("abbc"))
}
assert(caught26.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught26.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught26.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught27 = intercept[TestFailedException] {
"abbcc" should (equal ("abbcc") and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))))
}
assert(caught27.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught27.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught27.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught28 = intercept[TestFailedException] {
"abbcc" should ((equal ("abbcc")) and (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))))
}
assert(caught28.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught28.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught28.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught29 = intercept[TestFailedException] {
"abbcc" should (equal ("abbcc") and fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")))
}
assert(caught29.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught29.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught29.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught30 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and (equal ("abbcc")))
}
assert(caught30.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught30.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught30.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught31 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))) and (equal ("abbcc")))
}
assert(caught31.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught31.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught31.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught32 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and equal ("abbcc"))
}
assert(caught32.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught32.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught32.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("(when used with should not)") {
it("should do nothing if the string does not fully match the regular expression and with group as specified") {
"abbbc" should not { fullyMatch regex ("a(b*)c" withGroup "bb") }
"abbbc" should not fullyMatch regex ("a(b*)c" withGroup "bb")
"abbccc" should not { fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) }
"abbccc" should not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))
"abbbc" should (not (fullyMatch regex ("a(b*)c" withGroup "bb")) and not (fullyMatch regex ("a(b*)c" withGroup "bbbb")))
"abbbc" should ((not fullyMatch regex ("a(b*)c" withGroup "bb")) and (not fullyMatch regex ("a(b*)c" withGroup "bbbb")))
"abbbc" should (not fullyMatch regex ("a(b*)c" withGroup "bb") and not fullyMatch regex ("a(b*)c" withGroup "bbbb"))
"abbccc" should (not (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))) and not (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
"abbccc" should ((not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))) and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
"abbccc" should (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) and not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc")))
"abbbc" should (not (fullyMatch regex ("a(b*)c" withGroup "bbb")) or not (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbbc" should ((not fullyMatch regex ("a(b*)c" withGroup "bbb")) or (not fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbbc" should (not fullyMatch regex ("a(b*)c" withGroup "bbb") or not fullyMatch regex ("a(b*)c" withGroup "bb"))
"abbbc" should (not (equal ("abbcc")) and not (fullyMatch regex ("a(b*)c" withGroup "bbbb")))
"abbbc" should ((not equal "abbcc") and (not fullyMatch regex ("a(b*)c" withGroup "bbbb")))
"abbbc" should (not equal "abbcc" and not fullyMatch regex ("a(b*)c" withGroup "bbbb"))
"abbccc" should (not (equal ("abbcc")) and not (fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
"abbccc" should ((not equal ("abbcc")) and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc"))))
"abbccc" should (not equal ("abbcc") and not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cccc")))
"abbbc" should (not (equal ("abbbc")) or not (fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbbc" should ((not equal ("abbbc")) or (not fullyMatch regex ("a(b*)c" withGroup "bb")))
"abbbc" should (not equal ("abbbc") or not fullyMatch regex ("a(b*)c" withGroup "bb"))
}
it("should throw TestFailedException if the string fully matches the regular expression and with group as specified") {
val caught1 = intercept[TestFailedException] {
"abbc" should not { fullyMatch regex ("a(b*)c" withGroup "bb") }
}
assert(caught1.message === Some("\\"abbc\\" fully matched the regular expression a(b*)c and group bb"))
assert(caught1.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
"abbc" should not fullyMatch regex ("a(b*)c" withGroup "bb")
}
assert(caught2.message === Some("\\"abbc\\" fully matched the regular expression a(b*)c and group bb"))
assert(caught2.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
"abbcc" should not { fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")) }
}
assert(caught3.message === Some("\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc"))
assert(caught3.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
"abbcc" should not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))
}
assert(caught4.message === Some("\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc"))
assert(caught4.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[TestFailedException] {
"abbc" should (not fullyMatch regex ("a(b*)c" withGroup "bbb") and (not fullyMatch regex ("a(b*)c" withGroup "bb")))
}
assert(caught5.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught5.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught6 = intercept[TestFailedException] {
"abbc" should ((not fullyMatch regex ("a(b*)c" withGroup "bbb")) and (not fullyMatch regex ("a(b*)c" withGroup "bb")))
}
assert(caught6.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught6.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[TestFailedException] {
"abbc" should (not fullyMatch regex ("a(b*)c" withGroup "bbb") and not fullyMatch regex ("a(b*)c" withGroup "bb"))
}
assert(caught7.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught7.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[TestFailedException] {
"abbcc" should (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
}
assert(caught8.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught8.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[TestFailedException] {
"abbcc" should ((not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc"))) and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
}
assert(caught9.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught9.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[TestFailedException] {
"abbcc" should (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "ccc")) and not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
}
assert(caught10.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught10.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[TestFailedException] {
"abbc" should (not equal ("abbcc") and (not fullyMatch regex ("a(b*)c" withGroup "bb")))
}
assert(caught11.getMessage === "\\"abbc[]\\" did not equal \\"abbc[c]\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught11.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught12 = intercept[TestFailedException] {
"abbc" should ((not equal ("abbcc")) and (not fullyMatch regex ("a(b*)c" withGroup "bb")))
}
assert(caught12.getMessage === "\\"abbc[]\\" did not equal \\"abbc[c]\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught12.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[TestFailedException] {
"abbc" should (not equal ("abbcc") and not fullyMatch regex ("a(b*)c" withGroup "bb"))
}
assert(caught13.getMessage === "\\"abbc[]\\" did not equal \\"abbc[c]\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught13.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[TestFailedException] {
"abbcc" should (not equal ("abbccc") and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
}
assert(caught14.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught14.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[TestFailedException] {
"abbcc" should ((not equal "abbccc") and (not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc"))))
}
assert(caught15.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught15.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught16 = intercept[TestFailedException] {
"abbcc" should (not equal "abbccc" and not fullyMatch regex ("a(b*)(c*)" withGroups ("bb", "cc")))
}
assert(caught16.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught16.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught16.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
describe("(when the regex is specified by an actual Regex)") {
it("should do nothing if the string fully matches the regular expression specified as a string") {
"1.7" should fullyMatch regex ("1.7")
"1.7" should fullyMatch regex (decimalRegex)
"-1.8" should fullyMatch regex (decimalRegex)
"8" should fullyMatch regex (decimalRegex)
"1." should fullyMatch regex (decimalRegex)
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used with not") {
"eight" should not { fullyMatch regex (decimalRegex) }
"1.eight" should not { fullyMatch regex (decimalRegex) }
"one.8" should not { fullyMatch regex (decimalRegex) }
"eight" should not fullyMatch regex (decimalRegex)
"1.eight" should not fullyMatch regex (decimalRegex)
"one.8" should not fullyMatch regex (decimalRegex)
"1.8-" should not fullyMatch regex (decimalRegex)
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression") {
"1.7" should (fullyMatch regex (decimalRegex) and (fullyMatch regex (decimalRegex)))
"1.7" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex (decimalRegex)))
"1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex (decimalRegex))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression") {
"1.7" should (fullyMatch regex ("hello") or (fullyMatch regex (decimalRegex)))
"1.7" should ((fullyMatch regex ("hello")) or (fullyMatch regex (decimalRegex)))
"1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimalRegex))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-and expression with not") {
"fred" should (not (fullyMatch regex ("bob")) and not (fullyMatch regex (decimalRegex)))
"fred" should ((not fullyMatch regex ("bob")) and (not fullyMatch regex (decimalRegex)))
"fred" should (not fullyMatch regex ("bob") and not fullyMatch regex (decimalRegex))
}
it("should do nothing if the string does not fully match the regular expression specified as a string when used in a logical-or expression with not") {
"fred" should (not (fullyMatch regex ("fred")) or not (fullyMatch regex (decimalRegex)))
"fred" should ((not fullyMatch regex ("fred")) or (not fullyMatch regex (decimalRegex)))
"fred" should (not fullyMatch regex ("fred") or not fullyMatch regex (decimalRegex))
}
it("should throw TestFailedException if the string does not match the regular expression specified as a string") {
val caught1 = intercept[TestFailedException] {
"1.7" should fullyMatch regex ("1.78")
}
assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.78")
val caught2 = intercept[TestFailedException] {
"1.7" should fullyMatch regex ("21.7")
}
assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 21.7")
val caught3 = intercept[TestFailedException] {
"-1.eight" should fullyMatch regex (decimalRegex)
}
assert(caught3.getMessage === "\\"-1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught6 = intercept[TestFailedException] {
"eight" should fullyMatch regex (decimalRegex)
}
assert(caught6.getMessage === "\\"eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught7 = intercept[TestFailedException] {
"1.eight" should fullyMatch regex (decimalRegex)
}
assert(caught7.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught8 = intercept[TestFailedException] {
"one.8" should fullyMatch regex (decimalRegex)
}
assert(caught8.getMessage === "\\"one.8\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught9 = intercept[TestFailedException] {
"1.8-" should fullyMatch regex (decimalRegex)
}
assert(caught9.getMessage === "\\"1.8-\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string does matches the regular expression specified as a string when used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should not { fullyMatch regex ("1.7") }
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should not { fullyMatch regex (decimalRegex) }
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { fullyMatch regex (decimalRegex) }
}
assert(caught3.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught4 = intercept[TestFailedException] {
"8" should not { fullyMatch regex (decimalRegex) }
}
assert(caught4.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught5 = intercept[TestFailedException] {
"1." should not { fullyMatch regex (decimalRegex) }
}
assert(caught5.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught11 = intercept[TestFailedException] {
"1.7" should not fullyMatch regex ("1.7")
}
assert(caught11.getMessage === "\\"1.7\\" fully matched the regular expression 1.7")
val caught12 = intercept[TestFailedException] {
"1.7" should not fullyMatch regex (decimalRegex)
}
assert(caught12.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught13 = intercept[TestFailedException] {
"-1.8" should not fullyMatch regex (decimalRegex)
}
assert(caught13.getMessage === "\\"-1.8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught14 = intercept[TestFailedException] {
"8" should not fullyMatch regex (decimalRegex)
}
assert(caught14.getMessage === "\\"8\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught15 = intercept[TestFailedException] {
"1." should not fullyMatch regex (decimalRegex)
}
assert(caught15.getMessage === "\\"1.\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression") {
val caught1 = intercept[TestFailedException] {
"1.7" should (fullyMatch regex (decimalRegex) and (fullyMatch regex ("1.8")))
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
val caught2 = intercept[TestFailedException] {
"1.7" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex ("1.8")))
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
val caught3 = intercept[TestFailedException] {
"1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex ("1.8"))
}
assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not fully match the regular expression 1.8")
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught4 = intercept[TestFailedException] {
"1.eight" should (fullyMatch regex (decimalRegex) and (fullyMatch regex ("1.8")))
}
assert(caught4.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught5 = intercept[TestFailedException] {
"1.eight" should ((fullyMatch regex (decimalRegex)) and (fullyMatch regex ("1.8")))
}
assert(caught5.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught6 = intercept[TestFailedException] {
"1.eight" should (fullyMatch regex (decimalRegex) and fullyMatch regex ("1.8"))
}
assert(caught6.getMessage === "\\"1.eight\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression") {
val caught1 = intercept[TestFailedException] {
"1.seven" should (fullyMatch regex (decimalRegex) or (fullyMatch regex ("1.8")))
}
assert(caught1.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
val caught2 = intercept[TestFailedException] {
"1.seven" should ((fullyMatch regex (decimalRegex)) or (fullyMatch regex ("1.8")))
}
assert(caught2.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
val caught3 = intercept[TestFailedException] {
"1.seven" should (fullyMatch regex (decimalRegex) or fullyMatch regex ("1.8"))
}
assert(caught3.getMessage === "\\"1.seven\\" did not fully match the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not fully match the regular expression 1.8")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-and expression used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex ("1.8") and (not fullyMatch regex (decimalRegex)))
}
assert(caught1.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not fullyMatch regex ("1.8")) and (not fullyMatch regex (decimalRegex)))
}
assert(caught2.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught3 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex ("1.8") and not fullyMatch regex (decimalRegex))
}
assert(caught3.getMessage === "\\"1.7\\" did not fully match the regular expression 1.8, but \\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
it("should throw TestFailedException if the string fully matches the regular expression specified as a string when used in a logical-or expression used with not") {
val caught1 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex (decimalRegex) or (not fullyMatch regex ("1.7")))
}
assert(caught1.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not fullyMatch regex (decimalRegex)) or (not fullyMatch regex ("1.7")))
}
assert(caught2.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught3 = intercept[TestFailedException] {
"1.7" should (not fullyMatch regex (decimalRegex) or not fullyMatch regex ("1.7"))
}
assert(caught3.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
val caught4 = intercept[TestFailedException] {
"1.7" should (not (fullyMatch regex (decimalRegex)) or not (fullyMatch regex ("1.7")))
}
assert(caught4.getMessage === "\\"1.7\\" fully matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" fully matched the regular expression 1.7")
}
}
describe("(when the regex is specifed by a actual Regex and with group)") {
describe("(when used with should)") {
it("should do nothing if the string fully matches the regular expression and with group as specified") {
"abbc" should fullyMatch regex ("a(b*)c".r withGroup "bb")
"abbcc" should fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bb") and (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should ((fullyMatch regex ("a(b*)c".r withGroup "bb")) and (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bb") and fullyMatch regex ("a(b*)c".r withGroup "bb"))
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should ((fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") or (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should ((fullyMatch regex ("a(b*)c".r withGroup "bbb")) or (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") or fullyMatch regex ("a(b*)c".r withGroup "bb"))
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bbb", "cc")) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should ((fullyMatch regex ("a(b*)(c*)".r withGroups ("bbb", "cc"))) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bbb", "cc")) or fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
"abbc" should (equal ("abbc") and (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should ((equal ("abbc")) and (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should (equal ("abbc") and fullyMatch regex ("a(b*)c".r withGroup "bb"))
"abbcc" should (equal ("abbcc") and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should ((equal ("abbcc")) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should (equal ("abbcc") and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
"abbc" should (equal ("abbbc") or (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should ((equal ("abbbc")) or (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbc" should (equal ("abbbc") or fullyMatch regex ("a(b*)c".r withGroup "bb"))
"abbcc" should (equal ("abbbcc") or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should ((equal ("abbbcc")) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
"abbcc" should (equal ("abbbcc") or fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
}
it("should throw TestFailedException if the string fully matches the regular expression but does not match specified group") {
val caught1 = intercept[TestFailedException] {
"abbbc" should fullyMatch regex ("a(b*)c".r withGroup "bb")
}
assert(caught1.message === Some("\\"abbbc\\" fully matched the regular expression a(b*)c, but \\"bbb\\" did not match group bb"))
assert(caught1.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
"abbccc" should fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))
}
assert(caught2.message === Some("\\"abbccc\\" fully matched the regular expression a(b*)(c*), but \\"ccc\\" did not match group cc at index 1"))
assert(caught2.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bb") and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught3.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught3.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c".r withGroup "bb")) and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught4.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught4.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bb") and fullyMatch regex ("a(b*)c".r withGroup "bbb"))
}
assert(caught5.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c and group bb, but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught5.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught6 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") and fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught6.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught6.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c".r withGroup "bbb")) and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught7.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught7.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") and fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught8.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught8.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))))
}
assert(caught9.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught9.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))))
}
assert(caught10.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught10.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")))
}
assert(caught11.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc, but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught11.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught12 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught12.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught12.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught13.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught13.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught14.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught14.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") or (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
}
assert(caught15.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught15.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught16 = intercept[TestFailedException] {
"abbc" should ((fullyMatch regex ("a(b*)c".r withGroup "bbb")) or (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
}
assert(caught16.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught16.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught16.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught17 = intercept[TestFailedException] {
"abbc" should (fullyMatch regex ("a(b*)c".r withGroup "bbb") or fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught17.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught17.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught17.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught18 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught18.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught18.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught18.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught19 = intercept[TestFailedException] {
"abbcc" should ((fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught19.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught19.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught19.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught20 = intercept[TestFailedException] {
"abbcc" should (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) or fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught20.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught20.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught20.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught21 = intercept[TestFailedException] {
"abbc" should (equal ("abbc") and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught21.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught21.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught21.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught22 = intercept[TestFailedException] {
"abbc" should ((equal ("abbc")) and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught22.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught22.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught22.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught23 = intercept[TestFailedException] {
"abbc" should (equal ("abbc") and fullyMatch regex ("a(b*)c".r withGroup "bbb"))
}
assert(caught23.getMessage === "\\"abbc\\" equaled \\"abbc\\", but \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb")
assert(caught23.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught23.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught24 = intercept[TestFailedException] {
"abbc" should (equal ("abbbc") and fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught24.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\"")
assert(caught24.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught24.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught25 = intercept[TestFailedException] {
"abbc" should ((equal ("abbbc")) and (fullyMatch regex ("a(b*)c".r withGroup "bbb")))
}
assert(caught25.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\"")
assert(caught25.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught25.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught26 = intercept[TestFailedException] {
"abbc" should (equal ("abbbc") and fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught26.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\"")
assert(caught26.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught26.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught27 = intercept[TestFailedException] {
"abbcc" should (equal ("abbcc") and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))))
}
assert(caught27.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught27.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught27.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught28 = intercept[TestFailedException] {
"abbcc" should ((equal ("abbcc")) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))))
}
assert(caught28.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught28.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught28.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught29 = intercept[TestFailedException] {
"abbcc" should (equal ("abbcc") and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")))
}
assert(caught29.getMessage === "\\"abbcc\\" equaled \\"abbcc\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1")
assert(caught29.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught29.failedCodeLineNumber === Some(thisLineNumber - 4))
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught30 = intercept[TestFailedException] {
"abbcc" should (equal ("abbccc") and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught30.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\"")
assert(caught30.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught30.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught31 = intercept[TestFailedException] {
"abbcc" should ((equal ("abbccc")) and (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught31.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\"")
assert(caught31.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught31.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught32 = intercept[TestFailedException] {
"abbcc" should (equal ("abbccc") and fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught32.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\"")
assert(caught32.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught32.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught33 = intercept[TestFailedException] {
"abbc" should (equal ("abbbc") or (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
}
assert(caught33.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught33.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught33.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught34 = intercept[TestFailedException] {
"abbc" should ((equal ("abbbc")) or (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
}
assert(caught34.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught34.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught34.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught35 = intercept[TestFailedException] {
"abbc" should (equal ("abbbc") or fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
}
assert(caught35.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", and \\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbbb")
assert(caught35.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught35.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught36 = intercept[TestFailedException] {
"abbcc" should (equal ("abbccc") or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught36.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught36.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught36.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught37 = intercept[TestFailedException] {
"abbcc" should ((equal ("abbccc")) or (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
}
assert(caught37.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught37.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught37.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught38 = intercept[TestFailedException] {
"abbcc" should (equal ("abbccc") or fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
}
assert(caught38.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", and \\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group cccc at index 1")
assert(caught38.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught38.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
describe("(when used with should not)") {
it("should do nothing if the string does not fully match the regular expression and with group as specified") {
"abbbc" should not { fullyMatch regex ("a(b*)c".r withGroup "bb") }
"abbbc" should not fullyMatch regex ("a(b*)c".r withGroup "bb")
"abbccc" should not { fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) }
"abbccc" should not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))
"abbbc" should (not (fullyMatch regex ("a(b*)c".r withGroup "bb")) and not (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
"abbbc" should ((not fullyMatch regex ("a(b*)c".r withGroup "bb")) and (not fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
"abbbc" should (not fullyMatch regex ("a(b*)c".r withGroup "bb") and not fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
"abbccc" should (not (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))) and not (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
"abbccc" should ((not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))) and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
"abbccc" should (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) and not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
"abbbc" should (not (fullyMatch regex ("a(b*)c".r withGroup "bbb")) or not (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbbc" should ((not fullyMatch regex ("a(b*)c".r withGroup "bbb")) or (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbbc" should (not fullyMatch regex ("a(b*)c".r withGroup "bbb") or not fullyMatch regex ("a(b*)c".r withGroup "bb"))
"abbbc" should (not (equal ("abbc")) and not (fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
"abbbc" should ((not equal ("abbc")) and (not fullyMatch regex ("a(b*)c".r withGroup "bbbb")))
"abbbc" should (not equal ("abbc") and not fullyMatch regex ("a(b*)c".r withGroup "bbbb"))
"abbccc" should (not (equal ("abbcc")) and not (fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
"abbccc" should ((not equal ("abbcc")) and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc"))))
"abbccc" should (not equal ("abbcc") and not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cccc")))
"abbbc" should (not (equal ("abbbc")) or not (fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbbc" should ((not equal ("abbbc")) or (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
"abbbc" should (not equal ("abbbc") or not fullyMatch regex ("a(b*)c".r withGroup "bb"))
}
it("should throw TestFailedException if the string fully matches the regular expression and with group as specified") {
val caught1 = intercept[TestFailedException] {
"abbc" should not { fullyMatch regex ("a(b*)c".r withGroup "bb") }
}
assert(caught1.message === Some("\\"abbc\\" fully matched the regular expression a(b*)c and group bb"))
assert(caught1.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[TestFailedException] {
"abbc" should not fullyMatch regex ("a(b*)c".r withGroup "bb")
}
assert(caught2.message === Some("\\"abbc\\" fully matched the regular expression a(b*)c and group bb"))
assert(caught2.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[TestFailedException] {
"abbcc" should not { fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")) }
}
assert(caught3.message === Some("\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc"))
assert(caught3.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[TestFailedException] {
"abbcc" should not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))
}
assert(caught4.message === Some("\\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc"))
assert(caught4.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[TestFailedException] {
"abbc" should (not fullyMatch regex ("a(b*)c".r withGroup "bbb") and (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
}
assert(caught5.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught5.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught6 = intercept[TestFailedException] {
"abbc" should ((not fullyMatch regex ("a(b*)c".r withGroup "bbb")) and (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
}
assert(caught6.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught6.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[TestFailedException] {
"abbc" should (not fullyMatch regex ("a(b*)c".r withGroup "bbb") and not fullyMatch regex ("a(b*)c".r withGroup "bb"))
}
assert(caught7.getMessage === "\\"abbc\\" fully matched the regular expression a(b*)c, but \\"bb\\" did not match group bbb, but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught7.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[TestFailedException] {
"abbcc" should (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
}
assert(caught8.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught8.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[TestFailedException] {
"abbcc" should ((not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc"))) and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
}
assert(caught9.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught9.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[TestFailedException] {
"abbcc" should (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "ccc")) and not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
}
assert(caught10.getMessage === "\\"abbcc\\" fully matched the regular expression a(b*)(c*), but \\"cc\\" did not match group ccc at index 1, but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught10.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[TestFailedException] {
"abbc" should (not equal ("abbbc") and (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
}
assert(caught11.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught11.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught12 = intercept[TestFailedException] {
"abbc" should ((not equal ("abbbc")) and (not fullyMatch regex ("a(b*)c".r withGroup "bb")))
}
assert(caught12.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught12.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[TestFailedException] {
"abbc" should (not equal ("abbbc") and not fullyMatch regex ("a(b*)c".r withGroup "bb"))
}
assert(caught13.getMessage === "\\"abb[]c\\" did not equal \\"abb[b]c\\", but \\"abbc\\" fully matched the regular expression a(b*)c and group bb")
assert(caught13.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[TestFailedException] {
"abbcc" should (not equal ("abbccc") and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
}
assert(caught14.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught14.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[TestFailedException] {
"abbcc" should ((not equal ("abbccc")) and (not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc"))))
}
assert(caught15.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught15.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught16 = intercept[TestFailedException] {
"abbcc" should (not equal ("abbccc") and not fullyMatch regex ("a(b*)(c*)".r withGroups ("bb", "cc")))
}
assert(caught16.getMessage === "\\"abbcc[]\\" did not equal \\"abbcc[c]\\", but \\"abbcc\\" fully matched the regular expression a(b*)(c*) and group bb, cc")
assert(caught16.failedCodeFileName === Some("ShouldFullyMatchSpec.scala"))
assert(caught16.failedCodeLineNumber === Some(thisLineNumber - 4))
}
}
}
}
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/ShouldFullyMatchSpec.scala
|
Scala
|
apache-2.0
| 94,133 |
package im.tox.antox.callbacks
import android.content.Context
class AntoxOnGroupJoinRejectedCallback(private var ctx: Context) /* extends GroupJoinRejectedCallback */ {
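// Guards against spawning more than one reconnect thread at a time; the
// commented-out rejection handler below sets it around its 10-second retry.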
private var reconnecting = false
/* override def groupJoinRejected(groupNumber: Int, reason: ToxGroupJoinRejected): Unit = {
if (reason == ToxGroupJoinRejected.NICK_TAKEN) {
if (ToxSingleton.tox.getGroupSelfName(groupNumber).length < Constants.MAX_NAME_LENGTH) {
//FIXME
//ToxSingleton.tox.setGroupSelfName(groupNumber, PreferenceManager
// .getDefaultSharedPreferences(ctx)
// .getString("nickname", ""))
if (!reconnecting) {
new Thread(new Runnable {
override def run(): Unit = {
reconnecting = true
Thread.sleep(10000)
ToxSingleton.tox.reconnectGroup(groupNumber)
reconnecting = false
}
}).start()
}
}
} else {
println("Tox Group Join Rejected: " + reason)
}
} */
}
|
Ansa89/Antox
|
app/src/main/scala/im/tox/antox/callbacks/AntoxOnGroupJoinRejectedCallback.scala
|
Scala
|
gpl-3.0
| 1,026 |
/*
* Copyright (C) 2012 Julien Letrouit
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package frac
import java.lang.StringBuffer
/** Renders the given definition in a simple string */
class StringRenderer {
def render(definition: FractalDefinition, depth: Int): String = {
val res = new StringBuffer()
def callback(symbol: Symbol) {
res.append(symbol.toString)
}
definition.execute(depth, callback _)
res.toString
}
}
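// Hypothetical usage sketch (someDefinition and its symbols are assumptions, not
// part of this file):
//   val renderer = new StringRenderer
//   val text = renderer.render(someDefinition, depth = 2)
//   // `text` is the concatenation of every symbol emitted by the depth-2 expansion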
|
jletroui/frac
|
src/main/scala/frac/StringRenderer.scala
|
Scala
|
apache-2.0
| 964 |
package org.lolczak.common.util
import org.scalatest.{FlatSpec, Matchers}
class ResourceHelperSpec extends FlatSpec with Matchers {
"A resource helper" should "load string content of resource" in {
import Resources._
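// resource"..." is a string interpolator supplied by Resources; judging from this
// assertion, it reads the named classpath resource as text.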
resource"test.txt" shouldBe "test"
}
}
|
lolczak/scala-commons
|
src/test/scala/org/lolczak/common/util/ResourceHelperSpec.scala
|
Scala
|
apache-2.0
| 272 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.util
import org.bdgenomics.adam.rdd.variant.GenotypeRDD
import org.bdgenomics.formats.avro.{ Genotype, GenotypeAllele }
import scala.collection.JavaConversions._
private[avocado] trait RewriteHetsArgs extends Serializable {
/**
* The maximum allele fraction for the alternate allele in a het SNP call.
*
* Set to a negative value to omit.
*/
var maxHetSnpAltAllelicFraction: Float
/**
* The maximum allele fraction for the alternate allele in a het INDEL call.
*
* Set to a negative value to omit.
*/
var maxHetIndelAltAllelicFraction: Float
/**
* If true, does not attempt to rewrite het SNPs.
*/
var disableHetSnpRewriting: Boolean
/**
* If true, does not attempt to rewrite het INDELs.
*/
var disableHetIndelRewriting: Boolean
}
/**
* Rewrites high allelic fraction het genotypes as homozygous alternate calls.
*/
object RewriteHets extends Serializable {
/**
* Identifies high allelic fraction het calls in an RDD of genotypes and
* rewrites them as homozygous alt calls.
*
* @param rdd The RDD of genotypes to filter.
* @param args The arguments to configure the rewriter.
* @return Returns a new RDD of genotypes.
*/
def apply(rdd: GenotypeRDD,
args: RewriteHetsArgs): GenotypeRDD = {
val maxSnpAllelicFraction = args.maxHetSnpAltAllelicFraction
val maxIndelAllelicFraction = args.maxHetIndelAltAllelicFraction
val rewriteHetSnps = !args.disableHetSnpRewriting
val rewriteHetIndels = !args.disableHetIndelRewriting
if (rewriteHetSnps || rewriteHetIndels) {
rdd.transform(gtRdd => gtRdd.map(processGenotype(_,
maxSnpAllelicFraction,
maxIndelAllelicFraction,
rewriteHetSnps,
rewriteHetIndels)))
} else {
rdd
}
}
/**
* Examines a single genotype to see if it should be rewritten.
*
* @param gt The genotype to examine.
* @param maxSnpAllelicFraction The threshold for considering a het SNP call
* to be a miscalled homozygous SNP.
* @param maxIndelAllelicFraction The threshold for considering a het INDEL
* call to be a miscalled homozygous INDEL.
* @param rewriteHetSnps If false, disables SNP checking.
* @param rewriteHetIndels If false, disables INDEL checking.
* @return Returns true if the genotype should be rewritten.
*/
private[util] def shouldRewrite(gt: Genotype,
maxSnpAllelicFraction: Float,
maxIndelAllelicFraction: Float,
rewriteHetSnps: Boolean,
rewriteHetIndels: Boolean): Boolean = {
if (gt.getVariant.getAlternateAllele == null) {
false
} else {
val isSnp = ((gt.getVariant.getReferenceAllele.length == 1) &&
(gt.getVariant.getAlternateAllele.length == 1))
val numAlts = gt.getAlleles.count(_ == GenotypeAllele.ALT)
val isHet = (numAlts != 0) && (numAlts != gt.getAlleles.length)
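// checkAf computes the observed alternate-allele fraction (alt reads / total reads)
// and compares it against the threshold; a genotype missing either depth field is
// never rewritten.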
def checkAf(af: Float): Boolean = {
(Option(gt.getReadDepth), Option(gt.getAlternateReadDepth)) match {
case (Some(dp), Some(alt)) => (alt.toFloat / dp.toFloat) >= af
case _ => false
}
}
if (rewriteHetSnps && isSnp && isHet) {
checkAf(maxSnpAllelicFraction)
} else if (rewriteHetIndels && !isSnp && isHet) {
checkAf(maxIndelAllelicFraction)
} else {
false
}
}
}
/**
* Rewrites a het genotype as a hom alt call.
*
* @param gt The genotype to rewrite.
* @return Returns the rewritten genotype.
*/
private[util] def rewriteGenotype(gt: Genotype): Genotype = {
val numAlleles = gt.getAlleles.length
val newAlleles = List.fill(numAlleles) { GenotypeAllele.ALT }
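// Rebuild the genotype with every allele set to ALT; the old genotype quality is
// dropped because it described the original het call.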
Genotype.newBuilder(gt)
.setGenotypeQuality(null)
.setAlleles(newAlleles)
.build
}
/**
* Processes a single genotype, and rewrites it if it appears to be a
* miscalled hom alt.
*
* @param gt The genotype to examine.
* @param maxSnpAllelicFraction The threshold for considering a het SNP call
* to be a miscalled homozygous SNP.
* @param maxIndelAllelicFraction The threshold for considering a het INDEL
* call to be a miscalled homozygous INDEL.
* @param rewriteHetSnps If false, disables SNP checking.
* @param rewriteHetIndels If false, disables INDEL checking.
* @return Returns the rewritten genotype if the genotype should be rewritten,
* else returns the original genotype.
*/
private[util] def processGenotype(gt: Genotype,
maxSnpAllelicFraction: Float,
maxIndelAllelicFraction: Float,
rewriteHetSnps: Boolean,
rewriteHetIndels: Boolean): Genotype = {
if (shouldRewrite(gt,
maxSnpAllelicFraction,
maxIndelAllelicFraction,
rewriteHetSnps,
rewriteHetIndels)) {
rewriteGenotype(gt)
} else {
gt
}
}
}
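// Illustrative behaviour under assumed values (not part of the original file): a het
// SNP call with alleles ALT/REF, readDepth = 100 and alternateReadDepth = 95 has an
// observed alt fraction of 0.95, so with maxHetSnpAltAllelicFraction = 0.9f,
// processGenotype returns a copy whose alleles are ALT/ALT and whose genotype
// quality has been cleared.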
|
heuermh/bdg-avocado
|
avocado-core/src/main/scala/org/bdgenomics/avocado/util/RewriteHets.scala
|
Scala
|
apache-2.0
| 5,901 |